Output of the new DB entries
This commit is contained in:
parent
bad48e1627
commit
cfbbb9ee3d
2399 changed files with 843193 additions and 43 deletions
@@ -0,0 +1,7 @@
# -*- test-case-name: twisted.logger.test -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Unit tests for L{twisted.logger}.
"""
@@ -0,0 +1,62 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for L{twisted.logger._buffer}.
"""

from zope.interface.verify import verifyObject, BrokenMethodImplementation

from twisted.trial import unittest

from .._observer import ILogObserver
from .._buffer import LimitedHistoryLogObserver



class LimitedHistoryLogObserverTests(unittest.TestCase):
    """
    Tests for L{LimitedHistoryLogObserver}.
    """

    def test_interface(self):
        """
        L{LimitedHistoryLogObserver} provides L{ILogObserver}.
        """
        observer = LimitedHistoryLogObserver(0)
        try:
            verifyObject(ILogObserver, observer)
        except BrokenMethodImplementation as e:
            self.fail(e)


    def test_order(self):
        """
        L{LimitedHistoryLogObserver} saves history in the order it is received.
        """
        size = 4
        events = [dict(n=n) for n in range(size//2)]
        observer = LimitedHistoryLogObserver(size)

        for event in events:
            observer(event)

        outEvents = []
        observer.replayTo(outEvents.append)
        self.assertEqual(events, outEvents)


    def test_limit(self):
        """
        When more events than a L{LimitedHistoryLogObserver}'s maximum size are
        buffered, older events will be dropped.
        """
        size = 4
        events = [dict(n=n) for n in range(size*2)]
        observer = LimitedHistoryLogObserver(size)

        for event in events:
            observer(event)
        outEvents = []
        observer.replayTo(outEvents.append)
        self.assertEqual(events[-size:], outEvents)
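As a quick illustration of the ring-buffer semantics these tests pin down, here is a minimal usage sketch. It assumes the observer is re-exported as twisted.logger.LimitedHistoryLogObserver; the tests above import it from the private _buffer module.

# Minimal sketch of the buffering behaviour; the public import path is an assumption.
from twisted.logger import LimitedHistoryLogObserver

history = LimitedHistoryLogObserver(size=2)    # keep at most 2 events
for n in range(4):
    history(dict(n=n))                         # observers are called with event dicts

replayed = []
history.replayTo(replayed.append)              # replay the buffered events to another observer
assert [e["n"] for e in replayed] == [2, 3]    # only the newest `size` events remain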
@@ -0,0 +1,39 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for L{twisted.logger._capture}.
"""

from twisted.logger import Logger, LogLevel
from twisted.trial.unittest import TestCase

from .._capture import capturedLogs



class LogCaptureTests(TestCase):
    """
    Tests for L{capturedLogs}.
    """

    log = Logger()


    def test_capture(self):
        """
        Events logged within the context are captured.
        """
        foo = object()

        with capturedLogs() as captured:
            self.log.debug("Capture this, please", foo=foo)
            self.log.info("Capture this too, please", foo=foo)

        self.assertTrue(len(captured) == 2)
        self.assertEqual(captured[0]["log_format"], "Capture this, please")
        self.assertEqual(captured[0]["log_level"], LogLevel.debug)
        self.assertEqual(captured[0]["foo"], foo)
        self.assertEqual(captured[1]["log_format"], "Capture this too, please")
        self.assertEqual(captured[1]["log_level"], LogLevel.info)
        self.assertEqual(captured[1]["foo"], foo)
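For context, the context manager under test is typically used as below. The sketch assumes capturedLogs is re-exported from the twisted.logger package; the tests above import it from the private _capture module.

# Minimal sketch of capturedLogs usage; the public import path is an assumption.
from twisted.logger import Logger, LogLevel, capturedLogs

log = Logger(namespace="example")

with capturedLogs() as events:
    log.info("user {name} logged in", name="alice")

assert events[0]["log_level"] is LogLevel.info
assert events[0]["name"] == "alice"    # extra keyword arguments ride along on the event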
@@ -0,0 +1,199 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._file}.
|
||||
"""
|
||||
|
||||
from io import StringIO
|
||||
|
||||
from zope.interface.verify import verifyObject, BrokenMethodImplementation
|
||||
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.python.compat import unicode
|
||||
from .._observer import ILogObserver
|
||||
from .._file import FileLogObserver
|
||||
from .._file import textFileLogObserver
|
||||
|
||||
|
||||
|
||||
class FileLogObserverTests(TestCase):
|
||||
"""
|
||||
Tests for L{FileLogObserver}.
|
||||
"""
|
||||
|
||||
def test_interface(self):
|
||||
"""
|
||||
L{FileLogObserver} is an L{ILogObserver}.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = FileLogObserver(fileHandle, lambda e: unicode(e))
|
||||
try:
|
||||
verifyObject(ILogObserver, observer)
|
||||
except BrokenMethodImplementation as e:
|
||||
self.fail(e)
|
||||
|
||||
|
||||
def test_observeWrites(self):
|
||||
"""
|
||||
L{FileLogObserver} writes to the given file when it observes events.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = FileLogObserver(fileHandle, lambda e: unicode(e))
|
||||
event = dict(x=1)
|
||||
observer(event)
|
||||
self.assertEqual(fileHandle.getvalue(), unicode(event))
|
||||
|
||||
|
||||
def _test_observeWrites(self, what, count):
|
||||
"""
|
||||
Verify that observer performs an expected number of writes when the
|
||||
formatter returns a given value.
|
||||
|
||||
@param what: the value for the formatter to return.
|
||||
@type what: L{unicode}
|
||||
|
||||
@param count: the expected number of writes.
|
||||
@type count: L{int}
|
||||
"""
|
||||
with DummyFile() as fileHandle:
|
||||
observer = FileLogObserver(fileHandle, lambda e: what)
|
||||
event = dict(x=1)
|
||||
observer(event)
|
||||
self.assertEqual(fileHandle.writes, count)
|
||||
|
||||
|
||||
def test_observeWritesNone(self):
|
||||
"""
|
||||
L{FileLogObserver} does not write to the given file when it observes
|
||||
events and C{formatEvent} returns L{None}.
|
||||
"""
|
||||
self._test_observeWrites(None, 0)
|
||||
|
||||
|
||||
def test_observeWritesEmpty(self):
|
||||
"""
|
||||
L{FileLogObserver} does not write to the given file when it observes
|
||||
events and C{formatEvent} returns C{u""}.
|
||||
"""
|
||||
self._test_observeWrites(u"", 0)
|
||||
|
||||
|
||||
def test_observeFlushes(self):
|
||||
"""
|
||||
L{FileLogObserver} calls C{flush()} on the output file when it
|
||||
observes an event.
|
||||
"""
|
||||
with DummyFile() as fileHandle:
|
||||
observer = FileLogObserver(fileHandle, lambda e: unicode(e))
|
||||
event = dict(x=1)
|
||||
observer(event)
|
||||
self.assertEqual(fileHandle.flushes, 1)
|
||||
|
||||
|
||||
class TextFileLogObserverTests(TestCase):
|
||||
"""
|
||||
Tests for L{textFileLogObserver}.
|
||||
"""
|
||||
|
||||
def test_returnsFileLogObserver(self):
|
||||
"""
|
||||
L{textFileLogObserver} returns a L{FileLogObserver}.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = textFileLogObserver(fileHandle)
|
||||
self.assertIsInstance(observer, FileLogObserver)
|
||||
|
||||
|
||||
def test_outFile(self):
|
||||
"""
|
||||
Returned L{FileLogObserver} has the correct outFile.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = textFileLogObserver(fileHandle)
|
||||
self.assertIs(observer._outFile, fileHandle)
|
||||
|
||||
|
||||
def test_timeFormat(self):
|
||||
"""
|
||||
Returned L{FileLogObserver} has the correct time format.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = textFileLogObserver(fileHandle, timeFormat=u"%f")
|
||||
observer(dict(log_format=u"XYZZY", log_time=112345.6))
|
||||
self.assertEqual(fileHandle.getvalue(), u"600000 [-#-] XYZZY\n")
|
||||
|
||||
|
||||
def test_observeFailure(self):
|
||||
"""
|
||||
If the C{"log_failure"} key exists in an event, the observer appends
|
||||
the failure's traceback to the output.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = textFileLogObserver(fileHandle)
|
||||
|
||||
try:
|
||||
1 / 0
|
||||
except ZeroDivisionError:
|
||||
failure = Failure()
|
||||
|
||||
event = dict(log_failure=failure)
|
||||
observer(event)
|
||||
output = fileHandle.getvalue()
|
||||
self.assertTrue(output.split("\n")[1].startswith("\tTraceback "),
|
||||
msg=repr(output))
|
||||
|
||||
|
||||
def test_observeFailureThatRaisesInGetTraceback(self):
|
||||
"""
|
||||
If the C{"log_failure"} key exists in an event, and contains an object
|
||||
that raises when you call its C{getTraceback()}, then the observer
|
||||
appends a message noting the problem, instead of raising.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = textFileLogObserver(fileHandle)
|
||||
event = dict(log_failure=object()) # object has no getTraceback()
|
||||
observer(event)
|
||||
output = fileHandle.getvalue()
|
||||
expected = (
|
||||
"(UNABLE TO OBTAIN TRACEBACK FROM EVENT)"
|
||||
)
|
||||
self.assertIn(expected, output)
|
||||
|
||||
|
||||
|
||||
class DummyFile(object):
|
||||
"""
|
||||
File that counts writes and flushes.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.writes = 0
|
||||
self.flushes = 0
|
||||
|
||||
|
||||
def write(self, data):
|
||||
"""
|
||||
Write data.
|
||||
|
||||
@param data: data
|
||||
@type data: L{unicode} or L{bytes}
|
||||
"""
|
||||
self.writes += 1
|
||||
|
||||
|
||||
def flush(self):
|
||||
"""
|
||||
Flush buffers.
|
||||
"""
|
||||
self.flushes += 1
|
||||
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
pass
|
||||
|
|
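The file-observer tests above drive FileLogObserver directly with StringIO; in ordinary use the observer is attached to a Logger. A minimal sketch using the public twisted.logger names:

# Sketch of wiring a Logger to textFileLogObserver; the output shown in the
# final comment is an example, not a captured value.
from io import StringIO

from twisted.logger import Logger, textFileLogObserver

out = StringIO()
observer = textFileLogObserver(out, timeFormat=u"%Y-%m-%d")
log = Logger(namespace="example", observer=observer)

log.info("hello {who}", who="world")
print(out.getvalue())    # e.g. u"2013-09-24 [example#info] hello world\n"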
@@ -0,0 +1,411 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._filter}.
|
||||
"""
|
||||
|
||||
from zope.interface.verify import verifyObject, BrokenMethodImplementation
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from .._levels import InvalidLogLevelError
|
||||
from .._levels import LogLevel
|
||||
from .._observer import ILogObserver
|
||||
from .._observer import LogPublisher
|
||||
from .._filter import FilteringLogObserver
|
||||
from .._filter import PredicateResult
|
||||
from .._filter import LogLevelFilterPredicate
|
||||
|
||||
|
||||
|
||||
class FilteringLogObserverTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{FilteringLogObserver}.
|
||||
"""
|
||||
|
||||
def test_interface(self):
|
||||
"""
|
||||
L{FilteringLogObserver} is an L{ILogObserver}.
|
||||
"""
|
||||
observer = FilteringLogObserver(lambda e: None, ())
|
||||
try:
|
||||
verifyObject(ILogObserver, observer)
|
||||
except BrokenMethodImplementation as e:
|
||||
self.fail(e)
|
||||
|
||||
|
||||
def filterWith(self, filters, other=False):
|
||||
"""
|
||||
Apply a set of pre-defined filters on a known set of events and return
|
||||
the filtered list of event numbers.
|
||||
|
||||
The pre-defined events are four events with a C{count} attribute set to
|
||||
C{0}, C{1}, C{2}, and C{3}.
|
||||
|
||||
@param filters: names of the filters to apply.
|
||||
|
||||
Options are:
|
||||
|
||||
- C{"twoMinus"} (count <=2),
|
||||
|
||||
- C{"twoPlus"} (count >= 2),
|
||||
|
||||
- C{"notTwo"} (count != 2),
|
||||
|
||||
- C{"no"} (False).
|
||||
|
||||
@type filters: iterable of str
|
||||
|
||||
@param other: Whether to return a list of filtered events as well.
|
||||
@type other: L{bool}
|
||||
|
||||
@return: event numbers or 2-tuple of lists of event numbers.
|
||||
@rtype: L{list} of L{int} or 2-L{tuple} of L{list} of L{int}
|
||||
"""
|
||||
events = [
|
||||
dict(count=0),
|
||||
dict(count=1),
|
||||
dict(count=2),
|
||||
dict(count=3),
|
||||
]
|
||||
|
||||
class Filters(object):
|
||||
@staticmethod
|
||||
def twoMinus(event):
|
||||
"""
|
||||
count <= 2
|
||||
|
||||
@param event: an event
|
||||
@type event: dict
|
||||
|
||||
@return: L{PredicateResult.yes} if C{event["count"] <= 2},
|
||||
otherwise L{PredicateResult.maybe}.
|
||||
"""
|
||||
if event["count"] <= 2:
|
||||
return PredicateResult.yes
|
||||
return PredicateResult.maybe
|
||||
|
||||
@staticmethod
|
||||
def twoPlus(event):
|
||||
"""
|
||||
count >= 2
|
||||
|
||||
@param event: an event
|
||||
@type event: dict
|
||||
|
||||
@return: L{PredicateResult.yes} if C{event["count"] >= 2},
|
||||
otherwise L{PredicateResult.maybe}.
|
||||
"""
|
||||
if event["count"] >= 2:
|
||||
return PredicateResult.yes
|
||||
return PredicateResult.maybe
|
||||
|
||||
@staticmethod
|
||||
def notTwo(event):
|
||||
"""
|
||||
count != 2
|
||||
|
||||
@param event: an event
|
||||
@type event: dict
|
||||
|
||||
@return: L{PredicateResult.yes} if C{event["count"] != 2},
|
||||
otherwise L{PredicateResult.maybe}.
|
||||
"""
|
||||
if event["count"] == 2:
|
||||
return PredicateResult.no
|
||||
return PredicateResult.maybe
|
||||
|
||||
@staticmethod
|
||||
def no(event):
|
||||
"""
|
||||
No way, man.
|
||||
|
||||
@param event: an event
|
||||
@type event: dict
|
||||
|
||||
@return: L{PredicateResult.no}
|
||||
"""
|
||||
return PredicateResult.no
|
||||
|
||||
@staticmethod
|
||||
def bogus(event):
|
||||
"""
|
||||
Bogus result.
|
||||
|
||||
@param event: an event
|
||||
@type event: dict
|
||||
|
||||
@return: something other than a valid predicate result.
|
||||
"""
|
||||
return None
|
||||
|
||||
predicates = (getattr(Filters, f) for f in filters)
|
||||
eventsSeen = []
|
||||
eventsNotSeen = []
|
||||
trackingObserver = eventsSeen.append
|
||||
if other:
|
||||
extra = [eventsNotSeen.append]
|
||||
else:
|
||||
extra = []
|
||||
filteringObserver = FilteringLogObserver(
|
||||
trackingObserver, predicates, *extra
|
||||
)
|
||||
for e in events:
|
||||
filteringObserver(e)
|
||||
|
||||
if extra:
|
||||
return (
|
||||
[e["count"] for e in eventsSeen],
|
||||
[e["count"] for e in eventsNotSeen],
|
||||
)
|
||||
return [e["count"] for e in eventsSeen]
|
||||
|
||||
|
||||
def test_shouldLogEventNoFilters(self):
|
||||
"""
|
||||
No filters: all events come through.
|
||||
"""
|
||||
self.assertEqual(self.filterWith([]), [0, 1, 2, 3])
|
||||
|
||||
|
||||
def test_shouldLogEventNoFilter(self):
|
||||
"""
|
||||
Filter with negative predicate result.
|
||||
"""
|
||||
self.assertEqual(self.filterWith(["notTwo"]), [0, 1, 3])
|
||||
|
||||
|
||||
def test_shouldLogEventOtherObserver(self):
|
||||
"""
|
||||
Filtered results get sent to the other observer, if passed.
|
||||
"""
|
||||
self.assertEqual(self.filterWith(["notTwo"], True), ([0, 1, 3], [2]))
|
||||
|
||||
|
||||
def test_shouldLogEventYesFilter(self):
|
||||
"""
|
||||
Filter with positive predicate result.
|
||||
"""
|
||||
self.assertEqual(self.filterWith(["twoPlus"]), [0, 1, 2, 3])
|
||||
|
||||
|
||||
def test_shouldLogEventYesNoFilter(self):
|
||||
"""
|
||||
Series of filters with positive and negative predicate results.
|
||||
"""
|
||||
self.assertEqual(self.filterWith(["twoPlus", "no"]), [2, 3])
|
||||
|
||||
|
||||
def test_shouldLogEventYesYesNoFilter(self):
|
||||
"""
|
||||
Series of filters with positive, positive and negative predicate
|
||||
results.
|
||||
"""
|
||||
self.assertEqual(
|
||||
self.filterWith(["twoPlus", "twoMinus", "no"]),
|
||||
[0, 1, 2, 3]
|
||||
)
|
||||
|
||||
|
||||
def test_shouldLogEventBadPredicateResult(self):
|
||||
"""
|
||||
Filter with invalid predicate result.
|
||||
"""
|
||||
self.assertRaises(TypeError, self.filterWith, ["bogus"])
|
||||
|
||||
|
||||
def test_call(self):
|
||||
"""
|
||||
Test filtering results from each predicate type.
|
||||
"""
|
||||
e = dict(obj=object())
|
||||
|
||||
def callWithPredicateResult(result):
|
||||
seen = []
|
||||
observer = FilteringLogObserver(
|
||||
lambda e: seen.append(e),
|
||||
(lambda e: result,)
|
||||
)
|
||||
observer(e)
|
||||
return seen
|
||||
|
||||
self.assertIn(e, callWithPredicateResult(PredicateResult.yes))
|
||||
self.assertIn(e, callWithPredicateResult(PredicateResult.maybe))
|
||||
self.assertNotIn(e, callWithPredicateResult(PredicateResult.no))
|
||||
|
||||
|
||||
def test_trace(self):
|
||||
"""
|
||||
Tracing keeps track of forwarding through the filtering observer.
|
||||
"""
|
||||
event = dict(log_trace=[])
|
||||
|
||||
oYes = lambda e: None
|
||||
oNo = lambda e: None
|
||||
|
||||
def testObserver(e):
|
||||
self.assertIs(e, event)
|
||||
self.assertEqual(
|
||||
event["log_trace"],
|
||||
[
|
||||
(publisher, yesFilter),
|
||||
(yesFilter, oYes),
|
||||
(publisher, noFilter),
|
||||
# ... noFilter doesn't call oNo
|
||||
(publisher, oTest),
|
||||
]
|
||||
)
|
||||
oTest = testObserver
|
||||
|
||||
yesFilter = FilteringLogObserver(
|
||||
oYes,
|
||||
(lambda e: PredicateResult.yes,)
|
||||
)
|
||||
noFilter = FilteringLogObserver(
|
||||
oNo,
|
||||
(lambda e: PredicateResult.no,)
|
||||
)
|
||||
|
||||
publisher = LogPublisher(yesFilter, noFilter, testObserver)
|
||||
publisher(event)
|
||||
|
||||
|
||||
|
||||
class LogLevelFilterPredicateTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{LogLevelFilterPredicate}.
|
||||
"""
|
||||
|
||||
def test_defaultLogLevel(self):
|
||||
"""
|
||||
Default log level is used.
|
||||
"""
|
||||
predicate = LogLevelFilterPredicate()
|
||||
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace(None),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace(""),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("rocker.cool.namespace"),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
|
||||
|
||||
def test_setLogLevel(self):
|
||||
"""
|
||||
Setting and retrieving log levels.
|
||||
"""
|
||||
predicate = LogLevelFilterPredicate()
|
||||
|
||||
predicate.setLogLevelForNamespace(None, LogLevel.error)
|
||||
predicate.setLogLevelForNamespace("twext.web2", LogLevel.debug)
|
||||
predicate.setLogLevelForNamespace("twext.web2.dav", LogLevel.warn)
|
||||
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace(None),
|
||||
LogLevel.error
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twisted"),
|
||||
LogLevel.error
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2"),
|
||||
LogLevel.debug
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2.dav"),
|
||||
LogLevel.warn
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2.dav.test"),
|
||||
LogLevel.warn
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2.dav.test1.test2"),
|
||||
LogLevel.warn
|
||||
)
|
||||
|
||||
|
||||
def test_setInvalidLogLevel(self):
|
||||
"""
|
||||
Can't pass invalid log levels to C{setLogLevelForNamespace()}.
|
||||
"""
|
||||
predicate = LogLevelFilterPredicate()
|
||||
|
||||
self.assertRaises(
|
||||
InvalidLogLevelError,
|
||||
predicate.setLogLevelForNamespace, "twext.web2", object()
|
||||
)
|
||||
|
||||
# Level must be a constant, not the name of a constant
|
||||
self.assertRaises(
|
||||
InvalidLogLevelError,
|
||||
predicate.setLogLevelForNamespace, "twext.web2", "debug"
|
||||
)
|
||||
|
||||
|
||||
def test_clearLogLevels(self):
|
||||
"""
|
||||
Clearing log levels.
|
||||
"""
|
||||
predicate = LogLevelFilterPredicate()
|
||||
|
||||
predicate.setLogLevelForNamespace("twext.web2", LogLevel.debug)
|
||||
predicate.setLogLevelForNamespace("twext.web2.dav", LogLevel.error)
|
||||
|
||||
predicate.clearLogLevels()
|
||||
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twisted"),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2"),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2.dav"),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2.dav.test"),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
self.assertEqual(
|
||||
predicate.logLevelForNamespace("twext.web2.dav.test1.test2"),
|
||||
predicate.defaultLogLevel
|
||||
)
|
||||
|
||||
|
||||
def test_filtering(self):
|
||||
"""
|
||||
Events are filtered based on log level/namespace.
|
||||
"""
|
||||
predicate = LogLevelFilterPredicate()
|
||||
|
||||
predicate.setLogLevelForNamespace(None, LogLevel.error)
|
||||
predicate.setLogLevelForNamespace("twext.web2", LogLevel.debug)
|
||||
predicate.setLogLevelForNamespace("twext.web2.dav", LogLevel.warn)
|
||||
|
||||
def checkPredicate(namespace, level, expectedResult):
|
||||
event = dict(log_namespace=namespace, log_level=level)
|
||||
self.assertEqual(expectedResult, predicate(event))
|
||||
|
||||
checkPredicate("", LogLevel.debug, PredicateResult.no)
|
||||
checkPredicate("", LogLevel.error, PredicateResult.maybe)
|
||||
|
||||
checkPredicate("twext.web2", LogLevel.debug, PredicateResult.maybe)
|
||||
checkPredicate("twext.web2", LogLevel.error, PredicateResult.maybe)
|
||||
|
||||
checkPredicate("twext.web2.dav", LogLevel.debug, PredicateResult.no)
|
||||
checkPredicate("twext.web2.dav", LogLevel.error, PredicateResult.maybe)
|
||||
|
||||
checkPredicate(None, LogLevel.critical, PredicateResult.no)
|
||||
checkPredicate("twext.web2", None, PredicateResult.no)
|
||||
|
|
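The predicate/observer pairing exercised above is normally assembled from the public twisted.logger names; a minimal sketch of level-based filtering:

# Sketch of FilteringLogObserver with a LogLevelFilterPredicate: events below
# the configured level for their namespace are dropped before they reach the
# wrapped observer.
from twisted.logger import (
    FilteringLogObserver, LogLevel, LogLevelFilterPredicate
)

seen = []
predicate = LogLevelFilterPredicate(defaultLogLevel=LogLevel.warn)
predicate.setLogLevelForNamespace("noisy.subsystem", LogLevel.error)

observer = FilteringLogObserver(seen.append, [predicate])

observer(dict(log_namespace="app", log_level=LogLevel.info))              # dropped
observer(dict(log_namespace="app", log_level=LogLevel.warn))              # kept
observer(dict(log_namespace="noisy.subsystem", log_level=LogLevel.warn))  # dropped

assert [e["log_namespace"] for e in seen] == ["app"]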
@@ -0,0 +1,323 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._format}.
|
||||
"""
|
||||
|
||||
from itertools import count
|
||||
import json
|
||||
|
||||
try:
|
||||
from time import tzset
|
||||
# We should upgrade to a version of pyflakes that does not require this.
|
||||
tzset
|
||||
except ImportError:
|
||||
tzset = None
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from .._format import formatEvent
|
||||
from .._flatten import (
|
||||
flattenEvent, extractField, KeyFlattener, aFormatter
|
||||
)
|
||||
|
||||
|
||||
|
||||
class FlatFormattingTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for flattened event formatting functions.
|
||||
"""
|
||||
|
||||
def test_formatFlatEvent(self):
|
||||
"""
|
||||
L{flattenEvent} will "flatten" an event so that, if scrubbed of all but
|
||||
serializable objects, it will preserve all necessary data to be
|
||||
formatted once serialized. When presented with an event thusly
|
||||
flattened, L{formatEvent} will produce the same output.
|
||||
"""
|
||||
counter = count()
|
||||
|
||||
class Ephemeral(object):
|
||||
attribute = "value"
|
||||
|
||||
event1 = dict(
|
||||
log_format=(
|
||||
"callable: {callme()} "
|
||||
"attribute: {object.attribute} "
|
||||
"numrepr: {number!r} "
|
||||
"numstr: {number!s} "
|
||||
"strrepr: {string!r} "
|
||||
"unistr: {unistr!s}"
|
||||
),
|
||||
callme=lambda: next(counter), object=Ephemeral(),
|
||||
number=7, string="hello", unistr=u"ö"
|
||||
)
|
||||
|
||||
flattenEvent(event1)
|
||||
|
||||
event2 = dict(event1)
|
||||
del event2["callme"]
|
||||
del event2["object"]
|
||||
event3 = json.loads(json.dumps(event2))
|
||||
self.assertEqual(
|
||||
formatEvent(event3),
|
||||
(
|
||||
u"callable: 0 "
|
||||
"attribute: value "
|
||||
"numrepr: 7 "
|
||||
"numstr: 7 "
|
||||
"strrepr: 'hello' "
|
||||
u"unistr: ö"
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def test_formatFlatEventBadFormat(self):
|
||||
"""
|
||||
If the format string is invalid, an error is produced.
|
||||
"""
|
||||
event1 = dict(
|
||||
log_format=(
|
||||
"strrepr: {string!X}"
|
||||
),
|
||||
string="hello",
|
||||
)
|
||||
|
||||
flattenEvent(event1)
|
||||
event2 = json.loads(json.dumps(event1))
|
||||
|
||||
self.assertTrue(
|
||||
formatEvent(event2).startswith(u"Unable to format event")
|
||||
)
|
||||
|
||||
|
||||
def test_formatFlatEventWithMutatedFields(self):
|
||||
"""
|
||||
L{formatEvent} will prefer the stored C{str()} or C{repr()} value for
|
||||
an object, in case the object has since been mutated.
|
||||
"""
|
||||
class Unpersistable(object):
|
||||
"""
|
||||
Unpersistable object.
|
||||
"""
|
||||
destructed = False
|
||||
|
||||
def selfDestruct(self):
|
||||
"""
|
||||
Self destruct.
|
||||
"""
|
||||
self.destructed = True
|
||||
|
||||
def __repr__(self):
|
||||
if self.destructed:
|
||||
return "post-serialization garbage"
|
||||
else:
|
||||
return "un-persistable"
|
||||
|
||||
up = Unpersistable()
|
||||
event1 = dict(
|
||||
log_format="unpersistable: {unpersistable}", unpersistable=up
|
||||
)
|
||||
|
||||
flattenEvent(event1)
|
||||
up.selfDestruct()
|
||||
|
||||
self.assertEqual(formatEvent(event1), "unpersistable: un-persistable")
|
||||
|
||||
|
||||
def test_keyFlattening(self):
|
||||
"""
|
||||
Test that L{KeyFlattener.flatKey} returns the expected keys for format
|
||||
fields.
|
||||
"""
|
||||
|
||||
def keyFromFormat(format):
|
||||
for (
|
||||
literalText,
|
||||
fieldName,
|
||||
formatSpec,
|
||||
conversion,
|
||||
) in aFormatter.parse(format):
|
||||
return KeyFlattener().flatKey(
|
||||
fieldName, formatSpec, conversion
|
||||
)
|
||||
|
||||
# No name
|
||||
try:
|
||||
self.assertEqual(keyFromFormat("{}"), "!:")
|
||||
except ValueError:
|
||||
# In python 2.6, an empty field name causes Formatter.parse to
|
||||
# raise ValueError.
|
||||
# In Python 2.7, it's allowed, so this exception is unexpected.
|
||||
raise
|
||||
|
||||
# Just a name
|
||||
self.assertEqual(keyFromFormat("{foo}"), "foo!:")
|
||||
|
||||
# Add conversion
|
||||
self.assertEqual(keyFromFormat("{foo!s}"), "foo!s:")
|
||||
self.assertEqual(keyFromFormat("{foo!r}"), "foo!r:")
|
||||
|
||||
# Add format spec
|
||||
self.assertEqual(keyFromFormat("{foo:%s}"), "foo!:%s")
|
||||
self.assertEqual(keyFromFormat("{foo:!}"), "foo!:!")
|
||||
self.assertEqual(keyFromFormat("{foo::}"), "foo!::")
|
||||
|
||||
# Both
|
||||
self.assertEqual(keyFromFormat("{foo!s:%s}"), "foo!s:%s")
|
||||
self.assertEqual(keyFromFormat("{foo!s:!}"), "foo!s:!")
|
||||
self.assertEqual(keyFromFormat("{foo!s::}"), "foo!s::")
|
||||
[keyPlusLiteral] = aFormatter.parse("{x}")
|
||||
key = keyPlusLiteral[1:]
|
||||
sameFlattener = KeyFlattener()
|
||||
self.assertEqual(sameFlattener.flatKey(*key), "x!:")
|
||||
self.assertEqual(sameFlattener.flatKey(*key), "x!:/2")
|
||||
|
||||
|
||||
def _test_formatFlatEvent_fieldNamesSame(self, event=None):
|
||||
"""
|
||||
The same format field used twice in one event is rendered twice.
|
||||
|
||||
@param event: An event to flatten. If L{None}, create a new event.
|
||||
@return: C{event} or the event created.
|
||||
"""
|
||||
if event is None:
|
||||
counter = count()
|
||||
|
||||
class CountStr(object):
|
||||
"""
|
||||
Hack
|
||||
"""
|
||||
def __str__(self):
|
||||
return str(next(counter))
|
||||
|
||||
event = dict(
|
||||
log_format="{x} {x}",
|
||||
x=CountStr(),
|
||||
)
|
||||
|
||||
flattenEvent(event)
|
||||
self.assertEqual(formatEvent(event), u"0 1")
|
||||
|
||||
return event
|
||||
|
||||
|
||||
def test_formatFlatEventFieldNamesSame(self):
|
||||
"""
|
||||
The same format field used twice in one event is rendered twice.
|
||||
"""
|
||||
self._test_formatFlatEvent_fieldNamesSame()
|
||||
|
||||
|
||||
def test_formatFlatEventFieldNamesSameAgain(self):
|
||||
"""
|
||||
The same event flattened twice gives the same (already rendered)
|
||||
result.
|
||||
"""
|
||||
event = self._test_formatFlatEvent_fieldNamesSame()
|
||||
self._test_formatFlatEvent_fieldNamesSame(event)
|
||||
|
||||
|
||||
def test_formatEventFlatTrailingText(self):
|
||||
"""
|
||||
L{formatEvent} will handle a flattened event with trailing text after
|
||||
a replacement field.
|
||||
"""
|
||||
event = dict(
|
||||
log_format="test {x} trailing",
|
||||
x='value',
|
||||
)
|
||||
flattenEvent(event)
|
||||
|
||||
result = formatEvent(event)
|
||||
|
||||
self.assertEqual(result, u"test value trailing")
|
||||
|
||||
|
||||
def test_extractField(self, flattenFirst=lambda x: x):
|
||||
"""
|
||||
L{extractField} will extract a field used in the format string.
|
||||
|
||||
@param flattenFirst: callable to flatten an event
|
||||
"""
|
||||
class ObjectWithRepr(object):
|
||||
def __repr__(self):
|
||||
return "repr"
|
||||
|
||||
class Something(object):
|
||||
def __init__(self):
|
||||
self.number = 7
|
||||
self.object = ObjectWithRepr()
|
||||
|
||||
def __getstate__(self):
|
||||
raise NotImplementedError("Just in case.")
|
||||
|
||||
event = dict(
|
||||
log_format="{something.number} {something.object}",
|
||||
something=Something(),
|
||||
)
|
||||
|
||||
flattened = flattenFirst(event)
|
||||
|
||||
def extract(field):
|
||||
return extractField(field, flattened)
|
||||
|
||||
self.assertEqual(extract("something.number"), 7)
|
||||
self.assertEqual(extract("something.number!s"), "7")
|
||||
self.assertEqual(extract("something.object!s"), "repr")
|
||||
|
||||
|
||||
def test_extractFieldFlattenFirst(self):
|
||||
"""
|
||||
L{extractField} behaves identically if the event is explicitly
|
||||
flattened first.
|
||||
"""
|
||||
def flattened(evt):
|
||||
flattenEvent(evt)
|
||||
return evt
|
||||
self.test_extractField(flattened)
|
||||
|
||||
|
||||
def test_flattenEventWithoutFormat(self):
|
||||
"""
|
||||
L{flattenEvent} will do nothing to an event with no format string.
|
||||
"""
|
||||
inputEvent = {'a': 'b', 'c': 1}
|
||||
flattenEvent(inputEvent)
|
||||
self.assertEqual(inputEvent, {'a': 'b', 'c': 1})
|
||||
|
||||
|
||||
def test_flattenEventWithInertFormat(self):
|
||||
"""
|
||||
L{flattenEvent} will do nothing to an event with a format string that
|
||||
contains no format fields.
|
||||
"""
|
||||
inputEvent = {'a': 'b', 'c': 1, 'log_format': 'simple message'}
|
||||
flattenEvent(inputEvent)
|
||||
self.assertEqual(
|
||||
inputEvent,
|
||||
{
|
||||
'a': 'b',
|
||||
'c': 1,
|
||||
'log_format': 'simple message',
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_flattenEventWithNoneFormat(self):
|
||||
"""
|
||||
L{flattenEvent} will do nothing to an event with log_format set to
|
||||
None.
|
||||
"""
|
||||
inputEvent = {'a': 'b', 'c': 1, 'log_format': None}
|
||||
flattenEvent(inputEvent)
|
||||
self.assertEqual(
|
||||
inputEvent,
|
||||
{
|
||||
'a': 'b',
|
||||
'c': 1,
|
||||
'log_format': None,
|
||||
}
|
||||
)
|
||||
|
|
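The flatten/format round trip that test_formatFlatEvent verifies can be summarised in a few lines. formatEvent is public; flattenEvent is imported here from the private _flatten module, mirroring the imports used by these tests.

# Sketch of flattening an event so it survives JSON serialization; the Widget
# class is a stand-in for any live object referenced by the format string.
import json

from twisted.logger import formatEvent
from twisted.logger._flatten import flattenEvent


class Widget(object):
    name = "widget"


event = dict(log_format="{obj.name} has {count} items", obj=Widget(), count=3)
flattenEvent(event)    # captures string renderings of the referenced fields now

wire = json.dumps({k: v for k, v in event.items() if k != "obj"})
restored = json.loads(wire)    # the live Widget is gone...
assert formatEvent(restored) == u"widget has 3 items"    # ...but formatting still works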
@@ -0,0 +1,783 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._format}.
|
||||
"""
|
||||
|
||||
from twisted.python.test.test_tzhelper import mktime, addTZCleanup, setTZ
|
||||
|
||||
try:
|
||||
from time import tzset
|
||||
# We should upgrade to a version of pyflakes that does not require this.
|
||||
tzset
|
||||
except ImportError:
|
||||
tzset = None
|
||||
|
||||
from twisted.trial import unittest
|
||||
from twisted.trial.unittest import SkipTest
|
||||
|
||||
from twisted.python.compat import _PY3, unicode
|
||||
from .._levels import LogLevel
|
||||
from .._format import (
|
||||
formatEvent, formatUnformattableEvent, formatTime,
|
||||
formatEventAsClassicLogText, formatWithCall, eventAsText
|
||||
)
|
||||
from twisted.python.failure import Failure
|
||||
|
||||
|
||||
|
||||
class FormattingTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for basic event formatting functions.
|
||||
"""
|
||||
|
||||
def test_formatEvent(self):
|
||||
"""
|
||||
L{formatEvent} will format an event according to several rules:
|
||||
|
||||
- A string with no formatting instructions will be passed straight
|
||||
through.
|
||||
|
||||
- PEP 3101 strings will be formatted using the keys and values of
|
||||
the event as named fields.
|
||||
|
||||
- PEP 3101 keys ending with C{()} will be treated as instructions
|
||||
to call that key (which ought to be a callable) before
|
||||
formatting.
|
||||
|
||||
L{formatEvent} will always return L{unicode}, and if given bytes, will
|
||||
always treat its format string as UTF-8 encoded.
|
||||
"""
|
||||
def format(logFormat, **event):
|
||||
event["log_format"] = logFormat
|
||||
result = formatEvent(event)
|
||||
self.assertIs(type(result), unicode)
|
||||
return result
|
||||
|
||||
self.assertEqual(u"", format(b""))
|
||||
self.assertEqual(u"", format(u""))
|
||||
self.assertEqual(u"abc", format("{x}", x="abc"))
|
||||
self.assertEqual(
|
||||
u"no, yes.",
|
||||
format(
|
||||
"{not_called}, {called()}.",
|
||||
not_called="no", called=lambda: "yes"
|
||||
)
|
||||
)
|
||||
self.assertEqual(u"S\xe1nchez", format(b"S\xc3\xa1nchez"))
|
||||
badResult = format(b"S\xe1nchez")
|
||||
self.assertIn(u"Unable to format event", badResult)
|
||||
maybeResult = format(b"S{a!s}nchez", a=b"\xe1")
|
||||
# The behavior of unicode.format("{x}", x=bytes) differs on py2 and
|
||||
# py3. Perhaps we should make our modified formatting more consistent
|
||||
# than this? -glyph
|
||||
if not _PY3:
|
||||
self.assertIn(u"Unable to format event", maybeResult)
|
||||
else:
|
||||
self.assertIn(u"Sb'\\xe1'nchez", maybeResult)
|
||||
|
||||
xe1 = unicode(repr(b"\xe1"))
|
||||
self.assertIn(u"S" + xe1 + "nchez", format(b"S{a!r}nchez", a=b"\xe1"))
|
||||
|
||||
|
||||
def test_formatEventNoFormat(self):
|
||||
"""
|
||||
Formatting an event with no format.
|
||||
"""
|
||||
event = dict(foo=1, bar=2)
|
||||
result = formatEvent(event)
|
||||
|
||||
self.assertEqual(u"", result)
|
||||
|
||||
|
||||
def test_formatEventWeirdFormat(self):
|
||||
"""
|
||||
Formatting an event with a bogus format.
|
||||
"""
|
||||
event = dict(log_format=object(), foo=1, bar=2)
|
||||
result = formatEvent(event)
|
||||
|
||||
self.assertIn("Log format must be unicode or bytes", result)
|
||||
self.assertIn(repr(event), result)
|
||||
|
||||
|
||||
def test_formatUnformattableEvent(self):
|
||||
"""
|
||||
Formatting an event that's just plain out to get us.
|
||||
"""
|
||||
event = dict(log_format="{evil()}", evil=lambda: 1 / 0)
|
||||
result = formatEvent(event)
|
||||
|
||||
self.assertIn("Unable to format event", result)
|
||||
self.assertIn(repr(event), result)
|
||||
|
||||
|
||||
def test_formatUnformattableEventWithUnformattableKey(self):
|
||||
"""
|
||||
Formatting an unformattable event that has an unformattable key.
|
||||
"""
|
||||
event = {
|
||||
"log_format": "{evil()}",
|
||||
"evil": lambda: 1 / 0,
|
||||
Unformattable(): "gurk",
|
||||
}
|
||||
result = formatEvent(event)
|
||||
self.assertIn("MESSAGE LOST: unformattable object logged:", result)
|
||||
self.assertIn("Recoverable data:", result)
|
||||
self.assertIn("Exception during formatting:", result)
|
||||
|
||||
|
||||
def test_formatUnformattableEventWithUnformattableValue(self):
|
||||
"""
|
||||
Formatting an unformattable event that has an unformattable value.
|
||||
"""
|
||||
event = dict(
|
||||
log_format="{evil()}",
|
||||
evil=lambda: 1 / 0,
|
||||
gurk=Unformattable(),
|
||||
)
|
||||
result = formatEvent(event)
|
||||
self.assertIn("MESSAGE LOST: unformattable object logged:", result)
|
||||
self.assertIn("Recoverable data:", result)
|
||||
self.assertIn("Exception during formatting:", result)
|
||||
|
||||
|
||||
def test_formatUnformattableEventWithUnformattableErrorOMGWillItStop(self):
|
||||
"""
|
||||
Formatting an unformattable event when the reported exception is itself unformattable.
|
||||
"""
|
||||
event = dict(
|
||||
log_format="{evil()}",
|
||||
evil=lambda: 1 / 0,
|
||||
recoverable="okay",
|
||||
)
|
||||
# Call formatUnformattableEvent() directly with a bogus exception.
|
||||
result = formatUnformattableEvent(event, Unformattable())
|
||||
self.assertIn("MESSAGE LOST: unformattable object logged:", result)
|
||||
self.assertIn(repr("recoverable") + " = " + repr("okay"), result)
|
||||
|
||||
|
||||
|
||||
class TimeFormattingTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for time formatting functions.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
addTZCleanup(self)
|
||||
|
||||
|
||||
def test_formatTimeWithDefaultFormat(self):
|
||||
"""
|
||||
Default time stamp format is RFC 3339 and offset respects the timezone
|
||||
as set by the standard C{TZ} environment variable and L{tzset} API.
|
||||
"""
|
||||
if tzset is None:
|
||||
raise SkipTest(
|
||||
"Platform cannot change timezone; unable to verify offsets."
|
||||
)
|
||||
|
||||
def testForTimeZone(name, expectedDST, expectedSTD):
|
||||
setTZ(name)
|
||||
|
||||
localDST = mktime((2006, 6, 30, 0, 0, 0, 4, 181, 1))
|
||||
localSTD = mktime((2007, 1, 31, 0, 0, 0, 2, 31, 0))
|
||||
|
||||
self.assertEqual(formatTime(localDST), expectedDST)
|
||||
self.assertEqual(formatTime(localSTD), expectedSTD)
|
||||
|
||||
# UTC
|
||||
testForTimeZone(
|
||||
"UTC+00",
|
||||
u"2006-06-30T00:00:00+0000",
|
||||
u"2007-01-31T00:00:00+0000",
|
||||
)
|
||||
|
||||
# West of UTC
|
||||
testForTimeZone(
|
||||
"EST+05EDT,M4.1.0,M10.5.0",
|
||||
u"2006-06-30T00:00:00-0400",
|
||||
u"2007-01-31T00:00:00-0500",
|
||||
)
|
||||
|
||||
# East of UTC
|
||||
testForTimeZone(
|
||||
"CEST-01CEDT,M4.1.0,M10.5.0",
|
||||
u"2006-06-30T00:00:00+0200",
|
||||
u"2007-01-31T00:00:00+0100",
|
||||
)
|
||||
|
||||
# No DST
|
||||
testForTimeZone(
|
||||
"CST+06",
|
||||
u"2006-06-30T00:00:00-0600",
|
||||
u"2007-01-31T00:00:00-0600",
|
||||
)
|
||||
|
||||
|
||||
def test_formatTimeWithNoTime(self):
|
||||
"""
|
||||
If C{when} argument is L{None}, we get the default output.
|
||||
"""
|
||||
self.assertEqual(formatTime(None), u"-")
|
||||
self.assertEqual(formatTime(None, default=u"!"), u"!")
|
||||
|
||||
|
||||
def test_formatTimeWithNoFormat(self):
|
||||
"""
|
||||
If C{timeFormat} argument is L{None}, we get the default output.
|
||||
"""
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
self.assertEqual(formatTime(t, timeFormat=None), u"-")
|
||||
self.assertEqual(formatTime(t, timeFormat=None, default=u"!"), u"!")
|
||||
|
||||
|
||||
def test_formatTimeWithAlternateTimeFormat(self):
|
||||
"""
|
||||
Alternate time format in output.
|
||||
"""
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
self.assertEqual(formatTime(t, timeFormat="%Y/%W"), u"2013/38")
|
||||
|
||||
|
||||
def test_formatTimePercentF(self):
|
||||
"""
|
||||
"%f" supported in time format.
|
||||
"""
|
||||
self.assertEqual(formatTime(1000000.23456, timeFormat="%f"), u"234560")
|
||||
|
||||
|
||||
|
||||
class ClassicLogFormattingTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for classic text log event formatting functions.
|
||||
"""
|
||||
|
||||
def test_formatTimeDefault(self):
|
||||
"""
|
||||
Time is first field. Default time stamp format is RFC 3339 and offset
|
||||
respects the timezone as set by the standard C{TZ} environment variable
|
||||
and L{tzset} API.
|
||||
"""
|
||||
if tzset is None:
|
||||
raise SkipTest(
|
||||
"Platform cannot change timezone; unable to verify offsets."
|
||||
)
|
||||
|
||||
addTZCleanup(self)
|
||||
setTZ("UTC+00")
|
||||
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
event = dict(log_format=u"XYZZY", log_time=t)
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"2013-09-24T11:40:47+0000 [-\x23-] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatTimeCustom(self):
|
||||
"""
|
||||
Time is first field. Custom formatting function is an optional
|
||||
argument.
|
||||
"""
|
||||
formatTime = lambda t: u"__{0}__".format(t)
|
||||
event = dict(log_format=u"XYZZY", log_time=12345)
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event, formatTime=formatTime),
|
||||
u"__12345__ [-\x23-] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatNamespace(self):
|
||||
"""
|
||||
Namespace is first part of second field.
|
||||
"""
|
||||
event = dict(log_format=u"XYZZY", log_namespace="my.namespace")
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"- [my.namespace\x23-] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatLevel(self):
|
||||
"""
|
||||
Level is second part of second field.
|
||||
"""
|
||||
event = dict(log_format=u"XYZZY", log_level=LogLevel.warn)
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"- [-\x23warn] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatSystem(self):
|
||||
"""
|
||||
System is second field.
|
||||
"""
|
||||
event = dict(log_format=u"XYZZY", log_system=u"S.Y.S.T.E.M.")
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"- [S.Y.S.T.E.M.] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatSystemRulz(self):
|
||||
"""
|
||||
System is not supplanted by namespace and level.
|
||||
"""
|
||||
event = dict(
|
||||
log_format=u"XYZZY",
|
||||
log_namespace="my.namespace",
|
||||
log_level=LogLevel.warn,
|
||||
log_system=u"S.Y.S.T.E.M.",
|
||||
)
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"- [S.Y.S.T.E.M.] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatSystemUnformattable(self):
|
||||
"""
|
||||
An unformattable C{log_system} is rendered as C{u"UNFORMATTABLE"}.
|
||||
"""
|
||||
event = dict(log_format=u"XYZZY", log_system=Unformattable())
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"- [UNFORMATTABLE] XYZZY\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatFormat(self):
|
||||
"""
|
||||
Formatted event is last field.
|
||||
"""
|
||||
event = dict(log_format=u"id:{id}", id="123")
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u"- [-\x23-] id:123\n",
|
||||
)
|
||||
|
||||
|
||||
def test_formatNoFormat(self):
|
||||
"""
|
||||
No format string.
|
||||
"""
|
||||
event = dict(id="123")
|
||||
self.assertIs(
|
||||
formatEventAsClassicLogText(event),
|
||||
None
|
||||
)
|
||||
|
||||
|
||||
def test_formatEmptyFormat(self):
|
||||
"""
|
||||
Empty format string.
|
||||
"""
|
||||
event = dict(log_format="", id="123")
|
||||
self.assertIs(
|
||||
formatEventAsClassicLogText(event),
|
||||
None
|
||||
)
|
||||
|
||||
|
||||
def test_formatFormatMultiLine(self):
|
||||
"""
|
||||
If the formatted event has newlines, indent additional lines.
|
||||
"""
|
||||
event = dict(log_format=u'XYZZY\nA hollow voice says:\n"Plugh"')
|
||||
self.assertEqual(
|
||||
formatEventAsClassicLogText(event),
|
||||
u'- [-\x23-] XYZZY\n\tA hollow voice says:\n\t"Plugh"\n',
|
||||
)
|
||||
|
||||
|
||||
|
||||
class FormatFieldTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for format field functions.
|
||||
"""
|
||||
|
||||
def test_formatWithCall(self):
|
||||
"""
|
||||
L{formatWithCall} is an extended version of L{unicode.format} that
|
||||
will interpret a set of parentheses "C{()}" at the end of a format key
|
||||
to mean that the format key ought to be I{called} rather than
|
||||
stringified.
|
||||
"""
|
||||
self.assertEqual(
|
||||
formatWithCall(
|
||||
u"Hello, {world}. {callme()}.",
|
||||
dict(world="earth", callme=lambda: "maybe")
|
||||
),
|
||||
"Hello, earth. maybe."
|
||||
)
|
||||
self.assertEqual(
|
||||
formatWithCall(
|
||||
u"Hello, {repr()!r}.",
|
||||
dict(repr=lambda: "repr")
|
||||
),
|
||||
"Hello, 'repr'."
|
||||
)
|
||||
|
||||
|
||||
|
||||
class Unformattable(object):
|
||||
"""
|
||||
An object that raises an exception from C{__repr__}.
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return str(1 / 0)
|
||||
|
||||
|
||||
|
||||
class CapturedError(Exception):
|
||||
"""
|
||||
A captured error for use in format tests.
|
||||
"""
|
||||
|
||||
|
||||
|
||||
class EventAsTextTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{eventAsText}, all of which ensure that the
|
||||
returned type is UTF-8 decoded text.
|
||||
"""
|
||||
|
||||
def test_eventWithTraceback(self):
|
||||
"""
|
||||
An event with a C{log_failure} key will have a traceback appended.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
event = {
|
||||
"log_format": u"This is a test log message"
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIn(unicode(f.getTraceback()), eventText)
|
||||
self.assertIn(u'This is a test log message', eventText)
|
||||
|
||||
|
||||
def test_formatEmptyEventWithTraceback(self):
|
||||
"""
|
||||
An event with an empty C{log_format} key appends a traceback from
|
||||
the accompanying failure.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
event = {
|
||||
"log_format": u""
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIn(unicode(f.getTraceback()), eventText)
|
||||
self.assertIn(u'This is a fake error', eventText)
|
||||
|
||||
|
||||
def test_formatUnformattableWithTraceback(self):
|
||||
"""
|
||||
An event with an unformattable value in the C{log_format} key still
|
||||
has a traceback appended.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
event = {
|
||||
"log_format": "{evil()}",
|
||||
"evil": lambda: 1 / 0,
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIsInstance(eventText, unicode)
|
||||
self.assertIn(unicode(f.getTraceback()), eventText)
|
||||
self.assertIn(u'This is a fake error', eventText)
|
||||
|
||||
|
||||
def test_formatUnformattableErrorWithTraceback(self):
|
||||
"""
|
||||
An event with an unformattable value in the C{log_format} key, that
|
||||
throws an exception when __repr__ is invoked still has a traceback
|
||||
appended.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
event = {
|
||||
"log_format": "{evil()}",
|
||||
"evil": lambda: 1 / 0,
|
||||
Unformattable(): "gurk",
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIsInstance(eventText, unicode)
|
||||
self.assertIn(u'MESSAGE LOST', eventText)
|
||||
self.assertIn(unicode(f.getTraceback()), eventText)
|
||||
self.assertIn(u'This is a fake error', eventText)
|
||||
|
||||
|
||||
def test_formatEventUnformattableTraceback(self):
|
||||
"""
|
||||
If a traceback cannot be appended, a message indicating this is true
|
||||
is appended.
|
||||
"""
|
||||
event = {
|
||||
"log_format": u""
|
||||
}
|
||||
event["log_failure"] = object()
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIsInstance(eventText, unicode)
|
||||
self.assertIn(u"(UNABLE TO OBTAIN TRACEBACK FROM EVENT)", eventText)
|
||||
|
||||
|
||||
def test_formatEventNonCritical(self):
|
||||
"""
|
||||
An event with no C{log_failure} key will not have a traceback appended.
|
||||
"""
|
||||
event = {
|
||||
"log_format": u"This is a test log message"
|
||||
}
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIsInstance(eventText, unicode)
|
||||
self.assertIn(u'This is a test log message', eventText)
|
||||
|
||||
|
||||
def test_formatTracebackMultibyte(self):
|
||||
"""
|
||||
An exception message with multibyte characters is properly handled.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError('€')
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
event = {
|
||||
"log_format": u"This is a test log message"
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIn(u'€', eventText)
|
||||
self.assertIn(u'Traceback', eventText)
|
||||
|
||||
|
||||
def test_formatTracebackHandlesUTF8DecodeFailure(self):
|
||||
"""
|
||||
An error raised while attempting to decode the traceback as UTF-8 still produces a
|
||||
valid log message.
|
||||
"""
|
||||
try:
|
||||
# 'test' in utf-16
|
||||
raise CapturedError(b'\xff\xfet\x00e\x00s\x00t\x00')
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
event = {
|
||||
"log_format": u"This is a test log message"
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeSystem=False
|
||||
)
|
||||
self.assertIn(u'Traceback', eventText)
|
||||
if not _PY3:
|
||||
self.assertIn(u'\ufffd\ufffdt\x00e\x00s\x00t\x00', eventText)
|
||||
else:
|
||||
self.assertIn(
|
||||
r"CapturedError(b'\xff\xfet\x00e\x00s\x00t\x00')",
|
||||
eventText
|
||||
)
|
||||
|
||||
|
||||
def test_eventAsTextSystemOnly(self):
|
||||
"""
|
||||
If includeSystem is specified as the only option no timestamp or
|
||||
traceback are printed.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
event = {
|
||||
"log_format": u"ABCD",
|
||||
"log_system": u"fake_system",
|
||||
"log_time": t,
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=False,
|
||||
includeTraceback=False,
|
||||
includeSystem=True,
|
||||
)
|
||||
self.assertEqual(
|
||||
eventText,
|
||||
u"[fake_system] ABCD",
|
||||
)
|
||||
|
||||
|
||||
def test_eventAsTextTimestampOnly(self):
|
||||
"""
|
||||
If includeTimestamp is specified as the only option no system or
|
||||
traceback are printed.
|
||||
"""
|
||||
if tzset is None:
|
||||
raise SkipTest(
|
||||
"Platform cannot change timezone; unable to verify offsets."
|
||||
)
|
||||
|
||||
addTZCleanup(self)
|
||||
setTZ("UTC+00")
|
||||
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
event = {
|
||||
"log_format": u"ABCD",
|
||||
"log_system": u"fake_system",
|
||||
"log_time": t,
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=True,
|
||||
includeTraceback=False,
|
||||
includeSystem=False,
|
||||
)
|
||||
self.assertEqual(
|
||||
eventText,
|
||||
u"2013-09-24T11:40:47+0000 ABCD",
|
||||
)
|
||||
|
||||
|
||||
def test_eventAsTextSystemMissing(self):
|
||||
"""
|
||||
If includeSystem is specified with a missing system [-#-]
|
||||
is used.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
event = {
|
||||
"log_format": u"ABCD",
|
||||
"log_time": t,
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=False,
|
||||
includeTraceback=False,
|
||||
includeSystem=True,
|
||||
)
|
||||
self.assertEqual(
|
||||
eventText,
|
||||
u"[-\x23-] ABCD",
|
||||
)
|
||||
|
||||
|
||||
def test_eventAsTextSystemMissingNamespaceAndLevel(self):
|
||||
"""
|
||||
If includeSystem is specified with a missing system but
|
||||
namespace and level are present they are used.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
event = {
|
||||
"log_format": u"ABCD",
|
||||
"log_time": t,
|
||||
"log_level": LogLevel.info,
|
||||
"log_namespace": u'test',
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=False,
|
||||
includeTraceback=False,
|
||||
includeSystem=True,
|
||||
)
|
||||
self.assertEqual(
|
||||
eventText,
|
||||
u"[test\x23info] ABCD",
|
||||
)
|
||||
|
||||
|
||||
def test_eventAsTextSystemMissingLevelOnly(self):
|
||||
"""
|
||||
If includeSystem is specified with a missing system but
|
||||
level is present, level is included.
|
||||
"""
|
||||
try:
|
||||
raise CapturedError("This is a fake error")
|
||||
except CapturedError:
|
||||
f = Failure()
|
||||
|
||||
t = mktime((2013, 9, 24, 11, 40, 47, 1, 267, 1))
|
||||
event = {
|
||||
"log_format": u"ABCD",
|
||||
"log_time": t,
|
||||
"log_level": LogLevel.info,
|
||||
}
|
||||
event["log_failure"] = f
|
||||
eventText = eventAsText(
|
||||
event,
|
||||
includeTimestamp=False,
|
||||
includeTraceback=False,
|
||||
includeSystem=True,
|
||||
)
|
||||
self.assertEqual(
|
||||
eventText,
|
||||
u"[-\x23info] ABCD",
|
||||
)
|
||||
|
|
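Most of the EventAsTextTests boil down to the behaviour sketched below. The sketch assumes eventAsText is re-exported from the twisted.logger package; the tests import it from the private _format module.

# Sketch of rendering a captured event as text, including the traceback
# attached by Logger.failure(); the exact traceback text will vary.
from twisted.logger import Logger, eventAsText

captured = []
log = Logger(namespace="example", observer=captured.append)

try:
    1 / 0
except ZeroDivisionError:
    log.failure("division went wrong")    # attaches a Failure under "log_failure"

text = eventAsText(captured[0], includeTimestamp=False, includeSystem=True)
assert text.startswith(u"[example#critical]")
assert u"division went wrong" in text
assert u"Traceback" in text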
@@ -0,0 +1,372 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._global}.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import io
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from .._file import textFileLogObserver
|
||||
from .._observer import LogPublisher
|
||||
from .._logger import Logger
|
||||
from .._global import LogBeginner
|
||||
from .._global import MORE_THAN_ONCE_WARNING
|
||||
from .._levels import LogLevel
|
||||
from ..test.test_stdlib import nextLine
|
||||
from twisted.python.failure import Failure
|
||||
|
||||
|
||||
|
||||
def compareEvents(test, actualEvents, expectedEvents):
|
||||
"""
|
||||
Compare two sequences of log events, examining only the keys which are
|
||||
present in both.
|
||||
|
||||
@param test: a test case doing the comparison
|
||||
@type test: L{unittest.TestCase}
|
||||
|
||||
@param actualEvents: A list of log events that were emitted by a logger.
|
||||
@type actualEvents: L{list} of L{dict}
|
||||
|
||||
@param expectedEvents: A list of log events that were expected by a test.
|
||||
@type expectedEvents: L{list} of L{dict}
|
||||
"""
|
||||
if len(actualEvents) != len(expectedEvents):
|
||||
test.assertEqual(actualEvents, expectedEvents)
|
||||
allMergedKeys = set()
|
||||
|
||||
for event in expectedEvents:
|
||||
allMergedKeys |= set(event.keys())
|
||||
|
||||
def simplify(event):
|
||||
copy = event.copy()
|
||||
for key in event.keys():
|
||||
if key not in allMergedKeys:
|
||||
copy.pop(key)
|
||||
return copy
|
||||
|
||||
simplifiedActual = [simplify(event) for event in actualEvents]
|
||||
test.assertEqual(simplifiedActual, expectedEvents)
|
||||
|
||||
|
||||
|
||||
class LogBeginnerTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{LogBeginner}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.publisher = LogPublisher()
|
||||
self.errorStream = io.StringIO()
|
||||
|
||||
class NotSys(object):
|
||||
stdout = object()
|
||||
stderr = object()
|
||||
|
||||
class NotWarnings(object):
|
||||
def __init__(self):
|
||||
self.warnings = []
|
||||
|
||||
def showwarning(
|
||||
self, message, category, filename, lineno,
|
||||
file=None, line=None
|
||||
):
|
||||
"""
|
||||
Emulate warnings.showwarning.
|
||||
|
||||
@param message: A warning message to emit.
|
||||
@type message: L{str}
|
||||
|
||||
@param category: A warning category to associate with
|
||||
C{message}.
|
||||
@type category: L{warnings.Warning}
|
||||
|
||||
@param filename: A file name for the source code file issuing
|
||||
the warning.
|
||||
@type filename: L{str}
|
||||
|
||||
@param lineno: A line number in the source file where the
|
||||
warning was issued.
|
||||
@type lineno: L{int}
|
||||
|
||||
@param file: A file to write the warning message to. If
|
||||
L{None}, write to L{sys.stderr}.
|
||||
@type file: file-like object
|
||||
|
||||
@param line: A line of source code to include with the warning
|
||||
message. If L{None}, attempt to read the line from
|
||||
C{filename} and C{lineno}.
|
||||
@type line: L{str}
|
||||
"""
|
||||
self.warnings.append(
|
||||
(message, category, filename, lineno, file, line)
|
||||
)
|
||||
|
||||
self.sysModule = NotSys()
|
||||
self.warningsModule = NotWarnings()
|
||||
self.beginner = LogBeginner(
|
||||
self.publisher, self.errorStream, self.sysModule,
|
||||
self.warningsModule
|
||||
)
|
||||
|
||||
|
||||
def test_beginLoggingToAddObservers(self):
|
||||
"""
|
||||
Test that C{beginLoggingTo()} adds observers.
|
||||
"""
|
||||
event = dict(foo=1, bar=2)
|
||||
|
||||
events1 = []
|
||||
events2 = []
|
||||
|
||||
o1 = lambda e: events1.append(e)
|
||||
o2 = lambda e: events2.append(e)
|
||||
|
||||
self.beginner.beginLoggingTo((o1, o2))
|
||||
self.publisher(event)
|
||||
|
||||
self.assertEqual([event], events1)
|
||||
self.assertEqual([event], events2)
|
||||
|
||||
|
||||
def test_beginLoggingToBufferedEvents(self):
|
||||
"""
|
||||
Test that events are buffered until C{beginLoggingTo()} is
|
||||
called.
|
||||
"""
|
||||
event = dict(foo=1, bar=2)
|
||||
|
||||
events1 = []
|
||||
events2 = []
|
||||
|
||||
o1 = lambda e: events1.append(e)
|
||||
o2 = lambda e: events2.append(e)
|
||||
|
||||
self.publisher(event) # Before beginLoggingTo; this is buffered
|
||||
self.beginner.beginLoggingTo((o1, o2))
|
||||
|
||||
self.assertEqual([event], events1)
|
||||
self.assertEqual([event], events2)
|
||||
|
||||
|
||||
def _bufferLimitTest(self, limit, beginner):
|
||||
"""
|
||||
Verify that when more than C{limit} events are logged to L{LogBeginner},
|
||||
only the last C{limit} are replayed by L{LogBeginner.beginLoggingTo}.
|
||||
|
||||
@param limit: The maximum number of events the log beginner should
|
||||
buffer.
|
||||
@type limit: L{int}
|
||||
|
||||
@param beginner: The L{LogBeginner} against which to verify.
|
||||
@type beginner: L{LogBeginner}
|
||||
|
||||
@raise: C{self.failureException} if the wrong events are replayed by
|
||||
C{beginner}.
|
||||
|
||||
@return: L{None}
|
||||
"""
|
||||
for count in range(limit + 1):
|
||||
self.publisher(dict(count=count))
|
||||
events = []
|
||||
beginner.beginLoggingTo([events.append])
|
||||
self.assertEqual(
|
||||
list(range(1, limit + 1)),
|
||||
list(event["count"] for event in events),
|
||||
)
|
||||
|
||||
|
||||
def test_defaultBufferLimit(self):
|
||||
"""
|
||||
Up to C{LogBeginner._DEFAULT_BUFFER_SIZE} log events are buffered for
|
||||
replay by L{LogBeginner.beginLoggingTo}.
|
||||
"""
|
||||
limit = LogBeginner._DEFAULT_BUFFER_SIZE
|
||||
self._bufferLimitTest(limit, self.beginner)
|
||||
|
||||
|
||||
def test_overrideBufferLimit(self):
|
||||
"""
|
||||
The size of the L{LogBeginner} event buffer can be overridden with the
|
||||
C{initialBufferSize} initializer argument.
|
||||
"""
|
||||
limit = 3
|
||||
beginner = LogBeginner(
|
||||
self.publisher, self.errorStream, self.sysModule,
|
||||
self.warningsModule, initialBufferSize=limit,
|
||||
)
|
||||
self._bufferLimitTest(limit, beginner)
|
||||
|
||||
|
||||
def test_beginLoggingToTwice(self):
|
||||
"""
|
||||
When invoked twice, L{LogBeginner.beginLoggingTo} will emit a log
|
||||
message warning the user that they previously began logging, and add
|
||||
the new log observers.
|
||||
"""
|
||||
events1 = []
|
||||
events2 = []
|
||||
fileHandle = io.StringIO()
|
||||
textObserver = textFileLogObserver(fileHandle)
|
||||
self.publisher(dict(event="prebuffer"))
|
||||
firstFilename, firstLine = nextLine()
|
||||
self.beginner.beginLoggingTo([events1.append, textObserver])
|
||||
self.publisher(dict(event="postbuffer"))
|
||||
secondFilename, secondLine = nextLine()
|
||||
self.beginner.beginLoggingTo([events2.append, textObserver])
|
||||
self.publisher(dict(event="postwarn"))
|
||||
warning = dict(
|
||||
log_format=MORE_THAN_ONCE_WARNING,
|
||||
log_level=LogLevel.warn,
|
||||
fileNow=secondFilename, lineNow=secondLine,
|
||||
fileThen=firstFilename, lineThen=firstLine
|
||||
)
|
||||
|
||||
compareEvents(
|
||||
self, events1,
|
||||
[
|
||||
dict(event="prebuffer"),
|
||||
dict(event="postbuffer"),
|
||||
warning,
|
||||
dict(event="postwarn")
|
||||
]
|
||||
)
|
||||
compareEvents(self, events2, [warning, dict(event="postwarn")])
|
||||
|
||||
output = fileHandle.getvalue()
|
||||
self.assertIn('<{0}:{1}>'.format(firstFilename, firstLine),
|
||||
output)
|
||||
self.assertIn('<{0}:{1}>'.format(secondFilename, secondLine),
|
||||
output)
|
||||
|
||||
|
||||
def test_criticalLogging(self):
|
||||
"""
|
||||
Critical messages will be written as text to the error stream.
|
||||
"""
|
||||
log = Logger(observer=self.publisher)
|
||||
log.info("ignore this")
|
||||
log.critical("a critical {message}", message="message")
|
||||
self.assertEqual(self.errorStream.getvalue(), u"a critical message\n")
|
||||
|
||||
|
||||
def test_criticalLoggingStops(self):
|
||||
"""
|
||||
Once logging has begun with C{beginLoggingTo}, critical messages are no
|
||||
longer written to the output stream.
|
||||
"""
|
||||
log = Logger(observer=self.publisher)
|
||||
self.beginner.beginLoggingTo(())
|
||||
log.critical("another critical message")
|
||||
self.assertEqual(self.errorStream.getvalue(), u"")
|
||||
|
||||
|
||||
def test_beginLoggingToRedirectStandardIO(self):
|
||||
"""
|
||||
L{LogBeginner.beginLoggingTo} will re-direct the standard output and
|
||||
error streams by setting the C{stdout} and C{stderr} attributes on its
|
||||
sys module object.
|
||||
"""
|
||||
x = []
|
||||
self.beginner.beginLoggingTo([x.append])
|
||||
print("Hello, world.", file=self.sysModule.stdout)
|
||||
compareEvents(
|
||||
self, x, [dict(log_namespace="stdout", log_io="Hello, world.")]
|
||||
)
|
||||
del x[:]
|
||||
print("Error, world.", file=self.sysModule.stderr)
|
||||
compareEvents(
|
||||
self, x, [dict(log_namespace="stderr", log_io="Error, world.")]
|
||||
)
|
||||
|
||||
|
||||
def test_beginLoggingToDontRedirect(self):
|
||||
"""
|
||||
L{LogBeginner.beginLoggingTo} will leave the existing stdout/stderr in
|
||||
place if it has been told not to replace them.
|
||||
"""
|
||||
oldOut = self.sysModule.stdout
|
||||
oldErr = self.sysModule.stderr
|
||||
self.beginner.beginLoggingTo((), redirectStandardIO=False)
|
||||
self.assertIs(self.sysModule.stdout, oldOut)
|
||||
self.assertIs(self.sysModule.stderr, oldErr)
|
||||
|
||||
|
||||
def test_beginLoggingToPreservesEncoding(self):
|
||||
"""
|
||||
When L{LogBeginner.beginLoggingTo} redirects stdout/stderr streams, the
|
||||
replacement streams will preserve the encoding of the replaced streams,
|
||||
to minimally disrupt any application relying on a specific encoding.
|
||||
"""
|
||||
|
||||
weird = io.TextIOWrapper(io.BytesIO(), "shift-JIS")
|
||||
weirderr = io.TextIOWrapper(io.BytesIO(), "big5")
|
||||
|
||||
self.sysModule.stdout = weird
|
||||
self.sysModule.stderr = weirderr
|
||||
|
||||
x = []
|
||||
self.beginner.beginLoggingTo([x.append])
|
||||
self.assertEqual(self.sysModule.stdout.encoding, "shift-JIS")
|
||||
self.assertEqual(self.sysModule.stderr.encoding, "big5")
|
||||
|
||||
self.sysModule.stdout.write(b"\x97\x9B\n")
|
||||
self.sysModule.stderr.write(b"\xBC\xFC\n")
|
||||
compareEvents(
|
||||
self, x, [dict(log_io=u"\u674e"), dict(log_io=u"\u7469")]
|
||||
)
|
||||
|
||||
|
||||
def test_warningsModule(self):
|
||||
"""
|
||||
L{LogBeginner.beginLoggingTo} will redirect the warnings of its
|
||||
warnings module into the logging system.
|
||||
"""
|
||||
self.warningsModule.showwarning(
|
||||
"a message", DeprecationWarning, __file__, 1
|
||||
)
|
||||
x = []
|
||||
self.beginner.beginLoggingTo([x.append])
|
||||
self.warningsModule.showwarning(
|
||||
"another message", DeprecationWarning, __file__, 2
|
||||
)
|
||||
f = io.StringIO()
|
||||
self.warningsModule.showwarning(
|
||||
"yet another", DeprecationWarning, __file__, 3, file=f
|
||||
)
|
||||
self.assertEqual(
|
||||
self.warningsModule.warnings,
|
||||
[
|
||||
("a message", DeprecationWarning, __file__, 1, None, None),
|
||||
("yet another", DeprecationWarning, __file__, 3, f, None),
|
||||
]
|
||||
)
|
||||
compareEvents(
|
||||
self, x,
|
||||
[dict(
|
||||
warning="another message",
|
||||
category=(
|
||||
DeprecationWarning.__module__ + "." +
|
||||
DeprecationWarning.__name__
|
||||
),
|
||||
filename=__file__, lineno=2,
|
||||
)]
|
||||
)
|
||||
|
||||
|
||||
def test_failuresAppendTracebacks(self):
|
||||
"""
|
||||
The string resulting from a logged failure contains a traceback.
|
||||
"""
|
||||
f = Failure(Exception("this is not the behavior you are looking for"))
|
||||
log = Logger(observer=self.publisher)
|
||||
log.failure('a failure', failure=f)
|
||||
msg = self.errorStream.getvalue()
|
||||
self.assertIn('a failure', msg)
|
||||
self.assertIn('this is not the behavior you are looking for', msg)
|
||||
self.assertIn('Traceback', msg)
|
||||
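(A minimal usage sketch of the API these LogBeginner tests exercise — buffering, replay, and stream redirection — assuming the public twisted.logger exports globalLogBeginner and textFileLogObserver; illustrative only, not part of the committed files.)

import sys

from twisted.logger import Logger, globalLogBeginner, textFileLogObserver

log = Logger()
log.info("buffered until logging begins")  # held in the beginner's ring buffer

# Replays the buffered events to the new observers; redirectStandardIO=False
# keeps the real stdout/stderr, as test_beginLoggingToDontRedirect verifies.
globalLogBeginner.beginLoggingTo(
    [textFileLogObserver(sys.stdout)], redirectStandardIO=False
)
log.info("delivered immediately from now on")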
274
venv/lib/python3.9/site-packages/twisted/logger/test/test_io.py
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._io}.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from .._levels import LogLevel
|
||||
from .._logger import Logger
|
||||
from .._observer import LogPublisher
|
||||
from .._io import LoggingFile
|
||||
|
||||
|
||||
|
||||
class LoggingFileTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{LoggingFile}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""
|
||||
Create a logger for test L{LoggingFile} instances to use.
|
||||
"""
|
||||
self.publisher = LogPublisher()
|
||||
self.logger = Logger(observer=self.publisher)
|
||||
|
||||
|
||||
def test_softspace(self):
|
||||
"""
|
||||
L{LoggingFile.softspace} is 0.
|
||||
"""
|
||||
self.assertEqual(LoggingFile.softspace, 0)
|
||||
|
||||
|
||||
def test_readOnlyAttributes(self):
|
||||
"""
|
||||
Some L{LoggingFile} attributes are read-only.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
|
||||
self.assertRaises(AttributeError, setattr, f, "closed", True)
|
||||
self.assertRaises(AttributeError, setattr, f, "encoding", "utf-8")
|
||||
self.assertRaises(AttributeError, setattr, f, "mode", "r")
|
||||
self.assertRaises(AttributeError, setattr, f, "newlines", ["\n"])
|
||||
self.assertRaises(AttributeError, setattr, f, "name", "foo")
|
||||
|
||||
|
||||
def test_unsupportedMethods(self):
|
||||
"""
|
||||
Some L{LoggingFile} methods are unsupported.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
|
||||
self.assertRaises(IOError, f.read)
|
||||
self.assertRaises(IOError, f.next)
|
||||
self.assertRaises(IOError, f.readline)
|
||||
self.assertRaises(IOError, f.readlines)
|
||||
self.assertRaises(IOError, f.xreadlines)
|
||||
self.assertRaises(IOError, f.seek)
|
||||
self.assertRaises(IOError, f.tell)
|
||||
self.assertRaises(IOError, f.truncate)
|
||||
|
||||
|
||||
def test_level(self):
|
||||
"""
|
||||
Default level is L{LogLevel.info} if not set.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertEqual(f.level, LogLevel.info)
|
||||
|
||||
f = LoggingFile(self.logger, level=LogLevel.error)
|
||||
self.assertEqual(f.level, LogLevel.error)
|
||||
|
||||
|
||||
def test_encoding(self):
|
||||
"""
|
||||
Default encoding is C{sys.getdefaultencoding()} if not set.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertEqual(f.encoding, sys.getdefaultencoding())
|
||||
|
||||
f = LoggingFile(self.logger, encoding="utf-8")
|
||||
self.assertEqual(f.encoding, "utf-8")
|
||||
|
||||
|
||||
def test_mode(self):
|
||||
"""
|
||||
Reported mode is C{"w"}.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertEqual(f.mode, "w")
|
||||
|
||||
|
||||
def test_newlines(self):
|
||||
"""
|
||||
The C{newlines} attribute is L{None}.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertIsNone(f.newlines)
|
||||
|
||||
|
||||
def test_name(self):
|
||||
"""
|
||||
The C{name} attribute is fixed.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertEqual(
|
||||
f.name,
|
||||
"<LoggingFile twisted.logger.test.test_io#info>"
|
||||
)
|
||||
|
||||
|
||||
def test_close(self):
|
||||
"""
|
||||
L{LoggingFile.close} closes the file.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
f.close()
|
||||
|
||||
self.assertTrue(f.closed)
|
||||
self.assertRaises(ValueError, f.write, "Hello")
|
||||
|
||||
|
||||
def test_flush(self):
|
||||
"""
|
||||
L{LoggingFile.flush} does nothing.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
f.flush()
|
||||
|
||||
|
||||
def test_fileno(self):
|
||||
"""
|
||||
L{LoggingFile.fileno} returns C{-1}.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertEqual(f.fileno(), -1)
|
||||
|
||||
|
||||
def test_isatty(self):
|
||||
"""
|
||||
L{LoggingFile.isatty} returns C{False}.
|
||||
"""
|
||||
f = LoggingFile(self.logger)
|
||||
self.assertFalse(f.isatty())
|
||||
|
||||
|
||||
def test_writeBuffering(self):
|
||||
"""
|
||||
Writing buffers correctly.
|
||||
"""
|
||||
f = self.observedFile()
|
||||
f.write("Hello")
|
||||
self.assertEqual(f.messages, [])
|
||||
f.write(", world!\n")
|
||||
self.assertEqual(f.messages, [u"Hello, world!"])
|
||||
f.write("It's nice to meet you.\n\nIndeed.")
|
||||
self.assertEqual(
|
||||
f.messages,
|
||||
[
|
||||
u"Hello, world!",
|
||||
u"It's nice to meet you.",
|
||||
u"",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_writeBytesDecoded(self):
|
||||
"""
|
||||
Bytes are decoded to unicode.
|
||||
"""
|
||||
f = self.observedFile(encoding="utf-8")
|
||||
f.write(b"Hello, Mr. S\xc3\xa1nchez\n")
|
||||
self.assertEqual(f.messages, [u"Hello, Mr. S\xe1nchez"])
|
||||
|
||||
|
||||
def test_writeUnicode(self):
|
||||
"""
|
||||
Unicode is unmodified.
|
||||
"""
|
||||
f = self.observedFile(encoding="utf-8")
|
||||
f.write(u"Hello, Mr. S\xe1nchez\n")
|
||||
self.assertEqual(f.messages, [u"Hello, Mr. S\xe1nchez"])
|
||||
|
||||
|
||||
def test_writeLevel(self):
|
||||
"""
|
||||
Log level is emitted properly.
|
||||
"""
|
||||
f = self.observedFile()
|
||||
f.write("Hello\n")
|
||||
self.assertEqual(len(f.events), 1)
|
||||
self.assertEqual(f.events[0]["log_level"], LogLevel.info)
|
||||
|
||||
f = self.observedFile(level=LogLevel.error)
|
||||
f.write("Hello\n")
|
||||
self.assertEqual(len(f.events), 1)
|
||||
self.assertEqual(f.events[0]["log_level"], LogLevel.error)
|
||||
|
||||
|
||||
def test_writeFormat(self):
|
||||
"""
|
||||
Log format is C{u"{message}"}.
|
||||
"""
|
||||
f = self.observedFile()
|
||||
f.write("Hello\n")
|
||||
self.assertEqual(len(f.events), 1)
|
||||
self.assertEqual(f.events[0]["log_format"], u"{log_io}")
|
||||
|
||||
|
||||
def test_writelinesBuffering(self):
|
||||
"""
|
||||
C{writelines} does not add newlines.
|
||||
"""
|
||||
# Note this is different behavior than t.p.log.StdioOnnaStick.
|
||||
f = self.observedFile()
|
||||
f.writelines(("Hello", ", ", ""))
|
||||
self.assertEqual(f.messages, [])
|
||||
f.writelines(("world!\n",))
|
||||
self.assertEqual(f.messages, [u"Hello, world!"])
|
||||
f.writelines(("It's nice to meet you.\n\n", "Indeed."))
|
||||
self.assertEqual(
|
||||
f.messages,
|
||||
[
|
||||
u"Hello, world!",
|
||||
u"It's nice to meet you.",
|
||||
u"",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_print(self):
|
||||
"""
|
||||
L{LoggingFile} can replace L{sys.stdout}.
|
||||
"""
|
||||
f = self.observedFile()
|
||||
self.patch(sys, "stdout", f)
|
||||
|
||||
print("Hello,", end=" ")
|
||||
print("world.")
|
||||
|
||||
self.assertEqual(f.messages, [u"Hello, world."])
|
||||
|
||||
|
||||
def observedFile(self, **kwargs):
|
||||
"""
|
||||
Construct a L{LoggingFile} with a built-in observer.
|
||||
|
||||
@param kwargs: keyword arguments for the L{LoggingFile}.
|
||||
@type kwargs: L{dict}
|
||||
|
||||
@return: a L{LoggingFile} with an observer that appends received
|
||||
events into the file's C{events} attribute (a L{list}) and
|
||||
event messages into the file's C{messages} attribute (a L{list}).
|
||||
@rtype: L{LoggingFile}
|
||||
"""
|
||||
def observer(event):
|
||||
f.events.append(event)
|
||||
if "log_io" in event:
|
||||
f.messages.append(event["log_io"])
|
||||
|
||||
log = Logger(observer=observer)
|
||||
|
||||
f = LoggingFile(logger=log, **kwargs)
|
||||
f.events = []
|
||||
f.messages = []
|
||||
|
||||
return f
|
||||
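(A short sketch of the behavior LoggingFileTests cover — routing print() output through the logging system — assuming LoggingFile is exported from the public twisted.logger package; illustrative only, not part of the committed files.)

import sys

from twisted.logger import Logger, LogLevel, LoggingFile

f = LoggingFile(Logger(namespace="stdout"), level=LogLevel.info)
sys.stdout = f          # print() now produces log events carrying a "log_io" key
print("Hello, world.")  # one info-level event is emitted per completed line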
|
|
@ -0,0 +1,567 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.logger._json}.
|
||||
"""
|
||||
|
||||
from io import StringIO, BytesIO
|
||||
|
||||
from zope.interface.verify import verifyObject, BrokenMethodImplementation
|
||||
|
||||
from twisted.python.compat import unicode, _PYPY, _PY3
|
||||
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
from twisted.python.failure import Failure
|
||||
|
||||
from .._observer import ILogObserver, LogPublisher
|
||||
from .._format import formatEvent
|
||||
from .._levels import LogLevel
|
||||
from .._flatten import extractField
|
||||
from .._global import globalLogPublisher
|
||||
from .._json import (
|
||||
eventAsJSON, eventFromJSON, jsonFileLogObserver, eventsFromJSONLogFile,
|
||||
log as jsonLog
|
||||
)
|
||||
from .._logger import Logger
|
||||
|
||||
|
||||
def savedJSONInvariants(testCase, savedJSON):
|
||||
"""
|
||||
Assert a few things about the result of L{eventAsJSON}, then return it.
|
||||
|
||||
@param testCase: The L{TestCase} with which to perform the assertions.
|
||||
@type testCase: L{TestCase}
|
||||
|
||||
@param savedJSON: The result of L{eventAsJSON}.
|
||||
@type savedJSON: L{unicode} (we hope)
|
||||
|
||||
@return: C{savedJSON}
|
||||
@rtype: L{unicode}
|
||||
|
||||
@raise AssertionError: If any of the preconditions fail.
|
||||
"""
|
||||
testCase.assertIsInstance(savedJSON, unicode)
|
||||
testCase.assertEqual(savedJSON.count("\n"), 0)
|
||||
return savedJSON
|
||||
|
||||
|
||||
|
||||
class SaveLoadTests(TestCase):
|
||||
"""
|
||||
Tests for loading and saving log events.
|
||||
"""
|
||||
|
||||
def savedEventJSON(self, event):
|
||||
"""
|
||||
Serialize an event, assert some things about it, and return the
|
||||
JSON.
|
||||
|
||||
@param event: An event.
|
||||
@type event: L{dict}
|
||||
|
||||
@return: JSON.
|
||||
"""
|
||||
return savedJSONInvariants(self, eventAsJSON(event))
|
||||
|
||||
|
||||
def test_simpleSaveLoad(self):
|
||||
"""
|
||||
Saving and loading an empty dictionary results in an empty dictionary.
|
||||
"""
|
||||
self.assertEqual(eventFromJSON(self.savedEventJSON({})), {})
|
||||
|
||||
|
||||
def test_saveLoad(self):
|
||||
"""
|
||||
Saving and loading a dictionary with some simple values in it results
|
||||
in those same simple values in the output; according to JSON's rules,
|
||||
though, all dictionary keys must be L{unicode} and any non-L{unicode}
|
||||
keys will be converted.
|
||||
"""
|
||||
self.assertEqual(
|
||||
eventFromJSON(self.savedEventJSON({1: 2, u"3": u"4"})),
|
||||
{u"1": 2, u"3": u"4"}
|
||||
)
|
||||
|
||||
|
||||
def test_saveUnPersistable(self):
|
||||
"""
|
||||
Saving and loading an object which cannot be represented in JSON will
|
||||
result in a placeholder.
|
||||
"""
|
||||
self.assertEqual(
|
||||
eventFromJSON(self.savedEventJSON({u"1": 2, u"3": object()})),
|
||||
{u"1": 2, u"3": {u"unpersistable": True}}
|
||||
)
|
||||
|
||||
|
||||
def test_saveNonASCII(self):
|
||||
"""
|
||||
Non-ASCII keys and values can be saved and loaded.
|
||||
"""
|
||||
self.assertEqual(
|
||||
eventFromJSON(self.savedEventJSON(
|
||||
{u"\u1234": u"\u4321", u"3": object()}
|
||||
)),
|
||||
{u"\u1234": u"\u4321", u"3": {u"unpersistable": True}}
|
||||
)
|
||||
|
||||
|
||||
def test_saveBytes(self):
|
||||
"""
|
||||
Any L{bytes} objects will be saved as if they are latin-1 so they can
|
||||
be faithfully re-loaded.
|
||||
"""
|
||||
def asbytes(x):
|
||||
if bytes is str:
|
||||
return b"".join(map(chr, x))
|
||||
else:
|
||||
return bytes(x)
|
||||
|
||||
inputEvent = {"hello": asbytes(range(255))}
|
||||
if bytes is not str:
|
||||
# On Python 3, bytes keys will be skipped by the JSON encoder. Not
|
||||
# much we can do about that. Let's make sure that we don't get an
|
||||
# error, though.
|
||||
inputEvent.update({b"skipped": "okay"})
|
||||
self.assertEqual(
|
||||
eventFromJSON(self.savedEventJSON(inputEvent)),
|
||||
{u"hello": asbytes(range(255)).decode("charmap")}
|
||||
)
|
||||
|
||||
if _PYPY and _PY3:
|
||||
test_saveBytes.skip = "https://bitbucket.org/pypy/pypy/issues/3052/"
|
||||
|
||||
|
||||
def test_saveUnPersistableThenFormat(self):
|
||||
"""
|
||||
Saving and loading an object which cannot be represented in JSON, but
|
||||
has a string representation which I{can} be saved as JSON, will result
|
||||
in the same string formatting; any extractable fields will retain their
|
||||
data types.
|
||||
"""
|
||||
class Reprable(object):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __repr__(self):
|
||||
return("reprable")
|
||||
|
||||
inputEvent = {
|
||||
"log_format": "{object} {object.value}",
|
||||
"object": Reprable(7)
|
||||
}
|
||||
outputEvent = eventFromJSON(self.savedEventJSON(inputEvent))
|
||||
self.assertEqual(formatEvent(outputEvent), "reprable 7")
|
||||
|
||||
|
||||
def test_extractingFieldsPostLoad(self):
|
||||
"""
|
||||
L{extractField} can extract fields from an object that's been saved and
|
||||
loaded from JSON.
|
||||
"""
|
||||
class Obj(object):
|
||||
def __init__(self):
|
||||
self.value = 345
|
||||
|
||||
inputEvent = dict(log_format="{object.value}", object=Obj())
|
||||
loadedEvent = eventFromJSON(self.savedEventJSON(inputEvent))
|
||||
self.assertEqual(extractField("object.value", loadedEvent), 345)
|
||||
|
||||
# The behavior of extractField is consistent between pre-persistence
|
||||
# and post-persistence events, although looking up the key directly
|
||||
# won't be:
|
||||
self.assertRaises(KeyError, extractField, "object", loadedEvent)
|
||||
self.assertRaises(KeyError, extractField, "object", inputEvent)
|
||||
|
||||
|
||||
def test_failureStructurePreserved(self):
|
||||
"""
|
||||
Round-tripping a failure through L{eventAsJSON} preserves its class and
|
||||
structure.
|
||||
"""
|
||||
events = []
|
||||
log = Logger(observer=events.append)
|
||||
try:
|
||||
1 / 0
|
||||
except ZeroDivisionError:
|
||||
f = Failure()
|
||||
log.failure("a message about failure", f)
|
||||
import sys
|
||||
if sys.exc_info()[0] is not None:
|
||||
# Make sure we don't get the same Failure by accident.
|
||||
sys.exc_clear()
|
||||
self.assertEqual(len(events), 1)
|
||||
loaded = eventFromJSON(self.savedEventJSON(events[0]))['log_failure']
|
||||
self.assertIsInstance(loaded, Failure)
|
||||
self.assertTrue(loaded.check(ZeroDivisionError))
|
||||
self.assertIsInstance(loaded.getTraceback(), str)
|
||||
|
||||
|
||||
def test_saveLoadLevel(self):
|
||||
"""
|
||||
It's important that the C{log_level} key remain a
|
||||
L{constantly.NamedConstant} object.
|
||||
"""
|
||||
inputEvent = dict(log_level=LogLevel.warn)
|
||||
loadedEvent = eventFromJSON(self.savedEventJSON(inputEvent))
|
||||
self.assertIs(loadedEvent["log_level"], LogLevel.warn)
|
||||
|
||||
|
||||
def test_saveLoadUnknownLevel(self):
|
||||
"""
|
||||
If a saved bit of JSON (let's say, from a future version of Twisted)
|
||||
were to persist a different log_level, it will resolve as None.
|
||||
"""
|
||||
loadedEvent = eventFromJSON(
|
||||
'{"log_level": {"name": "other", '
|
||||
'"__class_uuid__": "02E59486-F24D-46AD-8224-3ACDF2A5732A"}}'
|
||||
)
|
||||
self.assertEqual(loadedEvent, dict(log_level=None))
|
||||
|
||||
|
||||
|
||||
class FileLogObserverTests(TestCase):
|
||||
"""
|
||||
Tests for L{jsonFileLogObserver}.
|
||||
"""
|
||||
|
||||
def test_interface(self):
|
||||
"""
|
||||
A L{FileLogObserver} returned by L{jsonFileLogObserver} is an
|
||||
L{ILogObserver}.
|
||||
"""
|
||||
with StringIO() as fileHandle:
|
||||
observer = jsonFileLogObserver(fileHandle)
|
||||
try:
|
||||
verifyObject(ILogObserver, observer)
|
||||
except BrokenMethodImplementation as e:
|
||||
self.fail(e)
|
||||
|
||||
|
||||
def assertObserverWritesJSON(self, **kwargs):
|
||||
"""
|
||||
Asserts that an observer created by L{jsonFileLogObserver} with the
|
||||
given arguments writes events serialized as JSON text, using the given
|
||||
record separator.
|
||||
|
||||
@param recordSeparator: A record separator.
|
||||
@type recordSeparator: L{unicode}
|
||||
|
||||
@param kwargs: Keyword arguments to pass to L{jsonFileLogObserver}.
|
||||
@type kwargs: L{dict}
|
||||
"""
|
||||
recordSeparator = kwargs.get("recordSeparator", u"\x1e")
|
||||
|
||||
with StringIO() as fileHandle:
|
||||
observer = jsonFileLogObserver(fileHandle, **kwargs)
|
||||
event = dict(x=1)
|
||||
observer(event)
|
||||
self.assertEqual(
|
||||
fileHandle.getvalue(),
|
||||
u'{0}{{"x": 1}}\n'.format(recordSeparator)
|
||||
)
|
||||
|
||||
|
||||
def test_observeWritesDefaultRecordSeparator(self):
|
||||
"""
|
||||
A L{FileLogObserver} created by L{jsonFileLogObserver} writes events
|
||||
serialized as JSON text to a file when it observes events.
|
||||
By default, the record separator is C{u"\\x1e"}.
|
||||
"""
|
||||
self.assertObserverWritesJSON()
|
||||
|
||||
|
||||
def test_observeWritesEmptyRecordSeparator(self):
|
||||
"""
|
||||
A L{FileLogObserver} created by L{jsonFileLogObserver} writes events
|
||||
serialized as JSON text to a file when it observes events.
|
||||
This test sets the record separator to C{u""}.
|
||||
"""
|
||||
self.assertObserverWritesJSON(recordSeparator=u"")
|
||||
|
||||
|
||||
def test_failureFormatting(self):
|
||||
"""
|
||||
A L{FileLogObserver} created by L{jsonFileLogObserver} writes failures
|
||||
serialized as JSON text to a file when it observes events.
|
||||
"""
|
||||
io = StringIO()
|
||||
publisher = LogPublisher()
|
||||
logged = []
|
||||
publisher.addObserver(logged.append)
|
||||
publisher.addObserver(jsonFileLogObserver(io))
|
||||
logger = Logger(observer=publisher)
|
||||
try:
|
||||
1 / 0
|
||||
except:
|
||||
logger.failure("failed as expected")
|
||||
reader = StringIO(io.getvalue())
|
||||
deserialized = list(eventsFromJSONLogFile(reader))
|
||||
def checkEvents(logEvents):
|
||||
self.assertEqual(len(logEvents), 1)
|
||||
[failureEvent] = logEvents
|
||||
self.assertIn("log_failure", failureEvent)
|
||||
failureObject = failureEvent["log_failure"]
|
||||
self.assertIsInstance(failureObject, Failure)
|
||||
tracebackObject = failureObject.getTracebackObject()
|
||||
self.assertEqual(
|
||||
tracebackObject.tb_frame.f_code.co_filename.rstrip("co"),
|
||||
__file__.rstrip("co")
|
||||
)
|
||||
checkEvents(logged)
|
||||
checkEvents(deserialized)
|
||||
|
||||
|
||||
|
||||
class LogFileReaderTests(TestCase):
|
||||
"""
|
||||
Tests for L{eventsFromJSONLogFile}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.errorEvents = []
|
||||
|
||||
def observer(event):
|
||||
if (
|
||||
event["log_namespace"] == jsonLog.namespace and
|
||||
"record" in event
|
||||
):
|
||||
self.errorEvents.append(event)
|
||||
|
||||
self.logObserver = observer
|
||||
|
||||
globalLogPublisher.addObserver(observer)
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
globalLogPublisher.removeObserver(self.logObserver)
|
||||
|
||||
|
||||
def _readEvents(self, fileHandle, **kwargs):
|
||||
"""
|
||||
Test that L{eventsFromJSONLogFile} reads two pre-defined events from a
|
||||
file: C{{u"x": 1}} and C{{u"y": 2}}.
|
||||
|
||||
@param fileHandle: The file to read from.
|
||||
|
||||
@param kwargs: Keyword arguments to pass to L{eventsFromJSONLogFile}.
|
||||
"""
|
||||
events = eventsFromJSONLogFile(fileHandle, **kwargs)
|
||||
|
||||
self.assertEqual(next(events), {u"x": 1})
|
||||
self.assertEqual(next(events), {u"y": 2})
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
|
||||
|
||||
def test_readEventsAutoWithRecordSeparator(self):
|
||||
"""
|
||||
L{eventsFromJSONLogFile} reads events from a file and automatically
|
||||
detects use of C{u"\\x1e"} as the record separator.
|
||||
"""
|
||||
with StringIO(
|
||||
u'\x1e{"x": 1}\n'
|
||||
u'\x1e{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
self._readEvents(fileHandle)
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readEventsAutoEmptyRecordSeparator(self):
|
||||
"""
|
||||
L{eventsFromJSONLogFile} reads events from a file and automatically
|
||||
detects use of C{u""} as the record separator.
|
||||
"""
|
||||
with StringIO(
|
||||
u'{"x": 1}\n'
|
||||
u'{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
self._readEvents(fileHandle)
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readEventsExplicitRecordSeparator(self):
|
||||
"""
|
||||
L{eventsFromJSONLogFile} reads events from a file and is told to use
|
||||
a specific record separator.
|
||||
"""
|
||||
# Use u"\x08" (backspace)... because that seems weird enough.
|
||||
with StringIO(
|
||||
u'\x08{"x": 1}\n'
|
||||
u'\x08{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
self._readEvents(fileHandle, recordSeparator=u"\x08")
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readEventsPartialBuffer(self):
|
||||
"""
|
||||
L{eventsFromJSONLogFile} handles buffering a partial event.
|
||||
"""
|
||||
with StringIO(
|
||||
u'\x1e{"x": 1}\n'
|
||||
u'\x1e{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
# Use a buffer size smaller than the event text.
|
||||
self._readEvents(fileHandle, bufferSize=1)
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readTruncated(self):
|
||||
"""
|
||||
If the JSON text for a record is truncated, skip it.
|
||||
"""
|
||||
with StringIO(
|
||||
u'\x1e{"x": 1'
|
||||
u'\x1e{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
self.assertEqual(next(events), {u"y": 2})
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
|
||||
# We should have logged the lost record
|
||||
self.assertEqual(len(self.errorEvents), 1)
|
||||
self.assertEqual(
|
||||
self.errorEvents[0]["log_format"],
|
||||
u"Unable to read truncated JSON record: {record!r}"
|
||||
)
|
||||
self.assertEqual(self.errorEvents[0]["record"], b'{"x": 1')
|
||||
|
||||
|
||||
def test_readUnicode(self):
|
||||
"""
|
||||
If the file being read from vends L{unicode}, strings decode from JSON
|
||||
as-is.
|
||||
"""
|
||||
# The Euro currency sign is u"\u20ac"
|
||||
with StringIO(u'\x1e{"currency": "\u20ac"}\n') as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
self.assertEqual(next(events), {u"currency": u"\u20ac"})
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readUTF8Bytes(self):
|
||||
"""
|
||||
If the file being read from vends L{bytes}, strings decode from JSON as
|
||||
UTF-8.
|
||||
"""
|
||||
# The Euro currency sign is b"\xe2\x82\xac" in UTF-8
|
||||
with BytesIO(b'\x1e{"currency": "\xe2\x82\xac"}\n') as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
# The Euro currency sign is u"\u20ac"
|
||||
self.assertEqual(next(events), {u"currency": u"\u20ac"})
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readTruncatedUTF8Bytes(self):
|
||||
"""
|
||||
If the JSON text for a record is truncated in the middle of a two-byte
|
||||
Unicode codepoint, we don't want to see a codec exception and the
|
||||
stream is read properly when the additional data arrives.
|
||||
"""
|
||||
# The Euro currency sign is u"\u20ac" and encodes in UTF-8 as three
|
||||
# bytes: b"\xe2\x82\xac".
|
||||
with BytesIO(b'\x1e{"x": "\xe2\x82\xac"}\n') as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle, bufferSize=8)
|
||||
|
||||
self.assertEqual(next(events), {u"x": u"\u20ac"}) # Got unicode
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
|
||||
|
||||
def test_readInvalidUTF8Bytes(self):
|
||||
"""
|
||||
If the JSON text for a record contains invalid UTF-8 text, ignore that
|
||||
record.
|
||||
"""
|
||||
# The string b"\xe2\xac" is bogus
|
||||
with BytesIO(
|
||||
b'\x1e{"x": "\xe2\xac"}\n'
|
||||
b'\x1e{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
self.assertEqual(next(events), {u"y": 2})
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
|
||||
# We should have logged the lost record
|
||||
self.assertEqual(len(self.errorEvents), 1)
|
||||
self.assertEqual(
|
||||
self.errorEvents[0]["log_format"],
|
||||
u"Unable to decode UTF-8 for JSON record: {record!r}"
|
||||
)
|
||||
self.assertEqual(
|
||||
self.errorEvents[0]["record"],
|
||||
b'{"x": "\xe2\xac"}\n'
|
||||
)
|
||||
|
||||
|
||||
def test_readInvalidJSON(self):
|
||||
"""
|
||||
If the JSON text for a record is invalid, skip it.
|
||||
"""
|
||||
with StringIO(
|
||||
u'\x1e{"x": }\n'
|
||||
u'\x1e{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
self.assertEqual(next(events), {u"y": 2})
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
|
||||
# We should have logged the lost record
|
||||
self.assertEqual(len(self.errorEvents), 1)
|
||||
self.assertEqual(
|
||||
self.errorEvents[0]["log_format"],
|
||||
u"Unable to read JSON record: {record!r}"
|
||||
)
|
||||
self.assertEqual(self.errorEvents[0]["record"], b'{"x": }\n')
|
||||
|
||||
|
||||
def test_readUnseparated(self):
|
||||
"""
|
||||
Multiple events without a record separator are skipped.
|
||||
"""
|
||||
with StringIO(
|
||||
u'\x1e{"x": 1}\n'
|
||||
u'{"y": 2}\n'
|
||||
) as fileHandle:
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
self.assertRaises(StopIteration, next, events) # No more events
|
||||
|
||||
# We should have logged the lost record
|
||||
self.assertEqual(len(self.errorEvents), 1)
|
||||
self.assertEqual(
|
||||
self.errorEvents[0]["log_format"],
|
||||
u"Unable to read JSON record: {record!r}"
|
||||
)
|
||||
self.assertEqual(
|
||||
self.errorEvents[0]["record"],
|
||||
b'{"x": 1}\n{"y": 2}\n'
|
||||
)
|
||||
|
||||
|
||||
def test_roundTrip(self):
|
||||
"""
|
||||
Data written by a L{FileLogObserver} returned by L{jsonFileLogObserver}
|
||||
and read by L{eventsFromJSONLogFile} is reconstructed properly.
|
||||
"""
|
||||
event = dict(x=1)
|
||||
|
||||
with StringIO() as fileHandle:
|
||||
observer = jsonFileLogObserver(fileHandle)
|
||||
observer(event)
|
||||
|
||||
fileHandle.seek(0)
|
||||
events = eventsFromJSONLogFile(fileHandle)
|
||||
|
||||
self.assertEqual(tuple(events), (event,))
|
||||
self.assertEqual(len(self.errorEvents), 0)
|
||||
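(The JSON observer and reader are designed to round-trip, which the test above checks end to end. A minimal sketch under the same assumptions — public jsonFileLogObserver and eventsFromJSONLogFile names — illustrative only, not part of the committed files.)

import io

from twisted.logger import Logger, jsonFileLogObserver, eventsFromJSONLogFile

outFile = io.StringIO()
log = Logger(observer=jsonFileLogObserver(outFile))
log.info("Hello, {who}!", who="world")

outFile.seek(0)
for event in eventsFromJSONLogFile(outFile):
    print(event["who"])  # "world"; records are separated by u"\x1e" by default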
|
|
@ -0,0 +1,472 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._legacy}.
|
||||
"""
|
||||
|
||||
from time import time
|
||||
import logging as py_logging
|
||||
|
||||
from zope.interface.verify import verifyObject, BrokenMethodImplementation
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from twisted.python import context
|
||||
from twisted.python import log as legacyLog
|
||||
from twisted.python.failure import Failure
|
||||
|
||||
from .._levels import LogLevel
|
||||
from .._observer import ILogObserver
|
||||
from .._format import formatEvent
|
||||
from .._legacy import LegacyLogObserverWrapper
|
||||
from .._legacy import publishToNewObserver
|
||||
|
||||
|
||||
|
||||
class LegacyLogObserverWrapperTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{LegacyLogObserverWrapper}.
|
||||
"""
|
||||
|
||||
def test_interface(self):
|
||||
"""
|
||||
L{LegacyLogObserverWrapper} is an L{ILogObserver}.
|
||||
"""
|
||||
legacyObserver = lambda e: None
|
||||
observer = LegacyLogObserverWrapper(legacyObserver)
|
||||
try:
|
||||
verifyObject(ILogObserver, observer)
|
||||
except BrokenMethodImplementation as e:
|
||||
self.fail(e)
|
||||
|
||||
|
||||
def test_repr(self):
|
||||
"""
|
||||
L{LegacyLogObserverWrapper} returns the expected string.
|
||||
"""
|
||||
class LegacyObserver(object):
|
||||
def __repr__(self):
|
||||
return "<Legacy Observer>"
|
||||
|
||||
def __call__(self):
|
||||
return
|
||||
|
||||
observer = LegacyLogObserverWrapper(LegacyObserver())
|
||||
|
||||
self.assertEqual(
|
||||
repr(observer),
|
||||
"LegacyLogObserverWrapper(<Legacy Observer>)"
|
||||
)
|
||||
|
||||
|
||||
def observe(self, event):
|
||||
"""
|
||||
Send an event to a wrapped legacy observer and capture the event as
|
||||
seen by that observer.
|
||||
|
||||
@param event: an event
|
||||
@type event: L{dict}
|
||||
|
||||
@return: the event as observed by the legacy wrapper
|
||||
"""
|
||||
events = []
|
||||
|
||||
legacyObserver = lambda e: events.append(e)
|
||||
observer = LegacyLogObserverWrapper(legacyObserver)
|
||||
observer(event)
|
||||
self.assertEqual(len(events), 1)
|
||||
|
||||
return events[0]
|
||||
|
||||
|
||||
def forwardAndVerify(self, event):
|
||||
"""
|
||||
Send an event to a wrapped legacy observer and verify that its data is
|
||||
preserved.
|
||||
|
||||
@param event: an event
|
||||
@type event: L{dict}
|
||||
|
||||
@return: the event as observed by the legacy wrapper
|
||||
"""
|
||||
# Make sure keys that are expected by the logging system are present
|
||||
event.setdefault("log_time", time())
|
||||
event.setdefault("log_system", "-")
|
||||
event.setdefault("log_level", LogLevel.info)
|
||||
|
||||
# Send a copy: don't mutate me, bro
|
||||
observed = self.observe(dict(event))
|
||||
|
||||
# Don't expect modifications
|
||||
for key, value in event.items():
|
||||
self.assertIn(key, observed)
|
||||
|
||||
return observed
|
||||
|
||||
|
||||
def test_forward(self):
|
||||
"""
|
||||
Basic forwarding: event keys as observed by a legacy observer are the
|
||||
same.
|
||||
"""
|
||||
self.forwardAndVerify(dict(foo=1, bar=2))
|
||||
|
||||
|
||||
def test_time(self):
|
||||
"""
|
||||
The new-style C{"log_time"} key is copied to the old-style C{"time"}
|
||||
key.
|
||||
"""
|
||||
stamp = time()
|
||||
event = self.forwardAndVerify(dict(log_time=stamp))
|
||||
self.assertEqual(event["time"], stamp)
|
||||
|
||||
|
||||
def test_timeAlreadySet(self):
|
||||
"""
|
||||
The new-style C{"log_time"} key does not step on a pre-existing
|
||||
old-style C{"time"} key.
|
||||
"""
|
||||
stamp = time()
|
||||
event = self.forwardAndVerify(dict(log_time=stamp + 1, time=stamp))
|
||||
self.assertEqual(event["time"], stamp)
|
||||
|
||||
|
||||
def test_system(self):
|
||||
"""
|
||||
The new-style C{"log_system"} key is copied to the old-style
|
||||
C{"system"} key.
|
||||
"""
|
||||
event = self.forwardAndVerify(dict(log_system="foo"))
|
||||
self.assertEqual(event["system"], "foo")
|
||||
|
||||
|
||||
def test_systemAlreadySet(self):
|
||||
"""
|
||||
The new-style C{"log_system"} key does not step on a pre-existing
|
||||
old-style C{"system"} key.
|
||||
"""
|
||||
event = self.forwardAndVerify(dict(log_system="foo", system="bar"))
|
||||
self.assertEqual(event["system"], "bar")
|
||||
|
||||
|
||||
def test_noSystem(self):
|
||||
"""
|
||||
If the new-style C{"log_system"} key is absent, the old-style
|
||||
C{"system"} key is set to C{"-"}.
|
||||
"""
|
||||
# Don't use forwardAndVerify(), since that sets log_system.
|
||||
event = dict(log_time=time(), log_level=LogLevel.info)
|
||||
observed = self.observe(dict(event))
|
||||
self.assertEqual(observed["system"], "-")
|
||||
|
||||
|
||||
def test_levelNotChange(self):
|
||||
"""
|
||||
If explicitly set, the C{isError} key will be preserved when forwarding
|
||||
from a new-style logging emitter to a legacy logging observer,
|
||||
regardless of log level.
|
||||
"""
|
||||
self.forwardAndVerify(dict(log_level=LogLevel.info, isError=1))
|
||||
self.forwardAndVerify(dict(log_level=LogLevel.warn, isError=1))
|
||||
self.forwardAndVerify(dict(log_level=LogLevel.error, isError=0))
|
||||
self.forwardAndVerify(dict(log_level=LogLevel.critical, isError=0))
|
||||
|
||||
|
||||
def test_pythonLogLevelNotSet(self):
|
||||
"""
|
||||
The new-style C{"log_level"} key is not translated to the old-style
|
||||
C{"logLevel"} key.
|
||||
|
||||
Events are forwarded from the old module to the new module and are
|
||||
then seen by old-style observers.
|
||||
We don't want to add unexpected keys to old-style events.
|
||||
"""
|
||||
event = self.forwardAndVerify(dict(log_level=LogLevel.info))
|
||||
self.assertNotIn("logLevel", event)
|
||||
|
||||
|
||||
def test_stringPythonLogLevel(self):
|
||||
"""
|
||||
If a stdlib log level was provided as a string (e.g. C{"WARNING"}) in
|
||||
the legacy "logLevel" key, it does not get converted to a number.
|
||||
The documentation suggested that numerical values should be used but
|
||||
this was not a requirement.
|
||||
"""
|
||||
event = self.forwardAndVerify(dict(
|
||||
logLevel="WARNING", # py_logging.WARNING is 30
|
||||
))
|
||||
self.assertEqual(event["logLevel"], "WARNING")
|
||||
|
||||
|
||||
def test_message(self):
|
||||
"""
|
||||
The old-style C{"message"} key is added, even if no new-style
|
||||
C{"log_format"} is given, as it is required, but may be empty.
|
||||
"""
|
||||
event = self.forwardAndVerify(dict())
|
||||
self.assertEqual(event["message"], ()) # "message" is a tuple
|
||||
|
||||
|
||||
def test_messageAlreadySet(self):
|
||||
"""
|
||||
The old-style C{"message"} key is not modified if it already exists.
|
||||
"""
|
||||
event = self.forwardAndVerify(dict(message=("foo", "bar")))
|
||||
self.assertEqual(event["message"], ("foo", "bar"))
|
||||
|
||||
|
||||
def test_format(self):
|
||||
"""
|
||||
Formatting is translated such that text is rendered correctly, even
|
||||
though old-style logging doesn't use PEP 3101 formatting.
|
||||
"""
|
||||
event = self.forwardAndVerify(
|
||||
dict(log_format="Hello, {who}!", who="world")
|
||||
)
|
||||
self.assertEqual(
|
||||
legacyLog.textFromEventDict(event),
|
||||
"Hello, world!"
|
||||
)
|
||||
|
||||
|
||||
def test_formatMessage(self):
|
||||
"""
|
||||
Using the message key, which is special in old-style, works for
|
||||
new-style formatting.
|
||||
"""
|
||||
event = self.forwardAndVerify(
|
||||
dict(log_format="Hello, {message}!", message="world")
|
||||
)
|
||||
self.assertEqual(
|
||||
legacyLog.textFromEventDict(event),
|
||||
"Hello, world!"
|
||||
)
|
||||
|
||||
|
||||
def test_formatAlreadySet(self):
|
||||
"""
|
||||
Formatting is not altered if the old-style C{"format"} key already
|
||||
exists.
|
||||
"""
|
||||
event = self.forwardAndVerify(
|
||||
dict(log_format="Hello!", format="Howdy!")
|
||||
)
|
||||
self.assertEqual(legacyLog.textFromEventDict(event), "Howdy!")
|
||||
|
||||
|
||||
def eventWithFailure(self, **values):
|
||||
"""
|
||||
Create a new-style event with a captured failure.
|
||||
|
||||
@param values: Additional values to include in the event.
|
||||
@type values: L{dict}
|
||||
|
||||
@return: the new event
|
||||
@rtype: L{dict}
|
||||
"""
|
||||
failure = Failure(RuntimeError("nyargh!"))
|
||||
return self.forwardAndVerify(dict(
|
||||
log_failure=failure,
|
||||
log_format="oopsie...",
|
||||
**values
|
||||
))
|
||||
|
||||
|
||||
def test_failure(self):
|
||||
"""
|
||||
Captured failures in the new style set the old-style C{"failure"},
|
||||
C{"isError"}, and C{"why"} keys.
|
||||
"""
|
||||
event = self.eventWithFailure()
|
||||
self.assertIs(event["failure"], event["log_failure"])
|
||||
self.assertTrue(event["isError"])
|
||||
self.assertEqual(event["why"], "oopsie...")
|
||||
|
||||
|
||||
def test_failureAlreadySet(self):
|
||||
"""
|
||||
Captured failures in the new style do not step on a pre-existing
|
||||
old-style C{"failure"} key.
|
||||
"""
|
||||
failure = Failure(RuntimeError("Weak salsa!"))
|
||||
event = self.eventWithFailure(failure=failure)
|
||||
self.assertIs(event["failure"], failure)
|
||||
|
||||
|
||||
def test_isErrorAlreadySet(self):
|
||||
"""
|
||||
Captured failures in the new style do not step on a pre-existing
|
||||
old-style C{"isError"} key.
|
||||
"""
|
||||
event = self.eventWithFailure(isError=0)
|
||||
self.assertEqual(event["isError"], 0)
|
||||
|
||||
|
||||
def test_whyAlreadySet(self):
|
||||
"""
|
||||
Captured failures in the new style do not step on a pre-existing
|
||||
old-style C{"failure"} key.
|
||||
"""
|
||||
event = self.eventWithFailure(why="blah")
|
||||
self.assertEqual(event["why"], "blah")
|
||||
|
||||
|
||||
|
||||
class PublishToNewObserverTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{publishToNewObserver}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.events = []
|
||||
self.observer = self.events.append
|
||||
|
||||
|
||||
def legacyEvent(self, *message, **values):
|
||||
"""
|
||||
Return a basic old-style event as would be created by L{legacyLog.msg}.
|
||||
|
||||
@param message: a message event value in the legacy event format
|
||||
@type message: L{tuple} of L{bytes}
|
||||
|
||||
@param values: additional event values in the legacy event format
|
||||
@type values: L{dict}
|
||||
|
||||
@return: a legacy event
|
||||
"""
|
||||
event = (context.get(legacyLog.ILogContext) or {}).copy()
|
||||
event.update(values)
|
||||
event["message"] = message
|
||||
event["time"] = time()
|
||||
if "isError" not in event:
|
||||
event["isError"] = 0
|
||||
return event
|
||||
|
||||
|
||||
def test_observed(self):
|
||||
"""
|
||||
The observer is called exactly once.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer, self.legacyEvent(), legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(len(self.events), 1)
|
||||
|
||||
|
||||
def test_time(self):
|
||||
"""
|
||||
The old-style C{"time"} key is copied to the new-style C{"log_time"}
|
||||
key.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer, self.legacyEvent(), legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(
|
||||
self.events[0]["log_time"], self.events[0]["time"]
|
||||
)
|
||||
|
||||
|
||||
def test_message(self):
|
||||
"""
|
||||
A published old-style event should format as text in the same way as
|
||||
the given C{textFromEventDict} callable would format it.
|
||||
"""
|
||||
def textFromEventDict(event):
|
||||
return "".join(reversed(" ".join(event["message"])))
|
||||
|
||||
event = self.legacyEvent("Hello,", "world!")
|
||||
text = textFromEventDict(event)
|
||||
|
||||
publishToNewObserver(self.observer, event, textFromEventDict)
|
||||
self.assertEqual(formatEvent(self.events[0]), text)
|
||||
|
||||
|
||||
def test_defaultLogLevel(self):
|
||||
"""
|
||||
Published event should have log level of L{LogLevel.info}.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer, self.legacyEvent(), legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(self.events[0]["log_level"], LogLevel.info)
|
||||
|
||||
|
||||
def test_isError(self):
|
||||
"""
|
||||
If C{"isError"} is set to C{1} (true) on the legacy event, the
|
||||
C{"log_level"} key should get set to L{LogLevel.critical}.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer,
|
||||
self.legacyEvent(isError=1),
|
||||
legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(self.events[0]["log_level"], LogLevel.critical)
|
||||
|
||||
|
||||
def test_stdlibLogLevel(self):
|
||||
"""
|
||||
If the old-style C{"logLevel"} key is set to a standard library logging
|
||||
level, using a predefined (L{int}) constant, the new-style
|
||||
C{"log_level"} key should get set to the corresponding log level.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer,
|
||||
self.legacyEvent(logLevel=py_logging.WARNING),
|
||||
legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(self.events[0]["log_level"], LogLevel.warn)
|
||||
|
||||
|
||||
def test_stdlibLogLevelWithString(self):
|
||||
"""
|
||||
If the old-style C{"logLevel"} key is set to a standard library logging
|
||||
level, using a string value, the new-style C{"log_level"} key should
|
||||
get set to the corresponding log level.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer,
|
||||
self.legacyEvent(logLevel="WARNING"),
|
||||
legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(self.events[0]["log_level"], LogLevel.warn)
|
||||
|
||||
|
||||
def test_stdlibLogLevelWithGarbage(self):
|
||||
"""
|
||||
If the old-style C{"logLevel"} key is set to a standard library logging
|
||||
level, using an unknown value, the new-style C{"log_level"} key should
|
||||
not get set.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer,
|
||||
self.legacyEvent(logLevel="Foo!!!!!"),
|
||||
legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertNotIn("log_level", self.events[0])
|
||||
|
||||
|
||||
def test_defaultNamespace(self):
|
||||
"""
|
||||
Published event should have a namespace of C{"log_legacy"} to indicate
|
||||
that it was forwarded from legacy logging.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer, self.legacyEvent(), legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(self.events[0]["log_namespace"], "log_legacy")
|
||||
|
||||
|
||||
def test_system(self):
|
||||
"""
|
||||
The old-style C{"system"} key is copied to the new-style
|
||||
C{"log_system"} key.
|
||||
"""
|
||||
publishToNewObserver(
|
||||
self.observer, self.legacyEvent(), legacyLog.textFromEventDict
|
||||
)
|
||||
self.assertEqual(
|
||||
self.events[0]["log_system"], self.events[0]["system"]
|
||||
)
|
||||
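(A brief sketch of the bridge these legacy tests cover, assuming the public LegacyLogObserverWrapper export; illustrative only, not part of the committed files.)

from twisted.logger import Logger, LegacyLogObserverWrapper

legacyEvents = []

def legacyObserver(eventDict):
    # Old-style observers receive dicts with "message", "system", "time", ...
    legacyEvents.append(eventDict)

log = Logger(observer=LegacyLogObserverWrapper(legacyObserver))
log.info("Hello, {who}!", who="world")
# legacyEvents[0] now carries the new-style keys (log_format, log_level)
# plus the synthesized old-style ones (message, system, time).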
|
|
@ -0,0 +1,38 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._levels}.
|
||||
"""
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from .._levels import InvalidLogLevelError
|
||||
from .._levels import LogLevel
|
||||
|
||||
|
||||
|
||||
class LogLevelTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{LogLevel}.
|
||||
"""
|
||||
|
||||
def test_levelWithName(self):
|
||||
"""
|
||||
Look up log level by name.
|
||||
"""
|
||||
for level in LogLevel.iterconstants():
|
||||
self.assertIs(LogLevel.levelWithName(level.name), level)
|
||||
|
||||
|
||||
def test_levelWithInvalidName(self):
|
||||
"""
|
||||
You can't make up log level names.
|
||||
"""
|
||||
bogus = "*bogus*"
|
||||
try:
|
||||
LogLevel.levelWithName(bogus)
|
||||
except InvalidLogLevelError as e:
|
||||
self.assertIs(e.level, bogus)
|
||||
else:
|
||||
self.fail("Expected InvalidLogLevelError.")
|
||||
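(For reference, the lookup behavior these tests pin down, assuming the public LogLevel and InvalidLogLevelError exports; illustrative only, not part of the committed files.)

from twisted.logger import InvalidLogLevelError, LogLevel

assert LogLevel.levelWithName("warn") is LogLevel.warn
try:
    LogLevel.levelWithName("*bogus*")
except InvalidLogLevelError as e:
    assert e.level == "*bogus*"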
|
|
@ -0,0 +1,268 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for L{twisted.logger._logger}.
|
||||
"""
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
||||
from .._levels import InvalidLogLevelError
|
||||
from .._levels import LogLevel
|
||||
from .._format import formatEvent
|
||||
from .._logger import Logger
|
||||
from .._global import globalLogPublisher
|
||||
|
||||
|
||||
|
||||
class TestLogger(Logger):
|
||||
"""
|
||||
L{Logger} with an overridden C{emit} method that keeps track of received
|
||||
events.
|
||||
"""
|
||||
|
||||
def emit(self, level, format=None, **kwargs):
|
||||
def observer(event):
|
||||
self.event = event
|
||||
|
||||
globalLogPublisher.addObserver(observer)
|
||||
try:
|
||||
Logger.emit(self, level, format, **kwargs)
|
||||
finally:
|
||||
globalLogPublisher.removeObserver(observer)
|
||||
|
||||
self.emitted = {
|
||||
"level": level,
|
||||
"format": format,
|
||||
"kwargs": kwargs,
|
||||
}
|
||||
|
||||
|
||||
|
||||
class LogComposedObject(object):
|
||||
"""
|
||||
A regular object, with a logger attached.
|
||||
"""
|
||||
log = TestLogger()
|
||||
|
||||
def __init__(self, state=None):
|
||||
self.state = state
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return "<LogComposedObject {state}>".format(state=self.state)
|
||||
|
||||
|
||||
|
||||
class LoggerTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{Logger}.
|
||||
"""
|
||||
|
||||
def test_repr(self):
|
||||
"""
|
||||
repr() on Logger
|
||||
"""
|
||||
namespace = "bleargh"
|
||||
log = Logger(namespace)
|
||||
self.assertEqual(repr(log), "<Logger {0}>".format(repr(namespace)))
|
||||
|
||||
|
||||
def test_namespaceDefault(self):
|
||||
"""
|
||||
Default namespace is module name.
|
||||
"""
|
||||
log = Logger()
|
||||
self.assertEqual(log.namespace, __name__)
|
||||
|
||||
|
||||
def test_namespaceOMGItsTooHard(self):
|
||||
"""
|
||||
Default namespace is C{"<unknown>"} when a logger is created from a
|
||||
context in which it can't be determined automatically and no namespace
|
||||
was specified.
|
||||
"""
|
||||
result = []
|
||||
exec(
|
||||
"result.append(Logger())",
|
||||
dict(Logger=Logger), locals(),
|
||||
)
|
||||
self.assertEqual(result[0].namespace, "<unknown>")
|
||||
|
||||
|
||||
def test_namespaceAttribute(self):
|
||||
"""
|
||||
Default namespace for classes using L{Logger} as a descriptor is the
|
||||
class name they were retrieved from.
|
||||
"""
|
||||
obj = LogComposedObject()
|
||||
expectedNamespace = "{0}.{1}".format(
|
||||
obj.__module__,
|
||||
obj.__class__.__name__,
|
||||
)
|
||||
self.assertEqual(obj.log.namespace, expectedNamespace)
|
||||
self.assertEqual(LogComposedObject.log.namespace, expectedNamespace)
|
||||
self.assertIs(LogComposedObject.log.source, LogComposedObject)
|
||||
self.assertIs(obj.log.source, obj)
|
||||
self.assertIsNone(Logger().source)
|
||||
|
||||
|
||||
def test_descriptorObserver(self):
|
||||
"""
|
||||
When used as a descriptor, the observer is propagated.
|
||||
"""
|
||||
observed = []
|
||||
|
||||
class MyObject(object):
|
||||
log = Logger(observer=observed.append)
|
||||
|
||||
MyObject.log.info("hello")
|
||||
self.assertEqual(len(observed), 1)
|
||||
self.assertEqual(observed[0]['log_format'], "hello")
|
||||
|
||||
|
||||
def test_sourceAvailableForFormatting(self):
|
||||
"""
|
||||
On instances that have a L{Logger} class attribute, the C{log_source}
|
||||
key is available to format strings.
|
||||
"""
|
||||
obj = LogComposedObject("hello")
|
||||
log = obj.log
|
||||
log.error("Hello, {log_source}.")
|
||||
|
||||
self.assertIn("log_source", log.event)
|
||||
self.assertEqual(log.event["log_source"], obj)
|
||||
|
||||
stuff = formatEvent(log.event)
|
||||
self.assertIn("Hello, <LogComposedObject hello>.", stuff)
|
||||
|
||||
|
||||
def test_basicLogger(self):
|
||||
"""
|
||||
Test that log levels and messages are emitted correctly for
|
||||
Logger.
|
||||
"""
|
||||
log = TestLogger()
|
||||
|
||||
for level in LogLevel.iterconstants():
|
||||
format = "This is a {level_name} message"
|
||||
message = format.format(level_name=level.name)
|
||||
|
||||
logMethod = getattr(log, level.name)
|
||||
logMethod(format, junk=message, level_name=level.name)
|
||||
|
||||
# Ensure that test_emit got called with expected arguments
|
||||
self.assertEqual(log.emitted["level"], level)
|
||||
self.assertEqual(log.emitted["format"], format)
|
||||
self.assertEqual(log.emitted["kwargs"]["junk"], message)
|
||||
|
||||
self.assertTrue(hasattr(log, "event"), "No event observed.")
|
||||
|
||||
self.assertEqual(log.event["log_format"], format)
|
||||
self.assertEqual(log.event["log_level"], level)
|
||||
self.assertEqual(log.event["log_namespace"], __name__)
|
||||
self.assertIsNone(log.event["log_source"])
|
||||
self.assertEqual(log.event["junk"], message)
|
||||
|
||||
self.assertEqual(formatEvent(log.event), message)
|
||||
|
||||
|
||||
def test_sourceOnClass(self):
|
||||
"""
|
||||
C{log_source} event key refers to the class.
|
||||
"""
|
||||
def observer(event):
|
||||
self.assertEqual(event["log_source"], Thingo)
|
||||
|
||||
class Thingo(object):
|
||||
log = TestLogger(observer=observer)
|
||||
|
||||
Thingo.log.info()
|
||||
|
||||
|
||||
def test_sourceOnInstance(self):
|
||||
"""
|
||||
C{log_source} event key refers to the instance.
|
||||
"""
|
||||
def observer(event):
|
||||
self.assertEqual(event["log_source"], thingo)
|
||||
|
||||
class Thingo(object):
|
||||
log = TestLogger(observer=observer)
|
||||
|
||||
thingo = Thingo()
|
||||
thingo.log.info()
|
||||
|
||||
|
||||
def test_sourceUnbound(self):
|
||||
"""
|
||||
C{log_source} event key is L{None}.
|
||||
"""
|
||||
def observer(event):
|
||||
self.assertIsNone(event["log_source"])
|
||||
|
||||
log = TestLogger(observer=observer)
|
||||
log.info()
|
||||
|
||||
|
||||
def test_defaultFailure(self):
|
||||
"""
|
||||
Test that log.failure() emits the right data.
|
||||
"""
|
||||
log = TestLogger()
|
||||
try:
|
||||
raise RuntimeError("baloney!")
|
||||
except RuntimeError:
|
||||
log.failure("Whoops")
|
||||
|
||||
errors = self.flushLoggedErrors(RuntimeError)
|
||||
self.assertEqual(len(errors), 1)
|
||||
|
||||
self.assertEqual(log.emitted["level"], LogLevel.critical)
|
||||
self.assertEqual(log.emitted["format"], "Whoops")
|
||||
|
||||
|
||||
def test_conflictingKwargs(self):
|
||||
"""
|
||||
Make sure that kwargs conflicting with args don't pass through.
|
||||
"""
|
||||
log = TestLogger()
|
||||
|
||||
log.warn(
|
||||
u"*",
|
||||
log_format="#",
|
||||
log_level=LogLevel.error,
|
||||
log_namespace="*namespace*",
|
||||
log_source="*source*",
|
||||
)
|
||||
|
||||
self.assertEqual(log.event["log_format"], u"*")
|
||||
self.assertEqual(log.event["log_level"], LogLevel.warn)
|
||||
self.assertEqual(log.event["log_namespace"], log.namespace)
|
||||
self.assertIsNone(log.event["log_source"])
|
||||
|
||||
|
||||
def test_logInvalidLogLevel(self):
|
||||
"""
|
||||
Test passing in a bogus log level to C{emit()}.
|
||||
"""
|
||||
log = TestLogger()
|
||||
|
||||
log.emit("*bogus*")
|
||||
|
||||
errors = self.flushLoggedErrors(InvalidLogLevelError)
|
||||
self.assertEqual(len(errors), 1)
|
||||
|
||||
|
||||
def test_trace(self):
|
||||
"""
|
||||
Tracing keeps track of forwarding to the publisher.
|
||||
"""
|
||||
def publisher(event):
|
||||
observer(event)
|
||||
|
||||
def observer(event):
|
||||
self.assertEqual(event["log_trace"], [(log, publisher)])
|
||||
|
||||
log = TestLogger(observer=publisher)
|
||||
log.info("Hello.", log_trace=[])
|
||||
|
|
@ -0,0 +1,190 @@
|
|||
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for L{twisted.logger._observer}.
"""

from zope.interface.verify import verifyObject, BrokenMethodImplementation

from twisted.trial import unittest

from .._logger import Logger
from .._observer import ILogObserver
from .._observer import LogPublisher




class LogPublisherTests(unittest.TestCase):
    """
    Tests for L{LogPublisher}.
    """

    def test_interface(self):
        """
        L{LogPublisher} is an L{ILogObserver}.
        """
        publisher = LogPublisher()
        try:
            verifyObject(ILogObserver, publisher)
        except BrokenMethodImplementation as e:
            self.fail(e)


    def test_observers(self):
        """
        L{LogPublisher.observers} returns the observers.
        """
        o1 = lambda e: None
        o2 = lambda e: None

        publisher = LogPublisher(o1, o2)
        self.assertEqual(set((o1, o2)), set(publisher._observers))


    def test_addObserver(self):
        """
        L{LogPublisher.addObserver} adds an observer.
        """
        o1 = lambda e: None
        o2 = lambda e: None
        o3 = lambda e: None

        publisher = LogPublisher(o1, o2)
        publisher.addObserver(o3)
        self.assertEqual(set((o1, o2, o3)), set(publisher._observers))


    def test_addObserverNotCallable(self):
        """
        L{LogPublisher.addObserver} refuses to add an observer that's
        not callable.
        """
        publisher = LogPublisher()
        self.assertRaises(TypeError, publisher.addObserver, object())


    def test_removeObserver(self):
        """
        L{LogPublisher.removeObserver} removes an observer.
        """
        o1 = lambda e: None
        o2 = lambda e: None
        o3 = lambda e: None

        publisher = LogPublisher(o1, o2, o3)
        publisher.removeObserver(o2)
        self.assertEqual(set((o1, o3)), set(publisher._observers))


    def test_removeObserverNotRegistered(self):
        """
        L{LogPublisher.removeObserver} is a no-op for an observer that is not
        registered.
        """
        o1 = lambda e: None
        o2 = lambda e: None
        o3 = lambda e: None

        publisher = LogPublisher(o1, o2)
        publisher.removeObserver(o3)
        self.assertEqual(set((o1, o2)), set(publisher._observers))


    def test_fanOut(self):
        """
        L{LogPublisher} calls its observers.
        """
        event = dict(foo=1, bar=2)

        events1 = []
        events2 = []
        events3 = []

        o1 = lambda e: events1.append(e)
        o2 = lambda e: events2.append(e)
        o3 = lambda e: events3.append(e)

        publisher = LogPublisher(o1, o2, o3)
        publisher(event)
        self.assertIn(event, events1)
        self.assertIn(event, events2)
        self.assertIn(event, events3)
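        # A minimal, self-contained sketch of the fan-out behaviour exercised
        # here (observer names are illustrative, not part of the test suite):
        #
        #     from twisted.logger import LogPublisher
        #
        #     seen1, seen2 = [], []
        #     fanOut = LogPublisher(seen1.append, seen2.append)
        #     fanOut(dict(log_format="Hello, {who}!", who="world"))
        #     assert seen1[0] is seen2[0]   # both saw the very same dict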


    def test_observerRaises(self):
        """
        Observer raises an exception during fan out: a failure is logged, but
        not re-raised. Life goes on.
        """
        event = dict(foo=1, bar=2)
        exception = RuntimeError("ARGH! EVIL DEATH!")

        events = []

        def observer(event):
            shouldRaise = not events
            events.append(event)
            if shouldRaise:
                raise exception

        collector = []

        publisher = LogPublisher(observer, collector.append)
        publisher(event)

        # Verify that the observer saw my event
        self.assertIn(event, events)

        # Verify that the observer raised my exception
        errors = [
            e["log_failure"] for e in collector
            if "log_failure" in e
        ]
        self.assertEqual(len(errors), 1)
        self.assertIs(errors[0].value, exception)
        # Make sure the exceptional observer does not receive its own error.
        self.assertEqual(len(events), 1)


    def test_observerRaisesAndLoggerHatesMe(self):
        """
        Observer raises an exception during fan out and the publisher's Logger
        pukes when the failure is reported. The exception does not propagate
        back to the caller.
        """
        event = dict(foo=1, bar=2)
        exception = RuntimeError("ARGH! EVIL DEATH!")

        def observer(event):
            raise RuntimeError("Sad panda")

        class GurkLogger(Logger):
            def failure(self, *args, **kwargs):
                raise exception

        publisher = LogPublisher(observer)
        publisher.log = GurkLogger()
        publisher(event)

        # Here, the lack of an exception thus far is a success, of sorts


    def test_trace(self):
        """
        Tracing keeps track of forwarding to observers.
        """
        event = dict(foo=1, bar=2, log_trace=[])

        traces = {}

        # Copy trace to a tuple; otherwise, both observers will store the same
        # mutable list, and we won't be able to see o1's view distinctly.
        o1 = lambda e: traces.setdefault(1, tuple(e["log_trace"]))
        o2 = lambda e: traces.setdefault(2, tuple(e["log_trace"]))

        publisher = LogPublisher(o1, o2)
        publisher(event)

        self.assertEqual(traces[1], ((publisher, o1),))
        self.assertEqual(traces[2], ((publisher, o1), (publisher, o2)))

@@ -0,0 +1,304 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for L{twisted.logger._stdlib}.
"""

import sys
from io import BytesIO, TextIOWrapper
import logging as py_logging
from inspect import getsourcefile

from zope.interface.verify import verifyObject, BrokenMethodImplementation

from twisted.python.compat import _PY3, currentframe
from twisted.python.failure import Failure
from twisted.trial import unittest

from .._levels import LogLevel
from .._observer import ILogObserver
from .._stdlib import STDLibLogObserver


def nextLine():
    """
    Retrieve the file name and line number immediately after where this
    function is called.

    @return: the file name and line number
    @rtype: 2-L{tuple} of L{str}, L{int}
    """
    caller = currentframe(1)
    return (getsourcefile(sys.modules[caller.f_globals['__name__']]),
            caller.f_lineno + 1)
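# Intended usage (as in test_callerInfo below; names here are placeholders):
# call nextLine() on the line directly above the log call whose location you
# want to assert on, e.g.:
#
#     filename, lineno = nextLine()
#     observer(someEvent)   # <- reported as (filename, lineno)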



class STDLibLogObserverTests(unittest.TestCase):
    """
    Tests for L{STDLibLogObserver}.
    """

    def test_interface(self):
        """
        L{STDLibLogObserver} is an L{ILogObserver}.
        """
        observer = STDLibLogObserver()
        try:
            verifyObject(ILogObserver, observer)
        except BrokenMethodImplementation as e:
            self.fail(e)


    def py_logger(self):
        """
        Create a logging object we can use to test with.

        @return: a stdlib-style logger
        @rtype: L{StdlibLoggingContainer}
        """
        logger = StdlibLoggingContainer()
        self.addCleanup(logger.close)
        return logger


    def logEvent(self, *events):
        """
        Send one or more events to Python's logging module, and
        capture the emitted L{logging.LogRecord}s and output stream as
        a string.

        @param events: events
        @type events: L{tuple} of L{dict}

        @return: a tuple: (records, output)
        @rtype: 2-tuple of (L{list} of L{logging.LogRecord}, L{unicode})
        """
        pl = self.py_logger()
        observer = STDLibLogObserver(
            # Add 1 to default stack depth to skip *this* frame, since
            # tests will want to know about their own frames.
            stackDepth=STDLibLogObserver.defaultStackDepth + 1
        )
        for event in events:
            observer(event)
        return pl.bufferedHandler.records, pl.outputAsText()
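    # Outside of the test harness above, the observer under test is typically
    # attached to the global publisher so Twisted events land in the stdlib
    # "logging" tree; a minimal sketch (assumes no other logging setup):
    #
    #     import logging
    #     from twisted.logger import STDLibLogObserver, globalLogPublisher
    #
    #     logging.basicConfig(level=logging.DEBUG)
    #     globalLogPublisher.addObserver(STDLibLogObserver())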


    def test_name(self):
        """
        Records are logged under the default logger name C{"twisted"}.
        """
        records, output = self.logEvent({})

        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].name, "twisted")


    def test_levels(self):
        """
        Twisted log levels map to the expected stdlib logging levels.
        """
        levelMapping = {
            None: py_logging.INFO,  # Default
            LogLevel.debug: py_logging.DEBUG,
            LogLevel.info: py_logging.INFO,
            LogLevel.warn: py_logging.WARNING,
            LogLevel.error: py_logging.ERROR,
            LogLevel.critical: py_logging.CRITICAL,
        }

        # Build a set of events for each log level
        events = []
        for level, pyLevel in levelMapping.items():
            event = {}

            # Set the log level on the event, except for default
            if level is not None:
                event["log_level"] = level

            # Remember the Python log level we expect to see for this
            # event (as an int)
            event["py_levelno"] = int(pyLevel)

            events.append(event)

        records, output = self.logEvent(*events)
        self.assertEqual(len(records), len(levelMapping))

        # Check that each event has the correct level
        for i in range(len(records)):
            self.assertEqual(records[i].levelno, events[i]["py_levelno"])


    def test_callerInfo(self):
        """
        C{pathname}, C{lineno}, C{exc_info}, and C{func} are set properly on
        records.
        """
        filename, logLine = nextLine()
        records, output = self.logEvent({})

        self.assertEqual(len(records), 1)
        self.assertEqual(records[0].pathname, filename)
        self.assertEqual(records[0].lineno, logLine)
        self.assertIsNone(records[0].exc_info)

        # Attribute "func" is missing from record, which is weird because it's
        # documented.
        # self.assertEqual(records[0].func, "test_callerInfo")


    def test_basicFormat(self):
        """
        Basic formattable event passes the format along correctly.
        """
        event = dict(log_format="Hello, {who}!", who="dude")
        records, output = self.logEvent(event)

        self.assertEqual(len(records), 1)
        self.assertEqual(str(records[0].msg), u"Hello, dude!")
        self.assertEqual(records[0].args, ())


    def test_basicFormatRendered(self):
        """
        Basic formattable event renders correctly.
        """
        event = dict(log_format="Hello, {who}!", who="dude")
        records, output = self.logEvent(event)

        self.assertEqual(len(records), 1)
        self.assertTrue(output.endswith(u":Hello, dude!\n"),
                        repr(output))


    def test_noFormat(self):
        """
        An event with no format logs an empty message.
        """
        records, output = self.logEvent({})

        self.assertEqual(len(records), 1)
        self.assertEqual(str(records[0].msg), "")


    def test_failure(self):
        """
        An event with a failure logs the failure details as well.
        """
        def failing_func():
            1 / 0
        try:
            failing_func()
        except ZeroDivisionError:
            failure = Failure()
        event = dict(log_format='Hi mom', who='me', log_failure=failure)
        records, output = self.logEvent(event)
        self.assertEqual(len(records), 1)
        self.assertIn(u'Hi mom', output)
        self.assertIn(u'in failing_func', output)
        self.assertIn(u'ZeroDivisionError', output)


    def test_cleanedFailure(self):
        """
        A cleaned Failure object has a fake traceback object; make sure that
        logging such a failure still results in the exception details being
        logged.
        """
        def failing_func():
            1 / 0
        try:
            failing_func()
        except ZeroDivisionError:
            failure = Failure()
        failure.cleanFailure()
        event = dict(log_format='Hi mom', who='me', log_failure=failure)
        records, output = self.logEvent(event)
        self.assertEqual(len(records), 1)
        self.assertIn(u'Hi mom', output)
        self.assertIn(u'in failing_func', output)
        self.assertIn(u'ZeroDivisionError', output)
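        # For reference, the pattern exercised above: cleanFailure() swaps the
        # real traceback for a lightweight fake so the Failure can be kept
        # beyond the except block without pinning frames, while the formatted
        # traceback details used for logging are preserved. A minimal sketch:
        #
        #     try:
        #         1 / 0
        #     except ZeroDivisionError:
        #         failure = Failure()
        #     failure.cleanFailure()   # still renders "ZeroDivisionError"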



class StdlibLoggingContainer(object):
    """
    Container for a test configuration of stdlib logging objects.
    """

    def __init__(self):
        self.rootLogger = py_logging.getLogger("")

        self.originalLevel = self.rootLogger.getEffectiveLevel()
        self.rootLogger.setLevel(py_logging.DEBUG)

        self.bufferedHandler = BufferedHandler()
        self.rootLogger.addHandler(self.bufferedHandler)

        self.streamHandler, self.output = handlerAndBytesIO()
        self.rootLogger.addHandler(self.streamHandler)


    def close(self):
        """
        Restore the root logger's original level and remove the test handlers.
        """
        self.rootLogger.setLevel(self.originalLevel)
        self.rootLogger.removeHandler(self.bufferedHandler)
        self.rootLogger.removeHandler(self.streamHandler)
        self.streamHandler.close()
        self.output.close()


    def outputAsText(self):
        """
        Get the output to the underlying stream as text.

        @return: the output text
        @rtype: L{unicode}
        """
        return self.output.getvalue().decode("utf-8")




def handlerAndBytesIO():
    """
    Construct a 2-tuple of C{(StreamHandler, BytesIO)} for testing interaction
    with the 'logging' module.

    @return: handler and io object
    @rtype: tuple of L{StreamHandler} and L{io.BytesIO}
    """
    output = BytesIO()
    stream = output
    template = py_logging.BASIC_FORMAT
    if _PY3:
        stream = TextIOWrapper(output, encoding="utf-8", newline="\n")
    formatter = py_logging.Formatter(template)
    handler = py_logging.StreamHandler(stream)
    handler.setFormatter(formatter)
    return handler, output




class BufferedHandler(py_logging.Handler):
    """
    A L{py_logging.Handler} that remembers all logged records in a list.
    """

    def __init__(self):
        """
        Initialize this L{BufferedHandler}.
        """
        py_logging.Handler.__init__(self)
        self.records = []


    def emit(self, record):
        """
        Remember the record.
        """
        self.records.append(record)

@@ -0,0 +1,107 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Test cases for L{twisted.logger._util}.
"""

from twisted.trial import unittest

from .._observer import LogPublisher
from .._util import formatTrace




class UtilTests(unittest.TestCase):
    """
    Utility tests.
    """

    def test_trace(self):
        """
        Tracing keeps track of forwarding done by the publisher.
        """
        publisher = LogPublisher()

        event = dict(log_trace=[])

        o1 = lambda e: None

        def o2(e):
            self.assertIs(e, event)
            self.assertEqual(
                e["log_trace"],
                [
                    (publisher, o1),
                    (publisher, o2),
                    # Event hasn't been sent to o3 yet
                ]
            )

        def o3(e):
            self.assertIs(e, event)
            self.assertEqual(
                e["log_trace"],
                [
                    (publisher, o1),
                    (publisher, o2),
                    (publisher, o3),
                ]
            )

        publisher.addObserver(o1)
        publisher.addObserver(o2)
        publisher.addObserver(o3)
        publisher(event)


    def test_formatTrace(self):
        """
        Format trace as string.
        """
        event = dict(log_trace=[])

        def noOp(e):
            pass

        o1, o2, o3, o4, o5 = noOp, noOp, noOp, noOp, noOp

        o1.name = "root/o1"
        o2.name = "root/p1/o2"
        o3.name = "root/p1/o3"
        o4.name = "root/p1/p2/o4"
        o5.name = "root/o5"

        def testObserver(e):
            self.assertIs(e, event)
            trace = formatTrace(e["log_trace"])
            self.assertEqual(
                trace,
                (
                    u"{root} ({root.name})\n"
                    u" -> {o1} ({o1.name})\n"
                    u" -> {p1} ({p1.name})\n"
                    u" -> {o2} ({o2.name})\n"
                    u" -> {o3} ({o3.name})\n"
                    u" -> {p2} ({p2.name})\n"
                    u" -> {o4} ({o4.name})\n"
                    u" -> {o5} ({o5.name})\n"
                    u" -> {oTest}\n"
                ).format(
                    root=root,
                    o1=o1, o2=o2, o3=o3, o4=o4, o5=o5,
                    p1=p1, p2=p2,
                    oTest=oTest
                )
            )
        oTest = testObserver

        p2 = LogPublisher(o4)
        p1 = LogPublisher(o2, o3, p2)

        p2.name = "root/p1/p2/"
        p1.name = "root/p1/"

        root = LogPublisher(o1, p1, o5, oTest)
        root.name = "root/"
        root(event)
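        # For reference, the same machinery is handy interactively for seeing
        # which observers an event visited; a minimal sketch reusing the
        # publisher tree built above:
        #
        #     event = dict(log_trace=[])
        #     root(event)
        #     print(formatTrace(event["log_trace"]))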