Output of the new DB entries

This commit is contained in:
hubobel 2022-01-02 21:50:48 +01:00
parent bad48e1627
commit cfbbb9ee3d
2399 changed files with 843193 additions and 43 deletions


@@ -0,0 +1,29 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted Python: Utilities and Enhancements for Python.
"""
from __future__ import absolute_import, division
# Deprecating twisted.python.constants.
from .compat import unicode
from .versions import Version
from .deprecate import deprecatedModuleAttribute
deprecatedModuleAttribute(
Version("Twisted", 16, 5, 0),
"Please use constantly from PyPI instead.",
"twisted.python", "constants")
deprecatedModuleAttribute(
Version("Twisted", 17, 5, 0),
"Please use hyperlink from PyPI instead.",
"twisted.python", "url")
del Version
del deprecatedModuleAttribute
del unicode
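
As a quick illustration of what the two deprecatedModuleAttribute registrations above buy us, the sketch below (not part of the commit) checks that touching the deprecated twisted.python.constants alias is expected to emit a DeprecationWarning pointing at the replacement package named in the registered message.

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    from twisted.python import constants  # deprecated alias; use "constantly"

# At least one DeprecationWarning mentioning the replacement is expected.
assert any(issubclass(w.category, DeprecationWarning) for w in caught)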


@@ -0,0 +1,32 @@
# -*- test-case-name: twisted.python.test.test_appdirs -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Application data directory support.
"""
from __future__ import division, absolute_import
import appdirs
import inspect
from twisted.python.compat import currentframe
def getDataDirectory(moduleName=None):
"""
Get a data directory for the caller function, or C{moduleName} if given.
@param moduleName: The module name to use instead of the caller's
module.
@type moduleName: L{str}
@returns: A directory for putting data in.
@rtype: L{str}
"""
if not moduleName:
caller = currentframe(1)
moduleName = inspect.getmodule(caller).__name__
return appdirs.user_data_dir(moduleName)
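
A minimal usage sketch for getDataDirectory; the module path twisted.python._appdirs and the module name "twisted.mail" are assumptions for illustration, and the concrete path is whatever the third-party appdirs package picks for the platform.

from twisted.python._appdirs import getDataDirectory  # module path assumed

dataDir = getDataDirectory("twisted.mail")
# e.g. "~/.local/share/twisted.mail" on Linux; platform-specific elsewhere.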


@@ -0,0 +1,110 @@
# -*- test-case-name: twisted.internet.test.test_inotify -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Very low-level ctypes-based interface to Linux inotify(7).
ctypes and a version of libc which supports inotify system calls are
required.
"""
import ctypes
import ctypes.util
class INotifyError(Exception):
"""
Unify all the possible exceptions that can be raised by the INotify API.
"""
def init():
"""
Create an inotify instance and return the associated file descriptor.
"""
fd = libc.inotify_init()
if fd < 0:
raise INotifyError("INotify initialization error.")
return fd
def add(fd, path, mask):
"""
Add a watch for the given path to the inotify file descriptor, and return
the watch descriptor.
@param fd: The file descriptor returned by C{libc.inotify_init}.
@type fd: L{int}
@param path: The path to watch via inotify.
@type path: L{twisted.python.filepath.FilePath}
@param mask: Bitmask specifying the events that inotify should monitor.
@type mask: L{int}
"""
wd = libc.inotify_add_watch(fd, path.asBytesMode().path, mask)
if wd < 0:
raise INotifyError("Failed to add watch on '%r' - (%r)" % (path, wd))
return wd
def remove(fd, wd):
"""
Remove the given watch descriptor from the inotify file descriptor.
"""
# When inotify_rm_watch returns -1 there's an error:
# The errno for this call can be either one of the following:
# EBADF: fd is not a valid file descriptor.
# EINVAL: The watch descriptor wd is not valid; or fd is
# not an inotify file descriptor.
#
# if we can't access the errno here we cannot even raise
# an exception and we need to ignore the problem, one of
# the most common cases is when you remove a directory from
# the filesystem and that directory is observed. When inotify
# tries to call inotify_rm_watch with a non existing directory
# either of the 2 errors might come up because the files inside
# it might have events generated way before they were handled.
# Unfortunately only ctypes in Python 2.6 supports accessing errno:
# http://bugs.python.org/issue1798 and in order to solve
# the problem for previous versions we need to introduce
# code that is quite complex:
# http://stackoverflow.com/questions/661017/access-to-errno-from-python
#
# See #4310 for future resolution of this issue.
libc.inotify_rm_watch(fd, wd)
def initializeModule(libc):
"""
Initialize the module, checking if the expected APIs exist and setting the
argtypes and restype for C{inotify_init}, C{inotify_add_watch}, and
C{inotify_rm_watch}.
"""
for function in ("inotify_add_watch", "inotify_init", "inotify_rm_watch"):
if getattr(libc, function, None) is None:
raise ImportError("libc6 2.4 or higher needed")
libc.inotify_init.argtypes = []
libc.inotify_init.restype = ctypes.c_int
libc.inotify_rm_watch.argtypes = [
ctypes.c_int, ctypes.c_int]
libc.inotify_rm_watch.restype = ctypes.c_int
libc.inotify_add_watch.argtypes = [
ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32]
libc.inotify_add_watch.restype = ctypes.c_int
name = ctypes.util.find_library('c')
if not name:
raise ImportError("Can't find C library.")
libc = ctypes.cdll.LoadLibrary(name)
initializeModule(libc)
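
A hedged sketch of driving this low-level layer directly. The module path twisted.python._inotify and the IN_CREATE/IN_DELETE mask constants (taken from twisted.internet.inotify) are assumptions; the diff above does not show the file name.

import os
from twisted.python.filepath import FilePath
from twisted.python import _inotify               # module path assumed
from twisted.internet.inotify import IN_CREATE, IN_DELETE

fd = _inotify.init()                              # inotify file descriptor
wd = _inotify.add(fd, FilePath(b"/tmp"), IN_CREATE | IN_DELETE)
# ... read struct inotify_event records from fd, e.g. os.read(fd, 4096) ...
_inotify.remove(fd, wd)
os.close(fd)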


@@ -0,0 +1,99 @@
# -*- test-case-name: twisted.test.test_nooldstyle -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utilities to assist in the "flag day" new-style object transition.
"""
from __future__ import absolute_import, division
import types
from functools import wraps
from twisted.python.compat import _shouldEnableNewStyle, _PY3
def _replaceIf(condition, alternative):
"""
If C{condition}, replace this function with C{alternative}.
@param condition: A L{bool} which says whether this should be replaced.
@param alternative: An alternative function that will be swapped in instead
of the original, if C{condition} is truthy.
@return: A decorator.
"""
def decorator(func):
if condition is True:
call = alternative
elif condition is False:
call = func
else:
raise ValueError(("condition argument to _replaceIf requires a "
"bool, not {}").format(repr(condition)))
@wraps(func)
def wrapped(*args, **kwargs):
return call(*args, **kwargs)
return wrapped
return decorator
def passthru(arg):
"""
Return C{arg}. Do nothing.
@param arg: The arg to return.
@return: C{arg}
"""
return arg
def _ensureOldClass(cls):
"""
Ensure that C{cls} is an old-style class.
@param cls: The class to check.
@return: The class, if it is an old-style class.
@raises: L{ValueError} if it is a new-style class.
"""
if type(cls) is not types.ClassType:
from twisted.python.reflect import fullyQualifiedName
raise ValueError(
("twisted.python._oldstyle._oldStyle is being used to decorate a "
"new-style class ({cls}). This should only be used to "
"decorate old-style classes.").format(
cls=fullyQualifiedName(cls)))
return cls
@_replaceIf(_PY3, passthru)
@_replaceIf(not _shouldEnableNewStyle(), _ensureOldClass)
def _oldStyle(cls):
"""
A decorator which conditionally converts old-style classes to new-style
classes. If it is Python 3, or if the C{TWISTED_NEWSTYLE} environment
variable has a falsey (C{no}, C{false}, C{False}, or C{0}) value in the
environment, this decorator is a no-op.
@param cls: An old-style class to convert to new-style.
@type cls: L{types.ClassType}
@return: A new-style version of C{cls}.
"""
_ensureOldClass(cls)
_bases = cls.__bases__ + (object,)
return type(cls.__name__, _bases, cls.__dict__)
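
A minimal sketch, assuming Python 2 with the default TWISTED_NEWSTYLE behaviour and the module path twisted.python._oldstyle (the file name is not shown in the diff): under those assumptions the decorator upgrades an old-style class to a new-style one.

from twisted.python._oldstyle import _oldStyle    # module path assumed

@_oldStyle
class Legacy:                     # old-style on Python 2 (no "object" base)
    def whoami(self):
        return self.__class__.__name__

assert isinstance(Legacy, type)   # now a new-style class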


@@ -0,0 +1,108 @@
<!DOCTYPE html>
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1" t:render="all">
<head>
<title><t:slot name="title">Something</t:slot> : API documentation</title>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
<link rel="stylesheet" type="text/css" href="bootstrap.min.css" />
<link rel="stylesheet" type="text/css" href="apidocs.css"/>
</head>
<body>
<nav class="navbar navbar-default">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="index.html">
Twisted API Documentation
</a>
</div>
</div>
</nav>
<div style="display: none" id="current-docs-container" class="container">
<div class="col-sm-12">
<a id="current-docs-link">
Go to the latest version of this document.
</a>
</div>
</div>
<div id="showPrivate">
<button class="btn btn-link" onclick="togglePrivate()">Toggle Private API</button>
</div>
<div class="container">
<div class="page-header">
<t:slot name="heading"><h1>Heading</h1></t:slot>
<span id="partOf">
<t:slot name="part">Part of something</t:slot>
<a t:render="source">View Source</a>
<a t:render="inhierarchy">(View In Hierarchy)</a>
</span>
</div>
<div class="extrasDocstring">
<t:slot name="extras">
A docstring.
</t:slot>
</div>
<t:transparent t:render="deprecated" />
<div class="moduleDocstring">
<t:slot name="docstring">
A docstring.
</t:slot>
</div>
<div id="splitTables">
<t:slot name="mainTable" />
<t:transparent t:render="baseTables">
<p class="inheritedFrom">
Inherited from <t:slot name="baseName" />:
</p>
<t:slot name="baseTable" />
</t:transparent>
<t:slot name="packageInitTable"> </t:slot>
</div>
<div id="childList">
<t:slot name="childlist"> </t:slot>
</div>
<address>
<a href="index.html">API Documentation</a> for <t:slot name="project">Some
Project</t:slot>, generated by <a href="https://github.com/twisted/pydoctor/">pydoctor</a> at <t:slot name="buildtime">some time</t:slot>.
</address>
</div>
<script src="pydoctor.js" type='text/javascript'></script>
<!-- Google analytics, obviously. -->
<script src="//www.google-analytics.com/urchin.js" type="text/javascript"></script>
<script type="text/javascript">
_uacct = "UA-99018-6";
urchinTracker();
</script>
<!-- If the documentation isn't current, insert a current link. -->
<script type="text/javascript">
if (window.location.pathname.indexOf('/current/') == -1) {
<!-- Give the user a link to this page, but in the current version of the docs. -->
var link = document.getElementById('current-docs-link');
link.href = window.location.pathname.replace(/\/\d+\.\d+\.\d+\/api\//, '/current/api/');
<!-- And make it visible -->
var container = document.getElementById('current-docs-container');
container.style.display = "";
delete link;
delete container;
}
</script>
</body>
</html>


@@ -0,0 +1,106 @@
<!DOCTYPE html>
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title>
API Documentation for
<t:transparent t:render="project">Some Project</t:transparent>
</title>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
<link rel="stylesheet" type="text/css" href="bootstrap.min.css" />
<link rel="stylesheet" type="text/css" href="apidocs.css" />
</head>
<body>
<nav class="navbar navbar-default">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="index.html">
<t:transparent t:render="project">Some Project</t:transparent> API Documentation
</a>
</div>
</div>
</nav>
<div style="display: none" id="current-docs-container" class="container">
<div class="col-sm-12">
<a id="current-docs-link">
Go to the latest version of this document.
</a>
</div>
</div>
<div class="container">
<h2>
Get Started
</h2>
<ul>
<li>
A listing of <a href="moduleIndex.html">all modules and
packages</a>, organized by package hierarchy.
</li>
<li>
A listing of <a href="classIndex.html">all classes</a>,
organized by inheritance hierarchy.
</li>
<li>
A listing of <a href="nameIndex.html">all functions, classes,
modules and packages</a>, ordered by name.
</li>
<li t:render="recentChanges">
See <a href="recentChanges">recent changes</a> made online to
docstrings.
</li>
<li t:render="problemObjects">
See <a href="problemObjects">objects with formatting problems</a>.
</li>
<li t:render="onlyIfOneRoot">
Start at <a href="root.html">root</a>, the root package.
</li>
<t:transparent t:render="onlyIfMultipleRoots">
<li>
Or start at one of the root <t:transparent t:render="rootkind">
packages</t:transparent>:
<ul>
<li t:render="roots">
<t:slot name="root"/>
</li>
</ul>
</li>
</t:transparent>
</ul>
<h2>
About
</h2>
<p>
<address>
<a href="index.html">API Documentation</a> for <t:transparent t:render="project_link">Some
Project</t:transparent>, generated by <a href="https://github.com/twisted/pydoctor/">pydoctor</a>
at <t:transparent t:render="buildtime">some time</t:transparent>.
</address>
</p>
</div>
<!-- Google analytics, obviously. -->
<script src="//www.google-analytics.com/urchin.js" type="text/javascript"></script>
<script type="text/javascript">
_uacct = "UA-99018-6";
urchinTracker();
</script>
<!-- If the documentation isn't current, insert a current link. -->
<script type="text/javascript">
if (window.location.pathname.indexOf('/current/') == -1) {
<!-- Give the user a link to this page, but in the current version of the docs. -->
var link = document.getElementById('current-docs-link');
link.href = window.location.pathname.replace(/\/\d+\.\d+\.\d+\/api\//, '/current/api/');
<!-- And make it visible -->
var container = document.getElementById('current-docs-container');
container.style.display = "";
delete link;
delete container;
}
</script>
</body>
</html>


@@ -0,0 +1,62 @@
<!DOCTYPE html>
<html xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
<head>
<title t:render="title">Something</title>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
<link rel="stylesheet" type="text/css" href="bootstrap.min.css" />
<link rel="stylesheet" type="text/css" href="apidocs.css"/>
</head>
<body>
<nav class="navbar navbar-default">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="index.html">
<t:transparent t:render="project">Some Project</t:transparent> API Documentation
</a>
</div>
</div>
</nav>
<div style="display: none" id="current-docs-container" class="container">
<div class="col-sm-12">
<a id="current-docs-link">
Go to the latest version of this document.
</a>
</div>
</div>
<div class="container">
<div class="page-header">
<h1 t:render="heading">Heading</h1>
</div>
<ul t:render="stuff">
</ul>
</div>
<!-- Google analytics, obviously. -->
<script src="//www.google-analytics.com/urchin.js" type="text/javascript"></script>
<script type="text/javascript">
_uacct = "UA-99018-6";
urchinTracker();
</script>
<!-- If the documentation isn't current, insert a current link. -->
<script type="text/javascript">
if (window.location.pathname.indexOf('/current/') == -1) {
<!-- Give the user a link to this page, but in the current version of the docs. -->
var link = document.getElementById('current-docs-link');
link.href = window.location.pathname.replace(/\/\d+\.\d+\.\d+\/api\//, '/current/api/');
<!-- And make it visible -->
var container = document.getElementById('current-docs-container');
container.style.display = "";
delete link;
delete container;
}
</script>
</body>
</html>
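
The three templates above are twisted.web.template documents (note the t: namespace on the <html> element). A rough sketch of filling the simpler third template follows; the file name "summary.html" and all renderer data are illustrative assumptions, and pydoctor's real rendering pipeline is more involved.

from twisted.python.filepath import FilePath
from twisted.web.template import Element, XMLFile, flattenString, renderer, tags

class Summary(Element):
    loader = XMLFile(FilePath("summary.html"))   # the third template above

    @renderer
    def title(self, request, tag):
        return tag("Module Index")

    @renderer
    def project(self, request, tag):
        return tag("Twisted")

    @renderer
    def heading(self, request, tag):
        return tag("Module Index")

    @renderer
    def stuff(self, request, tag):
        # Fill the <ul> with a couple of example entries.
        return tag(tags.li("twisted.python"), tags.li("twisted.web"))

flattenString(None, Summary()).addCallback(
    lambda html: print(html.decode("utf-8")))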


@@ -0,0 +1,576 @@
# -*- test-case-name: twisted.python.test.test_release -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted's automated release system.
This module is only for use within Twisted's release system. If you are anyone
else, do not use it. The interface and behaviour will change without notice.
Only Linux is supported by this code. It should not be used by any tools
which must run on multiple platforms (eg the setup.py script).
"""
import os
import sys
from zope.interface import Interface, implementer
from subprocess import check_output, STDOUT, CalledProcessError
from twisted.python.compat import execfile
from twisted.python.filepath import FilePath
from twisted.python.monkey import MonkeyPatcher
# Types of newsfragments.
NEWSFRAGMENT_TYPES = ["doc", "bugfix", "misc", "feature", "removal"]
intersphinxURLs = [
u"https://docs.python.org/2/objects.inv",
u"https://docs.python.org/3/objects.inv",
u"https://pyopenssl.readthedocs.io/en/stable/objects.inv",
u"https://hyperlink.readthedocs.io/en/stable/objects.inv",
u"https://twisted.github.io/constantly/docs/objects.inv",
u"https://twisted.github.io/incremental/docs/objects.inv",
u"https://hyper-h2.readthedocs.io/en/stable/objects.inv",
u"https://priority.readthedocs.io/en/stable/objects.inv",
u"https://zopeinterface.readthedocs.io/en/latest/objects.inv",
u"https://automat.readthedocs.io/en/latest/objects.inv",
]
def runCommand(args, **kwargs):
"""Execute a vector of arguments.
This is a wrapper around L{subprocess.check_output}, so it takes
the same arguments as L{subprocess.Popen} with one difference: all
arguments after the vector must be keyword arguments.
@param args: arguments passed to L{subprocess.check_output}
@param kwargs: keyword arguments passed to L{subprocess.check_output}
@return: command output
@rtype: L{bytes}
"""
kwargs['stderr'] = STDOUT
return check_output(args, **kwargs)
class IVCSCommand(Interface):
"""
An interface for VCS commands.
"""
def ensureIsWorkingDirectory(path):
"""
Ensure that C{path} is a working directory of this VCS.
@type path: L{twisted.python.filepath.FilePath}
@param path: The path to check.
"""
def isStatusClean(path):
"""
Return whether the VCS status of the files in the specified path is clean.
@type path: L{twisted.python.filepath.FilePath}
@param path: The path to get the status from (can be a directory or a
file.)
"""
def remove(path):
"""
Remove the specified path from the VCS.
@type path: L{twisted.python.filepath.FilePath}
@param path: The path to remove from the repository.
"""
def exportTo(fromDir, exportDir):
"""
Export the content of the VCS repository to the specified directory.
@type fromDir: L{twisted.python.filepath.FilePath}
@param fromDir: The path to the VCS repository to export.
@type exportDir: L{twisted.python.filepath.FilePath}
@param exportDir: The directory to export the content of the
repository to. This directory doesn't have to exist prior to
exporting the repository.
"""
@implementer(IVCSCommand)
class GitCommand(object):
"""
Subset of Git commands to release Twisted from a Git repository.
"""
@staticmethod
def ensureIsWorkingDirectory(path):
"""
Ensure that C{path} is a Git working directory.
@type path: L{twisted.python.filepath.FilePath}
@param path: The path to check.
"""
try:
runCommand(["git", "rev-parse"], cwd=path.path)
except (CalledProcessError, OSError):
raise NotWorkingDirectory(
"%s does not appear to be a Git repository."
% (path.path,))
@staticmethod
def isStatusClean(path):
"""
Return whether the Git status of the files in the specified path is clean.
@type path: L{twisted.python.filepath.FilePath}
@param path: The path to get the status from (can be a directory or a
file.)
"""
status = runCommand(
["git", "-C", path.path, "status", "--short"]).strip()
return status == b''
@staticmethod
def remove(path):
"""
Remove the specified path from a Git repository.
@type path: L{twisted.python.filepath.FilePath}
@param path: The path to remove from the repository.
"""
runCommand(["git", "-C", path.dirname(), "rm", path.path])
@staticmethod
def exportTo(fromDir, exportDir):
"""
Export the content of a Git repository to the specified directory.
@type fromDir: L{twisted.python.filepath.FilePath}
@param fromDir: The path to the Git repository to export.
@type exportDir: L{twisted.python.filepath.FilePath}
@param exportDir: The directory to export the content of the
repository to. This directory doesn't have to exist prior to
exporting the repository.
"""
runCommand(["git", "-C", fromDir.path,
"checkout-index", "--all", "--force",
# prefix has to end up with a "/" so that files get copied
# to a directory whose name is the prefix.
"--prefix", exportDir.path + "/"])
def getRepositoryCommand(directory):
"""
Detect the VCS used in the specified directory and return a L{GitCommand}
if the directory is a Git repository. If the directory is not a Git repository, it
raises a L{NotWorkingDirectory} exception.
@type directory: L{FilePath}
@param directory: The directory to detect the VCS used from.
@rtype: L{GitCommand}
@raise NotWorkingDirectory: if no supported VCS can be found from the
specified directory.
"""
try:
GitCommand.ensureIsWorkingDirectory(directory)
return GitCommand
except (NotWorkingDirectory, OSError):
# It's not Git, but that's okay, eat the error
pass
raise NotWorkingDirectory("No supported VCS can be found in %s" %
(directory.path,))
class Project(object):
"""
A representation of a project that has a version.
@ivar directory: A L{twisted.python.filepath.FilePath} pointing to the base
directory of a Twisted-style Python package. The package should contain
a C{_version.py} file and a C{newsfragments} directory that contains a
C{README} file.
"""
def __init__(self, directory):
self.directory = directory
def __repr__(self):
return '%s(%r)' % (
self.__class__.__name__, self.directory)
def getVersion(self):
"""
@return: A L{incremental.Version} specifying the version number of the
project based on live python modules.
"""
namespace = {}
directory = self.directory
while not namespace:
if directory.path == "/":
raise Exception("Not inside a Twisted project.")
elif not directory.basename() == "twisted":
directory = directory.parent()
else:
execfile(directory.child("_version.py").path, namespace)
return namespace["__version__"]
def findTwistedProjects(baseDirectory):
"""
Find all Twisted-style projects beneath a base directory.
@param baseDirectory: A L{twisted.python.filepath.FilePath} to look inside.
@return: A list of L{Project}.
"""
projects = []
for filePath in baseDirectory.walk():
if filePath.basename() == 'newsfragments':
projectDirectory = filePath.parent()
projects.append(Project(projectDirectory))
return projects
def replaceInFile(filename, oldToNew):
"""
Replace each key of C{oldToNew} found in C{filename} with the corresponding value.
"""
os.rename(filename, filename + '.bak')
with open(filename + '.bak') as f:
d = f.read()
for k, v in oldToNew.items():
d = d.replace(k, v)
with open(filename + '.new', 'w') as f:
f.write(d)
os.rename(filename + '.new', filename)
os.unlink(filename + '.bak')
class NoDocumentsFound(Exception):
"""
Raised when no input documents are found.
"""
class APIBuilder(object):
"""
Generate API documentation from source files using
U{pydoctor<https://github.com/twisted/pydoctor>}. This requires
pydoctor to be installed and usable.
"""
def build(self, projectName, projectURL, sourceURL, packagePath,
outputPath):
"""
Call pydoctor's entry point with options which will generate HTML
documentation for the specified package's API.
@type projectName: C{str}
@param projectName: The name of the package for which to generate
documentation.
@type projectURL: C{str}
@param projectURL: The location (probably an HTTP URL) of the project
on the web.
@type sourceURL: C{str}
@param sourceURL: The location (probably an HTTP URL) of the root of
the source browser for the project.
@type packagePath: L{FilePath}
@param packagePath: The path to the top-level of the package named by
C{projectName}.
@type outputPath: L{FilePath}
@param outputPath: An existing directory to which the generated API
documentation will be written.
"""
intersphinxes = []
for intersphinx in intersphinxURLs:
intersphinxes.append("--intersphinx")
intersphinxes.append(intersphinx)
# Super awful monkeypatch that will selectively use our templates.
from pydoctor.templatewriter import util
originalTemplatefile = util.templatefile
def templatefile(filename):
if filename in ["summary.html", "index.html", "common.html"]:
twistedPythonDir = FilePath(__file__).parent()
templatesDir = twistedPythonDir.child("_pydoctortemplates")
return templatesDir.child(filename).path
else:
return originalTemplatefile(filename)
monkeyPatch = MonkeyPatcher((util, "templatefile", templatefile))
monkeyPatch.patch()
from pydoctor.driver import main
args = [u"--project-name", projectName,
u"--project-url", projectURL,
u"--system-class", u"twisted.python._pydoctor.TwistedSystem",
u"--project-base-dir", packagePath.parent().path,
u"--html-viewsource-base", sourceURL,
u"--add-package", packagePath.path,
u"--html-output", outputPath.path,
u"--html-write-function-pages", u"--quiet", u"--make-html",
] + intersphinxes
args = [arg.encode("utf-8") for arg in args]
main(args)
monkeyPatch.restore()
class SphinxBuilder(object):
"""
Generate HTML documentation using Sphinx.
Generates and runs a shell command that looks something like::
sphinx-build -b html -d [BUILDDIR]/doctrees
[DOCDIR]/source
[BUILDDIR]/html
where DOCDIR is a directory containing another directory called "source"
which contains the Sphinx source files, and BUILDDIR is the directory in
which the Sphinx output will be created.
"""
def main(self, args):
"""
Build the main documentation.
@type args: list of str
@param args: The command line arguments to process. This must contain
one string argument: the path to the root of a Twisted checkout.
Additional arguments will be ignored for compatibility with legacy
build infrastructure.
"""
output = self.build(FilePath(args[0]).child("docs"))
if output:
sys.stdout.write(u"Unclean build:\n{}\n".format(output))
sys.exit(1)
def build(self, docDir, buildDir=None, version=''):
"""
Build the documentation in C{docDir} with Sphinx.
@param docDir: The directory of the documentation. This is a directory
which contains another directory called "source" which contains the
Sphinx "conf.py" file and sphinx source documents.
@type docDir: L{twisted.python.filepath.FilePath}
@param buildDir: The directory to build the documentation in. By
default this will be a child directory of {docDir} named "build".
@type buildDir: L{twisted.python.filepath.FilePath}
@param version: The version of Twisted to set in the docs.
@type version: C{str}
@return: the output produced by running the command
@rtype: L{str}
"""
if buildDir is None:
buildDir = docDir.parent().child('doc')
doctreeDir = buildDir.child('doctrees')
output = runCommand(['sphinx-build', '-q', '-b', 'html',
'-d', doctreeDir.path, docDir.path,
buildDir.path]).decode("utf-8")
# Delete the doctrees, as we don't want them after the docs are built
doctreeDir.remove()
for path in docDir.walk():
if path.basename() == "man":
segments = path.segmentsFrom(docDir)
dest = buildDir
while segments:
dest = dest.child(segments.pop(0))
if not dest.parent().isdir():
dest.parent().makedirs()
path.copyTo(dest)
return output
def filePathDelta(origin, destination):
"""
Return a list of strings that represent C{destination} as a path relative
to C{origin}.
It is assumed that both paths represent directories, not files. That is to
say, the delta of L{twisted.python.filepath.FilePath} /foo/bar to
L{twisted.python.filepath.FilePath} /foo/baz will be C{../baz},
not C{baz}.
@type origin: L{twisted.python.filepath.FilePath}
@param origin: The origin of the relative path.
@type destination: L{twisted.python.filepath.FilePath}
@param destination: The destination of the relative path.
"""
commonItems = 0
path1 = origin.path.split(os.sep)
path2 = destination.path.split(os.sep)
for elem1, elem2 in zip(path1, path2):
if elem1 == elem2:
commonItems += 1
else:
break
path = [".."] * (len(path1) - commonItems)
return path + path2[commonItems:]
class NotWorkingDirectory(Exception):
"""
Raised when a directory does not appear to be a repository directory of a
supported VCS.
"""
class BuildAPIDocsScript(object):
"""
A thing for building API documentation. See L{main}.
"""
def buildAPIDocs(self, projectRoot, output):
"""
Build the API documentation of Twisted, with our project policy.
@param projectRoot: A L{FilePath} representing the root of the Twisted
checkout.
@param output: A L{FilePath} pointing to the desired output directory.
"""
version = Project(
projectRoot.child("twisted")).getVersion()
versionString = version.base()
sourceURL = ("https://github.com/twisted/twisted/tree/"
"twisted-%s" % (versionString,) + "/src")
apiBuilder = APIBuilder()
apiBuilder.build(
"Twisted",
"http://twistedmatrix.com/",
sourceURL,
projectRoot.child("twisted"),
output)
def main(self, args):
"""
Build API documentation.
@type args: list of str
@param args: The command line arguments to process. This must contain
two strings: the path to the root of the Twisted checkout, and a
path to an output directory.
"""
if len(args) != 2:
sys.exit("Must specify two arguments: "
"Twisted checkout and destination path")
self.buildAPIDocs(FilePath(args[0]), FilePath(args[1]))
class CheckNewsfragmentScript(object):
"""
A thing for checking whether a checkout has a newsfragment.
"""
def __init__(self, _print):
self._print = _print
def main(self, args):
"""
Run the script.
@type args: L{list} of L{str}
@param args: The command line arguments to process. This must contain
one string: the path to the root of the Twisted checkout.
"""
if len(args) != 1:
sys.exit("Must specify one argument: the Twisted checkout")
encoding = sys.stdout.encoding or 'ascii'
location = os.path.abspath(args[0])
branch = runCommand([b"git", b"rev-parse", b"--abbrev-ref", "HEAD"],
cwd=location).decode(encoding).strip()
# diff-filter=d to exclude deleted newsfiles (which will happen on the
# release branch)
r = runCommand(
[
b"git",
b"diff",
b"--name-only",
b"origin/trunk...",
b"--diff-filter=d"
],
cwd=location
).decode(encoding).strip()
if not r:
self._print(
"On trunk or no diffs from trunk; no need to look at this.")
sys.exit(0)
files = r.strip().split(os.linesep)
self._print("Looking at these files:")
for change in files:
self._print(change)
self._print("----")
if len(files) == 1:
if files[0] == os.sep.join(["docs", "fun", "Twisted.Quotes"]):
self._print("Quotes change only; no newsfragment needed.")
sys.exit(0)
newsfragments = []
for change in files:
if os.sep + "newsfragments" + os.sep in change:
if "." in change and change.rsplit(".", 1)[1] in NEWSFRAGMENT_TYPES:
newsfragments.append(change)
if branch.startswith("release-"):
if newsfragments:
self._print("No newsfragments should be on the release branch.")
sys.exit(1)
else:
self._print("Release branch with no newsfragments, all good.")
sys.exit(0)
for change in newsfragments:
self._print("Found " + change)
sys.exit(0)
self._print("No newsfragment found. Have you committed it?")
sys.exit(1)
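
Two quick checks of filePathDelta, matching the behaviour its docstring describes (both arguments are treated as directories); the import path twisted.python._release is assumed from the test-case-name marker, since the diff omits file names.

from twisted.python.filepath import FilePath
from twisted.python._release import filePathDelta   # module path assumed

print(filePathDelta(FilePath("/foo/bar"), FilePath("/foo/baz")))
# ['..', 'baz']
print(filePathDelta(FilePath("/foo/bar"), FilePath("/foo/bar/baz")))
# ['baz']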


@@ -0,0 +1,452 @@
# -*- test-case-name: twisted.python.test.test_setup -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# pylint: disable=I0011,C0103,C9302,W9401,W9402
"""
Setuptools convenience functionality.
This file must not import anything from Twisted, as it is loaded by C{exec} in
C{setup.py}. If you need compatibility functions for this code, duplicate them
here.
@var _EXTRA_OPTIONS: These are the actual package names and versions that will
be used by C{extras_require}. This is not passed to setup directly so that
combinations of the packages can be created without the need to copy
package names multiple times.
@var _EXTRAS_REQUIRE: C{extras_require} is a dictionary of items that can be
passed to setup.py to install optional dependencies. For example, to
install the optional dev dependencies one would type::
pip install -e ".[dev]"
This has been supported by setuptools since 0.5a4.
@var _PLATFORM_INDEPENDENT: A list of all optional cross-platform dependencies,
as setuptools version specifiers, used to populate L{_EXTRAS_REQUIRE}.
@var _EXTENSIONS: The list of L{ConditionalExtension} used by the setup
process.
@var notPortedModules: Modules that are not yet ported to Python 3.
"""
import io
import os
import platform
import re
import sys
from distutils.command import build_ext
from distutils.errors import CompileError
from setuptools import Extension, find_packages
from setuptools.command.build_py import build_py
# Do not replace this with t.p.compat imports, this file must not import
# from Twisted. See the docstring.
if sys.version_info < (3, 0):
_PY3 = False
else:
_PY3 = True
STATIC_PACKAGE_METADATA = dict(
name="Twisted",
description="An asynchronous networking framework written in Python",
author="Twisted Matrix Laboratories",
author_email="twisted-python@twistedmatrix.com",
maintainer="Glyph Lefkowitz",
maintainer_email="glyph@twistedmatrix.com",
url="https://twistedmatrix.com/",
project_urls={
'Documentation': 'https://twistedmatrix.com/documents/current/',
'Source': 'https://github.com/twisted/twisted',
'Issues': 'https://twistedmatrix.com/trac/report',
},
license="MIT",
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
)
_dev = [
'pyflakes >= 1.0.0',
'twisted-dev-tools >= 0.0.2',
'python-subunit',
'sphinx >= 1.3.1',
'towncrier >= 17.4.0'
]
if not _PY3:
# These modules do not yet work on Python 3.
_dev += [
'twistedchecker >= 0.4.0',
'pydoctor >= 16.2.0',
]
_EXTRA_OPTIONS = dict(
dev=_dev,
tls=[
'pyopenssl >= 16.0.0',
# service_identity 18.1.0 added support for validating IP addresses in
# certificate subjectAltNames
'service_identity >= 18.1.0',
# idna 2.3 introduced some changes that break a few things. Avoid it.
# The problems were fixed in 2.4.
'idna >= 0.6, != 2.3',
],
conch=[
'pyasn1',
'cryptography >= 2.5',
'appdirs >= 1.4.0',
'bcrypt >= 3.0.0',
],
soap=['soappy'],
serial=['pyserial >= 3.0',
'pywin32 != 226; platform_system == "Windows"'],
macos=['pyobjc-core',
'pyobjc-framework-CFNetwork',
'pyobjc-framework-Cocoa'],
windows=['pywin32 != 226'],
http2=['h2 >= 3.0, < 4.0',
'priority >= 1.1.0, < 2.0'],
)
_PLATFORM_INDEPENDENT = (
_EXTRA_OPTIONS['tls'] +
_EXTRA_OPTIONS['conch'] +
_EXTRA_OPTIONS['soap'] +
_EXTRA_OPTIONS['serial'] +
_EXTRA_OPTIONS['http2']
)
_EXTRAS_REQUIRE = {
'dev': _EXTRA_OPTIONS['dev'],
'tls': _EXTRA_OPTIONS['tls'],
'conch': _EXTRA_OPTIONS['conch'],
'soap': _EXTRA_OPTIONS['soap'],
'serial': _EXTRA_OPTIONS['serial'],
'http2': _EXTRA_OPTIONS['http2'],
'all_non_platform': _PLATFORM_INDEPENDENT,
'macos_platform': (
_EXTRA_OPTIONS['macos'] + _PLATFORM_INDEPENDENT
),
'windows_platform': (
_EXTRA_OPTIONS['windows'] + _PLATFORM_INDEPENDENT
),
}
_EXTRAS_REQUIRE['osx_platform'] = _EXTRAS_REQUIRE['macos_platform']
# Scripts provided by Twisted on Python 2 and 3.
_CONSOLE_SCRIPTS = [
"ckeygen = twisted.conch.scripts.ckeygen:run",
"cftp = twisted.conch.scripts.cftp:run",
"conch = twisted.conch.scripts.conch:run",
"mailmail = twisted.mail.scripts.mailmail:run",
"pyhtmlizer = twisted.scripts.htmlizer:run",
"tkconch = twisted.conch.scripts.tkconch:run",
"trial = twisted.scripts.trial:run",
"twist = twisted.application.twist._twist:Twist.main",
"twistd = twisted.scripts.twistd:run",
]
class ConditionalExtension(Extension, object):
"""
An extension module that will only be compiled if certain conditions are
met.
@param condition: A callable of one argument which returns True or False to
indicate whether the extension should be built. The argument is an
instance of L{build_ext_twisted}, which has useful methods for checking
things about the platform.
"""
def __init__(self, *args, **kwargs):
self.condition = kwargs.pop("condition", lambda builder: True)
Extension.__init__(self, *args, **kwargs)
# The C extensions used for Twisted.
_EXTENSIONS = [
ConditionalExtension(
"twisted.test.raiser",
sources=["src/twisted/test/raiser.c"],
condition=lambda _: _isCPython),
ConditionalExtension(
"twisted.internet.iocpreactor.iocpsupport",
sources=[
"src/twisted/internet/iocpreactor/iocpsupport/iocpsupport.c",
"src/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c",
],
libraries=["ws2_32"],
condition=lambda _: _isCPython and sys.platform == "win32"),
ConditionalExtension(
"twisted.python._sendmsg",
sources=["src/twisted/python/_sendmsg.c"],
condition=lambda _: not _PY3 and sys.platform != "win32"),
]
def _longDescriptionArgsFromReadme(readme):
"""
Generate a PyPI long description from the readme.
@param readme: Path to the readme reStructuredText file.
@type readme: C{str}
@return: Keyword arguments to be passed to C{setuptools.setup()}.
@rtype: C{dict}
"""
with io.open(readme, encoding='utf-8') as f:
readmeRst = f.read()
# Munge links of the form `NEWS <NEWS.rst>`_ to point at the appropriate
# location on GitHub so that they function when the long description is
# displayed on PyPI.
longDesc = re.sub(
r'`([^`]+)\s+<(?!https?://)([^>]+)>`_',
r'`\1 <https://github.com/twisted/twisted/blob/trunk/\2>`_',
readmeRst,
flags=re.I,
)
return {
'long_description': longDesc,
'long_description_content_type': 'text/x-rst',
}
def getSetupArgs(extensions=_EXTENSIONS, readme='README.rst'):
"""
Generate arguments for C{setuptools.setup()}
@param extensions: C extension modules to maybe build. This argument is to
be used for testing.
@type extensions: C{list} of C{ConditionalExtension}
@param readme: Path to the readme reStructuredText file. This argument is
to be used for testing.
@type readme: C{str}
@return: The keyword arguments to be used by the setup method.
@rtype: L{dict}
"""
arguments = STATIC_PACKAGE_METADATA.copy()
if readme:
arguments.update(_longDescriptionArgsFromReadme(readme))
# This is a workaround for distutils behavior; ext_modules isn't
# actually used by our custom builder. distutils deep-down checks
# to see if there are any ext_modules defined before invoking
# the build_ext command. We need to trigger build_ext regardless
# because it is the thing that does the conditional checks to see
# if it should build any extensions. The reason we have to delay
# the conditional checks until then is that the compiler objects
# are not yet set up when this code is executed.
arguments["ext_modules"] = extensions
# Use a custom class to build the extensions.
class my_build_ext(build_ext_twisted):
conditionalExtensions = extensions
command_classes = {
'build_ext': my_build_ext,
}
if sys.version_info[0] >= 3:
command_classes['build_py'] = BuildPy3
requirements = [
"zope.interface >= 4.4.2",
"constantly >= 15.1",
"incremental >= 16.10.1",
"Automat >= 0.3.0",
"hyperlink >= 17.1.1",
# PyHamcrest 1.10.0 is Python 3 only, but lacks package metadata that
# says so. This condition can be dropped when Twisted drops support for
# Python 2.7.
"PyHamcrest >= 1.9.0, != 1.10.0",
"attrs >= 19.2.0",
]
arguments.update(dict(
packages=find_packages("src"),
use_incremental=True,
setup_requires=["incremental >= 16.10.1"],
install_requires=requirements,
entry_points={
'console_scripts': _CONSOLE_SCRIPTS
},
cmdclass=command_classes,
include_package_data=True,
exclude_package_data={
"": ["*.c", "*.h", "*.pxi", "*.pyx", "build.bat"],
},
zip_safe=False,
extras_require=_EXTRAS_REQUIRE,
package_dir={"": "src"},
))
return arguments
class BuildPy3(build_py, object):
"""
A version of build_py that doesn't install the modules that aren't yet
ported to Python 3.
"""
def find_package_modules(self, package, package_dir):
modules = [
module for module
in build_py.find_package_modules(self, package, package_dir)
if ".".join([module[0], module[1]]) not in notPortedModules]
return modules
## Helpers and distutils tweaks
class build_ext_twisted(build_ext.build_ext, object):
"""
Allow subclasses to easily detect and customize Extensions to
build at install-time.
"""
def prepare_extensions(self):
"""
Prepare the C{self.extensions} attribute (used by
L{build_ext.build_ext}) by checking which extensions in
I{conditionalExtensions} should be built. In addition, if we are
building on NT, define the WIN32 macro to 1.
"""
# always define WIN32 under Windows
if os.name == 'nt':
self.define_macros = [("WIN32", 1)]
else:
self.define_macros = []
# On Solaris 10, we need to define the _XOPEN_SOURCE and
# _XOPEN_SOURCE_EXTENDED macros to build in order to gain access to
# the msg_control, msg_controllen, and msg_flags members in
# sendmsg.c. (according to
# https://stackoverflow.com/questions/1034587). See the documentation
# of X/Open CAE in the standards(5) man page of Solaris.
if sys.platform.startswith('sunos'):
self.define_macros.append(('_XOPEN_SOURCE', 1))
self.define_macros.append(('_XOPEN_SOURCE_EXTENDED', 1))
self.extensions = [
x for x in self.conditionalExtensions if x.condition(self)
]
for ext in self.extensions:
ext.define_macros.extend(self.define_macros)
def build_extensions(self):
"""
Check to see which extension modules to build and then build them.
"""
self.prepare_extensions()
build_ext.build_ext.build_extensions(self)
def _remove_conftest(self):
for filename in ("conftest.c", "conftest.o", "conftest.obj"):
try:
os.unlink(filename)
except EnvironmentError:
pass
def _compile_helper(self, content):
conftest = open("conftest.c", "w")
try:
with conftest:
conftest.write(content)
try:
self.compiler.compile(["conftest.c"], output_dir='')
except CompileError:
return False
return True
finally:
self._remove_conftest()
def _check_header(self, header_name):
"""
Check if the given header can be included by trying to compile a file
that contains only an #include line.
"""
self.compiler.announce("checking for {} ...".format(header_name), 0)
return self._compile_helper("#include <{}>\n".format(header_name))
def _checkCPython(sys=sys, platform=platform):
"""
Checks if this implementation is CPython.
This uses C{platform.python_implementation}.
This takes C{sys} and C{platform} kwargs that by default use the real
modules. You shouldn't care about these -- they are for testing purposes
only.
@return: C{False} if the implementation is definitely not CPython, C{True}
otherwise.
"""
return platform.python_implementation() == "CPython"
_isCPython = _checkCPython()
notPortedModules = [
"twisted.mail.alias",
"twisted.mail.bounce",
"twisted.mail.mail",
"twisted.mail.maildir",
"twisted.mail.pb",
"twisted.mail.relaymanager",
"twisted.mail.scripts.__init__",
"twisted.mail.tap",
"twisted.mail.test.test_bounce",
"twisted.mail.test.test_mail",
"twisted.mail.test.test_options",
"twisted.mail.test.test_scripts",
"twisted.news.__init__",
"twisted.news.database",
"twisted.news.news",
"twisted.news.nntp",
"twisted.news.tap",
"twisted.news.test.__init__",
"twisted.news.test.test_database",
"twisted.news.test.test_news",
"twisted.news.test.test_nntp",
"twisted.plugins.twisted_mail",
"twisted.plugins.twisted_news",
"twisted.protocols.shoutcast",
"twisted.python._pydoctor",
"twisted.python.finalize",
"twisted.python.hook",
"twisted.python.test.cmodulepullpipe",
"twisted.python.test.test_pydoctor",
"twisted.python.test.test_win32",
"twisted.test.test_hook",
"twisted.web.soap",
"twisted.web.test.test_soap",
]
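
Since the docstring insists this module must be loaded with exec() rather than imported, a minimal sketch of the consuming side might look like the following; this is not the project's actual setup.py, and the source path is assumed.

from setuptools import setup

_namespace = {}
with open("src/twisted/python/_setup.py") as f:    # path assumed
    exec(f.read(), _namespace)                     # no Twisted import happens

setup(**_namespace["getSetupArgs"]())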


@@ -0,0 +1,677 @@
# -*- test-case-name: twisted.python.test.test_shellcomp -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
No public APIs are provided by this module. Internal use only.
This module implements dynamic tab-completion for any command that uses
twisted.python.usage. Currently, only zsh is supported. Bash support may
be added in the future.
Maintainer: Eric P. Mangold - twisted AT teratorn DOT org
In order for zsh completion to take place the shell must be able to find an
appropriate "stub" file ("completion function") that invokes this code and
displays the results to the user.
The stub used for Twisted commands is in the file C{twisted-completion.zsh},
which is also included in the official Zsh distribution at
C{Completion/Unix/Command/_twisted}. Use this file as a basis for completion
functions for your own commands. You should only need to change the first line
to something like C{#compdef mycommand}.
The main public documentation exists in the L{twisted.python.usage.Options}
docstring, the L{twisted.python.usage.Completions} docstring, and the
Options howto.
"""
import itertools, getopt, inspect
from twisted.python import reflect, util, usage
from twisted.python.compat import ioType, unicode
def shellComplete(config, cmdName, words, shellCompFile):
"""
Perform shell completion.
A completion function (shell script) is generated for the requested
shell and written to C{shellCompFile}, typically C{stdout}. The result
is then eval'd by the shell to produce the desired completions.
@type config: L{twisted.python.usage.Options}
@param config: The L{twisted.python.usage.Options} instance to generate
completions for.
@type cmdName: C{str}
@param cmdName: The name of the command we're generating completions for.
In the case of zsh, this is used to print an appropriate
"#compdef $CMD" line at the top of the output. This is
not necessary for the functionality of the system, but it
helps in debugging, since the output we produce is properly
formed and may be saved in a file and used as a stand-alone
completion function.
@type words: C{list} of C{str}
@param words: The raw command-line words passed to us by the shell
stub function. argv[0] has already been stripped off.
@type shellCompFile: C{file}
@param shellCompFile: The file to write completion data to.
"""
# If given a file with unicode semantics, such as sys.stdout on Python 3,
# we must get at the underlying buffer, which has bytes semantics.
if shellCompFile and ioType(shellCompFile) == unicode:
shellCompFile = shellCompFile.buffer
# shellName is provided for forward-compatibility. It is not used,
# since we currently only support zsh.
shellName, position = words[-1].split(":")
position = int(position)
# zsh gives the completion position ($CURRENT) as a 1-based index,
# and argv[0] has already been stripped off, so we subtract 2 to
# get the real 0-based index.
position -= 2
cWord = words[position]
# since the user may hit TAB at any time, we may have been called with an
# incomplete command-line that would generate getopt errors if parsed
# verbatim. However, we must do *some* parsing in order to determine if
# there is a specific subcommand that we need to provide completion for.
# So, to make the command-line more sane we work backwards from the
# current completion position and strip off all words until we find one
# that "looks" like a subcommand. It may in fact be the argument to a
# normal command-line option, but that won't matter for our purposes.
while position >= 1:
if words[position - 1].startswith("-"):
position -= 1
else:
break
words = words[:position]
subCommands = getattr(config, 'subCommands', None)
if subCommands:
# OK, this command supports sub-commands, so let's see if we have been
# given one.
# If the command-line arguments are not valid then we won't be able to
# sanely detect the sub-command, so just generate completions as if no
# sub-command was found.
args = None
try:
opts, args = getopt.getopt(words,
config.shortOpt, config.longOpt)
except getopt.error:
pass
if args:
# yes, we have a subcommand. Try to find it.
for (cmd, short, parser, doc) in config.subCommands:
if args[0] == cmd or args[0] == short:
subOptions = parser()
subOptions.parent = config
gen = ZshSubcommandBuilder(subOptions, config, cmdName,
shellCompFile)
gen.write()
return
# sub-command not given, or did not match any known sub-command names
genSubs = True
if cWord.startswith("-"):
# optimization: if the current word being completed starts
# with a hyphen then it can't be a sub-command, so skip
# the expensive generation of the sub-command list
genSubs = False
gen = ZshBuilder(config, cmdName, shellCompFile)
gen.write(genSubs=genSubs)
else:
gen = ZshBuilder(config, cmdName, shellCompFile)
gen.write()
class SubcommandAction(usage.Completer):
def _shellCode(self, optName, shellType):
if shellType == usage._ZSH:
return '*::subcmd:->subcmd'
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class ZshBuilder(object):
"""
Constructs zsh code that will complete options for a given usage.Options
instance, possibly including a list of subcommand names.
Completions for options to subcommands won't be generated because this
class will never be used if the user is completing options for a specific
subcommand. (See L{ZshSubcommandBuilder} below)
@type options: L{twisted.python.usage.Options}
@ivar options: The L{twisted.python.usage.Options} instance defined for this
command.
@type cmdName: C{str}
@ivar cmdName: The name of the command we're generating completions for.
@type file: C{file}
@ivar file: The C{file} to write the completion function to. The C{file}
must have L{bytes} I/O semantics.
"""
def __init__(self, options, cmdName, file):
self.options = options
self.cmdName = cmdName
self.file = file
def write(self, genSubs=True):
"""
Generate the completion function and write it to the output file
@return: L{None}
@type genSubs: C{bool}
@param genSubs: Flag indicating whether or not completions for the list
of subcommand should be generated. Only has an effect
if the C{subCommands} attribute has been defined on the
L{twisted.python.usage.Options} instance.
"""
if genSubs and getattr(self.options, 'subCommands', None) is not None:
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
gen.extraActions.insert(0, SubcommandAction())
gen.write()
self.file.write(b'local _zsh_subcmds_array\n_zsh_subcmds_array=(\n')
for (cmd, short, parser, desc) in self.options.subCommands:
self.file.write(
b'\"' + cmd.encode('utf-8') + b':' + desc.encode('utf-8') +b'\"\n')
self.file.write(b")\n\n")
self.file.write(b'_describe "sub-command" _zsh_subcmds_array\n')
else:
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
gen.write()
class ZshSubcommandBuilder(ZshBuilder):
"""
Constructs zsh code that will complete options for a given usage.Options
instance, and also for a single sub-command. This will only be used in
the case where the user is completing options for a specific subcommand.
@type subOptions: L{twisted.python.usage.Options}
@ivar subOptions: The L{twisted.python.usage.Options} instance defined for
the sub command.
"""
def __init__(self, subOptions, *args):
self.subOptions = subOptions
ZshBuilder.__init__(self, *args)
def write(self):
"""
Generate the completion function and write it to the output file
@return: L{None}
"""
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
gen.extraActions.insert(0, SubcommandAction())
gen.write()
gen = ZshArgumentsGenerator(self.subOptions, self.cmdName, self.file)
gen.write()
class ZshArgumentsGenerator(object):
"""
Generate a call to the zsh _arguments completion function
based on data in a usage.Options instance
The first three instance variables are populated based on constructor
arguments. The remaining non-constructor variables are populated by this
class with data gathered from the C{Options} instance passed in, and its
base classes.
@type options: L{twisted.python.usage.Options}
@ivar options: The L{twisted.python.usage.Options} instance to generate for
@type cmdName: C{str}
@ivar cmdName: The name of the command we're generating completions for.
@type file: C{file}
@ivar file: The C{file} to write the completion function to. The C{file}
must have L{bytes} I/O semantics.
@type descriptions: C{dict}
@ivar descriptions: A dict mapping long option names to alternate
descriptions. When this variable is defined, the descriptions
contained here will override those descriptions provided in the
optFlags and optParameters variables.
@type multiUse: C{list}
@ivar multiUse: An iterable containing those long option names which may
appear on the command line more than once. By default, options will
only be completed one time.
@type mutuallyExclusive: C{list} of C{tuple}
@ivar mutuallyExclusive: A sequence of sequences, with each sub-sequence
containing those long option names that are mutually exclusive. That is,
those options that cannot appear on the command line together.
@type optActions: C{dict}
@ivar optActions: A dict mapping long option names to shell "actions".
These actions define what may be completed as the argument to the
given option, and should be given as instances of
L{twisted.python.usage.Completer}.
Callables may instead be given for the values in this dict. The
callable should accept no arguments, and return a C{Completer}
instance used as the action.
@type extraActions: C{list} of C{twisted.python.usage.Completer}
@ivar extraActions: Extra arguments are those arguments typically
appearing at the end of the command-line, which are not associated
with any particular named option. That is, the arguments that are
given to the parseArgs() method of your usage.Options subclass.
"""
def __init__(self, options, cmdName, file):
self.options = options
self.cmdName = cmdName
self.file = file
self.descriptions = {}
self.multiUse = set()
self.mutuallyExclusive = []
self.optActions = {}
self.extraActions = []
for cls in reversed(inspect.getmro(options.__class__)):
data = getattr(cls, 'compData', None)
if data:
self.descriptions.update(data.descriptions)
self.optActions.update(data.optActions)
self.multiUse.update(data.multiUse)
self.mutuallyExclusive.extend(data.mutuallyExclusive)
# I don't see any sane way to aggregate extraActions, so just
# take the one at the top of the MRO (nearest the `options'
# instance).
if data.extraActions:
self.extraActions = data.extraActions
aCL = reflect.accumulateClassList
optFlags = []
optParams = []
aCL(options.__class__, 'optFlags', optFlags)
aCL(options.__class__, 'optParameters', optParams)
for i, optList in enumerate(optFlags):
if len(optList) != 3:
optFlags[i] = util.padTo(3, optList)
for i, optList in enumerate(optParams):
if len(optList) != 5:
optParams[i] = util.padTo(5, optList)
self.optFlags = optFlags
self.optParams = optParams
paramNameToDefinition = {}
for optList in optParams:
paramNameToDefinition[optList[0]] = optList[1:]
self.paramNameToDefinition = paramNameToDefinition
flagNameToDefinition = {}
for optList in optFlags:
flagNameToDefinition[optList[0]] = optList[1:]
self.flagNameToDefinition = flagNameToDefinition
allOptionsNameToDefinition = {}
allOptionsNameToDefinition.update(paramNameToDefinition)
allOptionsNameToDefinition.update(flagNameToDefinition)
self.allOptionsNameToDefinition = allOptionsNameToDefinition
self.addAdditionalOptions()
# makes sure none of the Completions metadata references
# option names that don't exist. (great for catching typos)
self.verifyZshNames()
self.excludes = self.makeExcludesDict()
def write(self):
"""
Write the zsh completion code to the file given to __init__
@return: L{None}
"""
self.writeHeader()
self.writeExtras()
self.writeOptions()
self.writeFooter()
def writeHeader(self):
"""
This is the start of the code that calls _arguments
@return: L{None}
"""
self.file.write(b'#compdef ' + self.cmdName.encode('utf-8') +
b'\n\n'
b'_arguments -s -A "-*" \\\n')
def writeOptions(self):
"""
Write out zsh code for each option in this command
@return: L{None}
"""
optNames = list(self.allOptionsNameToDefinition.keys())
optNames.sort()
for longname in optNames:
self.writeOpt(longname)
def writeExtras(self):
"""
Write out completion information for extra arguments appearing on the
command-line. These are extra positional arguments not associated
with a named option. That is, the stuff that gets passed to
Options.parseArgs().
@return: L{None}
@raises: ValueError: if C{Completer} with C{repeat=True} is found and
is not the last item in the C{extraActions} list.
"""
for i, action in enumerate(self.extraActions):
# a repeatable action must be the last action in the list
if action._repeat and i != len(self.extraActions) - 1:
raise ValueError("Completer with repeat=True must be "
"last item in Options.extraActions")
self.file.write(
escape(action._shellCode('', usage._ZSH)).encode('utf-8'))
self.file.write(b' \\\n')
def writeFooter(self):
"""
Write the last bit of code that finishes the call to _arguments
@return: L{None}
"""
self.file.write(b'&& return 0\n')
def verifyZshNames(self):
"""
Ensure that none of the option names given in the metadata are typoed
@return: L{None}
@raise ValueError: Raised if unknown option names have been found.
"""
def err(name):
raise ValueError("Unknown option name \"%s\" found while\n"
"examining Completions instances on %s" % (
name, self.options))
for name in itertools.chain(self.descriptions, self.optActions,
self.multiUse):
if name not in self.allOptionsNameToDefinition:
err(name)
for seq in self.mutuallyExclusive:
for name in seq:
if name not in self.allOptionsNameToDefinition:
err(name)
def excludeStr(self, longname, buildShort=False):
"""
Generate an "exclusion string" for the given option
@type longname: C{str}
@param longname: The long option name (e.g. "verbose" instead of "v")
@type buildShort: C{bool}
@param buildShort: May be True to indicate we're building an excludes
string for the short option that corresponds to the given long opt.
@return: The generated C{str}
"""
if longname in self.excludes:
exclusions = self.excludes[longname].copy()
else:
exclusions = set()
# if longname isn't a multiUse option (can't appear on the cmd line more
# than once), then we have to exclude the short option if we're
# building for the long option, and vice versa.
if longname not in self.multiUse:
if buildShort is False:
short = self.getShortOption(longname)
if short is not None:
exclusions.add(short)
else:
exclusions.add(longname)
if not exclusions:
return ''
strings = []
for optName in exclusions:
if len(optName) == 1:
# short option
strings.append("-" + optName)
else:
strings.append("--" + optName)
strings.sort() # need deterministic order for reliable unit-tests
return "(%s)" % " ".join(strings)
def makeExcludesDict(self):
"""
@return: A C{dict} that maps each option name appearing in
self.mutuallyExclusive to a set of those option names that it is
mutually exclusive with (i.e. cannot appear on the command line with).
"""
#create a mapping of long option name -> single character name
longToShort = {}
for optList in itertools.chain(self.optParams, self.optFlags):
if optList[1] != None:
longToShort[optList[0]] = optList[1]
excludes = {}
for lst in self.mutuallyExclusive:
for i, longname in enumerate(lst):
tmp = set(lst[:i] + lst[i+1:])
for name in tmp.copy():
if name in longToShort:
tmp.add(longToShort[name])
if longname in excludes:
excludes[longname] = excludes[longname].union(tmp)
else:
excludes[longname] = tmp
return excludes
def writeOpt(self, longname):
"""
Write out the zsh code for the given argument. This is just part of the
one big call to _arguments
@type longname: C{str}
@param longname: The long option name (e.g. "verbose" instead of "v")
@return: L{None}
"""
if longname in self.flagNameToDefinition:
# It's a flag option. Not one that takes a parameter.
longField = "--%s" % longname
else:
longField = "--%s=" % longname
short = self.getShortOption(longname)
        if short is not None:
shortField = "-" + short
else:
shortField = ''
descr = self.getDescription(longname)
        descriptionField = descr.replace("[", "\\[")
        descriptionField = descriptionField.replace("]", "\\]")
descriptionField = '[%s]' % descriptionField
actionField = self.getAction(longname)
if longname in self.multiUse:
multiField = '*'
else:
multiField = ''
longExclusionsField = self.excludeStr(longname)
if short:
#we have to write an extra line for the short option if we have one
shortExclusionsField = self.excludeStr(longname, buildShort=True)
self.file.write(escape('%s%s%s%s%s' % (shortExclusionsField,
multiField, shortField, descriptionField, actionField)).encode('utf-8'))
self.file.write(b' \\\n')
self.file.write(escape('%s%s%s%s%s' % (longExclusionsField,
multiField, longField, descriptionField, actionField)).encode('utf-8'))
self.file.write(b' \\\n')
def getAction(self, longname):
"""
Return a zsh "action" string for the given argument
@return: C{str}
"""
if longname in self.optActions:
if callable(self.optActions[longname]):
action = self.optActions[longname]()
else:
action = self.optActions[longname]
return action._shellCode(longname, usage._ZSH)
if longname in self.paramNameToDefinition:
return ':%s:_files' % (longname,)
return ''
def getDescription(self, longname):
"""
Return the description to be used for this argument
@return: C{str}
"""
#check if we have an alternate descr for this arg, and if so use it
if longname in self.descriptions:
return self.descriptions[longname]
#otherwise we have to get it from the optFlags or optParams
try:
descr = self.flagNameToDefinition[longname][1]
except KeyError:
try:
descr = self.paramNameToDefinition[longname][2]
except KeyError:
descr = None
if descr is not None:
return descr
# let's try to get it from the opt_foo method doc string if there is one
longMangled = longname.replace('-', '_') # this is what t.p.usage does
obj = getattr(self.options, 'opt_%s' % longMangled, None)
if obj is not None:
descr = descrFromDoc(obj)
if descr is not None:
return descr
return longname # we really ought to have a good description to use
def getShortOption(self, longname):
"""
Return the short option letter or None
@return: C{str} or L{None}
"""
optList = self.allOptionsNameToDefinition[longname]
return optList[0] or None
def addAdditionalOptions(self):
"""
Add additional options to the optFlags and optParams lists.
These will be defined by 'opt_foo' methods of the Options subclass
@return: L{None}
"""
methodsDict = {}
reflect.accumulateMethods(self.options, methodsDict, 'opt_')
methodToShort = {}
for name in methodsDict.copy():
if len(name) == 1:
methodToShort[methodsDict[name]] = name
del methodsDict[name]
for methodName, methodObj in methodsDict.items():
longname = methodName.replace('_', '-') # t.p.usage does this
# if this option is already defined by the optFlags or
# optParameters then we don't want to override that data
if longname in self.allOptionsNameToDefinition:
continue
descr = self.getDescription(longname)
short = None
if methodObj in methodToShort:
short = methodToShort[methodObj]
reqArgs = methodObj.__func__.__code__.co_argcount
if reqArgs == 2:
self.optParams.append([longname, short, None, descr])
self.paramNameToDefinition[longname] = [short, None, descr]
self.allOptionsNameToDefinition[longname] = [short, None, descr]
else:
# reqArgs must equal 1. self.options would have failed
# to instantiate if it had opt_ methods with bad signatures.
self.optFlags.append([longname, short, descr])
self.flagNameToDefinition[longname] = [short, descr]
self.allOptionsNameToDefinition[longname] = [short, None, descr]
def descrFromDoc(obj):
"""
    Generate an appropriate description from the docstring of the given object
"""
if obj.__doc__ is None or obj.__doc__.isspace():
return None
lines = [x.strip() for x in obj.__doc__.split("\n")
if x and not x.isspace()]
return " ".join(lines)
def escape(x):
"""
Shell escape the given string
Implementation borrowed from now-deprecated commands.mkarg() in the stdlib
"""
if '\'' not in x:
return '\'' + x + '\''
s = '"'
for c in x:
if c in '\\$"`':
s = s + '\\'
s = s + c
s = s + '"'
return s
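# Editor's note: a small runnable sketch (not part of the original source) of
# the quoting rules implemented by escape() above: strings without a single
# quote are wrapped in single quotes; anything else is double quoted with
# backslash escapes for \, $, " and `.
def _escapeExample():
    assert escape("plain") == "'plain'"
    assert escape("it's $HOME") == '"it\'s \\$HOME"'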

View file

@ -0,0 +1,320 @@
# -*- test-case-name: twisted.python.test.test_textattributes -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module provides some common functionality for the manipulation of
formatting states.
Defining the mechanism by which text containing character attributes is
constructed begins by subclassing L{CharacterAttributesMixin}.
Defining how a single formatting state is to be serialized begins by
subclassing L{_FormattingStateMixin}.
Serializing a formatting structure is done with L{flatten}.
@see: L{twisted.conch.insults.helper._FormattingState}
@see: L{twisted.conch.insults.text._CharacterAttributes}
@see: L{twisted.words.protocols.irc._FormattingState}
@see: L{twisted.words.protocols.irc._CharacterAttributes}
"""
from __future__ import print_function
from twisted.python.util import FancyEqMixin
class _Attribute(FancyEqMixin, object):
"""
A text attribute.
Indexing a text attribute with a C{str} or another text attribute adds that
object as a child, indexing with a C{list} or C{tuple} adds the elements as
children; in either case C{self} is returned.
@type children: C{list}
@ivar children: Child attributes.
"""
compareAttributes = ('children',)
def __init__(self):
self.children = []
def __repr__(self):
return '<%s %r>' % (type(self).__name__, vars(self))
def __getitem__(self, item):
assert isinstance(item, (list, tuple, _Attribute, str))
if isinstance(item, (list, tuple)):
self.children.extend(item)
else:
self.children.append(item)
return self
def serialize(self, write, attrs=None, attributeRenderer='toVT102'):
"""
Serialize the text attribute and its children.
@param write: C{callable}, taking one C{str} argument, called to output
a single text attribute at a time.
@param attrs: A formatting state instance used to determine how to
serialize the attribute children.
@type attributeRenderer: C{str}
@param attributeRenderer: Name of the method on I{attrs} that should be
called to render the attributes during serialization. Defaults to
C{'toVT102'}.
"""
if attrs is None:
attrs = DefaultFormattingState()
for ch in self.children:
if isinstance(ch, _Attribute):
ch.serialize(write, attrs.copy(), attributeRenderer)
else:
renderMeth = getattr(attrs, attributeRenderer)
write(renderMeth())
write(ch)
class _NormalAttr(_Attribute):
"""
A text attribute for normal text.
"""
def serialize(self, write, attrs, attributeRenderer):
attrs.__init__()
_Attribute.serialize(self, write, attrs, attributeRenderer)
class _OtherAttr(_Attribute):
"""
A text attribute for text with formatting attributes.
The unary minus operator returns the inverse of this attribute, where that
makes sense.
@type attrname: C{str}
@ivar attrname: Text attribute name.
@ivar attrvalue: Text attribute value.
"""
compareAttributes = ('attrname', 'attrvalue', 'children')
def __init__(self, attrname, attrvalue):
_Attribute.__init__(self)
self.attrname = attrname
self.attrvalue = attrvalue
def __neg__(self):
result = _OtherAttr(self.attrname, not self.attrvalue)
result.children.extend(self.children)
return result
def serialize(self, write, attrs, attributeRenderer):
attrs = attrs._withAttribute(self.attrname, self.attrvalue)
_Attribute.serialize(self, write, attrs, attributeRenderer)
class _ColorAttr(_Attribute):
"""
Generic color attribute.
@param color: Color value.
@param ground: Foreground or background attribute name.
"""
compareAttributes = ('color', 'ground', 'children')
def __init__(self, color, ground):
_Attribute.__init__(self)
self.color = color
self.ground = ground
def serialize(self, write, attrs, attributeRenderer):
attrs = attrs._withAttribute(self.ground, self.color)
_Attribute.serialize(self, write, attrs, attributeRenderer)
class _ForegroundColorAttr(_ColorAttr):
"""
Foreground color attribute.
"""
def __init__(self, color):
_ColorAttr.__init__(self, color, 'foreground')
class _BackgroundColorAttr(_ColorAttr):
"""
Background color attribute.
"""
def __init__(self, color):
_ColorAttr.__init__(self, color, 'background')
class _ColorAttribute(object):
"""
A color text attribute.
Attribute access results in a color value lookup, by name, in
I{_ColorAttribute.attrs}.
@type ground: L{_ColorAttr}
@param ground: Foreground or background color attribute to look color names
up from.
@param attrs: Mapping of color names to color values.
@type attrs: Dict like object.
"""
def __init__(self, ground, attrs):
self.ground = ground
self.attrs = attrs
def __getattr__(self, name):
try:
return self.ground(self.attrs[name])
except KeyError:
raise AttributeError(name)
class CharacterAttributesMixin(object):
"""
Mixin for character attributes that implements a C{__getattr__} method
returning a new C{_NormalAttr} instance when attempting to access
a C{'normal'} attribute; otherwise a new C{_OtherAttr} instance is returned
    for names that appear in the C{'attrs'} attribute.
"""
def __getattr__(self, name):
if name == 'normal':
return _NormalAttr()
if name in self.attrs:
return _OtherAttr(name, True)
raise AttributeError(name)
class DefaultFormattingState(FancyEqMixin, object):
"""
A character attribute that does nothing, thus applying no attributes to
text.
"""
compareAttributes = ('_dummy',)
_dummy = 0
def copy(self):
"""
Make a copy of this formatting state.
@return: A formatting state instance.
"""
return type(self)()
def _withAttribute(self, name, value):
"""
Add a character attribute to a copy of this formatting state.
@param name: Attribute name to be added to formatting state.
@param value: Attribute value.
@return: A formatting state instance with the new attribute.
"""
return self.copy()
def toVT102(self):
"""
Emit a VT102 control sequence that will set up all the attributes this
formatting state has set.
@return: A string containing VT102 control sequences that mimic this
formatting state.
"""
return ''
class _FormattingStateMixin(DefaultFormattingState):
"""
Mixin for the formatting state/attributes of a single character.
"""
def copy(self):
c = DefaultFormattingState.copy(self)
c.__dict__.update(vars(self))
return c
def _withAttribute(self, name, value):
if getattr(self, name) != value:
attr = self.copy()
attr._subtracting = not value
setattr(attr, name, value)
return attr
else:
return self.copy()
def flatten(output, attrs, attributeRenderer='toVT102'):
"""
Serialize a sequence of characters with attribute information
The resulting string can be interpreted by compatible software so that the
contained characters are displayed and, for those attributes which are
supported by the software, the attributes expressed. The exact result of
the serialization depends on the behavior of the method specified by
I{attributeRenderer}.
For example, if your terminal is VT102 compatible, you might run
this for a colorful variation on the \"hello world\" theme::
from twisted.conch.insults.text import flatten, attributes as A
from twisted.conch.insults.helper import CharacterAttribute
print(flatten(
A.normal[A.bold[A.fg.red['He'], A.fg.green['ll'], A.fg.magenta['o'], ' ',
A.fg.yellow['Wo'], A.fg.blue['rl'], A.fg.cyan['d!']]],
CharacterAttribute()))
@param output: Object returned by accessing attributes of the
module-level attributes object.
@param attrs: A formatting state instance used to determine how to
serialize C{output}.
@type attributeRenderer: C{str}
@param attributeRenderer: Name of the method on I{attrs} that should be
called to render the attributes during serialization. Defaults to
C{'toVT102'}.
@return: A string expressing the text and display attributes specified by
L{output}.
"""
flattened = []
output.serialize(flattened.append, attrs, attributeRenderer)
return ''.join(flattened)
__all__ = [
'flatten', 'DefaultFormattingState', 'CharacterAttributesMixin']
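# Editor's note: a minimal runnable sketch (not part of the original module)
# of the indexing syntax described on _Attribute, flattened with the no-op
# DefaultFormattingState so the attributes themselves render as empty strings.
def _flattenExample():
    text = _NormalAttr()['hello ', _OtherAttr('bold', True)['world']]
    assert flatten(text, DefaultFormattingState()) == 'hello world'
    return text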

View file

@ -0,0 +1,119 @@
# -*- test-case-name: twisted.python.test.test_tzhelper -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Time zone utilities.
"""
from datetime import datetime, timedelta, tzinfo
__all__ = [
"FixedOffsetTimeZone",
"UTC",
]
class FixedOffsetTimeZone(tzinfo):
"""
Represents a fixed timezone offset (without daylight saving time).
@ivar name: A L{str} giving the name of this timezone; the name just
includes how much time this offset represents.
@ivar offset: A L{timedelta} giving the amount of time this timezone is
offset.
"""
def __init__(self, offset, name=None):
"""
Construct a L{FixedOffsetTimeZone} with a fixed offset.
@param offset: a delta representing the offset from UTC.
@type offset: L{timedelta}
@param name: A name to be given for this timezone.
@type name: L{str} or L{None}
"""
self.offset = offset
self.name = name
@classmethod
def fromSignHoursMinutes(cls, sign, hours, minutes):
"""
Construct a L{FixedOffsetTimeZone} from an offset described by sign
('+' or '-'), hours, and minutes.
@note: For protocol compatibility with AMP, this method never uses 'Z'
@param sign: A string describing the positive or negative-ness of the
offset.
@param hours: The number of hours in the offset.
@type hours: L{int}
@param minutes: The number of minutes in the offset
@type minutes: L{int}
@return: A time zone with the given offset, and a name describing the
offset.
@rtype: L{FixedOffsetTimeZone}
"""
name = "%s%02i:%02i" % (sign, hours, minutes)
if sign == "-":
hours = -hours
minutes = -minutes
elif sign != "+":
raise ValueError("Invalid sign for timezone %r" % (sign,))
return cls(timedelta(hours=hours, minutes=minutes), name)
@classmethod
def fromLocalTimeStamp(cls, timeStamp):
"""
Create a time zone with a fixed offset corresponding to a time stamp in
the system's locally configured time zone.
@param timeStamp: a time stamp
@type timeStamp: L{int}
@return: a time zone
@rtype: L{FixedOffsetTimeZone}
"""
offset = (
datetime.fromtimestamp(timeStamp) -
datetime.utcfromtimestamp(timeStamp)
)
return cls(offset)
def utcoffset(self, dt):
"""
Return this timezone's offset from UTC.
"""
return self.offset
def dst(self, dt):
"""
Return a zero C{datetime.timedelta} for the daylight saving time
offset, since there is never one.
"""
return timedelta(0)
def tzname(self, dt):
"""
Return a string describing this timezone.
"""
if self.name is not None:
return self.name
# XXX this is wrong; the tests are
dt = datetime.fromtimestamp(0, self)
return dt.strftime("UTC%z")
UTC = FixedOffsetTimeZone.fromSignHoursMinutes("+", 0, 0)
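# Editor's note: a minimal runnable sketch (not part of the original module);
# fromSignHoursMinutes("-", 5, 30) negates both fields but keeps the sign in
# the generated name.
def _fixedOffsetExample():
    tz = FixedOffsetTimeZone.fromSignHoursMinutes("-", 5, 30)
    assert tz.tzname(None) == "-05:30"
    assert tz.utcoffset(None) == timedelta(hours=-5, minutes=-30)
    assert tz.dst(None) == timedelta(0)
    return tz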

View file

@ -0,0 +1,13 @@
# -*- test-case-name: twisted.python.test.test_url -*-
# -*- coding: utf-8 -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
URL parsing, construction and rendering.
"""
from hyperlink._url import URL
__all__ = ["URL"]

View file

@ -0,0 +1,928 @@
# -*- test-case-name: twisted.test.test_compat -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Compatibility module to provide backwards compatibility for useful Python
features.
This is mainly for use of internal Twisted code. We encourage you to use
the latest version of Python directly from your code, if possible.
@var unicode: The type of Unicode strings, C{unicode} on Python 2 and C{str}
on Python 3.
@var NativeStringIO: An in-memory file-like object that operates on the native
string type (bytes in Python 2, unicode in Python 3).
@var urllib_parse: a URL-parsing module (urlparse on Python 2, urllib.parse on
Python 3)
"""
from __future__ import absolute_import, division
import inspect
import os
import platform
import socket
import struct
import sys
import tokenize
from types import MethodType as _MethodType
import warnings
from io import TextIOBase, IOBase
if sys.version_info < (3, 0):
_PY3 = False
else:
_PY3 = True
if sys.version_info >= (3, 5, 0):
_PY35PLUS = True
else:
_PY35PLUS = False
if sys.version_info >= (3, 7, 0):
_PY37PLUS = True
else:
_PY37PLUS = False
if platform.python_implementation() == 'PyPy':
_PYPY = True
else:
_PYPY = False
def _shouldEnableNewStyle():
"""
Returns whether or not we should enable the new-style conversion of
old-style classes. It inspects the environment for C{TWISTED_NEWSTYLE},
accepting an empty string, C{no}, C{false}, C{False}, and C{0} as falsey
values and everything else as a truthy value.
@rtype: L{bool}
"""
value = os.environ.get('TWISTED_NEWSTYLE', '')
if value in ['', 'no', 'false', 'False', '0']:
return False
else:
return True
_EXPECT_NEWSTYLE = _PY3 or _shouldEnableNewStyle()
def currentframe(n=0):
"""
In Python 3, L{inspect.currentframe} does not take a stack-level argument.
Restore that functionality from Python 2 so we don't have to re-implement
the C{f_back}-walking loop in places where it's called.
@param n: The number of stack levels above the caller to walk.
@type n: L{int}
@return: a frame, n levels up the stack from the caller.
@rtype: L{types.FrameType}
"""
f = inspect.currentframe()
for x in range(n + 1):
f = f.f_back
return f
def inet_pton(af, addr):
"""
Emulator of L{socket.inet_pton}.
@param af: An address family to parse; C{socket.AF_INET} or
C{socket.AF_INET6}.
@type af: L{int}
@param addr: An address.
@type addr: native L{str}
@return: The binary packed version of the passed address.
@rtype: L{bytes}
"""
if not addr:
raise ValueError("illegal IP address string passed to inet_pton")
if af == socket.AF_INET:
return socket.inet_aton(addr)
elif af == getattr(socket, 'AF_INET6', 'AF_INET6'):
if '%' in addr and (addr.count('%') > 1 or addr.index("%") == 0):
raise ValueError("illegal IP address string passed to inet_pton")
addr = addr.split('%')[0]
parts = addr.split(':')
elided = parts.count('')
ipv4Component = '.' in parts[-1]
if len(parts) > (8 - ipv4Component) or elided > 3:
raise ValueError("Syntactically invalid address")
if elided == 3:
            return b'\x00' * 16
if elided:
zeros = ['0'] * (8 - len(parts) - ipv4Component + elided)
if addr.startswith('::'):
parts[:2] = zeros
elif addr.endswith('::'):
parts[-2:] = zeros
else:
idx = parts.index('')
parts[idx:idx+1] = zeros
if len(parts) != 8 - ipv4Component:
raise ValueError("Syntactically invalid address")
else:
if len(parts) != (8 - ipv4Component):
raise ValueError("Syntactically invalid address")
if ipv4Component:
if parts[-1].count('.') != 3:
raise ValueError("Syntactically invalid address")
rawipv4 = socket.inet_aton(parts[-1])
unpackedipv4 = struct.unpack('!HH', rawipv4)
parts[-1:] = [hex(x)[2:] for x in unpackedipv4]
parts = [int(x, 16) for x in parts]
return struct.pack('!8H', *parts)
else:
raise socket.error(97, 'Address family not supported by protocol')
def inet_ntop(af, addr):
if af == socket.AF_INET:
return socket.inet_ntoa(addr)
elif af == socket.AF_INET6:
if len(addr) != 16:
raise ValueError("address length incorrect")
parts = struct.unpack('!8H', addr)
curBase = bestBase = None
for i in range(8):
if not parts[i]:
if curBase is None:
curBase = i
curLen = 0
curLen += 1
else:
                if curBase is not None:
                    # keep the longest run of zero groups for '::' compression
                    if bestBase is None or curLen > bestLen:
                        bestBase = curBase
                        bestLen = curLen
                    curBase = None
if curBase is not None and (bestBase is None or curLen > bestLen):
bestBase = curBase
bestLen = curLen
parts = [hex(x)[2:] for x in parts]
if bestBase is not None:
parts[bestBase:bestBase + bestLen] = ['']
if parts[0] == '':
parts.insert(0, '')
if parts[-1] == '':
parts.insert(len(parts) - 1, '')
return ':'.join(parts)
else:
raise socket.error(97, 'Address family not supported by protocol')
try:
socket.AF_INET6
except AttributeError:
socket.AF_INET6 = 'AF_INET6'
try:
socket.inet_pton(socket.AF_INET6, "::")
except (AttributeError, NameError, socket.error):
socket.inet_pton = inet_pton
socket.inet_ntop = inet_ntop
adict = dict
if _PY3:
# These are actually useless in Python 2 as well, but we need to go
# through deprecation process there (ticket #5895):
del adict, inet_pton, inet_ntop
set = set
frozenset = frozenset
try:
from functools import reduce
except ImportError:
reduce = reduce
def execfile(filename, globals, locals=None):
"""
Execute a Python script in the given namespaces.
Similar to the execfile builtin, but a namespace is mandatory, partly
because that's a sensible thing to require, and because otherwise we'd
have to do some frame hacking.
This is a compatibility implementation for Python 3 porting, to avoid the
use of the deprecated builtin C{execfile} function.
"""
if locals is None:
locals = globals
with open(filename, "rb") as fin:
source = fin.read()
code = compile(source, filename, "exec")
exec(code, globals, locals)
try:
cmp = cmp
except NameError:
def cmp(a, b):
"""
Compare two objects.
Returns a negative number if C{a < b}, zero if they are equal, and a
positive number if C{a > b}.
"""
if a < b:
return -1
elif a == b:
return 0
else:
return 1
def comparable(klass):
"""
Class decorator that ensures support for the special C{__cmp__} method.
On Python 2 this does nothing.
On Python 3, C{__eq__}, C{__lt__}, etc. methods are added to the class,
relying on C{__cmp__} to implement their comparisons.
"""
# On Python 2, __cmp__ will just work, so no need to add extra methods:
if not _PY3:
return klass
def __eq__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c == 0
def __ne__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c != 0
def __lt__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c < 0
def __le__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c <= 0
def __gt__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c > 0
def __ge__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c >= 0
klass.__lt__ = __lt__
klass.__gt__ = __gt__
klass.__le__ = __le__
klass.__ge__ = __ge__
klass.__eq__ = __eq__
klass.__ne__ = __ne__
return klass
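# Editor's note: a minimal runnable sketch (not part of the original module)
# of the @comparable decorator; _Money is an invented example class that only
# defines __cmp__ yet gains the rich comparison methods on Python 3.
def _comparableExample():
    @comparable
    class _Money(object):
        def __init__(self, cents):
            self.cents = cents
        def __cmp__(self, other):
            if not isinstance(other, _Money):
                return NotImplemented
            return cmp(self.cents, other.cents)
    assert _Money(100) < _Money(250)
    assert _Money(250) == _Money(250)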
if _PY3:
unicode = str
long = int
else:
unicode = unicode
long = long
def ioType(fileIshObject, default=unicode):
"""
Determine the type which will be returned from the given file object's
read() and accepted by its write() method as an argument.
In other words, determine whether the given file is 'opened in text mode'.
@param fileIshObject: Any object, but ideally one which resembles a file.
@type fileIshObject: L{object}
@param default: A default value to return when the type of C{fileIshObject}
cannot be determined.
@type default: L{type}
    @return: There are 4 possible return values:
1. L{unicode}, if the file is unambiguously opened in text mode.
2. L{bytes}, if the file is unambiguously opened in binary mode.
3. L{basestring}, if we are on python 2 (the L{basestring} type
does not exist on python 3) and the file is opened in binary
mode, but has an encoding and can therefore accept both bytes
and text reliably for writing, but will return L{bytes} from
read methods.
4. The C{default} parameter, if the given type is not understood.
@rtype: L{type}
"""
if isinstance(fileIshObject, TextIOBase):
# If it's for text I/O, then it's for text I/O.
return unicode
if isinstance(fileIshObject, IOBase):
# If it's for I/O but it's _not_ for text I/O, it's for bytes I/O.
return bytes
encoding = getattr(fileIshObject, 'encoding', None)
import codecs
if isinstance(fileIshObject, (codecs.StreamReader, codecs.StreamWriter)):
# On StreamReaderWriter, the 'encoding' attribute has special meaning;
# it is unambiguously unicode.
if encoding:
return unicode
else:
return bytes
if not _PY3:
# Special case: if we have an encoding file, we can *give* it unicode,
# but we can't expect to *get* unicode.
if isinstance(fileIshObject, file):
if encoding is not None:
return basestring
else:
return bytes
from cStringIO import InputType, OutputType
from StringIO import StringIO
if isinstance(fileIshObject, (StringIO, InputType, OutputType)):
return bytes
return default
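# Editor's note: a minimal runnable sketch (not part of the original module)
# of ioType() applied to the in-memory file types from the io module.
def _ioTypeExample():
    import io
    assert ioType(io.StringIO()) is unicode    # text mode -> unicode/str
    assert ioType(io.BytesIO()) is bytes       # binary mode -> bytes
    assert ioType(object()) is unicode         # unknown object -> the default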
def nativeString(s):
"""
Convert C{bytes} or C{unicode} to the native C{str} type, using ASCII
encoding if conversion is necessary.
@raise UnicodeError: The input string is not ASCII encodable/decodable.
@raise TypeError: The input is neither C{bytes} nor C{unicode}.
"""
if not isinstance(s, (bytes, unicode)):
raise TypeError("%r is neither bytes nor unicode" % s)
if _PY3:
if isinstance(s, bytes):
return s.decode("ascii")
else:
# Ensure we're limited to ASCII subset:
s.encode("ascii")
else:
if isinstance(s, unicode):
return s.encode("ascii")
else:
# Ensure we're limited to ASCII subset:
s.decode("ascii")
return s
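# Editor's note: a tiny runnable sketch (not part of the original module);
# both byte and text input come back as the native str type, and non-ASCII
# input raises UnicodeError.
def _nativeStringExample():
    assert nativeString(b"twisted") == "twisted"
    assert nativeString(u"twisted") == "twisted"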
def _matchingString(constantString, inputString):
"""
Some functions, such as C{os.path.join}, operate on string arguments which
may be bytes or text, and wish to return a value of the same type. In
those cases you may wish to have a string constant (in the case of
C{os.path.join}, that constant would be C{os.path.sep}) involved in the
parsing or processing, that must be of a matching type in order to use
string operations on it. L{_matchingString} will take a constant string
(either L{bytes} or L{unicode}) and convert it to the same type as the
input string. C{constantString} should contain only characters from ASCII;
to ensure this, it will be encoded or decoded regardless.
@param constantString: A string literal used in processing.
@type constantString: L{unicode} or L{bytes}
@param inputString: A byte string or text string provided by the user.
@type inputString: L{unicode} or L{bytes}
@return: C{constantString} converted into the same type as C{inputString}
@rtype: the type of C{inputString}
"""
if isinstance(constantString, bytes):
otherType = constantString.decode("ascii")
else:
otherType = constantString.encode("ascii")
if type(constantString) == type(inputString):
return constantString
else:
return otherType
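# Editor's note: a tiny runnable sketch (not part of the original module);
# the constant "/" is coerced to whichever string type the caller supplied.
def _matchingStringExample():
    assert _matchingString(u"/", b"a/b") == b"/"
    assert _matchingString(u"/", u"a/b") == u"/"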
if _PY3:
def reraise(exception, traceback):
raise exception.with_traceback(traceback)
else:
exec("""def reraise(exception, traceback):
raise exception.__class__, exception, traceback""")
reraise.__doc__ = """
Re-raise an exception, with an optional traceback, in a way that is compatible
with both Python 2 and Python 3.
Note that on Python 3, re-raised exceptions will be mutated, with their
C{__traceback__} attribute being set.
@param exception: The exception instance.
@param traceback: The traceback to use, or L{None} indicating a new traceback.
"""
if _PY3:
from io import StringIO as NativeStringIO
else:
from io import BytesIO as NativeStringIO
# Functions for dealing with Python 3's bytes type, which is somewhat
# different than Python 2's:
if _PY3:
def iterbytes(originalBytes):
for i in range(len(originalBytes)):
yield originalBytes[i:i+1]
def intToBytes(i):
return ("%d" % i).encode("ascii")
def lazyByteSlice(object, offset=0, size=None):
"""
Return a copy of the given bytes-like object.
If an offset is given, the copy starts at that offset. If a size is
given, the copy will only be of that length.
@param object: C{bytes} to be copied.
@param offset: C{int}, starting index of copy.
@param size: Optional, if an C{int} is given limit the length of copy
to this size.
"""
view = memoryview(object)
if size is None:
return view[offset:]
else:
return view[offset:(offset + size)]
def networkString(s):
if not isinstance(s, unicode):
raise TypeError("Can only convert text to bytes on Python 3")
return s.encode('ascii')
else:
def iterbytes(originalBytes):
return originalBytes
def intToBytes(i):
return b"%d" % i
lazyByteSlice = buffer
def networkString(s):
if not isinstance(s, str):
raise TypeError("Can only pass-through bytes on Python 2")
# Ensure we're limited to ASCII subset:
s.decode('ascii')
return s
iterbytes.__doc__ = """
Return an iterable wrapper for a C{bytes} object that provides the behavior of
iterating over C{bytes} on Python 2.
In particular, the results of iteration are the individual bytes (rather than
integers as on Python 3).
@param originalBytes: A C{bytes} object that will be wrapped.
"""
intToBytes.__doc__ = """
Convert the given integer into C{bytes}, as ASCII-encoded Arabic numerals.
In other words, this is equivalent to calling C{bytes} in Python 2 on an
integer.
@param i: The C{int} to convert to C{bytes}.
@rtype: C{bytes}
"""
networkString.__doc__ = """
Convert the native string type to C{bytes} if it is not already C{bytes} using
ASCII encoding if conversion is necessary.
This is useful for sending text-like bytes that are constructed using string
interpolation. For example, this is safe on Python 2 and Python 3:
networkString("Hello %d" % (n,))
@param s: A native string to convert to bytes if necessary.
@type s: C{str}
@raise UnicodeError: The input string is not ASCII encodable/decodable.
@raise TypeError: The input is neither C{bytes} nor C{unicode}.
@rtype: C{bytes}
"""
try:
StringType = basestring
except NameError:
# Python 3+
StringType = str
try:
from types import InstanceType
except ImportError:
# Python 3+
InstanceType = object
try:
from types import FileType
except ImportError:
# Python 3+
FileType = IOBase
if _PY3:
import urllib.parse as urllib_parse
from html import escape
from urllib.parse import quote as urlquote
from urllib.parse import unquote as urlunquote
from http import cookiejar as cookielib
else:
import urlparse as urllib_parse
from cgi import escape
from urllib import quote as urlquote
from urllib import unquote as urlunquote
import cookielib
# Dealing with the differences in items/iteritems
if _PY3:
def iteritems(d):
return d.items()
def itervalues(d):
return d.values()
def items(d):
return list(d.items())
range = range
xrange = range
izip = zip
else:
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def items(d):
return d.items()
range = xrange
xrange = xrange
from itertools import izip
izip # shh pyflakes
iteritems.__doc__ = """
Return an iterable of the items of C{d}.
@type d: L{dict}
@rtype: iterable
"""
itervalues.__doc__ = """
Return an iterable of the values of C{d}.
@type d: L{dict}
@rtype: iterable
"""
items.__doc__ = """
Return a list of the items of C{d}.
@type d: L{dict}
@rtype: L{list}
"""
def _keys(d):
"""
Return a list of the keys of C{d}.
@type d: L{dict}
@rtype: L{list}
"""
if _PY3:
return list(d.keys())
else:
return d.keys()
def bytesEnviron():
"""
Return a L{dict} of L{os.environ} where all text-strings are encoded into
L{bytes}.
This function is POSIX only; environment variables are always text strings
on Windows.
"""
if not _PY3:
# On py2, nothing to do.
return dict(os.environ)
target = dict()
for x, y in os.environ.items():
target[os.environ.encodekey(x)] = os.environ.encodevalue(y)
return target
def _constructMethod(cls, name, self):
"""
Construct a bound method.
@param cls: The class that the method should be bound to.
@type cls: L{types.ClassType} or L{type}.
@param name: The name of the method.
@type name: native L{str}
@param self: The object that the method is bound to.
@type self: any object
@return: a bound method
@rtype: L{types.MethodType}
"""
func = cls.__dict__[name]
if _PY3:
return _MethodType(func, self)
return _MethodType(func, self, cls)
if _PY3:
from base64 import encodebytes as _b64encodebytes
from base64 import decodebytes as _b64decodebytes
else:
from base64 import encodestring as _b64encodebytes
from base64 import decodestring as _b64decodebytes
def _bytesChr(i):
"""
Like L{chr} but always works on ASCII, returning L{bytes}.
@param i: The ASCII code point to return.
@type i: L{int}
@rtype: L{bytes}
"""
if _PY3:
return bytes([i])
else:
return chr(i)
try:
from sys import intern
except ImportError:
intern = intern
def _coercedUnicode(s):
"""
Coerce ASCII-only byte strings into unicode for Python 2.
In Python 2 C{unicode(b'bytes')} returns a unicode string C{'bytes'}. In
Python 3, the equivalent C{str(b'bytes')} will return C{"b'bytes'"}
instead. This function mimics the behavior for Python 2. It will decode the
byte string as ASCII. In Python 3 it simply raises a L{TypeError} when
passing a byte string. Unicode strings are returned as-is.
@param s: The string to coerce.
@type s: L{bytes} or L{unicode}
@raise UnicodeError: The input L{bytes} is not ASCII decodable.
@raise TypeError: The input is L{bytes} on Python 3.
"""
if isinstance(s, bytes):
if _PY3:
raise TypeError("Expected str not %r (bytes)" % (s,))
else:
return s.decode('ascii')
else:
return s
if _PY3:
unichr = chr
raw_input = input
else:
unichr = unichr
raw_input = raw_input
def _bytesRepr(bytestring):
"""
Provide a repr for a byte string that begins with 'b' on both
Python 2 and 3.
@param bytestring: The string to repr.
@type bytestring: L{bytes}
@raise TypeError: The input is not L{bytes}.
@return: The repr with a leading 'b'.
    @rtype: native L{str}
"""
if not isinstance(bytestring, bytes):
raise TypeError("Expected bytes not %r" % (bytestring,))
if _PY3:
return repr(bytestring)
else:
return 'b' + repr(bytestring)
if _PY3:
_tokenize = tokenize.tokenize
else:
_tokenize = tokenize.generate_tokens
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
def _get_async_param(isAsync=None, **kwargs):
"""
Provide a backwards-compatible way to get async param value that does not
cause a syntax error under Python 3.7.
@param isAsync: isAsync param value (should default to None)
@type isAsync: L{bool}
@param kwargs: keyword arguments of the caller (only async is allowed)
@type kwargs: L{dict}
@raise TypeError: Both isAsync and async specified.
@return: Final isAsync param value
@rtype: L{bool}
"""
if 'async' in kwargs:
warnings.warn(
"'async' keyword argument is deprecated, please use isAsync",
DeprecationWarning, stacklevel=2)
if isAsync is None and 'async' in kwargs:
isAsync = kwargs.pop('async')
if kwargs:
raise TypeError
return bool(isAsync)
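# Editor's note: a minimal runnable sketch (not part of the original module);
# callers that still pass the reserved spelling via **kwargs get a
# DeprecationWarning but the same boolean result back.
def _getAsyncParamExample():
    assert _get_async_param(isAsync=True) is True
    assert _get_async_param(**{'async': True}) is True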
def _pypy3BlockingHack():
"""
Work around U{this pypy bug
<https://bitbucket.org/pypy/pypy/issues/3051/socketfromfd-sets-sockets-to-blocking-on>}
by replacing C{socket.fromfd} with a more conservative version.
"""
try:
from fcntl import fcntl, F_GETFL, F_SETFL
except ImportError:
return
if not (_PY3 and _PYPY):
return
def fromFDWithoutModifyingFlags(fd, family, type, proto=None):
passproto = [proto] * (proto is not None)
flags = fcntl(fd, F_GETFL)
try:
return realFromFD(fd, family, type, *passproto)
finally:
fcntl(fd, F_SETFL, flags)
realFromFD = socket.fromfd
if realFromFD.__name__ == fromFDWithoutModifyingFlags.__name__:
return
socket.fromfd = fromFDWithoutModifyingFlags
_pypy3BlockingHack()
__all__ = [
"reraise",
"execfile",
"frozenset",
"reduce",
"set",
"cmp",
"comparable",
"OrderedDict",
"nativeString",
"NativeStringIO",
"networkString",
"unicode",
"iterbytes",
"intToBytes",
"lazyByteSlice",
"StringType",
"InstanceType",
"FileType",
"items",
"iteritems",
"itervalues",
"range",
"xrange",
"urllib_parse",
"bytesEnviron",
"escape",
"urlquote",
"urlunquote",
"cookielib",
"_keys",
"_b64encodebytes",
"_b64decodebytes",
"_bytesChr",
"_coercedUnicode",
"_bytesRepr",
"intern",
"unichr",
"raw_input",
"_tokenize",
"_get_async_param",
"Sequence",
]

View file

@ -0,0 +1,430 @@
# -*- test-case-name: twisted.python.test.test_components -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Component architecture for Twisted, based on Zope3 components.
Using the Zope3 API directly is strongly recommended. Everything
you need is in the top-level of the zope.interface package, e.g.::
from zope.interface import Interface, implementer
class IFoo(Interface):
pass
@implementer(IFoo)
class Foo:
pass
print(IFoo.implementedBy(Foo)) # True
print(IFoo.providedBy(Foo())) # True
L{twisted.python.components.registerAdapter} from this module may be used to
add to Twisted's global adapter registry.
L{twisted.python.components.proxyForInterface} is a factory for classes
which allow access to only the parts of another class defined by a specified
interface.
"""
from __future__ import division, absolute_import, print_function
# zope3 imports
from zope.interface import interface, declarations
from zope.interface.adapter import AdapterRegistry
# twisted imports
from twisted.python.compat import NativeStringIO
from twisted.python import reflect
from twisted.python._oldstyle import _oldStyle
# Twisted's global adapter registry
globalRegistry = AdapterRegistry()
# Attribute that registerAdapter looks at. Is this supposed to be public?
ALLOW_DUPLICATES = 0
def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
"""Register an adapter class.
An adapter class is expected to implement the given interface, by
adapting instances implementing 'origInterface'. An adapter class's
__init__ method should accept one parameter, an instance implementing
'origInterface'.
"""
self = globalRegistry
assert interfaceClasses, "You need to pass an Interface"
global ALLOW_DUPLICATES
# deal with class->interface adapters:
if not isinstance(origInterface, interface.InterfaceClass):
origInterface = declarations.implementedBy(origInterface)
for interfaceClass in interfaceClasses:
factory = self.registered([origInterface], interfaceClass)
if factory is not None and not ALLOW_DUPLICATES:
raise ValueError("an adapter (%s) was already registered." % (factory, ))
for interfaceClass in interfaceClasses:
self.register([origInterface], interfaceClass, '', adapterFactory)
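# Editor's note: a minimal runnable sketch (not part of the original module)
# of registerAdapter(); IHello, Original and Greeter are invented names. The
# IHello(...) adaptation relies on the module-level _addHook(globalRegistry)
# call further down, so it works once this module has finished importing.
def _registerAdapterExample():
    from zope.interface import Interface, implementer
    class IHello(Interface):
        pass
    class Original(object):
        pass
    @implementer(IHello)
    class Greeter(object):
        def __init__(self, original):
            self.original = original
    registerAdapter(Greeter, Original, IHello)
    adapted = IHello(Original())
    assert isinstance(adapted, Greeter)
    return adapted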
def getAdapterFactory(fromInterface, toInterface, default):
"""Return registered adapter for a given class and interface.
    Note that this is tied to the *Twisted* global registry, and will
thus not find adapters registered elsewhere.
"""
self = globalRegistry
if not isinstance(fromInterface, interface.InterfaceClass):
fromInterface = declarations.implementedBy(fromInterface)
factory = self.lookup1(fromInterface, toInterface)
if factory is None:
factory = default
return factory
def _addHook(registry):
"""
Add an adapter hook which will attempt to look up adapters in the given
registry.
@type registry: L{zope.interface.adapter.AdapterRegistry}
@return: The hook which was added, for later use with L{_removeHook}.
"""
lookup = registry.lookup1
def _hook(iface, ob):
factory = lookup(declarations.providedBy(ob), iface)
if factory is None:
return None
else:
return factory(ob)
interface.adapter_hooks.append(_hook)
return _hook
def _removeHook(hook):
"""
Remove a previously added adapter hook.
@param hook: An object previously returned by a call to L{_addHook}. This
will be removed from the list of adapter hooks.
"""
interface.adapter_hooks.remove(hook)
# add global adapter lookup hook for our newly created registry
_addHook(globalRegistry)
def getRegistry():
"""Returns the Twisted global
C{zope.interface.adapter.AdapterRegistry} instance.
"""
return globalRegistry
# FIXME: deprecate attribute somehow?
CannotAdapt = TypeError
@_oldStyle
class Adapter:
"""I am the default implementation of an Adapter for some interface.
This docstring contains a limerick, by popular demand::
Subclassing made Zope and TR
much harder to work with by far.
So before you inherit,
be sure to declare it
Adapter, not PyObject*
@cvar temporaryAdapter: If this is True, the adapter will not be
persisted on the Componentized.
@cvar multiComponent: If this adapter is persistent, should it be
automatically registered for all appropriate interfaces.
"""
# These attributes are used with Componentized.
temporaryAdapter = 0
multiComponent = 1
def __init__(self, original):
"""Set my 'original' attribute to be the object I am adapting.
"""
self.original = original
def __conform__(self, interface):
"""
I forward __conform__ to self.original if it has it, otherwise I
simply return None.
"""
if hasattr(self.original, "__conform__"):
return self.original.__conform__(interface)
return None
def isuper(self, iface, adapter):
"""
Forward isuper to self.original
"""
return self.original.isuper(iface, adapter)
@_oldStyle
class Componentized:
"""I am a mixin to allow you to be adapted in various ways persistently.
I define a list of persistent adapters. This is to allow adapter classes
    to store system-specific state and to be initialized on demand. The
getComponent method implements this. You must also register adapters for
this class for the interfaces that you wish to pass to getComponent.
Many other classes and utilities listed here are present in Zope3; this one
is specific to Twisted.
"""
persistenceVersion = 1
def __init__(self):
self._adapterCache = {}
def locateAdapterClass(self, klass, interfaceClass, default):
return getAdapterFactory(klass, interfaceClass, default)
def setAdapter(self, interfaceClass, adapterClass):
"""
Cache a provider for the given interface, by adapting C{self} using
the given adapter class.
"""
self.setComponent(interfaceClass, adapterClass(self))
def addAdapter(self, adapterClass, ignoreClass=0):
"""Utility method that calls addComponent. I take an adapter class and
instantiate it with myself as the first argument.
@return: The adapter instantiated.
"""
adapt = adapterClass(self)
self.addComponent(adapt, ignoreClass)
return adapt
def setComponent(self, interfaceClass, component):
"""
Cache a provider of the given interface.
"""
self._adapterCache[reflect.qual(interfaceClass)] = component
def addComponent(self, component, ignoreClass=0):
"""
Add a component to me, for all appropriate interfaces.
In order to determine which interfaces are appropriate, the component's
provided interfaces will be scanned.
If the argument 'ignoreClass' is True, then all interfaces are
considered appropriate.
Otherwise, an 'appropriate' interface is one for which its class has
been registered as an adapter for my class according to the rules of
getComponent.
"""
for iface in declarations.providedBy(component):
if (ignoreClass or
(self.locateAdapterClass(self.__class__, iface, None)
== component.__class__)):
self._adapterCache[reflect.qual(iface)] = component
def unsetComponent(self, interfaceClass):
"""Remove my component specified by the given interface class."""
del self._adapterCache[reflect.qual(interfaceClass)]
def removeComponent(self, component):
"""
Remove the given component from me entirely, for all interfaces for which
it has been registered.
@return: a list of the interfaces that were removed.
"""
l = []
for k, v in list(self._adapterCache.items()):
if v is component:
del self._adapterCache[k]
l.append(reflect.namedObject(k))
return l
def getComponent(self, interface, default=None):
"""Create or retrieve an adapter for the given interface.
If such an adapter has already been created, retrieve it from the cache
that this instance keeps of all its adapters. Adapters created through
this mechanism may safely store system-specific state.
If you want to register an adapter that will be created through
getComponent, but you don't require (or don't want) your adapter to be
cached and kept alive for the lifetime of this Componentized object,
set the attribute 'temporaryAdapter' to True on your adapter class.
If you want to automatically register an adapter for all appropriate
interfaces (with addComponent), set the attribute 'multiComponent' to
True on your adapter class.
"""
k = reflect.qual(interface)
if k in self._adapterCache:
return self._adapterCache[k]
else:
adapter = interface.__adapt__(self)
if adapter is not None and not (
hasattr(adapter, "temporaryAdapter") and
adapter.temporaryAdapter):
self._adapterCache[k] = adapter
if (hasattr(adapter, "multiComponent") and
adapter.multiComponent):
self.addComponent(adapter)
if adapter is None:
return default
return adapter
def __conform__(self, interface):
return self.getComponent(interface)
class ReprableComponentized(Componentized):
def __init__(self):
Componentized.__init__(self)
def __repr__(self):
from pprint import pprint
sio = NativeStringIO()
pprint(self._adapterCache, sio)
return sio.getvalue()
def proxyForInterface(iface, originalAttribute='original'):
"""
Create a class which proxies all method calls which adhere to an interface
to another provider of that interface.
This function is intended for creating specialized proxies. The typical way
to use it is by subclassing the result::
class MySpecializedProxy(proxyForInterface(IFoo)):
def someInterfaceMethod(self, arg):
if arg == 3:
return 3
return self.original.someInterfaceMethod(arg)
@param iface: The Interface to which the resulting object will conform, and
which the wrapped object must provide.
@param originalAttribute: name of the attribute used to save the original
        object in the resulting class. Defaults to C{original}.
@type originalAttribute: C{str}
@return: A class whose constructor takes the original object as its only
argument. Constructing the class creates the proxy.
"""
def __init__(self, original):
setattr(self, originalAttribute, original)
contents = {"__init__": __init__}
for name in iface:
contents[name] = _ProxyDescriptor(name, originalAttribute)
proxy = type("(Proxy for %s)"
% (reflect.qual(iface),), (object,), contents)
declarations.classImplements(proxy, iface)
return proxy
class _ProxiedClassMethod(object):
"""
A proxied class method.
@ivar methodName: the name of the method which this should invoke when
called.
@type methodName: L{str}
@ivar __name__: The name of the method being proxied (the same as
C{methodName}).
@type __name__: L{str}
@ivar originalAttribute: name of the attribute of the proxy where the
original object is stored.
@type originalAttribute: L{str}
"""
def __init__(self, methodName, originalAttribute):
self.methodName = self.__name__ = methodName
self.originalAttribute = originalAttribute
def __call__(self, oself, *args, **kw):
"""
Invoke the specified L{methodName} method of the C{original} attribute
for proxyForInterface.
@param oself: an instance of a L{proxyForInterface} object.
@return: the result of the underlying method.
"""
original = getattr(oself, self.originalAttribute)
actualMethod = getattr(original, self.methodName)
return actualMethod(*args, **kw)
class _ProxyDescriptor(object):
"""
A descriptor which will proxy attribute access, mutation, and
deletion to the L{_ProxyDescriptor.originalAttribute} of the
object it is being accessed from.
@ivar attributeName: the name of the attribute which this descriptor will
retrieve from instances' C{original} attribute.
@type attributeName: C{str}
@ivar originalAttribute: name of the attribute of the proxy where the
original object is stored.
@type originalAttribute: C{str}
"""
def __init__(self, attributeName, originalAttribute):
self.attributeName = attributeName
self.originalAttribute = originalAttribute
def __get__(self, oself, type=None):
"""
Retrieve the C{self.attributeName} property from I{oself}.
"""
if oself is None:
return _ProxiedClassMethod(self.attributeName,
self.originalAttribute)
original = getattr(oself, self.originalAttribute)
return getattr(original, self.attributeName)
def __set__(self, oself, value):
"""
Set the C{self.attributeName} property of I{oself}.
"""
original = getattr(oself, self.originalAttribute)
setattr(original, self.attributeName, value)
def __delete__(self, oself):
"""
Delete the C{self.attributeName} property of I{oself}.
"""
original = getattr(oself, self.originalAttribute)
delattr(original, self.attributeName)
__all__ = [
"registerAdapter", "getAdapterFactory",
"Adapter", "Componentized", "ReprableComponentized", "getRegistry",
"proxyForInterface",
]

View file

@ -0,0 +1,18 @@
# -*- test-case-name: twisted.python.test.test_constants -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Symbolic constant support, including collections and constants with text,
numeric, and bit flag values.
"""
from __future__ import division, absolute_import
# Import and re-export Constantly
from constantly import (NamedConstant, ValueConstant, FlagConstant, Names,
Values, Flags)
__all__ = [
'NamedConstant', 'ValueConstant', 'FlagConstant',
'Names', 'Values', 'Flags']

View file

@ -0,0 +1,137 @@
# -*- test-case-name: twisted.test.test_context -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Dynamic pseudo-scoping for Python.
Call functions with context.call({key: value}, func); func and
functions that it calls will be able to use 'context.get(key)' to
retrieve 'value'.
This is thread-safe.
"""
from __future__ import division, absolute_import
from threading import local
from twisted.python._oldstyle import _oldStyle
defaultContextDict = {}
setDefault = defaultContextDict.__setitem__
@_oldStyle
class ContextTracker:
"""
A L{ContextTracker} provides a way to pass arbitrary key/value data up and
down a call stack without passing them as parameters to the functions on
that call stack.
This can be useful when functions on the top and bottom of the call stack
need to cooperate but the functions in between them do not allow passing the
necessary state. For example::
from twisted.python.context import call, get
def handleRequest(request):
call({'request-id': request.id}, renderRequest, request.url)
def renderRequest(url):
renderHeader(url)
renderBody(url)
def renderHeader(url):
return "the header"
def renderBody(url):
return "the body (request id=%r)" % (get("request-id"),)
This should be used sparingly, since the lack of a clear connection between
the two halves can result in code which is difficult to understand and
maintain.
@ivar contexts: A C{list} of C{dict}s tracking the context state. Each new
L{ContextTracker.callWithContext} pushes a new C{dict} onto this stack
for the duration of the call, making the data available to the function
        called and restoring the previous data once it is complete.
"""
def __init__(self):
self.contexts = [defaultContextDict]
def callWithContext(self, newContext, func, *args, **kw):
"""
Call C{func(*args, **kw)} such that the contents of C{newContext} will
be available for it to retrieve using L{getContext}.
@param newContext: A C{dict} of data to push onto the context for the
duration of the call to C{func}.
@param func: A callable which will be called.
@param *args: Any additional positional arguments to pass to C{func}.
@param **kw: Any additional keyword arguments to pass to C{func}.
@return: Whatever is returned by C{func}
@raise: Whatever is raised by C{func}.
"""
self.contexts.append(newContext)
try:
return func(*args,**kw)
finally:
self.contexts.pop()
def getContext(self, key, default=None):
"""
Retrieve the value for a key from the context.
@param key: The key to look up in the context.
@param default: The value to return if C{key} is not found in the
context.
@return: The value most recently remembered in the context for C{key}.
"""
for ctx in reversed(self.contexts):
try:
return ctx[key]
except KeyError:
pass
return default
class ThreadedContextTracker(object):
def __init__(self):
self.storage = local()
def currentContext(self):
try:
return self.storage.ct
except AttributeError:
ct = self.storage.ct = ContextTracker()
return ct
def callWithContext(self, ctx, func, *args, **kw):
return self.currentContext().callWithContext(ctx, func, *args, **kw)
def getContext(self, key, default=None):
return self.currentContext().getContext(key, default)
def installContextTracker(ctr):
global theContextTracker
global call
global get
theContextTracker = ctr
call = theContextTracker.callWithContext
get = theContextTracker.getContext
installContextTracker(ThreadedContextTracker())
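# Editor's note: a minimal runnable sketch (not part of the original module)
# of the call/get round trip described in the module docstring.
def _contextExample():
    def _reader():
        return get("request-id", "no id")
    assert call({"request-id": 42}, _reader) == 42
    assert get("request-id", "no id") == "no id"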

View file

@ -0,0 +1,797 @@
# -*- test-case-name: twisted.python.test.test_deprecate -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Deprecation framework for Twisted.
To mark a method, function, or class as being deprecated do this::
from incremental import Version
from twisted.python.deprecate import deprecated
@deprecated(Version("Twisted", 8, 0, 0))
def badAPI(self, first, second):
'''
Docstring for badAPI.
'''
...
@deprecated(Version("Twisted", 16, 0, 0))
class BadClass(object):
'''
Docstring for BadClass.
'''
The newly-decorated badAPI will issue a warning when called, and BadClass will
issue a warning when instantiated. Both will also have a deprecation notice
appended to their docstring.
To deprecate properties you can use::
from incremental import Version
from twisted.python.deprecate import deprecatedProperty
class OtherwiseUndeprecatedClass(object):
@deprecatedProperty(Version('Twisted', 16, 0, 0))
def badProperty(self):
'''
Docstring for badProperty.
'''
@badProperty.setter
def badProperty(self, value):
'''
            Setter will also raise the deprecation warning.
'''
To mark module-level attributes as being deprecated you can use::
badAttribute = "someValue"
...
deprecatedModuleAttribute(
Version("Twisted", 8, 0, 0),
"Use goodAttribute instead.",
"your.full.module.name",
"badAttribute")
The deprecated attributes will issue a warning whenever they are accessed. If
the attributes being deprecated are in the same module as the
L{deprecatedModuleAttribute} call is being made from, the C{__name__} global
can be used as the C{moduleName} parameter.
See also L{incremental.Version}.
@type DEPRECATION_WARNING_FORMAT: C{str}
@var DEPRECATION_WARNING_FORMAT: The default deprecation warning string format
to use when one is not provided by the user.
"""
from __future__ import division, absolute_import
__all__ = [
'deprecated',
'deprecatedProperty',
'getDeprecationWarningString',
'getWarningMethod',
'setWarningMethod',
'deprecatedModuleAttribute',
]
import sys, inspect
from warnings import warn, warn_explicit
from dis import findlinestarts
from functools import wraps
from incremental import getVersionString
from twisted.python.compat import _PY3
DEPRECATION_WARNING_FORMAT = '%(fqpn)s was deprecated in %(version)s'
# Notionally, part of twisted.python.reflect, but defining it there causes a
# cyclic dependency between this module and that module. Define it here,
# instead, and let reflect import it to re-expose to the public.
def _fullyQualifiedName(obj):
"""
Return the fully qualified name of a module, class, method or function.
Classes and functions need to be module level ones to be correctly
qualified.
@rtype: C{str}.
"""
try:
name = obj.__qualname__
except AttributeError:
name = obj.__name__
if inspect.isclass(obj) or inspect.isfunction(obj):
moduleName = obj.__module__
return "%s.%s" % (moduleName, name)
elif inspect.ismethod(obj):
try:
cls = obj.im_class
except AttributeError:
# Python 3 eliminates im_class, substitutes __module__ and
# __qualname__ to provide similar information.
return "%s.%s" % (obj.__module__, obj.__qualname__)
else:
className = _fullyQualifiedName(cls)
return "%s.%s" % (className, name)
return name
# Try to keep it looking like something in twisted.python.reflect.
_fullyQualifiedName.__module__ = 'twisted.python.reflect'
_fullyQualifiedName.__name__ = 'fullyQualifiedName'
_fullyQualifiedName.__qualname__ = 'fullyQualifiedName'
def _getReplacementString(replacement):
"""
Surround a replacement for a deprecated API with some polite text exhorting
the user to consider it as an alternative.
@type replacement: C{str} or callable
@return: a string like "please use twisted.python.modules.getModule
instead".
"""
if callable(replacement):
replacement = _fullyQualifiedName(replacement)
return "please use %s instead" % (replacement,)
def _getDeprecationDocstring(version, replacement=None):
"""
Generate an addition to a deprecated object's docstring that explains its
deprecation.
@param version: the version it was deprecated.
@type version: L{incremental.Version}
@param replacement: The replacement, if specified.
@type replacement: C{str} or callable
@return: a string like "Deprecated in Twisted 27.2.0; please use
twisted.timestream.tachyon.flux instead."
"""
doc = "Deprecated in %s" % (getVersionString(version),)
if replacement:
doc = "%s; %s" % (doc, _getReplacementString(replacement))
return doc + "."
def _getDeprecationWarningString(fqpn, version, format=None, replacement=None):
"""
Return a string indicating that the Python name was deprecated in the given
version.
@param fqpn: Fully qualified Python name of the thing being deprecated
@type fqpn: C{str}
@param version: Version that C{fqpn} was deprecated in.
@type version: L{incremental.Version}
@param format: A user-provided format to interpolate warning values into, or
L{DEPRECATION_WARNING_FORMAT
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if L{None} is
given.
@type format: C{str}
@param replacement: what should be used in place of C{fqpn}. Either pass in
a string, which will be inserted into the warning message, or a
callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
@return: A textual description of the deprecation
@rtype: C{str}
"""
if format is None:
format = DEPRECATION_WARNING_FORMAT
warningString = format % {
'fqpn': fqpn,
'version': getVersionString(version)}
if replacement:
warningString = "%s; %s" % (
warningString, _getReplacementString(replacement))
return warningString
def getDeprecationWarningString(callableThing, version, format=None,
replacement=None):
"""
Return a string indicating that the callable was deprecated in the given
version.
@type callableThing: C{callable}
@param callableThing: Callable object to be deprecated
@type version: L{incremental.Version}
@param version: Version that C{callableThing} was deprecated in
@type format: C{str}
@param format: A user-provided format to interpolate warning values into,
or L{DEPRECATION_WARNING_FORMAT
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if L{None} is
given
@param replacement: what should be used in place of the callable. Either
pass in a string, which will be inserted into the warning message,
or a callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
@return: A string describing the deprecation.
@rtype: C{str}
"""
return _getDeprecationWarningString(
_fullyQualifiedName(callableThing), version, format, replacement)
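# Illustrative sketch (not part of the original module): the kind of string
# the helper above produces for a hypothetical callable and version.
def _exampleDeprecationWarningString():
    from incremental import Version

    def listDirectory(path):
        return []

    # Returns something of the form
    # "<fully qualified name of listDirectory> was deprecated in
    #  Twisted 16.0.0; please use os.listdir instead"
    return getDeprecationWarningString(
        listDirectory, Version("Twisted", 16, 0, 0), replacement="os.listdir")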
def _appendToDocstring(thingWithDoc, textToAppend):
"""
Append the given text to the docstring of C{thingWithDoc}.
If C{thingWithDoc} has no docstring, then the text just replaces the
docstring. If it has a single-line docstring then it appends a blank line
and the message text. If it has a multi-line docstring, then it appends a
blank line, the message text, and also does the indentation correctly.
"""
if thingWithDoc.__doc__:
docstringLines = thingWithDoc.__doc__.splitlines()
else:
docstringLines = []
if len(docstringLines) == 0:
docstringLines.append(textToAppend)
elif len(docstringLines) == 1:
docstringLines.extend(['', textToAppend, ''])
else:
spaces = docstringLines.pop()
docstringLines.extend(['',
spaces + textToAppend,
spaces])
thingWithDoc.__doc__ = '\n'.join(docstringLines)
def deprecated(version, replacement=None):
"""
Return a decorator that marks callables as deprecated. To deprecate a
property, see L{deprecatedProperty}.
@type version: L{incremental.Version}
@param version: The version in which the callable will be marked as
having been deprecated. The decorated function will be annotated
with this version, having it set as its C{deprecatedVersion}
attribute.
@param replacement: what should be used in place of the callable. Either
pass in a string, which will be inserted into the warning message,
or a callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
"""
def deprecationDecorator(function):
"""
Decorator that marks C{function} as deprecated.
"""
warningString = getDeprecationWarningString(
function, version, None, replacement)
@wraps(function)
def deprecatedFunction(*args, **kwargs):
warn(
warningString,
DeprecationWarning,
stacklevel=2)
return function(*args, **kwargs)
_appendToDocstring(deprecatedFunction,
_getDeprecationDocstring(version, replacement))
deprecatedFunction.deprecatedVersion = version
return deprecatedFunction
return deprecationDecorator
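# Illustrative sketch (not part of the original module): applying the
# decorator to a throwaway function. Calling the wrapper emits a
# DeprecationWarning naming the suggested replacement, and the docstring
# gains a "Deprecated in ..." note.
def _exampleDeprecatedUsage():
    from incremental import Version

    @deprecated(Version("Twisted", 16, 0, 0), replacement="sum")
    def addAll(values):
        """Add a sequence of numbers."""
        return sum(values)

    return addAll([1, 2, 3])  # warns, then returns 6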
def deprecatedProperty(version, replacement=None):
"""
Return a decorator that marks a property as deprecated. To deprecate a
regular callable or class, see L{deprecated}.
@type version: L{incremental.Version}
@param version: The version in which the callable will be marked as
having been deprecated. The decorated function will be annotated
with this version, having it set as its C{deprecatedVersion}
attribute.
@param replacement: what should be used in place of the callable.
Either pass in a string, which will be inserted into the warning
message, or a callable, which will be expanded to its full import
path.
@type replacement: C{str} or callable
@return: A new property with deprecated setter and getter.
@rtype: C{property}
@since: 16.1.0
"""
class _DeprecatedProperty(property):
"""
Extension of the built-in property to allow deprecated setters.
"""
def _deprecatedWrapper(self, function):
@wraps(function)
def deprecatedFunction(*args, **kwargs):
warn(
self.warningString,
DeprecationWarning,
stacklevel=2)
return function(*args, **kwargs)
return deprecatedFunction
def setter(self, function):
return property.setter(self, self._deprecatedWrapper(function))
def deprecationDecorator(function):
if _PY3:
warningString = getDeprecationWarningString(
function, version, None, replacement)
else:
# Python 2 lacks __qualname__, so we approximate the qualified name by
# walking the stack for the enclosing class name. This is a heuristic
# and may not be accurate in every case.
functionName = function.__name__
className = inspect.stack()[1][3]  # enclosing class name, from the caller's frame
moduleName = function.__module__
fqdn = "%s.%s.%s" % (moduleName, className, functionName)
warningString = _getDeprecationWarningString(
fqdn, version, None, replacement)
@wraps(function)
def deprecatedFunction(*args, **kwargs):
warn(
warningString,
DeprecationWarning,
stacklevel=2)
return function(*args, **kwargs)
_appendToDocstring(deprecatedFunction,
_getDeprecationDocstring(version, replacement))
deprecatedFunction.deprecatedVersion = version
result = _DeprecatedProperty(deprecatedFunction)
result.warningString = warningString
return result
return deprecationDecorator
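# Illustrative sketch (not part of the original module): deprecating a
# property on a hypothetical class. Reading the attribute emits a
# DeprecationWarning.
def _exampleDeprecatedProperty():
    from incremental import Version

    class Config(object):
        @deprecatedProperty(Version("Twisted", 16, 1, 0),
                            replacement="Config.timeout")
        def connectTimeout(self):
            return 30

    return Config().connectTimeout  # warns, then returns 30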
def getWarningMethod():
"""
Return the warning method currently used to record deprecation warnings.
"""
return warn
def setWarningMethod(newMethod):
"""
Set the warning method to use to record deprecation warnings.
The callable should take message, category and stacklevel. The return
value is ignored.
"""
global warn
warn = newMethod
class _InternalState(object):
"""
An L{_InternalState} is a helper object for a L{_ModuleProxy}, so that it
can easily access its own attributes, bypassing its logic for delegating to
another object that it's proxying for.
@ivar proxy: a L{_ModuleProxy}
"""
def __init__(self, proxy):
object.__setattr__(self, 'proxy', proxy)
def __getattribute__(self, name):
return object.__getattribute__(object.__getattribute__(self, 'proxy'),
name)
def __setattr__(self, name, value):
return object.__setattr__(object.__getattribute__(self, 'proxy'),
name, value)
class _ModuleProxy(object):
"""
Python module wrapper to hook module-level attribute access.
Access to deprecated attributes first checks
L{_ModuleProxy._deprecatedAttributes}, if the attribute does not appear
there then access falls through to L{_ModuleProxy._module}, the wrapped
module object.
@ivar _module: Module on which to hook attribute access.
@type _module: C{module}
@ivar _deprecatedAttributes: Mapping of attribute names to objects that
retrieve the module attribute's original value.
@type _deprecatedAttributes: C{dict} mapping C{str} to
L{_DeprecatedAttribute}
@ivar _lastWasPath: Heuristic guess as to whether warnings about this
package should be ignored for the next call. If the last attribute
access of this module was a C{getattr} of C{__path__}, we will assume
that it was the import system doing it and we won't emit a warning for
the next access, even if it is to a deprecated attribute. The CPython
import system always tries to access C{__path__}, then the attribute
itself, then the attribute itself again, in both successful and failed
cases.
@type _lastWasPath: C{bool}
"""
def __init__(self, module):
state = _InternalState(self)
state._module = module
state._deprecatedAttributes = {}
state._lastWasPath = False
def __repr__(self):
"""
Get a string containing the type of the module proxy and a
representation of the wrapped module object.
"""
state = _InternalState(self)
return '<%s module=%r>' % (type(self).__name__, state._module)
def __setattr__(self, name, value):
"""
Set an attribute on the wrapped module object.
"""
state = _InternalState(self)
state._lastWasPath = False
setattr(state._module, name, value)
def __getattribute__(self, name):
"""
Get an attribute from the module object, possibly emitting a warning.
If the specified name has been deprecated, then a warning is issued.
(Unless certain obscure conditions are met; see
L{_ModuleProxy._lastWasPath} for more information about what might quash
such a warning.)
"""
state = _InternalState(self)
if state._lastWasPath:
deprecatedAttribute = None
else:
deprecatedAttribute = state._deprecatedAttributes.get(name)
if deprecatedAttribute is not None:
# If we have a _DeprecatedAttribute object from the earlier lookup,
# allow it to issue the warning.
value = deprecatedAttribute.get()
else:
# Otherwise, just retrieve the underlying value directly; it's not
# deprecated, there's no warning to issue.
value = getattr(state._module, name)
if name == '__path__':
state._lastWasPath = True
else:
state._lastWasPath = False
return value
class _DeprecatedAttribute(object):
"""
Wrapper for deprecated attributes.
This is intended to be used by L{_ModuleProxy}. Calling
L{_DeprecatedAttribute.get} will issue a warning and retrieve the
underlying attribute's value.
@type module: C{module}
@ivar module: The original module instance containing this attribute
@type fqpn: C{str}
@ivar fqpn: Fully qualified Python name for the deprecated attribute
@type version: L{incremental.Version}
@ivar version: Version that the attribute was deprecated in
@type message: C{str}
@ivar message: Deprecation message
"""
def __init__(self, module, name, version, message):
"""
Initialise a deprecated name wrapper.
"""
self.module = module
self.__name__ = name
self.fqpn = module.__name__ + '.' + name
self.version = version
self.message = message
def get(self):
"""
Get the underlying attribute value and issue a deprecation warning.
"""
# This might fail if the deprecated thing is a module inside a package.
# In that case, don't emit the warning this time. The import system
# will come back again when it's not an AttributeError and we can emit
# the warning then.
result = getattr(self.module, self.__name__)
message = _getDeprecationWarningString(self.fqpn, self.version,
DEPRECATION_WARNING_FORMAT + ': ' + self.message)
warn(message, DeprecationWarning, stacklevel=3)
return result
def _deprecateAttribute(proxy, name, version, message):
"""
Mark a module-level attribute as being deprecated.
@type proxy: L{_ModuleProxy}
@param proxy: The module proxy instance proxying the deprecated attributes
@type name: C{str}
@param name: Attribute name
@type version: L{incremental.Version}
@param version: Version that the attribute was deprecated in
@type message: C{str}
@param message: Deprecation message
"""
_module = object.__getattribute__(proxy, '_module')
attr = _DeprecatedAttribute(_module, name, version, message)
# Add a deprecated attribute marker for this module's attribute. When this
# attribute is accessed via _ModuleProxy a warning is emitted.
_deprecatedAttributes = object.__getattribute__(
proxy, '_deprecatedAttributes')
_deprecatedAttributes[name] = attr
def deprecatedModuleAttribute(version, message, moduleName, name):
"""
Declare a module-level attribute as being deprecated.
@type version: L{incremental.Version}
@param version: Version that the attribute was deprecated in
@type message: C{str}
@param message: Deprecation message
@type moduleName: C{str}
@param moduleName: Fully-qualified Python name of the module containing
the deprecated attribute; if called from the same module as the
attributes are being deprecated in, using the C{__name__} global can
be helpful
@type name: C{str}
@param name: Attribute name to deprecate
"""
module = sys.modules[moduleName]
if not isinstance(module, _ModuleProxy):
module = _ModuleProxy(module)
sys.modules[moduleName] = module
_deprecateAttribute(module, name, version, message)
def warnAboutFunction(offender, warningString):
"""
Issue a warning string, identifying C{offender} as the responsible code.
This function is used to deprecate some behavior of a function. It differs
from L{warnings.warn} in that it is not limited to deprecating the behavior
of a function currently on the call stack.
@param offender: The function that is being deprecated.
@param warningString: The string that should be emitted by this warning.
@type warningString: C{str}
@since: 11.0
"""
# inspect.getmodule() is attractive, but somewhat
# broken in Python < 2.6. See Python bug 4845.
offenderModule = sys.modules[offender.__module__]
filename = inspect.getabsfile(offenderModule)
lineStarts = list(findlinestarts(offender.__code__))
lastLineNo = lineStarts[-1][1]
globals = offender.__globals__
kwargs = dict(
category=DeprecationWarning,
filename=filename,
lineno=lastLineNo,
module=offenderModule.__name__,
registry=globals.setdefault("__warningregistry__", {}),
module_globals=None)
warn_explicit(warningString, **kwargs)
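# Illustrative sketch (not part of the original module): attributing a
# warning to a function that is not on the current call stack. The
# `badHandler` argument stands in for any module-level function elsewhere
# in the application.
def _exampleWarnAboutFunction(badHandler):
    warnAboutFunction(
        badHandler,
        "Returning a dictionary from this handler is deprecated.")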
def _passedArgSpec(argspec, positional, keyword):
"""
Take an I{inspect.ArgSpec}, a tuple of positional arguments, and a dict of
keyword arguments, and return a mapping of arguments that were actually
passed to their passed values.
@param argspec: The argument specification for the function to inspect.
@type argspec: I{inspect.ArgSpec}
@param positional: The positional arguments that were passed.
@type positional: L{tuple}
@param keyword: The keyword arguments that were passed.
@type keyword: L{dict}
@return: A dictionary mapping argument names (those declared in C{argspec})
to values that were passed explicitly by the user.
@rtype: L{dict} mapping L{str} to L{object}
"""
result = {}
unpassed = len(argspec.args) - len(positional)
if argspec.keywords is not None:
kwargs = result[argspec.keywords] = {}
if unpassed < 0:
if argspec.varargs is None:
raise TypeError("Too many arguments.")
else:
result[argspec.varargs] = positional[len(argspec.args):]
for name, value in zip(argspec.args, positional):
result[name] = value
for name, value in keyword.items():
if name in argspec.args:
if name in result:
raise TypeError("Already passed.")
result[name] = value
elif argspec.keywords is not None:
kwargs[name] = value
else:
raise TypeError("no such param")
return result
def _passedSignature(signature, positional, keyword):
"""
Take an L{inspect.Signature}, a tuple of positional arguments, and a dict of
keyword arguments, and return a mapping of arguments that were actually
passed to their passed values.
@param signature: The signature of the function to inspect.
@type signature: L{inspect.Signature}
@param positional: The positional arguments that were passed.
@type positional: L{tuple}
@param keyword: The keyword arguments that were passed.
@type keyword: L{dict}
@return: A dictionary mapping argument names (those declared in
C{signature}) to values that were passed explicitly by the user.
@rtype: L{dict} mapping L{str} to L{object}
"""
result = {}
kwargs = None
numPositional = 0
for (n, (name, param)) in enumerate(signature.parameters.items()):
if param.kind == inspect.Parameter.VAR_POSITIONAL:
# Varargs, for example: *args
result[name] = positional[n:]
# Count the positional arguments consumed so far: the n named
# parameters plus whatever *args swallowed.
numPositional = len(result[name]) + n
elif param.kind == inspect.Parameter.VAR_KEYWORD:
# Variable keyword args, for example: **my_kwargs
kwargs = result[name] = {}
elif param.kind in (inspect.Parameter.POSITIONAL_OR_KEYWORD,
inspect.Parameter.POSITIONAL_ONLY):
if n < len(positional):
result[name] = positional[n]
numPositional += 1
elif param.kind == inspect.Parameter.KEYWORD_ONLY:
if name not in keyword:
if param.default == inspect.Parameter.empty:
raise TypeError("missing keyword arg {}".format(name))
else:
result[name] = param.default
else:
raise TypeError("'{}' parameter is invalid kind: {}".format(
name, param.kind))
if len(positional) > numPositional:
raise TypeError("Too many arguments.")
for name, value in keyword.items():
if name in signature.parameters.keys():
if name in result:
raise TypeError("Already passed.")
result[name] = value
elif kwargs is not None:
kwargs[name] = value
else:
raise TypeError("no such param")
return result
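# Illustrative sketch (not part of the original module): what the mapping
# looks like for a hypothetical call. Only arguments the caller actually
# supplied (plus keyword-only defaults) appear in the result.
def _examplePassedSignature():
    def connect(host, port=80, **options):
        pass

    sig = inspect.signature(connect)
    # {'host': 'example.com', 'options': {'tls': True}}
    return _passedSignature(sig, ("example.com",), {"tls": True})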
def _mutuallyExclusiveArguments(argumentPairs):
"""
Decorator which causes its decoratee to raise a L{TypeError} if two of the
given arguments are passed at the same time.
@param argumentPairs: pairs of argument identifiers, each pair indicating
an argument that may not be passed in conjunction with another.
@type argumentPairs: sequence of 2-sequences of L{str}
@return: A decorator, used like so::
@_mutuallyExclusiveArguments([["tweedledum", "tweedledee"]])
def function(tweedledum=1, tweedledee=2):
"Don't pass tweedledum and tweedledee at the same time."
@rtype: 1-argument callable taking a callable and returning a callable.
"""
def wrapper(wrappee):
if getattr(inspect, "signature", None):
# Python 3
spec = inspect.signature(wrappee)
_passed = _passedSignature
else:
# Python 2
spec = inspect.getargspec(wrappee)
_passed = _passedArgSpec
@wraps(wrappee)
def wrapped(*args, **kwargs):
arguments = _passed(spec, args, kwargs)
for this, that in argumentPairs:
if this in arguments and that in arguments:
raise TypeError(
("The %r and %r arguments to %s "
"are mutually exclusive.") %
(this, that, _fullyQualifiedName(wrappee)))
return wrappee(*args, **kwargs)
return wrapped
return wrapper
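# Illustrative sketch (not part of the original module): the decorator in use
# on a hypothetical function. Passing both exclusive arguments raises
# TypeError; passing either one alone is fine.
def _exampleMutuallyExclusive():
    @_mutuallyExclusiveArguments([["json", "data"]])
    def post(url, json=None, data=None):
        return (url, json, data)

    post("/ping", json={"a": 1})              # fine
    post("/ping", json={"a": 1}, data=b"x")   # raises TypeError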


@ -0,0 +1,798 @@
# -*- test-case-name: twisted.test.test_failure -*-
# See also test suite twisted.test.test_pbfailure
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Asynchronous-friendly error mechanism.
See L{Failure}.
"""
from __future__ import division, absolute_import, print_function
# System Imports
import copy
import sys
import linecache
import inspect
import opcode
from inspect import getmro
from twisted.python import reflect
from twisted.python.compat import _PY3, NativeStringIO as StringIO
count = 0
traceupLength = 4
class DefaultException(Exception):
pass
def format_frames(frames, write, detail="default"):
"""
Format and write frames.
@param frames: is a list of frames as used by Failure.frames, with
each frame being a list of
(funcName, fileName, lineNumber, locals.items(), globals.items())
@type frames: list
@param write: this will be called with formatted strings.
@type write: callable
@param detail: Four detail levels are available:
default, brief, verbose, and verbose-vars-not-captured.
C{Failure.printDetailedTraceback} uses the latter when the caller asks
for verbose, but no vars were captured, so that an explicit warning
about the missing data is shown.
@type detail: string
"""
if detail not in ('default', 'brief', 'verbose',
'verbose-vars-not-captured'):
raise ValueError(
"Detail must be default, brief, verbose, or "
"verbose-vars-not-captured. (not %r)" % (detail,))
w = write
if detail == "brief":
for method, filename, lineno, localVars, globalVars in frames:
w('%s:%s:%s\n' % (filename, lineno, method))
elif detail == "default":
for method, filename, lineno, localVars, globalVars in frames:
w(' File "%s", line %s, in %s\n' % (filename, lineno, method))
w(' %s\n' % linecache.getline(filename, lineno).strip())
elif detail == "verbose-vars-not-captured":
for method, filename, lineno, localVars, globalVars in frames:
w("%s:%d: %s(...)\n" % (filename, lineno, method))
w(' [Capture of Locals and Globals disabled (use captureVars=True)]\n')
elif detail == "verbose":
for method, filename, lineno, localVars, globalVars in frames:
w("%s:%d: %s(...)\n" % (filename, lineno, method))
w(' [ Locals ]\n')
# Note: the repr(val) was (self.pickled and val) or repr(val)))
for name, val in localVars:
w(" %s : %s\n" % (name, repr(val)))
w(' ( Globals )\n')
for name, val in globalVars:
w(" %s : %s\n" % (name, repr(val)))
# slyphon: i have a need to check for this value in trial
# so I made it a module-level constant
EXCEPTION_CAUGHT_HERE = "--- <exception caught here> ---"
class NoCurrentExceptionError(Exception):
"""
Raised when trying to create a Failure from the current interpreter
exception state and there is no current exception state.
"""
def _Traceback(stackFrames, tbFrames):
"""
Construct a fake traceback object using a list of frames. Note that
although frames generally include locals and globals, this information
is not kept by this method, since locals and globals are not used in
standard tracebacks.
@param stackFrames: [(methodname, filename, lineno, locals, globals), ...]
@param tbFrames: [(methodname, filename, lineno, locals, globals), ...]
"""
assert len(tbFrames) > 0, "Must pass some frames"
# We deliberately avoid using recursion here, as the frames list may be
# long.
# 'stackFrames' is a list of frames above (ie, older than) the point the
# exception was caught, with oldest at the start. Start by building these
# into a linked list of _Frame objects (with the f_back links pointing back
# towards the oldest frame).
stack = None
for sf in stackFrames:
stack = _Frame(sf, stack)
# 'tbFrames' is a list of frames from the point the exception was caught,
# down to where it was thrown, with the oldest at the start. Add these to
# the linked list of _Frames, but also wrap each one with a _Traceback
# frame which is linked in the opposite direction (towards the newest
# frame).
stack = _Frame(tbFrames[0], stack)
firstTb = tb = _TracebackFrame(stack)
for sf in tbFrames[1:]:
stack = _Frame(sf, stack)
tb.tb_next = _TracebackFrame(stack)
tb = tb.tb_next
# Return the first _TracebackFrame.
return firstTb
class _TracebackFrame(object):
"""
Fake traceback object which can be passed to functions in the standard
library L{traceback} module.
"""
def __init__(self, frame):
"""
@param frame: _Frame object
"""
self.tb_frame = frame
self.tb_lineno = frame.f_lineno
self.tb_next = None
class _Frame(object):
"""
A fake frame object, used by L{_Traceback}.
@ivar f_code: fake L{code<types.CodeType>} object
@ivar f_lineno: line number
@ivar f_globals: fake f_globals dictionary (usually empty)
@ivar f_locals: fake f_locals dictionary (usually empty)
@ivar f_back: previous stack frame (towards the caller)
"""
def __init__(self, frameinfo, back):
"""
@param frameinfo: (methodname, filename, lineno, locals, globals)
@param back: previous (older) stack frame
@type back: C{frame}
"""
name, filename, lineno, localz, globalz = frameinfo
self.f_code = _Code(name, filename)
self.f_lineno = lineno
self.f_globals = {}
self.f_locals = {}
self.f_back = back
class _Code(object):
"""
A fake code object, used by L{_Traceback} via L{_Frame}.
"""
def __init__(self, name, filename):
self.co_name = name
self.co_filename = filename
_inlineCallbacksExtraneous = []
def _extraneous(f):
"""
Mark the given callable as extraneous to inlineCallbacks exception
reporting; don't show these functions.
@param f: a function that you NEVER WANT TO SEE AGAIN in ANY TRACEBACK
reported by Failure.
@type f: function
@return: f
"""
_inlineCallbacksExtraneous.append(f.__code__)
return f
class Failure(BaseException):
"""
A basic abstraction for an error that has occurred.
This is necessary because Python's built-in error mechanisms are
inconvenient for asynchronous communication.
The C{stack} and C{frames} attributes contain frames. Each frame is a tuple
of (funcName, fileName, lineNumber, localsItems, globalsItems), where
localsItems and globalsItems are the contents of
C{locals().items()}/C{globals().items()} for that frame, or an empty tuple
if those details were not captured.
@ivar value: The exception instance responsible for this failure.
@ivar type: The exception's class.
@ivar stack: list of frames, innermost last, excluding C{Failure.__init__}.
@ivar frames: list of frames, innermost first.
"""
pickled = 0
stack = None
# The opcode of "yield" in Python bytecode. We need this in
# _findFailure in order to identify whether an exception was
# thrown by a throwExceptionIntoGenerator.
# On Python 3, b'a'[0] == 97, while on Python 2, b'a'[0] == b'a'. Opcodes
# are stored in bytes, so we need to account for this difference when
# comparing against the stored bytecode.
if _PY3:
_yieldOpcode = opcode.opmap["YIELD_VALUE"]
else:
_yieldOpcode = chr(opcode.opmap["YIELD_VALUE"])
def __init__(self, exc_value=None, exc_type=None, exc_tb=None,
captureVars=False):
"""
Initialize me with an explanation of the error.
By default, this will use the current C{exception}
(L{sys.exc_info}()). However, if you want to specify a
particular kind of failure, you can pass an exception as an
argument.
If no C{exc_value} is passed, then an "original" C{Failure} will
be searched for. If the current exception handler that this
C{Failure} is being constructed in is handling an exception
raised by L{raiseException}, then this C{Failure} will act like
the original C{Failure}.
For C{exc_tb} only L{traceback} instances or L{None} are allowed.
If L{None} is supplied for C{exc_value}, the value of C{exc_tb} is
ignored, otherwise if C{exc_tb} is L{None}, it will be found from
execution context (ie, L{sys.exc_info}).
@param captureVars: if set, capture locals and globals of stack
frames. This is pretty slow, and makes no difference unless you
are going to use L{printDetailedTraceback}.
"""
global count
count = count + 1
self.count = count
self.type = self.value = tb = None
self.captureVars = captureVars
if isinstance(exc_value, str) and exc_type is None:
raise TypeError("Strings are not supported by Failure")
stackOffset = 0
if exc_value is None:
exc_value = self._findFailure()
if exc_value is None:
self.type, self.value, tb = sys.exc_info()
if self.type is None:
raise NoCurrentExceptionError()
stackOffset = 1
elif exc_type is None:
if isinstance(exc_value, Exception):
self.type = exc_value.__class__
else:
# Allow arbitrary objects.
self.type = type(exc_value)
self.value = exc_value
else:
self.type = exc_type
self.value = exc_value
if isinstance(self.value, Failure):
self._extrapolate(self.value)
return
if hasattr(self.value, "__failure__"):
# For exceptions propagated through coroutine-awaiting (see
# Deferred.send, AKA Deferred.__next__), which can't be raised as
# Failure because that would mess up the ability to except: them:
self._extrapolate(self.value.__failure__)
# Clean up the inherently circular reference established by storing
# the failure there. This should make the common case of a Twisted
# / Deferred-returning coroutine somewhat less hard on the garbage
# collector.
del self.value.__failure__
return
if tb is None:
if exc_tb:
tb = exc_tb
elif getattr(self.value, "__traceback__", None):
# Python 3
tb = self.value.__traceback__
frames = self.frames = []
stack = self.stack = []
# Added 2003-06-23 by Chris Armstrong. Yes, I actually have a
# use case where I need this traceback object, and I've made
# sure that it'll be cleaned up.
self.tb = tb
if tb:
f = tb.tb_frame
elif not isinstance(self.value, Failure):
# We don't do frame introspection since it's expensive,
# and if we were passed a plain exception with no
# traceback, it's not useful anyway
f = stackOffset = None
while stackOffset and f:
# This excludes this Failure.__init__ frame from the
# stack, leaving it to start with our caller instead.
f = f.f_back
stackOffset -= 1
# Keeps the *full* stack. Formerly in spread.pb.print_excFullStack:
#
# The need for this function arises from the fact that several
# PB classes have the peculiar habit of discarding exceptions
# with bareword "except:"s. This premature exception
# catching means tracebacks generated here don't tend to show
# what called upon the PB object.
while f:
if captureVars:
localz = f.f_locals.copy()
if f.f_locals is f.f_globals:
globalz = {}
else:
globalz = f.f_globals.copy()
for d in globalz, localz:
if "__builtins__" in d:
del d["__builtins__"]
localz = localz.items()
globalz = globalz.items()
else:
localz = globalz = ()
stack.insert(0, (
f.f_code.co_name,
f.f_code.co_filename,
f.f_lineno,
localz,
globalz,
))
f = f.f_back
while tb is not None:
f = tb.tb_frame
if captureVars:
localz = f.f_locals.copy()
if f.f_locals is f.f_globals:
globalz = {}
else:
globalz = f.f_globals.copy()
for d in globalz, localz:
if "__builtins__" in d:
del d["__builtins__"]
localz = list(localz.items())
globalz = list(globalz.items())
else:
localz = globalz = ()
frames.append((
f.f_code.co_name,
f.f_code.co_filename,
tb.tb_lineno,
localz,
globalz,
))
tb = tb.tb_next
if inspect.isclass(self.type) and issubclass(self.type, Exception):
parentCs = getmro(self.type)
self.parents = list(map(reflect.qual, parentCs))
else:
self.parents = [self.type]
def _extrapolate(self, otherFailure):
"""
Extrapolate from one failure into another, copying its stack frames.
@param otherFailure: Another L{Failure}, whose traceback information,
if any, should be preserved as part of the stack presented by this
one.
@type otherFailure: L{Failure}
"""
# Copy all infos from that failure (including self.frames).
self.__dict__ = copy.copy(otherFailure.__dict__)
# If we are re-throwing a Failure, we merge the stack-trace stored in
# the failure with the current exception's stack. This integrates with
# throwExceptionIntoGenerator and allows us to provide the full stack
# trace, even if we go through several layers of inlineCallbacks.
_, _, tb = sys.exc_info()
frames = []
while tb is not None:
f = tb.tb_frame
if f.f_code not in _inlineCallbacksExtraneous:
frames.append((
f.f_code.co_name,
f.f_code.co_filename,
tb.tb_lineno, (), ()
))
tb = tb.tb_next
# Merging current stack with stack stored in the Failure.
frames.extend(self.frames)
self.frames = frames
def trap(self, *errorTypes):
"""
Trap this failure if its type is in a predetermined list.
This allows you to trap a Failure in an error callback. It will be
automatically re-raised if it is not a type that you expect.
The reason for having this particular API is because it's very useful
in Deferred errback chains::
def _ebFoo(self, failure):
r = failure.trap(Spam, Eggs)
print('The Failure is due to either Spam or Eggs!')
if r == Spam:
print('Spam did it!')
elif r == Eggs:
print('Eggs did it!')
If the failure is not a Spam or an Eggs, then the Failure will be
'passed on' to the next errback. In Python 2 the Failure will be
raised; in Python 3 the underlying exception will be re-raised.
@type errorTypes: L{Exception}
"""
error = self.check(*errorTypes)
if not error:
if _PY3:
self.raiseException()
else:
raise self
return error
def check(self, *errorTypes):
"""
Check if this failure's type is in a predetermined list.
@type errorTypes: list of L{Exception} classes or
fully-qualified class names.
@returns: the matching L{Exception} type, or None if no match.
"""
for error in errorTypes:
err = error
if inspect.isclass(error) and issubclass(error, Exception):
err = reflect.qual(error)
if err in self.parents:
return error
return None
# It would be nice to use twisted.python.compat.reraise, but that breaks
# the stack exploration in _findFailure; possibly this can be fixed in
# #5931.
if getattr(BaseException, "with_traceback", None):
# Python 3
def raiseException(self):
raise self.value.with_traceback(self.tb)
else:
exec("""def raiseException(self):
raise self.type, self.value, self.tb""")
raiseException.__doc__ = (
"""
raise the original exception, preserving traceback
information if available.
""")
@_extraneous
def throwExceptionIntoGenerator(self, g):
"""
Throw the original exception into the given generator,
preserving traceback information if available.
@return: The next value yielded from the generator.
@raise StopIteration: If there are no more values in the generator.
@raise anything else: Anything that the generator raises.
"""
# Note that the actual magic to find the traceback information
# is done in _findFailure.
return g.throw(self.type, self.value, self.tb)
def _findFailure(cls):
"""
Find the failure that represents the exception currently in context.
"""
tb = sys.exc_info()[-1]
if not tb:
return
secondLastTb = None
lastTb = tb
while lastTb.tb_next:
secondLastTb = lastTb
lastTb = lastTb.tb_next
lastFrame = lastTb.tb_frame
# NOTE: f_locals.get('self') is used rather than
# f_locals['self'] because psyco frames do not contain
# anything in their locals() dicts. psyco makes debugging
# difficult anyhow, so losing the Failure objects (and thus
# the tracebacks) here when it is used is not that big a deal.
# Handle raiseException-originated exceptions
if lastFrame.f_code is cls.raiseException.__code__:
return lastFrame.f_locals.get('self')
# Handle throwExceptionIntoGenerator-originated exceptions
# this is tricky, and differs if the exception was caught
# inside the generator, or above it:
# It is only really originating from
# throwExceptionIntoGenerator if the bottom of the traceback
# is a yield.
# Pyrex and Cython extensions create traceback frames
# with no co_code, but they can't yield so we know it's okay to
# just return here.
if ((not lastFrame.f_code.co_code) or
lastFrame.f_code.co_code[lastTb.tb_lasti] != cls._yieldOpcode):
return
# If the exception was caught above the generator.throw
# (outside the generator), it will appear in the tb (as the
# second last item):
if secondLastTb:
frame = secondLastTb.tb_frame
if frame.f_code is cls.throwExceptionIntoGenerator.__code__:
return frame.f_locals.get('self')
# If the exception was caught below the generator.throw
# (inside the generator), it will appear in the frames' linked
# list, above the top-level traceback item (which must be the
# generator frame itself, thus its caller is
# throwExceptionIntoGenerator).
frame = tb.tb_frame.f_back
if frame and frame.f_code is cls.throwExceptionIntoGenerator.__code__:
return frame.f_locals.get('self')
_findFailure = classmethod(_findFailure)
def __repr__(self):
return "<%s %s: %s>" % (reflect.qual(self.__class__),
reflect.qual(self.type),
self.getErrorMessage())
def __str__(self):
return "[Failure instance: %s]" % self.getBriefTraceback()
def __getstate__(self):
"""Avoid pickling objects in the traceback.
"""
if self.pickled:
return self.__dict__
c = self.__dict__.copy()
c['frames'] = [
[
v[0], v[1], v[2],
_safeReprVars(v[3]),
_safeReprVars(v[4]),
] for v in self.frames
]
# Added 2003-06-23. See comment above in __init__
c['tb'] = None
if self.stack is not None:
# XXX: This is a band-aid. I can't figure out where these
# (failure.stack is None) instances are coming from.
c['stack'] = [
[
v[0], v[1], v[2],
_safeReprVars(v[3]),
_safeReprVars(v[4]),
] for v in self.stack
]
c['pickled'] = 1
return c
def cleanFailure(self):
"""
Remove references to other objects, replacing them with strings.
On Python 3, this will also set the C{__traceback__} attribute of the
exception instance to L{None}.
"""
self.__dict__ = self.__getstate__()
if getattr(self.value, "__traceback__", None):
# Python 3
self.value.__traceback__ = None
def getTracebackObject(self):
"""
Get an object that represents this Failure's stack that can be passed
to traceback.extract_tb.
If the original traceback object is still present, return that. If this
traceback object has been lost but we still have the information,
return a fake traceback object (see L{_Traceback}). If there is no
traceback information at all, return None.
"""
if self.tb is not None:
return self.tb
elif len(self.frames) > 0:
return _Traceback(self.stack, self.frames)
else:
return None
def getErrorMessage(self):
"""
Get a string of the exception which caused this Failure.
"""
if isinstance(self.value, Failure):
return self.value.getErrorMessage()
return reflect.safe_str(self.value)
def getBriefTraceback(self):
io = StringIO()
self.printBriefTraceback(file=io)
return io.getvalue()
def getTraceback(self, elideFrameworkCode=0, detail='default'):
io = StringIO()
self.printTraceback(file=io, elideFrameworkCode=elideFrameworkCode,
detail=detail)
return io.getvalue()
def printTraceback(self, file=None, elideFrameworkCode=False,
detail='default'):
"""
Emulate Python's standard error reporting mechanism.
@param file: If specified, a file-like object to which to write the
traceback.
@param elideFrameworkCode: A flag indicating whether to attempt to
remove uninteresting frames from within Twisted itself from the
output.
@param detail: A string indicating how much information to include
in the traceback. Must be one of C{'brief'}, C{'default'}, or
C{'verbose'}.
"""
if file is None:
from twisted.python import log
file = log.logerr
w = file.write
if detail == 'verbose' and not self.captureVars:
# We don't have any locals or globals, so rather than show them as
# empty make the output explicitly say that we don't have them at
# all.
formatDetail = 'verbose-vars-not-captured'
else:
formatDetail = detail
# Preamble
if detail == 'verbose':
w('*--- Failure #%d%s---\n' %
(self.count,
(self.pickled and ' (pickled) ') or ' '))
elif detail == 'brief':
if self.frames:
hasFrames = 'Traceback'
else:
hasFrames = 'Traceback (failure with no frames)'
w("%s: %s: %s\n" % (
hasFrames,
reflect.safe_str(self.type),
reflect.safe_str(self.value)))
else:
w('Traceback (most recent call last):\n')
# Frames, formatted in appropriate style
if self.frames:
if not elideFrameworkCode:
format_frames(self.stack[-traceupLength:], w, formatDetail)
w("%s\n" % (EXCEPTION_CAUGHT_HERE,))
format_frames(self.frames, w, formatDetail)
elif not detail == 'brief':
# Yeah, it's not really a traceback, despite looking like one...
w("Failure: ")
# Postamble, if any
if not detail == 'brief':
w("%s: %s\n" % (reflect.qual(self.type),
reflect.safe_str(self.value)))
# Chaining
if isinstance(self.value, Failure):
# TODO: indentation for chained failures?
file.write(" (chained Failure)\n")
self.value.printTraceback(file, elideFrameworkCode, detail)
if detail == 'verbose':
w('*--- End of Failure #%d ---\n' % self.count)
def printBriefTraceback(self, file=None, elideFrameworkCode=0):
"""
Print a traceback as densely as possible.
"""
self.printTraceback(file, elideFrameworkCode, detail='brief')
def printDetailedTraceback(self, file=None, elideFrameworkCode=0):
"""
Print a traceback with detailed locals and globals information.
"""
self.printTraceback(file, elideFrameworkCode, detail='verbose')
def _safeReprVars(varsDictItems):
"""
Convert a list of (name, object) pairs into (name, repr) pairs.
L{twisted.python.reflect.safe_repr} is used to generate the repr, so no
exceptions will be raised by faulty C{__repr__} methods.
@param varsDictItems: a sequence of (name, value) pairs as returned by e.g.
C{locals().items()}.
@returns: a sequence of (name, repr) pairs.
"""
return [(name, reflect.safe_repr(obj)) for (name, obj) in varsDictItems]
# slyphon: make post-morteming exceptions tweakable
DO_POST_MORTEM = True
def _debuginit(self, exc_value=None, exc_type=None, exc_tb=None,
captureVars=False,
Failure__init__=Failure.__init__):
"""
Initialize failure object, possibly spawning pdb.
"""
if (exc_value, exc_type, exc_tb) == (None, None, None):
exc = sys.exc_info()
if not exc[0] == self.__class__ and DO_POST_MORTEM:
try:
strrepr = str(exc[1])
except:
strrepr = "broken str"
print("Jumping into debugger for post-mortem of exception '%s':" %
(strrepr,))
import pdb
pdb.post_mortem(exc[2])
Failure__init__(self, exc_value, exc_type, exc_tb, captureVars)
def startDebugMode():
"""
Enable debug hooks for Failures.
"""
Failure.__init__ = _debuginit
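# Illustrative sketch (not part of the original module): the common
# construction pattern. A Failure built inside an except block captures the
# active exception; it can later be inspected with check() or formatted as a
# traceback, outside that block.
def _exampleFailure():
    try:
        {}["missing"]
    except KeyError:
        f = Failure()
    if f.check(KeyError):
        return f.getTraceback(detail="brief")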


@ -0,0 +1,220 @@
# -*- test-case-name: twisted.python.test.test_fakepwd -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
L{twisted.python.fakepwd} provides a fake implementation of the L{pwd} API.
"""
from __future__ import absolute_import, division
__all__ = ['UserDatabase', 'ShadowDatabase']
class _UserRecord(object):
"""
L{_UserRecord} holds the user data for a single user in L{UserDatabase}.
It corresponds to L{pwd.struct_passwd}. See that class for attribute
documentation.
"""
def __init__(self, name, password, uid, gid, gecos, home, shell):
self.pw_name = name
self.pw_passwd = password
self.pw_uid = uid
self.pw_gid = gid
self.pw_gecos = gecos
self.pw_dir = home
self.pw_shell = shell
def __len__(self):
return 7
def __getitem__(self, index):
return (
self.pw_name, self.pw_passwd, self.pw_uid,
self.pw_gid, self.pw_gecos, self.pw_dir, self.pw_shell)[index]
class UserDatabase(object):
"""
L{UserDatabase} holds traditional POSIX user data in memory and makes it
available via the same API as L{pwd}.
@ivar _users: A C{list} of L{_UserRecord} instances holding all user data
added to this database.
"""
def __init__(self):
self._users = []
def addUser(self, username, password, uid, gid, gecos, home, shell):
"""
Add a new user record to this database.
@param username: The value for the C{pw_name} field of the user
record to add.
@type username: C{str}
@param password: The value for the C{pw_passwd} field of the user
record to add.
@type password: C{str}
@param uid: The value for the C{pw_uid} field of the user record to
add.
@type uid: C{int}
@param gid: The value for the C{pw_gid} field of the user record to
add.
@type gid: C{int}
@param gecos: The value for the C{pw_gecos} field of the user record
to add.
@type gecos: C{str}
@param home: The value for the C{pw_dir} field of the user record to
add.
@type home: C{str}
@param shell: The value for the C{pw_shell} field of the user record to
add.
@type shell: C{str}
"""
self._users.append(_UserRecord(
username, password, uid, gid, gecos, home, shell))
def getpwuid(self, uid):
"""
Return the user record corresponding to the given uid.
"""
for entry in self._users:
if entry.pw_uid == uid:
return entry
raise KeyError()
def getpwnam(self, name):
"""
Return the user record corresponding to the given username.
"""
for entry in self._users:
if entry.pw_name == name:
return entry
raise KeyError()
def getpwall(self):
"""
Return a list of all user records.
"""
return self._users
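# Illustrative sketch (not part of the original module): populating the fake
# database and looking a user up the way code under test would use the real
# pwd module.
def _exampleUserDatabase():
    db = UserDatabase()
    db.addUser("alice", "x", 1000, 1000, "Alice", "/home/alice", "/bin/sh")
    return db.getpwnam("alice").pw_dir  # '/home/alice'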
class _ShadowRecord(object):
"""
L{_ShadowRecord} holds the shadow user data for a single user in
L{ShadowDatabase}. It corresponds to C{spwd.struct_spwd}. See that class
for attribute documentation.
"""
def __init__(self, username, password, lastChange, min, max, warn, inact,
expire, flag):
self.sp_nam = username
self.sp_pwd = password
self.sp_lstchg = lastChange
self.sp_min = min
self.sp_max = max
self.sp_warn = warn
self.sp_inact = inact
self.sp_expire = expire
self.sp_flag = flag
def __len__(self):
return 9
def __getitem__(self, index):
return (
self.sp_nam, self.sp_pwd, self.sp_lstchg, self.sp_min,
self.sp_max, self.sp_warn, self.sp_inact, self.sp_expire,
self.sp_flag)[index]
class ShadowDatabase(object):
"""
L{ShadowDatabase} holds a shadow user database in memory and makes it
available via the same API as C{spwd}.
@ivar _users: A C{list} of L{_ShadowRecord} instances holding all user data
added to this database.
@since: 12.0
"""
def __init__(self):
self._users = []
def addUser(self, username, password, lastChange, min, max, warn, inact,
expire, flag):
"""
Add a new user record to this database.
@param username: The value for the C{sp_nam} field of the user record to
add.
@type username: C{str}
@param password: The value for the C{sp_pwd} field of the user record to
add.
@type password: C{str}
@param lastChange: The value for the C{sp_lstchg} field of the user
record to add.
@type lastChange: C{int}
@param min: The value for the C{sp_min} field of the user record to add.
@type min: C{int}
@param max: The value for the C{sp_max} field of the user record to add.
@type max: C{int}
@param warn: The value for the C{sp_warn} field of the user record to
add.
@type warn: C{int}
@param inact: The value for the C{sp_inact} field of the user record to
add.
@type inact: C{int}
@param expire: The value for the C{sp_expire} field of the user record
to add.
@type expire: C{int}
@param flag: The value for the C{sp_flag} field of the user record to
add.
@type flag: C{int}
"""
self._users.append(_ShadowRecord(
username, password, lastChange,
min, max, warn, inact, expire, flag))
def getspnam(self, username):
"""
Return the shadow user record corresponding to the given username.
"""
for entry in self._users:
if entry.sp_nam == username:
return entry
raise KeyError
def getspall(self):
"""
Return a list of all shadow user records.
"""
return self._users

File diff suppressed because it is too large


@ -0,0 +1,377 @@
# -*- test-case-name: twisted.test.test_formmethod -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Form-based method objects.
This module contains support for descriptive method signatures that can be used
to format methods.
"""
import calendar
from twisted.python._oldstyle import _oldStyle
class FormException(Exception):
"""An error occurred calling the form method.
"""
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args)
self.descriptions = kwargs
class InputError(FormException):
"""
An error occurred with some input.
"""
@_oldStyle
class Argument:
"""Base class for form arguments."""
# default value for argument, if no other default is given
defaultDefault = None
def __init__(self, name, default=None, shortDesc=None,
longDesc=None, hints=None, allowNone=1):
self.name = name
self.allowNone = allowNone
if default is None:
default = self.defaultDefault
self.default = default
self.shortDesc = shortDesc
self.longDesc = longDesc
if not hints:
hints = {}
self.hints = hints
def addHints(self, **kwargs):
self.hints.update(kwargs)
def getHint(self, name, default=None):
return self.hints.get(name, default)
def getShortDescription(self):
return self.shortDesc or self.name.capitalize()
def getLongDescription(self):
return self.longDesc or '' #self.shortDesc or "The %s." % self.name
def coerce(self, val):
"""Convert the value to the correct format."""
raise NotImplementedError("implement in subclass")
class String(Argument):
"""A single string.
"""
defaultDefault = ''
min = 0
max = None
def __init__(self, name, default=None, shortDesc=None,
longDesc=None, hints=None, allowNone=1, min=0, max=None):
Argument.__init__(self, name, default=default, shortDesc=shortDesc,
longDesc=longDesc, hints=hints, allowNone=allowNone)
self.min = min
self.max = max
def coerce(self, val):
s = str(val)
if len(s) < self.min:
raise InputError("Value must be at least %s characters long" % self.min)
if self.max != None and len(s) > self.max:
raise InputError("Value must be at most %s characters long" % self.max)
return str(val)
class Text(String):
"""A long string.
"""
class Password(String):
"""A string which should be obscured when input.
"""
class VerifiedPassword(String):
"""A string that should be obscured when input and needs verification."""
def coerce(self, vals):
if len(vals) != 2 or vals[0] != vals[1]:
raise InputError("Please enter the same password twice.")
s = str(vals[0])
if len(s) < self.min:
raise InputError("Value must be at least %s characters long" % self.min)
if self.max != None and len(s) > self.max:
raise InputError("Value must be at most %s characters long" % self.max)
return s
class Hidden(String):
"""A string which is not displayed.
The passed default is used as the value.
"""
class Integer(Argument):
"""A single integer.
"""
defaultDefault = None
def __init__(self, name, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
#although Argument now has allowNone, that was recently added, and
#putting it at the end kept things which relied on argument order
#from breaking. However, allowNone originally was in here, so
#I have to keep the same order, to prevent breaking code that
#depends on argument order only
Argument.__init__(self, name, default, shortDesc, longDesc, hints,
allowNone)
def coerce(self, val):
if not val.strip() and self.allowNone:
return None
try:
return int(val)
except ValueError:
raise InputError("%s is not valid, please enter a whole number, e.g. 10" % val)
class IntegerRange(Integer):
def __init__(self, name, min, max, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
self.min = min
self.max = max
Integer.__init__(self, name, allowNone=allowNone, default=default, shortDesc=shortDesc,
longDesc=longDesc, hints=hints)
def coerce(self, val):
result = Integer.coerce(self, val)
if self.allowNone and result == None:
return result
if result < self.min:
raise InputError("Value %s is too small, it should be at least %s" % (result, self.min))
if result > self.max:
raise InputError("Value %s is too large, it should be at most %s" % (result, self.max))
return result
class Float(Argument):
defaultDefault = None
def __init__(self, name, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
#although Argument now has allowNone, that was recently added, and
#putting it at the end kept things which relied on argument order
#from breaking. However, allowNone originally was in here, so
#I have to keep the same order, to prevent breaking code that
#depends on argument order only
Argument.__init__(self, name, default, shortDesc, longDesc, hints,
allowNone)
def coerce(self, val):
if not val.strip() and self.allowNone:
return None
try:
return float(val)
except ValueError:
raise InputError("Invalid float: %s" % val)
class Choice(Argument):
"""
The result of a choice between enumerated types. The choices should
be a list of tuples of tag, value, and description. The tag will be
the value returned if the user hits "Submit", and the description
is the label for the enumerated type. default is a list of all the
values (second elements in choices). If no defaults are specified,
initially the first item will be selected. Only one item can (should)
be selected at once.
"""
def __init__(self, name, choices=[], default=[], shortDesc=None,
longDesc=None, hints=None, allowNone=1):
self.choices = choices
if choices and not default:
default.append(choices[0][1])
Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone=allowNone)
def coerce(self, inIdent):
for ident, val, desc in self.choices:
if ident == inIdent:
return val
else:
raise InputError("Invalid Choice: %s" % inIdent)
class Flags(Argument):
"""
The result of a checkbox group or multi-menu. The flags should be a
list of tuples of tag, value, and description. The tag will be
the value returned if the user hits "Submit", and the description
is the label for the enumerated type. default is a list of all the
values (second elements in flags). If no defaults are specified,
initially nothing will be selected. Several items may be selected at
once.
"""
def __init__(self, name, flags=(), default=(), shortDesc=None,
longDesc=None, hints=None, allowNone=1):
self.flags = flags
Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone=allowNone)
def coerce(self, inFlagKeys):
if not inFlagKeys:
return []
outFlags = []
for inFlagKey in inFlagKeys:
for flagKey, flagVal, flagDesc in self.flags:
if inFlagKey == flagKey:
outFlags.append(flagVal)
break
else:
raise InputError("Invalid Flag: %s" % inFlagKey)
return outFlags
class CheckGroup(Flags):
pass
class RadioGroup(Choice):
pass
class Boolean(Argument):
def coerce(self, inVal):
if not inVal:
return 0
lInVal = str(inVal).lower()
if lInVal in ('no', 'n', 'f', 'false', '0'):
return 0
return 1
class File(Argument):
def __init__(self, name, allowNone=1, shortDesc=None, longDesc=None,
hints=None):
Argument.__init__(self, name, None, shortDesc, longDesc, hints,
allowNone=allowNone)
def coerce(self, file):
if not file and self.allowNone:
return None
elif file:
return file
else:
raise InputError("Invalid File")
def positiveInt(x):
x = int(x)
if x <= 0: raise ValueError
return x
class Date(Argument):
"""A date -- (year, month, day) tuple."""
defaultDefault = None
def __init__(self, name, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
Argument.__init__(self, name, default, shortDesc, longDesc, hints)
self.allowNone = allowNone
if not allowNone:
self.defaultDefault = (1970, 1, 1)
def coerce(self, args):
"""Return tuple of ints (year, month, day)."""
if tuple(args) == ("", "", "") and self.allowNone:
return None
try:
year, month, day = map(positiveInt, args)
except ValueError:
raise InputError("Invalid date")
if (month, day) == (2, 29):
if not calendar.isleap(year):
raise InputError("%d was not a leap year" % year)
else:
return year, month, day
try:
mdays = calendar.mdays[month]
except IndexError:
raise InputError("Invalid date")
if day > mdays:
raise InputError("Invalid date")
return year, month, day
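# Illustrative sketch (not part of the original module): a Date field coerces
# the three form inputs into an integer tuple and validates the calendar.
def _exampleDate():
    birthday = Date("birthday")
    return birthday.coerce(("1990", "2", "28"))  # (1990, 2, 28)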
class Submit(Choice):
"""Submit button or a reasonable facsimile thereof."""
def __init__(self, name, choices=[("Submit", "submit", "Submit form")],
reset=0, shortDesc=None, longDesc=None, allowNone=0, hints=None):
Choice.__init__(self, name, choices=choices, shortDesc=shortDesc,
longDesc=longDesc, hints=hints)
self.allowNone = allowNone
self.reset = reset
def coerce(self, value):
if self.allowNone and not value:
return None
else:
return Choice.coerce(self, value)
@_oldStyle
class PresentationHint:
"""
A hint to a particular system.
"""
@_oldStyle
class MethodSignature:
"""
A signature of a callable.
"""
def __init__(self, *sigList):
"""
"""
self.methodSignature = sigList
def getArgument(self, name):
for a in self.methodSignature:
if a.name == name:
return a
def method(self, callable, takesRequest=False):
return FormMethod(self, callable, takesRequest)
@_oldStyle
class FormMethod:
"""A callable object with a signature."""
def __init__(self, signature, callable, takesRequest=False):
self.signature = signature
self.callable = callable
self.takesRequest = takesRequest
def getArgs(self):
return tuple(self.signature.methodSignature)
def call(self,*args,**kw):
return self.callable(*args,**kw)
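# Illustrative sketch (not part of the original module): describing a callable
# with a signature and coercing raw form input through it. The field names and
# the target function are hypothetical.
def _exampleFormMethod():
    sig = MethodSignature(String("name", min=1), Integer("age"))
    method = sig.method(lambda name, age: "%s is %s" % (name, age))
    coerced = [arg.coerce(value)
               for arg, value in zip(method.getArgs(), ["Bob", "42"])]
    return method.call(*coerced)  # 'Bob is 42'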


@ -0,0 +1,131 @@
# -*- test-case-name: twisted.python.test.test_htmlizer -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
HTML rendering of Python source.
"""
from twisted.python.compat import _tokenize, escape
import tokenize, keyword
from . import reflect
from twisted.python._oldstyle import _oldStyle
@_oldStyle
class TokenPrinter:
"""
Format a stream of tokens and intermediate whitespace, for pretty-printing.
"""
currentCol, currentLine = 0, 1
lastIdentifier = parameters = 0
encoding = "utf-8"
def __init__(self, writer):
"""
@param writer: A file-like object, opened in bytes mode.
"""
self.writer = writer
def printtoken(self, type, token, sCoordinates, eCoordinates, line):
if hasattr(tokenize, "ENCODING") and type == tokenize.ENCODING:
self.encoding = token
return
if not isinstance(token, bytes):
token = token.encode(self.encoding)
(srow, scol) = sCoordinates
(erow, ecol) = eCoordinates
if self.currentLine < srow:
self.writer(b'\n' * (srow-self.currentLine))
self.currentLine, self.currentCol = srow, 0
self.writer(b' ' * (scol-self.currentCol))
if self.lastIdentifier:
type = "identifier"
self.parameters = 1
elif type == tokenize.NAME:
if keyword.iskeyword(token):
type = 'keyword'
else:
if self.parameters:
type = 'parameter'
else:
type = 'variable'
else:
type = tokenize.tok_name.get(type).lower()
self.writer(token, type)
self.currentCol = ecol
self.currentLine += token.count(b'\n')
if self.currentLine != erow:
self.currentCol = 0
self.lastIdentifier = token in (b'def', b'class')
if token == b':':
self.parameters = 0
@_oldStyle
class HTMLWriter:
"""
Write the stream of tokens and whitespace from L{TokenPrinter}, formatting
tokens as HTML spans.
"""
noSpan = []
def __init__(self, writer):
self.writer = writer
noSpan = []
reflect.accumulateClassList(self.__class__, "noSpan", noSpan)
self.noSpan = noSpan
def write(self, token, type=None):
if isinstance(token, bytes):
token = token.decode("utf-8")
token = escape(token)
token = token.encode("utf-8")
if (type is None) or (type in self.noSpan):
self.writer(token)
else:
self.writer(
b'<span class="py-src-' + type.encode("utf-8") + b'">' +
token + b'</span>')
class SmallerHTMLWriter(HTMLWriter):
"""
HTMLWriter that doesn't generate spans for some junk.
Results in much smaller HTML output.
"""
noSpan = ["endmarker", "indent", "dedent", "op", "newline", "nl"]
def filter(inp, out, writer=HTMLWriter):
out.write(b'<pre>')
printer = TokenPrinter(writer(out.write).write).printtoken
try:
for token in _tokenize(inp.readline):
(tokenType, string, start, end, line) = token
printer(tokenType, string, start, end, line)
except tokenize.TokenError:
pass
out.write(b'</pre>\n')
def main():
import sys
stdout = getattr(sys.stdout, "buffer", sys.stdout)
with open(sys.argv[1], "rb") as f:
filter(f, stdout)
if __name__ == '__main__':
main()
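# Illustrative sketch (not part of the original module): rendering a source
# file to HTML programmatically instead of via the command line; the paths are
# hypothetical.
def _exampleHtmlize():
    with open("example.py", "rb") as source, open("example.html", "wb") as target:
        filter(source, target)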


@ -0,0 +1,248 @@
# -*- test-case-name: twisted.test.test_lockfile -*-
# Copyright (c) 2005 Divmod, Inc.
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Filesystem-based interprocess mutex.
"""
from __future__ import absolute_import, division
import errno
import os
from time import time as _uniquefloat
from twisted.python.runtime import platform
from twisted.python.compat import _PY3
def unique():
return str(int(_uniquefloat() * 1000))
from os import rename
if not platform.isWindows():
from os import kill
from os import symlink
from os import readlink
from os import remove as rmlink
_windows = False
else:
_windows = True
# On UNIX, a symlink can be made to a nonexistent location, and
# FilesystemLock uses this by making the target of the symlink an
# imaginary, non-existing file named that of the PID of the process with
# the lock. This has some benefits on UNIX -- making and removing this
# symlink is atomic. However, because Windows doesn't support symlinks (at
# least as how we know them), we have to fake this and actually write a
# file with the PID of the process holding the lock instead.
# These functions below perform that unenviable, probably-fraught-with-
# race-conditions duty. - hawkie
try:
from win32api import OpenProcess
import pywintypes
except ImportError:
kill = None
else:
ERROR_ACCESS_DENIED = 5
ERROR_INVALID_PARAMETER = 87
def kill(pid, signal):
try:
OpenProcess(0, 0, pid)
except pywintypes.error as e:
if e.args[0] == ERROR_ACCESS_DENIED:
return
elif e.args[0] == ERROR_INVALID_PARAMETER:
raise OSError(errno.ESRCH, None)
raise
else:
raise RuntimeError("OpenProcess is required to fail.")
# For monkeypatching in tests
_open = open
def symlink(value, filename):
"""
Write a file at C{filename} with the contents of C{value}. See the
above comment block as to why this is needed.
"""
# XXX Implement an atomic thingamajig for win32
newlinkname = filename + "." + unique() + '.newlink'
newvalname = os.path.join(newlinkname, "symlink")
os.mkdir(newlinkname)
# Python 3 does not support the 'commit' flag of fopen in the MSVCRT
# (http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx)
if _PY3:
mode = 'w'
else:
mode = 'wc'
with _open(newvalname, mode) as f:
f.write(value)
f.flush()
try:
rename(newlinkname, filename)
except:
os.remove(newvalname)
os.rmdir(newlinkname)
raise
def readlink(filename):
"""
Read the contents of C{filename}. See the above comment block as to why
this is needed.
"""
try:
fObj = _open(os.path.join(filename, 'symlink'), 'r')
except IOError as e:
if e.errno == errno.ENOENT or e.errno == errno.EIO:
raise OSError(e.errno, None)
raise
else:
with fObj:
result = fObj.read()
return result
def rmlink(filename):
os.remove(os.path.join(filename, 'symlink'))
os.rmdir(filename)
class FilesystemLock(object):
"""
A mutex.
This relies on the filesystem property that creating
a symlink is an atomic operation and that it will
fail if the symlink already exists. Deleting the
symlink will release the lock.
@ivar name: The name of the file associated with this lock.
@ivar clean: Indicates whether this lock was released cleanly by its
last owner. Only meaningful after C{lock} has been called and
returns True.
@ivar locked: Indicates whether the lock is currently held by this
object.
"""
clean = None
locked = False
def __init__(self, name):
self.name = name
def lock(self):
"""
Acquire this lock.
@rtype: C{bool}
@return: True if the lock is acquired, false otherwise.
@raise: Any exception os.symlink() may raise, other than
EEXIST.
"""
clean = True
while True:
try:
symlink(str(os.getpid()), self.name)
except OSError as e:
if _windows and e.errno in (errno.EACCES, errno.EIO):
# The lock is in the middle of being deleted because we're
# on Windows where lock removal isn't atomic. Give up, we
# don't know how long this is going to take.
return False
if e.errno == errno.EEXIST:
try:
pid = readlink(self.name)
except (IOError, OSError) as e:
if e.errno == errno.ENOENT:
# The lock has vanished, try to claim it in the
# next iteration through the loop.
continue
elif _windows and e.errno == errno.EACCES:
# The lock is in the middle of being
# deleted because we're on Windows where
# lock removal isn't atomic. Give up, we
# don't know how long this is going to
# take.
return False
raise
try:
if kill is not None:
kill(int(pid), 0)
except OSError as e:
if e.errno == errno.ESRCH:
# The owner has vanished, try to claim it in the
# next iteration through the loop.
try:
rmlink(self.name)
except OSError as e:
if e.errno == errno.ENOENT:
# Another process cleaned up the lock.
# Race them to acquire it in the next
# iteration through the loop.
continue
raise
clean = False
continue
raise
return False
raise
self.locked = True
self.clean = clean
return True
def unlock(self):
"""
Release this lock.
This deletes the directory with the given name.
@raise: Any exception os.readlink() may raise, or
ValueError if the lock is not owned by this process.
"""
pid = readlink(self.name)
if int(pid) != os.getpid():
raise ValueError(
"Lock %r not owned by this process" % (self.name,))
rmlink(self.name)
self.locked = False
def isLocked(name):
"""
Determine if the lock of the given name is held or not.
@type name: C{str}
@param name: The filesystem path to the lock to test
@rtype: C{bool}
@return: True if the lock is held, False otherwise.
"""
l = FilesystemLock(name)
result = None
try:
result = l.lock()
finally:
if result:
l.unlock()
return not result
__all__ = ['FilesystemLock', 'isLocked']
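# Illustrative usage sketch, assuming this module is importable as
# twisted.python.lockfile: guard a critical section between processes.
from twisted.python.lockfile import FilesystemLock
lock = FilesystemLock("myapp.lock")
if lock.lock():
    try:
        pass                           # do the exclusive work here
    finally:
        lock.unlock()
else:
    print("another process currently holds the lock")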

View file

@@ -0,0 +1,767 @@
# -*- test-case-name: twisted.test.test_log -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Logging and metrics infrastructure.
"""
from __future__ import division, absolute_import
import sys
import time
import warnings
from datetime import datetime
from zope.interface import Interface
from twisted.python.compat import unicode, _PY3
from twisted.python import context
from twisted.python import reflect
from twisted.python import util
from twisted.python import failure
from twisted.python._oldstyle import _oldStyle
from twisted.python.threadable import synchronize
from twisted.logger import (
Logger as NewLogger, LogLevel as NewLogLevel,
STDLibLogObserver as NewSTDLibLogObserver,
LegacyLogObserverWrapper, LoggingFile, LogPublisher as NewPublisher,
globalLogPublisher as newGlobalLogPublisher,
globalLogBeginner as newGlobalLogBeginner,
)
from twisted.logger._global import LogBeginner
from twisted.logger._legacy import publishToNewObserver as _publishNew
@_oldStyle
class ILogContext:
"""
Actually, this interface is just a synonym for the dictionary interface,
but it serves as a key for the default information in a log.
I do not inherit from C{Interface} because the world is a cruel place.
"""
class ILogObserver(Interface):
"""
An observer which can do something with log events.
Given that most log observers are actually bound methods, it's okay to not
explicitly declare provision of this interface.
"""
def __call__(eventDict):
"""
Log an event.
@type eventDict: C{dict} with C{str} keys.
@param eventDict: A dictionary with arbitrary keys. However, these
keys are often available:
- C{message}: A C{tuple} of C{str} containing messages to be
logged.
- C{system}: A C{str} which indicates the "system" which is
generating this event.
- C{isError}: A C{bool} indicating whether this event represents
an error.
- C{failure}: A L{failure.Failure} instance
- C{why}: Used as header of the traceback in case of errors.
- C{format}: A string format used in place of C{message} to
customize the event. The intent is for the observer to format
a message by doing something like C{format % eventDict}.
"""
context.setDefault(ILogContext,
{"system": "-"})
def callWithContext(ctx, func, *args, **kw):
newCtx = context.get(ILogContext).copy()
newCtx.update(ctx)
return context.call({ILogContext: newCtx}, func, *args, **kw)
def callWithLogger(logger, func, *args, **kw):
"""
Utility method which wraps a function in a try:/except:, logs a failure if
one occurs, and uses the system's logPrefix.
"""
try:
lp = logger.logPrefix()
except KeyboardInterrupt:
raise
except:
lp = '(buggy logPrefix method)'
err(system=lp)
try:
return callWithContext({"system": lp}, func, *args, **kw)
except KeyboardInterrupt:
raise
except:
err(system=lp)
def err(_stuff=None, _why=None, **kw):
"""
Write a failure to the log.
The C{_stuff} and C{_why} parameters use an underscore prefix to lessen
the chance of colliding with a keyword argument the application wishes
to pass. It is intended that they be supplied with arguments passed
positionally, not by keyword.
@param _stuff: The failure to log. If C{_stuff} is L{None} a new
L{Failure} will be created from the current exception state. If
C{_stuff} is an C{Exception} instance it will be wrapped in a
L{Failure}.
@type _stuff: L{None}, C{Exception}, or L{Failure}.
@param _why: The source of this failure. This will be logged along with
C{_stuff} and should describe the context in which the failure
occurred.
@type _why: C{str}
"""
if _stuff is None:
_stuff = failure.Failure()
if isinstance(_stuff, failure.Failure):
msg(failure=_stuff, why=_why, isError=1, **kw)
elif isinstance(_stuff, Exception):
msg(failure=failure.Failure(_stuff), why=_why, isError=1, **kw)
else:
msg(repr(_stuff), why=_why, isError=1, **kw)
deferr = err
@_oldStyle
class Logger:
"""
This represents a class which may 'own' a log. Used by subclassing.
"""
def logPrefix(self):
"""
Override this method to insert custom logging behavior. Its
return value will be inserted in front of every line. It may
be called more times than the number of output lines.
"""
return '-'
@_oldStyle
class LogPublisher:
"""
Class for singleton log message publishing.
"""
synchronized = ['msg']
def __init__(self, observerPublisher=None, publishPublisher=None,
logBeginner=None, warningsModule=warnings):
if publishPublisher is None:
publishPublisher = NewPublisher()
if observerPublisher is None:
observerPublisher = publishPublisher
if observerPublisher is None:
observerPublisher = NewPublisher()
self._observerPublisher = observerPublisher
self._publishPublisher = publishPublisher
self._legacyObservers = []
if logBeginner is None:
# This default behavior is really only used for testing.
beginnerPublisher = NewPublisher()
beginnerPublisher.addObserver(observerPublisher)
logBeginner = LogBeginner(beginnerPublisher, NullFile(), sys,
warnings)
self._logBeginner = logBeginner
self._warningsModule = warningsModule
self._oldshowwarning = warningsModule.showwarning
self.showwarning = self._logBeginner.showwarning
@property
def observers(self):
"""
Property returning all observers registered on this L{LogPublisher}.
@return: observers previously added with L{LogPublisher.addObserver}
@rtype: L{list} of L{callable}
"""
return [x.legacyObserver for x in self._legacyObservers]
def _startLogging(self, other, setStdout):
"""
Begin logging to the L{LogBeginner} associated with this
L{LogPublisher}.
@param other: the observer to log to.
@type other: L{LogBeginner}
@param setStdout: if true, send standard I/O to the observer as well.
@type setStdout: L{bool}
"""
wrapped = LegacyLogObserverWrapper(other)
self._legacyObservers.append(wrapped)
self._logBeginner.beginLoggingTo([wrapped], True, setStdout)
def _stopLogging(self):
"""
Clean-up hook for fixing potentially global state. Only for testing of
this module itself. If you want less global state, use the new
warnings system in L{twisted.logger}.
"""
if self._warningsModule.showwarning == self.showwarning:
self._warningsModule.showwarning = self._oldshowwarning
def addObserver(self, other):
"""
Add a new observer.
@type other: Provider of L{ILogObserver}
@param other: A callable object that will be called with each new log
message (a dict).
"""
wrapped = LegacyLogObserverWrapper(other)
self._legacyObservers.append(wrapped)
self._observerPublisher.addObserver(wrapped)
def removeObserver(self, other):
"""
Remove an observer.
"""
for observer in self._legacyObservers:
if observer.legacyObserver == other:
self._legacyObservers.remove(observer)
self._observerPublisher.removeObserver(observer)
break
def msg(self, *message, **kw):
"""
Log a new message.
The message should be a native string, i.e. bytes on Python 2 and
Unicode on Python 3. For compatibility with both use the native string
syntax, for example::
>>> log.msg('Hello, world.')
You MUST avoid passing in Unicode on Python 2, and the form::
>>> log.msg('Hello ', 'world.')
This form only works (sometimes) by accident.
Keyword arguments will be converted into items in the event
dict that is passed to L{ILogObserver} implementations.
Each implementation, in turn, can define keys that are used
by it specifically, in addition to common keys listed at
L{ILogObserver.__call__}.
For example, to set the C{system} parameter while logging
a message::
>>> log.msg('Started', system='Foo')
"""
actualEventDict = (context.get(ILogContext) or {}).copy()
actualEventDict.update(kw)
actualEventDict['message'] = message
actualEventDict['time'] = time.time()
if "isError" not in actualEventDict:
actualEventDict["isError"] = 0
_publishNew(self._publishPublisher, actualEventDict, textFromEventDict)
synchronize(LogPublisher)
if 'theLogPublisher' not in globals():
def _actually(something):
"""
A decorator that returns its argument rather than the thing it is
decorating.
This allows the documentation generator to see an alias for a method or
constant as an object with a docstring and thereby document it and
allow references to it statically.
@param something: An object to create an alias for.
@type something: L{object}
@return: a 1-argument callable that returns C{something}
@rtype: L{object}
"""
def decorate(thingWithADocstring):
return something
return decorate
theLogPublisher = LogPublisher(
observerPublisher=newGlobalLogPublisher,
publishPublisher=newGlobalLogPublisher,
logBeginner=newGlobalLogBeginner,
)
@_actually(theLogPublisher.addObserver)
def addObserver(observer):
"""
Add a log observer to the global publisher.
@see: L{LogPublisher.addObserver}
@param observer: a log observer
@type observer: L{callable}
"""
@_actually(theLogPublisher.removeObserver)
def removeObserver(observer):
"""
Remove a log observer from the global publisher.
@see: L{LogPublisher.removeObserver}
@param observer: a log observer previously added with L{addObserver}
@type observer: L{callable}
"""
@_actually(theLogPublisher.msg)
def msg(*message, **event):
"""
Publish a message to the global log publisher.
@see: L{LogPublisher.msg}
@param message: the log message
@type message: C{tuple} of L{str} (native string)
@param event: fields for the log event
@type event: L{dict} mapping L{str} (native string) to L{object}
"""
@_actually(theLogPublisher.showwarning)
def showwarning():
"""
Publish a Python warning through the global log publisher.
@see: L{LogPublisher.showwarning}
"""
def _safeFormat(fmtString, fmtDict):
"""
Try to format a string, swallowing all errors to always return a string.
@note: For backward-compatibility reasons, this function ensures that it
returns a native string, meaning C{bytes} in Python 2 and C{unicode} in
Python 3.
@param fmtString: a C{%}-format string
@param fmtDict: string formatting arguments for C{fmtString}
@return: A native string, formatted from C{fmtString} and C{fmtDict}.
@rtype: L{str}
"""
# There's a way we could make this if not safer at least more
# informative: perhaps some sort of str/repr wrapper objects
# could be wrapped around the things inside of C{fmtDict}. That way
# if the event dict contains an object with a bad __repr__, we
# can only cry about that individual object instead of the
# entire event dict.
try:
text = fmtString % fmtDict
except KeyboardInterrupt:
raise
except:
try:
text = ('Invalid format string or unformattable object in '
'log message: %r, %s' % (fmtString, fmtDict))
except:
try:
text = ('UNFORMATTABLE OBJECT WRITTEN TO LOG with fmt %r, '
'MESSAGE LOST' % (fmtString,))
except:
text = ('PATHOLOGICAL ERROR IN BOTH FORMAT STRING AND '
'MESSAGE DETAILS, MESSAGE LOST')
# Return a native string
if _PY3:
if isinstance(text, bytes):
text = text.decode("utf-8")
else:
if isinstance(text, unicode):
text = text.encode("utf-8")
return text
def textFromEventDict(eventDict):
"""
Extract text from an event dict passed to a log observer. If it cannot
handle the dict, it returns None.
The possible keys of eventDict are:
- C{message}: by default, it holds the final text. It's required, but can
be empty if either C{isError} or C{format} is provided (the first
having the priority).
- C{isError}: boolean indicating the nature of the event.
- C{failure}: L{failure.Failure} instance, required if the event is an
error.
- C{why}: if defined, used as header of the traceback in case of errors.
- C{format}: string format used in place of C{message} to customize
the event. It uses all keys present in C{eventDict} to format
the text.
Other keys will be used when applying the C{format}, or ignored.
"""
edm = eventDict['message']
if not edm:
if eventDict['isError'] and 'failure' in eventDict:
why = eventDict.get('why')
if why:
why = reflect.safe_str(why)
else:
why = 'Unhandled Error'
try:
traceback = eventDict['failure'].getTraceback()
except Exception as e:
traceback = '(unable to obtain traceback): ' + str(e)
text = (why + '\n' + traceback)
elif 'format' in eventDict:
text = _safeFormat(eventDict['format'], eventDict)
else:
# We don't know how to log this
return None
else:
text = ' '.join(map(reflect.safe_str, edm))
return text
@_oldStyle
class _GlobalStartStopMixIn:
"""
Mix-in for global log observers that can start and stop.
"""
def start(self):
"""
Start observing log events.
"""
addObserver(self.emit)
def stop(self):
"""
Stop observing log events.
"""
removeObserver(self.emit)
class FileLogObserver(_GlobalStartStopMixIn):
"""
Log observer that writes to a file-like object.
@type timeFormat: C{str} or L{None}
@ivar timeFormat: If not L{None}, the format string passed to strftime().
"""
timeFormat = None
def __init__(self, f):
# Compatibility
self.write = f.write
self.flush = f.flush
def getTimezoneOffset(self, when):
"""
Return the current local timezone offset from UTC.
@type when: C{int}
@param when: POSIX (ie, UTC) timestamp for which to find the offset.
@rtype: C{int}
@return: The number of seconds offset from UTC. West is positive,
east is negative.
"""
offset = datetime.utcfromtimestamp(when) - datetime.fromtimestamp(when)
return offset.days * (60 * 60 * 24) + offset.seconds
def formatTime(self, when):
"""
Format the given UTC value as a string representing that time in the
local timezone.
By default it's formatted as an ISO8601-like string (ISO8601 date and
ISO8601 time separated by a space). It can be customized using the
C{timeFormat} attribute, which will be used as input for the underlying
L{datetime.datetime.strftime} call.
@type when: C{int}
@param when: POSIX (ie, UTC) timestamp for which to find the offset.
@rtype: C{str}
"""
if self.timeFormat is not None:
return datetime.fromtimestamp(when).strftime(self.timeFormat)
tzOffset = -self.getTimezoneOffset(when)
when = datetime.utcfromtimestamp(when + tzOffset)
tzHour = abs(int(tzOffset / 60 / 60))
tzMin = abs(int(tzOffset / 60 % 60))
if tzOffset < 0:
tzSign = '-'
else:
tzSign = '+'
return '%d-%02d-%02d %02d:%02d:%02d%s%02d%02d' % (
when.year, when.month, when.day,
when.hour, when.minute, when.second,
tzSign, tzHour, tzMin)
def emit(self, eventDict):
"""
Format the given log event as text and write it to the output file.
@param eventDict: a log event
@type eventDict: L{dict} mapping L{str} (native string) to L{object}
"""
text = textFromEventDict(eventDict)
if text is None:
return
timeStr = self.formatTime(eventDict["time"])
fmtDict = {
"system": eventDict["system"],
"text": text.replace("\n", "\n\t")
}
msgStr = _safeFormat("[%(system)s] %(text)s\n", fmtDict)
util.untilConcludes(self.write, timeStr + " " + msgStr)
util.untilConcludes(self.flush) # Hoorj!
class PythonLoggingObserver(_GlobalStartStopMixIn, object):
"""
Output twisted messages to Python standard library L{logging} module.
WARNING: specific logging configurations (example: network) can lead to
a blocking system. Nothing is done here to prevent that, so be sure to not
use this: code within Twisted, such as twisted.web, assumes that logging
does not block.
"""
def __init__(self, loggerName="twisted"):
"""
@param loggerName: identifier used for getting logger.
@type loggerName: C{str}
"""
self._newObserver = NewSTDLibLogObserver(loggerName)
def emit(self, eventDict):
"""
Receive a twisted log entry, format it and bridge it to python.
By default the logging level used is info; log.err produces error
level, and you can customize the level by using the C{logLevel} key::
>>> log.msg('debugging', logLevel=logging.DEBUG)
"""
if 'log_format' in eventDict:
_publishNew(self._newObserver, eventDict, textFromEventDict)
@_oldStyle
class StdioOnnaStick:
"""
Class that pretends to be stdout/err, and turns writes into log messages.
@ivar isError: boolean indicating whether this is stderr, in which cases
log messages will be logged as errors.
@ivar encoding: unicode encoding used to encode any unicode strings
written to this object.
"""
closed = 0
softspace = 0
mode = 'wb'
name = '<stdio (log)>'
def __init__(self, isError=0, encoding=None):
self.isError = isError
if encoding is None:
encoding = sys.getdefaultencoding()
self.encoding = encoding
self.buf = ''
def close(self):
pass
def fileno(self):
return -1
def flush(self):
pass
def read(self):
raise IOError("can't read from the log!")
readline = read
readlines = read
seek = read
tell = read
def write(self, data):
if not _PY3 and isinstance(data, unicode):
data = data.encode(self.encoding)
d = (self.buf + data).split('\n')
self.buf = d[-1]
messages = d[0:-1]
for message in messages:
msg(message, printed=1, isError=self.isError)
def writelines(self, lines):
for line in lines:
if not _PY3 and isinstance(line, unicode):
line = line.encode(self.encoding)
msg(line, printed=1, isError=self.isError)
def startLogging(file, *a, **kw):
"""
Initialize logging to a specified file.
@return: A L{FileLogObserver} if a new observer is added, None otherwise.
"""
if isinstance(file, LoggingFile):
return
flo = FileLogObserver(file)
startLoggingWithObserver(flo.emit, *a, **kw)
return flo
def startLoggingWithObserver(observer, setStdout=1):
"""
Initialize logging to a specified observer. If setStdout is true
(defaults to yes), also redirect sys.stdout and sys.stderr
to the specified file.
"""
theLogPublisher._startLogging(observer, setStdout)
msg("Log opened.")
@_oldStyle
class NullFile:
"""
A file-like object that discards everything.
"""
softspace = 0
def read(self):
"""
Do nothing.
"""
def write(self, bytes):
"""
Do nothing.
@param bytes: data
@type bytes: L{bytes}
"""
def flush(self):
"""
Do nothing.
"""
def close(self):
"""
Do nothing.
"""
def discardLogs():
"""
Discard messages logged via the global C{logfile} object.
"""
global logfile
logfile = NullFile()
# Prevent logfile from being erased on reload. This only works in cpython.
if 'logfile' not in globals():
logfile = LoggingFile(logger=NewLogger(),
level=NewLogLevel.info,
encoding=getattr(sys.stdout, "encoding", None))
logerr = LoggingFile(logger=NewLogger(),
level=NewLogLevel.error,
encoding=getattr(sys.stderr, "encoding", None))
class DefaultObserver(_GlobalStartStopMixIn):
"""
Default observer.
Will ignore all non-error messages and send error messages to sys.stderr.
Will be removed when startLogging() is called for the first time.
"""
stderr = sys.stderr
def emit(self, eventDict):
"""
Emit an event dict.
@param eventDict: an event dict
@type eventDict: dict
"""
if eventDict["isError"]:
text = textFromEventDict(eventDict)
self.stderr.write(text)
self.stderr.flush()
if 'defaultObserver' not in globals():
defaultObserver = DefaultObserver()
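# Illustrative usage sketch, assuming this module is importable as
# twisted.python.log: send legacy-style messages and failures to stdout.
import sys as _sys_example
from twisted.python import log
log.startLogging(_sys_example.stdout)  # installs a FileLogObserver on stdout
log.msg("service starting", system="example")
try:
    1 / 0
except ZeroDivisionError:
    log.err(None, "demonstration failure")   # logs the active exception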

View file

@@ -0,0 +1,340 @@
# -*- test-case-name: twisted.test.test_logfile -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A rotating, browsable log file.
"""
from __future__ import division, absolute_import
# System Imports
import os, glob, time, stat
from twisted.python import threadable
from twisted.python._oldstyle import _oldStyle
from twisted.python.compat import unicode
@_oldStyle
class BaseLogFile:
"""
The base class for a log file that can be rotated.
"""
synchronized = ["write", "rotate"]
def __init__(self, name, directory, defaultMode=None):
"""
Create a log file.
@param name: name of the file
@param directory: directory holding the file
@param defaultMode: permissions used to create the file. Default to
current permissions of the file if the file exists.
"""
self.directory = directory
self.name = name
self.path = os.path.join(directory, name)
if defaultMode is None and os.path.exists(self.path):
self.defaultMode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
else:
self.defaultMode = defaultMode
self._openFile()
def fromFullPath(cls, filename, *args, **kwargs):
"""
Construct a log file from a full file path.
"""
logPath = os.path.abspath(filename)
return cls(os.path.basename(logPath),
os.path.dirname(logPath), *args, **kwargs)
fromFullPath = classmethod(fromFullPath)
def shouldRotate(self):
"""
Override with a method that returns true if the log
should be rotated.
"""
raise NotImplementedError
def _openFile(self):
"""
Open the log file.
The log file is always opened in binary mode.
"""
self.closed = False
if os.path.exists(self.path):
self._file = open(self.path, "rb+", 0)
self._file.seek(0, 2)
else:
if self.defaultMode is not None:
# Set the lowest permissions
oldUmask = os.umask(0o777)
try:
self._file = open(self.path, "wb+", 0)
finally:
os.umask(oldUmask)
else:
self._file = open(self.path, "wb+", 0)
if self.defaultMode is not None:
try:
os.chmod(self.path, self.defaultMode)
except OSError:
# Probably /dev/null or something?
pass
def write(self, data):
"""
Write some data to the file.
@param data: The data to write. Text will be encoded as UTF-8.
@type data: L{bytes} or L{unicode}
"""
if self.shouldRotate():
self.flush()
self.rotate()
if isinstance(data, unicode):
data = data.encode('utf8')
self._file.write(data)
def flush(self):
"""
Flush the file.
"""
self._file.flush()
def close(self):
"""
Close the file.
The file cannot be used once it has been closed.
"""
self.closed = True
self._file.close()
self._file = None
def reopen(self):
"""
Reopen the log file. This is mainly useful if you use an external log
rotation tool, which moves the file out from under you.
Note that on Windows you probably need a specific API to rename the
file, as it's not supported to simply use os.rename, for example.
"""
self.close()
self._openFile()
def getCurrentLog(self):
"""
Return a LogReader for the current log file.
"""
return LogReader(self.path)
class LogFile(BaseLogFile):
"""
A log file that can be rotated.
A rotateLength of None disables automatic log rotation.
"""
def __init__(self, name, directory, rotateLength=1000000, defaultMode=None,
maxRotatedFiles=None):
"""
Create a log file rotating on length.
@param name: file name.
@type name: C{str}
@param directory: path of the log file.
@type directory: C{str}
@param rotateLength: size of the log file where it rotates. Default to
1M.
@type rotateLength: C{int}
@param defaultMode: mode used to create the file.
@type defaultMode: C{int}
@param maxRotatedFiles: if not None, max number of log files the class
creates. Warning: it removes all log files above this number.
@type maxRotatedFiles: C{int}
"""
BaseLogFile.__init__(self, name, directory, defaultMode)
self.rotateLength = rotateLength
self.maxRotatedFiles = maxRotatedFiles
def _openFile(self):
BaseLogFile._openFile(self)
self.size = self._file.tell()
def shouldRotate(self):
"""
Rotate when the log file size is larger than rotateLength.
"""
return self.rotateLength and self.size >= self.rotateLength
def getLog(self, identifier):
"""
Given an integer, return a LogReader for an old log file.
"""
filename = "%s.%d" % (self.path, identifier)
if not os.path.exists(filename):
raise ValueError("no such logfile exists")
return LogReader(filename)
def write(self, data):
"""
Write some data to the file.
"""
BaseLogFile.write(self, data)
self.size += len(data)
def rotate(self):
"""
Rotate the file and create a new one.
If it's not possible to open new logfile, this will fail silently,
and continue logging to old logfile.
"""
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
return
logs = self.listLogs()
logs.reverse()
for i in logs:
if self.maxRotatedFiles is not None and i >= self.maxRotatedFiles:
os.remove("%s.%d" % (self.path, i))
else:
os.rename("%s.%d" % (self.path, i), "%s.%d" % (self.path, i + 1))
self._file.close()
os.rename(self.path, "%s.1" % self.path)
self._openFile()
def listLogs(self):
"""
Return sorted list of integers - the old logs' identifiers.
"""
result = []
for name in glob.glob("%s.*" % self.path):
try:
counter = int(name.split('.')[-1])
if counter:
result.append(counter)
except ValueError:
pass
result.sort()
return result
def __getstate__(self):
state = BaseLogFile.__getstate__(self)
del state["size"]
return state
threadable.synchronize(LogFile)
class DailyLogFile(BaseLogFile):
"""A log file that is rotated daily (at or after midnight localtime)
"""
def _openFile(self):
BaseLogFile._openFile(self)
self.lastDate = self.toDate(os.stat(self.path)[8])
def shouldRotate(self):
"""Rotate when the date has changed since last write"""
return self.toDate() > self.lastDate
def toDate(self, *args):
"""Convert a unixtime to (year, month, day) localtime tuple,
or return the current (year, month, day) localtime tuple.
This function primarily exists so you may overload it with
gmtime, or some cruft to make unit testing possible.
"""
# primarily so this can be unit tested easily
return time.localtime(*args)[:3]
def suffix(self, tupledate):
"""Return the suffix given a (year, month, day) tuple or unixtime"""
try:
return '_'.join(map(str, tupledate))
except:
# try taking a float unixtime
return '_'.join(map(str, self.toDate(tupledate)))
def getLog(self, identifier):
"""Given a unix time, return a LogReader for an old log file."""
if self.toDate(identifier) == self.lastDate:
return self.getCurrentLog()
filename = "%s.%s" % (self.path, self.suffix(identifier))
if not os.path.exists(filename):
raise ValueError("no such logfile exists")
return LogReader(filename)
def write(self, data):
"""Write some data to the log file"""
BaseLogFile.write(self, data)
# Guard against a corner case where time.time()
# could potentially run backwards to yesterday.
# Primarily due to network time.
self.lastDate = max(self.lastDate, self.toDate())
def rotate(self):
"""Rotate the file and create a new one.
If it's not possible to open new logfile, this will fail silently,
and continue logging to old logfile.
"""
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
return
newpath = "%s.%s" % (self.path, self.suffix(self.lastDate))
if os.path.exists(newpath):
return
self._file.close()
os.rename(self.path, newpath)
self._openFile()
def __getstate__(self):
state = BaseLogFile.__getstate__(self)
del state["lastDate"]
return state
threadable.synchronize(DailyLogFile)
@_oldStyle
class LogReader:
"""Read from a log file."""
def __init__(self, name):
"""
Open the log file for reading.
The comments about binary-mode for L{BaseLogFile._openFile} also apply
here.
"""
self._file = open(name, "r")
def readLines(self, lines=10):
"""Read a list of lines from the log file.
This doesn't return all of the file's lines - call it multiple times.
"""
result = []
for i in range(lines):
line = self._file.readline()
if not line:
break
result.append(line)
return result
def close(self):
self._file.close()
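# Illustrative usage sketch, assuming this module is importable as
# twisted.python.logfile and that /tmp is a writable directory.
from twisted.python.logfile import LogFile
f = LogFile("example.log", "/tmp", rotateLength=1000, maxRotatedFiles=3)
for i in range(200):
    f.write("line %d\n" % (i,))        # rotates once the file exceeds 1000 bytes
f.close()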

View file

@@ -0,0 +1,789 @@
# -*- test-case-name: twisted.test.test_modules -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module aims to provide a unified, object-oriented view of Python's
runtime hierarchy.
Python is a very dynamic language with a wide variety of introspection utilities.
However, these utilities can be hard to use, because there is no consistent
API. The introspection API in python is made up of attributes (__name__,
__module__, func_name, etc) on instances, modules, classes and functions which
vary between those four types, utility modules such as 'inspect' which provide
some functionality, the 'imp' module, the "compiler" module, the semantics of
PEP 302 support, and setuptools, among other things.
At the top, you have "PythonPath", an abstract representation of sys.path which
includes methods to locate top-level modules, with or without loading them.
The top-level exposed functions in this module for accessing the system path
are "walkModules", "iterModules", and "getModule".
From most to least specific, here are the objects provided::
PythonPath # sys.path
|
v
PathEntry # one entry on sys.path: an importer
|
v
PythonModule # a module or package that can be loaded
|
v
PythonAttribute # an attribute of a module (function or class)
|
v
PythonAttribute # an attribute of a function or class
|
v
...
Here's an example of idiomatic usage: this is what you would do to list all of
the modules outside the standard library's python-files directory::
import os
stdlibdir = os.path.dirname(os.__file__)
from twisted.python.modules import iterModules
for modinfo in iterModules():
if (modinfo.pathEntry.filePath.path != stdlibdir
and not modinfo.isPackage()):
print('unpackaged: %s: %s' % (
modinfo.name, modinfo.filePath.path))
@var theSystemPath: The very top of the Python object space.
@type: L{PythonPath}
"""
from __future__ import division, absolute_import, print_function
__metaclass__ = type
# let's try to keep path imports to a minimum...
from os.path import dirname, split as splitpath
import sys
import inspect
import warnings
import zipimport
from zope.interface import Interface, implementer
from twisted.python.compat import nativeString
from twisted.python.components import registerAdapter
from twisted.python.filepath import FilePath, UnlistableError
from twisted.python.reflect import namedAny
from twisted.python.zippath import ZipArchive
_nothing = object()
PYTHON_EXTENSIONS = ['.py']
OPTIMIZED_MODE = __doc__ is None
if OPTIMIZED_MODE:
PYTHON_EXTENSIONS.append('.pyo')
else:
PYTHON_EXTENSIONS.append('.pyc')
def _isPythonIdentifier(string):
"""
cheezy fake test for proper identifier-ness.
@param string: a L{str} which might or might not be a valid python
identifier.
@return: True or False
"""
textString = nativeString(string)
return (' ' not in textString and
'.' not in textString and
'-' not in textString)
def _isPackagePath(fpath):
# Determine if a FilePath-like object is a Python package. TODO: deal with
# __init__module.(so|dll|pyd)?
extless = fpath.splitext()[0]
basend = splitpath(extless)[1]
return basend == "__init__"
class _ModuleIteratorHelper:
"""
This mixin provides common behavior between python module and path entries,
since the mechanism for searching sys.path and __path__ attributes is
remarkably similar.
"""
def iterModules(self):
"""
Loop over the modules present below this entry or package on PYTHONPATH.
For modules which are not packages, this will yield nothing.
For packages and path entries, this will only yield modules one level
down; i.e. if there is a package a.b.c, iterModules on a will only
return a.b. If you want to descend deeply, use walkModules.
@return: a generator which yields PythonModule instances that describe
modules which can be, or have been, imported.
"""
yielded = {}
if not self.filePath.exists():
return
for placeToLook in self._packagePaths():
try:
children = sorted(placeToLook.children())
except UnlistableError:
continue
for potentialTopLevel in children:
ext = potentialTopLevel.splitext()[1]
potentialBasename = potentialTopLevel.basename()[:-len(ext)]
if ext in PYTHON_EXTENSIONS:
# TODO: this should be a little choosier about which path entry
# it selects first, and it should do all the .so checking and
# crud
if not _isPythonIdentifier(potentialBasename):
continue
modname = self._subModuleName(potentialBasename)
if modname.split(".")[-1] == '__init__':
# This marks the directory as a package so it can't be
# a module.
continue
if modname not in yielded:
yielded[modname] = True
pm = PythonModule(modname, potentialTopLevel, self._getEntry())
assert pm != self
yield pm
else:
if (ext or not _isPythonIdentifier(potentialBasename)
or not potentialTopLevel.isdir()):
continue
modname = self._subModuleName(potentialTopLevel.basename())
for ext in PYTHON_EXTENSIONS:
initpy = potentialTopLevel.child("__init__"+ext)
if initpy.exists() and modname not in yielded:
yielded[modname] = True
pm = PythonModule(modname, initpy, self._getEntry())
assert pm != self
yield pm
break
def walkModules(self, importPackages=False):
"""
Similar to L{iterModules}, this yields self, and then every module in my
package or entry, and every submodule in each package or entry.
In other words, this is deep, and L{iterModules} is shallow.
"""
yield self
for package in self.iterModules():
for module in package.walkModules(importPackages=importPackages):
yield module
def _subModuleName(self, mn):
"""
This is a hook to provide packages with the ability to specify their names
as a prefix to submodules here.
"""
return mn
def _packagePaths(self):
"""
Implement in subclasses to specify where to look for modules.
@return: iterable of FilePath-like objects.
"""
raise NotImplementedError()
def _getEntry(self):
"""
Implement in subclasses to specify what path entry submodules will come
from.
@return: a PathEntry instance.
"""
raise NotImplementedError()
def __getitem__(self, modname):
"""
Retrieve a module from below this path or package.
@param modname: a str naming a module to be loaded. For entries, this
is a top-level, undotted package name, and for packages it is the name
of the module without the package prefix. For example, if you have a
PythonModule representing the 'twisted' package, you could use::
twistedPackageObj['python']['modules']
to retrieve this module.
@raise: KeyError if the module is not found.
@return: a PythonModule.
"""
for module in self.iterModules():
if module.name == self._subModuleName(modname):
return module
raise KeyError(modname)
def __iter__(self):
"""
Implemented to raise NotImplementedError for clarity, so that attempting to
loop over this object won't call __getitem__.
Note: in the future there might be some sensible default for iteration,
like 'walkEverything', so this is deliberately untested and undefined
behavior.
"""
raise NotImplementedError()
class PythonAttribute:
"""
I represent a function, class, or other object that is present.
@ivar name: the fully-qualified python name of this attribute.
@ivar onObject: a reference to a PythonModule or other PythonAttribute that
is this attribute's logical parent.
@ivar name: the fully qualified python name of the attribute represented by
this class.
"""
def __init__(self, name, onObject, loaded, pythonValue):
"""
Create a PythonAttribute. This is a private constructor. Do not construct
me directly, use PythonModule.iterAttributes.
@param name: the FQPN
@param onObject: see ivar
@param loaded: always True, for now
@param pythonValue: the value of the attribute we're pointing to.
"""
self.name = name
self.onObject = onObject
self._loaded = loaded
self.pythonValue = pythonValue
def __repr__(self):
return 'PythonAttribute<%r>'%(self.name,)
def isLoaded(self):
"""
Return a boolean describing whether the attribute this describes has
actually been loaded into memory by importing its module.
Note: this currently always returns true; there is no Python parser
support in this module yet.
"""
return self._loaded
def load(self, default=_nothing):
"""
Load the value associated with this attribute.
@return: an arbitrary Python object, or 'default' if there is an error
loading it.
"""
return self.pythonValue
def iterAttributes(self):
for name, val in inspect.getmembers(self.load()):
yield PythonAttribute(self.name+'.'+name, self, True, val)
class PythonModule(_ModuleIteratorHelper):
"""
Representation of a module which could be imported from sys.path.
@ivar name: the fully qualified python name of this module.
@ivar filePath: a FilePath-like object which points to the location of this
module.
@ivar pathEntry: a L{PathEntry} instance which this module was located
from.
"""
def __init__(self, name, filePath, pathEntry):
"""
Create a PythonModule. Do not construct this directly, instead inspect a
PythonPath or other PythonModule instances.
@param name: see ivar
@param filePath: see ivar
@param pathEntry: see ivar
"""
_name = nativeString(name)
assert not _name.endswith(".__init__")
self.name = _name
self.filePath = filePath
self.parentPath = filePath.parent()
self.pathEntry = pathEntry
def _getEntry(self):
return self.pathEntry
def __repr__(self):
"""
Return a string representation including the module name.
"""
return 'PythonModule<%r>' % (self.name,)
def isLoaded(self):
"""
Determine if the module is loaded into sys.modules.
@return: a boolean: true if loaded, false if not.
"""
return self.pathEntry.pythonPath.moduleDict.get(self.name) is not None
def iterAttributes(self):
"""
List all the attributes defined in this module.
Note: Future work is planned here to make it possible to list python
attributes on a module without loading the module by inspecting ASTs or
bytecode, but currently any iteration of PythonModule objects insists
they must be loaded, and will use inspect.getmodule.
@raise NotImplementedError: if this module is not loaded.
@return: a generator yielding PythonAttribute instances describing the
attributes of this module.
"""
if not self.isLoaded():
raise NotImplementedError(
"You can't load attributes from non-loaded modules yet.")
for name, val in inspect.getmembers(self.load()):
yield PythonAttribute(self.name+'.'+name, self, True, val)
def isPackage(self):
"""
Returns true if this module is also a package, and might yield something
from iterModules.
"""
return _isPackagePath(self.filePath)
def load(self, default=_nothing):
"""
Load this module.
@param default: if specified, the value to return in case of an error.
@return: a genuine python module.
@raise: any type of exception. Importing modules is a risky business;
the errors of any code run at module scope may be raised from here, as
well as ImportError if something bizarre happened to the system path
between the discovery of this PythonModule object and the attempt to
import it. If you specify a default, the error will be swallowed
entirely, and not logged.
@rtype: types.ModuleType.
"""
try:
return self.pathEntry.pythonPath.moduleLoader(self.name)
except: # this needs more thought...
if default is not _nothing:
return default
raise
def __eq__(self, other):
"""
PythonModules with the same name are equal.
"""
if not isinstance(other, PythonModule):
return False
return other.name == self.name
def __ne__(self, other):
"""
PythonModules with different names are not equal.
"""
if not isinstance(other, PythonModule):
return True
return other.name != self.name
def walkModules(self, importPackages=False):
if importPackages and self.isPackage():
self.load()
return super(PythonModule, self).walkModules(importPackages=importPackages)
def _subModuleName(self, mn):
"""
submodules of this module are prefixed with our name.
"""
return self.name + '.' + mn
def _packagePaths(self):
"""
Yield a sequence of FilePath-like objects which represent path segments.
"""
if not self.isPackage():
return
if self.isLoaded():
load = self.load()
if hasattr(load, '__path__'):
for fn in load.__path__:
if fn == self.parentPath.path:
# this should _really_ exist.
assert self.parentPath.exists()
yield self.parentPath
else:
smp = self.pathEntry.pythonPath._smartPath(fn)
if smp.exists():
yield smp
else:
yield self.parentPath
class PathEntry(_ModuleIteratorHelper):
"""
I am a proxy for a single entry on sys.path.
@ivar filePath: a FilePath-like object pointing at the filesystem location
or archive file where this path entry is stored.
@ivar pythonPath: a PythonPath instance.
"""
def __init__(self, filePath, pythonPath):
"""
Create a PathEntry. This is a private constructor.
"""
self.filePath = filePath
self.pythonPath = pythonPath
def _getEntry(self):
return self
def __repr__(self):
return 'PathEntry<%r>' % (self.filePath,)
def _packagePaths(self):
yield self.filePath
class IPathImportMapper(Interface):
"""
This is an internal interface, used to map importers to factories for
FilePath-like objects.
"""
def mapPath(self, pathLikeString):
"""
Return a FilePath-like object.
@param pathLikeString: a path-like string, like one that might be
passed to an import hook.
@return: a L{FilePath}, or something like it (currently only a
L{ZipPath}, but more might be added later).
"""
@implementer(IPathImportMapper)
class _DefaultMapImpl:
""" Wrapper for the default importer, i.e. None. """
def mapPath(self, fsPathString):
return FilePath(fsPathString)
_theDefaultMapper = _DefaultMapImpl()
@implementer(IPathImportMapper)
class _ZipMapImpl:
""" IPathImportMapper implementation for zipimport.ZipImporter. """
def __init__(self, importer):
self.importer = importer
def mapPath(self, fsPathString):
"""
Map the given FS path to a ZipPath, by looking at the ZipImporter's
"archive" attribute and using it as our ZipArchive root, then walking
down into the archive from there.
@return: a L{zippath.ZipPath} or L{zippath.ZipArchive} instance.
"""
za = ZipArchive(self.importer.archive)
myPath = FilePath(self.importer.archive)
itsPath = FilePath(fsPathString)
if myPath == itsPath:
return za
# This is NOT a general-purpose rule for sys.path or __file__:
# zipimport specifically uses regular OS path syntax in its
# pathnames, even though zip files specify that slashes are always
# the separator, regardless of platform.
segs = itsPath.segmentsFrom(myPath)
zp = za
for seg in segs:
zp = zp.child(seg)
return zp
registerAdapter(_ZipMapImpl, zipimport.zipimporter, IPathImportMapper)
def _defaultSysPathFactory():
"""
Provide the default behavior of PythonPath's sys.path factory, which is to
return the current value of sys.path.
@return: L{sys.path}
"""
return sys.path
class PythonPath:
"""
I represent the very top of the Python object-space, the module list in
C{sys.path} and the modules list in C{sys.modules}.
@ivar _sysPath: A sequence of strings like C{sys.path}. This attribute is
read-only.
@ivar sysPath: The current value of the module search path list.
@type sysPath: C{list}
@ivar moduleDict: A dictionary mapping string module names to module
objects, like C{sys.modules}.
@ivar sysPathHooks: A list of PEP-302 path hooks, like C{sys.path_hooks}.
@ivar moduleLoader: A function that takes a fully-qualified python name and
returns a module, like L{twisted.python.reflect.namedAny}.
"""
def __init__(self,
sysPath=None,
moduleDict=sys.modules,
sysPathHooks=sys.path_hooks,
importerCache=sys.path_importer_cache,
moduleLoader=namedAny,
sysPathFactory=None):
"""
Create a PythonPath. You almost certainly want to use
modules.theSystemPath, or its aliased methods, rather than creating a
new instance yourself, though.
All parameters are optional, and if unspecified, will use 'system'
equivalents that make this PythonPath like the global L{theSystemPath}
instance.
@param sysPath: a sys.path-like list to use for this PythonPath, to
specify where to load modules from.
@param moduleDict: a sys.modules-like dictionary to use for keeping
track of what modules this PythonPath has loaded.
@param sysPathHooks: sys.path_hooks-like list of PEP-302 path hooks to
be used for this PythonPath, to determine which importers should be
used.
@param importerCache: a sys.path_importer_cache-like list of PEP-302
importers. This will be used in conjunction with the given
sysPathHooks.
@param moduleLoader: a module loader function which takes a string and
returns a module. That is to say, it is like L{namedAny} - *not* like
L{__import__}.
@param sysPathFactory: a 0-argument callable which returns the current
value of a sys.path-like list of strings. Specify either this, or
sysPath, not both. This alternative interface is provided because the
way the Python import mechanism works, you can re-bind the 'sys.path'
name and that is what is used for current imports, so it must be a
factory rather than a value to deal with modification by rebinding
rather than modification by mutation. Note: it is not recommended to
rebind sys.path. Although this mechanism can deal with that, it is a
subtle point which is easy for tools that interact with sys.path to miss.
"""
if sysPath is not None:
sysPathFactory = lambda : sysPath
elif sysPathFactory is None:
sysPathFactory = _defaultSysPathFactory
self._sysPathFactory = sysPathFactory
self._sysPath = sysPath
self.moduleDict = moduleDict
self.sysPathHooks = sysPathHooks
self.importerCache = importerCache
self.moduleLoader = moduleLoader
def _getSysPath(self):
"""
Retrieve the current value of the module search path list.
"""
return self._sysPathFactory()
sysPath = property(_getSysPath)
def _findEntryPathString(self, modobj):
"""
Determine where a given Python module object came from by looking at path
entries.
"""
topPackageObj = modobj
while '.' in topPackageObj.__name__:
topPackageObj = self.moduleDict['.'.join(
topPackageObj.__name__.split('.')[:-1])]
if _isPackagePath(FilePath(topPackageObj.__file__)):
# if package 'foo' is on sys.path at /a/b/foo, package 'foo's
# __file__ will be /a/b/foo/__init__.py, and we are looking for
# /a/b here, the path-entry; so go up two steps.
rval = dirname(dirname(topPackageObj.__file__))
else:
# the module is completely top-level, not within any packages. The
# path entry it's on is just its dirname.
rval = dirname(topPackageObj.__file__)
# There are probably some awful tricks that an importer could pull
# which would break this, so let's just make sure... it's a loaded
# module after all, which means that its path MUST be in
# path_importer_cache according to PEP 302 -glyph
if rval not in self.importerCache:
warnings.warn(
"%s (for module %s) not in path importer cache "
"(PEP 302 violation - check your local configuration)." % (
rval, modobj.__name__),
stacklevel=3)
return rval
def _smartPath(self, pathName):
"""
Given a path entry from sys.path which may refer to an importer,
return the appropriate FilePath-like instance.
@param pathName: a str describing the path.
@return: a FilePath-like object.
"""
importr = self.importerCache.get(pathName, _nothing)
if importr is _nothing:
for hook in self.sysPathHooks:
try:
importr = hook(pathName)
except ImportError:
pass
if importr is _nothing: # still
importr = None
return IPathImportMapper(importr, _theDefaultMapper).mapPath(pathName)
def iterEntries(self):
"""
Iterate the entries on my sysPath.
@return: a generator yielding PathEntry objects
"""
for pathName in self.sysPath:
fp = self._smartPath(pathName)
yield PathEntry(fp, self)
def __getitem__(self, modname):
"""
Get a python module by its given fully-qualified name.
@param modname: The fully-qualified Python module name to load.
@type modname: C{str}
@return: an object representing the module identified by C{modname}
@rtype: L{PythonModule}
@raise KeyError: if the module name is not a valid module name, or no
such module can be identified as loadable.
"""
# See if the module is already somewhere in Python-land.
moduleObject = self.moduleDict.get(modname)
if moduleObject is not None:
# we need 2 paths; one of the path entry and one for the module.
pe = PathEntry(
self._smartPath(
self._findEntryPathString(moduleObject)),
self)
mp = self._smartPath(moduleObject.__file__)
return PythonModule(modname, mp, pe)
# Recurse if we're trying to get a submodule.
if '.' in modname:
pkg = self
for name in modname.split('.'):
pkg = pkg[name]
return pkg
# Finally do the slowest possible thing and iterate
for module in self.iterModules():
if module.name == modname:
return module
raise KeyError(modname)
def __contains__(self, module):
"""
Check to see whether or not a module exists on my import path.
@param module: The name of the module to look for on my import path.
@type module: C{str}
"""
try:
self.__getitem__(module)
return True
except KeyError:
return False
def __repr__(self):
"""
Display my sysPath and moduleDict in a string representation.
"""
return "PythonPath(%r,%r)" % (self.sysPath, self.moduleDict)
def iterModules(self):
"""
Yield all top-level modules on my sysPath.
"""
for entry in self.iterEntries():
for module in entry.iterModules():
yield module
def walkModules(self, importPackages=False):
"""
Similar to L{iterModules}, this yields every module on the path, then every
submodule in each package or entry.
"""
for package in self.iterModules():
for module in package.walkModules(importPackages=False):
yield module
theSystemPath = PythonPath()
def walkModules(importPackages=False):
"""
Deeply iterate all modules on the global python path.
@param importPackages: Import packages as they are seen.
"""
return theSystemPath.walkModules(importPackages=importPackages)
def iterModules():
"""
Iterate all modules and top-level packages on the global Python path, but
do not descend into packages.
"""
return theSystemPath.iterModules()
def getModule(moduleName):
"""
Retrieve a module from the system path.
"""
return theSystemPath[moduleName]
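# Illustrative usage sketch, assuming this module is importable as
# twisted.python.modules: list the submodules of an importable package.
from twisted.python.modules import getModule
emailPackage = getModule("email")      # any importable package works here
for submodule in emailPackage.iterModules():
    print(submodule.name, submodule.filePath.path)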

View file

@@ -0,0 +1,75 @@
# -*- test-case-name: twisted.test.test_monkey -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
class MonkeyPatcher(object):
"""
Cover up attributes with new objects. Neat for monkey-patching things for
unit-testing purposes.
"""
def __init__(self, *patches):
# List of patches to apply in (obj, name, value).
self._patchesToApply = []
# List of the original values for things that have been patched.
# (obj, name, value) format.
self._originals = []
for patch in patches:
self.addPatch(*patch)
def addPatch(self, obj, name, value):
"""
Add a patch so that the attribute C{name} on C{obj} will be assigned to
C{value} when C{patch} is called or during C{runWithPatches}.
You can restore the original values with a call to restore().
"""
self._patchesToApply.append((obj, name, value))
def _alreadyPatched(self, obj, name):
"""
Has the C{name} attribute of C{obj} already been patched by this
patcher?
"""
for o, n, v in self._originals:
if (o, n) == (obj, name):
return True
return False
def patch(self):
"""
Apply all of the patches that have been specified with L{addPatch}.
Reverse this operation using L{restore}.
"""
for obj, name, value in self._patchesToApply:
if not self._alreadyPatched(obj, name):
self._originals.append((obj, name, getattr(obj, name)))
setattr(obj, name, value)
def restore(self):
"""
Restore all original values to any patched objects.
"""
while self._originals:
obj, name, value = self._originals.pop()
setattr(obj, name, value)
def runWithPatches(self, f, *args, **kw):
"""
Apply each patch already specified. Then run the function f with the
given args and kwargs. Restore everything when done.
"""
self.patch()
try:
return f(*args, **kw)
finally:
self.restore()
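# Illustrative usage sketch, assuming this module is importable as
# twisted.python.monkey: temporarily replace an attribute during one call.
import math
from twisted.python.monkey import MonkeyPatcher
patcher = MonkeyPatcher((math, "pi", 3))
print(patcher.runWithPatches(lambda: math.pi))   # prints 3 while patched
print(math.pi)                                   # original value is restored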

View file

@@ -0,0 +1,51 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utilities for dealing with processes.
"""
from __future__ import division, absolute_import
import os
def which(name, flags=os.X_OK):
"""
Search PATH for executable files with the given name.
On newer versions of MS-Windows, the PATHEXT environment variable will be
set to the list of file extensions for files considered executable. This
will normally include things like ".EXE". This function will also find files
with the given name ending with any of these extensions.
On MS-Windows the only flag that has any meaning is os.F_OK. Any other
flags will be ignored.
@type name: C{str}
@param name: The name for which to search.
@type flags: C{int}
@param flags: Arguments to L{os.access}.
@rtype: C{list}
@return: A list of the full paths to files found, in the order in which they
were found.
"""
result = []
exts = list(filter(None, os.environ.get('PATHEXT', '').split(os.pathsep)))
path = os.environ.get('PATH', None)
if path is None:
return []
for p in os.environ.get('PATH', '').split(os.pathsep):
p = os.path.join(p, name)
if os.access(p, flags):
result.append(p)
for e in exts:
pext = p + e
if os.access(pext, flags):
result.append(pext)
return result
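# Illustrative usage sketch, assuming this module is importable as
# twisted.python.procutils: locate an executable on the current PATH.
from twisted.python.procutils import which
matches = which("python")
if matches:
    print("first match:", matches[0])
else:
    print("no executable named 'python' found on PATH")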

View file

@@ -0,0 +1,150 @@
# -*- test-case-name: twisted.test.test_randbytes -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Cryptographically secure random implementation, with fallback on normal random.
"""
from __future__ import division, absolute_import
import warnings, os, random, string
from twisted.python.compat import _PY3
getrandbits = getattr(random, 'getrandbits', None)
if _PY3:
_fromhex = bytes.fromhex
else:
def _fromhex(hexBytes):
return hexBytes.decode('hex')
class SecureRandomNotAvailable(RuntimeError):
"""
Exception raised when no secure random algorithm is found.
"""
class SourceNotAvailable(RuntimeError):
"""
Internal exception used when a specific random source is not available.
"""
class RandomFactory(object):
"""
Factory providing L{secureRandom} and L{insecureRandom} methods.
You shouldn't have to instantiate this class, use the module level
functions instead: it is an implementation detail and could be removed or
changed arbitrarily.
"""
# This variable is no longer used, and will eventually be removed.
randomSources = ()
getrandbits = getrandbits
def _osUrandom(self, nbytes):
"""
Wrapper around C{os.urandom} that cleanly manages its absence.
"""
try:
return os.urandom(nbytes)
except (AttributeError, NotImplementedError) as e:
raise SourceNotAvailable(e)
def secureRandom(self, nbytes, fallback=False):
"""
Return a number of secure random bytes.
@param nbytes: number of bytes to generate.
@type nbytes: C{int}
@param fallback: Whether the function should fall back on a non-secure
random source or not. Defaults to C{False}.
@type fallback: C{bool}
@return: a string of random bytes.
@rtype: C{str}
"""
try:
return self._osUrandom(nbytes)
except SourceNotAvailable:
pass
if fallback:
warnings.warn(
"urandom unavailable - "
"proceeding with non-cryptographically secure random source",
category=RuntimeWarning,
stacklevel=2)
return self.insecureRandom(nbytes)
else:
raise SecureRandomNotAvailable("No secure random source available")
def _randBits(self, nbytes):
"""
Wrapper around C{random.getrandbits}.
"""
if self.getrandbits is not None:
n = self.getrandbits(nbytes * 8)
hexBytes = ("%%0%dx" % (nbytes * 2)) % n
return _fromhex(hexBytes)
raise SourceNotAvailable("random.getrandbits is not available")
if _PY3:
_maketrans = bytes.maketrans
def _randModule(self, nbytes):
"""
Wrapper around the C{random} module.
"""
return b"".join([
bytes([random.choice(self._BYTES)]) for i in range(nbytes)])
else:
_maketrans = string.maketrans
def _randModule(self, nbytes):
"""
Wrapper around the C{random} module.
"""
return b"".join([
random.choice(self._BYTES) for i in range(nbytes)])
_BYTES = _maketrans(b'', b'')
def insecureRandom(self, nbytes):
"""
Return a number of non-secure random bytes.
@param nbytes: number of bytes to generate.
@type nbytes: C{int}
@return: a string of random bytes.
@rtype: C{str}
"""
for src in ("_randBits", "_randModule"):
try:
return getattr(self, src)(nbytes)
except SourceNotAvailable:
pass
factory = RandomFactory()
secureRandom = factory.secureRandom
insecureRandom = factory.insecureRandom
del factory
__all__ = ["secureRandom", "insecureRandom", "SecureRandomNotAvailable"]
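A brief sketch of the module-level helpers, assuming the module is importable as twisted.python.randbytes:

from twisted.python import randbytes

key = randbytes.secureRandom(16)       # 16 bytes straight from os.urandom()
nonce = randbytes.insecureRandom(8)    # getrandbits()/random source, not for keys
print(len(key), len(nonce))            # 16 8
# secureRandom(16, fallback=True) would warn and degrade instead of raising
# SecureRandomNotAvailable on platforms without os.urandom().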

View file

@ -0,0 +1,310 @@
# -*- test-case-name: twisted.test.test_rebuild -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
*Real* reloading support for Python.
"""
# System Imports
import sys
import types
import time
import linecache
from imp import reload
try:
# Python 2
from types import InstanceType
except ImportError:
# Python 3
pass
# Sibling Imports
from twisted.python import log, reflect
from twisted.python.compat import _PY3
lastRebuild = time.time()
def _isClassType(t):
"""
Compare to types.ClassType in a py2/3-compatible way.
Python 2 used comparison to types.ClassType to check for old-style
classes. Python 3 has no concept of old-style classes, so if
ClassType doesn't exist, it can't be an old-style class - return
False in that case.
Note that the type() of new-style classes is NOT ClassType, and
so this should return False for new-style classes in Python 2
as well.
"""
_ClassType = getattr(types, 'ClassType', None)
if _ClassType is None:
return False
return t == _ClassType
class Sensitive(object):
"""
A utility mixin that's sensitive to rebuilds.
This is a mixin for classes (usually those which represent collections of
callbacks) to make sure that their code is up-to-date before running.
"""
lastRebuild = lastRebuild
def needRebuildUpdate(self):
yn = (self.lastRebuild < lastRebuild)
return yn
def rebuildUpToDate(self):
self.lastRebuild = time.time()
def latestVersionOf(self, anObject):
"""
Get the latest version of an object.
This can handle just about anything callable; instances, functions,
methods, and classes.
"""
t = type(anObject)
if t == types.FunctionType:
return latestFunction(anObject)
elif t == types.MethodType:
if anObject.__self__ is None:
return getattr(anObject.im_class, anObject.__name__)
else:
return getattr(anObject.__self__, anObject.__name__)
elif not _PY3 and t == InstanceType:
# Kick it, if it's out of date.
getattr(anObject, 'nothing', None)
return anObject
elif _isClassType(t):
return latestClass(anObject)
else:
log.msg('warning returning anObject!')
return anObject
_modDictIDMap = {}
def latestFunction(oldFunc):
"""
Get the latest version of a function.
"""
# This may be CPython specific, since I believe jython instantiates a new
# module upon reload.
dictID = id(oldFunc.__globals__)
module = _modDictIDMap.get(dictID)
if module is None:
return oldFunc
return getattr(module, oldFunc.__name__)
def latestClass(oldClass):
"""
Get the latest version of a class.
"""
module = reflect.namedModule(oldClass.__module__)
newClass = getattr(module, oldClass.__name__)
newBases = [latestClass(base) for base in newClass.__bases__]
try:
# This makes old-style stuff work
newClass.__bases__ = tuple(newBases)
return newClass
except TypeError:
if newClass.__module__ in ("__builtin__", "builtins"):
# __builtin__ members can't be reloaded sanely
return newClass
ctor = type(newClass)
# The value of type(newClass) is the metaclass
# in both Python 2 and 3, except if it was old-style.
if _isClassType(ctor):
ctor = getattr(newClass, '__metaclass__', type)
return ctor(newClass.__name__, tuple(newBases),
dict(newClass.__dict__))
class RebuildError(Exception):
"""
Exception raised when trying to rebuild a class when doing so is not possible.
"""
def updateInstance(self):
"""
Updates an instance to be current.
"""
self.__class__ = latestClass(self.__class__)
def __injectedgetattr__(self, name):
"""
A getattr method to cause a class to be refreshed.
"""
if name == '__del__':
raise AttributeError("Without this, Python segfaults.")
updateInstance(self)
log.msg("(rebuilding stale {} instance ({}))".format(
reflect.qual(self.__class__), name))
result = getattr(self, name)
return result
def rebuild(module, doLog=1):
"""
Reload a module and do as much as possible to replace its references.
"""
global lastRebuild
lastRebuild = time.time()
if hasattr(module, 'ALLOW_TWISTED_REBUILD'):
# Is this module allowed to be rebuilt?
if not module.ALLOW_TWISTED_REBUILD:
raise RuntimeError("I am not allowed to be rebuilt.")
if doLog:
log.msg('Rebuilding {}...'.format(str(module.__name__)))
# Safely handle adapter re-registration
from twisted.python import components
components.ALLOW_DUPLICATES = True
d = module.__dict__
_modDictIDMap[id(d)] = module
newclasses = {}
classes = {}
functions = {}
values = {}
if doLog:
log.msg(' (scanning {}): '.format(str(module.__name__)))
for k, v in d.items():
if _isClassType(type(v)):
# ClassType exists on Python 2.x and earlier.
# Failure condition -- instances of classes with buggy
# __hash__/__cmp__ methods referenced at the module level...
if v.__module__ == module.__name__:
classes[v] = 1
if doLog:
log.logfile.write("c")
log.logfile.flush()
elif type(v) == types.FunctionType:
if v.__globals__ is module.__dict__:
functions[v] = 1
if doLog:
log.logfile.write("f")
log.logfile.flush()
elif isinstance(v, type):
if v.__module__ == module.__name__:
newclasses[v] = 1
if doLog:
log.logfile.write("o")
log.logfile.flush()
values.update(classes)
values.update(functions)
fromOldModule = values.__contains__
newclasses = newclasses.keys()
classes = classes.keys()
functions = functions.keys()
if doLog:
log.msg('')
log.msg(' (reload {})'.format(str(module.__name__)))
# Boom.
reload(module)
# Make sure that my traceback printing will at least be recent...
linecache.clearcache()
if doLog:
log.msg(' (cleaning {}): '.format(str(module.__name__)))
for clazz in classes:
if getattr(module, clazz.__name__) is clazz:
log.msg("WARNING: class {} not replaced by reload!".format(
reflect.qual(clazz)))
else:
if doLog:
log.logfile.write("x")
log.logfile.flush()
clazz.__bases__ = ()
clazz.__dict__.clear()
clazz.__getattr__ = __injectedgetattr__
clazz.__module__ = module.__name__
if newclasses:
import gc
for nclass in newclasses:
ga = getattr(module, nclass.__name__)
if ga is nclass:
log.msg("WARNING: new-class {} not replaced by reload!".format(
reflect.qual(nclass)))
else:
for r in gc.get_referrers(nclass):
if getattr(r, '__class__', None) is nclass:
r.__class__ = ga
if doLog:
log.msg('')
log.msg(' (fixing {}): '.format(str(module.__name__)))
modcount = 0
for mk, mod in sys.modules.items():
modcount = modcount + 1
if mod == module or mod is None:
continue
if not hasattr(mod, '__file__'):
# It's a builtin module; nothing to replace here.
continue
if hasattr(mod, '__bundle__'):
# PyObjC has a few buggy objects which segfault if you hash() them.
# It doesn't make sense to try rebuilding extension modules like
# this anyway, so don't try.
continue
changed = 0
for k, v in mod.__dict__.items():
try:
hash(v)
except Exception:
continue
if fromOldModule(v):
if _isClassType(type(v)):
if doLog:
log.logfile.write("c")
log.logfile.flush()
nv = latestClass(v)
else:
if doLog:
log.logfile.write("f")
log.logfile.flush()
nv = latestFunction(v)
changed = 1
setattr(mod, k, nv)
else:
# Replace bases of non-module classes just to be sure.
if _isClassType(type(v)):
for base in v.__bases__:
if fromOldModule(base):
latestClass(v)
if doLog and not changed and ((modcount % 10) == 0) :
log.logfile.write(".")
log.logfile.flush()
components.ALLOW_DUPLICATES = False
if doLog:
log.msg('')
log.msg(' Rebuilt {}.'.format(str(module.__name__)))
return module
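A self-contained sketch of how rebuild() behaves, under the assumption that this module is importable as twisted.python.rebuild; demo_mod is a throwaway module written to a temporary directory purely for illustration:

import os
import sys
import tempfile

from twisted.python.rebuild import rebuild

d = tempfile.mkdtemp()
with open(os.path.join(d, "demo_mod.py"), "w") as f:
    f.write("VALUE = 1\n")
sys.path.insert(0, d)

import demo_mod
print(demo_mod.VALUE)           # 1

with open(os.path.join(d, "demo_mod.py"), "w") as f:
    f.write("VALUE = 2\n")
rebuild(demo_mod, doLog=0)      # reload in place and repoint stale references
print(demo_mod.VALUE)           # 2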

View file

@ -0,0 +1,634 @@
# -*- test-case-name: twisted.test.test_reflect -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Standardized versions of various cool and/or strange things that you can do
with Python's reflection capabilities.
"""
from __future__ import division, absolute_import, print_function
import sys
import types
import os
import pickle
import weakref
import re
import traceback
from collections import deque
RegexType = type(re.compile(""))
from twisted.python.compat import reraise, nativeString, NativeStringIO
from twisted.python.compat import _PY3
from twisted.python import compat
from twisted.python.deprecate import _fullyQualifiedName as fullyQualifiedName
from twisted.python._oldstyle import _oldStyle
def prefixedMethodNames(classObj, prefix):
"""
Given a class object C{classObj}, returns a list of method names that match
the string C{prefix}.
@param classObj: A class object from which to collect method names.
@param prefix: A native string giving a prefix. Each method with a name
which begins with this prefix will be returned.
@type prefix: L{str}
@return: A list of the names of matching methods of C{classObj} (and base
classes of C{classObj}).
@rtype: L{list} of L{str}
"""
dct = {}
addMethodNamesToDict(classObj, dct, prefix)
return list(dct.keys())
def addMethodNamesToDict(classObj, dict, prefix, baseClass=None):
"""
This goes through C{classObj} (and its bases) and puts method names
starting with 'prefix' in 'dict' with a value of 1. If baseClass isn't
None, methods will only be added if classObj is-a baseClass.
If the class in question has the methods 'prefix_methodname' and
'prefix_methodname2', the resulting dict should look something like:
{"methodname": 1, "methodname2": 1}.
@param classObj: A class object from which to collect method names.
@param dict: A L{dict} which will be updated with the results of the
accumulation. Items are added to this dictionary, with method names as
keys and C{1} as values.
@type dict: L{dict}
@param prefix: A native string giving a prefix. Each method of C{classObj}
(and base classes of C{classObj}) with a name which begins with this
prefix will be returned.
@type prefix: L{str}
@param baseClass: A class object at which to stop searching upwards for new
methods. To collect all method names, do not pass a value for this
parameter.
@return: L{None}
"""
for base in classObj.__bases__:
addMethodNamesToDict(base, dict, prefix, baseClass)
if baseClass is None or baseClass in classObj.__bases__:
for name, method in classObj.__dict__.items():
optName = name[len(prefix):]
if ((type(method) is types.FunctionType)
and (name[:len(prefix)] == prefix)
and (len(optName))):
dict[optName] = 1
def prefixedMethods(obj, prefix=''):
"""
Given an object C{obj}, returns a list of method objects that match the
string C{prefix}.
@param obj: An arbitrary object from which to collect methods.
@param prefix: A native string giving a prefix. Each method of C{obj} with
a name which begins with this prefix will be returned.
@type prefix: L{str}
@return: A list of the matching method objects.
@rtype: L{list}
"""
dct = {}
accumulateMethods(obj, dct, prefix)
return list(dct.values())
def accumulateMethods(obj, dict, prefix='', curClass=None):
"""
Given an object C{obj}, add all methods that begin with C{prefix}.
@param obj: An arbitrary object to collect methods from.
@param dict: A L{dict} which will be updated with the results of the
accumulation. Items are added to this dictionary, with method names as
keys and corresponding instance method objects as values.
@type dict: L{dict}
@param prefix: A native string giving a prefix. Each method of C{obj} with
a name which begins with this prefix will be returned.
@type prefix: L{str}
@param curClass: The class in the inheritance hierarchy at which to start
collecting methods. Collection proceeds up. To collect all methods
from C{obj}, do not pass a value for this parameter.
@return: L{None}
"""
if not curClass:
curClass = obj.__class__
for base in curClass.__bases__:
# The implementation of the object class is different on PyPy vs.
# CPython. This has the side effect of making accumulateMethods()
# pick up object methods from all new-style classes -
# such as __getattribute__, etc.
# If we ignore 'object' when accumulating methods, we can get
# consistent behavior on Pypy and CPython.
if base is not object:
accumulateMethods(obj, dict, prefix, base)
for name, method in curClass.__dict__.items():
optName = name[len(prefix):]
if ((type(method) is types.FunctionType)
and (name[:len(prefix)] == prefix)
and (len(optName))):
dict[optName] = getattr(obj, name)
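A small sketch of the helpers above, assuming they are importable from twisted.python.reflect; Handler is a hypothetical class used only for illustration:

from twisted.python.reflect import prefixedMethodNames, prefixedMethods

class Handler(object):
    def cmd_start(self):
        return "started"
    def cmd_stop(self):
        return "stopped"
    def helper(self):
        return "not a command"

print(sorted(prefixedMethodNames(Handler, "cmd_")))              # ['start', 'stop']
print(sorted(m() for m in prefixedMethods(Handler(), "cmd_")))   # ['started', 'stopped']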
def namedModule(name):
"""
Return a module given its name.
"""
topLevel = __import__(name)
packages = name.split(".")[1:]
m = topLevel
for p in packages:
m = getattr(m, p)
return m
def namedObject(name):
"""
Get a fully named module-global object.
"""
classSplit = name.split('.')
module = namedModule('.'.join(classSplit[:-1]))
return getattr(module, classSplit[-1])
namedClass = namedObject # backwards compat
def requireModule(name, default=None):
"""
Try to import a module given its name, returning C{default} value if
C{ImportError} is raised during import.
@param name: Module name as it would have been passed to C{import}.
@type name: C{str}.
@param default: Value returned in case C{ImportError} is raised while
importing the module.
@return: Module or default value.
"""
try:
return namedModule(name)
except ImportError:
return default
class _NoModuleFound(Exception):
"""
No module was found because none exists.
"""
class InvalidName(ValueError):
"""
The given name is not a dot-separated list of Python objects.
"""
class ModuleNotFound(InvalidName):
"""
The module associated with the given name doesn't exist and it can't be
imported.
"""
class ObjectNotFound(InvalidName):
"""
The object associated with the given name doesn't exist and it can't be
imported.
"""
def _importAndCheckStack(importName):
"""
Import the given name as a module, then walk the stack to determine whether
the failure was the module not existing, or some code in the module (for
example a dependent import) failing. This can be helpful to determine
whether any actual application code was run. For example, to distinguish
administrative error (entering the wrong module name), from programmer
error (writing buggy code in a module that fails to import).
@param importName: The name of the module to import.
@type importName: C{str}
@raise Exception: if something bad happens. This can be any type of
exception, since nobody knows what loading some arbitrary code might
do.
@raise _NoModuleFound: if no module was found.
"""
try:
return __import__(importName)
except ImportError:
excType, excValue, excTraceback = sys.exc_info()
while excTraceback:
execName = excTraceback.tb_frame.f_globals["__name__"]
# in Python 2 execName is None when an ImportError is encountered,
# where in Python 3 execName is equal to the importName.
if execName is None or execName == importName:
reraise(excValue, excTraceback)
excTraceback = excTraceback.tb_next
raise _NoModuleFound()
def namedAny(name):
"""
Retrieve a Python object by its fully qualified name from the global Python
module namespace. The first part of the name, that describes a module,
will be discovered and imported. Each subsequent part of the name is
treated as the name of an attribute of the object specified by all of the
name which came before it. For example, the fully-qualified name of this
object is 'twisted.python.reflect.namedAny'.
@type name: L{str}
@param name: The name of the object to return.
@raise InvalidName: If the name is an empty string, starts or ends with
a '.', or is otherwise syntactically incorrect.
@raise ModuleNotFound: If the name is syntactically correct but the
module it specifies cannot be imported because it does not appear to
exist.
@raise ObjectNotFound: If the name is syntactically correct, includes at
least one '.', but the module it specifies cannot be imported because
it does not appear to exist.
@raise AttributeError: If an attribute of an object along the way cannot be
accessed, or a module along the way is not found.
@return: the Python object identified by 'name'.
"""
if not name:
raise InvalidName('Empty module name')
names = name.split('.')
# if the name starts or ends with a '.' or contains '..', the __import__
# will raise an 'Empty module name' error. This will provide a better error
# message.
if '' in names:
raise InvalidName(
"name must be a string giving a '.'-separated list of Python "
"identifiers, not %r" % (name,))
topLevelPackage = None
moduleNames = names[:]
while not topLevelPackage:
if moduleNames:
trialname = '.'.join(moduleNames)
try:
topLevelPackage = _importAndCheckStack(trialname)
except _NoModuleFound:
moduleNames.pop()
else:
if len(names) == 1:
raise ModuleNotFound("No module named %r" % (name,))
else:
raise ObjectNotFound('%r does not name an object' % (name,))
obj = topLevelPackage
for n in names[1:]:
obj = getattr(obj, n)
return obj
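A quick sketch of namedAny, assuming it is importable from twisted.python.reflect; the dotted names refer to standard-library objects:

from twisted.python.reflect import namedAny

print(namedAny("os.path.join"))              # <function join ...>
print(namedAny("collections.OrderedDict"))   # <class 'collections.OrderedDict'>
try:
    namedAny("collections.NoSuchThing")
except AttributeError:
    print("module imported, but the attribute does not exist")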
def filenameToModuleName(fn):
"""
Convert a name in the filesystem to the name of the Python module it is.
This is aggressive about getting a module name back from a file; it will
always return a string. Aggressive means 'sometimes wrong'; it won't look
at the Python path or try to do any error checking: don't use this method
unless you already know that the filename you're talking about is a Python
module.
@param fn: A filesystem path to a module or package; C{bytes} on Python 2,
C{bytes} or C{unicode} on Python 3.
@return: A hopefully importable module name.
@rtype: C{str}
"""
if isinstance(fn, bytes):
initPy = b"__init__.py"
else:
initPy = "__init__.py"
fullName = os.path.abspath(fn)
base = os.path.basename(fn)
if not base:
# this happens when fn ends with a path separator, just skip it
base = os.path.basename(fn[:-1])
modName = nativeString(os.path.splitext(base)[0])
while 1:
fullName = os.path.dirname(fullName)
if os.path.exists(os.path.join(fullName, initPy)):
modName = "%s.%s" % (
nativeString(os.path.basename(fullName)),
nativeString(modName))
else:
break
return modName
def qual(clazz):
"""
Return full import path of a class.
"""
return clazz.__module__ + '.' + clazz.__name__
def _determineClass(x):
try:
return x.__class__
except:
return type(x)
def _determineClassName(x):
c = _determineClass(x)
try:
return c.__name__
except:
try:
return str(c)
except:
return '<BROKEN CLASS AT 0x%x>' % id(c)
def _safeFormat(formatter, o):
"""
Helper function for L{safe_repr} and L{safe_str}.
Called when C{repr} or C{str} fail. Returns a string containing info about
C{o} and the latest exception.
@param formatter: C{str} or C{repr}.
@type formatter: C{type}
@param o: Any object.
@rtype: C{str}
@return: A string containing information about C{o} and the raised
exception.
"""
io = NativeStringIO()
traceback.print_exc(file=io)
className = _determineClassName(o)
tbValue = io.getvalue()
return "<%s instance at 0x%x with %s error:\n %s>" % (
className, id(o), formatter.__name__, tbValue)
def safe_repr(o):
"""
Returns a string representation of an object, or a string containing a
traceback, if that object's __repr__ raised an exception.
@param o: Any object.
@rtype: C{str}
"""
try:
return repr(o)
except:
return _safeFormat(repr, o)
def safe_str(o):
"""
Returns a string representation of an object, or a string containing a
traceback, if that object's __str__ raised an exception.
@param o: Any object.
@rtype: C{str}
"""
if _PY3 and isinstance(o, bytes):
# If o is bytes and seems to holds a utf-8 encoded string,
# convert it to str.
try:
return o.decode('utf-8')
except:
pass
try:
return str(o)
except:
return _safeFormat(str, o)
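A short sketch showing why these helpers exist, assuming they are importable from twisted.python.reflect; Broken is a deliberately misbehaving class:

from twisted.python.reflect import safe_repr, safe_str

class Broken(object):
    def __repr__(self):
        raise RuntimeError("repr is broken")

print(safe_repr(Broken()))   # "<Broken instance at 0x... with repr error: ...>"
print(safe_str(Broken()))    # same idea for str(), never raises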
@_oldStyle
class QueueMethod:
"""
I represent a method that doesn't exist yet.
"""
def __init__(self, name, calls):
self.name = name
self.calls = calls
def __call__(self, *args):
self.calls.append((self.name, args))
def fullFuncName(func):
qualName = (str(pickle.whichmodule(func, func.__name__)) + '.' + func.__name__)
if namedObject(qualName) is not func:
raise Exception("Couldn't find %s as %s." % (func, qualName))
return qualName
def getClass(obj):
"""
Return the class or type of object 'obj'.
Returns sensible result for oldstyle and newstyle instances and types.
"""
if hasattr(obj, '__class__'):
return obj.__class__
else:
return type(obj)
def accumulateClassDict(classObj, attr, adict, baseClass=None):
"""
Accumulate all attributes of a given name in a class hierarchy into a single dictionary.
Assuming all class attributes of this name are dictionaries.
If any of the dictionaries being accumulated have the same key, the
one highest in the class hierarchy wins.
(XXX: If "highest" means "closest to the starting class".)
Ex::
class Soy:
properties = {"taste": "bland"}
class Plant:
properties = {"colour": "green"}
class Seaweed(Plant):
pass
class Lunch(Soy, Seaweed):
properties = {"vegan": 1}
dct = {}
accumulateClassDict(Lunch, "properties", dct)
print(dct)
{"taste": "bland", "colour": "green", "vegan": 1}
"""
for base in classObj.__bases__:
accumulateClassDict(base, attr, adict)
if baseClass is None or baseClass in classObj.__bases__:
adict.update(classObj.__dict__.get(attr, {}))
def accumulateClassList(classObj, attr, listObj, baseClass=None):
"""
Accumulate all attributes of a given name in a class hierarchy into a single list.
Assuming all class attributes of this name are lists.
"""
for base in classObj.__bases__:
accumulateClassList(base, attr, listObj)
if baseClass is None or baseClass in classObj.__bases__:
listObj.extend(classObj.__dict__.get(attr, []))
def isSame(a, b):
return (a is b)
def isLike(a, b):
return (a == b)
def modgrep(goal):
return objgrep(sys.modules, goal, isLike, 'sys.modules')
def isOfType(start, goal):
return ((type(start) is goal) or
(isinstance(start, compat.InstanceType) and
start.__class__ is goal))
def findInstances(start, t):
return objgrep(start, t, isOfType)
if not _PY3:
# The function objgrep() currently doesn't work on Python 3 due to some
# edge cases, as described in #6986.
# twisted.python.reflect is quite important and objgrep is not used in
# Twisted itself, so in #5929, we decided to port everything but objgrep()
# and to finish the porting in #6986
def objgrep(start, goal, eq=isLike, path='', paths=None, seen=None,
showUnknowns=0, maxDepth=None):
"""
An insanely CPU-intensive process for finding stuff.
"""
if paths is None:
paths = []
if seen is None:
seen = {}
if eq(start, goal):
paths.append(path)
if id(start) in seen:
if seen[id(start)] is start:
return
if maxDepth is not None:
if maxDepth == 0:
return
maxDepth -= 1
seen[id(start)] = start
# Make an alias for those arguments which are passed recursively to
# objgrep for container objects.
args = (paths, seen, showUnknowns, maxDepth)
if isinstance(start, dict):
for k, v in start.items():
objgrep(k, goal, eq, path+'{'+repr(v)+'}', *args)
objgrep(v, goal, eq, path+'['+repr(k)+']', *args)
elif isinstance(start, (list, tuple, deque)):
for idx, _elem in enumerate(start):
objgrep(start[idx], goal, eq, path+'['+str(idx)+']', *args)
elif isinstance(start, types.MethodType):
objgrep(start.__self__, goal, eq, path+'.__self__', *args)
objgrep(start.__func__, goal, eq, path+'.__func__', *args)
objgrep(start.__self__.__class__, goal, eq,
path+'.__self__.__class__', *args)
elif hasattr(start, '__dict__'):
for k, v in start.__dict__.items():
objgrep(v, goal, eq, path+'.'+k, *args)
if isinstance(start, compat.InstanceType):
objgrep(start.__class__, goal, eq, path+'.__class__', *args)
elif isinstance(start, weakref.ReferenceType):
objgrep(start(), goal, eq, path+'()', *args)
elif (isinstance(start, (compat.StringType,
int, types.FunctionType,
types.BuiltinMethodType, RegexType, float,
type(None), compat.FileType)) or
type(start).__name__ in ('wrapper_descriptor',
'method_descriptor', 'member_descriptor',
'getset_descriptor')):
pass
elif showUnknowns:
print('unknown type', type(start), start)
return paths
__all__ = [
'InvalidName', 'ModuleNotFound', 'ObjectNotFound',
'QueueMethod',
'namedModule', 'namedObject', 'namedClass', 'namedAny', 'requireModule',
'safe_repr', 'safe_str', 'prefixedMethodNames', 'addMethodNamesToDict',
'prefixedMethods', 'accumulateMethods', 'fullFuncName', 'qual', 'getClass',
'accumulateClassDict', 'accumulateClassList', 'isSame', 'isLike',
'modgrep', 'isOfType', 'findInstances', 'objgrep', 'filenameToModuleName',
'fullyQualifiedName']
if _PY3:
# This is to be removed when fixing #6986
__all__.remove('objgrep')

View file

@ -0,0 +1,67 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A release-automation toolkit.
Don't use this outside of Twisted.
Maintainer: Christopher Armstrong
"""
from __future__ import print_function
import os
from twisted.python.compat import raw_input
# errors
class DirectoryExists(OSError):
"""
Some directory exists when it shouldn't.
"""
pass
class DirectoryDoesntExist(OSError):
"""
Some directory doesn't exist when it should.
"""
pass
class CommandFailed(OSError):
pass
# utilities
def sh(command, null=True, prompt=False):
"""
I'll try to execute C{command}, and if C{prompt} is true, I'll
ask before running it. If the command returns something other
than 0, I'll raise C{CommandFailed(command)}.
"""
print("--$", command)
if prompt:
if raw_input("run ?? ").startswith('n'):
return
if null:
command = "%s > /dev/null" % command
if os.system(command) != 0:
raise CommandFailed(command)
def runChdirSafe(f, *args, **kw):
origdir = os.path.abspath('.')
try:
return f(*args, **kw)
finally:
os.chdir(origdir)
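A hedged sketch of the two utilities, assuming this module is importable as twisted.python.release and a POSIX shell is available:

import os

from twisted.python.release import CommandFailed, runChdirSafe, sh

sh("true")                          # prints the command; output goes to /dev/null
try:
    sh("false")                     # non-zero exit status
except CommandFailed as e:
    print("command failed:", e)

runChdirSafe(os.chdir, "/tmp")      # the original working directory is restored
print(os.getcwd())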

View file

@ -0,0 +1,257 @@
# -*- test-case-name: twisted.test.test_roots -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted Python Roots: an abstract hierarchy representation for Twisted.
Maintainer: Glyph Lefkowitz
"""
from __future__ import absolute_import, division
from twisted.python import reflect
from twisted.python._oldstyle import _oldStyle
class NotSupportedError(NotImplementedError):
"""
An exception meaning that the tree-manipulation operation
you're attempting to perform is not supported.
"""
@_oldStyle
class Request:
"""I am an abstract representation of a request for an entity.
I also function as the response. The request is responded to by calling
self.write(data) until there is no data left and then calling
self.finish().
"""
# This attribute should be set to the string name of the protocol being
# responded to (e.g. HTTP or FTP)
wireProtocol = None
def write(self, data):
"""Add some data to the response to this request.
"""
raise NotImplementedError("%s.write" % reflect.qual(self.__class__))
def finish(self):
"""The response to this request is finished; flush all data to the network stream.
"""
raise NotImplementedError("%s.finish" % reflect.qual(self.__class__))
@_oldStyle
class Entity:
"""I am a terminal object in a hierarchy, with no children.
I represent a null interface; certain non-instance objects (strings and
integers, notably) are Entities.
Methods on this class are suggested to be implemented, but are not
required, and will be emulated on a per-protocol basis for types which do
not handle them.
"""
def render(self, request):
"""
I produce a stream of bytes for the request, by calling request.write()
and request.finish().
"""
raise NotImplementedError("%s.render" % reflect.qual(self.__class__))
@_oldStyle
class Collection:
"""I represent a static collection of entities.
I contain methods designed to represent collections that can be dynamically
created.
"""
def __init__(self, entities=None):
"""Initialize me.
"""
if entities is not None:
self.entities = entities
else:
self.entities = {}
def getStaticEntity(self, name):
"""Get an entity that was added to me using putEntity.
This method will return 'None' if it fails.
"""
return self.entities.get(name)
def getDynamicEntity(self, name, request):
"""Subclass this to generate an entity on demand.
This method should return 'None' if it fails.
"""
def getEntity(self, name, request):
"""Retrieve an entity from me.
I will first attempt to retrieve an entity statically; static entities
will obscure dynamic ones. If that fails, I will retrieve the entity
dynamically.
If I cannot retrieve an entity, I will return 'None'.
"""
ent = self.getStaticEntity(name)
if ent is not None:
return ent
ent = self.getDynamicEntity(name, request)
if ent is not None:
return ent
return None
def putEntity(self, name, entity):
"""Store a static reference on 'name' for 'entity'.
Raises a KeyError if the operation fails.
"""
self.entities[name] = entity
def delEntity(self, name):
"""Remove a static reference for 'name'.
Raises a KeyError if the operation fails.
"""
del self.entities[name]
def storeEntity(self, name, request):
"""Store an entity for 'name', based on the content of 'request'.
"""
raise NotSupportedError("%s.storeEntity" % reflect.qual(self.__class__))
def removeEntity(self, name, request):
"""Remove an entity for 'name', based on the content of 'request'.
"""
raise NotSupportedError("%s.removeEntity" % reflect.qual(self.__class__))
def listStaticEntities(self):
"""Retrieve a list of all name, entity pairs that I store references to.
See getStaticEntity.
"""
return self.entities.items()
def listDynamicEntities(self, request):
"""A list of all name, entity that I can generate on demand.
See getDynamicEntity.
"""
return []
def listEntities(self, request):
"""Retrieve a list of all name, entity pairs I contain.
See getEntity.
"""
return self.listStaticEntities() + self.listDynamicEntities(request)
def listStaticNames(self):
"""Retrieve a list of the names of entities that I store references to.
See getStaticEntity.
"""
return self.entities.keys()
def listDynamicNames(self):
"""Retrieve a list of the names of entities that I store references to.
See getDynamicEntity.
"""
return []
def listNames(self, request):
"""Retrieve a list of all names for entities that I contain.
See getEntity.
"""
return self.listStaticNames()
class ConstraintViolation(Exception):
"""An exception raised when a constraint is violated.
"""
class Constrained(Collection):
"""A collection that has constraints on its names and/or entities."""
def nameConstraint(self, name):
"""A method that determines whether an entity may be added to me with a given name.
If the constraint is satisfied, return 1; if the constraint is not
satisfied, either return 0 or raise a descriptive ConstraintViolation.
"""
return 1
def entityConstraint(self, entity):
"""A method that determines whether an entity may be added to me.
If the constraint is satisfied, return 1; if the constraint is not
satisfied, either return 0 or raise a descriptive ConstraintViolation.
"""
return 1
def reallyPutEntity(self, name, entity):
Collection.putEntity(self, name, entity)
def putEntity(self, name, entity):
"""Store an entity if it meets both constraints.
Otherwise raise a ConstraintViolation.
"""
if self.nameConstraint(name):
if self.entityConstraint(entity):
self.reallyPutEntity(name, entity)
else:
raise ConstraintViolation("Entity constraint violated.")
else:
raise ConstraintViolation("Name constraint violated.")
class Locked(Constrained):
"""A collection that can be locked from adding entities."""
locked = 0
def lock(self):
self.locked = 1
def entityConstraint(self, entity):
return not self.locked
class Homogenous(Constrained):
"""A homogenous collection of entities.
I will only contain entities that are an instance of the class or type
specified by my 'entityType' attribute.
"""
entityType = object
def entityConstraint(self, entity):
if isinstance(entity, self.entityType):
return 1
else:
raise ConstraintViolation("%s of incorrect type (%s)" %
(entity, self.entityType))
def getNameType(self):
return "Name"
def getEntityType(self):
return self.entityType.__name__
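A small sketch of the constraint machinery above, assuming the classes are importable from twisted.python.roots; IntBucket is a hypothetical subclass:

from twisted.python.roots import ConstraintViolation, Homogenous

class IntBucket(Homogenous):
    entityType = int

bucket = IntBucket()
bucket.putEntity("answer", 42)                 # satisfies the type constraint
try:
    bucket.putEntity("oops", "not an int")     # violates it
except ConstraintViolation as e:
    print("rejected:", e)
print(bucket.getStaticEntity("answer"))        # 42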

View file

@ -0,0 +1,231 @@
# -*- test-case-name: twisted.python.test.test_runtime -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
import os
import sys
import time
import warnings
from twisted.python._oldstyle import _oldStyle
def shortPythonVersion():
"""
Returns the Python version as a dot-separated string.
"""
return "%s.%s.%s" % sys.version_info[:3]
knownPlatforms = {
'nt': 'win32',
'ce': 'win32',
'posix': 'posix',
'java': 'java',
'org.python.modules.os': 'java',
}
_timeFunctions = {
#'win32': time.clock,
'win32': time.time,
}
@_oldStyle
class Platform:
"""
Gives us information about the platform we're running on.
"""
type = knownPlatforms.get(os.name)
seconds = staticmethod(_timeFunctions.get(type, time.time))
_platform = sys.platform
def __init__(self, name=None, platform=None):
if name is not None:
self.type = knownPlatforms.get(name)
self.seconds = _timeFunctions.get(self.type, time.time)
if platform is not None:
self._platform = platform
def isKnown(self):
"""
Do we know about this platform?
@return: Boolean indicating whether this is a known platform or not.
@rtype: C{bool}
"""
return self.type is not None
def getType(self):
"""
Get platform type.
@return: Either 'posix', 'win32' or 'java'
@rtype: C{str}
"""
return self.type
def isMacOSX(self):
"""
Check if current platform is macOS.
@return: C{True} if the current platform has been detected as macOS.
@rtype: C{bool}
"""
return self._platform == "darwin"
def isWinNT(self):
"""
Are we running in Windows NT?
This is deprecated and always returns C{True} on win32 because
Twisted only supports Windows NT-derived platforms at this point.
@return: C{True} if the current platform has been detected as
Windows NT.
@rtype: C{bool}
"""
warnings.warn(
"twisted.python.runtime.Platform.isWinNT was deprecated in "
"Twisted 13.0. Use Platform.isWindows instead.",
DeprecationWarning, stacklevel=2)
return self.isWindows()
def isWindows(self):
"""
Are we running in Windows?
@return: C{True} if the current platform has been detected as
Windows.
@rtype: C{bool}
"""
return self.getType() == 'win32'
def isVista(self):
"""
Check if current platform is Windows Vista or Windows Server 2008.
@return: C{True} if the current platform has been detected as Vista
@rtype: C{bool}
"""
if getattr(sys, "getwindowsversion", None) is not None:
return sys.getwindowsversion()[0] == 6
else:
return False
def isLinux(self):
"""
Check if current platform is Linux.
@return: C{True} if the current platform has been detected as Linux.
@rtype: C{bool}
"""
return self._platform.startswith("linux")
def isDocker(self, _initCGroupLocation="/proc/1/cgroup"):
"""
Check if the current platform is Linux in a Docker container.
@return: C{True} if the current platform has been detected as Linux
inside a Docker container.
@rtype: C{bool}
"""
if not self.isLinux():
return False
from twisted.python.filepath import FilePath
# Ask for the cgroups of init (pid 1)
initCGroups = FilePath(_initCGroupLocation)
if initCGroups.exists():
# The cgroups file looks like "2:cpu:/". The third element will
# begin with /docker if it is inside a Docker container.
controlGroups = [x.split(b":")
for x in initCGroups.getContent().split(b"\n")]
for group in controlGroups:
if len(group) == 3 and group[2].startswith(b"/docker/"):
# If it starts with /docker/, we're in a docker container
return True
return False
def _supportsSymlinks(self):
"""
Check for symlink support usable for Twisted's purposes.
@return: C{True} if symlinks are supported on the current platform,
otherwise C{False}.
@rtype: L{bool}
"""
if self.isWindows():
# We do the isWindows() check as newer Pythons support the symlink
# support in Vista+, but only if you have some obscure permission
# (SeCreateSymbolicLinkPrivilege), which can only be given on
# platforms with msc.exe (so, Business/Enterprise editions).
# This uncommon requirement makes the Twisted test suite fail
# in 99.99% of cases as general users don't have permission to do
# it, even if there is "symlink support".
return False
else:
# If we're not on Windows, check for existence of os.symlink.
try:
os.symlink
except AttributeError:
return False
else:
return True
def supportsThreads(self):
"""
Can threads be created?
@return: C{True} if the threads are supported on the current platform.
@rtype: C{bool}
"""
try:
import threading
return threading is not None # shh pyflakes
except ImportError:
return False
def supportsINotify(self):
"""
Return C{True} if we can use the inotify API on this platform.
@since: 10.1
"""
try:
from twisted.python._inotify import INotifyError, init
except ImportError:
return False
try:
os.close(init())
except INotifyError:
return False
return True
platform = Platform()
platformType = platform.getType()
seconds = platform.seconds
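Most callers use the module-level platform singleton rather than instantiating Platform directly; a brief sketch, assuming the module is importable as twisted.python.runtime:

from twisted.python.runtime import platform, platformType, seconds

print(platformType)                 # e.g. 'posix' or 'win32'
print(platform.isWindows())         # False on POSIX systems
print(platform.supportsThreads())   # True on any CPython built with threading
print(seconds())                    # current time from the platform clock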

View file

@ -0,0 +1,106 @@
# -*- test-case-name: twisted.test.test_sendmsg -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
sendmsg(2) and recvmsg(2) support for Python.
"""
from __future__ import absolute_import, division
from collections import namedtuple
from twisted.python.compat import _PY3
__all__ = ["sendmsg", "recvmsg", "getSocketFamily", "SCM_RIGHTS"]
if not _PY3:
from twisted.python._sendmsg import send1msg, recv1msg
from twisted.python._sendmsg import getsockfam, SCM_RIGHTS
__all__ += ["send1msg", "recv1msg", "getsockfam"]
else:
from socket import SCM_RIGHTS, CMSG_SPACE
RecievedMessage = namedtuple('RecievedMessage', ['data', 'ancillary', 'flags'])
def sendmsg(socket, data, ancillary=[], flags=0):
"""
Send a message on a socket.
@param socket: The socket to send the message on.
@type socket: L{socket.socket}
@param data: Bytes to write to the socket.
@type data: bytes
@param ancillary: Extra data to send over the socket outside of the normal
datagram or stream mechanism. By default no ancillary data is sent.
@type ancillary: C{list} of C{tuple} of C{int}, C{int}, and C{bytes}.
@param flags: Flags to affect how the message is sent. See the C{MSG_}
constants in the sendmsg(2) manual page. By default no flags are set.
@type flags: C{int}
@return: The return value of the underlying syscall, if it succeeds.
"""
if _PY3:
return socket.sendmsg([data], ancillary, flags)
else:
return send1msg(socket.fileno(), data, flags, ancillary)
def recvmsg(socket, maxSize=8192, cmsgSize=4096, flags=0):
"""
Receive a message on a socket.
@param socket: The socket to receive the message on.
@type socket: L{socket.socket}
@param maxSize: The maximum number of bytes to receive from the socket using
the datagram or stream mechanism. The default maximum is 8192.
@type maxSize: L{int}
@param cmsgSize: The maximum number of bytes to receive from the socket
outside of the normal datagram or stream mechanism. The default maximum
is 4096.
@type cmsgSize: L{int}
@param flags: Flags to affect how the message is received. See the C{MSG_}
constants in the sendmsg(2) manual page. By default no flags are set.
@type flags: L{int}
@return: A named 3-tuple of the bytes received using the datagram/stream
mechanism, a L{list} of L{tuple}s giving ancillary received data, and
flags as an L{int} describing the data received.
"""
if _PY3:
# In Twisted's sendmsg.c, the csmg_space is defined as:
# int cmsg_size = 4096;
# cmsg_space = CMSG_SPACE(cmsg_size);
# Since the default in Python 3's socket is 0, we need to define our
# own default of 4096. -hawkie
data, ancillary, flags = socket.recvmsg(
maxSize, CMSG_SPACE(cmsgSize), flags)[0:3]
else:
data, flags, ancillary = recv1msg(
socket.fileno(), flags, maxSize, cmsgSize)
return RecievedMessage(data=data, ancillary=ancillary, flags=flags)
def getSocketFamily(socket):
"""
Return the family of the given socket.
@param socket: The socket to get the family of.
@type socket: L{socket.socket}
@rtype: L{int}
"""
if _PY3:
return socket.family
else:
return getsockfam(socket.fileno())
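A hedged, POSIX-only sketch of descriptor passing with the wrappers above, assuming the module is importable as twisted.python.sendmsg; a socketpair and a pipe keep it self-contained:

import os
import socket
import struct

from twisted.python.sendmsg import SCM_RIGHTS, recvmsg, sendmsg

left, right = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
readEnd, writeEnd = os.pipe()

# Ship the pipe's read end as SCM_RIGHTS ancillary data.
sendmsg(left, b"an fd follows",
        [(socket.SOL_SOCKET, SCM_RIGHTS, struct.pack("i", readEnd))])

data, ancillary, flags = recvmsg(right)
[(level, cmsgType, packed)] = ancillary
[receivedFD] = struct.unpack("i", packed[:4])   # the buffer may carry padding

os.write(writeEnd, b"hello")
print(data, os.read(receivedFD, 5))             # b'an fd follows' b'hello'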

View file

@ -0,0 +1,85 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Creation of Windows shortcuts.
Requires win32all.
"""
from win32com.shell import shell
import pythoncom
import os
def open(filename):
"""
Open an existing shortcut for reading.
@return: The shortcut object
@rtype: Shortcut
"""
sc = Shortcut()
sc.load(filename)
return sc
class Shortcut:
"""
A shortcut on Win32.
"""
def __init__(self,
path=None,
arguments=None,
description=None,
workingdir=None,
iconpath=None,
iconidx=0):
"""
@param path: Location of the target
@param arguments: If path points to an executable, optional arguments
to pass
@param description: Human-readable description of target
@param workingdir: Directory from which target is launched
@param iconpath: Filename that contains an icon for the shortcut
@param iconidx: If iconpath is set, optional index of the icon desired
"""
self._base = pythoncom.CoCreateInstance(
shell.CLSID_ShellLink, None,
pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
)
if path is not None:
self.SetPath(os.path.abspath(path))
if arguments is not None:
self.SetArguments(arguments)
if description is not None:
self.SetDescription(description)
if workingdir is not None:
self.SetWorkingDirectory(os.path.abspath(workingdir))
if iconpath is not None:
self.SetIconLocation(os.path.abspath(iconpath), iconidx)
def load(self, filename):
"""
Read a shortcut file from disk.
"""
self._base.QueryInterface(pythoncom.IID_IPersistFile).Load(
os.path.abspath(filename))
def save(self, filename):
"""
Write the shortcut to disk.
The file should be named something.lnk.
"""
self._base.QueryInterface(pythoncom.IID_IPersistFile).Save(
os.path.abspath(filename), 0)
def __getattr__(self, name):
return getattr(self._base, name)
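A hedged sketch (Windows only, pywin32 required), assuming this module is importable as twisted.python.shortcut; the paths are purely illustrative:

from twisted.python.shortcut import Shortcut

sc = Shortcut(path=r"C:\Windows\notepad.exe",
              description="Plain text editor",
              workingdir=r"C:\Windows")
sc.save(r"C:\Users\Public\Desktop\Notepad.lnk")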

View file

@ -0,0 +1,109 @@
# -*- test-case-name: twisted.python.test.test_syslog -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Classes and utility functions for integrating Twisted and syslog.
You probably want to call L{startLogging}.
"""
syslog = __import__('syslog')
from twisted.python import log
from twisted.python._oldstyle import _oldStyle
# These defaults come from the Python syslog docs.
DEFAULT_OPTIONS = 0
DEFAULT_FACILITY = syslog.LOG_USER
@_oldStyle
class SyslogObserver:
"""
A log observer for logging to syslog.
See L{twisted.python.log} for context.
This logObserver will automatically use LOG_ALERT priority for logged
failures (such as from C{log.err()}), but you can use any priority and
facility by setting the 'C{syslogPriority}' and 'C{syslogFacility}' keys in
the event dict.
"""
openlog = syslog.openlog
syslog = syslog.syslog
def __init__(self, prefix, options=DEFAULT_OPTIONS,
facility=DEFAULT_FACILITY):
"""
@type prefix: C{str}
@param prefix: The syslog prefix to use.
@type options: C{int}
@param options: A bitvector represented as an integer of the syslog
options to use.
@type facility: C{int}
@param facility: An indication to the syslog daemon of what sort of
program this is (essentially, an additional arbitrary metadata
classification for messages sent to syslog by this observer).
"""
self.openlog(prefix, options, facility)
def emit(self, eventDict):
"""
Send a message event to the I{syslog}.
@param eventDict: The event to send. If it has no C{'message'} key, it
will be ignored. Otherwise, if it has C{'syslogPriority'} and/or
C{'syslogFacility'} keys, these will be used as the syslog priority
and facility. If it has no C{'syslogPriority'} key but a true
value for the C{'isError'} key, the B{LOG_ALERT} priority will be
used; if it has a false value for C{'isError'}, B{LOG_INFO} will be
used. If the C{'message'} key is multiline, each line will be sent
to the syslog separately.
"""
# Figure out what the message-text is.
text = log.textFromEventDict(eventDict)
if text is None:
return
# Figure out what syslog parameters we might need to use.
priority = syslog.LOG_INFO
facility = 0
if eventDict['isError']:
priority = syslog.LOG_ALERT
if 'syslogPriority' in eventDict:
priority = int(eventDict['syslogPriority'])
if 'syslogFacility' in eventDict:
facility = int(eventDict['syslogFacility'])
# Break the message up into lines and send them.
lines = text.split('\n')
while lines[-1:] == ['']:
lines.pop()
firstLine = True
for line in lines:
if firstLine:
firstLine = False
else:
line = '\t' + line
self.syslog(priority | facility,
'[%s] %s' % (eventDict['system'], line))
def startLogging(prefix='Twisted', options=DEFAULT_OPTIONS,
facility=DEFAULT_FACILITY, setStdout=1):
"""
Send all Twisted logging output to syslog from now on.
The prefix, options and facility arguments are passed to
C{syslog.openlog()}, see the Python syslog documentation for details. For
other parameters, see L{twisted.python.log.startLoggingWithObserver}.
"""
obs = SyslogObserver(prefix, options, facility)
log.startLoggingWithObserver(obs.emit, setStdout=setStdout)
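A brief sketch of routing Twisted's log output to the local syslog daemon, assuming a POSIX host and that this module is importable as twisted.python.syslog; the stdlib syslog module supplies the facility and priority constants:

import syslog as stdsyslog

from twisted.python import log, syslog

syslog.startLogging(prefix='myapp', facility=stdsyslog.LOG_DAEMON)
log.msg("service starting")                               # emitted at LOG_INFO
log.msg("disk almost full", syslogPriority=stdsyslog.LOG_WARNING)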

View file

@ -0,0 +1,89 @@
# -*- test-case-name: twisted.python.test.test_systemd -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Integration with systemd.
Currently only the minimum APIs necessary for using systemd's socket activation
feature are supported.
"""
from __future__ import division, absolute_import
__all__ = ['ListenFDs']
from os import getpid
class ListenFDs(object):
"""
L{ListenFDs} provides access to file descriptors inherited from systemd.
Typically L{ListenFDs.fromEnvironment} should be used to construct a new
instance of L{ListenFDs}.
@cvar _START: File descriptors inherited from systemd are always
consecutively numbered, with a fixed lowest "starting" descriptor. This
gives the default starting descriptor. Since this must agree with the
value systemd is using, it typically should not be overridden.
@type _START: C{int}
@ivar _descriptors: A C{list} of C{int} giving the descriptors which were
inherited.
"""
_START = 3
def __init__(self, descriptors):
"""
@param descriptors: The descriptors which will be returned from calls to
C{inheritedDescriptors}.
"""
self._descriptors = descriptors
@classmethod
def fromEnvironment(cls, environ=None, start=None):
"""
@param environ: A dictionary-like object to inspect to discover
inherited descriptors. By default, L{None}, indicating that the
real process environment should be inspected. The default is
suitable for typical usage.
@param start: An integer giving the lowest value of an inherited
descriptor systemd will give us. By default, L{None}, indicating
the known correct (that is, in agreement with systemd) value will be
used. The default is suitable for typical usage.
@return: A new instance of C{cls} which can be used to look up the
descriptors which have been inherited.
"""
if environ is None:
from os import environ
if start is None:
start = cls._START
descriptors = []
try:
pid = int(environ['LISTEN_PID'])
except (KeyError, ValueError):
pass
else:
if pid == getpid():
try:
count = int(environ['LISTEN_FDS'])
except (KeyError, ValueError):
pass
else:
descriptors = range(start, start + count)
del environ['LISTEN_PID'], environ['LISTEN_FDS']
return cls(descriptors)
def inheritedDescriptors(self):
"""
@return: The configured list of descriptors.
"""
return list(self._descriptors)
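A short sketch of ListenFDs, assuming this module is importable as twisted.python.systemd; outside a socket-activated systemd service the inherited list is simply empty:

from twisted.python.systemd import ListenFDs

fds = ListenFDs.fromEnvironment()
for fd in fds.inheritedDescriptors():
    print("inherited descriptor:", fd)

# Explicit construction is convenient in tests:
print(ListenFDs([3, 4]).inheritedDescriptors())   # [3, 4]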

View file

@ -0,0 +1,3 @@
"""
Unit tests for L{twisted.python}.
"""

View file

@ -0,0 +1,56 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import inspect
from twisted.python.deprecate import _passedSignature
from twisted.trial.unittest import SynchronousTestCase
class KeywordOnlyTests(SynchronousTestCase):
"""
Keyword only arguments (PEP 3102).
"""
def checkPassed(self, func, *args, **kw):
"""
Test an invocation of L{passed} with the given function, arguments, and
keyword arguments.
@param func: A function whose argspec to pass to L{_passed}.
@type func: A callable.
@param args: The arguments which could be passed to L{func}.
@param kw: The keyword arguments which could be passed to L{func}.
@return: L{_passed}'s return value
@rtype: L{dict}
"""
return _passedSignature(inspect.signature(func), args, kw)
def test_passedKeywordOnly(self):
"""
Keyword only arguments follow varargs.
They are specified in PEP 3102.
"""
def func1(*a, b=True):
"""
b is a keyword-only argument, with a default value.
"""
def func2(*a, b=True, c, d, e):
"""
b, c, d, e are keyword-only arguments.
b has a default value.
"""
self.assertEqual(self.checkPassed(func1, 1, 2, 3),
dict(a=(1, 2, 3), b=True))
self.assertEqual(self.checkPassed(func1, 1, 2, 3, b=False),
dict(a=(1, 2, 3), b=False))
self.assertEqual(self.checkPassed(func2,
1, 2, b=False, c=1, d=2, e=3),
dict(a=(1, 2), b=False, c=1, d=2, e=3))
self.assertRaises(TypeError, self.checkPassed,
func2, 1, 2, b=False, c=1, d=2)

View file

@ -0,0 +1,28 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A module that is deprecated, used by L{twisted.python.test.test_deprecate} for
testing purposes.
"""
from __future__ import division, absolute_import
from incremental import Version
from twisted.python.deprecate import deprecatedModuleAttribute
# Known module-level attributes.
DEPRECATED_ATTRIBUTE = 42
ANOTHER_ATTRIBUTE = 'hello'
version = Version('Twisted', 8, 0, 0)
message = 'Oh noes!'
deprecatedModuleAttribute(
version,
message,
__name__,
'DEPRECATED_ATTRIBUTE')

View file

@ -0,0 +1,55 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Facilities for helping test code which interacts with Python's module system
to load code.
"""
from __future__ import division, absolute_import
import sys
from twisted.python.filepath import FilePath
class TwistedModulesMixin(object):
"""
A mixin for C{twisted.trial.unittest.SynchronousTestCase} providing useful
methods for manipulating Python's module system.
"""
def replaceSysPath(self, sysPath):
"""
Replace sys.path, for the duration of the test, with the given value.
"""
originalSysPath = sys.path[:]
def cleanUpSysPath():
sys.path[:] = originalSysPath
self.addCleanup(cleanUpSysPath)
sys.path[:] = sysPath
def replaceSysModules(self, sysModules):
"""
Replace sys.modules, for the duration of the test, with the given value.
"""
originalSysModules = sys.modules.copy()
def cleanUpSysModules():
sys.modules.clear()
sys.modules.update(originalSysModules)
self.addCleanup(cleanUpSysModules)
sys.modules.clear()
sys.modules.update(sysModules)
def pathEntryWithOnePackage(self, pkgname="test_package"):
"""
Generate a L{FilePath} with one package, named C{pkgname}, on it, and
return the L{FilePath} of the path entry.
"""
entry = FilePath(self.mktemp())
pkg = entry.child(pkgname)
pkg.makedirs()
pkg.child("__init__.py").setContent(b"")
return entry

View file

@ -0,0 +1,39 @@
# -*- test-case-name: twisted.python.test.test_sendmsg -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import sys
import os
import socket
from struct import unpack
from twisted.python.sendmsg import recvmsg
def recvfd(socketfd):
"""
Receive a file descriptor from a L{sendmsg} message on the given C{AF_UNIX}
socket.
@param socketfd: An C{AF_UNIX} socket, attached to another process waiting
to send sockets via the ancillary data mechanism in L{send1msg}.
@type socketfd: C{int}
@return: a 2-tuple of (new file descriptor, description).
@rtype: 2-tuple of (C{int}, C{bytes})
"""
ourSocket = socket.fromfd(socketfd, socket.AF_UNIX, socket.SOCK_STREAM)
data, ancillary, flags = recvmsg(ourSocket)
[(cmsgLevel, cmsgType, packedFD)] = ancillary
# cmsgLevel and cmsgType really need to be SOL_SOCKET / SCM_RIGHTS, but
# since those are the *only* standard values, there's not much point in
# checking.
[unpackedFD] = unpack("i", packedFD)
return (unpackedFD, data)
if __name__ == '__main__':
fd, description = recvfd(int(sys.argv[1]))
os.write(fd, b"Test fixture data: " + description + b".\n")
os.close(fd)

View file

@ -0,0 +1,42 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the data directory support.
"""
from __future__ import division, absolute_import
try:
from twisted.python import _appdirs
except ImportError:
_appdirs = None
from twisted.trial import unittest
class AppdirsTests(unittest.TestCase):
"""
Tests for L{_appdirs}.
"""
if not _appdirs:
skip = "appdirs package not installed"
def test_moduleName(self):
"""
Calling L{appdirs.getDataDirectory} will return a user data directory
in the system convention, with the module of the caller as the
subdirectory.
"""
res = _appdirs.getDataDirectory()
self.assertTrue(res.endswith("twisted.python.test.test_appdirs"))
def test_manual(self):
"""
Calling L{appdirs.getDataDirectory} with a C{moduleName} argument will
make a data directory with that name instead.
"""
res = _appdirs.getDataDirectory("foo.bar.baz")
self.assertTrue(res.endswith("foo.bar.baz"))

View file

@ -0,0 +1,898 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for Twisted component architecture.
"""
from __future__ import division, absolute_import
from functools import wraps
from zope.interface import Interface, implementer, Attribute
from zope.interface.adapter import AdapterRegistry
from twisted.python.compat import comparable, cmp
from twisted.trial import unittest
from twisted.python import components
from twisted.python.components import _addHook, _removeHook, proxyForInterface
class Compo(components.Componentized):
num = 0
def inc(self):
self.num = self.num + 1
return self.num
class IAdept(Interface):
def adaptorFunc():
raise NotImplementedError()
class IElapsed(Interface):
def elapsedFunc():
"""
1!
"""
@implementer(IAdept)
class Adept(components.Adapter):
def __init__(self, orig):
self.original = orig
self.num = 0
def adaptorFunc(self):
self.num = self.num + 1
return self.num, self.original.inc()
@implementer(IElapsed)
class Elapsed(components.Adapter):
def elapsedFunc(self):
return 1
class AComp(components.Componentized):
pass
class BComp(AComp):
pass
class CComp(BComp):
pass
class ITest(Interface):
pass
class ITest2(Interface):
pass
class ITest3(Interface):
pass
class ITest4(Interface):
pass
@implementer(ITest, ITest3, ITest4)
class Test(components.Adapter):
def __init__(self, orig):
pass
@implementer(ITest2)
class Test2(object):
temporaryAdapter = 1
def __init__(self, orig):
pass
class RegistryUsingMixin(object):
"""
Mixin for test cases which modify the global registry somehow.
"""
def setUp(self):
"""
Configure L{twisted.python.components.registerAdapter} to mutate an
alternate registry to improve test isolation.
"""
# Create a brand new, empty registry and put it onto the components
# module where registerAdapter will use it. Also ensure that it goes
# away at the end of the test.
scratchRegistry = AdapterRegistry()
self.patch(components, 'globalRegistry', scratchRegistry)
# Hook the new registry up to the adapter lookup system and ensure that
# association is also discarded after the test.
hook = _addHook(scratchRegistry)
self.addCleanup(_removeHook, hook)
class ComponentizedTests(unittest.SynchronousTestCase, RegistryUsingMixin):
"""
Simple test case for caching in Componentized.
"""
def setUp(self):
RegistryUsingMixin.setUp(self)
components.registerAdapter(Test, AComp, ITest)
components.registerAdapter(Test, AComp, ITest3)
components.registerAdapter(Test2, AComp, ITest2)
def testComponentized(self):
components.registerAdapter(Adept, Compo, IAdept)
components.registerAdapter(Elapsed, Compo, IElapsed)
c = Compo()
assert c.getComponent(IAdept).adaptorFunc() == (1, 1)
assert c.getComponent(IAdept).adaptorFunc() == (2, 2)
assert IElapsed(IAdept(c)).elapsedFunc() == 1
def testInheritanceAdaptation(self):
c = CComp()
co1 = c.getComponent(ITest)
co2 = c.getComponent(ITest)
co3 = c.getComponent(ITest2)
co4 = c.getComponent(ITest2)
assert co1 is co2
assert co3 is not co4
c.removeComponent(co1)
co5 = c.getComponent(ITest)
co6 = c.getComponent(ITest)
assert co5 is co6
assert co1 is not co5
def testMultiAdapter(self):
c = CComp()
co1 = c.getComponent(ITest)
co3 = c.getComponent(ITest3)
co4 = c.getComponent(ITest4)
self.assertIsNone(co4)
self.assertIs(co1, co3)
def test_getComponentDefaults(self):
"""
Test that a default value specified to Componentized.getComponent is
returned if there is no component for the requested interface.
"""
componentized = components.Componentized()
default = object()
self.assertIs(
componentized.getComponent(ITest, default),
default)
self.assertIs(
componentized.getComponent(ITest, default=default),
default)
self.assertIs(
componentized.getComponent(ITest),
None)
def test_setAdapter(self):
"""
C{Componentized.setAdapter} sets a component for an interface by
wrapping the instance with the given adapter class.
"""
componentized = components.Componentized()
componentized.setAdapter(IAdept, Adept)
component = componentized.getComponent(IAdept)
self.assertEqual(component.original, componentized)
self.assertIsInstance(component, Adept)
def test_addAdapter(self):
"""
C{Componentized.addAdapter} adapts the instance by wrapping it with the
given adapter class, then stores it using C{addComponent}.
"""
componentized = components.Componentized()
componentized.addAdapter(Adept, ignoreClass=True)
component = componentized.getComponent(IAdept)
self.assertEqual(component.original, componentized)
self.assertIsInstance(component, Adept)
def test_setComponent(self):
"""
C{Componentized.setComponent} stores the given component using the
given interface as the key.
"""
componentized = components.Componentized()
obj = object()
componentized.setComponent(ITest, obj)
self.assertIs(componentized.getComponent(ITest), obj)
def test_unsetComponent(self):
"""
C{Componentized.unsetComponent} removes the cached component for the
given interface.
"""
componentized = components.Componentized()
obj = object()
componentized.setComponent(ITest, obj)
componentized.unsetComponent(ITest)
self.assertIsNone(componentized.getComponent(ITest))
def test_reprableComponentized(self):
"""
C{ReprableComponentized} has a C{__repr__} that lists its cache.
"""
rc = components.ReprableComponentized()
rc.setComponent(ITest, "hello")
result = repr(rc)
self.assertIn("ITest", result)
self.assertIn("hello", result)
class AdapterTests(unittest.SynchronousTestCase):
"""Test adapters."""
def testAdapterGetComponent(self):
o = object()
a = Adept(o)
self.assertRaises(components.CannotAdapt, ITest, a)
self.assertIsNone(ITest(a, None))
class IMeta(Interface):
pass
@implementer(IMeta)
class MetaAdder(components.Adapter):
def add(self, num):
return self.original.num + num
@implementer(IMeta)
class BackwardsAdder(components.Adapter):
def add(self, num):
return self.original.num - num
class MetaNumber(object):
"""
Integer wrapper for Interface adaptation tests.
"""
def __init__(self, num):
self.num = num
class ComponentNumber(components.Componentized):
def __init__(self):
self.num = 0
components.Componentized.__init__(self)
@implementer(IMeta)
class ComponentAdder(components.Adapter):
"""
Adder for componentized adapter tests.
"""
def __init__(self, original):
components.Adapter.__init__(self, original)
self.num = self.original.num
def add(self, num):
self.num += num
return self.num
class IAttrX(Interface):
"""
Base interface for test of adapter with C{__cmp__}.
"""
def x():
"""
Return a value.
"""
class IAttrXX(Interface):
"""
Adapted interface for test of adapter with C{__cmp__}.
"""
def xx():
"""
Return a tuple of values.
"""
@implementer(IAttrX)
class Xcellent(object):
"""
L{IAttrX} implementation for test of adapter with C{__cmp__}.
"""
def x(self):
"""
Return a value.
@return: a value
"""
return 'x!'
@comparable
class DoubleXAdapter(object):
"""
Adapter with __cmp__.
"""
num = 42
def __init__(self, original):
self.original = original
def xx(self):
return (self.original.x(), self.original.x())
def __cmp__(self, other):
return cmp(self.num, other.num)
class MetaInterfaceTests(RegistryUsingMixin, unittest.SynchronousTestCase):
def test_basic(self):
"""
Registered adapters can be used to adapt classes to an interface.
"""
components.registerAdapter(MetaAdder, MetaNumber, IMeta)
n = MetaNumber(1)
self.assertEqual(IMeta(n).add(1), 2)
def testComponentizedInteraction(self):
components.registerAdapter(ComponentAdder, ComponentNumber, IMeta)
c = ComponentNumber()
IMeta(c).add(1)
IMeta(c).add(1)
self.assertEqual(IMeta(c).add(1), 3)
def testAdapterWithCmp(self):
# Make sure that a __cmp__ on an adapter doesn't break anything
components.registerAdapter(DoubleXAdapter, IAttrX, IAttrXX)
xx = IAttrXX(Xcellent())
self.assertEqual(('x!', 'x!'), xx.xx())
class RegistrationTests(RegistryUsingMixin, unittest.SynchronousTestCase):
"""
Tests for adapter registration.
"""
def _registerAdapterForClassOrInterface(self, original):
"""
Register an adapter with L{components.registerAdapter} for the given
class or interface and verify that the adapter can be looked up with
L{components.getAdapterFactory}.
"""
adapter = lambda o: None
components.registerAdapter(adapter, original, ITest)
self.assertIs(
components.getAdapterFactory(original, ITest, None),
adapter)
def test_registerAdapterForClass(self):
"""
Test that an adapter from a class can be registered and then looked
up.
"""
class TheOriginal(object):
pass
return self._registerAdapterForClassOrInterface(TheOriginal)
def test_registerAdapterForInterface(self):
"""
Test that an adapter from an interface can be registered and then
looked up.
"""
return self._registerAdapterForClassOrInterface(ITest2)
def _duplicateAdapterForClassOrInterface(self, original):
"""
Verify that L{components.registerAdapter} raises L{ValueError} if the
from-type/interface and to-interface pair is not unique.
"""
firstAdapter = lambda o: False
secondAdapter = lambda o: True
components.registerAdapter(firstAdapter, original, ITest)
self.assertRaises(
ValueError,
components.registerAdapter,
secondAdapter, original, ITest)
# Make sure that the original adapter is still around as well
self.assertIs(
components.getAdapterFactory(original, ITest, None),
firstAdapter)
def test_duplicateAdapterForClass(self):
"""
Test that attempting to register a second adapter from a class
raises the appropriate exception.
"""
class TheOriginal(object):
pass
return self._duplicateAdapterForClassOrInterface(TheOriginal)
def test_duplicateAdapterForInterface(self):
"""
Test that attempting to register a second adapter from an interface
raises the appropriate exception.
"""
return self._duplicateAdapterForClassOrInterface(ITest2)
def _duplicateAdapterForClassOrInterfaceAllowed(self, original):
"""
Verify that when C{components.ALLOW_DUPLICATES} is set to C{True}, new
adapter registrations for a particular from-type/interface and
to-interface pair replace older registrations.
"""
firstAdapter = lambda o: False
secondAdapter = lambda o: True
class TheInterface(Interface):
pass
components.registerAdapter(firstAdapter, original, TheInterface)
components.ALLOW_DUPLICATES = True
try:
components.registerAdapter(secondAdapter, original, TheInterface)
self.assertIs(
components.getAdapterFactory(original, TheInterface, None),
secondAdapter)
finally:
components.ALLOW_DUPLICATES = False
# It should be rejected again at this point
self.assertRaises(
ValueError,
components.registerAdapter,
firstAdapter, original, TheInterface)
self.assertIs(
components.getAdapterFactory(original, TheInterface, None),
secondAdapter)
def test_duplicateAdapterForClassAllowed(self):
"""
Test that when L{components.ALLOW_DUPLICATES} is set to a true
value, duplicate registrations from classes are allowed to override
the original registration.
"""
class TheOriginal(object):
pass
return self._duplicateAdapterForClassOrInterfaceAllowed(TheOriginal)
def test_duplicateAdapterForInterfaceAllowed(self):
"""
Test that when L{components.ALLOW_DUPLICATES} is set to a true
value, duplicate registrations from interfaces are allowed to
override the original registration.
"""
class TheOriginal(Interface):
pass
return self._duplicateAdapterForClassOrInterfaceAllowed(TheOriginal)
def _multipleInterfacesForClassOrInterface(self, original):
"""
Verify that an adapter can be registered for multiple to-interfaces at a
time.
"""
adapter = lambda o: None
components.registerAdapter(adapter, original, ITest, ITest2)
self.assertIs(
components.getAdapterFactory(original, ITest, None), adapter)
self.assertIs(
components.getAdapterFactory(original, ITest2, None), adapter)
def test_multipleInterfacesForClass(self):
"""
Test the registration of an adapter from a class to several
interfaces at once.
"""
class TheOriginal(object):
pass
return self._multipleInterfacesForClassOrInterface(TheOriginal)
def test_multipleInterfacesForInterface(self):
"""
Test the registration of an adapter from an interface to several
interfaces at once.
"""
return self._multipleInterfacesForClassOrInterface(ITest3)
def _subclassAdapterRegistrationForClassOrInterface(self, original):
"""
Verify that a new adapter can be registered for a particular
to-interface from a subclass of a type or interface which already has an
adapter registered to that interface and that the subclass adapter takes
precedence over the base class adapter.
"""
firstAdapter = lambda o: True
secondAdapter = lambda o: False
class TheSubclass(original):
pass
components.registerAdapter(firstAdapter, original, ITest)
components.registerAdapter(secondAdapter, TheSubclass, ITest)
self.assertIs(
components.getAdapterFactory(original, ITest, None),
firstAdapter)
self.assertIs(
components.getAdapterFactory(TheSubclass, ITest, None),
secondAdapter)
def test_subclassAdapterRegistrationForClass(self):
"""
Test that an adapter to a particular interface can be registered
from both a class and its subclass.
"""
class TheOriginal(object):
pass
return self._subclassAdapterRegistrationForClassOrInterface(TheOriginal)
def test_subclassAdapterRegistrationForInterface(self):
"""
Test that an adapter to a particular interface can be registered
from both an interface and its subclass.
"""
return self._subclassAdapterRegistrationForClassOrInterface(ITest2)
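# A minimal sketch of the registration rules checked above: a second registration
# for the same origin/interface pair raises ValueError unless
# components.ALLOW_DUPLICATES is set.  IThing and Origin are illustrative names.
from zope.interface import Interface
from twisted.python import components

class IThing(Interface):
    pass

class Origin(object):
    pass

firstFactory = lambda original: None
components.registerAdapter(firstFactory, Origin, IThing)
assert components.getAdapterFactory(Origin, IThing, None) is firstFactory

try:
    components.registerAdapter(lambda original: None, Origin, IThing)
except ValueError:
    pass   # duplicate registrations are rejected by default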
class IProxiedInterface(Interface):
"""
An interface class for use by L{proxyForInterface}.
"""
ifaceAttribute = Attribute("""
An example declared attribute, which should be proxied.""")
def yay(*a, **kw):
"""
A sample method which should be proxied.
"""
class IProxiedSubInterface(IProxiedInterface):
"""
An interface that derives from another for use with L{proxyForInterface}.
"""
def boo(self):
"""
A different sample method which should be proxied.
"""
@implementer(IProxiedInterface)
class Yayable(object):
"""
A provider of L{IProxiedInterface} which increments a counter for
every call to C{yay}.
@ivar yays: The number of times C{yay} has been called.
"""
def __init__(self):
self.yays = 0
self.yayArgs = []
def yay(self, *a, **kw):
"""
Increment C{self.yays}.
"""
self.yays += 1
self.yayArgs.append((a, kw))
return self.yays
@implementer(IProxiedSubInterface)
class Booable(object):
"""
An implementation of IProxiedSubInterface
"""
yayed = False
booed = False
def yay(self):
"""
Mark the fact that 'yay' has been called.
"""
self.yayed = True
def boo(self):
"""
Mark the fact that 'boo' has been called.
"""
self.booed = True
class IMultipleMethods(Interface):
"""
An interface with multiple methods.
"""
def methodOne():
"""
The first method. Should return 1.
"""
def methodTwo():
"""
The second method. Should return 2.
"""
class MultipleMethodImplementor(object):
"""
A precise implementation of L{IMultipleMethods}.
"""
def methodOne(self):
"""
@return: 1
"""
return 1
def methodTwo(self):
"""
@return: 2
"""
return 2
class ProxyForInterfaceTests(unittest.SynchronousTestCase):
"""
Tests for L{proxyForInterface}.
"""
def test_original(self):
"""
Proxy objects should have an C{original} attribute which refers to the
original object passed to the constructor.
"""
original = object()
proxy = proxyForInterface(IProxiedInterface)(original)
self.assertIs(proxy.original, original)
def test_proxyMethod(self):
"""
The class created from L{proxyForInterface} passes methods on an
interface to the object which is passed to its constructor.
"""
klass = proxyForInterface(IProxiedInterface)
yayable = Yayable()
proxy = klass(yayable)
proxy.yay()
self.assertEqual(proxy.yay(), 2)
self.assertEqual(yayable.yays, 2)
def test_decoratedProxyMethod(self):
"""
Methods of the class created from L{proxyForInterface} can be used with
the decorator-helper L{functools.wraps}.
"""
base = proxyForInterface(IProxiedInterface)
class klass(base):
@wraps(base.yay)
def yay(self):
self.original.yays += 1
return base.yay(self)
original = Yayable()
yayable = klass(original)
yayable.yay()
self.assertEqual(2, original.yays)
def test_proxyAttribute(self):
"""
Proxy objects should proxy declared attributes, but not other
attributes.
"""
yayable = Yayable()
yayable.ifaceAttribute = object()
proxy = proxyForInterface(IProxiedInterface)(yayable)
self.assertIs(proxy.ifaceAttribute, yayable.ifaceAttribute)
self.assertRaises(AttributeError, lambda: proxy.yays)
def test_proxySetAttribute(self):
"""
The attributes that proxy objects proxy should be assignable and affect
the original object.
"""
yayable = Yayable()
proxy = proxyForInterface(IProxiedInterface)(yayable)
thingy = object()
proxy.ifaceAttribute = thingy
self.assertIs(yayable.ifaceAttribute, thingy)
def test_proxyDeleteAttribute(self):
"""
The attributes that proxy objects proxy should be deletable and affect
the original object.
"""
yayable = Yayable()
yayable.ifaceAttribute = None
proxy = proxyForInterface(IProxiedInterface)(yayable)
del proxy.ifaceAttribute
self.assertFalse(hasattr(yayable, 'ifaceAttribute'))
def test_multipleMethods(self):
"""
[Regression test] The proxy should send its method calls to the correct
method, not the incorrect one.
"""
multi = MultipleMethodImplementor()
proxy = proxyForInterface(IMultipleMethods)(multi)
self.assertEqual(proxy.methodOne(), 1)
self.assertEqual(proxy.methodTwo(), 2)
def test_subclassing(self):
"""
It is possible to subclass the result of L{proxyForInterface}.
"""
class SpecializedProxy(proxyForInterface(IProxiedInterface)):
"""
A specialized proxy which can decrement the number of yays.
"""
def boo(self):
"""
Decrement the number of yays.
"""
self.original.yays -= 1
yayable = Yayable()
special = SpecializedProxy(yayable)
self.assertEqual(yayable.yays, 0)
special.boo()
self.assertEqual(yayable.yays, -1)
def test_proxyName(self):
"""
The name of a proxy class indicates which interface it proxies.
"""
proxy = proxyForInterface(IProxiedInterface)
self.assertEqual(
proxy.__name__,
"(Proxy for "
"twisted.python.test.test_components.IProxiedInterface)")
def test_implements(self):
"""
The resulting proxy implements the interface that it proxies.
"""
proxy = proxyForInterface(IProxiedInterface)
self.assertTrue(IProxiedInterface.implementedBy(proxy))
def test_proxyDescriptorGet(self):
"""
_ProxyDescriptor's __get__ method should return the appropriate
attribute of its argument's 'original' attribute if it is invoked with
an object. If it is invoked with None, it should return a false
class-method emulator instead.
For some reason, Python's documentation recommends to define
descriptors' __get__ methods with the 'type' parameter as optional,
despite the fact that Python itself never actually calls the descriptor
that way. This is probably to support 'foo.__get__(bar)' as an
idiom. Let's make sure that the behavior is correct. Since we don't
actually use the 'type' argument at all, this test calls it the
idiomatic way to ensure that signature works; test_proxyInheritance
verifies the how-Python-actually-calls-it signature.
"""
class Sample(object):
called = False
def hello(self):
self.called = True
fakeProxy = Sample()
testObject = Sample()
fakeProxy.original = testObject
pd = components._ProxyDescriptor("hello", "original")
self.assertEqual(pd.__get__(fakeProxy), testObject.hello)
fakeClassMethod = pd.__get__(None)
fakeClassMethod(fakeProxy)
self.assertTrue(testObject.called)
def test_proxyInheritance(self):
"""
Subclasses of the class returned from L{proxyForInterface} should be
able to upcall methods by reference to their superclass, as any normal
Python class can.
"""
class YayableWrapper(proxyForInterface(IProxiedInterface)):
"""
This class does not override any functionality.
"""
class EnhancedWrapper(YayableWrapper):
"""
This class overrides the 'yay' method.
"""
wrappedYays = 1
def yay(self, *a, **k):
self.wrappedYays += 1
return YayableWrapper.yay(self, *a, **k) + 7
yayable = Yayable()
wrapper = EnhancedWrapper(yayable)
self.assertEqual(wrapper.yay(3, 4, x=5, y=6), 8)
self.assertEqual(yayable.yayArgs,
[((3, 4), dict(x=5, y=6))])
def test_interfaceInheritance(self):
"""
Proxies of subinterfaces generated with proxyForInterface should allow
access to attributes of both the child and the base interfaces.
"""
proxyClass = proxyForInterface(IProxiedSubInterface)
booable = Booable()
proxy = proxyClass(booable)
proxy.yay()
proxy.boo()
self.assertTrue(booable.yayed)
self.assertTrue(booable.booed)
def test_attributeCustomization(self):
"""
The original attribute name can be customized via the
C{originalAttribute} argument of L{proxyForInterface}: the attribute
should change, but the methods of the original object should still be
callable, and the attributes still accessible.
"""
yayable = Yayable()
yayable.ifaceAttribute = object()
proxy = proxyForInterface(
IProxiedInterface, originalAttribute='foo')(yayable)
self.assertIs(proxy.foo, yayable)
# Check the behavior
self.assertEqual(proxy.yay(), 1)
self.assertIs(proxy.ifaceAttribute, yayable.ifaceAttribute)
thingy = object()
proxy.ifaceAttribute = thingy
self.assertIs(yayable.ifaceAttribute, thingy)
del proxy.ifaceAttribute
self.assertFalse(hasattr(yayable, 'ifaceAttribute'))
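# A minimal sketch of proxyForInterface using only behaviour shown above: the
# generated class forwards declared methods and attributes to the wrapped object,
# which stays reachable as .original.  ICounter/Counter are illustrative names.
from zope.interface import Interface, Attribute, implementer
from twisted.python.components import proxyForInterface

class ICounter(Interface):
    value = Attribute("The current count.")
    def increment():
        """Add one to the count."""

@implementer(ICounter)
class Counter(object):
    value = 0
    def increment(self):
        self.value += 1
        return self.value

proxy = proxyForInterface(ICounter)(Counter())
proxy.increment()                 # forwarded to the wrapped Counter
assert proxy.value == 1           # declared attributes are proxied as well
assert proxy.original.value == 1  # the wrapped object is always available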

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -0,0 +1,55 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.dist3}.
"""
from __future__ import absolute_import, division
import os
import twisted
from twisted.trial.unittest import TestCase
from twisted.python.compat import _PY3
from twisted.python._setup import notPortedModules
class ModulesToInstallTests(TestCase):
"""
Tests for L{notPortedModules}.
"""
def test_exist(self):
"""
All modules listed in L{notPortedModules} exist on Py2.
"""
root = os.path.dirname(os.path.dirname(twisted.__file__))
for module in notPortedModules:
segments = module.split(".")
segments[-1] += ".py"
path = os.path.join(root, *segments)
alternateSegments = module.split(".") + ["__init__.py"]
packagePath = os.path.join(root, *alternateSegments)
self.assertTrue(os.path.exists(path) or
os.path.exists(packagePath),
"Module {0} does not exist".format(module))
def test_notexist(self):
"""
All modules listed in L{notPortedModules} do not exist on Py3.
"""
root = os.path.dirname(os.path.dirname(twisted.__file__))
for module in notPortedModules:
segments = module.split(".")
segments[-1] += ".py"
path = os.path.join(root, *segments)
alternateSegments = module.split(".") + ["__init__.py"]
packagePath = os.path.join(root, *alternateSegments)
self.assertFalse(os.path.exists(path) or
os.path.exists(packagePath),
"Module {0} exists".format(module))
if _PY3:
test_exist.skip = "Only on Python 2"
else:
test_notexist.skip = "Only on Python 3"

View file

@@ -0,0 +1,413 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.fakepwd}.
"""
try:
import pwd
except ImportError:
pwd = None
try:
import spwd
except ImportError:
spwd = None
import os
from operator import getitem
from twisted.trial.unittest import TestCase
from twisted.python.fakepwd import UserDatabase, ShadowDatabase
SYSTEM_UID_MAX = 999
def findInvalidUID():
"""
By convention, UIDs less than 1000 are reserved for the system. A system
which allocated every single one of those UIDs would likely have practical
problems with allocating new ones, so let's assume that we'll be able to
find one. (If we don't, this will wrap around to negative values and
I{eventually} find something.)
@return: a user ID which does not exist on the local system. Or, on
systems without a L{pwd} module, return C{SYSTEM_UID_MAX}.
"""
guess = SYSTEM_UID_MAX
if pwd is not None:
while True:
try:
pwd.getpwuid(guess)
except KeyError:
break
else:
guess -= 1
return guess
INVALID_UID = findInvalidUID()
class UserDatabaseTestsMixin(object):
"""
L{UserDatabaseTestsMixin} defines tests which apply to any user database
implementation. Subclasses should mix it in, implement C{setUp} to create
C{self.database} bound to a user database instance, and implement
C{getExistingUserInfo} to return information about a user (such information
should be unique per test method).
"""
def test_getpwuid(self):
"""
I{getpwuid} accepts a uid and returns the user record associated with
it.
"""
for i in range(2):
# Get some user which exists in the database.
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
# Now try to look it up and make sure the result is correct.
entry = self.database.getpwuid(uid)
self.assertEqual(entry.pw_name, username)
self.assertEqual(entry.pw_passwd, password)
self.assertEqual(entry.pw_uid, uid)
self.assertEqual(entry.pw_gid, gid)
self.assertEqual(entry.pw_gecos, gecos)
self.assertEqual(entry.pw_dir, dir)
self.assertEqual(entry.pw_shell, shell)
def test_noSuchUID(self):
"""
I{getpwuid} raises L{KeyError} when passed a uid which does not exist
in the user database.
"""
self.assertRaises(KeyError, self.database.getpwuid, INVALID_UID)
def test_getpwnam(self):
"""
I{getpwnam} accepts a username and returns the user record associated
with it.
"""
for i in range(2):
# Get some user which exists in the database.
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
# Now try to look it up and make sure the result is correct.
entry = self.database.getpwnam(username)
self.assertEqual(entry.pw_name, username)
self.assertEqual(entry.pw_passwd, password)
self.assertEqual(entry.pw_uid, uid)
self.assertEqual(entry.pw_gid, gid)
self.assertEqual(entry.pw_gecos, gecos)
self.assertEqual(entry.pw_dir, dir)
self.assertEqual(entry.pw_shell, shell)
def test_noSuchName(self):
"""
I{getpwnam} raises L{KeyError} when passed a username which does not
exist in the user database.
"""
self.assertRaises(
KeyError, self.database.getpwnam,
'no' 'such' 'user' 'exists' 'the' 'name' 'is' 'too' 'long' 'and' 'has'
'\1' 'in' 'it' 'too')
def test_recordLength(self):
"""
The user record returned by I{getpwuid}, I{getpwnam}, and I{getpwall}
has a length.
"""
db = self.database
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
for entry in [db.getpwuid(uid), db.getpwnam(username), db.getpwall()[0]]:
self.assertIsInstance(len(entry), int)
self.assertEqual(len(entry), 7)
def test_recordIndexable(self):
"""
The user record returned by I{getpwuid}, I{getpwnam}, and I{getpwall}
is indexable, with successive indexes starting from 0 corresponding to
the values of the C{pw_name}, C{pw_passwd}, C{pw_uid}, C{pw_gid},
C{pw_gecos}, C{pw_dir}, and C{pw_shell} attributes, respectively.
"""
db = self.database
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
for entry in [db.getpwuid(uid), db.getpwnam(username), db.getpwall()[0]]:
self.assertEqual(entry[0], username)
self.assertEqual(entry[1], password)
self.assertEqual(entry[2], uid)
self.assertEqual(entry[3], gid)
self.assertEqual(entry[4], gecos)
self.assertEqual(entry[5], dir)
self.assertEqual(entry[6], shell)
self.assertEqual(len(entry), len(list(entry)))
self.assertRaises(IndexError, getitem, entry, 7)
class UserDatabaseTests(TestCase, UserDatabaseTestsMixin):
"""
Tests for L{UserDatabase}.
"""
def setUp(self):
"""
Create a L{UserDatabase} with no user data in it.
"""
self.database = UserDatabase()
self._counter = SYSTEM_UID_MAX + 1
def getExistingUserInfo(self):
"""
Add a new user to C{self.database} and return its information.
"""
self._counter += 1
suffix = '_' + str(self._counter)
username = 'username' + suffix
password = 'password' + suffix
uid = self._counter
gid = self._counter + 1000
gecos = 'gecos' + suffix
dir = 'dir' + suffix
shell = 'shell' + suffix
self.database.addUser(username, password, uid, gid, gecos, dir, shell)
return (username, password, uid, gid, gecos, dir, shell)
def test_addUser(self):
"""
L{UserDatabase.addUser} accepts seven arguments, one for each field of
a L{pwd.struct_passwd}, and makes the new record available via
L{UserDatabase.getpwuid}, L{UserDatabase.getpwnam}, and
L{UserDatabase.getpwall}.
"""
username = 'alice'
password = 'secr3t'
uid = 123
gid = 456
gecos = 'Alice,,,'
home = '/users/alice'
shell = '/usr/bin/foosh'
db = self.database
db.addUser(username, password, uid, gid, gecos, home, shell)
for [entry] in [[db.getpwuid(uid)], [db.getpwnam(username)],
db.getpwall()]:
self.assertEqual(entry.pw_name, username)
self.assertEqual(entry.pw_passwd, password)
self.assertEqual(entry.pw_uid, uid)
self.assertEqual(entry.pw_gid, gid)
self.assertEqual(entry.pw_gecos, gecos)
self.assertEqual(entry.pw_dir, home)
self.assertEqual(entry.pw_shell, shell)
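# A minimal sketch of the in-memory user database verified above; the field
# values mirror those used in test_addUser and are purely illustrative.
from twisted.python.fakepwd import UserDatabase

db = UserDatabase()
db.addUser('alice', 'secr3t', 123, 456, 'Alice,,,', '/users/alice', '/usr/bin/foosh')
entry = db.getpwnam('alice')
assert entry.pw_uid == 123
assert db.getpwuid(123).pw_dir == '/users/alice'
assert len(db.getpwall()) == 1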
class PwdModuleTests(TestCase, UserDatabaseTestsMixin):
"""
L{PwdModuleTests} runs the tests defined by L{UserDatabaseTestsMixin}
against the built-in C{pwd} module. This serves to verify that
L{UserDatabase} is really a fake of that API.
"""
if pwd is None:
skip = "Cannot verify UserDatabase against pwd without pwd"
else:
database = pwd
def setUp(self):
self._users = iter(self.database.getpwall())
self._uids = set()
def getExistingUserInfo(self):
"""
Read and return the next record from C{self._users}, filtering out
any records with previously seen uid values (as these cannot be
found with C{getpwuid} and only cause trouble).
"""
while True:
entry = next(self._users)
uid = entry.pw_uid
if uid not in self._uids:
self._uids.add(uid)
return entry
class ShadowDatabaseTestsMixin(object):
"""
L{ShadowDatabaseTestsMixin} defines tests which apply to any shadow user
database implementation. Subclasses should mix it in, implement C{setUp} to
create C{self.database} bound to a shadow user database instance, and
implement C{getExistingUserInfo} to return information about a user (such
information should be unique per test method).
"""
def test_getspnam(self):
"""
L{getspnam} accepts a username and returns the user record associated
with it.
"""
for i in range(2):
# Get some user which exists in the database.
(username, password, lastChange, min, max, warn, inact, expire,
flag) = self.getExistingUserInfo()
entry = self.database.getspnam(username)
self.assertEqual(entry.sp_nam, username)
self.assertEqual(entry.sp_pwd, password)
self.assertEqual(entry.sp_lstchg, lastChange)
self.assertEqual(entry.sp_min, min)
self.assertEqual(entry.sp_max, max)
self.assertEqual(entry.sp_warn, warn)
self.assertEqual(entry.sp_inact, inact)
self.assertEqual(entry.sp_expire, expire)
self.assertEqual(entry.sp_flag, flag)
def test_noSuchName(self):
"""
I{getspnam} raises L{KeyError} when passed a username which does not
exist in the user database.
"""
self.assertRaises(KeyError, self.database.getspnam, "alice")
def test_recordLength(self):
"""
The shadow user record returned by I{getspnam} and I{getspall} has a
length.
"""
db = self.database
username = self.getExistingUserInfo()[0]
for entry in [db.getspnam(username), db.getspall()[0]]:
self.assertIsInstance(len(entry), int)
self.assertEqual(len(entry), 9)
def test_recordIndexable(self):
"""
The shadow user record returned by I{getspnam} and I{getspall} is
indexable, with successive indexes starting from 0 corresponding to the
values of the C{sp_nam}, C{sp_pwd}, C{sp_lstchg}, C{sp_min}, C{sp_max},
C{sp_warn}, C{sp_inact}, C{sp_expire}, and C{sp_flag} attributes,
respectively.
"""
db = self.database
(username, password, lastChange, min, max, warn, inact, expire,
flag) = self.getExistingUserInfo()
for entry in [db.getspnam(username), db.getspall()[0]]:
self.assertEqual(entry[0], username)
self.assertEqual(entry[1], password)
self.assertEqual(entry[2], lastChange)
self.assertEqual(entry[3], min)
self.assertEqual(entry[4], max)
self.assertEqual(entry[5], warn)
self.assertEqual(entry[6], inact)
self.assertEqual(entry[7], expire)
self.assertEqual(entry[8], flag)
self.assertEqual(len(entry), len(list(entry)))
self.assertRaises(IndexError, getitem, entry, 9)
class ShadowDatabaseTests(TestCase, ShadowDatabaseTestsMixin):
"""
Tests for L{ShadowDatabase}.
"""
def setUp(self):
"""
Create a L{ShadowDatabase} with no user data in it.
"""
self.database = ShadowDatabase()
self._counter = 0
def getExistingUserInfo(self):
"""
Add a new user to C{self.database} and return its information.
"""
self._counter += 1
suffix = '_' + str(self._counter)
username = 'username' + suffix
password = 'password' + suffix
lastChange = self._counter + 1
min = self._counter + 2
max = self._counter + 3
warn = self._counter + 4
inact = self._counter + 5
expire = self._counter + 6
flag = self._counter + 7
self.database.addUser(username, password, lastChange, min, max, warn,
inact, expire, flag)
return (username, password, lastChange, min, max, warn, inact,
expire, flag)
def test_addUser(self):
"""
L{ShadowDatabase.addUser} accepts nine arguments, one for each field of
a L{spwd.struct_spwd}, and makes the new record available via
L{ShadowDatabase.getspnam} and L{ShadowDatabase.getspall}.
"""
username = 'alice'
password = 'secr3t'
lastChange = 17
min = 42
max = 105
warn = 12
inact = 3
expire = 400
flag = 3
db = self.database
db.addUser(username, password, lastChange, min, max, warn, inact,
expire, flag)
for [entry] in [[db.getspnam(username)], db.getspall()]:
self.assertEqual(entry.sp_nam, username)
self.assertEqual(entry.sp_pwd, password)
self.assertEqual(entry.sp_lstchg, lastChange)
self.assertEqual(entry.sp_min, min)
self.assertEqual(entry.sp_max, max)
self.assertEqual(entry.sp_warn, warn)
self.assertEqual(entry.sp_inact, inact)
self.assertEqual(entry.sp_expire, expire)
self.assertEqual(entry.sp_flag, flag)
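# A minimal sketch of the shadow counterpart verified above; addUser takes the
# nine spwd-style fields used in test_addUser, and getspnam/getspall expose them.
from twisted.python.fakepwd import ShadowDatabase

shadow = ShadowDatabase()
shadow.addUser('alice', 'secr3t', 17, 42, 105, 12, 3, 400, 3)
entry = shadow.getspnam('alice')
assert entry.sp_pwd == 'secr3t'
assert entry.sp_lstchg == 17
assert len(shadow.getspall()) == 1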
class SPwdModuleTests(TestCase, ShadowDatabaseTestsMixin):
"""
L{SPwdModuleTests} runs the tests defined by L{ShadowDatabaseTestsMixin}
against the built-in C{spwd} module. This serves to verify that
L{ShadowDatabase} is really a fake of that API.
"""
if spwd is None:
skip = "Cannot verify ShadowDatabase against spwd without spwd"
elif os.getuid() != 0:
skip = "Cannot access shadow user database except as root"
else:
database = spwd
def setUp(self):
self._users = iter(self.database.getspall())
def getExistingUserInfo(self):
"""
Read and return the next record from C{self._users}.
"""
return next(self._users)

View file

@@ -0,0 +1,44 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.htmlizer}.
"""
from io import BytesIO
from twisted.trial.unittest import TestCase
from twisted.python.htmlizer import filter
class FilterTests(TestCase):
"""
Tests for L{twisted.python.htmlizer.filter}.
"""
def test_empty(self):
"""
If passed an empty input file, L{filter} writes a I{pre} tag containing
only an end marker to the output file.
"""
input = BytesIO(b"")
output = BytesIO()
filter(input, output)
self.assertEqual(
output.getvalue(),
b'<pre><span class="py-src-endmarker"></span></pre>\n')
def test_variable(self):
"""
If passed an input file containing a variable access, L{filter} writes
a I{pre} tag containing a I{py-src-variable} span containing the
variable.
"""
input = BytesIO(b"foo\n")
output = BytesIO()
filter(input, output)
self.assertEqual(
output.getvalue(),
b'<pre><span class="py-src-variable">foo</span>'
b'<span class="py-src-newline">\n'
b'</span><span class="py-src-endmarker"></span></pre>\n')
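# A minimal sketch of htmlizer.filter as exercised above: it reads Python source
# from one binary file object and writes <pre>/<span> markup to another.
from io import BytesIO
from twisted.python.htmlizer import filter

source = BytesIO(b"foo\n")
markup = BytesIO()
filter(source, markup)
print(markup.getvalue())   # b'<pre><span class="py-src-variable">foo</span>...'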

View file

@@ -0,0 +1,121 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python._inotify}.
"""
from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.python.runtime import platform
if platform.supportsINotify():
from ctypes import c_int, c_uint32, c_char_p
from twisted.python import _inotify
from twisted.python._inotify import (
INotifyError, initializeModule, init, add)
else:
_inotify = None
class INotifyTests(TestCase):
"""
Tests for L{twisted.python._inotify}.
"""
if _inotify is None:
skip = "This platform doesn't support INotify."
def test_missingInit(self):
"""
If the I{libc} object passed to L{initializeModule} has no
C{inotify_init} attribute, L{ImportError} is raised.
"""
class libc:
def inotify_add_watch(self):
pass
def inotify_rm_watch(self):
pass
self.assertRaises(ImportError, initializeModule, libc())
def test_missingAdd(self):
"""
If the I{libc} object passed to L{initializeModule} has no
C{inotify_add_watch} attribute, L{ImportError} is raised.
"""
class libc:
def inotify_init(self):
pass
def inotify_rm_watch(self):
pass
self.assertRaises(ImportError, initializeModule, libc())
def test_missingRemove(self):
"""
If the I{libc} object passed to L{initializeModule} has no
C{inotify_rm_watch} attribute, L{ImportError} is raised.
"""
class libc:
def inotify_init(self):
pass
def inotify_add_watch(self):
pass
self.assertRaises(ImportError, initializeModule, libc())
def test_setTypes(self):
"""
If the I{libc} object passed to L{initializeModule} has all of the
necessary attributes, it sets the C{argtypes} and C{restype} attributes
of the three ctypes methods used from libc.
"""
class libc:
def inotify_init(self):
pass
inotify_init = staticmethod(inotify_init)
def inotify_rm_watch(self):
pass
inotify_rm_watch = staticmethod(inotify_rm_watch)
def inotify_add_watch(self):
pass
inotify_add_watch = staticmethod(inotify_add_watch)
c = libc()
initializeModule(c)
self.assertEqual(c.inotify_init.argtypes, [])
self.assertEqual(c.inotify_init.restype, c_int)
self.assertEqual(c.inotify_rm_watch.argtypes, [c_int, c_int])
self.assertEqual(c.inotify_rm_watch.restype, c_int)
self.assertEqual(
c.inotify_add_watch.argtypes, [c_int, c_char_p, c_uint32])
self.assertEqual(c.inotify_add_watch.restype, c_int)
def test_failedInit(self):
"""
If C{inotify_init} returns a negative number, L{init} raises
L{INotifyError}.
"""
class libc:
def inotify_init(self):
return -1
self.patch(_inotify, 'libc', libc())
self.assertRaises(INotifyError, init)
def test_failedAddWatch(self):
"""
If C{inotify_add_watch} returns a negative number, L{add}
raises L{INotifyError}.
"""
class libc:
def inotify_add_watch(self, fd, path, mask):
return -1
self.patch(_inotify, 'libc', libc())
self.assertRaises(INotifyError, add, 3, FilePath('/foo'), 0)
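# A minimal, Linux-only sketch of the low-level wrapper exercised above: init()
# returns an inotify file descriptor and add() registers a watch on a FilePath.
# The mask value 0x100 (IN_CREATE in the kernel headers) is an assumption made
# for the example; the tests above only ever pass 0.
import os
from twisted.python.filepath import FilePath
from twisted.python import _inotify

fd = _inotify.init()
try:
    wd = _inotify.add(fd, FilePath(b"/tmp"), 0x100)   # watch /tmp for created entries
finally:
    os.close(fd)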

File diff suppressed because it is too large

View file

@@ -0,0 +1,236 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.runtime}.
"""
from __future__ import division, absolute_import
import sys
from twisted.python.reflect import namedModule
from twisted.trial.util import suppress as SUPRESS
from twisted.trial.unittest import SynchronousTestCase
from twisted.python.runtime import Platform, shortPythonVersion
class PythonVersionTests(SynchronousTestCase):
"""
Tests the shortPythonVersion method.
"""
def test_shortPythonVersion(self):
"""
Verify that the Python version is returned correctly.
"""
ver = shortPythonVersion().split('.')
for i in range(3):
self.assertEqual(int(ver[i]), sys.version_info[i])
class PlatformTests(SynchronousTestCase):
"""
Tests for the default L{Platform} initializer.
"""
isWinNTDeprecationMessage = ('twisted.python.runtime.Platform.isWinNT was '
'deprecated in Twisted 13.0. Use Platform.isWindows instead.')
def test_isKnown(self):
"""
L{Platform.isKnown} returns a boolean indicating whether this is one of
the L{runtime.knownPlatforms}.
"""
platform = Platform()
self.assertTrue(platform.isKnown())
def test_isVistaConsistency(self):
"""
Verify consistency of L{Platform.isVista}: it can only be C{True} if
L{Platform.isWinNT} and L{Platform.isWindows} are C{True}.
"""
platform = Platform()
if platform.isVista():
self.assertTrue(platform.isWinNT())
self.assertTrue(platform.isWindows())
self.assertFalse(platform.isMacOSX())
def test_isMacOSXConsistency(self):
"""
L{Platform.isMacOSX} can only return C{True} if L{Platform.getType}
returns C{'posix'}.
"""
platform = Platform()
if platform.isMacOSX():
self.assertEqual(platform.getType(), 'posix')
def test_isLinuxConsistency(self):
"""
L{Platform.isLinux} can only return C{True} if L{Platform.getType}
returns C{'posix'} and L{sys.platform} starts with C{"linux"}.
"""
platform = Platform()
if platform.isLinux():
self.assertTrue(sys.platform.startswith("linux"))
def test_isWinNT(self):
"""
L{Platform.isWinNT} can return only C{False} or C{True} and can not
return C{True} if L{Platform.getType} is not C{"win32"}.
"""
platform = Platform()
isWinNT = platform.isWinNT()
self.assertIn(isWinNT, (False, True))
if platform.getType() != "win32":
self.assertFalse(isWinNT)
test_isWinNT.suppress = [SUPRESS(category=DeprecationWarning,
message=isWinNTDeprecationMessage)]
def test_isWinNTDeprecated(self):
"""
L{Platform.isWinNT} is deprecated in favor of L{platform.isWindows}.
"""
platform = Platform()
platform.isWinNT()
warnings = self.flushWarnings([self.test_isWinNTDeprecated])
self.assertEqual(len(warnings), 1)
self.assertEqual(
warnings[0]['message'], self.isWinNTDeprecationMessage)
def test_supportsThreads(self):
"""
L{Platform.supportsThreads} returns C{True} if threads can be created in
this runtime, C{False} otherwise.
"""
# It's difficult to test both cases of this without faking the threading
# module. Perhaps an adequate test is to just test the behavior with
# the current runtime, whatever that happens to be.
try:
namedModule('threading')
except ImportError:
self.assertFalse(Platform().supportsThreads())
else:
self.assertTrue(Platform().supportsThreads())
class ForeignPlatformTests(SynchronousTestCase):
"""
Tests for L{Platform} based overridden initializer values.
"""
def test_getType(self):
"""
If an operating system name is supplied to L{Platform}'s initializer,
L{Platform.getType} returns the platform type which corresponds to that
name.
"""
self.assertEqual(Platform('nt').getType(), 'win32')
self.assertEqual(Platform('ce').getType(), 'win32')
self.assertEqual(Platform('posix').getType(), 'posix')
self.assertEqual(Platform('java').getType(), 'java')
def test_isMacOSX(self):
"""
If a system platform name is supplied to L{Platform}'s initializer, it
is used to determine the result of L{Platform.isMacOSX}, which returns
C{True} for C{"darwin"}, C{False} otherwise.
"""
self.assertTrue(Platform(None, 'darwin').isMacOSX())
self.assertFalse(Platform(None, 'linux2').isMacOSX())
self.assertFalse(Platform(None, 'win32').isMacOSX())
def test_isLinux(self):
"""
If a system platform name is supplied to L{Platform}'s initializer, it
is used to determine the result of L{Platform.isLinux}, which returns
C{True} for values beginning with C{"linux"}, C{False} otherwise.
"""
self.assertFalse(Platform(None, 'darwin').isLinux())
self.assertTrue(Platform(None, 'linux').isLinux())
self.assertTrue(Platform(None, 'linux2').isLinux())
self.assertTrue(Platform(None, 'linux3').isLinux())
self.assertFalse(Platform(None, 'win32').isLinux())
class DockerPlatformTests(SynchronousTestCase):
"""
Tests for L{twisted.python.runtime.Platform.isDocker}.
"""
def test_noChecksOnLinux(self):
"""
If the platform is not Linux, C{isDocker()} always returns L{False}.
"""
platform = Platform(None, 'win32')
self.assertFalse(platform.isDocker())
def test_noCGroups(self):
"""
If the platform is Linux, and the cgroups file in C{/proc} does not
exist, C{isDocker()} returns L{False}
"""
platform = Platform(None, 'linux')
self.assertFalse(platform.isDocker(_initCGroupLocation="fakepath"))
def test_cgroupsSuggestsDocker(self):
"""
If the platform is Linux, and the cgroups file (faked out here) exists,
and one of the paths starts with C{/docker/}, C{isDocker()} returns
C{True}.
"""
cgroupsFile = self.mktemp()
with open(cgroupsFile, 'wb') as f:
# real cgroups file from inside a Debian 7 docker container
f.write(b"""10:debug:/
9:net_prio:/
8:perf_event:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f
7:net_cls:/
6:freezer:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f
5:devices:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f
4:blkio:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f
3:cpuacct:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f
2:cpu:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f
1:cpuset:/docker/104155a6453cb67590027e397dc90fc25a06a7508403c797bc89ea43adf8d35f""")
platform = Platform(None, 'linux')
self.assertTrue(platform.isDocker(_initCGroupLocation=cgroupsFile))
def test_cgroupsSuggestsRealSystem(self):
"""
If the platform is Linux, and the cgroups file (faked out here) exists,
and none of the paths starts with C{/docker/}, C{isDocker()} returns
C{False}.
"""
cgroupsFile = self.mktemp()
with open(cgroupsFile, 'wb') as f:
# real cgroups file from a Fedora 17 system
f.write(b"""9:perf_event:/
8:blkio:/
7:net_cls:/
6:freezer:/
5:devices:/
4:memory:/
3:cpuacct,cpu:/
2:cpuset:/
1:name=systemd:/system""")
platform = Platform(None, 'linux')
self.assertFalse(platform.isDocker(_initCGroupLocation=cgroupsFile))
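# A minimal sketch of the Platform queries exercised above.  The zero-argument
# constructor inspects the running system; passing explicit names (as the tests
# do) forces a particular answer.
from twisted.python.runtime import Platform, shortPythonVersion

current = Platform()
print(shortPythonVersion())    # dotted interpreter version, e.g. "2.7.18"
print(current.getType())       # 'posix', 'win32', or 'java'
print(current.isLinux(), current.isMacOSX(), current.isWindows())
print(current.isDocker())      # cgroup-based guess; always False off Linux

assert Platform(None, 'linux3').isLinux()
assert Platform('nt').getType() == 'win32'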

View file

@@ -0,0 +1,793 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.sendmsg}.
"""
import sys
import errno
import warnings
from os import devnull, pipe, read, close, pathsep
from struct import pack
from socket import SOL_SOCKET, AF_INET, AF_INET6, socket, error
try:
from socket import AF_UNIX, socketpair
except ImportError:
nonUNIXSkip = "Platform does not support AF_UNIX sockets"
else:
nonUNIXSkip = None
from twisted.internet import reactor
from twisted.internet.defer import Deferred, inlineCallbacks
from twisted.internet.error import ProcessDone
from twisted.internet.protocol import ProcessProtocol
from twisted.python.compat import _PY3, intToBytes, bytesEnviron
from twisted.python.filepath import FilePath
from twisted.python.runtime import platform
from twisted.trial.unittest import TestCase
if platform.isLinux():
from socket import MSG_DONTWAIT
dontWaitSkip = None
else:
# It would be nice to be able to test flags on more platforms, but finding
# a flag that works *at all* is somewhat challenging.
dontWaitSkip = "MSG_DONTWAIT is only known to work as intended on Linux"
try:
from twisted.python.sendmsg import sendmsg, recvmsg
from twisted.python.sendmsg import SCM_RIGHTS, getSocketFamily
except ImportError:
importSkip = "Platform doesn't support sendmsg."
else:
importSkip = None
try:
from twisted.python.sendmsg import send1msg, recv1msg
from twisted.python.sendmsg import getsockfam
except ImportError:
CModuleImportSkip = "Cannot import twisted.python.sendmsg"
else:
CModuleImportSkip = None
class _FDHolder(object):
"""
A wrapper around a FD that will remember if it has been closed or not.
"""
def __init__(self, fd):
self._fd = fd
def fileno(self):
"""
Return the fileno of this FD.
"""
return self._fd
def close(self):
"""
Close the FD. If it's already been closed, do nothing.
"""
if self._fd:
close(self._fd)
self._fd = None
def __del__(self):
"""
If C{self._fd} is unclosed, raise a warning.
"""
if self._fd:
if not _PY3:
ResourceWarning = Warning
warnings.warn("FD %s was not closed!" % (self._fd,),
ResourceWarning)
self.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def _makePipe():
"""
Create a pipe, and return the two FDs wrapped in L{_FDHolders}.
"""
r, w = pipe()
return (_FDHolder(r), _FDHolder(w))
class ExitedWithStderr(Exception):
"""
A process exited with some stderr.
"""
def __str__(self):
"""
Dump the errors in a pretty way in the event of a subprocess traceback.
"""
result = b'\n'.join([b''] + list(self.args))
if _PY3:
result = repr(result)
return result
class StartStopProcessProtocol(ProcessProtocol):
"""
An L{IProcessProtocol} with a Deferred for events where the subprocess
starts and stops.
@ivar started: A L{Deferred} which fires with this protocol's
L{IProcessTransport} provider when it is connected to one.
@ivar stopped: A L{Deferred} which fires with the process output or a
failure if the process produces output on standard error.
@ivar output: A C{str} used to accumulate standard output.
@ivar errors: A C{str} used to accumulate standard error.
"""
def __init__(self):
self.started = Deferred()
self.stopped = Deferred()
self.output = b''
self.errors = b''
def connectionMade(self):
self.started.callback(self.transport)
def outReceived(self, data):
self.output += data
def errReceived(self, data):
self.errors += data
def processEnded(self, reason):
if reason.check(ProcessDone):
self.stopped.callback(self.output)
else:
self.stopped.errback(ExitedWithStderr(
self.errors, self.output))
def _spawn(script, outputFD):
"""
Start a script that is a peer of this test as a subprocess.
@param script: the module name of the script in this directory (no
package prefix, no '.py')
@type script: C{str}
@rtype: L{StartStopProcessProtocol}
"""
pyExe = FilePath(sys.executable).asBytesMode().path
env = bytesEnviron()
env[b"PYTHONPATH"] = FilePath(
pathsep.join(sys.path)).asBytesMode().path
sspp = StartStopProcessProtocol()
reactor.spawnProcess(
sspp, pyExe, [
pyExe,
FilePath(__file__).sibling(script + ".py").asBytesMode().path,
intToBytes(outputFD),
],
env=env,
childFDs={0: "w", 1: "r", 2: "r", outputFD: outputFD}
)
return sspp
class BadList(list):
"""
A list which cannot be iterated sometimes.
This is a C{list} subclass to get past the type check in L{send1msg}, not
as an example of how real programs might want to interact with L{send1msg}
(or anything else). A custom C{list} subclass makes it easier to trigger
certain error cases in the implementation.
@ivar iterate: A flag which indicates whether an instance of L{BadList}
will allow iteration over itself or not. If C{False}, an attempt to
iterate over the instance will raise an exception.
"""
iterate = True
def __iter__(self):
"""
Allow normal list iteration, or raise an exception.
If C{self.iterate} is C{True}, it will be flipped to C{False} and then
normal iteration will proceed. If C{self.iterate} is C{False},
L{RuntimeError} is raised instead.
"""
if self.iterate:
self.iterate = False
return super(BadList, self).__iter__()
raise RuntimeError("Something bad happened")
class WorseList(list):
"""
A list which at first gives the appearance of being iterable, but then
raises an exception.
See L{BadList} for a warning about not writing code like this.
"""
def __iter__(self):
"""
Return an iterator which will raise an exception as soon as C{next} is
called on it.
"""
class BadIterator(object):
def next(self):
raise RuntimeError("This is a really bad case.")
return BadIterator()
class CModuleSendmsgTests(TestCase):
"""
Tests for sendmsg extension module and associated file-descriptor sending
functionality.
"""
if nonUNIXSkip is not None:
skip = nonUNIXSkip
elif CModuleImportSkip is not None:
skip = CModuleImportSkip
def setUp(self):
"""
Create a pair of UNIX sockets.
"""
self.input, self.output = socketpair(AF_UNIX)
def tearDown(self):
"""
Close the sockets opened by setUp.
"""
self.input.close()
self.output.close()
def test_sendmsgBadArguments(self):
"""
The argument types accepted by L{send1msg} are:
1. C{int}
2. read-only character buffer
3. C{int}
4. sequence
The 3rd and 4th arguments are optional. If fewer than two arguments or
more than four arguments are passed, or if any of the arguments passed
are not compatible with these types, L{TypeError} is raised.
"""
# Exercise the wrong number of arguments cases
self.assertRaises(TypeError, send1msg)
self.assertRaises(TypeError, send1msg, 1)
self.assertRaises(TypeError, send1msg,
1, "hello world", 2, [], object())
# Exercise the wrong type of arguments cases
self.assertRaises(TypeError, send1msg, object(), "hello world", 2, [])
self.assertRaises(TypeError, send1msg, 1, object(), 2, [])
self.assertRaises(TypeError, send1msg, 1, "hello world", object(), [])
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, object())
def test_badAncillaryIter(self):
"""
If iteration over the ancillary data list fails (at the point of the
C{__iter__} call), the exception with which it fails is propagated to
the caller of L{send1msg}.
"""
badList = BadList()
badList.append((1, 2, "hello world"))
badList.iterate = False
self.assertRaises(RuntimeError, send1msg, 1, "hello world", 2, badList)
# Hit the second iteration
badList.iterate = True
self.assertRaises(RuntimeError, send1msg, 1, "hello world", 2, badList)
def test_badAncillaryNext(self):
"""
If iteration over the ancillary data list fails (at the point of a
C{next} call), the exception with which it fails is propagated to the
caller of L{send1msg}.
"""
worseList = WorseList()
self.assertRaises(RuntimeError, send1msg,
1, "hello world", 2,worseList)
def test_sendmsgBadAncillaryItem(self):
"""
The ancillary data list contains three-tuples with element types of:
1. C{int}
2. C{int}
3. read-only character buffer
If a tuple in the ancillary data list does not contain elements of these types,
L{TypeError} is raised.
"""
# Exercise the wrong number of arguments cases
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [()])
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [(1,)])
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [(1, 2)])
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(1, 2, "goodbye", object())])
# Exercise the wrong type of arguments cases
exc = self.assertRaises(
TypeError, send1msg, 1, "hello world", 2, [object()])
self.assertEqual(
"send1msg argument 3 expected list of tuple, "
"got list containing object",
str(exc))
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(object(), 1, "goodbye")])
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(1, object(), "goodbye")])
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(1, 1, object())])
def test_syscallError(self):
"""
If the underlying C{sendmsg} call fails, L{send1msg} raises
L{socket.error} with its errno set to the underlying errno value.
"""
with open(devnull) as probe:
fd = probe.fileno()
exc = self.assertRaises(error, send1msg, fd, "hello, world")
self.assertEqual(exc.args[0], errno.EBADF)
def test_syscallErrorWithControlMessage(self):
"""
The behavior when the underlying C{sendmsg} call fails is the same
whether L{send1msg} is passed ancillary data or not.
"""
with open(devnull) as probe:
fd = probe.fileno()
exc = self.assertRaises(
error, send1msg, fd, "hello, world", 0, [(0, 0, "0123")])
self.assertEqual(exc.args[0], errno.EBADF)
def test_roundtrip(self):
"""
L{recv1msg} will retrieve a message sent via L{send1msg}.
"""
message = "hello, world!"
self.assertEqual(
len(message),
send1msg(self.input.fileno(), message, 0))
result = recv1msg(fd=self.output.fileno())
self.assertEqual(result, (message, 0, []))
def test_roundtripEmptyAncillary(self):
"""
L{send1msg} treats an empty ancillary data list the same way it treats
receiving no argument for the ancillary parameter at all.
"""
send1msg(self.input.fileno(), "hello, world!", 0, [])
result = recv1msg(fd=self.output.fileno())
self.assertEqual(result, ("hello, world!", 0, []))
def test_flags(self):
"""
The C{flags} argument to L{send1msg} is passed on to the underlying
C{sendmsg} call, to affect it in whatever way is defined by those
flags.
"""
# Just exercise one flag with simple, well-known behavior. MSG_DONTWAIT
# makes the send a non-blocking call, even if the socket is in blocking
# mode. See also test_flags in RecvmsgTests
for i in range(8 * 1024):
try:
send1msg(self.input.fileno(), "x" * 1024, MSG_DONTWAIT)
except error as e:
self.assertEqual(e.args[0], errno.EAGAIN)
break
else:
self.fail(
"Failed to fill up the send buffer, "
"or maybe send1msg blocked for a while")
if dontWaitSkip is not None:
test_flags.skip = dontWaitSkip
def test_wrongTypeAncillary(self):
"""
L{send1msg} will show a helpful exception message when given the wrong
type of object for the 'ancillary' argument.
"""
error = self.assertRaises(TypeError,
send1msg, self.input.fileno(),
"hello, world!", 0, 4321)
self.assertEqual(str(error),
"send1msg argument 3 expected list, got int")
@inlineCallbacks
def test_sendSubProcessFD(self):
"""
Calling L{send1msg} with SOL_SOCKET, SCM_RIGHTS, and a platform-endian
packed file descriptor number should send that file descriptor to a
different process, where it can be retrieved by using L{recv1msg}.
"""
sspp = _spawn("cmodulepullpipe", self.output.fileno())
yield sspp.started
pipeOut, pipeIn = _makePipe()
self.addCleanup(pipeOut.close)
self.addCleanup(pipeIn.close)
with pipeIn:
send1msg(
self.input.fileno(), "blonk", 0,
[(SOL_SOCKET, SCM_RIGHTS, pack("i", pipeIn.fileno()))])
yield sspp.stopped
self.assertEqual(read(pipeOut.fileno(), 1024),
"Test fixture data: blonk.\n")
# Make sure that the pipe is actually closed now.
self.assertEqual(read(pipeOut.fileno(), 1024), "")
def test_sendmsgTwoAncillaryDoesNotSegfault(self):
"""
L{sendmsg} with two FDs in two separate ancillary entries
does not segfault.
"""
ancillary = [
(SOL_SOCKET, SCM_RIGHTS, pack("i", self.input.fileno())),
(SOL_SOCKET, SCM_RIGHTS, pack("i", self.output.fileno())),
]
try:
send1msg(self.input.fileno(), b"some data", 0, ancillary)
except error:
# Ok as long as it doesn't segfault.
pass
class CModuleRecvmsgTests(TestCase):
"""
Tests for L{recv1msg} (primarily error handling cases).
"""
if CModuleImportSkip is not None:
skip = CModuleImportSkip
def test_badArguments(self):
"""
The argument types accepted by L{recv1msg} are:
1. C{int}
2. C{int}
3. C{int}
4. C{int}
The 2nd, 3rd, and 4th arguments are optional. If fewer than one
argument or more than four arguments are passed, or if any of the
arguments passed are not compatible with these types, L{TypeError} is
raised.
"""
# Exercise the wrong number of arguments cases
self.assertRaises(TypeError, recv1msg)
self.assertRaises(TypeError, recv1msg, 1, 2, 3, 4, object())
# Exercise the wrong type of arguments cases
self.assertRaises(TypeError, recv1msg, object(), 2, 3, 4)
self.assertRaises(TypeError, recv1msg, 1, object(), 3, 4)
self.assertRaises(TypeError, recv1msg, 1, 2, object(), 4)
self.assertRaises(TypeError, recv1msg, 1, 2, 3, object())
def test_cmsgSpaceOverflow(self):
"""
L{recv1msg} raises L{OverflowError} if passed a value for the
C{cmsg_size} argument which exceeds C{SOCKLEN_MAX}.
"""
self.assertRaises(OverflowError, recv1msg, 0, 0, 0, 0x7FFFFFFF)
def test_syscallError(self):
"""
If the underlying C{recvmsg} call fails, L{recv1msg} raises
L{socket.error} with its errno set to the underlying errno value.
"""
with open(devnull) as probe:
fd = probe.fileno()
exc = self.assertRaises(error, recv1msg, fd)
self.assertEqual(exc.args[0], errno.EBADF)
def test_flags(self):
"""
The C{flags} argument to L{recv1msg} is passed on to the underlying
C{recvmsg} call, to affect it in whatever way is defined by those
flags.
"""
# See test_flags in SendmsgTests
reader, writer = socketpair(AF_UNIX)
exc = self.assertRaises(
error, recv1msg, reader.fileno(), MSG_DONTWAIT)
self.assertEqual(exc.args[0], errno.EAGAIN)
if dontWaitSkip is not None:
test_flags.skip = dontWaitSkip
class CModuleGetSocketFamilyTests(TestCase):
"""
Tests for L{getsockfam}, a helper which reveals the address family of an
arbitrary socket.
"""
if CModuleImportSkip is not None:
skip = CModuleImportSkip
def _socket(self, addressFamily):
"""
Create a new socket using the given address family and return that
socket's file descriptor. The socket will automatically be closed when
the test is torn down.
"""
s = socket(addressFamily)
self.addCleanup(s.close)
return s.fileno()
def test_badArguments(self):
"""
L{getsockfam} accepts a single C{int} argument. If it is called in
some other way, L{TypeError} is raised.
"""
self.assertRaises(TypeError, getsockfam)
self.assertRaises(TypeError, getsockfam, 1, 2)
self.assertRaises(TypeError, getsockfam, object())
def test_syscallError(self):
"""
If the underlying C{getsockname} call fails, L{getsockfam} raises
L{socket.error} with its errno set to the underlying errno value.
"""
with open(devnull) as probe:
fd = probe.fileno()
exc = self.assertRaises(error, getsockfam, fd)
self.assertEqual(errno.EBADF, exc.args[0])
def test_inet(self):
"""
When passed the file descriptor of a socket created with the C{AF_INET}
address family, L{getsockfam} returns C{AF_INET}.
"""
self.assertEqual(AF_INET, getsockfam(self._socket(AF_INET)))
def test_inet6(self):
"""
When passed the file descriptor of a socket created with the
C{AF_INET6} address family, L{getsockfam} returns C{AF_INET6}.
"""
self.assertEqual(AF_INET6, getsockfam(self._socket(AF_INET6)))
def test_unix(self):
"""
When passed the file descriptor of a socket created with the C{AF_UNIX}
address family, L{getsockfam} returns C{AF_UNIX}.
"""
self.assertEqual(AF_UNIX, getsockfam(self._socket(AF_UNIX)))
if nonUNIXSkip is not None:
test_unix.skip = nonUNIXSkip
class SendmsgTests(TestCase):
"""
Tests for the Python2/3 compatible L{sendmsg} interface.
"""
if importSkip is not None:
skip = importSkip
def setUp(self):
"""
Create a pair of UNIX sockets.
"""
self.input, self.output = socketpair(AF_UNIX)
def tearDown(self):
"""
Close the sockets opened by setUp.
"""
self.input.close()
self.output.close()
def test_syscallError(self):
"""
If the underlying C{sendmsg} call fails, L{send1msg} raises
L{socket.error} with its errno set to the underlying errno value.
"""
self.input.close()
exc = self.assertRaises(error, sendmsg, self.input, b"hello, world")
self.assertEqual(exc.args[0], errno.EBADF)
def test_syscallErrorWithControlMessage(self):
"""
The behavior when the underlying C{sendmsg} call fails is the same
whether L{sendmsg} is passed ancillary data or not.
"""
self.input.close()
exc = self.assertRaises(
error, sendmsg, self.input, b"hello, world", [(0, 0, b"0123")], 0)
self.assertEqual(exc.args[0], errno.EBADF)
def test_roundtrip(self):
"""
L{recvmsg} will retrieve a message sent via L{sendmsg}.
"""
message = b"hello, world!"
self.assertEqual(
len(message),
sendmsg(self.input, message))
result = recvmsg(self.output)
self.assertEqual(result.data, b"hello, world!")
self.assertEqual(result.flags, 0)
self.assertEqual(result.ancillary, [])
def test_shortsend(self):
"""
L{sendmsg} returns the number of bytes which it was able to send.
"""
message = b"x" * 1024 * 1024 * 16
self.input.setblocking(False)
sent = sendmsg(self.input, message)
# Sanity check - make sure we sent some of the message but not all of it,
# as the send buffer should have filled up. This won't work if the send
# buffer is large enough to hold the whole message, though.
self.assertTrue(sent < len(message))
received = recvmsg(self.output, len(message))
self.assertEqual(len(received[0]), sent)
def test_roundtripEmptyAncillary(self):
"""
L{sendmsg} treats an empty ancillary data list the same way it treats
receiving no argument for the ancillary parameter at all.
"""
sendmsg(self.input, b"hello, world!", [], 0)
result = recvmsg(self.output)
self.assertEqual(result, (b"hello, world!", [], 0))
def test_flags(self):
"""
The C{flags} argument to L{sendmsg} is passed on to the underlying
C{sendmsg} call, to affect it in whatever way is defined by those
flags.
"""
# Just exercise one flag with simple, well-known behavior. MSG_DONTWAIT
# makes the send a non-blocking call, even if the socket is in blocking
# mode. See also test_flags in RecvmsgTests
for i in range(8 * 1024):
try:
sendmsg(self.input, b"x" * 1024, flags=MSG_DONTWAIT)
except error as e:
self.assertEqual(e.args[0], errno.EAGAIN)
break
else:
self.fail(
"Failed to fill up the send buffer, "
"or maybe send1msg blocked for a while")
if dontWaitSkip is not None:
test_flags.skip = dontWaitSkip
@inlineCallbacks
def test_sendSubProcessFD(self):
"""
Calling L{sendmsg} with SOL_SOCKET, SCM_RIGHTS, and a platform-endian
packed file descriptor number should send that file descriptor to a
different process, where it can be retrieved by using L{recv1msg}.
"""
sspp = _spawn("pullpipe", self.output.fileno())
yield sspp.started
pipeOut, pipeIn = _makePipe()
self.addCleanup(pipeOut.close)
self.addCleanup(pipeIn.close)
with pipeIn:
sendmsg(
self.input, b"blonk",
[(SOL_SOCKET, SCM_RIGHTS, pack("i", pipeIn.fileno()))])
yield sspp.stopped
self.assertEqual(read(pipeOut.fileno(), 1024),
b"Test fixture data: blonk.\n")
# Make sure that the pipe is actually closed now.
self.assertEqual(read(pipeOut.fileno(), 1024), b"")
class GetSocketFamilyTests(TestCase):
"""
Tests for L{getSocketFamily}.
"""
if importSkip is not None:
skip = importSkip
def _socket(self, addressFamily):
"""
Create a new socket using the given address family and return that
socket. The socket will automatically be closed when
the test is torn down.
"""
s = socket(addressFamily)
self.addCleanup(s.close)
return s
def test_inet(self):
"""
When passed a socket created with the C{AF_INET} address family,
L{getSocketFamily} returns C{AF_INET}.
"""
self.assertEqual(AF_INET, getSocketFamily(self._socket(AF_INET)))
def test_inet6(self):
"""
When passed a socket created with the C{AF_INET6} address family,
L{getSocketFamily} returns C{AF_INET6}.
"""
self.assertEqual(AF_INET6, getSocketFamily(self._socket(AF_INET6)))
def test_unix(self):
"""
When passed a socket created with the C{AF_UNIX} address family,
L{getSocketFamily} returns C{AF_UNIX}.
"""
self.assertEqual(AF_UNIX, getSocketFamily(self._socket(AF_UNIX)))
if nonUNIXSkip is not None:
test_unix.skip = nonUNIXSkip

View file

@ -0,0 +1,401 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for parts of our release automation system.
"""
import os
from pkg_resources import parse_requirements
from setuptools.dist import Distribution
import twisted
from twisted.trial.unittest import SynchronousTestCase
from twisted.python import _setup, filepath
from twisted.python.compat import _PY3
from twisted.python._setup import (
BuildPy3,
getSetupArgs,
_longDescriptionArgsFromReadme,
ConditionalExtension,
_EXTRAS_REQUIRE,
)
class SetupTests(SynchronousTestCase):
"""
Tests for L{getSetupArgs}.
"""
def test_conditionalExtensions(self):
"""
L{getSetupArgs} will return the arguments with a custom build_ext command
which knows how to check whether the extensions should be built.
"""
good_ext = ConditionalExtension("whatever", ["whatever.c"],
condition=lambda b: True)
bad_ext = ConditionalExtension("whatever", ["whatever.c"],
condition=lambda b: False)
args = getSetupArgs(extensions=[good_ext, bad_ext], readme=None)
# ext_modules should be set even though it's not used. See comment
# in getSetupArgs
self.assertEqual(args["ext_modules"], [good_ext, bad_ext])
cmdclass = args["cmdclass"]
build_ext = cmdclass["build_ext"]
builder = build_ext(Distribution())
builder.prepare_extensions()
self.assertEqual(builder.extensions, [good_ext])
def test_win32Definition(self):
"""
When building on Windows NT, the WIN32 macro will be defined as 1 on
the extensions.
"""
ext = ConditionalExtension("whatever", ["whatever.c"],
define_macros=[("whatever", 2)])
args = getSetupArgs(extensions=[ext], readme=None)
builder = args["cmdclass"]["build_ext"](Distribution())
self.patch(os, "name", "nt")
builder.prepare_extensions()
self.assertEqual(ext.define_macros, [("whatever", 2), ("WIN32", 1)])
class OptionalDependenciesTests(SynchronousTestCase):
"""
Tests for L{_EXTRAS_REQUIRE}
"""
def test_distributeTakesExtrasRequire(self):
"""
Setuptools' Distribution object parses and stores its C{extras_require}
argument as an attribute.
Requirements for install_requires/setup_requires can be specified as:
* a single requirement as a string, such as:
{'im_an_extra_dependency': 'thing'}
* a series of requirements as a list, such as:
{'im_an_extra_dependency': ['thing']}
* a series of requirements as a multi-line string, such as:
{'im_an_extra_dependency': '''
thing
'''}
The extras need to be parsed with pkg_resources.parse_requirements(),
which returns a generator.
"""
extras = dict(im_an_extra_dependency="thing")
attrs = dict(extras_require=extras)
distribution = Distribution(attrs)
def canonicalizeExtras(myExtras):
parsedExtras = {}
for name, val in myExtras.items():
parsedExtras[name] = list(parse_requirements(val))
return parsedExtras
self.assertEqual(
canonicalizeExtras(extras),
canonicalizeExtras(distribution.extras_require)
)
def test_extrasRequireDictContainsKeys(self):
"""
L{_EXTRAS_REQUIRE} contains options for all documented extras: C{dev},
C{tls}, C{conch}, C{soap}, C{serial}, C{http2}, C{all_non_platform},
C{macos_platform}, C{osx_platform} (a compatibility alias), and
C{windows_platform}.
"""
self.assertIn('dev', _EXTRAS_REQUIRE)
self.assertIn('tls', _EXTRAS_REQUIRE)
self.assertIn('conch', _EXTRAS_REQUIRE)
self.assertIn('soap', _EXTRAS_REQUIRE)
self.assertIn('serial', _EXTRAS_REQUIRE)
self.assertIn('all_non_platform', _EXTRAS_REQUIRE)
self.assertIn('macos_platform', _EXTRAS_REQUIRE)
self.assertIn('osx_platform', _EXTRAS_REQUIRE) # Compat for macOS
self.assertIn('windows_platform', _EXTRAS_REQUIRE)
self.assertIn('http2', _EXTRAS_REQUIRE)
def test_extrasRequiresDevDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{dev} extra contains setuptools requirements for
the tools required for Twisted development.
"""
deps = _EXTRAS_REQUIRE['dev']
self.assertIn('pyflakes >= 1.0.0', deps)
self.assertIn('twisted-dev-tools >= 0.0.2', deps)
self.assertIn('python-subunit', deps)
self.assertIn('sphinx >= 1.3.1', deps)
if not _PY3:
self.assertIn('twistedchecker >= 0.4.0', deps)
self.assertIn('pydoctor >= 16.2.0', deps)
def test_extrasRequiresTlsDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{tls} extra contains setuptools requirements for
the packages required to make Twisted's transport layer security fully
work for both clients and servers.
"""
deps = _EXTRAS_REQUIRE['tls']
self.assertIn('pyopenssl >= 16.0.0', deps)
self.assertIn('service_identity >= 18.1.0', deps)
self.assertIn('idna >= 0.6, != 2.3', deps)
def test_extrasRequiresConchDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{conch} extra contains setuptools requirements
for the packages required to make Twisted Conch's secure shell server
work.
"""
deps = _EXTRAS_REQUIRE['conch']
self.assertIn('pyasn1', deps)
self.assertIn('cryptography >= 2.5', deps)
self.assertIn('appdirs >= 1.4.0', deps)
def test_extrasRequiresSoapDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{soap} extra contains setuptools requirements for
the packages required to make the C{twisted.web.soap} module function.
"""
self.assertIn(
'soappy',
_EXTRAS_REQUIRE['soap']
)
def test_extrasRequiresSerialDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{serial} extra contains setuptools requirements
for the packages required to make Twisted's serial support work.
"""
self.assertIn(
'pyserial >= 3.0',
_EXTRAS_REQUIRE['serial']
)
def test_extrasRequiresHttp2Deps(self):
"""
L{_EXTRAS_REQUIRE}'s C{http2} extra contains setuptools requirements
for the packages required to make Twisted HTTP/2 support work.
"""
deps = _EXTRAS_REQUIRE['http2']
self.assertIn('h2 >= 3.0, < 4.0', deps)
self.assertIn('priority >= 1.1.0, < 2.0', deps)
def test_extrasRequiresAllNonPlatformDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{all_non_platform} extra contains setuptools
requirements for all of Twisted's optional dependencies which work on
all supported operating systems.
"""
deps = _EXTRAS_REQUIRE['all_non_platform']
self.assertIn('pyopenssl >= 16.0.0', deps)
self.assertIn('service_identity >= 18.1.0', deps)
self.assertIn('idna >= 0.6, != 2.3', deps)
self.assertIn('pyasn1', deps)
self.assertIn('cryptography >= 2.5', deps)
self.assertIn('soappy', deps)
self.assertIn('pyserial >= 3.0', deps)
self.assertIn('appdirs >= 1.4.0', deps)
self.assertIn('h2 >= 3.0, < 4.0', deps)
self.assertIn('priority >= 1.1.0, < 2.0', deps)
def test_extrasRequiresMacosPlatformDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{macos_platform} extra contains setuptools
requirements for all of Twisted's optional dependencies usable on the
macOS platform.
"""
deps = _EXTRAS_REQUIRE['macos_platform']
self.assertIn('pyopenssl >= 16.0.0', deps)
self.assertIn('service_identity >= 18.1.0', deps)
self.assertIn('idna >= 0.6, != 2.3', deps)
self.assertIn('pyasn1', deps)
self.assertIn('cryptography >= 2.5', deps)
self.assertIn('soappy', deps)
self.assertIn('pyserial >= 3.0', deps)
self.assertIn('h2 >= 3.0, < 4.0', deps)
self.assertIn('priority >= 1.1.0, < 2.0', deps)
self.assertIn('pyobjc-core', deps)
def test_extrasRequireMacOSXPlatformDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{osx_platform} is an alias for C{macos_platform}.
"""
self.assertEqual(_EXTRAS_REQUIRE['macos_platform'],
_EXTRAS_REQUIRE['osx_platform'])
def test_extrasRequiresWindowsPlatformDeps(self):
"""
L{_EXTRAS_REQUIRE}'s C{windows_platform} extra contains setuptools
requirements for all of Twisted's optional dependencies usable on the
Microsoft Windows platform.
"""
deps = _EXTRAS_REQUIRE['windows_platform']
self.assertIn('pyopenssl >= 16.0.0', deps)
self.assertIn('service_identity >= 18.1.0', deps)
self.assertIn('idna >= 0.6, != 2.3', deps)
self.assertIn('pyasn1', deps)
self.assertIn('cryptography >= 2.5', deps)
self.assertIn('soappy', deps)
self.assertIn('pyserial >= 3.0', deps)
self.assertIn('h2 >= 3.0, < 4.0', deps)
self.assertIn('priority >= 1.1.0, < 2.0', deps)
self.assertIn('pywin32 != 226', deps)
class FakeModule(object):
"""
A fake module, suitable for dependency injection in testing.
"""
def __init__(self, attrs):
"""
Initializes a fake module.
@param attrs: The attrs that will be accessible on the module.
@type attrs: C{dict} of C{str} (Python names) to objects
"""
self._attrs = attrs
def __getattr__(self, name):
"""
Gets an attribute of this fake module from its attrs.
@raise AttributeError: When the requested attribute is missing.
"""
try:
return self._attrs[name]
except KeyError:
raise AttributeError()
fakeCPythonPlatform = FakeModule({"python_implementation": lambda: "CPython"})
fakeOtherPlatform = FakeModule({"python_implementation": lambda: "lvhpy"})
class WithPlatformTests(SynchronousTestCase):
"""
Tests for L{_checkCPython} when used with a (fake) C{platform} module.
"""
def test_cpython(self):
"""
L{_checkCPython} returns C{True} when C{platform.python_implementation}
says we're running on CPython.
"""
self.assertTrue(_setup._checkCPython(platform=fakeCPythonPlatform))
def test_other(self):
"""
L{_checkCPython} returns C{False} when C{platform.python_implementation}
says we're not running on CPython.
"""
self.assertFalse(_setup._checkCPython(platform=fakeOtherPlatform))
class BuildPy3Tests(SynchronousTestCase):
"""
Tests for L{BuildPy3}.
"""
maxDiff = None
if not _PY3:
skip = "BuildPy3 setuptools command used with Python 3 only."
def test_find_package_modules(self):
"""
L{BuildPy3.find_package_modules} filters the found modules, excluding the modules listed in
L{twisted.python.dist3}.
"""
distribution = Distribution()
distribution.script_name = 'setup.py'
distribution.script_args = 'build_py'
builder = BuildPy3(distribution)
# Rig the dist3 data so that we can reduce the scope of this test and
# reduce the risk of getting false failures, while doing a minimum
# level of patching.
self.patch(
_setup,
'notPortedModules',
[
"twisted.spread.test.test_pbfailure",
],
)
twistedPackageDir = filepath.FilePath(twisted.__file__).parent()
packageDir = twistedPackageDir.child("spread").child("test")
result = builder.find_package_modules('twisted.spread.test',
packageDir.path)
self.assertEqual(sorted([
('twisted.spread.test', '__init__',
packageDir.child('__init__.py').path),
('twisted.spread.test', 'test_banana',
packageDir.child('test_banana.py').path),
('twisted.spread.test', 'test_jelly',
packageDir.child('test_jelly.py').path),
('twisted.spread.test', 'test_pb',
packageDir.child('test_pb.py').path),
]),
sorted(result),
)
class LongDescriptionTests(SynchronousTestCase):
"""
Tests for C{_longDescriptionArgsFromReadme()}.
Note that the validity of the reStructuredText syntax is tested separately
using L{twine check} in L{tox.ini}.
"""
def test_generate(self):
"""
L{_longDescriptionArgsFromReadme()} outputs a L{long_description} in
reStructuredText format. Local links are transformed into absolute ones
that point at the Twisted GitHub repository.
"""
path = self.mktemp()
with open(path, 'w') as f:
f.write('\n'.join([
'Twisted',
'=======',
'',
'Changes: `NEWS <NEWS.rst>`_.',
"Read `the docs <https://twistedmatrix.com/documents/>`_.\n",
]))
self.assertEqual({
'long_description': '''\
Twisted
=======
Changes: `NEWS <https://github.com/twisted/twisted/blob/trunk/NEWS.rst>`_.
Read `the docs <https://twistedmatrix.com/documents/>`_.
''',
'long_description_content_type': 'text/x-rst',
}, _longDescriptionArgsFromReadme(path))

View file

@ -0,0 +1,625 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for twisted.python._shellcomp
"""
from __future__ import division, absolute_import
import sys
from io import BytesIO
from twisted.trial import unittest
from twisted.python import _shellcomp, usage, reflect
from twisted.python.usage import Completions, Completer, CompleteFiles
from twisted.python.usage import CompleteList
class ZshScriptTestMeta(type):
"""
Metaclass of ZshScriptTestMixin.
"""
def __new__(cls, name, bases, attrs):
def makeTest(cmdName, optionsFQPN):
def runTest(self):
return test_genZshFunction(self, cmdName, optionsFQPN)
return runTest
# add test_ methods to the class for each script
# we are testing.
if 'generateFor' in attrs:
for cmdName, optionsFQPN in attrs['generateFor']:
test = makeTest(cmdName, optionsFQPN)
attrs['test_genZshFunction_' + cmdName] = test
return type.__new__(cls, name, bases, attrs)
class ZshScriptTestMixin(object):
"""
Integration test helper to show that C{usage.Options} classes can have zsh
completion functions generated for them without raising errors.
In your subclasses set a class variable like so:
# | cmd name | Fully Qualified Python Name of Options class |
#
generateFor = [('conch', 'twisted.conch.scripts.conch.ClientOptions'),
('twistd', 'twisted.scripts.twistd.ServerOptions'),
]
Each package that contains Twisted scripts should contain one TestCase
subclass which also inherits from this mixin, and contains a C{generateFor}
list appropriate for the scripts in that package.
"""
__metaclass__ = ZshScriptTestMeta
def test_genZshFunction(self, cmdName, optionsFQPN):
"""
Generate completion functions for given twisted command - no errors
should be raised
@type cmdName: C{str}
@param cmdName: The name of the command-line utility e.g. 'twistd'
@type optionsFQPN: C{str}
@param optionsFQPN: The Fully Qualified Python Name of the C{Options}
class to be tested.
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
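# The generated completion code is captured in memory rather than written to a file.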
# some scripts won't import or instantiate because of missing
# dependencies (pyOpenSSL, etc) so we have to skip them.
try:
o = reflect.namedAny(optionsFQPN)()
except Exception as e:
raise unittest.SkipTest("Couldn't import or instantiate "
"Options class: %s" % (e,))
try:
o.parseOptions(["", "--_shell-completion", "zsh:2"])
except ImportError as e:
# this can happen for commands which don't have all
# the necessary dependencies installed. skip test.
raise unittest.SkipTest("ImportError calling parseOptions(): %s" % (e,))
except SystemExit:
pass # expected
else:
self.fail('SystemExit not raised')
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
outputFile.seek(0)
outputFile.truncate()
# now, if it has sub commands, we have to test those too
if hasattr(o, 'subCommands'):
for (cmd, short, parser, doc) in o.subCommands:
try:
o.parseOptions([cmd, "", "--_shell-completion",
"zsh:3"])
except ImportError as e:
# this can happen for commands which don't have all
# the necessary dependencies installed. skip test.
raise unittest.SkipTest("ImportError calling parseOptions() "
"on subcommand: %s", (e,))
except SystemExit:
pass # expected
else:
self.fail('SystemExit not raised')
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
outputFile.seek(0)
outputFile.truncate()
# Flush warnings so that DeprecationWarnings aren't printed when
# running these test cases.
self.flushWarnings()
class ZshTests(unittest.TestCase):
"""
Tests for zsh completion code
"""
def test_accumulateMetadata(self):
"""
Are `compData' attributes you can place on Options classes
picked up correctly?
"""
opts = FighterAceExtendedOptions()
ag = _shellcomp.ZshArgumentsGenerator(opts, 'ace', BytesIO())
descriptions = FighterAceOptions.compData.descriptions.copy()
descriptions.update(FighterAceExtendedOptions.compData.descriptions)
self.assertEqual(ag.descriptions, descriptions)
self.assertEqual(ag.multiUse,
set(FighterAceOptions.compData.multiUse))
self.assertEqual(ag.mutuallyExclusive,
FighterAceOptions.compData.mutuallyExclusive)
optActions = FighterAceOptions.compData.optActions.copy()
optActions.update(FighterAceExtendedOptions.compData.optActions)
self.assertEqual(ag.optActions, optActions)
self.assertEqual(ag.extraActions,
FighterAceOptions.compData.extraActions)
def test_mutuallyExclusiveCornerCase(self):
"""
Exercise a corner-case of ZshArgumentsGenerator.makeExcludesDict()
where the long option name already exists in the `excludes` dict being
built.
"""
class OddFighterAceOptions(FighterAceExtendedOptions):
# since "fokker", etc, are already defined as mutually-
# exclusive on the super-class, defining them again here forces
# the corner-case to be exercised.
optFlags = [['anatra', None,
'Select the Anatra DS as your dogfighter aircraft']]
compData = Completions(
mutuallyExclusive=[['anatra', 'fokker', 'albatros',
'spad', 'bristol']])
opts = OddFighterAceOptions()
ag = _shellcomp.ZshArgumentsGenerator(opts, 'ace', BytesIO())
expected = {
'albatros': set(['anatra', 'b', 'bristol', 'f',
'fokker', 's', 'spad']),
'anatra': set(['a', 'albatros', 'b', 'bristol',
'f', 'fokker', 's', 'spad']),
'bristol': set(['a', 'albatros', 'anatra', 'f',
'fokker', 's', 'spad']),
'fokker': set(['a', 'albatros', 'anatra', 'b',
'bristol', 's', 'spad']),
'spad': set(['a', 'albatros', 'anatra', 'b',
'bristol', 'f', 'fokker'])}
self.assertEqual(ag.excludes, expected)
def test_accumulateAdditionalOptions(self):
"""
We pick up options that are only defined by having an
appropriately named method on your Options class,
e.g. def opt_foo(self, foo)
"""
opts = FighterAceExtendedOptions()
ag = _shellcomp.ZshArgumentsGenerator(opts, 'ace', BytesIO())
self.assertIn('nocrash', ag.flagNameToDefinition)
self.assertIn('nocrash', ag.allOptionsNameToDefinition)
self.assertIn('difficulty', ag.paramNameToDefinition)
self.assertIn('difficulty', ag.allOptionsNameToDefinition)
def test_verifyZshNames(self):
"""
Using a parameter/flag name that doesn't exist
will raise an error
"""
class TmpOptions(FighterAceExtendedOptions):
# Note typo of detail
compData = Completions(optActions={'detaill' : None})
self.assertRaises(ValueError, _shellcomp.ZshArgumentsGenerator,
TmpOptions(), 'ace', BytesIO())
class TmpOptions2(FighterAceExtendedOptions):
# Note that 'foo' and 'bar' are not real option
# names defined in this class
compData = Completions(
mutuallyExclusive=[("foo", "bar")])
self.assertRaises(ValueError, _shellcomp.ZshArgumentsGenerator,
TmpOptions2(), 'ace', BytesIO())
def test_zshCode(self):
"""
Generate a completion function, and test the textual output
against a known correct output
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
self.patch(sys, 'argv', ["silly", "", "--_shell-completion", "zsh:2"])
opts = SimpleProgOptions()
self.assertRaises(SystemExit, opts.parseOptions)
self.assertEqual(testOutput1, outputFile.getvalue())
def test_zshCodeWithSubs(self):
"""
Generate a completion function with subcommands,
and test the textual output against a known correct output
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
self.patch(sys, 'argv', ["silly2", "", "--_shell-completion", "zsh:2"])
opts = SimpleProgWithSubcommands()
self.assertRaises(SystemExit, opts.parseOptions)
self.assertEqual(testOutput2, outputFile.getvalue())
def test_incompleteCommandLine(self):
"""
Completion still happens even if a command-line is given
that would normally throw UsageError.
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--fokker", "server", "--unknown-option",
"--unknown-option2",
"--_shell-completion", "zsh:5"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
def test_incompleteCommandLine_case2(self):
"""
Completion still happens even if a command-line is given
that would normally throw UsageError.
The existence of --unknown-option prior to the subcommand
will break subcommand detection... but we complete anyway
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--fokker", "--unknown-option", "server",
"--list-server", "--_shell-completion", "zsh:5"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
outputFile.seek(0)
outputFile.truncate()
def test_incompleteCommandLine_case3(self):
"""
Completion still happens even if a command-line is given
that would normally throw UsageError.
Break subcommand detection in a different way by providing
an invalid subcommand name.
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--fokker", "unknown-subcommand",
"--list-server", "--_shell-completion", "zsh:4"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
def test_skipSubcommandList(self):
"""
Ensure the optimization which skips building the subcommand list
under certain conditions isn't broken.
"""
outputFile = BytesIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--alba", "--_shell-completion", "zsh:2"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
def test_poorlyDescribedOptMethod(self):
"""
Test the corner case of fetching an option description from a method docstring.
"""
opts = FighterAceOptions()
argGen = _shellcomp.ZshArgumentsGenerator(opts, 'ace', None)
descr = argGen.getDescription('silly')
# docstring for opt_silly is useless so it should just use the
# option name as the description
self.assertEqual(descr, 'silly')
def test_brokenActions(self):
"""
A C{Completer} with repeat=True may only be used as the
last item in the extraActions list.
"""
class BrokenActions(usage.Options):
compData = usage.Completions(
extraActions=[usage.Completer(repeat=True),
usage.Completer()]
)
outputFile = BytesIO()
opts = BrokenActions()
self.patch(opts, '_shellCompFile', outputFile)
self.assertRaises(ValueError, opts.parseOptions,
["", "--_shell-completion", "zsh:2"])
def test_optMethodsDontOverride(self):
"""
opt_* methods on Options classes should not override the
data provided in optFlags or optParameters.
"""
class Options(usage.Options):
optFlags = [['flag', 'f', 'A flag']]
optParameters = [['param', 'p', None, 'A param']]
def opt_flag(self):
""" junk description """
def opt_param(self, param):
""" junk description """
opts = Options()
argGen = _shellcomp.ZshArgumentsGenerator(opts, 'ace', None)
self.assertEqual(argGen.getDescription('flag'), 'A flag')
self.assertEqual(argGen.getDescription('param'), 'A param')
class EscapeTests(unittest.TestCase):
def test_escape(self):
"""
Verify _shellcomp.escape() function
"""
esc = _shellcomp.escape
test = "$"
self.assertEqual(esc(test), "'$'")
test = 'A--\'$"\\`--B'
self.assertEqual(esc(test), '"A--\'\\$\\"\\\\\\`--B"')
class CompleterNotImplementedTests(unittest.TestCase):
"""
Test that using an unknown shell constant with SubcommandAction
raises NotImplementedError
The other Completer() subclasses are tested in test_usage.py
"""
def test_unknownShell(self):
"""
Using an unknown shellType should raise NotImplementedError
"""
action = _shellcomp.SubcommandAction()
self.assertRaises(NotImplementedError, action._shellCode,
None, "bad_shell_type")
class FighterAceServerOptions(usage.Options):
"""
Options for FighterAce 'server' subcommand
"""
optFlags = [['list-server', None,
'List this server with the online FighterAce network']]
optParameters = [['packets-per-second', None,
'Number of update packets to send per second', '20']]
class FighterAceOptions(usage.Options):
"""
Command-line options for an imaginary `Fighter Ace` game
"""
optFlags = [['fokker', 'f',
'Select the Fokker Dr.I as your dogfighter aircraft'],
['albatros', 'a',
'Select the Albatros D-III as your dogfighter aircraft'],
['spad', 's',
'Select the SPAD S.VII as your dogfighter aircraft'],
['bristol', 'b',
'Select the Bristol Scout as your dogfighter aircraft'],
['physics', 'p',
'Enable secret Twisted physics engine'],
['jam', 'j',
'Enable a small chance that your machine guns will jam!'],
['verbose', 'v',
'Verbose logging (may be specified more than once)'],
]
optParameters = [['pilot-name', None, "What's your name, Ace?",
'Manfred von Richthofen'],
['detail', 'd',
'Select the level of rendering detail (1-5)', '3'],
]
subCommands = [['server', None, FighterAceServerOptions,
'Start FighterAce game-server.'],
]
compData = Completions(
descriptions={'physics' : 'Twisted-Physics',
'detail' : 'Rendering detail level'},
multiUse=['verbose'],
mutuallyExclusive=[['fokker', 'albatros', 'spad',
'bristol']],
optActions={'detail' : CompleteList(['1', '2', '3',
'4', '5'])},
extraActions=[CompleteFiles(descr='saved game file to load')]
)
def opt_silly(self):
# A silly option which nobody can explain
""" """
class FighterAceExtendedOptions(FighterAceOptions):
"""
Extend the options and zsh metadata provided by FighterAceOptions.
_shellcomp must accumulate options and metadata from all classes in the
hierarchy so this is important to test.
"""
optFlags = [['no-stalls', None,
'Turn off the ability to stall your aircraft']]
optParameters = [['reality-level', None,
'Select the level of physics reality (1-5)', '5']]
compData = Completions(
descriptions={'no-stalls' : 'Can\'t stall your plane'},
optActions={'reality-level' :
Completer(descr='Physics reality level')}
)
def opt_nocrash(self):
"""
Select that you can't crash your plane
"""
def opt_difficulty(self, difficulty):
"""
How tough are you? (1-10)
"""
def _accuracyAction():
# add tick marks just to exercise quoting
return CompleteList(['1', '2', '3'], descr='Accuracy\'`?')
class SimpleProgOptions(usage.Options):
"""
Command-line options for a `Silly` imaginary program
"""
optFlags = [['color', 'c', 'Turn on color output'],
['gray', 'g', 'Turn on gray-scale output'],
['verbose', 'v',
'Verbose logging (may be specified more than once)'],
]
optParameters = [['optimization', None, '5',
'Select the level of optimization (1-5)'],
['accuracy', 'a', '3',
'Select the level of accuracy (1-3)'],
]
compData = Completions(
descriptions={'color' : 'Color on',
'optimization' : 'Optimization level'},
multiUse=['verbose'],
mutuallyExclusive=[['color', 'gray']],
optActions={'optimization' : CompleteList(['1', '2', '3', '4', '5'],
descr='Optimization?'),
'accuracy' : _accuracyAction},
extraActions=[CompleteFiles(descr='output file')]
)
def opt_X(self):
"""
usage.Options does not recognize single-letter opt_ methods
"""
class SimpleProgSub1(usage.Options):
optFlags = [['sub-opt', 's', 'Sub Opt One']]
class SimpleProgSub2(usage.Options):
optFlags = [['sub-opt', 's', 'Sub Opt Two']]
class SimpleProgWithSubcommands(SimpleProgOptions):
optFlags = [['some-option'],
['other-option', 'o']]
optParameters = [['some-param'],
['other-param', 'p'],
['another-param', 'P', 'Yet Another Param']]
subCommands = [ ['sub1', None, SimpleProgSub1, 'Sub Command 1'],
['sub2', None, SimpleProgSub2, 'Sub Command 2']]
testOutput1 = b"""#compdef silly
_arguments -s -A "-*" \\
':output file (*):_files -g "*"' \\
"(--accuracy)-a[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
"(-a)--accuracy=[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
'(--color --gray -g)-c[Color on]' \\
'(--gray -c -g)--color[Color on]' \\
'(--color --gray -c)-g[Turn on gray-scale output]' \\
'(--color -c -g)--gray[Turn on gray-scale output]' \\
'--help[Display this help and exit.]' \\
'--optimization=[Optimization level]:Optimization?:(1 2 3 4 5)' \\
'*-v[Verbose logging (may be specified more than once)]' \\
'*--verbose[Verbose logging (may be specified more than once)]' \\
'--version[Display Twisted version and exit.]' \\
&& return 0
"""
# with sub-commands
testOutput2 = b"""#compdef silly2
_arguments -s -A "-*" \\
'*::subcmd:->subcmd' \\
':output file (*):_files -g "*"' \\
"(--accuracy)-a[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
"(-a)--accuracy=[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
'(--another-param)-P[another-param]:another-param:_files' \\
'(-P)--another-param=[another-param]:another-param:_files' \\
'(--color --gray -g)-c[Color on]' \\
'(--gray -c -g)--color[Color on]' \\
'(--color --gray -c)-g[Turn on gray-scale output]' \\
'(--color -c -g)--gray[Turn on gray-scale output]' \\
'--help[Display this help and exit.]' \\
'--optimization=[Optimization level]:Optimization?:(1 2 3 4 5)' \\
'(--other-option)-o[other-option]' \\
'(-o)--other-option[other-option]' \\
'(--other-param)-p[other-param]:other-param:_files' \\
'(-p)--other-param=[other-param]:other-param:_files' \\
'--some-option[some-option]' \\
'--some-param=[some-param]:some-param:_files' \\
'*-v[Verbose logging (may be specified more than once)]' \\
'*--verbose[Verbose logging (may be specified more than once)]' \\
'--version[Display Twisted version and exit.]' \\
&& return 0
local _zsh_subcmds_array
_zsh_subcmds_array=(
"sub1:Sub Command 1"
"sub2:Sub Command 2"
)
_describe "sub-command" _zsh_subcmds_array
"""

View file

@ -0,0 +1,151 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.trial.unittest import TestCase
from twisted.python.failure import Failure
try:
import syslog as stdsyslog
except ImportError:
stdsyslog = None
else:
from twisted.python import syslog
class SyslogObserverTests(TestCase):
"""
Tests for L{SyslogObserver} which sends Twisted log events to the syslog.
"""
events = None
if stdsyslog is None:
skip = "syslog is not supported on this platform"
def setUp(self):
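# Replace SyslogObserver's openlog/syslog hooks with recorders so that
# nothing is written to the real system log.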
self.patch(syslog.SyslogObserver, 'openlog', self.openlog)
self.patch(syslog.SyslogObserver, 'syslog', self.syslog)
self.observer = syslog.SyslogObserver('SyslogObserverTests')
def openlog(self, prefix, options, facility):
self.logOpened = (prefix, options, facility)
self.events = []
def syslog(self, options, message):
self.events.append((options, message))
def test_emitWithoutMessage(self):
"""
L{SyslogObserver.emit} ignores events with an empty value for the
C{'message'} key.
"""
self.observer.emit({'message': (), 'isError': False, 'system': '-'})
self.assertEqual(self.events, [])
def test_emitCustomPriority(self):
"""
L{SyslogObserver.emit} uses the value of the C{'syslogPriority'} as the
syslog priority, if that key is present in the event dictionary.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': False, 'system': '-',
'syslogPriority': stdsyslog.LOG_DEBUG})
self.assertEqual(
self.events,
[(stdsyslog.LOG_DEBUG, '[-] hello, world')])
def test_emitErrorPriority(self):
"""
L{SyslogObserver.emit} uses C{LOG_ALERT} if the event represents an
error.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': True, 'system': '-',
'failure': Failure(Exception("foo"))})
self.assertEqual(
self.events,
[(stdsyslog.LOG_ALERT, '[-] hello, world')])
def test_emitCustomPriorityOverridesError(self):
"""
L{SyslogObserver.emit} uses the value of the C{'syslogPriority'} key if
it is specified even if the event dictionary represents an error.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': True, 'system': '-',
'syslogPriority': stdsyslog.LOG_NOTICE,
'failure': Failure(Exception("bar"))})
self.assertEqual(
self.events,
[(stdsyslog.LOG_NOTICE, '[-] hello, world')])
def test_emitCustomFacility(self):
"""
L{SyslogObserver.emit} uses the value of the C{'syslogFacility'} key as the
syslog facility, if that key is present in the event dictionary.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': False, 'system': '-',
'syslogFacility': stdsyslog.LOG_CRON})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO | stdsyslog.LOG_CRON, '[-] hello, world')])
def test_emitCustomSystem(self):
"""
L{SyslogObserver.emit} uses the value of the C{'system'} key to prefix
the logged message.
"""
self.observer.emit({'message': ('hello, world',), 'isError': False,
'system': 'nonDefaultSystem'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, "[nonDefaultSystem] hello, world")])
def test_emitMessage(self):
"""
L{SyslogObserver.emit} logs the value of the C{'message'} key of the
event dictionary it is passed to the syslog.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': False,
'system': '-'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, "[-] hello, world")])
def test_emitMultilineMessage(self):
"""
Each line of a multiline message is emitted separately to the syslog.
"""
self.observer.emit({
'message': ('hello,\nworld',), 'isError': False,
'system': '-'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, '[-] hello,'),
(stdsyslog.LOG_INFO, '[-] \tworld')])
def test_emitStripsTrailingEmptyLines(self):
"""
Trailing empty lines of a multiline message are omitted from the
messages sent to the syslog.
"""
self.observer.emit({
'message': ('hello,\nworld\n\n',), 'isError': False,
'system': '-'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, '[-] hello,'),
(stdsyslog.LOG_INFO, '[-] \tworld')])

View file

@ -0,0 +1,176 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.systemd}.
"""
from __future__ import division, absolute_import
import os
from twisted.trial.unittest import TestCase
from twisted.python.systemd import ListenFDs
class InheritedDescriptorsMixin(object):
"""
Mixin for a L{TestCase} subclass which defines test methods for some kind of
systemd sd-daemon class. In particular, it defines tests for an
C{inheritedDescriptors} method.
"""
def test_inheritedDescriptors(self):
"""
C{inheritedDescriptors} returns a list of integers giving the file
descriptors which were inherited from systemd.
"""
sddaemon = self.getDaemon(7, 3)
self.assertEqual([7, 8, 9], sddaemon.inheritedDescriptors())
def test_repeated(self):
"""
Any subsequent calls to C{inheritedDescriptors} return the same list.
"""
sddaemon = self.getDaemon(7, 3)
self.assertEqual(
sddaemon.inheritedDescriptors(),
sddaemon.inheritedDescriptors())
class MemoryOnlyMixin(object):
"""
Mixin for a L{TestCase} subclass which creates a fake, in-memory
implementation of C{inheritedDescriptors}. This provides verification that
the fake behaves in a compatible way with the real implementation.
"""
def getDaemon(self, start, count):
"""
Invent C{count} new I{file descriptors} (actually integers, attached to
no real file description), starting at C{start}. Construct and return a
new L{ListenFDs} which will claim those integers represent inherited
file descriptors.
"""
return ListenFDs(range(start, start + count))
class EnvironmentMixin(object):
"""
Mixin for a L{TestCase} subclass which creates a real implementation of
C{inheritedDescriptors} which is based on the environment variables set by
systemd. To facilitate testing, this mixin will also create a fake
environment dictionary and add keys to it to make it look as if some
descriptors have been inherited.
"""
def initializeEnvironment(self, count, pid):
"""
Create a copy of the process environment and add I{LISTEN_FDS} and
I{LISTEN_PID} (the environment variables set by systemd) to it.
"""
result = os.environ.copy()
result['LISTEN_FDS'] = str(count)
result['LISTEN_PID'] = str(pid)
return result
def getDaemon(self, start, count):
"""
Create a new L{ListenFDs} instance, initialized with a fake environment
dictionary which will be set up as systemd would have set it up if
C{count} descriptors were being inherited. The descriptors will also
start at C{start}.
"""
fakeEnvironment = self.initializeEnvironment(count, os.getpid())
return ListenFDs.fromEnvironment(environ=fakeEnvironment, start=start)
class MemoryOnlyTests(MemoryOnlyMixin, InheritedDescriptorsMixin, TestCase):
"""
Apply tests to L{ListenFDs}, explicitly constructed with some fake file
descriptors.
"""
class EnvironmentTests(EnvironmentMixin, InheritedDescriptorsMixin, TestCase):
"""
Apply tests to L{ListenFDs}, constructed based on an environment dictionary.
"""
def test_secondEnvironment(self):
"""
Only a single L{ListenFDs} instance can extract inherited file descriptors.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid())
first = ListenFDs.fromEnvironment(environ=fakeEnvironment)
second = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual(list(range(3, 6)), first.inheritedDescriptors())
self.assertEqual([], second.inheritedDescriptors())
def test_mismatchedPID(self):
"""
If the current process PID does not match the PID in the environment, no
inherited descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid() + 1)
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_missingPIDVariable(self):
"""
If the I{LISTEN_PID} environment variable is not present, no inherited
descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid())
del fakeEnvironment['LISTEN_PID']
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_nonIntegerPIDVariable(self):
"""
If the I{LISTEN_PID} environment variable is set to a string that cannot
be parsed as an integer, no inherited descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, "hello, world")
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_missingFDSVariable(self):
"""
If the I{LISTEN_FDS} environment variable is not present, no inherited
descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid())
del fakeEnvironment['LISTEN_FDS']
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_nonIntegerFDSVariable(self):
"""
If the I{LISTEN_FDS} environment variable is set to a string that cannot
be parsed as an integer, no inherited descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment("hello, world", os.getpid())
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_defaultEnviron(self):
"""
If the process environment is not explicitly passed to
L{ListenFDs.fromEnvironment}, the real process environment dictionary is
used.
"""
self.patch(os, 'environ', {
'LISTEN_PID': str(os.getpid()),
'LISTEN_FDS': '5'})
sddaemon = ListenFDs.fromEnvironment()
self.assertEqual(list(range(3, 3 + 5)),
sddaemon.inheritedDescriptors())

View file

@ -0,0 +1,27 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python._textattributes}.
"""
from twisted.trial import unittest
from twisted.python._textattributes import DefaultFormattingState
class DefaultFormattingStateTests(unittest.TestCase):
"""
Tests for L{twisted.python._textattributes.DefaultFormattingState}.
"""
def test_equality(self):
"""
L{DefaultFormattingState}s are always equal to other
L{DefaultFormattingState}s.
"""
self.assertEqual(
DefaultFormattingState(),
DefaultFormattingState())
self.assertNotEqual(
DefaultFormattingState(),
'hello')

View file

@ -0,0 +1,153 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python._tzhelper}.
"""
from os import environ
try:
from time import tzset
except ImportError:
tzset = None
from twisted.python._tzhelper import FixedOffsetTimeZone
from twisted.trial.unittest import TestCase, SkipTest
from datetime import timedelta
from time import mktime as mktime_real
# On some rare platforms (FreeBSD 8? I was not able to reproduce
# on FreeBSD 9) 'mktime' seems to always fail once tzset() has been
# called more than once in a process lifetime. I think this is
# just a platform bug, so let's work around it. -glyph
def mktime(t9):
"""
Call L{mktime_real}, and if it raises L{OverflowError}, catch it and raise
SkipTest instead.
@param t9: A time as a 9-item tuple.
@type t9: L{tuple}
@return: A timestamp.
@rtype: L{float}
"""
try:
return mktime_real(t9)
except OverflowError:
raise SkipTest(
"Platform cannot construct time zone for {0!r}"
.format(t9)
)
def setTZ(name):
"""
Set time zone.
@param name: a time zone name
@type name: L{str}
"""
if tzset is None:
return
if name is None:
try:
del environ["TZ"]
except KeyError:
pass
else:
environ["TZ"] = name
tzset()
def addTZCleanup(testCase):
"""
Add cleanup hooks to a test case to reset timezone to original value.
@param testCase: the test case to add the cleanup to.
@type testCase: L{unittest.TestCase}
"""
tzIn = environ.get("TZ", None)
@testCase.addCleanup
def resetTZ():
setTZ(tzIn)
class FixedOffsetTimeZoneTests(TestCase):
"""
Tests for L{FixedOffsetTimeZone}.
"""
def test_tzinfo(self):
"""
Test that timezone attributes respect the timezone as set by the
standard C{TZ} environment variable and L{tzset} API.
"""
if tzset is None:
raise SkipTest(
"Platform cannot change timezone; unable to verify offsets."
)
def testForTimeZone(name, expectedOffsetDST, expectedOffsetSTD):
setTZ(name)
localDST = mktime((2006, 6, 30, 0, 0, 0, 4, 181, 1))
localSTD = mktime((2007, 1, 31, 0, 0, 0, 2, 31, 0))
tzDST = FixedOffsetTimeZone.fromLocalTimeStamp(localDST)
tzSTD = FixedOffsetTimeZone.fromLocalTimeStamp(localSTD)
self.assertEqual(
tzDST.tzname(localDST),
"UTC{0}".format(expectedOffsetDST)
)
self.assertEqual(
tzSTD.tzname(localSTD),
"UTC{0}".format(expectedOffsetSTD)
)
self.assertEqual(tzDST.dst(localDST), timedelta(0))
self.assertEqual(tzSTD.dst(localSTD), timedelta(0))
def timeDeltaFromOffset(offset):
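# Convert an offset string such as "+0400" or "-0500" into a timedelta.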
assert len(offset) == 5
sign = offset[0]
hours = int(offset[1:3])
minutes = int(offset[3:5])
if sign == "-":
hours = -hours
minutes = -minutes
else:
assert sign == "+"
return timedelta(hours=hours, minutes=minutes)
self.assertEqual(
tzDST.utcoffset(localDST),
timeDeltaFromOffset(expectedOffsetDST)
)
self.assertEqual(
tzSTD.utcoffset(localSTD),
timeDeltaFromOffset(expectedOffsetSTD)
)
addTZCleanup(self)
# UTC
testForTimeZone("UTC+00", "+0000", "+0000")
# West of UTC
testForTimeZone("EST+05EDT,M4.1.0,M10.5.0", "-0400", "-0500")
# East of UTC
testForTimeZone("CEST-01CEDT,M4.1.0,M10.5.0", "+0200", "+0100")
# No DST
testForTimeZone("CST+06", "-0600", "-0600")

View file

@ -0,0 +1,805 @@
# -*- test-case-name: twisted.python.test.test_url -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.url}.
"""
from __future__ import unicode_literals
from ..url import URL
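# The text type on both Python 2 and 3 (u'' is unicode on 2 and str on 3).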
unicode = type(u'')
from twisted.trial.unittest import SynchronousTestCase
theurl = "http://www.foo.com/a/nice/path/?zot=23&zut"
# Examples from RFC 3986 section 5.4, Reference Resolution Examples
relativeLinkBaseForRFC3986 = 'http://a/b/c/d;p?q'
relativeLinkTestsForRFC3986 = [
# "Normal"
#('g:h', 'g:h'), # Not supported: scheme with relative path
('g', 'http://a/b/c/g'),
('./g', 'http://a/b/c/g'),
('g/', 'http://a/b/c/g/'),
('/g', 'http://a/g'),
('//g', 'http://g'),
('?y', 'http://a/b/c/d;p?y'),
('g?y', 'http://a/b/c/g?y'),
('#s', 'http://a/b/c/d;p?q#s'),
('g#s', 'http://a/b/c/g#s'),
('g?y#s', 'http://a/b/c/g?y#s'),
(';x', 'http://a/b/c/;x'),
('g;x', 'http://a/b/c/g;x'),
('g;x?y#s', 'http://a/b/c/g;x?y#s'),
('', 'http://a/b/c/d;p?q'),
('.', 'http://a/b/c/'),
('./', 'http://a/b/c/'),
('..', 'http://a/b/'),
('../', 'http://a/b/'),
('../g', 'http://a/b/g'),
('../..', 'http://a/'),
('../../', 'http://a/'),
('../../g', 'http://a/g'),
# Abnormal examples
# ".." cannot be used to change the authority component of a URI.
('../../../g', 'http://a/g'),
('../../../../g', 'http://a/g'),
# Only include "." and ".." when they are only part of a larger segment,
# not by themselves.
('/./g', 'http://a/g'),
('/../g', 'http://a/g'),
('g.', 'http://a/b/c/g.'),
('.g', 'http://a/b/c/.g'),
('g..', 'http://a/b/c/g..'),
('..g', 'http://a/b/c/..g'),
# Unnecessary or nonsensical forms of "." and "..".
('./../g', 'http://a/b/g'),
('./g/.', 'http://a/b/c/g/'),
('g/./h', 'http://a/b/c/g/h'),
('g/../h', 'http://a/b/c/h'),
('g;x=1/./y', 'http://a/b/c/g;x=1/y'),
('g;x=1/../y', 'http://a/b/c/y'),
# Separating the reference's query and fragment components from the path.
('g?y/./x', 'http://a/b/c/g?y/./x'),
('g?y/../x', 'http://a/b/c/g?y/../x'),
('g#s/./x', 'http://a/b/c/g#s/./x'),
('g#s/../x', 'http://a/b/c/g#s/../x'),
# Not supported: scheme with relative path
#("http:g", "http:g"), # strict
#("http:g", "http://a/b/c/g"), # non-strict
]
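# Percent-encode every character of a string, e.g. 'ab' -> '%61%62'.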
_percentenc = lambda s: ''.join('%%%02X' % ord(c) for c in s)
class TestURL(SynchronousTestCase):
"""
Tests for L{URL}.
"""
def assertUnicoded(self, u):
"""
The given L{URL}'s components should be L{unicode}.
@param u: The L{URL} to test.
"""
self.assertTrue(isinstance(u.scheme, unicode)
or u.scheme is None, repr(u))
self.assertTrue(isinstance(u.host, unicode)
or u.host is None, repr(u))
for seg in u.path:
self.assertIsInstance(seg, unicode, repr(u))
for (k, v) in u.query:
self.assertIsInstance(k, unicode, repr(u))
self.assertTrue(v is None or isinstance(v, unicode), repr(u))
self.assertIsInstance(u.fragment, unicode, repr(u))
def assertURL(self, u, scheme, host, path, query,
fragment, port, userinfo=u''):
"""
The given L{URL} should have the given components.
@param u: The actual L{URL} to examine.
@param scheme: The expected scheme.
@param host: The expected host.
@param path: The expected path.
@param query: The expected query.
@param fragment: The expected fragment.
@param port: The expected port.
@param userinfo: The expected userinfo.
"""
actual = (u.scheme, u.host, u.path, u.query,
u.fragment, u.port, u.userinfo)
expected = (scheme, host, tuple(path), tuple(query),
fragment, port, userinfo)
self.assertEqual(actual, expected)
def test_initDefaults(self):
"""
L{URL} should have appropriate default values.
"""
def check(u):
self.assertUnicoded(u)
self.assertURL(u, u'http', u'', [], [], u'', 80, u'')
check(URL(u'http', u''))
check(URL(u'http', u'', [], []))
check(URL(u'http', u'', [], [], u''))
def test_init(self):
"""
L{URL} should accept L{unicode} parameters.
"""
u = URL(u's', u'h', [u'p'], [(u'k', u'v'), (u'k', None)], u'f')
self.assertUnicoded(u)
self.assertURL(u, u's', u'h', [u'p'], [(u'k', u'v'), (u'k', None)],
u'f', None)
self.assertURL(URL(u'http', u'\xe0', [u'\xe9'],
[(u'\u03bb', u'\u03c0')], u'\u22a5'),
u'http', u'\xe0', [u'\xe9'],
[(u'\u03bb', u'\u03c0')], u'\u22a5', 80)
def test_initPercent(self):
"""
L{URL} should accept (and not interpret) percent characters.
"""
u = URL(u's', u'%68', [u'%70'], [(u'%6B', u'%76'), (u'%6B', None)],
u'%66')
self.assertUnicoded(u)
self.assertURL(u,
u's', u'%68', [u'%70'],
[(u'%6B', u'%76'), (u'%6B', None)],
u'%66', None)
def test_repr(self):
"""
L{URL.__repr__} will display the canonical form of the URL, wrapped in
a L{URL.fromText} invocation, so that it is C{eval}-able but still easy
to read.
"""
self.assertEqual(
repr(URL(scheme=u'http', host=u'foo', path=[u'bar'],
query=[(u'baz', None), (u'k', u'v')],
fragment=u'frob')),
"URL.from_text(%s)" % (repr(u"http://foo/bar?baz&k=v#frob"),)
)
def test_fromText(self):
"""
Round-tripping L{URL.fromText} with C{str} results in an equivalent
URL.
"""
urlpath = URL.fromText(theurl)
self.assertEqual(theurl, urlpath.asText())
def test_roundtrip(self):
"""
L{URL.asText} should invert L{URL.fromText}.
"""
tests = (
"http://localhost",
"http://localhost/",
"http://localhost/foo",
"http://localhost/foo/",
"http://localhost/foo!!bar/",
"http://localhost/foo%20bar/",
"http://localhost/foo%2Fbar/",
"http://localhost/foo?n",
"http://localhost/foo?n=v",
"http://localhost/foo?n=/a/b",
"http://example.com/foo!@$bar?b!@z=123",
"http://localhost/asd?a=asd%20sdf/345",
"http://(%2525)/(%2525)?(%2525)&(%2525)=(%2525)#(%2525)",
"http://(%C3%A9)/(%C3%A9)?(%C3%A9)&(%C3%A9)=(%C3%A9)#(%C3%A9)",
)
for test in tests:
result = URL.fromText(test).asText()
self.assertEqual(test, result)
def test_equality(self):
"""
Two URLs decoded using L{URL.fromText} will be equal (C{==}) if they
decoded same URL string, and unequal (C{!=}) if they decoded different
strings.
"""
urlpath = URL.fromText(theurl)
self.assertEqual(urlpath, URL.fromText(theurl))
self.assertNotEqual(
urlpath,
URL.fromText('ftp://www.anotherinvaliddomain.com/'
'foo/bar/baz/?zot=21&zut')
)
def test_fragmentEquality(self):
"""
A URL created with the empty string for a fragment compares equal
to a URL created with an unspecified fragment.
"""
self.assertEqual(URL(fragment=u''), URL())
self.assertEqual(URL.fromText(u"http://localhost/#"),
URL.fromText(u"http://localhost/"))
def test_child(self):
"""
L{URL.child} appends a new path segment, but does not affect the query
or fragment.
"""
urlpath = URL.fromText(theurl)
self.assertEqual("http://www.foo.com/a/nice/path/gong?zot=23&zut",
urlpath.child(u'gong').asText())
self.assertEqual("http://www.foo.com/a/nice/path/gong%2F?zot=23&zut",
urlpath.child(u'gong/').asText())
self.assertEqual(
"http://www.foo.com/a/nice/path/gong%2Fdouble?zot=23&zut",
urlpath.child(u'gong/double').asText()
)
self.assertEqual(
"http://www.foo.com/a/nice/path/gong%2Fdouble%2F?zot=23&zut",
urlpath.child(u'gong/double/').asText()
)
def test_multiChild(self):
"""
L{URL.child} receives multiple segments as C{*args} and appends each in
turn.
"""
self.assertEqual(URL.fromText('http://example.com/a/b')
.child('c', 'd', 'e').asText(),
'http://example.com/a/b/c/d/e')
def test_childInitRoot(self):
"""
L{URL.child} of a L{URL} without a path produces a L{URL} with a single
path segment.
"""
childURL = URL(host=u"www.foo.com").child(u"c")
self.assertTrue(childURL.rooted)
self.assertEqual("http://www.foo.com/c", childURL.asText())
def test_sibling(self):
"""
L{URL.sibling} of a L{URL} replaces the last path segment, but does not
affect the query or fragment.
"""
urlpath = URL.fromText(theurl)
self.assertEqual(
"http://www.foo.com/a/nice/path/sister?zot=23&zut",
urlpath.sibling(u'sister').asText()
)
# Use a URL without a trailing '/' to check that the last segment is replaced.
theurl2 = "http://www.foo.com/a/nice/path?zot=23&zut"
urlpath = URL.fromText(theurl2)
self.assertEqual(
"http://www.foo.com/a/nice/sister?zot=23&zut",
urlpath.sibling(u'sister').asText()
)
def test_click(self):
"""
L{URL.click} interprets the given string as a relative URI-reference
and returns a new L{URL} interpreting C{self} as the base absolute URI.
"""
urlpath = URL.fromText(theurl)
# A null URI reference is valid and resolves to the base URL itself.
self.assertEqual("http://www.foo.com/a/nice/path/?zot=23&zut",
urlpath.click("").asText())
# A simple relative path removes the query.
self.assertEqual("http://www.foo.com/a/nice/path/click",
urlpath.click("click").asText())
# An absolute path replaces the path and query.
self.assertEqual("http://www.foo.com/click",
urlpath.click("/click").asText())
# Replace just the query.
self.assertEqual("http://www.foo.com/a/nice/path/?burp",
urlpath.click("?burp").asText())
# Clicking from one full URL to another should not generate '//' between
# the authority and the path.
self.assertNotIn("//foobar",
urlpath.click('http://www.foo.com/foobar').asText())
# When clicking from a URL with no query to a URL with a query, the
# query should be handled properly.
u = URL.fromText('http://www.foo.com/me/noquery')
self.assertEqual('http://www.foo.com/me/17?spam=158',
u.click('/me/17?spam=158').asText())
# Check that everything from the path onward is removed when the click
# link has no path.
u = URL.fromText('http://localhost/foo?abc=def')
self.assertEqual(u.click('http://www.python.org').asText(),
'http://www.python.org')
def test_clickRFC3986(self):
"""
L{URL.click} should correctly resolve the examples in RFC 3986.
"""
base = URL.fromText(relativeLinkBaseForRFC3986)
for (ref, expected) in relativeLinkTestsForRFC3986:
self.assertEqual(base.click(ref).asText(), expected)
def test_clickSchemeRelPath(self):
"""
L{URL.click} should not accept schemes with relative paths.
"""
base = URL.fromText(relativeLinkBaseForRFC3986)
self.assertRaises(NotImplementedError, base.click, 'g:h')
self.assertRaises(NotImplementedError, base.click, 'http:h')
def test_cloneUnchanged(self):
"""
Verify that L{URL.replace} doesn't change any of the arguments it
is passed.
"""
urlpath = URL.fromText('https://x:1/y?z=1#A')
self.assertEqual(
urlpath.replace(urlpath.scheme,
urlpath.host,
urlpath.path,
urlpath.query,
urlpath.fragment,
urlpath.port),
urlpath)
self.assertEqual(
urlpath.replace(),
urlpath)
def test_clickCollapse(self):
"""
L{URL.click} collapses C{.} and C{..} according to RFC 3986 section
5.2.4.
"""
tests = [
['http://localhost/', '.', 'http://localhost/'],
['http://localhost/', '..', 'http://localhost/'],
['http://localhost/a/b/c', '.', 'http://localhost/a/b/'],
['http://localhost/a/b/c', '..', 'http://localhost/a/'],
['http://localhost/a/b/c', './d/e', 'http://localhost/a/b/d/e'],
['http://localhost/a/b/c', '../d/e', 'http://localhost/a/d/e'],
['http://localhost/a/b/c', '/./d/e', 'http://localhost/d/e'],
['http://localhost/a/b/c', '/../d/e', 'http://localhost/d/e'],
['http://localhost/a/b/c/', '../../d/e/',
'http://localhost/a/d/e/'],
['http://localhost/a/./c', '../d/e', 'http://localhost/d/e'],
['http://localhost/a/./c/', '../d/e', 'http://localhost/a/d/e'],
['http://localhost/a/b/c/d', './e/../f/../g',
'http://localhost/a/b/c/g'],
['http://localhost/a/b/c', 'd//e', 'http://localhost/a/b/d//e'],
]
for start, click, expected in tests:
actual = URL.fromText(start).click(click).asText()
self.assertEqual(
actual,
expected,
"{start}.click({click}) => {actual} not {expected}".format(
start=start,
click=repr(click),
actual=actual,
expected=expected,
)
)
def test_queryAdd(self):
"""
L{URL.add} adds query parameters.
"""
self.assertEqual(
"http://www.foo.com/a/nice/path/?foo=bar",
URL.fromText("http://www.foo.com/a/nice/path/")
.add(u"foo", u"bar").asText())
self.assertEqual(
"http://www.foo.com/?foo=bar",
URL(host=u"www.foo.com").add(u"foo", u"bar")
.asText())
urlpath = URL.fromText(theurl)
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=23&zut&burp",
urlpath.add(u"burp").asText())
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=23&zut&burp=xxx",
urlpath.add(u"burp", u"xxx").asText())
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=23&zut&burp=xxx&zing",
urlpath.add(u"burp", u"xxx").add(u"zing").asText())
# Note the inversion!
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=23&zut&zing&burp=xxx",
urlpath.add(u"zing").add(u"burp", u"xxx").asText())
# Note the two values for the same name.
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=23&zut&burp=xxx&zot=32",
urlpath.add(u"burp", u"xxx").add(u"zot", u'32')
.asText())
def test_querySet(self):
"""
L{URL.set} replaces query parameters by name.
"""
urlpath = URL.fromText(theurl)
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=32&zut",
urlpath.set(u"zot", u'32').asText())
# Replace name without value with name/value and vice-versa.
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot&zut=itworked",
urlpath.set(u"zot").set(u"zut", u"itworked").asText()
)
# Q: what happens when the query has two values and we replace?
# A: we replace both values with a single one
self.assertEqual(
"http://www.foo.com/a/nice/path/?zot=32&zut",
urlpath.add(u"zot", u"xxx").set(u"zot", u'32').asText()
)
def test_queryRemove(self):
"""
L{URL.remove} removes all instances of a query parameter.
"""
url = URL.fromText(u"https://example.com/a/b/?foo=1&bar=2&foo=3")
self.assertEqual(
url.remove(u"foo"),
URL.fromText(u"https://example.com/a/b/?bar=2")
)
def test_empty(self):
"""
An empty L{URL} should serialize as the empty string.
"""
self.assertEqual(URL().asText(), u'')
def test_justQueryText(self):
"""
An L{URL} with query text should serialize as just query text.
"""
u = URL(query=[(u"hello", u"world")])
self.assertEqual(u.asText(), u'?hello=world')
def test_identicalEqual(self):
"""
L{URL} compares equal to itself.
"""
u = URL.fromText('http://localhost/')
self.assertEqual(u, u)
def test_similarEqual(self):
"""
URLs with equivalent components should compare equal.
"""
u1 = URL.fromText('http://localhost/')
u2 = URL.fromText('http://localhost/')
self.assertEqual(u1, u2)
def test_differentNotEqual(self):
"""
L{URL}s that refer to different resources are both unequal (C{!=}) and
also not equal (not C{==}).
"""
u1 = URL.fromText('http://localhost/a')
u2 = URL.fromText('http://localhost/b')
self.assertFalse(u1 == u2, "%r != %r" % (u1, u2))
self.assertNotEqual(u1, u2)
def test_otherTypesNotEqual(self):
"""
L{URL} is not equal (C{==}) to other types.
"""
u = URL.fromText('http://localhost/')
self.assertFalse(u == 42, "URL must not equal a number.")
self.assertFalse(u == object(), "URL must not equal an object.")
self.assertNotEqual(u, 42)
self.assertNotEqual(u, object())
def test_identicalNotUnequal(self):
"""
Identical L{URL}s are not unequal (C{!=}) to each other.
"""
u = URL.fromText('http://localhost/')
self.assertFalse(u != u, "%r == itself" % u)
def test_similarNotUnequal(self):
"""
Structurally similar L{URL}s are not unequal (C{!=}) to each other.
"""
u1 = URL.fromText('http://localhost/')
u2 = URL.fromText('http://localhost/')
self.assertFalse(u1 != u2, "%r == %r" % (u1, u2))
def test_differentUnequal(self):
"""
Structurally different L{URL}s are unequal (C{!=}) to each other.
"""
u1 = URL.fromText('http://localhost/a')
u2 = URL.fromText('http://localhost/b')
self.assertTrue(u1 != u2, "%r == %r" % (u1, u2))
def test_otherTypesUnequal(self):
"""
L{URL} is unequal (C{!=}) to other types.
"""
u = URL.fromText('http://localhost/')
self.assertTrue(u != 42, "URL must differ from a number.")
        self.assertTrue(u != object(), "URL must differ from an object.")
def test_asURI(self):
"""
        L{URL.asURI} produces a URI, converting any non-ASCII text into
        IDNA- and percent-encoded US-ASCII, and returns a new L{URL}.
"""
unicodey = ('http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/'
'\N{LATIN SMALL LETTER E}\N{COMBINING ACUTE ACCENT}'
'?\N{LATIN SMALL LETTER A}\N{COMBINING ACUTE ACCENT}='
'\N{LATIN SMALL LETTER I}\N{COMBINING ACUTE ACCENT}'
'#\N{LATIN SMALL LETTER U}\N{COMBINING ACUTE ACCENT}')
iri = URL.fromText(unicodey)
uri = iri.asURI()
self.assertEqual(iri.host, '\N{LATIN SMALL LETTER E WITH ACUTE}.com')
self.assertEqual(iri.path[0],
'\N{LATIN SMALL LETTER E}\N{COMBINING ACUTE ACCENT}')
self.assertEqual(iri.asText(), unicodey)
expectedURI = 'http://xn--9ca.com/%C3%A9?%C3%A1=%C3%AD#%C3%BA'
actualURI = uri.asText()
self.assertEqual(actualURI, expectedURI,
'%r != %r' % (actualURI, expectedURI))
def test_asIRI(self):
"""
L{URL.asIRI} decodes any percent-encoded text in the URI, making it
more suitable for reading by humans, and returns a new L{URL}.
"""
asciiish = 'http://xn--9ca.com/%C3%A9?%C3%A1=%C3%AD#%C3%BA'
uri = URL.fromText(asciiish)
iri = uri.asIRI()
self.assertEqual(uri.host, 'xn--9ca.com')
self.assertEqual(uri.path[0], '%C3%A9')
self.assertEqual(uri.asText(), asciiish)
expectedIRI = ('http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/'
'\N{LATIN SMALL LETTER E WITH ACUTE}'
'?\N{LATIN SMALL LETTER A WITH ACUTE}='
'\N{LATIN SMALL LETTER I WITH ACUTE}'
'#\N{LATIN SMALL LETTER U WITH ACUTE}')
actualIRI = iri.asText()
self.assertEqual(actualIRI, expectedIRI,
'%r != %r' % (actualIRI, expectedIRI))
def test_badUTF8AsIRI(self):
"""
Bad UTF-8 in a path segment, query parameter, or fragment results in
that portion of the URI remaining percent-encoded in the IRI.
"""
urlWithBinary = 'http://xn--9ca.com/%00%FF/%C3%A9'
uri = URL.fromText(urlWithBinary)
iri = uri.asIRI()
expectedIRI = ('http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/'
'%00%FF/'
'\N{LATIN SMALL LETTER E WITH ACUTE}')
actualIRI = iri.asText()
self.assertEqual(actualIRI, expectedIRI,
'%r != %r' % (actualIRI, expectedIRI))
def test_alreadyIRIAsIRI(self):
"""
        A L{URL} composed of non-ASCII text remains non-ASCII when converted
        with L{URL.asIRI}.
"""
unicodey = ('http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/'
'\N{LATIN SMALL LETTER E}\N{COMBINING ACUTE ACCENT}'
'?\N{LATIN SMALL LETTER A}\N{COMBINING ACUTE ACCENT}='
'\N{LATIN SMALL LETTER I}\N{COMBINING ACUTE ACCENT}'
'#\N{LATIN SMALL LETTER U}\N{COMBINING ACUTE ACCENT}')
iri = URL.fromText(unicodey)
alsoIRI = iri.asIRI()
self.assertEqual(alsoIRI.asText(), unicodey)
def test_alreadyURIAsURI(self):
"""
A L{URL} composed of encoded text will remain encoded.
"""
expectedURI = 'http://xn--9ca.com/%C3%A9?%C3%A1=%C3%AD#%C3%BA'
uri = URL.fromText(expectedURI)
actualURI = uri.asURI().asText()
self.assertEqual(actualURI, expectedURI)
def test_userinfo(self):
"""
L{URL.fromText} will parse the C{userinfo} portion of the URI
separately from the host and port.
"""
url = URL.fromText(
'http://someuser:somepassword@example.com/some-segment@ignore'
)
self.assertEqual(url.authority(True),
'someuser:somepassword@example.com')
self.assertEqual(url.authority(False), 'someuser:@example.com')
self.assertEqual(url.userinfo, 'someuser:somepassword')
self.assertEqual(url.user, 'someuser')
self.assertEqual(url.asText(),
'http://someuser:@example.com/some-segment@ignore')
self.assertEqual(
url.replace(userinfo=u"someuser").asText(),
'http://someuser@example.com/some-segment@ignore'
)
def test_portText(self):
"""
L{URL.fromText} parses custom port numbers as integers.
"""
portURL = URL.fromText(u"http://www.example.com:8080/")
self.assertEqual(portURL.port, 8080)
self.assertEqual(portURL.asText(), u"http://www.example.com:8080/")
def test_mailto(self):
"""
Although L{URL} instances are mainly for dealing with HTTP, other
schemes (such as C{mailto:}) should work as well. For example,
L{URL.fromText}/L{URL.asText} round-trips cleanly for a C{mailto:} URL
representing an email address.
"""
self.assertEqual(URL.fromText(u"mailto:user@example.com").asText(),
u"mailto:user@example.com")
def test_queryIterable(self):
"""
When a L{URL} is created with a C{query} argument, the C{query}
argument is converted into an N-tuple of 2-tuples.
"""
url = URL(query=[[u'alpha', u'beta']])
self.assertEqual(url.query, ((u'alpha', u'beta'),))
def test_pathIterable(self):
"""
When a L{URL} is created with a C{path} argument, the C{path} is
converted into a tuple.
"""
url = URL(path=[u'hello', u'world'])
self.assertEqual(url.path, (u'hello', u'world'))
def test_invalidArguments(self):
"""
Passing an argument of the wrong type to any of the constructor
arguments of L{URL} will raise a descriptive L{TypeError}.
        L{URL} typechecks very aggressively to ensure that its constituent
parts are all properly immutable and to prevent confusing errors when
bad data crops up in a method call long after the code that called the
constructor is off the stack.
"""
class Unexpected(object):
def __str__(self):
return "wrong"
def __repr__(self):
return "<unexpected>"
defaultExpectation = "unicode" if bytes is str else "str"
def assertRaised(raised, expectation, name):
self.assertEqual(str(raised.exception),
"expected {} for {}, got {}".format(
expectation,
name, "<unexpected>"))
def check(param, expectation=defaultExpectation):
with self.assertRaises(TypeError) as raised:
URL(**{param: Unexpected()})
assertRaised(raised, expectation, param)
check("scheme")
check("host")
check("fragment")
check("rooted", "bool")
check("userinfo")
check("port", "int or NoneType")
with self.assertRaises(TypeError) as raised:
URL(path=[Unexpected(),])
assertRaised(raised, defaultExpectation, "path segment")
with self.assertRaises(TypeError) as raised:
URL(query=[(u"name", Unexpected()),])
assertRaised(raised, defaultExpectation + " or NoneType",
"query parameter value")
with self.assertRaises(TypeError) as raised:
URL(query=[(Unexpected(), u"value"),])
assertRaised(raised, defaultExpectation, "query parameter name")
# No custom error message for this one, just want to make sure
# non-2-tuples don't get through.
with self.assertRaises(TypeError):
URL(query=[Unexpected()])
with self.assertRaises(ValueError):
URL(query=[(u'k', u'v', u'vv')])
with self.assertRaises(ValueError):
URL(query=[(u'k',)])
url = URL.fromText("https://valid.example.com/")
with self.assertRaises(TypeError) as raised:
url.child(Unexpected())
assertRaised(raised, defaultExpectation, "path segment")
with self.assertRaises(TypeError) as raised:
url.sibling(Unexpected())
assertRaised(raised, defaultExpectation, "path segment")
with self.assertRaises(TypeError) as raised:
url.click(Unexpected())
assertRaised(raised, defaultExpectation, "relative URL")
def test_technicallyTextIsIterableBut(self):
"""
Technically, L{str} (or L{unicode}, as appropriate) is iterable, but
C{URL(path="foo")} resulting in C{URL.fromText("f/o/o")} is never what
you want.
"""
with self.assertRaises(TypeError) as raised:
URL(path=u'foo')
self.assertEqual(
str(raised.exception),
"expected iterable of text for path, not: {}"
.format(repr(u'foo'))
)
class URLDeprecationTests(SynchronousTestCase):
"""
L{twisted.python.url} is deprecated.
"""
def test_urlDeprecation(self):
"""
L{twisted.python.url} is deprecated since Twisted 17.5.0.
"""
from twisted.python import url
url
warningsShown = self.flushWarnings([self.test_urlDeprecation])
self.assertEqual(1, len(warningsShown))
self.assertEqual(
("twisted.python.url was deprecated in Twisted 17.5.0:"
" Please use hyperlink from PyPI instead."),
warningsShown[0]['message'])

View file

@ -0,0 +1,290 @@
# -*- test-case-name: twisted.python.test.test_urlpath -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.urlpath}.
"""
from twisted.trial import unittest
from twisted.python import urlpath
from twisted.python.compat import _PY3
class _BaseURLPathTests(object):
"""
Tests for instantiated L{urlpath.URLPath}s.
"""
def test_partsAreBytes(self):
"""
All of the attributes of L{urlpath.URLPath} should be L{bytes}.
"""
self.assertIsInstance(self.path.scheme, bytes)
self.assertIsInstance(self.path.netloc, bytes)
self.assertIsInstance(self.path.path, bytes)
self.assertIsInstance(self.path.query, bytes)
self.assertIsInstance(self.path.fragment, bytes)
def test_strReturnsStr(self):
"""
Calling C{str()} with a L{URLPath} will always return a L{str}.
"""
self.assertEqual(type(self.path.__str__()), str)
def test_mutabilityWithText(self, stringType=type(u"")):
"""
Setting attributes on L{urlpath.URLPath} should change the value
returned by L{str}.
@param stringType: a callable to parameterize this test for different
text types.
@type stringType: 1-argument callable taking L{unicode} and returning
L{str} or L{bytes}.
"""
self.path.scheme = stringType(u"https")
self.assertEqual(
str(self.path),
"https://example.com/foo/bar?yes=no&no=yes#footer")
self.path.netloc = stringType(u"another.example.invalid")
self.assertEqual(
str(self.path),
"https://another.example.invalid/foo/bar?yes=no&no=yes#footer")
self.path.path = stringType(u"/hello")
self.assertEqual(
str(self.path),
"https://another.example.invalid/hello?yes=no&no=yes#footer")
self.path.query = stringType(u"alpha=omega&opposites=same")
self.assertEqual(
str(self.path),
"https://another.example.invalid/hello?alpha=omega&opposites=same"
"#footer")
self.path.fragment = stringType(u"header")
self.assertEqual(
str(self.path),
"https://another.example.invalid/hello?alpha=omega&opposites=same"
"#header")
def test_mutabilityWithBytes(self):
"""
Same as L{test_mutabilityWithText} but for bytes.
"""
self.test_mutabilityWithText(lambda x: x.encode("ascii"))
def test_allAttributesAreBytes(self):
"""
A created L{URLPath} has bytes attributes.
"""
self.assertIsInstance(self.path.scheme, bytes)
self.assertIsInstance(self.path.netloc, bytes)
self.assertIsInstance(self.path.path, bytes)
self.assertIsInstance(self.path.query, bytes)
self.assertIsInstance(self.path.fragment, bytes)
def test_stringConversion(self):
"""
Calling C{str()} with a L{URLPath} will return the same URL that it was
constructed with.
"""
self.assertEqual(str(self.path),
"http://example.com/foo/bar?yes=no&no=yes#footer")
def test_childString(self):
"""
Calling C{str()} with a C{URLPath.child()} will return a URL which is
the child of the URL it was instantiated with.
"""
self.assertEqual(str(self.path.child(b'hello')),
"http://example.com/foo/bar/hello")
self.assertEqual(str(self.path.child(b'hello').child(b'')),
"http://example.com/foo/bar/hello/")
self.assertEqual(str(self.path.child(b'hello', keepQuery=True)),
"http://example.com/foo/bar/hello?yes=no&no=yes")
def test_siblingString(self):
"""
Calling C{str()} with a C{URLPath.sibling()} will return a URL which is
the sibling of the URL it was instantiated with.
"""
self.assertEqual(str(self.path.sibling(b'baz')),
'http://example.com/foo/baz')
self.assertEqual(str(self.path.sibling(b'baz', keepQuery=True)),
"http://example.com/foo/baz?yes=no&no=yes")
# The sibling of http://example.com/foo/bar/
        # is http://example.com/foo/bar/baz
# because really we are constructing a sibling of
# http://example.com/foo/bar/index.html
self.assertEqual(str(self.path.child(b'').sibling(b'baz')),
'http://example.com/foo/bar/baz')
def test_parentString(self):
"""
Calling C{str()} with a C{URLPath.parent()} will return a URL which is
the parent of the URL it was instantiated with.
"""
# .parent() should be equivalent to '..'
# 'foo' is the current directory, '/' is the parent directory
self.assertEqual(str(self.path.parent()),
'http://example.com/')
self.assertEqual(str(self.path.parent(keepQuery=True)),
'http://example.com/?yes=no&no=yes')
self.assertEqual(str(self.path.child(b'').parent()),
'http://example.com/foo/')
self.assertEqual(str(self.path.child(b'baz').parent()),
'http://example.com/foo/')
self.assertEqual(
str(self.path.parent().parent().parent().parent().parent()),
'http://example.com/')
def test_hereString(self):
"""
Calling C{str()} with a C{URLPath.here()} will return a URL which is
the URL that it was instantiated with, without any file, query, or
fragment.
"""
# .here() should be equivalent to '.'
self.assertEqual(str(self.path.here()), 'http://example.com/foo/')
self.assertEqual(str(self.path.here(keepQuery=True)),
'http://example.com/foo/?yes=no&no=yes')
self.assertEqual(str(self.path.child(b'').here()),
'http://example.com/foo/bar/')
def test_doubleSlash(self):
"""
Calling L{urlpath.URLPath.click} on a L{urlpath.URLPath} with a
trailing slash with a relative URL containing a leading slash will
result in a URL with a single slash at the start of the path portion.
"""
self.assertEqual(
str(self.path.click(b"/hello/world")).encode("ascii"),
b"http://example.com/hello/world"
)
def test_pathList(self):
"""
L{urlpath.URLPath.pathList} returns a L{list} of L{bytes}.
"""
self.assertEqual(
self.path.child(b"%00%01%02").pathList(),
[b"", b"foo", b"bar", b"%00%01%02"]
)
# Just testing that the 'copy' argument exists for compatibility; it
# was originally provided for performance reasons, and its behavioral
# contract is kind of nonsense (where is the state shared? who with?)
# so it doesn't actually *do* anything any more.
self.assertEqual(
self.path.child(b"%00%01%02").pathList(copy=False),
[b"", b"foo", b"bar", b"%00%01%02"]
)
self.assertEqual(
self.path.child(b"%00%01%02").pathList(unquote=True),
[b"", b"foo", b"bar", b"\x00\x01\x02"]
)
class BytesURLPathTests(_BaseURLPathTests, unittest.TestCase):
"""
Tests for interacting with a L{URLPath} created with C{fromBytes}.
"""
def setUp(self):
self.path = urlpath.URLPath.fromBytes(
b"http://example.com/foo/bar?yes=no&no=yes#footer")
def test_mustBeBytes(self):
"""
L{URLPath.fromBytes} must take a L{bytes} argument.
"""
with self.assertRaises(ValueError):
urlpath.URLPath.fromBytes(None)
with self.assertRaises(ValueError):
urlpath.URLPath.fromBytes(u"someurl")
def test_withoutArguments(self):
"""
An instantiation with no arguments creates a usable L{URLPath} with
default arguments.
"""
url = urlpath.URLPath()
self.assertEqual(str(url), "http://localhost/")
def test_partialArguments(self):
"""
Leaving some optional arguments unfilled makes a L{URLPath} with those
optional arguments filled with defaults.
"""
# Not a "full" URL given to fromBytes, no /
# / is filled in
url = urlpath.URLPath.fromBytes(b"http://google.com")
self.assertEqual(url.scheme, b"http")
self.assertEqual(url.netloc, b"google.com")
self.assertEqual(url.path, b"/")
self.assertEqual(url.fragment, b"")
self.assertEqual(url.query, b"")
self.assertEqual(str(url), "http://google.com/")
def test_nonASCIIBytes(self):
"""
        L{URLPath.fromBytes} interprets non-ASCII bytes as percent-encoded
        octets.
"""
url = urlpath.URLPath.fromBytes(b"http://example.com/\xff\x00")
self.assertEqual(str(url), "http://example.com/%FF%00")
class StringURLPathTests(_BaseURLPathTests, unittest.TestCase):
"""
Tests for interacting with a L{URLPath} created with C{fromString} and a
L{str} argument.
"""
def setUp(self):
self.path = urlpath.URLPath.fromString(
"http://example.com/foo/bar?yes=no&no=yes#footer")
def test_mustBeStr(self):
"""
C{URLPath.fromString} must take a L{str} or L{unicode} argument.
"""
with self.assertRaises(ValueError):
urlpath.URLPath.fromString(None)
if _PY3:
with self.assertRaises(ValueError):
urlpath.URLPath.fromString(b"someurl")
class UnicodeURLPathTests(_BaseURLPathTests, unittest.TestCase):
"""
Tests for interacting with a L{URLPath} created with C{fromString} and a
L{unicode} argument.
"""
def setUp(self):
self.path = urlpath.URLPath.fromString(
u"http://example.com/foo/bar?yes=no&no=yes#footer")
def test_nonASCIICharacters(self):
"""
L{URLPath.fromString} can load non-ASCII characters.
"""
url = urlpath.URLPath.fromString(u"http://example.com/\xff\x00")
self.assertEqual(str(url), "http://example.com/%C3%BF%00")

File diff suppressed because it is too large

View file

@ -0,0 +1,176 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.versions}.
"""
from __future__ import division, absolute_import
import operator
from twisted.python.versions import getVersionString, IncomparableVersions
from twisted.python.versions import Version
from incremental import _inf
from twisted.trial.unittest import SynchronousTestCase as TestCase
class VersionsTests(TestCase):
def test_versionComparison(self):
"""
Versions can be compared for equality and order.
"""
va = Version("dummy", 1, 0, 0)
vb = Version("dummy", 0, 1, 0)
self.assertTrue(va > vb)
self.assertTrue(vb < va)
self.assertTrue(va >= vb)
self.assertTrue(vb <= va)
self.assertTrue(va != vb)
self.assertTrue(vb == Version("dummy", 0, 1, 0))
self.assertTrue(vb == vb)
def test_versionComparisonCaseInsensitive(self):
"""
        The package names of L{Version} objects are compared
        case-insensitively.
"""
va = Version("twisted", 1, 0, 0)
vb = Version("Twisted", 0, 1, 0)
self.assertTrue(va > vb)
self.assertTrue(vb < va)
self.assertTrue(va >= vb)
self.assertTrue(vb <= va)
self.assertTrue(va != vb)
self.assertTrue(vb == Version("twisted", 0, 1, 0))
self.assertTrue(vb == Version("TWISted", 0, 1, 0))
self.assertTrue(vb == vb)
def test_comparingPrereleasesWithReleases(self):
"""
Prereleases are always less than versions without prereleases.
"""
va = Version("whatever", 1, 0, 0, prerelease=1)
vb = Version("whatever", 1, 0, 0)
self.assertTrue(va < vb)
self.assertFalse(va > vb)
self.assertNotEqual(vb, va)
def test_comparingPrereleases(self):
"""
The value specified as the prerelease is used in version comparisons.
"""
va = Version("whatever", 1, 0, 0, prerelease=1)
vb = Version("whatever", 1, 0, 0, prerelease=2)
self.assertTrue(va < vb)
self.assertTrue(vb > va)
self.assertTrue(va <= vb)
self.assertTrue(vb >= va)
self.assertTrue(va != vb)
self.assertTrue(vb == Version("whatever", 1, 0, 0, prerelease=2))
self.assertTrue(va == va)
def test_infComparison(self):
"""
L{_inf} is equal to L{_inf}.
This is a regression test.
"""
self.assertEqual(_inf, _inf)
def test_disallowBuggyComparisons(self):
"""
        The package names of the Version objects need to be the same;
        comparing versions of different packages raises
        L{IncomparableVersions}.
"""
self.assertRaises(IncomparableVersions,
operator.eq,
Version("dummy", 1, 0, 0),
Version("dumym", 1, 0, 0))
def test_notImplementedComparisons(self):
"""
Comparing a L{Version} to some other object type results in
C{NotImplemented}.
"""
va = Version("dummy", 1, 0, 0)
vb = ("dummy", 1, 0, 0) # a tuple is not a Version object
self.assertEqual(va.__cmp__(vb), NotImplemented)
def test_repr(self):
"""
Calling C{repr} on a version returns a human-readable string
representation of the version.
"""
self.assertEqual(repr(Version("dummy", 1, 2, 3)),
"Version('dummy', 1, 2, 3)")
def test_reprWithPrerelease(self):
"""
Calling C{repr} on a version with a prerelease returns a human-readable
string representation of the version including the prerelease.
"""
self.assertEqual(repr(Version("dummy", 1, 2, 3, prerelease=4)),
"Version('dummy', 1, 2, 3, release_candidate=4)")
def test_str(self):
"""
Calling C{str} on a version returns a human-readable string
representation of the version.
"""
self.assertEqual(str(Version("dummy", 1, 2, 3)),
"[dummy, version 1.2.3]")
def test_strWithPrerelease(self):
"""
Calling C{str} on a version with a prerelease includes the prerelease.
"""
self.assertEqual(str(Version("dummy", 1, 0, 0, prerelease=1)),
"[dummy, version 1.0.0rc1]")
def testShort(self):
self.assertEqual(Version('dummy', 1, 2, 3).short(), '1.2.3')
def test_getVersionString(self):
"""
L{getVersionString} returns a string with the package name and the
short version number.
"""
self.assertEqual(
'Twisted 8.0.0', getVersionString(Version('Twisted', 8, 0, 0)))
def test_getVersionStringWithPrerelease(self):
"""
L{getVersionString} includes the prerelease, if any.
"""
self.assertEqual(
getVersionString(Version("whatever", 8, 0, 0, prerelease=1)),
"whatever 8.0.0rc1")
def test_base(self):
"""
The L{base} method returns a very simple representation of the version.
"""
self.assertEqual(Version("foo", 1, 0, 0).base(), "1.0.0")
def test_baseWithPrerelease(self):
"""
        The base version includes 'rcX' for versions with prereleases.
"""
self.assertEqual(Version("foo", 1, 0, 0, prerelease=8).base(),
"1.0.0rc8")

View file

@ -0,0 +1,101 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases covering L{twisted.python.zippath}.
"""
from __future__ import absolute_import, division
import os
import zipfile
from twisted.test.test_paths import AbstractFilePathTests
from twisted.python.zippath import ZipArchive
from twisted.python.filepath import _coerceToFilesystemEncoding
def zipit(dirname, zfname):
"""
    Create a zipfile at C{zfname}, containing the contents of C{dirname}.
"""
dirname = _coerceToFilesystemEncoding('', dirname)
zfname = _coerceToFilesystemEncoding('', zfname)
with zipfile.ZipFile(zfname, "w") as zf:
        for root, ignored, files in os.walk(dirname):
for fname in files:
fspath = os.path.join(root, fname)
arcpath = os.path.join(root, fname)[len(dirname)+1:]
zf.write(fspath, arcpath)
class ZipFilePathTests(AbstractFilePathTests):
"""
Test various L{ZipPath} path manipulations as well as reprs for L{ZipPath}
and L{ZipArchive}.
"""
def setUp(self):
AbstractFilePathTests.setUp(self)
zipit(self.cmn, self.cmn + b'.zip')
self.nativecmn = _coerceToFilesystemEncoding('', self.cmn)
self.path = ZipArchive(self.cmn + b'.zip')
self.root = self.path
self.all = [x.replace(self.cmn, self.cmn + b'.zip')
for x in self.all]
def test_zipPathRepr(self):
"""
Make sure that invoking ZipPath's repr prints the correct class name
and an absolute path to the zip file.
"""
child = self.path.child("foo")
pathRepr = "ZipPath(%r)" % (
os.path.abspath(self.nativecmn + ".zip" + os.sep + 'foo'),)
# Check for an absolute path
self.assertEqual(repr(child), pathRepr)
# Create a path to the file rooted in the current working directory
relativeCommon = self.nativecmn.replace(os.getcwd() + os.sep,
"", 1) + ".zip"
relpath = ZipArchive(relativeCommon)
child = relpath.child("foo")
# Check using a path without the cwd prepended
self.assertEqual(repr(child), pathRepr)
def test_zipPathReprParentDirSegment(self):
"""
The repr of a ZipPath with C{".."} in the internal part of its path
includes the C{".."} rather than applying the usual parent directory
meaning.
"""
child = self.path.child("foo").child("..").child("bar")
pathRepr = "ZipPath(%r)" % (
self.nativecmn + ".zip" + os.sep.join(["", "foo", "..", "bar"]))
self.assertEqual(repr(child), pathRepr)
def test_zipArchiveRepr(self):
"""
Make sure that invoking ZipArchive's repr prints the correct class
name and an absolute path to the zip file.
"""
path = ZipArchive(self.nativecmn + '.zip')
pathRepr = 'ZipArchive(%r)' % (os.path.abspath(
self.nativecmn + '.zip'),)
# Check for an absolute path
self.assertEqual(repr(path), pathRepr)
# Create a path to the file rooted in the current working directory
relativeCommon = self.nativecmn.replace(os.getcwd() + os.sep,
"", 1) + ".zip"
relpath = ZipArchive(relativeCommon)
# Check using a path without the cwd prepended
self.assertEqual(repr(relpath), pathRepr)

View file

@ -0,0 +1,361 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.zipstream}
"""
import random
import struct
import zipfile
from hashlib import md5
from twisted.python import zipstream, filepath
from twisted.trial import unittest
class FileEntryMixin(object):
"""
File entry classes should behave as file-like objects
"""
def getFileEntry(self, contents):
"""
Return an appropriate zip file entry
"""
filename = self.mktemp()
with zipfile.ZipFile(filename, 'w', self.compression) as z:
z.writestr('content', contents)
z = zipstream.ChunkingZipFile(filename, 'r')
return z.readfile('content')
def test_isatty(self):
"""
zip files should not be ttys, so isatty() should be false
"""
with self.getFileEntry('') as fileEntry:
self.assertFalse(fileEntry.isatty())
def test_closed(self):
"""
The C{closed} attribute should reflect whether C{close()} has been
called.
"""
with self.getFileEntry('') as fileEntry:
self.assertFalse(fileEntry.closed)
self.assertTrue(fileEntry.closed)
def test_readline(self):
"""
C{readline()} should mirror L{file.readline} and return up to a single
delimiter.
"""
with self.getFileEntry(b'hoho\nho') as fileEntry:
self.assertEqual(fileEntry.readline(), b'hoho\n')
self.assertEqual(fileEntry.readline(), b'ho')
self.assertEqual(fileEntry.readline(), b'')
def test_next(self):
"""
Zip file entries should implement the iterator protocol as files do.
"""
with self.getFileEntry(b'ho\nhoho') as fileEntry:
self.assertEqual(fileEntry.next(), b'ho\n')
self.assertEqual(fileEntry.next(), b'hoho')
self.assertRaises(StopIteration, fileEntry.next)
def test_readlines(self):
"""
C{readlines()} should return a list of all the lines.
"""
with self.getFileEntry(b'ho\nho\nho') as fileEntry:
self.assertEqual(fileEntry.readlines(), [b'ho\n', b'ho\n', b'ho'])
def test_iteration(self):
"""
C{__iter__()} and C{xreadlines()} should return C{self}.
"""
with self.getFileEntry('') as fileEntry:
self.assertIs(iter(fileEntry), fileEntry)
self.assertIs(fileEntry.xreadlines(), fileEntry)
def test_readWhole(self):
"""
C{.read()} should read the entire file.
"""
contents = b"Hello, world!"
with self.getFileEntry(contents) as entry:
self.assertEqual(entry.read(), contents)
def test_readPartial(self):
"""
C{.read(num)} should read num bytes from the file.
"""
contents = "0123456789"
with self.getFileEntry(contents) as entry:
one = entry.read(4)
two = entry.read(200)
self.assertEqual(one, b"0123")
self.assertEqual(two, b"456789")
def test_tell(self):
"""
C{.tell()} should return the number of bytes that have been read so
far.
"""
contents = "x" * 100
with self.getFileEntry(contents) as entry:
entry.read(2)
self.assertEqual(entry.tell(), 2)
entry.read(4)
self.assertEqual(entry.tell(), 6)
class DeflatedZipFileEntryTests(FileEntryMixin, unittest.TestCase):
"""
DeflatedZipFileEntry should be file-like
"""
compression = zipfile.ZIP_DEFLATED
class ZipFileEntryTests(FileEntryMixin, unittest.TestCase):
"""
ZipFileEntry should be file-like
"""
compression = zipfile.ZIP_STORED
class ZipstreamTests(unittest.TestCase):
"""
Tests for twisted.python.zipstream
"""
def setUp(self):
"""
Creates junk data that can be compressed and a test directory for any
files that will be created
"""
self.testdir = filepath.FilePath(self.mktemp())
self.testdir.makedirs()
self.unzipdir = self.testdir.child('unzipped')
self.unzipdir.makedirs()
def makeZipFile(self, contents, directory=''):
"""
Makes a zip file archive containing len(contents) files. Contents
should be a list of strings, each string being the content of one file.
"""
zpfilename = self.testdir.child('zipfile.zip').path
with zipfile.ZipFile(zpfilename, 'w') as zpfile:
for i, content in enumerate(contents):
filename = str(i)
if directory:
filename = directory + "/" + filename
zpfile.writestr(filename, content)
return zpfilename
def test_invalidMode(self):
"""
A ChunkingZipFile opened in write-mode should not allow .readfile(),
and raise a RuntimeError instead.
"""
with zipstream.ChunkingZipFile(self.mktemp(), "w") as czf:
self.assertRaises(RuntimeError, czf.readfile, "something")
def test_closedArchive(self):
"""
A closed ChunkingZipFile should raise a L{RuntimeError} when
.readfile() is invoked.
"""
czf = zipstream.ChunkingZipFile(self.makeZipFile(["something"]), "r")
czf.close()
self.assertRaises(RuntimeError, czf.readfile, "something")
def test_invalidHeader(self):
"""
A zipfile entry with the wrong magic number should raise BadZipfile for
readfile(), but that should not affect other files in the archive.
"""
fn = self.makeZipFile(["test contents",
"more contents"])
with zipfile.ZipFile(fn, "r") as zf:
zeroOffset = zf.getinfo("0").header_offset
# Zero out just the one header.
with open(fn, "r+b") as scribble:
scribble.seek(zeroOffset, 0)
scribble.write(b'0' * 4)
with zipstream.ChunkingZipFile(fn) as czf:
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
with czf.readfile("1") as zfe:
self.assertEqual(zfe.read(), b"more contents")
def test_filenameMismatch(self):
"""
A zipfile entry with a different filename than is found in the central
directory should raise BadZipfile.
"""
fn = self.makeZipFile([b"test contents",
b"more contents"])
with zipfile.ZipFile(fn, "r") as zf:
info = zf.getinfo("0")
info.filename = "not zero"
with open(fn, "r+b") as scribble:
scribble.seek(info.header_offset, 0)
scribble.write(info.FileHeader())
with zipstream.ChunkingZipFile(fn) as czf:
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
with czf.readfile("1") as zfe:
self.assertEqual(zfe.read(), b"more contents")
def test_unsupportedCompression(self):
"""
A zipfile which describes an unsupported compression mechanism should
raise BadZipfile.
"""
fn = self.mktemp()
with zipfile.ZipFile(fn, "w") as zf:
zi = zipfile.ZipInfo("0")
zf.writestr(zi, "some data")
# Mangle its compression type in the central directory; can't do
# this before the writestr call or zipfile will (correctly) tell us
# not to pass bad compression types :)
zi.compress_type = 1234
with zipstream.ChunkingZipFile(fn) as czf:
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
def test_extraData(self):
"""
readfile() should skip over 'extra' data present in the zip metadata.
"""
fn = self.mktemp()
with zipfile.ZipFile(fn, 'w') as zf:
zi = zipfile.ZipInfo("0")
extra_data = b"hello, extra"
zi.extra = (
struct.pack('<hh', 42, len(extra_data))
+ extra_data
)
zf.writestr(zi, b"the real data")
with zipstream.ChunkingZipFile(fn) as czf, czf.readfile("0") as zfe:
self.assertEqual(zfe.read(), b"the real data")
def test_unzipIterChunky(self):
"""
L{twisted.python.zipstream.unzipIterChunky} returns an iterator which
must be exhausted to completely unzip the input archive.
"""
numfiles = 10
contents = ['This is test file %d!' % i for i in range(numfiles)]
contents = [i.encode("ascii") for i in contents]
zpfilename = self.makeZipFile(contents)
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
self.assertEqual(
set(self.unzipdir.listdir()),
set(map(str, range(numfiles))))
for child in self.unzipdir.children():
num = int(child.basename())
self.assertEqual(child.getContent(), contents[num])
def test_unzipIterChunkyDirectory(self):
"""
The path to which a file is extracted by L{zipstream.unzipIterChunky}
is determined by joining the C{directory} argument to C{unzip} with the
path within the archive of the file being extracted.
"""
numfiles = 10
contents = ['This is test file %d!' % i for i in range(numfiles)]
contents = [i.encode("ascii") for i in contents]
zpfilename = self.makeZipFile(contents, 'foo')
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
fileContents = {str(num).encode("ascii") for num in range(numfiles)}
self.assertEqual(
set(self.unzipdir.child(b'foo').listdir()),
fileContents)
for child in self.unzipdir.child(b'foo').children():
num = int(child.basename())
self.assertEqual(child.getContent(), contents[num])
# XXX these tests are kind of gross and old, but I think unzipIterChunky is
# kind of a gross function anyway. We should really write an abstract
# copyTo/moveTo that operates on FilePath and make sure ZipPath can support
# it, then just deprecate / remove this stuff.
def _unzipIterChunkyTest(self, compression, chunksize, lower, upper):
"""
unzipIterChunky should unzip the given number of bytes per iteration.
"""
junk = b''
for n in range(1000):
num = round(random.random(), 12)
numEncoded = str(num).encode("ascii")
junk += b' '+numEncoded
junkmd5 = md5(junk).hexdigest()
tempdir = filepath.FilePath(self.mktemp())
tempdir.makedirs()
zfpath = tempdir.child('bigfile.zip').path
self._makebigfile(zfpath, compression, junk)
uziter = zipstream.unzipIterChunky(zfpath, tempdir.path,
chunksize=chunksize)
r = next(uziter)
# test that the number of chunks is in the right ballpark;
# this could theoretically be any number but statistically it
# should always be in this range
approx = lower < r < upper
self.assertTrue(approx)
for r in uziter:
pass
self.assertEqual(r, 0)
with tempdir.child("zipstreamjunk").open() as f:
newmd5 = md5(f.read()).hexdigest()
self.assertEqual(newmd5, junkmd5)
def test_unzipIterChunkyStored(self):
"""
unzipIterChunky should unzip the given number of bytes per iteration on
a stored archive.
"""
self._unzipIterChunkyTest(zipfile.ZIP_STORED, 500, 35, 45)
def test_chunkyDeflated(self):
"""
unzipIterChunky should unzip the given number of bytes per iteration on
a deflated archive.
"""
self._unzipIterChunkyTest(zipfile.ZIP_DEFLATED, 972, 23, 27)
def _makebigfile(self, filename, compression, junk):
"""
Create a zip file with the given file name and compression scheme.
"""
with zipfile.ZipFile(filename, 'w', compression) as zf:
for i in range(10):
fn = 'zipstream%d' % i
zf.writestr(fn, "")
zf.writestr('zipstreamjunk', junk)
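# Illustrative sketch (not part of the original tests): typical use of
# unzipIterChunky() outside a test; the iterator yields a countdown of
# remaining chunks and must be exhausted for extraction to finish. The
# argument names below are hypothetical.
def _unzipIterChunkyExample(archivePath, destinationDir):
    for remaining in zipstream.unzipIterChunky(archivePath, destinationDir):
        pass   # 'remaining' could drive a progress indicator here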

View file

@ -0,0 +1,208 @@
# -*- test-case-name: twisted.test.test_text -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Miscellany of text-munging functions.
"""
def stringyString(object, indentation=''):
"""
Expansive string formatting for sequence types.
C{list.__str__} and C{dict.__str__} use C{repr()} to display their
elements. This function also turns these sequence types
into strings, but uses C{str()} on their elements instead.
Sequence elements are also displayed on separate lines, and nested
sequences have nested indentation.
"""
braces = ''
sl = []
if type(object) is dict:
braces = '{}'
for key, value in object.items():
            value = stringyString(value, indentation + '   ')
if isMultiline(value):
if endsInNewline(value):
value = value[:-len('\n')]
sl.append("%s %s:\n%s" % (indentation, key, value))
else:
# Oops. Will have to move that indentation.
sl.append("%s %s: %s" % (indentation, key,
value[len(indentation) + 3:]))
elif type(object) is tuple or type(object) is list:
if type(object) is tuple:
braces = '()'
else:
braces = '[]'
for element in object:
element = stringyString(element, indentation + ' ')
sl.append(element.rstrip() + ',')
else:
sl[:] = map(lambda s, i=indentation: i + s,
str(object).split('\n'))
if not sl:
sl.append(indentation)
if braces:
sl[0] = indentation + braces[0] + sl[0][len(indentation) + 1:]
sl[-1] = sl[-1] + braces[-1]
s = "\n".join(sl)
if isMultiline(s) and not endsInNewline(s):
s = s + '\n'
return s
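# Illustrative sketch (not part of the original module): stringyString() is
# useful for dumping nested, configuration-like structures; it applies str()
# to each element and puts elements on separate, indented lines.
def _stringyStringExample():
    data = {'servers': ['alpha', 'beta'], 'retries': 3}
    return stringyString(data)   # a multi-line string, indented by nesting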
def isMultiline(s):
"""
Returns C{True} if this string has a newline in it.
"""
return (s.find('\n') != -1)
def endsInNewline(s):
"""
Returns C{True} if this string ends in a newline.
"""
return (s[-len('\n'):] == '\n')
def greedyWrap(inString, width=80):
"""
Given a string and a column width, return a list of lines.
    Caveat: I use a stupid greedy word-wrapping
    algorithm. I won't put two spaces at the end
    of a sentence. I don't do full justification.
    And no, I've never even *heard* of hyphenation.
"""
outLines = []
#eww, evil hacks to allow paragraphs delimited by two \ns :(
if inString.find('\n\n') >= 0:
paragraphs = inString.split('\n\n')
for para in paragraphs:
outLines.extend(greedyWrap(para, width) + [''])
return outLines
inWords = inString.split()
column = 0
ptr_line = 0
while inWords:
column = column + len(inWords[ptr_line])
ptr_line = ptr_line + 1
if (column > width):
if ptr_line == 1:
# This single word is too long, it will be the whole line.
pass
else:
# We've gone too far, stop the line one word back.
ptr_line = ptr_line - 1
(l, inWords) = (inWords[0:ptr_line], inWords[ptr_line:])
outLines.append(' '.join(l))
ptr_line = 0
column = 0
elif not (len(inWords) > ptr_line):
# Clean up the last bit.
outLines.append(' '.join(inWords))
del inWords[:]
else:
# Space
column = column + 1
# next word
return outLines
wordWrap = greedyWrap
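# Illustrative sketch (not part of the original module): greedyWrap() packs as
# many whole words as fit within the requested width onto each line; e.g.
# greedyWrap('aa bb cc dd', width=5) should yield ['aa bb', 'cc dd'].
def _greedyWrapExample():
    text = "the quick brown fox jumps over the lazy dog"
    return greedyWrap(text, width=16)   # list of lines; words are never split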
def removeLeadingBlanks(lines):
ret = []
for line in lines:
if ret or line.strip():
ret.append(line)
return ret
def removeLeadingTrailingBlanks(s):
lines = removeLeadingBlanks(s.split('\n'))
lines.reverse()
lines = removeLeadingBlanks(lines)
lines.reverse()
return '\n'.join(lines)+'\n'
def splitQuoted(s):
"""
Like a string split, but don't break substrings inside quotes.
>>> splitQuoted('the "hairy monkey" likes pie')
['the', 'hairy monkey', 'likes', 'pie']
Another one of those "someone must have a better solution for
this" things. This implementation is a VERY DUMB hack done too
quickly.
"""
out = []
quot = None
phrase = None
for word in s.split():
if phrase is None:
if word and (word[0] in ("\"", "'")):
quot = word[0]
word = word[1:]
phrase = []
if phrase is None:
out.append(word)
else:
if word and (word[-1] == quot):
word = word[:-1]
phrase.append(word)
out.append(" ".join(phrase))
phrase = None
else:
phrase.append(word)
return out
def strFile(p, f, caseSensitive=True):
"""
Find whether string C{p} occurs in a read()able object C{f}.
@rtype: C{bool}
"""
buf = type(p)()
buf_len = max(len(p), 2**2**2**2)
if not caseSensitive:
p = p.lower()
while 1:
r = f.read(buf_len-len(p))
if not caseSensitive:
r = r.lower()
bytes_read = len(r)
if bytes_read == 0:
return False
l = len(buf)+bytes_read-buf_len
if l <= 0:
buf = buf + r
else:
buf = buf[l:] + r
if buf.find(p) != -1:
return True
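# Illustrative sketch (not part of the original module): strFile() accepts any
# read()able object, scanning it in large chunks while keeping enough overlap
# that a match spanning a chunk boundary is still found.
def _strFileExample():
    import io
    haystack = io.StringIO(u"the hairy monkey likes pie")
    return strFile(u"MONKEY", haystack, caseSensitive=False)   # True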

View file

@ -0,0 +1,141 @@
# -*- test-case-name: twisted.python.test_threadable -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A module to provide some very basic threading primitives, such as
synchronization.
"""
from __future__ import division, absolute_import
from functools import wraps
class DummyLock(object):
"""
Hack to allow locks to be unpickled on an unthreaded system.
"""
def __reduce__(self):
return (unpickle_lock, ())
def unpickle_lock():
if threadingmodule is not None:
return XLock()
else:
return DummyLock()
unpickle_lock.__safe_for_unpickling__ = True
def _synchPre(self):
if '_threadable_lock' not in self.__dict__:
_synchLockCreator.acquire()
if '_threadable_lock' not in self.__dict__:
self.__dict__['_threadable_lock'] = XLock()
_synchLockCreator.release()
self._threadable_lock.acquire()
def _synchPost(self):
self._threadable_lock.release()
def _sync(klass, function):
@wraps(function)
def sync(self, *args, **kwargs):
_synchPre(self)
try:
return function(self, *args, **kwargs)
finally:
_synchPost(self)
return sync
def synchronize(*klasses):
"""
Make all methods listed in each class' synchronized attribute synchronized.
The synchronized attribute should be a list of strings, consisting of the
names of methods that must be synchronized. If we are running in threaded
mode these methods will be wrapped with a lock.
"""
if threadingmodule is not None:
for klass in klasses:
for methodName in klass.synchronized:
sync = _sync(klass, klass.__dict__[methodName])
setattr(klass, methodName, sync)
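# Illustrative sketch (not part of the original module): a class opts in by
# naming methods in a 'synchronized' class attribute and being passed to
# synchronize(); each named method is then wrapped with a per-instance lock
# whenever threading is available.
def _synchronizeExample():
    class Counter(object):
        synchronized = ['increment']
        def __init__(self):
            self.value = 0
        def increment(self):
            self.value += 1
    synchronize(Counter)
    return Counter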
def init(with_threads=1):
"""Initialize threading.
Don't bother calling this. If it needs to happen, it will happen.
"""
global threaded, _synchLockCreator, XLock
if with_threads:
if not threaded:
if threadingmodule is not None:
threaded = True
class XLock(threadingmodule._RLock, object):
def __reduce__(self):
return (unpickle_lock, ())
_synchLockCreator = XLock()
else:
raise RuntimeError("Cannot initialize threading, platform lacks thread support")
else:
if threaded:
raise RuntimeError("Cannot uninitialize threads")
else:
pass
_dummyID = object()
def getThreadID():
if threadingmodule is None:
return _dummyID
return threadingmodule.currentThread().ident
def isInIOThread():
"""Are we in the thread responsible for I/O requests (the event loop)?
"""
return ioThread == getThreadID()
def registerAsIOThread():
"""Mark the current thread as responsible for I/O requests.
"""
global ioThread
ioThread = getThreadID()
ioThread = None
threaded = False
# Define these globals which might be overwritten in init().
_synchLockCreator = None
XLock = None
try:
import threading as threadingmodule
except ImportError:
threadingmodule = None
else:
init(True)
__all__ = ['isInIOThread', 'registerAsIOThread', 'getThreadID', 'XLock']
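# Illustrative sketch (not part of the original module): an event loop marks
# its own thread once with registerAsIOThread(); other code can then ask
# isInIOThread() to decide whether work must be handed back to that thread.
def _ioThreadExample():
    registerAsIOThread()     # normally done by the reactor's own thread
    return isInIOThread()    # True here; False when called from a worker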

View file

@ -0,0 +1,320 @@
# -*- test-case-name: twisted.test.test_threadpool -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
twisted.python.threadpool: a pool of threads to which we dispatch tasks.
In most cases you can just use C{reactor.callInThread} and friends
instead of creating a thread pool directly.
"""
from __future__ import division, absolute_import
import threading
from twisted._threads import pool as _pool
from twisted.python import log, context
from twisted.python.failure import Failure
from twisted.python._oldstyle import _oldStyle
WorkerStop = object()
@_oldStyle
class ThreadPool:
"""
This class (hopefully) generalizes the functionality of a pool of threads
to which work can be dispatched.
L{callInThread} and L{stop} should only be called from a single thread.
@ivar started: Whether or not the thread pool is currently running.
@type started: L{bool}
@ivar threads: List of workers currently running in this thread pool.
@type threads: L{list}
@ivar _pool: A hook for testing.
@type _pool: callable compatible with L{_pool}
"""
min = 5
max = 20
joined = False
started = False
workers = 0
name = None
threadFactory = threading.Thread
currentThread = staticmethod(threading.currentThread)
_pool = staticmethod(_pool)
def __init__(self, minthreads=5, maxthreads=20, name=None):
"""
Create a new threadpool.
@param minthreads: minimum number of threads in the pool
@type minthreads: L{int}
@param maxthreads: maximum number of threads in the pool
@type maxthreads: L{int}
@param name: The name to give this threadpool; visible in log messages.
@type name: native L{str}
"""
assert minthreads >= 0, 'minimum is negative'
assert minthreads <= maxthreads, 'minimum is greater than maximum'
self.min = minthreads
self.max = maxthreads
self.name = name
self.threads = []
def trackingThreadFactory(*a, **kw):
thread = self.threadFactory(*a, name=self._generateName(), **kw)
self.threads.append(thread)
return thread
def currentLimit():
if not self.started:
return 0
return self.max
self._team = self._pool(currentLimit, trackingThreadFactory)
@property
def workers(self):
"""
For legacy compatibility purposes, return a total number of workers.
@return: the current number of workers, both idle and busy (but not
those that have been quit by L{ThreadPool.adjustPoolsize})
@rtype: L{int}
"""
stats = self._team.statistics()
return stats.idleWorkerCount + stats.busyWorkerCount
@property
def working(self):
"""
For legacy compatibility purposes, return the number of busy workers as
expressed by a list the length of that number.
@return: the number of workers currently processing a work item.
@rtype: L{list} of L{None}
"""
return [None] * self._team.statistics().busyWorkerCount
@property
def waiters(self):
"""
For legacy compatibility purposes, return the number of idle workers as
expressed by a list the length of that number.
@return: the number of workers currently alive (with an allocated
thread) but waiting for new work.
@rtype: L{list} of L{None}
"""
return [None] * self._team.statistics().idleWorkerCount
@property
def _queue(self):
"""
For legacy compatibility purposes, return an object with a C{qsize}
method that indicates the amount of work not yet allocated to a worker.
@return: an object with a C{qsize} method.
"""
class NotAQueue(object):
def qsize(q):
"""
Pretend to be a Python threading Queue and return the
number of as-yet-unconsumed tasks.
@return: the amount of backlogged work not yet dispatched to a
worker.
@rtype: L{int}
"""
return self._team.statistics().backloggedWorkCount
return NotAQueue()
q = _queue # Yes, twistedchecker, I want a single-letter
# attribute name.
def start(self):
"""
Start the threadpool.
"""
self.joined = False
self.started = True
# Start some threads.
self.adjustPoolsize()
backlog = self._team.statistics().backloggedWorkCount
if backlog:
self._team.grow(backlog)
def startAWorker(self):
"""
Increase the number of available workers for the thread pool by 1, up
to the maximum allowed by L{ThreadPool.max}.
"""
self._team.grow(1)
def _generateName(self):
"""
Generate a name for a new pool thread.
@return: A distinctive name for the thread.
@rtype: native L{str}
"""
return "PoolThread-%s-%s" % (self.name or id(self), self.workers)
def stopAWorker(self):
"""
Decrease the number of available workers by 1, by quitting one as soon
as it's idle.
"""
self._team.shrink(1)
def __setstate__(self, state):
setattr(self, "__dict__", state)
ThreadPool.__init__(self, self.min, self.max)
def __getstate__(self):
state = {}
state['min'] = self.min
state['max'] = self.max
return state
def callInThread(self, func, *args, **kw):
"""
Call a callable object in a separate thread.
@param func: callable object to be called in separate thread
@param args: positional arguments to be passed to C{func}
@param kw: keyword args to be passed to C{func}
"""
self.callInThreadWithCallback(None, func, *args, **kw)
def callInThreadWithCallback(self, onResult, func, *args, **kw):
"""
Call a callable object in a separate thread and call C{onResult} with
the return value, or a L{twisted.python.failure.Failure} if the
callable raises an exception.
The callable is allowed to block, but the C{onResult} function must not
block and should perform as little work as possible.
A typical action for C{onResult} for a threadpool used with a Twisted
reactor would be to schedule a L{twisted.internet.defer.Deferred} to
fire in the main reactor thread using C{.callFromThread}. Note that
C{onResult} is called inside the separate thread, not inside the
reactor thread.
@param onResult: a callable with the signature C{(success, result)}.
If the callable returns normally, C{onResult} is called with
C{(True, result)} where C{result} is the return value of the
callable. If the callable throws an exception, C{onResult} is
called with C{(False, failure)}.
Optionally, C{onResult} may be L{None}, in which case it is not
called at all.
@param func: callable object to be called in separate thread
@param args: positional arguments to be passed to C{func}
@param kw: keyword arguments to be passed to C{func}
"""
if self.joined:
return
ctx = context.theContextTracker.currentContext().contexts[-1]
def inContext():
try:
result = inContext.theWork()
ok = True
except:
result = Failure()
ok = False
inContext.theWork = None
if inContext.onResult is not None:
inContext.onResult(ok, result)
inContext.onResult = None
elif not ok:
log.err(result)
# Avoid closing over func, ctx, args, kw so that we can carefully
# manage their lifecycle. See
# test_threadCreationArgumentsCallInThreadWithCallback.
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
inContext.onResult = onResult
self._team.do(inContext)
def stop(self):
"""
Shutdown the threads in the threadpool.
"""
self.joined = True
self.started = False
self._team.quit()
for thread in self.threads:
thread.join()
def adjustPoolsize(self, minthreads=None, maxthreads=None):
"""
Adjust the number of available threads by setting C{min} and C{max} to
new values.
@param minthreads: The new value for L{ThreadPool.min}.
@param maxthreads: The new value for L{ThreadPool.max}.
"""
if minthreads is None:
minthreads = self.min
if maxthreads is None:
maxthreads = self.max
assert minthreads >= 0, 'minimum is negative'
assert minthreads <= maxthreads, 'minimum is greater than maximum'
self.min = minthreads
self.max = maxthreads
if not self.started:
return
        # Kill off some threads if we have too many.
if self.workers > self.max:
self._team.shrink(self.workers - self.max)
# Start some threads if we have too few.
if self.workers < self.min:
self._team.grow(self.min - self.workers)
def dumpStats(self):
"""
Dump some plain-text informational messages to the log about the state
of this L{ThreadPool}.
"""
log.msg('waiters: %s' % (self.waiters,))
log.msg('workers: %s' % (self.working,))
log.msg('total: %s' % (self.threads,))
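# Illustrative sketch (not part of the original module): standalone use of
# ThreadPool with callInThreadWithCallback(); every name below is local to
# the example.
def _threadPoolExample():
    import time
    results = []
    def onResult(success, value):
        # Runs in the worker thread; keep it cheap. Real code would usually
        # hand the result back to the reactor with callFromThread().
        results.append((success, value))
    pool = ThreadPool(minthreads=1, maxthreads=2, name="example")
    pool.start()
    pool.callInThreadWithCallback(onResult, lambda: 6 * 7)
    time.sleep(0.1)   # crude synchronization; good enough for a sketch
    pool.stop()       # joins the worker threads
    return results    # [(True, 42)] once the work has completed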

View file

@ -0,0 +1,33 @@
#compdef twist twistd trial conch cftp ckeygen pyhtmlizer tkconch
#
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked all
# over the user's terminal if completing options for a deprecated command.
# Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,15 @@
# -*- test-case-name: twisted.python.test.test_url -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
URL parsing, construction and rendering.
@see: L{URL}
"""
from hyperlink import URL
__all__ = [
"URL",
]
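# Illustrative note (not part of the original module): L{URL} here is
# hyperlink's URL class, so, for example,
#     URL.fromText(u"http://example.com/a/b?x=1").click(u"c").asText()
# should evaluate to u"http://example.com/a/c" (RFC 3986 relative resolution).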

View file

@ -0,0 +1,294 @@
# -*- test-case-name: twisted.python.test.test_urlpath -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
L{URLPath}, a representation of a URL.
"""
from __future__ import division, absolute_import
from twisted.python.compat import (
nativeString, unicode, urllib_parse as urlparse, urlunquote, urlquote
)
from hyperlink import URL as _URL
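# Every ASCII code point except NUL; used as the 'safe' argument to urlquote
# so that only non-ASCII bytes end up percent-encoded.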
_allascii = b"".join([chr(x).encode('ascii') for x in range(1, 128)])
def _rereconstituter(name):
"""
    Attribute declaration to preserve mutability on L{URLPath}.
@param name: a public attribute name
@type name: native L{str}
@return: a descriptor which retrieves the private version of the attribute
        on get and calls C{_reconstitute} on set.
"""
privateName = nativeString("_") + name
return property(
lambda self: getattr(self, privateName),
lambda self, value: (setattr(self, privateName,
value if isinstance(value, bytes)
else value.encode("charmap")) or
self._reconstitute())
)
class URLPath(object):
"""
A representation of a URL.
@ivar scheme: The scheme of the URL (e.g. 'http').
@type scheme: L{bytes}
@ivar netloc: The network location ("host").
@type netloc: L{bytes}
@ivar path: The path on the network location.
@type path: L{bytes}
@ivar query: The query argument (the portion after ? in the URL).
@type query: L{bytes}
@ivar fragment: The page fragment (the portion after # in the URL).
@type fragment: L{bytes}
"""
def __init__(self, scheme=b'', netloc=b'localhost', path=b'',
query=b'', fragment=b''):
self._scheme = scheme or b'http'
self._netloc = netloc
self._path = path or b'/'
self._query = query
self._fragment = fragment
self._reconstitute()
def _reconstitute(self):
"""
Reconstitute this L{URLPath} from all its given attributes.
"""
urltext = urlquote(
urlparse.urlunsplit((self._scheme, self._netloc,
self._path, self._query, self._fragment)),
safe=_allascii
)
self._url = _URL.fromText(urltext.encode("ascii").decode("ascii"))
scheme = _rereconstituter("scheme")
netloc = _rereconstituter("netloc")
path = _rereconstituter("path")
query = _rereconstituter("query")
fragment = _rereconstituter("fragment")
@classmethod
def _fromURL(cls, urlInstance):
"""
Reconstruct all the public instance variables of this L{URLPath} from
its underlying L{_URL}.
@param urlInstance: the object to base this L{URLPath} on.
@type urlInstance: L{_URL}
@return: a new L{URLPath}
"""
self = cls.__new__(cls)
self._url = urlInstance.replace(path=urlInstance.path or [u""])
self._scheme = self._url.scheme.encode("ascii")
self._netloc = self._url.authority().encode("ascii")
self._path = (_URL(path=self._url.path,
rooted=True).asURI().asText()
.encode("ascii"))
self._query = (_URL(query=self._url.query).asURI().asText()
.encode("ascii"))[1:]
self._fragment = self._url.fragment.encode("ascii")
return self
def pathList(self, unquote=False, copy=True):
"""
Split this URL's path into its components.
@param unquote: whether to remove %-encoding from the returned strings.
@param copy: (ignored, do not use)
@return: The components of C{self.path}
@rtype: L{list} of L{bytes}
"""
segments = self._url.path
mapper = lambda x: x.encode("ascii")
if unquote:
mapper = (lambda x, m=mapper: m(urlunquote(x)))
return [b''] + [mapper(segment) for segment in segments]
@classmethod
def fromString(klass, url):
"""
Make a L{URLPath} from a L{str} or L{unicode}.
@param url: A L{str} representation of a URL.
@type url: L{str} or L{unicode}.
@return: a new L{URLPath} derived from the given string.
@rtype: L{URLPath}
"""
if not isinstance(url, (str, unicode)):
raise ValueError("'url' must be a str or unicode")
if isinstance(url, bytes):
# On Python 2, accepting 'str' (for compatibility) means we might
# get 'bytes'. On py3, this will not work with bytes due to the
# check above.
return klass.fromBytes(url)
return klass._fromURL(_URL.fromText(url))
@classmethod
def fromBytes(klass, url):
"""
Make a L{URLPath} from a L{bytes}.
@param url: A L{bytes} representation of a URL.
@type url: L{bytes}
@return: a new L{URLPath} derived from the given L{bytes}.
@rtype: L{URLPath}
@since: 15.4
"""
if not isinstance(url, bytes):
raise ValueError("'url' must be bytes")
quoted = urlquote(url, safe=_allascii)
if isinstance(quoted, bytes):
# This will only be bytes on python 2, where we can transform it
# into unicode. On python 3, urlquote always returns str.
quoted = quoted.decode("ascii")
return klass.fromString(quoted)
@classmethod
def fromRequest(klass, request):
"""
Make a L{URLPath} from a L{twisted.web.http.Request}.
@param request: A L{twisted.web.http.Request} to make the L{URLPath}
from.
@return: a new L{URLPath} derived from the given request.
@rtype: L{URLPath}
"""
return klass.fromBytes(request.prePathURL())
def _mod(self, newURL, keepQuery):
"""
Return a modified copy of C{self} using C{newURL}, keeping the query
string if C{keepQuery} is C{True}.
@param newURL: a L{URL} to derive a new L{URLPath} from
@type newURL: L{URL}
@param keepQuery: if C{True}, preserve the query parameters from
C{self} on the new L{URLPath}; if C{False}, give the new L{URLPath}
no query parameters.
@type keepQuery: L{bool}
@return: a new L{URLPath}
"""
return self._fromURL(newURL.replace(
fragment=u'', query=self._url.query if keepQuery else ()
))
def sibling(self, path, keepQuery=False):
"""
Get the sibling of the current L{URLPath}. A sibling is a file which
is in the same directory as the current file.
@param path: The path of the sibling.
@type path: L{bytes}
@param keepQuery: Whether to keep the query parameters on the returned
L{URLPath}.
        @type keepQuery: L{bool}
@return: a new L{URLPath}
"""
return self._mod(self._url.sibling(path.decode("ascii")), keepQuery)
def child(self, path, keepQuery=False):
"""
Get the child of this L{URLPath}.
@param path: The path of the child.
@type path: L{bytes}
@param keepQuery: Whether to keep the query parameters on the returned
L{URLPath}.
        @type keepQuery: L{bool}
@return: a new L{URLPath}
"""
return self._mod(self._url.child(path.decode("ascii")), keepQuery)
def parent(self, keepQuery=False):
"""
Get the parent directory of this L{URLPath}.
@param keepQuery: Whether to keep the query parameters on the returned
L{URLPath}.
        @type keepQuery: L{bool}
@return: a new L{URLPath}
"""
return self._mod(self._url.click(u".."), keepQuery)
def here(self, keepQuery=False):
"""
Get the current directory of this L{URLPath}.
@param keepQuery: Whether to keep the query parameters on the returned
L{URLPath}.
        @type keepQuery: L{bool}
@return: a new L{URLPath}
"""
return self._mod(self._url.click(u"."), keepQuery)
def click(self, st):
"""
Return a path which is the URL where a browser would presumably take
you if you clicked on a link with an HREF as given.
@param st: A relative URL, to be interpreted relative to C{self} as the
base URL.
@type st: L{bytes}
@return: a new L{URLPath}
"""
return self._fromURL(self._url.click(st.decode("ascii")))
def __str__(self):
"""
The L{str} of a L{URLPath} is its URL text.
"""
return nativeString(self._url.asURI().asText())
def __repr__(self):
"""
The L{repr} of a L{URLPath} is an eval-able expression which will
construct a similar L{URLPath}.
"""
return ('URLPath(scheme=%r, netloc=%r, path=%r, query=%r, fragment=%r)'
% (self.scheme, self.netloc, self.path, self.query,
self.fragment))
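
A brief usage sketch of L{URLPath} (host and path names are made-up examples, and the rendered text shown in the comments is approximate):

from twisted.python.urlpath import URLPath

# Attributes and child/sibling arguments are bytes; str() renders the URL.
up = URLPath.fromString("http://example.com/foo/bar?a=1")
print(up.child(b"baz"))     # expected roughly: http://example.com/foo/bar/baz
print(up.sibling(b"quux"))  # expected roughly: http://example.com/foo/quux
print(up.parent())          # expected roughly: http://example.com/
print(up.pathList())        # expected roughly: [b'', b'foo', b'bar']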

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View file

@ -0,0 +1,14 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Versions for Python packages.
See L{incremental}.
"""
from __future__ import division, absolute_import
from incremental import IncomparableVersions, Version, getVersionString
__all__ = ["Version", "getVersionString", "IncomparableVersions"]

View file

@ -0,0 +1,136 @@
# -*- test-case-name: twisted.python.test.test_win32 -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Win32 utilities.
See also twisted.python.shortcut.
@var O_BINARY: the 'binary' mode flag on Windows, or 0 on other platforms, so it
may safely be OR'ed into a mask for os.open.
"""
from __future__ import division, absolute_import
import re
import os
# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/debug/base/system_error_codes.asp
ERROR_FILE_NOT_FOUND = 2
ERROR_PATH_NOT_FOUND = 3
ERROR_INVALID_NAME = 123
ERROR_DIRECTORY = 267
O_BINARY = getattr(os, "O_BINARY", 0)
class FakeWindowsError(OSError):
"""
    Stand-in for the built-in C{WindowsError} exception on platforms where it
    is missing.
"""
try:
WindowsError = WindowsError
except NameError:
WindowsError = FakeWindowsError
_cmdLineQuoteRe = re.compile(r'(\\*)"')
_cmdLineQuoteRe2 = re.compile(r'(\\+)\Z')
def cmdLineQuote(s):
"""
Internal method for quoting a single command-line argument.
@param s: an unquoted string that you want to quote so that something that
does cmd.exe-style unquoting will interpret it as a single argument,
even if it contains spaces.
@type s: C{str}
@return: a quoted string.
@rtype: C{str}
"""
quote = ((" " in s) or ("\t" in s) or ('"' in s) or s == '') and '"' or ''
return quote + _cmdLineQuoteRe2.sub(r"\1\1", _cmdLineQuoteRe.sub(r'\1\1\\"', s)) + quote
def quoteArguments(arguments):
"""
Quote an iterable of command-line arguments for passing to CreateProcess or
a similar API. This allows the list passed to C{reactor.spawnProcess} to
match the child process's C{sys.argv} properly.
    @param arguments: an iterable of C{str}, each unquoted.
@return: a single string, with the given sequence quoted as necessary.
"""
return ' '.join([cmdLineQuote(a) for a in arguments])
class _ErrorFormatter(object):
"""
Formatter for Windows error messages.
@ivar winError: A callable which takes one integer error number argument
and returns an L{exceptions.WindowsError} instance for that error (like
L{ctypes.WinError}).
@ivar formatMessage: A callable which takes one integer error number
argument and returns a C{str} giving the message for that error (like
L{win32api.FormatMessage}).
@ivar errorTab: A mapping from integer error numbers to C{str} messages
        which correspond to those errors (like I{socket.errorTab}).
"""
def __init__(self, WinError, FormatMessage, errorTab):
self.winError = WinError
self.formatMessage = FormatMessage
self.errorTab = errorTab
def fromEnvironment(cls):
"""
Get as many of the platform-specific error translation objects as
possible and return an instance of C{cls} created with them.
"""
try:
from ctypes import WinError
except ImportError:
WinError = None
try:
from win32api import FormatMessage
except ImportError:
FormatMessage = None
try:
from socket import errorTab
except ImportError:
errorTab = None
return cls(WinError, FormatMessage, errorTab)
fromEnvironment = classmethod(fromEnvironment)
def formatError(self, errorcode):
"""
        Returns the string associated with a Windows error code, such as the
        ones found in socket.error.
        Attempts a direct lookup against the win32 API (via ctypes and then
        pywin32, if available), then the error table in the socket module, and
        finally falls back to C{os.strerror}.
@param errorcode: the Windows error code
@type errorcode: C{int}
@return: The error message string
@rtype: C{str}
"""
if self.winError is not None:
return self.winError(errorcode).strerror
if self.formatMessage is not None:
return self.formatMessage(errorcode)
if self.errorTab is not None:
result = self.errorTab.get(errorcode)
if result is not None:
return result
return os.strerror(errorcode)
formatError = _ErrorFormatter.fromEnvironment().formatError
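
An illustrative sketch of these helpers (the argument values are examples, and the exact message returned by formatError depends on the platform and which backends were importable):

from twisted.python import win32

# Quote arguments so a spawned child process sees the same argv on Windows.
print(win32.quoteArguments(["foo bar", 'say "hi"']))
# expected roughly: "foo bar" "say \"hi\""

# Translate a Windows error code into a readable message; without ctypes or
# pywin32 available this falls back to os.strerror().
print(win32.formatError(win32.ERROR_FILE_NOT_FOUND))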

View file

@ -0,0 +1,295 @@
# -*- test-case-name: twisted.python.test.test_zippath -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module contains implementations of L{IFilePath} for zip files.
See the constructor of L{ZipArchive} for use.
"""
from __future__ import absolute_import, division
import os
import time
import errno
from zipfile import ZipFile
from twisted.python.compat import comparable, cmp
from twisted.python.filepath import IFilePath, FilePath, AbstractFilePath
from twisted.python.filepath import _coerceToFilesystemEncoding
from twisted.python.filepath import UnlistableError
from zope.interface import implementer
ZIP_PATH_SEP = '/' # In zipfiles, "/" is universally used as the
# path separator, regardless of platform.
@comparable
@implementer(IFilePath)
class ZipPath(AbstractFilePath):
"""
I represent a file or directory contained within a zip file.
"""
def __init__(self, archive, pathInArchive):
"""
Don't construct me directly. Use C{ZipArchive.child()}.
@param archive: a L{ZipArchive} instance.
@param pathInArchive: a ZIP_PATH_SEP-separated string.
"""
self.archive = archive
self.pathInArchive = pathInArchive
# self.path pretends to be os-specific because that's the way the
# 'zipimport' module does it.
sep = _coerceToFilesystemEncoding(pathInArchive, ZIP_PATH_SEP)
archiveFilename = _coerceToFilesystemEncoding(
pathInArchive, archive.zipfile.filename)
self.path = os.path.join(archiveFilename,
*(self.pathInArchive.split(sep)))
def __cmp__(self, other):
if not isinstance(other, ZipPath):
return NotImplemented
return cmp((self.archive, self.pathInArchive),
(other.archive, other.pathInArchive))
def __repr__(self):
parts = [_coerceToFilesystemEncoding(
self.sep, os.path.abspath(self.archive.path))]
parts.extend(self.pathInArchive.split(self.sep))
ossep = _coerceToFilesystemEncoding(self.sep, os.sep)
return "ZipPath(%r)" % (ossep.join(parts),)
@property
def sep(self):
"""
Return a zip directory separator.
@return: The zip directory separator.
        @rtype: The same type as C{self.path}.
"""
return _coerceToFilesystemEncoding(self.path, ZIP_PATH_SEP)
def parent(self):
splitup = self.pathInArchive.split(self.sep)
if len(splitup) == 1:
return self.archive
return ZipPath(self.archive, self.sep.join(splitup[:-1]))
def child(self, path):
"""
Return a new ZipPath representing a path in C{self.archive} which is
a child of this path.
@note: Requesting the C{".."} (or other special name) child will not
cause L{InsecurePath} to be raised since these names do not have
any special meaning inside a zip archive. Be particularly
careful with the C{path} attribute (if you absolutely must use
it) as this means it may include special names with special
meaning outside of the context of a zip archive.
"""
joiner = _coerceToFilesystemEncoding(path, ZIP_PATH_SEP)
pathInArchive = _coerceToFilesystemEncoding(path, self.pathInArchive)
return ZipPath(self.archive, joiner.join([pathInArchive, path]))
def sibling(self, path):
return self.parent().child(path)
def exists(self):
return self.isdir() or self.isfile()
def isdir(self):
return self.pathInArchive in self.archive.childmap
def isfile(self):
return self.pathInArchive in self.archive.zipfile.NameToInfo
def islink(self):
return False
def listdir(self):
if self.exists():
if self.isdir():
return list(self.archive.childmap[self.pathInArchive].keys())
else:
raise UnlistableError(
OSError(errno.ENOTDIR, "Leaf zip entry listed"))
else:
raise UnlistableError(
OSError(errno.ENOENT, "Non-existent zip entry listed"))
def splitext(self):
"""
Return a value similar to that returned by C{os.path.splitext}.
"""
# This happens to work out because of the fact that we use OS-specific
# path separators in the constructor to construct our fake 'path'
# attribute.
return os.path.splitext(self.path)
def basename(self):
return self.pathInArchive.split(self.sep)[-1]
def dirname(self):
# XXX NOTE: This API isn't a very good idea on filepath, but it's even
# less meaningful here.
return self.parent().path
def open(self, mode="r"):
pathInArchive = _coerceToFilesystemEncoding('', self.pathInArchive)
return self.archive.zipfile.open(pathInArchive, mode=mode)
def changed(self):
pass
def getsize(self):
"""
Retrieve this file's size.
@return: file size, in bytes
"""
pathInArchive = _coerceToFilesystemEncoding("", self.pathInArchive)
return self.archive.zipfile.NameToInfo[pathInArchive].file_size
def getAccessTime(self):
"""
        Retrieve this file's last access time. This is the same as the last access
time for the archive.
@return: a number of seconds since the epoch
"""
return self.archive.getAccessTime()
def getModificationTime(self):
"""
Retrieve this file's last modification time. This is the time of
modification recorded in the zipfile.
@return: a number of seconds since the epoch.
"""
pathInArchive = _coerceToFilesystemEncoding("", self.pathInArchive)
return time.mktime(
self.archive.zipfile.NameToInfo[pathInArchive].date_time
+ (0, 0, 0))
def getStatusChangeTime(self):
"""
        Retrieve this file's status change time. Zip entries do not record
        one, so this is provided for compatibility and returns the same value
        as L{getModificationTime}.
@return: a number of seconds since the epoch.
"""
return self.getModificationTime()
class ZipArchive(ZipPath):
"""
I am a L{FilePath}-like object which can wrap a zip archive as if it were a
directory.
It works similarly to L{FilePath} in L{bytes} and L{unicode} handling --
instantiating with a L{bytes} will return a "bytes mode" L{ZipArchive},
and instantiating with a L{unicode} will return a "text mode"
L{ZipArchive}. Methods that return new L{ZipArchive} or L{ZipPath}
instances will be in the mode of the argument to the creator method,
converting if required.
"""
archive = property(lambda self: self)
def __init__(self, archivePathname):
"""
Create a ZipArchive, treating the archive at archivePathname as a zip
file.
@param archivePathname: a L{bytes} or L{unicode}, naming a path in the
filesystem.
"""
self.path = archivePathname
self.zipfile = ZipFile(_coerceToFilesystemEncoding('',
archivePathname))
self.pathInArchive = _coerceToFilesystemEncoding(archivePathname, '')
# zipfile is already wasting O(N) memory on cached ZipInfo instances,
# so there's no sense in trying to do this lazily or intelligently
self.childmap = {} # map parent: list of children
for name in self.zipfile.namelist():
name = _coerceToFilesystemEncoding(self.path, name).split(self.sep)
for x in range(len(name)):
child = name[-x]
parent = self.sep.join(name[:-x])
if parent not in self.childmap:
self.childmap[parent] = {}
self.childmap[parent][child] = 1
parent = _coerceToFilesystemEncoding(archivePathname, '')
def child(self, path):
"""
Create a ZipPath pointing at a path within the archive.
@param path: a L{bytes} or L{unicode} with no path separators in it
(either '/' or the system path separator, if it's different).
"""
return ZipPath(self, path)
def exists(self):
"""
Returns C{True} if the underlying archive exists.
"""
return FilePath(self.zipfile.filename).exists()
def getAccessTime(self):
"""
Return the archive file's last access time.
"""
return FilePath(self.zipfile.filename).getAccessTime()
def getModificationTime(self):
"""
Return the archive file's modification time.
"""
return FilePath(self.zipfile.filename).getModificationTime()
def getStatusChangeTime(self):
"""
Return the archive file's status change time.
"""
return FilePath(self.zipfile.filename).getStatusChangeTime()
def __repr__(self):
return 'ZipArchive(%r)' % (os.path.abspath(self.path),)
__all__ = ['ZipArchive', 'ZipPath']
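
A small usage sketch for L{ZipArchive} and L{ZipPath} (the archive name and member path are hypothetical and assume such a zip file exists on disk):

from twisted.python.zippath import ZipArchive

# Wrap an existing zip file and walk it like a directory tree.
archive = ZipArchive("example.zip")          # hypothetical archive
for entry in archive.children():
    print(entry.basename(), entry.isdir())

member = archive.child("README.txt")         # hypothetical member name
if member.isfile():
    with member.open() as f:
        print(f.read())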

View file

@ -0,0 +1,336 @@
# -*- test-case-name: twisted.python.test.test_zipstream -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An incremental approach to unzipping files. This allows you to unzip a little
bit of a file at a time, which means you can report progress as a file unzips.
"""
import zipfile
import os.path
import zlib
import struct
_fileHeaderSize = struct.calcsize(zipfile.structFileHeader)
class ChunkingZipFile(zipfile.ZipFile):
"""
A L{zipfile.ZipFile} object which, with L{readfile}, also gives you access
to a file-like object for each entry.
"""
def readfile(self, name):
"""
Return file-like object for name.
"""
if self.mode not in ("r", "a"):
raise RuntimeError('read() requires mode "r" or "a"')
if not self.fp:
raise RuntimeError(
"Attempt to read ZIP archive that was already closed")
zinfo = self.getinfo(name)
self.fp.seek(zinfo.header_offset, 0)
fheader = self.fp.read(_fileHeaderSize)
if fheader[0:4] != zipfile.stringFileHeader:
raise zipfile.BadZipfile("Bad magic number for file header")
fheader = struct.unpack(zipfile.structFileHeader, fheader)
fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])
if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])
if zinfo.flag_bits & 0x800:
# UTF-8 filename
fname_str = fname.decode("utf-8")
else:
fname_str = fname.decode("cp437")
if fname_str != zinfo.orig_filename:
raise zipfile.BadZipfile(
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname_str))
if zinfo.compress_type == zipfile.ZIP_STORED:
return ZipFileEntry(self, zinfo.compress_size)
elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
return DeflatedZipFileEntry(self, zinfo.compress_size)
else:
raise zipfile.BadZipfile(
"Unsupported compression method %d for file %s" %
(zinfo.compress_type, name))
class _FileEntry(object):
"""
Abstract superclass of both compressed and uncompressed variants of
file-like objects within a zip archive.
@ivar chunkingZipFile: a chunking zip file.
@type chunkingZipFile: L{ChunkingZipFile}
@ivar length: The number of bytes within the zip file that represent this
file. (This is the size on disk, not the number of decompressed bytes
which will result from reading it.)
@ivar fp: the underlying file object (that contains pkzip data). Do not
touch this, please. It will quite likely move or go away.
@ivar closed: File-like 'closed' attribute; True before this file has been
closed, False after.
@type closed: L{bool}
@ivar finished: An older, broken synonym for 'closed'. Do not touch this,
please.
@type finished: L{int}
"""
def __init__(self, chunkingZipFile, length):
"""
Create a L{_FileEntry} from a L{ChunkingZipFile}.
"""
self.chunkingZipFile = chunkingZipFile
self.fp = self.chunkingZipFile.fp
self.length = length
self.finished = 0
self.closed = False
def isatty(self):
"""
        Returns C{False} because zip file entries are never ttys.
"""
return False
def close(self):
"""
Close self (file-like object)
"""
self.closed = True
self.finished = 1
del self.fp
def readline(self):
"""
Read a line.
"""
line = b""
for byte in iter(lambda : self.read(1), b""):
line += byte
if byte == b"\n":
break
return line
def __next__(self):
"""
Implement next as file does (like readline, except raises StopIteration
at EOF)
"""
nextline = self.readline()
if nextline:
return nextline
raise StopIteration()
# Iterators on Python 2 use next(), not __next__()
next = __next__
def readlines(self):
"""
Returns a list of all the lines
"""
return list(self)
def xreadlines(self):
"""
Returns an iterator (so self)
"""
return self
def __iter__(self):
"""
Returns an iterator (so self)
"""
return self
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
class ZipFileEntry(_FileEntry):
"""
File-like object used to read an uncompressed entry in a ZipFile
"""
def __init__(self, chunkingZipFile, length):
_FileEntry.__init__(self, chunkingZipFile, length)
self.readBytes = 0
def tell(self):
return self.readBytes
def read(self, n=None):
if n is None:
n = self.length - self.readBytes
if n == 0 or self.finished:
return b''
data = self.chunkingZipFile.fp.read(
min(n, self.length - self.readBytes))
self.readBytes += len(data)
if self.readBytes == self.length or len(data) < n:
self.finished = 1
return data
class DeflatedZipFileEntry(_FileEntry):
"""
File-like object used to read a deflated entry in a ZipFile
"""
def __init__(self, chunkingZipFile, length):
_FileEntry.__init__(self, chunkingZipFile, length)
self.returnedBytes = 0
self.readBytes = 0
self.decomp = zlib.decompressobj(-15)
self.buffer = b""
def tell(self):
return self.returnedBytes
def read(self, n=None):
if self.finished:
return b""
if n is None:
result = [self.buffer,]
result.append(
self.decomp.decompress(
self.chunkingZipFile.fp.read(
self.length - self.readBytes)))
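            # The raw deflate stream is already complete at this point;
            # feeding one dummy byte nudges zlib into releasing any output it
            # is still buffering before flush() is called below.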
result.append(self.decomp.decompress(b"Z"))
result.append(self.decomp.flush())
self.buffer = b""
self.finished = 1
result = b"".join(result)
self.returnedBytes += len(result)
return result
else:
while len(self.buffer) < n:
data = self.chunkingZipFile.fp.read(
min(n, 1024, self.length - self.readBytes))
self.readBytes += len(data)
if not data:
result = (self.buffer
+ self.decomp.decompress(b"Z")
+ self.decomp.flush())
self.finished = 1
self.buffer = b""
self.returnedBytes += len(result)
return result
else:
self.buffer += self.decomp.decompress(data)
result = self.buffer[:n]
self.buffer = self.buffer[n:]
self.returnedBytes += len(result)
return result
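# Bit 0x10 of ZipInfo.external_attr is the MS-DOS "directory" attribute; it is
# used below to tell directory entries apart from regular files.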
DIR_BIT = 16
def countZipFileChunks(filename, chunksize):
"""
Predict the number of chunks that will be extracted from the entire
zipfile, given chunksize blocks.
"""
totalchunks = 0
zf = ChunkingZipFile(filename)
for info in zf.infolist():
totalchunks += countFileChunks(info, chunksize)
return totalchunks
def countFileChunks(zipinfo, chunksize):
"""
Count the number of chunks that will result from the given C{ZipInfo}.
@param zipinfo: a C{zipfile.ZipInfo} instance describing an entry in a zip
archive to be counted.
    @return: the number of chunks this entry will be split into. (Even an
        empty file counts as one chunk.)
@rtype: L{int}
"""
count, extra = divmod(zipinfo.file_size, chunksize)
if extra > 0:
count += 1
return count or 1
def unzipIterChunky(filename, directory='.', overwrite=0,
chunksize=4096):
"""
Return a generator for the zipfile. This implementation will yield after
every chunksize uncompressed bytes, or at the end of a file, whichever
comes first.
The value it yields is the number of chunks left to unzip.
"""
czf = ChunkingZipFile(filename, 'r')
if not os.path.exists(directory):
os.makedirs(directory)
remaining = countZipFileChunks(filename, chunksize)
names = czf.namelist()
infos = czf.infolist()
for entry, info in zip(names, infos):
isdir = info.external_attr & DIR_BIT
f = os.path.join(directory, entry)
if isdir:
# overwrite flag only applies to files
if not os.path.exists(f):
os.makedirs(f)
remaining -= 1
yield remaining
else:
# create the directory the file will be in first,
# since we can't guarantee it exists
fdir = os.path.split(f)[0]
if not os.path.exists(fdir):
os.makedirs(fdir)
if overwrite or not os.path.exists(f):
fp = czf.readfile(entry)
if info.file_size == 0:
remaining -= 1
yield remaining
with open(f, 'wb') as outfile:
while fp.tell() < info.file_size:
hunk = fp.read(chunksize)
outfile.write(hunk)
remaining -= 1
yield remaining
else:
remaining -= countFileChunks(info, chunksize)
yield remaining
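
An illustrative sketch of driving unzipIterChunky to report progress (the archive path is a made-up example):

from twisted.python.zipstream import countZipFileChunks, unzipIterChunky

total = countZipFileChunks("example.zip", 4096)   # hypothetical archive
for remaining in unzipIterChunky("example.zip", directory="unpacked"):
    # Each yielded value is the number of chunks still left to extract.
    print("progress: %d/%d chunks" % (total - remaining, total))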