index (int64, 0–731k) | package (string, 2–98 chars, nullable) | name (string, 1–76 chars) | docstring (string, 0–281k chars, nullable) | code (string, 4–1.07M chars, nullable) | signature (string, 2–42.8k chars, nullable) |
---|---|---|---|---|---|
62,051 |
fixtures._fixtures.logger
|
FakeLogger
|
Replace a logger and capture its output.
|
class FakeLogger(Fixture):
"""Replace a logger and capture its output."""
def __init__(self, name="", level=INFO, format=None,
datefmt=None, nuke_handlers=True, formatter=None):
"""Create a FakeLogger fixture.
:param name: The name of the logger to replace. Defaults to "".
:param level: The log level to set, defaults to INFO.
:param format: Logging format to use. Defaults to capturing supplied
messages verbatim.
:param datefmt: Logging date format to use.
Mirrors the datefmt used in python logging.
:param nuke_handlers: If True remove all existing handlers (prevents
existing messages going to e.g. stdout). Defaults to True.
:param formatter: a custom log formatter class. Use this if you want
to use a log Formatter other than the default one in python.
Example:
def test_log(self):
fixture = self.useFixture(LoggerFixture())
logging.info('message')
self.assertEqual('message', fixture.output)
"""
super(FakeLogger, self).__init__()
self._name = name
self._level = level
self._format = format
self._datefmt = datefmt
self._nuke_handlers = nuke_handlers
self._formatter = formatter
def _setUp(self):
name = "pythonlogging:'%s'" % self._name
output = self.useFixture(StringStream(name)).stream
self._output = output
handler = StreamHandlerRaiseException(output)
if self._format:
formatter = (self._formatter or Formatter)
handler.setFormatter(formatter(self._format, self._datefmt))
self.useFixture(
LogHandler(handler, name=self._name, level=self._level,
nuke_handlers=self._nuke_handlers))
@property
def output(self):
self._output.seek(0)
return self._output.read()
def reset_output(self):
self._output.truncate(0)
|
(name='', level=20, format=None, datefmt=None, nuke_handlers=True, formatter=None)
|
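As a usage illustration of the row above: a minimal sketch, assuming a testtools.TestCase (any test class exposing useFixture would do), that captures log output through FakeLogger. The test and message names are invented for the example.

```python
import logging

import fixtures
import testtools


class TestLogging(testtools.TestCase):
    def test_captures_log_output(self):
        # Replace the root logger's handlers and capture messages verbatim.
        fake = self.useFixture(fixtures.FakeLogger())
        logging.info('something happened')
        # The output property returns everything captured so far.
        self.assertIn('something happened', fake.output)
```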
62,054 |
fixtures._fixtures.logger
|
__init__
|
Create a FakeLogger fixture.
:param name: The name of the logger to replace. Defaults to "".
:param level: The log level to set, defaults to INFO.
:param format: Logging format to use. Defaults to capturing supplied
messages verbatim.
:param datefmt: Logging date format to use.
Mirrors the datefmt used in python logging.
:param nuke_handlers: If True remove all existing handlers (prevents
existing messages going to e.g. stdout). Defaults to True.
:param formatter: a custom log formatter class. Use this if you want
to use a log Formatter other than the default one in python.
Example:
def test_log(self):
fixture = self.useFixture(LoggerFixture())
logging.info('message')
self.assertEqual('message', fixture.output)
|
def __init__(self, name="", level=INFO, format=None,
datefmt=None, nuke_handlers=True, formatter=None):
"""Create a FakeLogger fixture.
:param name: The name of the logger to replace. Defaults to "".
:param level: The log level to set, defaults to INFO.
:param format: Logging format to use. Defaults to capturing supplied
messages verbatim.
:param datefmt: Logging date format to use.
Mirrors the datefmt used in python logging.
:param nuke_handlers: If True remove all existing handlers (prevents
existing messages going to e.g. stdout). Defaults to True.
:param formatter: a custom log formatter class. Use this if you want
to use a log Formatter other than the default one in python.
Example:
def test_log(self):
fixture = self.useFixture(LoggerFixture())
logging.info('message')
self.assertEqual('message', fixture.output)
"""
super(FakeLogger, self).__init__()
self._name = name
self._level = level
self._format = format
self._datefmt = datefmt
self._nuke_handlers = nuke_handlers
self._formatter = formatter
|
(self, name='', level=20, format=None, datefmt=None, nuke_handlers=True, formatter=None)
|
62,057 |
fixtures._fixtures.logger
|
_setUp
| null |
def _setUp(self):
name = "pythonlogging:'%s'" % self._name
output = self.useFixture(StringStream(name)).stream
self._output = output
handler = StreamHandlerRaiseException(output)
if self._format:
formatter = (self._formatter or Formatter)
handler.setFormatter(formatter(self._format, self._datefmt))
self.useFixture(
LogHandler(handler, name=self._name, level=self._level,
nuke_handlers=self._nuke_handlers))
|
(self)
|
62,063 |
fixtures._fixtures.logger
|
reset_output
| null |
def reset_output(self):
self._output.truncate(0)
|
(self)
|
62,066 |
fixtures._fixtures.popen
|
FakePopen
|
Replace subprocess.Popen.
Primarily useful for testing, this fixture replaces subprocess.Popen with a
test double.
:ivar procs: A list of the processes created by the fixture.
|
class FakePopen(Fixture):
"""Replace subprocess.Popen.
Primarily useful for testing, this fixture replaces subprocess.Popen with a
test double.
:ivar procs: A list of the processes created by the fixture.
"""
_unpassed = object()
def __init__(self, get_info=lambda _:{}):
"""Create a PopenFixture
:param get_info: Optional callback to control the behaviour of the
created process. This callback takes a kwargs dict for the Popen
call, and should return a dict with any desired attributes.
Only parameters that are supplied to the Popen call are in the
dict, making it possible to detect the difference between 'passed
with a default value' and 'not passed at all'.
e.g.
def get_info(proc_args):
self.assertEqual(subprocess.PIPE, proc_args['stdin'])
return {'stdin': StringIO('foobar')}
The default behaviour if no get_info is supplied is for the returned
process to have a returncode of None, empty streams and a random pid.
After communicate() or wait() are called on the process object,
the returncode is set to whatever get_info returns (or 0 if
get_info is not supplied or doesn't return a dict with an explicit
'returncode' key).
"""
super(FakePopen, self).__init__()
self.get_info = get_info
def _setUp(self):
self.addCleanup(setattr, subprocess, 'Popen', subprocess.Popen)
subprocess.Popen = self
self.procs = []
# The method has the correct signature so we error appropriately if called
# wrongly.
def __call__(self, args, bufsize=_unpassed, executable=_unpassed,
stdin=_unpassed, stdout=_unpassed, stderr=_unpassed,
preexec_fn=_unpassed, close_fds=_unpassed, shell=_unpassed,
cwd=_unpassed, env=_unpassed, universal_newlines=_unpassed,
startupinfo=_unpassed, creationflags=_unpassed,
restore_signals=_unpassed, start_new_session=_unpassed,
pass_fds=_unpassed, *, group=_unpassed, extra_groups=_unpassed,
user=_unpassed, umask=_unpassed, encoding=_unpassed,
errors=_unpassed, text=_unpassed, pipesize=_unpassed,
process_group=_unpassed):
# Reject arguments introduced by newer versions of Python in older
# versions; this makes it harder to accidentally hide compatibility
# problems using test doubles.
if sys.version_info < (3, 7) and text is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword argument "
"'text'")
if sys.version_info < (3, 9):
for arg_name in "group", "extra_groups", "user", "umask":
if locals()[arg_name] is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword "
"argument '{}'".format(arg_name))
if sys.version_info < (3, 10) and pipesize is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword argument "
"'pipesize'")
if sys.version_info < (3, 11) and process_group is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword argument "
"'process_group'")
proc_args = dict(args=args)
local = locals()
for param in [
"bufsize", "executable", "stdin", "stdout", "stderr",
"preexec_fn", "close_fds", "shell", "cwd", "env",
"universal_newlines", "startupinfo", "creationflags",
"restore_signals", "start_new_session", "pass_fds", "group",
"extra_groups", "user", "umask", "encoding", "errors", "text",
"pipesize", "process_group"]:
if local[param] is not FakePopen._unpassed:
proc_args[param] = local[param]
proc_info = self.get_info(proc_args)
result = FakeProcess(proc_args, proc_info)
self.procs.append(result)
return result
|
(get_info=<function FakePopen.<lambda> at 0x7fa4239eaef0>)
|
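To illustrate the get_info callback documented above, a hedged sketch (the command and test names are invented); it assumes a testtools-style test case with useFixture:

```python
import subprocess

import fixtures
import testtools


class TestRunsCommand(testtools.TestCase):
    def test_popen_is_stubbed(self):
        def get_info(proc_args):
            # Only arguments actually passed to Popen appear in proc_args.
            return {'returncode': 0}

        fake = self.useFixture(fixtures.FakePopen(get_info))
        proc = subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE)
        proc.wait()
        # returncode comes from get_info once wait()/communicate() is called.
        self.assertEqual(0, proc.returncode)
        # Every created process is recorded on the fixture.
        self.assertEqual(1, len(fake.procs))
```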
62,067 |
fixtures._fixtures.popen
|
__call__
| null |
def __call__(self, args, bufsize=_unpassed, executable=_unpassed,
stdin=_unpassed, stdout=_unpassed, stderr=_unpassed,
preexec_fn=_unpassed, close_fds=_unpassed, shell=_unpassed,
cwd=_unpassed, env=_unpassed, universal_newlines=_unpassed,
startupinfo=_unpassed, creationflags=_unpassed,
restore_signals=_unpassed, start_new_session=_unpassed,
pass_fds=_unpassed, *, group=_unpassed, extra_groups=_unpassed,
user=_unpassed, umask=_unpassed, encoding=_unpassed,
errors=_unpassed, text=_unpassed, pipesize=_unpassed,
process_group=_unpassed):
# Reject arguments introduced by newer versions of Python in older
# versions; this makes it harder to accidentally hide compatibility
# problems using test doubles.
if sys.version_info < (3, 7) and text is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword argument "
"'text'")
if sys.version_info < (3, 9):
for arg_name in "group", "extra_groups", "user", "umask":
if locals()[arg_name] is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword "
"argument '{}'".format(arg_name))
if sys.version_info < (3, 10) and pipesize is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword argument "
"'pipesize'")
if sys.version_info < (3, 11) and process_group is not FakePopen._unpassed:
raise TypeError(
"FakePopen.__call__() got an unexpected keyword argument "
"'process_group'")
proc_args = dict(args=args)
local = locals()
for param in [
"bufsize", "executable", "stdin", "stdout", "stderr",
"preexec_fn", "close_fds", "shell", "cwd", "env",
"universal_newlines", "startupinfo", "creationflags",
"restore_signals", "start_new_session", "pass_fds", "group",
"extra_groups", "user", "umask", "encoding", "errors", "text",
"pipesize", "process_group"]:
if local[param] is not FakePopen._unpassed:
proc_args[param] = local[param]
proc_info = self.get_info(proc_args)
result = FakeProcess(proc_args, proc_info)
self.procs.append(result)
return result
|
(self, args, bufsize=<object object at 0x7fa425205ed0>, executable=<object object at 0x7fa425205ed0>, stdin=<object object at 0x7fa425205ed0>, stdout=<object object at 0x7fa425205ed0>, stderr=<object object at 0x7fa425205ed0>, preexec_fn=<object object at 0x7fa425205ed0>, close_fds=<object object at 0x7fa425205ed0>, shell=<object object at 0x7fa425205ed0>, cwd=<object object at 0x7fa425205ed0>, env=<object object at 0x7fa425205ed0>, universal_newlines=<object object at 0x7fa425205ed0>, startupinfo=<object object at 0x7fa425205ed0>, creationflags=<object object at 0x7fa425205ed0>, restore_signals=<object object at 0x7fa425205ed0>, start_new_session=<object object at 0x7fa425205ed0>, pass_fds=<object object at 0x7fa425205ed0>, *, group=<object object at 0x7fa425205ed0>, extra_groups=<object object at 0x7fa425205ed0>, user=<object object at 0x7fa425205ed0>, umask=<object object at 0x7fa425205ed0>, encoding=<object object at 0x7fa425205ed0>, errors=<object object at 0x7fa425205ed0>, text=<object object at 0x7fa425205ed0>, pipesize=<object object at 0x7fa425205ed0>, process_group=<object object at 0x7fa425205ed0>)
|
62,070 |
fixtures._fixtures.popen
|
__init__
|
Create a PopenFixture
:param get_info: Optional callback to control the behaviour of the
created process. This callback takes a kwargs dict for the Popen
call, and should return a dict with any desired attributes.
Only parameters that are supplied to the Popen call are in the
dict, making it possible to detect the difference between 'passed
with a default value' and 'not passed at all'.
e.g.
def get_info(proc_args):
self.assertEqual(subprocess.PIPE, proc_args['stdin'])
return {'stdin': StringIO('foobar')}
The default behaviour if no get_info is supplied is for the returned
process to have a returncode of None, empty streams and a random pid.
After communicate() or wait() are called on the process object,
the returncode is set to whatever get_info returns (or 0 if
get_info is not supplied or doesn't return a dict with an explicit
'returncode' key).
|
def __init__(self, get_info=lambda _:{}):
"""Create a PopenFixture
:param get_info: Optional callback to control the behaviour of the
created process. This callback takes a kwargs dict for the Popen
call, and should return a dict with any desired attributes.
Only parameters that are supplied to the Popen call are in the
dict, making it possible to detect the difference between 'passed
with a default value' and 'not passed at all'.
e.g.
def get_info(proc_args):
self.assertEqual(subprocess.PIPE, proc_args['stdin'])
return {'stdin': StringIO('foobar')}
The default behaviour if no get_info is supplied is for the returned
process to have a returncode of None, empty streams and a random pid.
After communicate() or wait() are called on the process object,
the returncode is set to whatever get_info returns (or 0 if
get_info is not supplied or doesn't return a dict with an explicit
'returncode' key).
"""
super(FakePopen, self).__init__()
self.get_info = get_info
|
(self, get_info=<function FakePopen.<lambda> at 0x7fa4239eaef0>)
|
62,073 |
fixtures._fixtures.popen
|
_setUp
| null |
def _setUp(self):
self.addCleanup(setattr, subprocess, 'Popen', subprocess.Popen)
subprocess.Popen = self
self.procs = []
|
(self)
|
62,081 |
fixtures.fixture
|
Fixture
|
A Fixture representing some state or resource.
Often used in tests, a Fixture must be setUp before using it, and cleanUp
called after it is finished with (because many Fixture classes have
external resources such as temporary directories).
The reset() method can be called to perform cleanUp and setUp automatically
and potentially faster.
|
class Fixture(object):
"""A Fixture representing some state or resource.
Often used in tests, a Fixture must be setUp before using it, and cleanUp
called after it is finished with (because many Fixture classes have
external resources such as temporary directories).
The reset() method can be called to perform cleanUp and setUp automatically
and potentially faster.
"""
def addCleanup(self, cleanup, *args, **kwargs):
"""Add a clean function to be called from cleanUp.
All cleanup functions are called - see cleanUp for details on how
multiple exceptions are handled.
If for some reason you need to cancel cleanups, call
self._clear_cleanups.
:param cleanup: A callable to call during cleanUp.
:param *args: Positional args for cleanup.
:param kwargs: Keyword args for cleanup.
:return: None
"""
self._cleanups.push(cleanup, *args, **kwargs)
def addDetail(self, name, content_object):
"""Add a detail to the Fixture.
This may only be called after setUp has been called.
:param name: The name for the detail being added. Overrides existing
identically named details.
:param content_object: The content object (meeting the
testtools.content.Content protocol) being added.
"""
self._details[name] = content_object
def cleanUp(self, raise_first=True):
"""Cleanup the fixture.
This function will free all resources managed by the Fixture, restoring
it (and any external facilities such as databases, temporary
directories and so forth) to their original state.
This should not typically be overridden, see addCleanup instead.
cleanUp may be called once and only once after setUp() has been called.
The base implementation of setUp will automatically call cleanUp if
an exception occurs within setUp itself.
:param raise_first: Deprecated parameter from before testtools gained
MultipleExceptions. raise_first defaults to True. When True
if a single exception is raised, it is reraised after all the
cleanUps have run. If multiple exceptions are raised, they are
all wrapped into a MultipleExceptions object, and that is reraised.
Thus, to catch a specific exception from cleanUp, you need to catch
both the exception and MultipleExceptions, and then check within
a MultipleExceptions instance for the type you're catching.
:return: A list of the exc_info() for each exception that occurred if
raise_first was False
"""
try:
return self._cleanups(raise_errors=raise_first)
finally:
self._remove_state()
def _clear_cleanups(self):
"""Clean the cleanup queue without running them.
This is a helper that can be useful for subclasses which define
reset(): they may perform something equivalent to a typical cleanUp
without actually calling the cleanups.
This also clears the details dict.
"""
self._cleanups = CallMany()
self._details = {}
self._detail_sources = []
def _remove_state(self):
"""Remove the internal state.
Called from cleanUp to put the fixture back into a not-ready state.
"""
self._cleanups = None
self._details = None
self._detail_sources = None
def __enter__(self):
self.setUp()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
try:
self._cleanups()
finally:
self._remove_state()
return False # propagate exceptions from the with body.
def getDetails(self):
"""Get the current details registered with the fixture.
This does not return the internal dictionary: mutating it will have no
effect. If you need to mutate it, just do so directly.
:return: Dict from name -> content_object.
"""
result = dict(self._details)
for source in self._detail_sources:
combine_details(source.getDetails(), result)
return result
def setUp(self):
"""Prepare the Fixture for use.
This should not be overridden. Concrete fixtures should implement
_setUp. Overriding of setUp is still supported, just not recommended.
After setUp has completed, the fixture will have one or more attributes
which can be used (these depend totally on the concrete subclass).
:raises: MultipleExceptions if _setUp fails. The last exception
captured within the MultipleExceptions will be a SetupError
exception.
:return: None.
:changed in 1.3: The recommendation to override setUp has been
reversed - before 1.3, setUp() should be overridden, now it should
not be.
:changed in 1.3.1: BaseException is now caught, and only subclasses of
Exception are wrapped in MultipleExceptions.
"""
self._clear_cleanups()
try:
self._setUp()
except:
err = sys.exc_info()
details = {}
if gather_details is not None:
# Materialise all details since we're about to cleanup.
gather_details(self.getDetails(), details)
else:
details = self.getDetails()
errors = [err] + self.cleanUp(raise_first=False)
try:
raise SetupError(details)
except SetupError:
errors.append(sys.exc_info())
if issubclass(err[0], Exception):
raise MultipleExceptions(*errors)
else:
raise err[1].with_traceback(err[2])
def _setUp(self):
"""Template method for subclasses to override.
Override this to customise the fixture. When overriding
be sure to include self.addCleanup calls to restore the fixture to
an un-setUp state, so that a single Fixture instance can be reused.
Fixtures will never have a body in _setUp - calling super() is
entirely at the discretion of subclasses.
:return: None.
"""
def reset(self):
"""Reset a setUp Fixture to the 'just setUp' state again.
The default implementation calls
self.cleanUp()
self.setUp()
but this function may be overridden to provide an optimised routine to
achieve the same result.
:return: None.
"""
self.cleanUp()
self.setUp()
def useFixture(self, fixture):
"""Use another fixture.
The fixture will be setUp, and self.addCleanup(fixture.cleanUp) called.
If the fixture fails to set up, useFixture will attempt to gather its
details into this fixture's details to aid in debugging.
:param fixture: The fixture to use.
:return: The fixture, after setting it up and scheduling a cleanup for
it.
:raises: Any errors raised by the fixture's setUp method.
"""
try:
fixture.setUp()
except MultipleExceptions as e:
if e.args[-1][0] is SetupError:
combine_details(e.args[-1][1].args[0], self._details)
raise
except:
# The child failed to come up and didn't raise MultipleExceptions
# which we can understand... capture any details it has (copying
# the content, it may go away anytime).
if gather_details is not None:
gather_details(fixture.getDetails(), self._details)
raise
else:
self.addCleanup(fixture.cleanUp)
# Calls to getDetails while this fixture is setup will return
# details from the child fixture.
self._detail_sources.append(fixture)
return fixture
|
()
|
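To tie the setUp/_setUp/addCleanup contract above together, a sketch of a hypothetical fixture (the TempEnvVar name is invented; the library's own EnvironmentVariable fixture covers this case for real) showing the recommended pattern of overriding _setUp and registering cleanups:

```python
import os

import fixtures


class TempEnvVar(fixtures.Fixture):
    """Hypothetical fixture: set an environment variable for a test."""

    def __init__(self, name, value):
        super().__init__()
        self.name = name
        self.value = value

    def _setUp(self):
        # Record the previous value and schedule its restoration in cleanUp.
        old = os.environ.get(self.name)
        if old is None:
            self.addCleanup(os.environ.pop, self.name, None)
        else:
            self.addCleanup(os.environ.__setitem__, self.name, old)
        os.environ[self.name] = self.value


# Fixtures also work as context managers: __enter__ runs setUp,
# __exit__ runs the registered cleanups.
with TempEnvVar('MY_FLAG', '1'):
    assert os.environ['MY_FLAG'] == '1'
assert 'MY_FLAG' not in os.environ
```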
62,086 |
fixtures.fixture
|
_setUp
|
Template method for subclasses to override.
Override this to customise the fixture. When overriding
be sure to include self.addCleanup calls to restore the fixture to
an un-setUp state, so that a single Fixture instance can be reused.
Fixtures will never have a body in _setUp - calling super() is
entirely at the discretion of subclasses.
:return: None.
|
def _setUp(self):
"""Template method for subclasses to override.
Override this to customise the fixture. When overriding
be sure to include self.addCleanup calls to restore the fixture to
an un-setUp state, so that a single Fixture instance can be reused.
Fixtures will never have a body in _setUp - calling super() is
entirely at the discretion of subclasses.
:return: None.
"""
|
(self)
|
62,094 |
fixtures.fixture
|
FunctionFixture
|
An adapter to use function(s) as a Fixture.
Typically used when an existing object or function interface exists but you
wish to use it as a Fixture (e.g. because fixtures are in use in your test
suite and this will fit in better).
To adapt an object with differently named setUp and cleanUp methods:
fixture = FunctionFixture(object.install, object.__class__.remove)
Note that the indirection via __class__ is to get an unbound method
which can accept the result from install. See also MethodFixture which
is specialised for objects.
To adapt functions:
fixture = FunctionFixture(tempfile.mkdtemp, shutil.rmtree)
With a reset function:
fixture = FunctionFixture(setup, cleanup, reset)
:ivar fn_result: The result of the setup_fn. Undefined outside of the
setUp, cleanUp context.
|
class FunctionFixture(Fixture):
"""An adapter to use function(s) as a Fixture.
Typically used when an existing object or function interface exists but you
wish to use it as a Fixture (e.g. because fixtures are in use in your test
suite and this will fit in better).
To adapt an object with differently named setUp and cleanUp methods:
fixture = FunctionFixture(object.install, object.__class__.remove)
Note that the indirection via __class__ is to get an unbound method
which can accept the result from install. See also MethodFixture which
is specialised for objects.
To adapt functions:
fixture = FunctionFixture(tempfile.mkdtemp, shutil.rmtree)
With a reset function:
fixture = FunctionFixture(setup, cleanup, reset)
:ivar fn_result: The result of the setup_fn. Undefined outside of the
setUp, cleanUp context.
"""
def __init__(self, setup_fn, cleanup_fn=None, reset_fn=None):
"""Create a FunctionFixture.
:param setup_fn: A callable which takes no parameters and returns the
thing you want to use. e.g.
def setup_fn():
return 42
The result of setup_fn is assigned to the fn_result attribute by
FunctionFixture.setUp.
:param cleanup_fn: Optional callable which takes a single parameter, which
must be that which is returned from the setup_fn. This is called
from cleanUp.
:param reset_fn: Optional callable which takes a single parameter like
cleanup_fn, but also returns a new object for use as the fn_result:
if defined this replaces the use of cleanup_fn and setup_fn when
reset() is called.
"""
super(FunctionFixture, self).__init__()
self.setup_fn = setup_fn
self.cleanup_fn = cleanup_fn
self.reset_fn = reset_fn
def _setUp(self):
fn_result = self.setup_fn()
self._maybe_cleanup(fn_result)
def reset(self):
if self.reset_fn is None:
super(FunctionFixture, self).reset()
else:
self._clear_cleanups()
fn_result = self.reset_fn(self.fn_result)
self._maybe_cleanup(fn_result)
def _maybe_cleanup(self, fn_result):
self.addCleanup(delattr, self, 'fn_result')
if self.cleanup_fn is not None:
self.addCleanup(self.cleanup_fn, fn_result)
self.fn_result = fn_result
|
(setup_fn, cleanup_fn=None, reset_fn=None)
|
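A brief sketch of the function adaptation described above, using the tempfile pairing the docstring itself suggests:

```python
import os
import shutil
import tempfile

import fixtures

# Adapt a pair of plain functions: mkdtemp sets up, rmtree cleans up.
fixture = fixtures.FunctionFixture(tempfile.mkdtemp, shutil.rmtree)
with fixture:
    # fn_result holds whatever setup_fn returned, here the directory path.
    assert os.path.isdir(fixture.fn_result)
# After the block the cleanup_fn has removed the directory again.
```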
62,097 |
fixtures.fixture
|
__init__
|
Create a FunctionFixture.
:param setup_fn: A callable which takes no parameters and returns the
thing you want to use. e.g.
def setup_fn():
return 42
The result of setup_fn is assigned to the fn_result attribute by
FunctionFixture.setUp.
:param cleanup_fn: Optional callable which takes a single parameter, which
must be that which is returned from the setup_fn. This is called
from cleanUp.
:param reset_fn: Optional callable which takes a single parameter like
cleanup_fn, but also returns a new object for use as the fn_result:
if defined this replaces the use of cleanup_fn and setup_fn when
reset() is called.
|
def __init__(self, setup_fn, cleanup_fn=None, reset_fn=None):
"""Create a FunctionFixture.
:param setup_fn: A callable which takes no parameters and returns the
thing you want to use. e.g.
def setup_fn():
return 42
The result of setup_fn is assigned to the fn_result attribute by
FunctionFixture.setUp.
:param cleanup_fn: Optional callable which takes a single parameter, which
must be that which is returned from the setup_fn. This is called
from cleanUp.
:param reset_fn: Optional callable which takes a single parameter like
cleanup_fn, but also returns a new object for use as the fn_result:
if defined this replaces the use of cleanup_fn and setup_fn when
reset() is called.
"""
super(FunctionFixture, self).__init__()
self.setup_fn = setup_fn
self.cleanup_fn = cleanup_fn
self.reset_fn = reset_fn
|
(self, setup_fn, cleanup_fn=None, reset_fn=None)
|
62,099 |
fixtures.fixture
|
_maybe_cleanup
| null |
def _maybe_cleanup(self, fn_result):
self.addCleanup(delattr, self, 'fn_result')
if self.cleanup_fn is not None:
self.addCleanup(self.cleanup_fn, fn_result)
self.fn_result = fn_result
|
(self, fn_result)
|
62,101 |
fixtures.fixture
|
_setUp
| null |
def _setUp(self):
fn_result = self.setup_fn()
self._maybe_cleanup(fn_result)
|
(self)
|
62,106 |
fixtures.fixture
|
reset
| null |
def reset(self):
if self.reset_fn is None:
super(FunctionFixture, self).reset()
else:
self._clear_cleanups()
fn_result = self.reset_fn(self.fn_result)
self._maybe_cleanup(fn_result)
|
(self)
|
62,109 |
fixtures._fixtures.logger
|
LogHandler
|
Replace a logger's handlers.
|
class LogHandler(Fixture):
"""Replace a logger's handlers."""
def __init__(self, handler, name="", level=None, nuke_handlers=True):
"""Create a LogHandler fixture.
:param handler: The handler to replace other handlers with.
If nuke_handlers is False, then added as an extra handler.
:param name: The name of the logger to replace. Defaults to "".
:param level: The log level to set, defaults to not changing the level.
:param nuke_handlers: If True remove all existing handlers (prevents
existing messages going to e.g. stdout). Defaults to True.
"""
super(LogHandler, self).__init__()
self.handler = handler
self._name = name
self._level = level
self._nuke_handlers = nuke_handlers
def _setUp(self):
logger = getLogger(self._name)
if self._level:
self.addCleanup(logger.setLevel, logger.level)
logger.setLevel(self._level)
if self._nuke_handlers:
for handler in reversed(logger.handlers):
self.addCleanup(logger.addHandler, handler)
logger.removeHandler(handler)
try:
logger.addHandler(self.handler)
finally:
self.addCleanup(logger.removeHandler, self.handler)
|
(handler, name='', level=None, nuke_handlers=True)
|
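A hedged sketch of using LogHandler directly with a hand-rolled handler (the ListHandler class is invented for the example); it assumes a testtools-style test case:

```python
import logging

import fixtures
import testtools


class TestWithListHandler(testtools.TestCase):
    def test_collects_records(self):
        records = []

        class ListHandler(logging.Handler):
            def emit(self, record):
                records.append(record)

        # Swap the root logger's handlers for ours for this test only.
        self.useFixture(
            fixtures.LogHandler(ListHandler(), level=logging.INFO))
        logging.info('hello')
        self.assertEqual('hello', records[0].getMessage())
```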
62,112 |
fixtures._fixtures.logger
|
__init__
|
Create a LogHandler fixture.
:param handler: The handler to replace other handlers with.
If nuke_handlers is False, then added as an extra handler.
:param name: The name of the logger to replace. Defaults to "".
:param level: The log level to set, defaults to not changing the level.
:param nuke_handlers: If True remove all existing handlers (prevents
existing messages going to e.g. stdout). Defaults to True.
|
def __init__(self, handler, name="", level=None, nuke_handlers=True):
"""Create a LogHandler fixture.
:param handler: The handler to replace other handlers with.
If nuke_handlers is False, then added as an extra handler.
:param name: The name of the logger to replace. Defaults to "".
:param level: The log level to set, defaults to not changing the level.
:param nuke_handlers: If True remove all existing handlers (prevents
existing messages going to e.g. stdout). Defaults to True.
"""
super(LogHandler, self).__init__()
self.handler = handler
self._name = name
self._level = level
self._nuke_handlers = nuke_handlers
|
(self, handler, name='', level=None, nuke_handlers=True)
|
62,115 |
fixtures._fixtures.logger
|
_setUp
| null |
def _setUp(self):
logger = getLogger(self._name)
if self._level:
self.addCleanup(logger.setLevel, logger.level)
logger.setLevel(self._level)
if self._nuke_handlers:
for handler in reversed(logger.handlers):
self.addCleanup(logger.addHandler, handler)
logger.removeHandler(handler)
try:
logger.addHandler(self.handler)
finally:
self.addCleanup(logger.removeHandler, self.handler)
|
(self)
|
62,138 |
fixtures.fixture
|
MethodFixture
|
An adapter to use an object as a Fixture.
Typically used when an existing object exists but you wish to use it as a
Fixture (e.g. because fixtures are in use in your test suite and this will
fit in better).
To adapt an object with setUp / tearDown methods:
fixture = MethodFixture(object)
If setUp / tearDown / reset are missing, they simply won't be called.
The object is exposed on fixture.obj.
To adapt an object with differently named setUp and cleanUp methods:
fixture = MethodFixture(object, setup=object.mySetUp,
teardown=object.myTearDown)
With a differently named reset function:
fixture = MethodFixture(object, reset=object.myReset)
:ivar obj: The object which is being wrapped.
|
class MethodFixture(Fixture):
"""An adapter to use a function as a Fixture.
Typically used when an existing object exists but you wish to use it as a
Fixture (e.g. because fixtures are in use in your test suite and this will
fit in better).
To adapt an object with setUp / tearDown methods:
fixture = MethodFixture(object)
If setUp / tearDown / reset are missing, they simply won't be called.
The object is exposed on fixture.obj.
To adapt an object with differently named setUp and cleanUp methods:
fixture = MethodFixture(object, setup=object.mySetUp,
teardown=object.myTearDown)
With a differently named reset function:
fixture = MethodFixture(object, reset=object.myReset)
:ivar obj: The object which is being wrapped.
"""
def __init__(self, obj, setup=None, cleanup=None, reset=None):
"""Create a MethodFixture.
:param obj: The object to wrap. Exposed as fixture.obj
:param setup: A method which takes no parameters. e.g.
def setUp(self):
self.value = 42
If setup is not supplied, and the object has a setUp method, that
method is used, otherwise nothing will happen during fixture.setUp.
:param cleanup: Optional method to cleanup the object's state. If
not supplied the method 'tearDown' is used if it exists.
:param reset: Optional method to reset the wrapped object for use.
If not supplied, then the method 'reset' is used if it exists,
otherwise cleanUp and setUp are called as per Fixture.reset().
"""
super(MethodFixture, self).__init__()
self.obj = obj
if setup is None:
setup = getattr(obj, 'setUp', None)
if setup is None:
setup = lambda: None
self._setup = setup
if cleanup is None:
cleanup = getattr(obj, 'tearDown', None)
if cleanup is None:
cleanup = lambda: None
self._cleanup = cleanup
if reset is None:
reset = getattr(obj, 'reset', None)
self._reset = reset
def _setUp(self):
self._setup()
def cleanUp(self):
super(MethodFixture, self).cleanUp()
self._cleanup()
def reset(self):
if self._reset is None:
super(MethodFixture, self).reset()
else:
self._reset()
|
(obj, setup=None, cleanup=None, reset=None)
|
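For the adapter above, a sketch wrapping a plain object whose setUp/tearDown methods should run around a test; the Database class is invented for illustration:

```python
import fixtures


class Database:
    """Hypothetical object with setUp/tearDown-style lifecycle methods."""

    def setUp(self):
        self.connected = True

    def tearDown(self):
        self.connected = False


# setUp/tearDown are picked up automatically when present on the object.
fixture = fixtures.MethodFixture(Database())
fixture.setUp()
assert fixture.obj.connected
fixture.cleanUp()
assert not fixture.obj.connected
```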
62,141 |
fixtures.fixture
|
__init__
|
Create a MethodFixture.
:param obj: The object to wrap. Exposed as fixture.obj
:param setup: A method which takes no parameters. e.g.
def setUp(self):
self.value = 42
If setup is not supplied, and the object has a setUp method, that
method is used, otherwise nothing will happen during fixture.setUp.
:param cleanup: Optional method to cleanup the object's state. If
not supplied the method 'tearDown' is used if it exists.
:param reset: Optional method to reset the wrapped object for use.
If not supplied, then the method 'reset' is used if it exists,
otherwise cleanUp and setUp are called as per Fixture.reset().
|
def __init__(self, obj, setup=None, cleanup=None, reset=None):
"""Create a MethodFixture.
:param obj: The object to wrap. Exposed as fixture.obj
:param setup: A method which takes no parameters. e.g.
def setUp(self):
self.value = 42
If setup is not supplied, and the object has a setUp method, that
method is used, otherwise nothing will happen during fixture.setUp.
:param cleanup: Optional method to cleanup the object's state. If
not supplied the method 'tearDown' is used if it exists.
:param reset: Optional method to reset the wrapped object for use.
If not supplied, then the method 'reset' is used if it exists,
otherwise cleanUp and setUp are called as per Fixture.reset().
"""
super(MethodFixture, self).__init__()
self.obj = obj
if setup is None:
setup = getattr(obj, 'setUp', None)
if setup is None:
setup = lambda: None
self._setup = setup
if cleanup is None:
cleanup = getattr(obj, 'tearDown', None)
if cleanup is None:
cleanup = lambda: None
self._cleanup = cleanup
if reset is None:
reset = getattr(obj, 'reset', None)
self._reset = reset
|
(self, obj, setup=None, cleanup=None, reset=None)
|
62,144 |
fixtures.fixture
|
_setUp
| null |
def _setUp(self):
self._setup()
|
(self)
|
62,147 |
fixtures.fixture
|
cleanUp
| null |
def cleanUp(self):
super(MethodFixture, self).cleanUp()
self._cleanup()
|
(self)
|
62,149 |
fixtures.fixture
|
reset
| null |
def reset(self):
if self._reset is None:
super(MethodFixture, self).reset()
else:
self._reset()
|
(self)
|
62,152 |
fixtures._fixtures.mockpatch
|
MockPatch
|
Deal with code around mock.patch.
|
class MockPatch(_Base):
"""Deal with code around mock.patch."""
def __init__(self, obj, new=None, **kwargs):
super(MockPatch, self).__init__()
if new is None:
new = mock.DEFAULT
self._get_p = lambda: mock.patch(obj, new, **kwargs)
|
(obj, new=None, **kwargs)
|
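A short sketch of MockPatch in use, patching a dotted name exactly as mock.patch would; it assumes a testtools-style test case:

```python
import time

import fixtures
import testtools


class TestClock(testtools.TestCase):
    def test_frozen_time(self):
        # new defaults to mock.DEFAULT, so a MagicMock is created for us.
        patched = self.useFixture(fixtures.MockPatch('time.time'))
        patched.mock.return_value = 1234.0
        self.assertEqual(1234.0, time.time())
```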
62,155 |
fixtures._fixtures.mockpatch
|
__init__
| null |
def __init__(self, obj, new=None, **kwargs):
super(MockPatch, self).__init__()
if new is None:
new = mock.DEFAULT
self._get_p = lambda: mock.patch(obj, new, **kwargs)
|
(self, obj, new=None, **kwargs)
|
62,158 |
fixtures._fixtures.mockpatch
|
_setUp
| null |
def _setUp(self):
_p = self._get_p()
self.addCleanup(_p.stop)
self.mock = _p.start()
|
(self)
|
62,166 |
fixtures._fixtures.mockpatch
|
MockPatchMultiple
|
Deal with code around mock.patch.multiple.
|
class MockPatchMultiple(_Base, metaclass=_MockPatchMultipleMeta):
"""Deal with code around mock.patch.multiple."""
def __init__(self, obj, **kwargs):
"""Initialize the mocks
Pass name=value to replace obj.name with value.
Pass name=MockPatchMultiple.DEFAULT to replace obj.name with a
MagicMock instance.
:param obj: Object or name containing values being mocked.
:type obj: str or object
:param kwargs: names and values of attributes of obj to be mocked.
"""
super(MockPatchMultiple, self).__init__()
self._get_p = lambda: mock.patch.multiple(obj, **kwargs)
|
(obj, **kwargs)
|
62,169 |
fixtures._fixtures.mockpatch
|
__init__
|
Initialize the mocks
Pass name=value to replace obj.name with value.
Pass name=MockPatchMultiple.DEFAULT to replace obj.name with a
MagicMock instance.
:param obj: Object or name containing values being mocked.
:type obj: str or object
:param kwargs: names and values of attributes of obj to be mocked.
|
def __init__(self, obj, **kwargs):
"""Initialize the mocks
Pass name=value to replace obj.name with value.
Pass name=MockPatchMultiple.DEFAULT to replace obj.name with a
MagicMock instance.
:param obj: Object or name containing values being mocked.
:type obj: str or object
:param kwargs: names and values of attributes of obj to be mocked.
"""
super(MockPatchMultiple, self).__init__()
self._get_p = lambda: mock.patch.multiple(obj, **kwargs)
|
(self, obj, **kwargs)
|
62,180 |
fixtures._fixtures.mockpatch
|
MockPatchObject
|
Deal with code around mock.patch.object.
|
class MockPatchObject(_Base):
"""Deal with code around mock."""
def __init__(self, obj, attr, new=None, **kwargs):
super(MockPatchObject, self).__init__()
if new is None:
new = mock.DEFAULT
self._get_p = lambda: mock.patch.object(obj, attr, new, **kwargs)
|
(obj, attr, new=None, **kwargs)
|
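And the attribute-level variant: a sketch stubbing one method on a hypothetical Client class (invented for the example), passing return_value through to the underlying mock.patch.object call:

```python
import fixtures
import testtools


class Client:
    """Hypothetical collaborator whose network call we want to stub."""

    def fetch(self):
        raise RuntimeError('would hit the network')


class TestClient(testtools.TestCase):
    def test_fetch_is_stubbed(self):
        patched = self.useFixture(
            fixtures.MockPatchObject(Client, 'fetch', return_value={'ok': True}))
        self.assertEqual({'ok': True}, Client().fetch())
        patched.mock.assert_called_once_with()
```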
62,183 |
fixtures._fixtures.mockpatch
|
__init__
| null |
def __init__(self, obj, attr, new=None, **kwargs):
super(MockPatchObject, self).__init__()
if new is None:
new = mock.DEFAULT
self._get_p = lambda: mock.patch.object(obj, attr, new, **kwargs)
|
(self, obj, attr, new=None, **kwargs)
|
62,194 |
fixtures._fixtures.monkeypatch
|
MonkeyPatch
|
Replace or delete an attribute.
|
class MonkeyPatch(Fixture):
"""Replace or delete an attribute."""
delete = object()
def __init__(self, name, new_value=None):
"""Create a MonkeyPatch.
:param name: The fully qualified object name to override.
:param new_value: A value to set the name to. If set to
MonkeyPatch.delete the attribute will be deleted.
During setup the name will be deleted or assigned the requested value,
and this will be restored in cleanUp.
When patching methods, the call signature of name should be a subset
of the parameters which can be used to call new_value.
For instance.
>>> class T:
... def method(self, arg1):
... pass
>>> class N:
... @staticmethod
... def newmethod(arg1):
... pass
Patching N.newmethod on top of T.method and then calling T().method(1)
will not work because they do not have compatible call signatures -
self will be passed to newmethod because the callable (N.newmethod)
is placed onto T as a regular function. This allows capturing all the
supplied parameters while still consulting local state in your
new_value.
"""
Fixture.__init__(self)
self.name = name
self.new_value = new_value
def _setUp(self):
location, attribute = self.name.rsplit('.', 1)
# Import, swallowing all errors as any element of location may be
# a class or some such thing.
try:
__import__(location, {}, {})
except ImportError:
pass
components = location.split('.')
current = __import__(components[0], {}, {})
for component in components[1:]:
current = getattr(current, component)
sentinel = object()
new_value, old_value = _coerce_values(
current, attribute, self.new_value, sentinel)
if self.new_value is self.delete:
if old_value is not sentinel:
delattr(current, attribute)
else:
setattr(current, attribute, new_value)
if old_value is sentinel:
self.addCleanup(self._safe_delete, current, attribute)
else:
self.addCleanup(setattr, current, attribute, old_value)
def _safe_delete(self, obj, attribute):
"""Delete obj.attribute handling the case where its missing."""
sentinel = object()
if getattr(obj, attribute, sentinel) is not sentinel:
delattr(obj, attribute)
|
(name, new_value=None)
|
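A brief sketch of MonkeyPatch; the fully qualified name is resolved at setUp time and the old value (or its absence) is restored at cleanUp. It assumes a testtools-style test case:

```python
import os

import fixtures
import testtools


class TestPlatform(testtools.TestCase):
    def test_pretend_windows(self):
        # Replace os.name for the duration of this test; cleanUp restores it.
        # Passing fixtures.MonkeyPatch.delete instead would remove the
        # attribute entirely.
        self.useFixture(fixtures.MonkeyPatch('os.name', 'nt'))
        self.assertEqual('nt', os.name)
```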
62,197 |
fixtures._fixtures.monkeypatch
|
__init__
|
Create a MonkeyPatch.
:param name: The fully qualified object name to override.
:param new_value: A value to set the name to. If set to
MonkeyPatch.delete the attribute will be deleted.
During setup the name will be deleted or assigned the requested value,
and this will be restored in cleanUp.
When patching methods, the call signature of name should be a subset
of the parameters which can be used to call new_value.
For instance.
>>> class T:
... def method(self, arg1):
... pass
>>> class N:
... @staticmethod
... def newmethod(arg1):
... pass
Patching N.newmethod on top of T.method and then calling T().method(1)
will not work because they do not have compatible call signatures -
self will be passed to newmethod because the callable (N.newmethod)
is placed onto T as a regular function. This allows capturing all the
supplied parameters while still consulting local state in your
new_value.
|
def __init__(self, name, new_value=None):
"""Create a MonkeyPatch.
:param name: The fully qualified object name to override.
:param new_value: A value to set the name to. If set to
MonkeyPatch.delete the attribute will be deleted.
During setup the name will be deleted or assigned the requested value,
and this will be restored in cleanUp.
When patching methods, the call signature of name should be a subset
of the parameters which can be used to call new_value.
For instance.
>>> class T:
... def method(self, arg1):
... pass
>>> class N:
... @staticmethod
... def newmethod(arg1):
... pass
Patching N.newmethod on top of T.method and then calling T().method(1)
will not work because they do not have compatible call signatures -
self will be passed to newmethod because the callable (N.newmethod)
is placed onto T as a regular function. This allows capturing all the
supplied parameters while still consulting local state in your
new_value.
"""
Fixture.__init__(self)
self.name = name
self.new_value = new_value
|
(self, name, new_value=None)
|
62,200 |
fixtures._fixtures.monkeypatch
|
_safe_delete
|
Delete obj.attribute handling the case where its missing.
|
def _safe_delete(self, obj, attribute):
"""Delete obj.attribute handling the case where its missing."""
sentinel = object()
if getattr(obj, attribute, sentinel) is not sentinel:
delattr(obj, attribute)
|
(self, obj, attribute)
|
62,201 |
fixtures._fixtures.monkeypatch
|
_setUp
| null |
def _setUp(self):
location, attribute = self.name.rsplit('.', 1)
# Import, swallowing all errors as any element of location may be
# a class or some such thing.
try:
__import__(location, {}, {})
except ImportError:
pass
components = location.split('.')
current = __import__(components[0], {}, {})
for component in components[1:]:
current = getattr(current, component)
sentinel = object()
new_value, old_value = _coerce_values(
current, attribute, self.new_value, sentinel)
if self.new_value is self.delete:
if old_value is not sentinel:
delattr(current, attribute)
else:
setattr(current, attribute, new_value)
if old_value is sentinel:
self.addCleanup(self._safe_delete, current, attribute)
else:
self.addCleanup(setattr, current, attribute, old_value)
|
(self)
|
62,209 |
fixtures.callmany
|
MultipleExceptions
|
Report multiple exc_info tuples in self.args.
|
class MultipleExceptions(Exception):
"""Report multiple exc_info tuples in self.args."""
| null |
62,210 |
fixtures._fixtures.tempdir
|
NestedTempfile
|
Nest all temporary files and directories inside another directory.
This temporarily monkey-patches the default location that the `tempfile`
package creates temporary files and directories in to be a new temporary
directory. This new temporary directory is removed when the fixture is torn
down.
|
class NestedTempfile(fixtures.Fixture):
"""Nest all temporary files and directories inside another directory.
This temporarily monkey-patches the default location that the `tempfile`
package creates temporary files and directories in to be a new temporary
directory. This new temporary directory is removed when the fixture is torn
down.
"""
def _setUp(self):
tempdir = self.useFixture(TempDir()).path
patch = fixtures.MonkeyPatch("tempfile.tempdir", tempdir)
self.useFixture(patch)
|
()
|
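A quick sketch of the effect of NestedTempfile: while the fixture is active, tempfile creations land under a throwaway directory that disappears on cleanUp:

```python
import os
import tempfile

import fixtures

with fixtures.NestedTempfile():
    # tempfile.tempdir is monkey-patched to a fresh temporary directory.
    nested_root = tempfile.tempdir
    handle, path = tempfile.mkstemp()
    os.close(handle)
    assert path.startswith(nested_root)
# The nested directory (and everything created inside it) is now gone.
assert not os.path.exists(nested_root)
```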
62,215 |
fixtures._fixtures.tempdir
|
_setUp
| null |
def _setUp(self):
tempdir = self.useFixture(TempDir()).path
patch = fixtures.MonkeyPatch("tempfile.tempdir", tempdir)
self.useFixture(patch)
|
(self)
|
62,223 |
fixtures._fixtures.packagepath
|
PackagePathEntry
|
Add a path to the path of a python package.
The python package needs to be already imported.
If this new path is already in the package's __path__ list then the __path__
list will not be altered.
|
class PackagePathEntry(Fixture):
"""Add a path to the path of a python package.
The python package needs to be already imported.
If this new path is already in the package's __path__ list then the __path__
list will not be altered.
"""
def __init__(self, packagename, directory):
"""Create a PackagePathEntry.
:param directory: The directory to add to the package.__path__.
"""
self.packagename = packagename
self.directory = directory
def _setUp(self):
path = sys.modules[self.packagename].__path__
if self.directory in path:
return
self.addCleanup(path.remove, self.directory)
path.append(self.directory)
|
(packagename, directory)
|
62,226 |
fixtures._fixtures.packagepath
|
__init__
|
Create a PackagePathEntry.
:param directory: The directory to add to the package.__path__.
|
def __init__(self, packagename, directory):
"""Create a PackagePathEntry.
:param directory: The directory to add to the package.__path__.
"""
self.packagename = packagename
self.directory = directory
|
(self, packagename, directory)
|
62,229 |
fixtures._fixtures.packagepath
|
_setUp
| null |
def _setUp(self):
path = sys.modules[self.packagename].__path__
if self.directory in path:
return
self.addCleanup(path.remove, self.directory)
path.append(self.directory)
|
(self)
|
62,252 |
fixtures._fixtures.pythonpackage
|
PythonPackage
|
Create a temporary Python package.
:ivar base: The path of the directory containing the module. E.g. for a
module 'foo', the path base + '/foo/__init__.py' would be the file path
for the module.
|
class PythonPackage(Fixture):
"""Create a temporary Python package.
:ivar base: The path of the directory containing the module. E.g. for a
module 'foo', the path base + '/foo/__init__.py' would be the file path
for the module.
"""
def __init__(self, packagename, modulelist, init=True):
"""Create a PythonPackage.
:param packagename: The name of the package to create - e.g.
'toplevel.subpackage.'
:param modulelist: List of modules to include in the package.
Each module should be a tuple with the filename and content it
should have.
:param init: If false, do not create a missing __init__.py. When
True, if modulelist does not include an __init__.py, an empty
one is created.
"""
self.packagename = packagename
self.modulelist = modulelist
self.init = init
def _setUp(self):
self.base = self.useFixture(TempDir()).path
base = self.base
root = os.path.join(base, self.packagename)
os.mkdir(root)
init_seen = not self.init
for modulename, contents in self.modulelist:
stream = open(os.path.join(root, modulename), 'wb')
try:
stream.write(contents)
finally:
stream.close()
if modulename == '__init__.py':
init_seen = True
if not init_seen:
open(os.path.join(root, '__init__.py'), 'wb').close()
|
(packagename, modulelist, init=True)
|
62,255 |
fixtures._fixtures.pythonpackage
|
__init__
|
Create a PythonPackage.
:param packagename: The name of the package to create - e.g.
'toplevel.subpackage.'
:param modulelist: List of modules to include in the package.
Each module should be a tuple with the filename and content it
should have.
:param init: If false, do not create a missing __init__.py. When
True, if modulelist does not include an __init__.py, an empty
one is created.
|
def __init__(self, packagename, modulelist, init=True):
"""Create a PythonPackage.
:param packagename: The name of the package to create - e.g.
'toplevel.subpackage.'
:param modulelist: List of modules to include in the package.
Each module should be a tuple with the filename and content it
should have.
:param init: If false, do not create a missing __init__.py. When
True, if modulelist does not include an __init__.py, an empty
one is created.
"""
self.packagename = packagename
self.modulelist = modulelist
self.init = init
|
(self, packagename, modulelist, init=True)
|
62,258 |
fixtures._fixtures.pythonpackage
|
_setUp
| null |
def _setUp(self):
self.base = self.useFixture(TempDir()).path
base = self.base
root = os.path.join(base, self.packagename)
os.mkdir(root)
init_seen = not self.init
for modulename, contents in self.modulelist:
stream = open(os.path.join(root, modulename), 'wb')
try:
stream.write(contents)
finally:
stream.close()
if modulename == '__init__.py':
init_seen = True
if not init_seen:
open(os.path.join(root, '__init__.py'), 'wb').close()
|
(self)
|
62,266 |
fixtures._fixtures.pythonpath
|
PythonPathEntry
|
Add a path to sys.path.
If the path is already in sys.path, sys.path will not be altered.
|
class PythonPathEntry(Fixture):
"""Add a path to sys.path.
If the path is already in sys.path, sys.path will not be altered.
"""
def __init__(self, directory):
"""Create a PythonPathEntry.
:param directory: The directory to add to sys.path.
"""
self.directory = directory
def _setUp(self):
if self.directory in sys.path:
return
self.addCleanup(sys.path.remove, self.directory)
sys.path.append(self.directory)
|
(directory)
|
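A tiny sketch of PythonPathEntry; the directory shown is a hypothetical placeholder:

```python
import sys

import fixtures

with fixtures.PythonPathEntry('/tmp/extra-libs'):
    # The entry is appended only if it was not already on sys.path.
    assert '/tmp/extra-libs' in sys.path
# cleanUp removes the entry it added.
```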
62,269 |
fixtures._fixtures.pythonpath
|
__init__
|
Create a PythonPathEntry.
:param directory: The directory to add to sys.path.
|
def __init__(self, directory):
"""Create a PythonPathEntry.
:param directory: The directory to add to sys.path.
"""
self.directory = directory
|
(self, directory)
|
62,272 |
fixtures._fixtures.pythonpath
|
_setUp
| null |
def _setUp(self):
if self.directory in sys.path:
return
self.addCleanup(sys.path.remove, self.directory)
sys.path.append(self.directory)
|
(self)
|
62,280 |
fixtures.fixture
|
SetupError
|
Setup failed.
args[0] will be a details dict.
|
class SetupError(Exception):
"""Setup failed.
args[0] will be a details dict.
"""
| null |
62,281 |
fixtures._fixtures.streams
|
StringStream
|
Provide a file-like object that accepts strings and exposes it as a detail.
:param detail_name: The name of the detail.
:return: A fixture which has an attribute `stream` containing the file-like
object.
|
def StringStream(detail_name):
"""Provide a file-like object that accepts strings and expose as a detail.
:param detail_name: The name of the detail.
:return: A fixture which has an attribute `stream` containing the file-like
object.
"""
return Stream(detail_name, _string_stream_factory)
|
(detail_name)
|
62,282 |
fixtures._fixtures.tempdir
|
TempDir
|
Create a temporary directory.
:ivar path: The path of the temporary directory.
|
class TempDir(fixtures.Fixture):
"""Create a temporary directory.
:ivar path: The path of the temporary directory.
"""
def __init__(self, rootdir=None):
"""Create a TempDir.
:param rootdir: If supplied force the temporary directory to be a
child of rootdir.
"""
self.rootdir = rootdir
def _setUp(self):
self.path = tempfile.mkdtemp(dir=self.rootdir)
self.addCleanup(shutil.rmtree, self.path, ignore_errors=True)
def join(self, *children):
"""Return an absolute path, given one relative to this ``TempDir``.
WARNING: This does not do any checking of ``children`` to make sure
they aren't walking up the tree using path segments like '..' or
'/usr'. Use at your own risk.
"""
return os.path.abspath(os.path.join(self.path, *children))
|
(rootdir=None)
|
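A minimal sketch of TempDir and its join helper; the file layout is invented for the example:

```python
import os

import fixtures

with fixtures.TempDir() as tmp:
    # tmp.path is a freshly created directory, removed again on cleanUp.
    config = tmp.join('etc', 'app.conf')
    os.makedirs(os.path.dirname(config))
    with open(config, 'w') as f:
        f.write('debug = true\n')
    assert os.path.exists(config)
assert not os.path.exists(tmp.path)
```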
62,285 |
fixtures._fixtures.tempdir
|
__init__
|
Create a TempDir.
:param rootdir: If supplied force the temporary directory to be a
child of rootdir.
|
def __init__(self, rootdir=None):
"""Create a TempDir.
:param rootdir: If supplied force the temporary directory to be a
child of rootdir.
"""
self.rootdir = rootdir
|
(self, rootdir=None)
|
62,288 |
fixtures._fixtures.tempdir
|
_setUp
| null |
def _setUp(self):
self.path = tempfile.mkdtemp(dir=self.rootdir)
self.addCleanup(shutil.rmtree, self.path, ignore_errors=True)
|
(self)
|
62,293 |
fixtures._fixtures.tempdir
|
join
|
Return an absolute path, given one relative to this ``TempDir``.
WARNING: This does not do any checking of ``children`` to make sure
they aren't walking up the tree using path segments like '..' or
'/usr'. Use at your own risk.
|
def join(self, *children):
"""Return an absolute path, given one relative to this ``TempDir``.
WARNING: This does not do any checking of ``children`` to make sure
they aren't walking up the tree using path segments like '..' or
'/usr'. Use at your own risk.
"""
return os.path.abspath(os.path.join(self.path, *children))
|
(self, *children)
|
62,297 |
fixtures._fixtures.temphomedir
|
TempHomeDir
|
Create a temporary directory and set it as $HOME
:ivar path: the path of the temporary directory.
|
class TempHomeDir(TempDir):
"""Create a temporary directory and set it as $HOME
:ivar path: the path of the temporary directory.
"""
def _setUp(self):
super(TempHomeDir, self)._setUp()
self.useFixture(fixtures.EnvironmentVariable("HOME", self.path))
|
(rootdir=None)
|
62,303 |
fixtures._fixtures.temphomedir
|
_setUp
| null |
def _setUp(self):
super(TempHomeDir, self)._setUp()
self.useFixture(fixtures.EnvironmentVariable("HOME", self.path))
|
(self)
|
62,312 |
fixtures.testcase
|
TestWithFixtures
|
A TestCase with a helper function to use fixtures.
Normally used as a mix-in class to add useFixture.
Note that test classes such as testtools.TestCase which already have a
``useFixture`` method do not need this mixed in.
|
class TestWithFixtures(unittest.TestCase):
"""A TestCase with a helper function to use fixtures.
Normally used as a mix-in class to add useFixture.
Note that test classes such as testtools.TestCase which already have a
``useFixture`` method do not need this mixed in.
"""
def useFixture(self, fixture):
"""Use fixture in a test case.
The fixture will be setUp, and self.addCleanup(fixture.cleanUp) called.
:param fixture: The fixture to use.
:return: The fixture, after setting it up and scheduling a cleanup for
it.
"""
use_details = (
gather_details is not None and
getattr(self, "addDetail", None) is not None)
try:
fixture.setUp()
except:
if use_details:
# Capture the details now, in case the fixture goes away.
gather_details(fixture.getDetails(), self.getDetails())
raise
else:
self.addCleanup(fixture.cleanUp)
if use_details:
# Capture the details from the fixture during test teardown;
# this will evaluate the details before tearing down the
# fixture.
self.addCleanup(gather_details, fixture, self)
return fixture
|
(methodName='runTest')
|
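For plain unittest users, a sketch of subclassing TestWithFixtures so useFixture is available without testtools:

```python
import os
import unittest

import fixtures


class TempDirTest(fixtures.TestWithFixtures):
    def test_creates_scratch_space(self):
        # useFixture sets the fixture up and schedules its cleanUp.
        tmp = self.useFixture(fixtures.TempDir())
        self.assertTrue(os.path.isdir(tmp.path))


if __name__ == '__main__':
    unittest.main()
```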
62,390 |
unittest.case
|
setUp
|
Hook method for setting up the test fixture before exercising it.
|
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
|
(self)
|
62,395 |
fixtures.testcase
|
useFixture
|
Use fixture in a test case.
The fixture will be setUp, and self.addCleanup(fixture.cleanUp) called.
:param fixture: The fixture to use.
:return: The fixture, after setting it up and scheduling a cleanup for
it.
|
def useFixture(self, fixture):
"""Use fixture in a test case.
The fixture will be setUp, and self.addCleanup(fixture.cleanUp) called.
:param fixture: The fixture to use.
:return: The fixture, after setting it up and scheduling a cleanup for
it.
"""
use_details = (
gather_details is not None and
getattr(self, "addDetail", None) is not None)
try:
fixture.setUp()
except:
if use_details:
# Capture the details now, in case the fixture goes away.
gather_details(fixture.getDetails(), self.getDetails())
raise
else:
self.addCleanup(fixture.cleanUp)
if use_details:
# Capture the details from the fixture during test teardown;
# this will evaluate the details before tearing down the
# fixture.
self.addCleanup(gather_details, fixture, self)
return fixture
|
(self, fixture)
|
62,396 |
fixtures._fixtures.timeout
|
Timeout
|
Fixture that aborts the contained code after a number of seconds.
The interrupt can be either gentle, in which case TimeoutException is
raised, or not gentle, in which case the process will typically be aborted
by SIGALRM.
Cautions:
* This has no effect on Windows.
* Only one Timeout can be used at any time per process.
|
class Timeout(fixtures.Fixture):
"""Fixture that aborts the contained code after a number of seconds.
The interrupt can be either gentle, in which case TimeoutException is
raised, or not gentle, in which case the process will typically be aborted
by SIGALRM.
Cautions:
* This has no effect on Windows.
* Only one Timeout can be used at any time per process.
"""
def __init__(self, timeout_secs, gentle):
self.timeout_secs = timeout_secs
self.alarm_fn = getattr(signal, 'alarm', None)
self.gentle = gentle
def signal_handler(self, signum, frame):
raise TimeoutException()
def _setUp(self):
if self.alarm_fn is None:
return # Can't run on Windows
if self.gentle:
# Install a handler for SIGALRM so we can raise an exception rather
# than the default handler executing, which kills the process.
old_handler = signal.signal(signal.SIGALRM, self.signal_handler)
# We add the alarm cleanup before the cleanup for the signal handler,
# otherwise there is a race condition where the signal handler is
# cleaned up but the alarm still fires.
self.addCleanup(lambda: self.alarm_fn(0))
self.alarm_fn(self.timeout_secs)
if self.gentle:
self.addCleanup(lambda: signal.signal(signal.SIGALRM, old_handler))
|
(timeout_secs, gentle)
|
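A hedged sketch of the gentle Timeout mode described above; on POSIX the fixture raises TimeoutException when the alarm fires, and on Windows it is a no-op:

```python
import time

import fixtures
import testtools


class TestSlowPath(testtools.TestCase):
    def test_finishes_quickly(self):
        # Abort this test with TimeoutException if it runs longer than 2s.
        self.useFixture(fixtures.Timeout(2, gentle=True))
        time.sleep(0.01)  # the real work under test would go here
```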
62,399 |
fixtures._fixtures.timeout
|
__init__
| null |
def __init__(self, timeout_secs, gentle):
self.timeout_secs = timeout_secs
self.alarm_fn = getattr(signal, 'alarm', None)
self.gentle = gentle
|
(self, timeout_secs, gentle)
|
62,402 |
fixtures._fixtures.timeout
|
_setUp
| null |
def _setUp(self):
if self.alarm_fn is None:
return # Can't run on Windows
if self.gentle:
# Install a handler for SIGALRM so we can raise an exception rather
# than the default handler executing, which kills the process.
old_handler = signal.signal(signal.SIGALRM, self.signal_handler)
        # We add the alarm cleanup before the cleanup for the signal handler,
# otherwise there is a race condition where the signal handler is
# cleaned up but the alarm still fires.
self.addCleanup(lambda: self.alarm_fn(0))
self.alarm_fn(self.timeout_secs)
if self.gentle:
self.addCleanup(lambda: signal.signal(signal.SIGALRM, old_handler))
|
(self)
|
62,409 |
fixtures._fixtures.timeout
|
signal_handler
| null |
def signal_handler(self, signum, frame):
raise TimeoutException()
|
(self, signum, frame)
|
62,411 |
fixtures._fixtures.timeout
|
TimeoutException
|
Timeout expired
|
class TimeoutException(Exception):
"""Timeout expired"""
| null |
62,412 |
pbr.version
|
VersionInfo
| null |
class VersionInfo(object):
def __init__(self, package):
"""Object that understands versioning for a package
:param package: name of the python package, such as glance, or
python-glanceclient
"""
self.package = package
self.version = None
self._cached_version = None
self._semantic = None
def __str__(self):
"""Make the VersionInfo object behave like a string."""
return self.version_string()
def __repr__(self):
"""Include the name."""
return "pbr.version.VersionInfo(%s:%s)" % (
self.package, self.version_string())
def _get_version_from_pkg_resources(self):
"""Obtain a version from pkg_resources or setup-time logic if missing.
        This will try to get the version of the package from the pkg_resources
        record associated with the package, and if there is no such record
        falls back to the logic sdist would use.
"""
import pkg_resources
try:
requirement = pkg_resources.Requirement.parse(self.package)
provider = pkg_resources.get_provider(requirement)
result_string = provider.version
except pkg_resources.DistributionNotFound:
# The most likely cause for this is running tests in a tree
# produced from a tarball where the package itself has not been
# installed into anything. Revert to setup-time logic.
from pbr import packaging
result_string = packaging.get_version(self.package)
return SemanticVersion.from_pip_string(result_string)
def _get_version_from_importlib_metadata(self):
"""Obtain a version from importlib or setup-time logic if missing.
This will try to get the version of the package from the
importlib_metadata record associated with the package, and if there
is no such record falls back to the logic sdist would use.
"""
try:
distribution = importlib_metadata.distribution(self.package)
result_string = distribution.version
except importlib_metadata.PackageNotFoundError:
# The most likely cause for this is running tests in a tree
# produced from a tarball where the package itself has not been
# installed into anything. Revert to setup-time logic.
from pbr import packaging
result_string = packaging.get_version(self.package)
return SemanticVersion.from_pip_string(result_string)
def release_string(self):
"""Return the full version of the package.
        This includes suffixes indicating VCS status.
"""
return self.semantic_version().release_string()
def semantic_version(self):
"""Return the SemanticVersion object for this version."""
if self._semantic is None:
# TODO(damami): simplify this once Python 3.8 is the oldest
# we support
if use_importlib:
self._semantic = self._get_version_from_importlib_metadata()
else:
self._semantic = self._get_version_from_pkg_resources()
return self._semantic
def version_string(self):
"""Return the short version minus any alpha/beta tags."""
return self.semantic_version().brief_string()
# Compatibility functions
canonical_version_string = version_string
version_string_with_vcs = release_string
def cached_version_string(self, prefix=""):
"""Return a cached version string.
This will return a cached version string if one is already cached,
irrespective of prefix. If none is cached, one will be created with
prefix and then cached and returned.
"""
if not self._cached_version:
self._cached_version = "%s%s" % (prefix,
self.version_string())
return self._cached_version
|
(package)
|
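A hedged sketch of querying version metadata with VersionInfo; 'pbr' is used as the example package name and the printed values are illustrative.
from pbr.version import VersionInfo

info = VersionInfo('pbr')
print(info.version_string())            # short X.Y.Z form, e.g. "5.11.1"
print(info.release_string())            # full version, including any dev/VCS suffix
print(info.cached_version_string('v'))  # e.g. "v5.11.1"; cached after the first call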
62,413 |
pbr.version
|
__init__
|
Object that understands versioning for a package
:param package: name of the python package, such as glance, or
python-glanceclient
|
def __init__(self, package):
"""Object that understands versioning for a package
:param package: name of the python package, such as glance, or
python-glanceclient
"""
self.package = package
self.version = None
self._cached_version = None
self._semantic = None
|
(self, package)
|
62,414 |
pbr.version
|
__repr__
|
Include the name.
|
def __repr__(self):
"""Include the name."""
return "pbr.version.VersionInfo(%s:%s)" % (
self.package, self.version_string())
|
(self)
|
62,415 |
pbr.version
|
__str__
|
Make the VersionInfo object behave like a string.
|
def __str__(self):
"""Make the VersionInfo object behave like a string."""
return self.version_string()
|
(self)
|
62,416 |
pbr.version
|
_get_version_from_importlib_metadata
|
Obtain a version from importlib or setup-time logic if missing.
This will try to get the version of the package from the
importlib_metadata record associated with the package, and if there
is no such record falls back to the logic sdist would use.
|
def _get_version_from_importlib_metadata(self):
"""Obtain a version from importlib or setup-time logic if missing.
This will try to get the version of the package from the
importlib_metadata record associated with the package, and if there
is no such record falls back to the logic sdist would use.
"""
try:
distribution = importlib_metadata.distribution(self.package)
result_string = distribution.version
except importlib_metadata.PackageNotFoundError:
# The most likely cause for this is running tests in a tree
# produced from a tarball where the package itself has not been
# installed into anything. Revert to setup-time logic.
from pbr import packaging
result_string = packaging.get_version(self.package)
return SemanticVersion.from_pip_string(result_string)
|
(self)
|
62,417 |
pbr.version
|
_get_version_from_pkg_resources
|
Obtain a version from pkg_resources or setup-time logic if missing.
This will try to get the version of the package from the pkg_resources
record associated with the package, and if there is no such record
falls back to the logic sdist would use.
|
def _get_version_from_pkg_resources(self):
"""Obtain a version from pkg_resources or setup-time logic if missing.
        This will try to get the version of the package from the pkg_resources
        record associated with the package, and if there is no such record
        falls back to the logic sdist would use.
"""
import pkg_resources
try:
requirement = pkg_resources.Requirement.parse(self.package)
provider = pkg_resources.get_provider(requirement)
result_string = provider.version
except pkg_resources.DistributionNotFound:
# The most likely cause for this is running tests in a tree
# produced from a tarball where the package itself has not been
# installed into anything. Revert to setup-time logic.
from pbr import packaging
result_string = packaging.get_version(self.package)
return SemanticVersion.from_pip_string(result_string)
|
(self)
|
62,418 |
pbr.version
|
cached_version_string
|
Return a cached version string.
This will return a cached version string if one is already cached,
irrespective of prefix. If none is cached, one will be created with
prefix and then cached and returned.
|
def cached_version_string(self, prefix=""):
"""Return a cached version string.
This will return a cached version string if one is already cached,
irrespective of prefix. If none is cached, one will be created with
prefix and then cached and returned.
"""
if not self._cached_version:
self._cached_version = "%s%s" % (prefix,
self.version_string())
return self._cached_version
|
(self, prefix='')
|
62,419 |
pbr.version
|
version_string
|
Return the short version minus any alpha/beta tags.
|
def version_string(self):
"""Return the short version minus any alpha/beta tags."""
return self.semantic_version().brief_string()
|
(self)
|
62,420 |
pbr.version
|
release_string
|
Return the full version of the package.
This includes suffixes indicating VCS status.
|
def release_string(self):
"""Return the full version of the package.
        This includes suffixes indicating VCS status.
"""
return self.semantic_version().release_string()
|
(self)
|
62,421 |
pbr.version
|
semantic_version
|
Return the SemanticVersion object for this version.
|
def semantic_version(self):
"""Return the SemanticVersion object for this version."""
if self._semantic is None:
# TODO(damami): simplify this once Python 3.8 is the oldest
# we support
if use_importlib:
self._semantic = self._get_version_from_importlib_metadata()
else:
self._semantic = self._get_version_from_pkg_resources()
return self._semantic
|
(self)
|
62,424 |
fixtures._fixtures.warnings
|
WarningsCapture
|
Capture warnings.
While ``WarningsCapture`` is active, warnings will be captured by
the fixture (so that they can be later analyzed).
:attribute captures: A list of warning capture ``WarningMessage`` objects.
|
class WarningsCapture(fixtures.Fixture):
"""Capture warnings.
While ``WarningsCapture`` is active, warnings will be captured by
the fixture (so that they can be later analyzed).
:attribute captures: A list of warning capture ``WarningMessage`` objects.
"""
def _showwarning(self, *args, **kwargs):
self.captures.append(warnings.WarningMessage(*args, **kwargs))
def _setUp(self):
patch = fixtures.MonkeyPatch("warnings.showwarning", self._showwarning)
self.useFixture(patch)
self.captures = []
|
()
|
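A hedged sketch of WarningsCapture used as a context manager; it assumes the class is exported at the package top level as fixtures.WarningsCapture. A plain UserWarning is used so the default warning filters do not suppress it.
import warnings

from fixtures import WarningsCapture

with WarningsCapture() as capture:
    warnings.warn("flag --foo is deprecated")

# capture.captures holds warnings.WarningMessage objects
print([str(w.message) for w in capture.captures])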
62,429 |
fixtures._fixtures.warnings
|
_setUp
| null |
def _setUp(self):
patch = fixtures.MonkeyPatch("warnings.showwarning", self._showwarning)
self.useFixture(patch)
self.captures = []
|
(self)
|
62,430 |
fixtures._fixtures.warnings
|
_showwarning
| null |
def _showwarning(self, *args, **kwargs):
self.captures.append(warnings.WarningMessage(*args, **kwargs))
|
(self, *args, **kwargs)
|
62,438 |
fixtures._fixtures.warnings
|
WarningsFilter
|
Configure warnings filters.
While ``WarningsFilter`` is active, warnings will be filtered per
configuration.
|
class WarningsFilter(fixtures.Fixture):
"""Configure warnings filters.
While ``WarningsFilter`` is active, warnings will be filtered per
configuration.
"""
def __init__(self, filters=None):
"""Create a WarningsFilter fixture.
:param filters: An optional list of dictionaries with arguments
corresponding to the arguments to
:py:func:`warnings.filterwarnings`. For example::
[
{
'action': 'ignore',
'message': 'foo',
'category': DeprecationWarning,
},
]
Order is important: entries closer to the front of the list
override entries later in the list, if both match a particular
warning.
Alternatively, you can configure warnings within the context of the
fixture.
See `the Python documentation`__ for more information.
__: https://docs.python.org/3/library/warnings.html#the-warnings-filter
"""
super().__init__()
self.filters = filters or []
def _setUp(self):
self._original_warning_filters = warnings.filters[:]
for filt in self.filters:
warnings.filterwarnings(**filt)
self.addCleanup(self._reset_warning_filters)
def _reset_warning_filters(self):
warnings.filters[:] = self._original_warning_filters
|
(filters=None)
|
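A hedged sketch of WarningsFilter silencing DeprecationWarning only while the fixture is active; it assumes the class is exported at the package top level as fixtures.WarningsFilter.
import warnings

from fixtures import WarningsFilter

filters = [
    {'action': 'ignore', 'category': DeprecationWarning},
]
with WarningsFilter(filters=filters):
    warnings.warn("old API", DeprecationWarning)  # filtered out here
# on cleanUp the original warnings.filters list is restored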
62,441 |
fixtures._fixtures.warnings
|
__init__
|
Create a WarningsFilter fixture.
:param filters: An optional list of dictionaries with arguments
corresponding to the arguments to
:py:func:`warnings.filterwarnings`. For example::
[
{
'action': 'ignore',
'message': 'foo',
'category': DeprecationWarning,
},
]
Order is important: entries closer to the front of the list
override entries later in the list, if both match a particular
warning.
Alternatively, you can configure warnings within the context of the
fixture.
See `the Python documentation`__ for more information.
__: https://docs.python.org/3/library/warnings.html#the-warnings-filter
|
def __init__(self, filters=None):
"""Create a WarningsFilter fixture.
:param filters: An optional list of dictionaries with arguments
corresponding to the arguments to
:py:func:`warnings.filterwarnings`. For example::
[
{
'action': 'ignore',
'message': 'foo',
'category': DeprecationWarning,
},
]
Order is important: entries closer to the front of the list
override entries later in the list, if both match a particular
warning.
Alternatively, you can configure warnings within the context of the
fixture.
See `the Python documentation`__ for more information.
__: https://docs.python.org/3/library/warnings.html#the-warnings-filter
"""
super().__init__()
self.filters = filters or []
|
(self, filters=None)
|
62,444 |
fixtures._fixtures.warnings
|
_reset_warning_filters
| null |
def _reset_warning_filters(self):
warnings.filters[:] = self._original_warning_filters
|
(self)
|
62,445 |
fixtures._fixtures.warnings
|
_setUp
| null |
def _setUp(self):
self._original_warning_filters = warnings.filters[:]
for filt in self.filters:
warnings.filterwarnings(**filt)
self.addCleanup(self._reset_warning_filters)
|
(self)
|
62,456 |
fixtures
|
load_tests
| null |
def load_tests(loader, standard_tests, pattern):
standard_tests.addTests(loader.loadTestsFromNames(["fixtures.tests"]))
return standard_tests
|
(loader, standard_tests, pattern)
|
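A hedged sketch of how the stdlib load_tests protocol picks this hook up: unittest's loader calls the module-level load_tests function when loading tests from the package, so the resulting suite contains fixtures.tests.
import unittest

import fixtures

loader = unittest.TestLoader()
# loadTestsFromModule() detects the module-level load_tests() and calls it.
suite = loader.loadTestsFromModule(fixtures)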
62,457 |
fixtures
|
test_suite
| null |
def test_suite():
import fixtures.tests
return fixtures.tests.test_suite()
|
()
|
62,459 |
plyfile
|
PlyData
|
PLY file header and data.
A `PlyData` instance is created in one of two ways: by the static
method `PlyData.read` (to read a PLY file), or directly from
`__init__` given a sequence of elements (which can then be written
to a PLY file).
Attributes
----------
elements : list of PlyElement
comments : list of str
obj_info : list of str
text : bool
byte_order : {'<', '>', '='}
header : str
|
class PlyData(object):
"""
PLY file header and data.
A `PlyData` instance is created in one of two ways: by the static
method `PlyData.read` (to read a PLY file), or directly from
`__init__` given a sequence of elements (which can then be written
to a PLY file).
Attributes
----------
elements : list of PlyElement
comments : list of str
obj_info : list of str
text : bool
byte_order : {'<', '>', '='}
header : str
"""
def __init__(self, elements=[], text=False, byte_order='=',
comments=[], obj_info=[]):
"""
Parameters
----------
elements : iterable of PlyElement
text : bool, optional
Whether the resulting PLY file will be text (True) or
binary (False).
byte_order : {'<', '>', '='}, optional
`'<'` for little-endian, `'>'` for big-endian, or `'='`
for native. This is only relevant if `text` is False.
comments : iterable of str, optional
Comment lines between "ply" and "format" lines.
obj_info : iterable of str, optional
like comments, but will be placed in the header with
"obj_info ..." instead of "comment ...".
"""
self.byte_order = byte_order
self.text = text
self.comments = comments
self.obj_info = obj_info
self.elements = elements
def _get_elements(self):
return self._elements
def _set_elements(self, elements):
self._elements = tuple(elements)
self._index()
elements = property(_get_elements, _set_elements)
def _get_byte_order(self):
if not self.text and self._byte_order == '=':
return _native_byte_order
return self._byte_order
def _set_byte_order(self, byte_order):
if byte_order not in ['<', '>', '=']:
raise ValueError("byte order must be '<', '>', or '='")
self._byte_order = byte_order
byte_order = property(_get_byte_order, _set_byte_order)
def _index(self):
self._element_lookup = dict((elt.name, elt) for elt in
self._elements)
if len(self._element_lookup) != len(self._elements):
raise ValueError("two elements with same name")
def _get_comments(self):
return list(self._comments)
def _set_comments(self, comments):
_check_comments(comments)
self._comments = list(comments)
comments = property(_get_comments, _set_comments)
def _get_obj_info(self):
return list(self._obj_info)
def _set_obj_info(self, obj_info):
_check_comments(obj_info)
self._obj_info = list(obj_info)
obj_info = property(_get_obj_info, _set_obj_info)
@staticmethod
def _parse_header(stream):
parser = _PlyHeaderParser(_PlyHeaderLines(stream))
return PlyData(
[PlyElement(*e) for e in parser.elements],
parser.format == 'ascii',
_byte_order_map[parser.format],
parser.comments,
parser.obj_info
)
@staticmethod
def read(stream, mmap=True, known_list_len={}):
"""
Read PLY data from a readable file-like object or filename.
Parameters
----------
stream : str or readable open file
mmap : bool, optional (default=True)
Whether to allow element data to be memory-mapped when
possible. The default is `True`, which allows memory
mapping. Using `False` will prevent memory mapping.
known_list_len : dict, optional
Mapping from element names to mappings from list property
names to their fixed lengths. This optional argument is
necessary to enable memory mapping of elements that contain
list properties. (Note that elements with variable-length
list properties cannot be memory-mapped.)
Raises
------
PlyParseError
If the file cannot be parsed for any reason.
ValueError
If `stream` is open in text mode but the PLY header
indicates binary encoding.
"""
(must_close, stream) = _open_stream(stream, 'read')
try:
data = PlyData._parse_header(stream)
if isinstance(stream.read(0), str):
if data.text:
data_stream = stream
else:
raise ValueError("can't read binary-format PLY "
"from text stream")
else:
if data.text:
data_stream = _io.TextIOWrapper(stream, 'ascii')
else:
data_stream = stream
for elt in data:
elt._read(data_stream, data.text, data.byte_order, mmap,
known_list_len=known_list_len.get(elt.name, {}))
finally:
if must_close:
stream.close()
return data
def write(self, stream):
"""
Write PLY data to a writeable file-like object or filename.
Parameters
----------
stream : str or writeable open file
Raises
------
ValueError
If `stream` is open in text mode and the file to be written
is binary-format.
"""
(must_close, stream) = _open_stream(stream, 'write')
try:
try:
stream.write(b'')
binary_stream = True
except TypeError:
binary_stream = False
if binary_stream:
stream.write(self.header.encode('ascii'))
stream.write(b'\n')
else:
if not self.text:
raise ValueError("can't write binary-format PLY to "
"text stream")
stream.write(self.header)
stream.write('\n')
for elt in self:
elt._write(stream, self.text, self.byte_order)
finally:
if must_close:
stream.close()
@property
def header(self):
"""
PLY-formatted metadata for the instance.
"""
lines = ['ply']
if self.text:
lines.append('format ascii 1.0')
else:
lines.append('format ' +
_byte_order_reverse[self.byte_order] +
' 1.0')
# Some information is lost here, since all comments are placed
# between the 'format' line and the first element.
for c in self.comments:
lines.append('comment ' + c)
for c in self.obj_info:
lines.append('obj_info ' + c)
lines.extend(elt.header for elt in self.elements)
lines.append('end_header')
return '\n'.join(lines)
def __iter__(self):
"""
Iterate over the elements.
"""
return iter(self.elements)
def __len__(self):
"""
Return the number of elements.
"""
return len(self.elements)
def __contains__(self, name):
"""
Check if an element with the given name exists.
"""
return name in self._element_lookup
def __getitem__(self, name):
"""
Retrieve an element by name.
Parameters
----------
name : str
Returns
-------
PlyElement
Raises
------
KeyError
If the element can't be found.
"""
return self._element_lookup[name]
def __str__(self):
return self.header
def __repr__(self):
return ('PlyData(%r, text=%r, byte_order=%r, '
'comments=%r, obj_info=%r)' %
(self.elements, self.text, self.byte_order,
self.comments, self.obj_info))
|
(elements=[], text=False, byte_order='=', comments=[], obj_info=[])
|
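A hedged sketch of building and writing a small PLY file; PlyElement.describe (part of the plyfile API) turns a structured numpy array into an element, and the file name is illustrative.
import numpy as np

from plyfile import PlyData, PlyElement

vertices = np.array(
    [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)],
    dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')],
)
element = PlyElement.describe(vertices, 'vertex')

# text=True writes ASCII; text=False would write binary using byte_order.
PlyData([element], text=True).write('triangle.ply')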
62,460 |
plyfile
|
__contains__
|
Check if an element with the given name exists.
|
def __contains__(self, name):
"""
Check if an element with the given name exists.
"""
return name in self._element_lookup
|
(self, name)
|
62,461 |
plyfile
|
__getitem__
|
Retrieve an element by name.
Parameters
----------
name : str
Returns
-------
PlyElement
Raises
------
KeyError
If the element can't be found.
|
def __getitem__(self, name):
"""
Retrieve an element by name.
Parameters
----------
name : str
Returns
-------
PlyElement
Raises
------
KeyError
If the element can't be found.
"""
return self._element_lookup[name]
|
(self, name)
|
62,462 |
plyfile
|
__init__
|
Parameters
----------
elements : iterable of PlyElement
text : bool, optional
Whether the resulting PLY file will be text (True) or
binary (False).
byte_order : {'<', '>', '='}, optional
`'<'` for little-endian, `'>'` for big-endian, or `'='`
for native. This is only relevant if `text` is False.
comments : iterable of str, optional
Comment lines between "ply" and "format" lines.
obj_info : iterable of str, optional
like comments, but will be placed in the header with
"obj_info ..." instead of "comment ...".
|
def __init__(self, elements=[], text=False, byte_order='=',
comments=[], obj_info=[]):
"""
Parameters
----------
elements : iterable of PlyElement
text : bool, optional
Whether the resulting PLY file will be text (True) or
binary (False).
byte_order : {'<', '>', '='}, optional
`'<'` for little-endian, `'>'` for big-endian, or `'='`
for native. This is only relevant if `text` is False.
comments : iterable of str, optional
Comment lines between "ply" and "format" lines.
obj_info : iterable of str, optional
like comments, but will be placed in the header with
"obj_info ..." instead of "comment ...".
"""
self.byte_order = byte_order
self.text = text
self.comments = comments
self.obj_info = obj_info
self.elements = elements
|
(self, elements=[], text=False, byte_order='=', comments=[], obj_info=[])
|
62,463 |
plyfile
|
__iter__
|
Iterate over the elements.
|
def __iter__(self):
"""
Iterate over the elements.
"""
return iter(self.elements)
|
(self)
|
62,464 |
plyfile
|
__len__
|
Return the number of elements.
|
def __len__(self):
"""
Return the number of elements.
"""
return len(self.elements)
|
(self)
|
62,465 |
plyfile
|
__repr__
| null |
def __repr__(self):
return ('PlyData(%r, text=%r, byte_order=%r, '
'comments=%r, obj_info=%r)' %
(self.elements, self.text, self.byte_order,
self.comments, self.obj_info))
|
(self)
|
62,466 |
plyfile
|
__str__
| null |
def __str__(self):
return self.header
|
(self)
|
62,467 |
plyfile
|
_get_byte_order
| null |
def _get_byte_order(self):
if not self.text and self._byte_order == '=':
return _native_byte_order
return self._byte_order
|
(self)
|
62,468 |
plyfile
|
_get_comments
| null |
def _get_comments(self):
return list(self._comments)
|
(self)
|
62,469 |
plyfile
|
_get_elements
| null |
def _get_elements(self):
return self._elements
|
(self)
|
62,470 |
plyfile
|
_get_obj_info
| null |
def _get_obj_info(self):
return list(self._obj_info)
|
(self)
|
62,471 |
plyfile
|
_index
| null |
def _index(self):
self._element_lookup = dict((elt.name, elt) for elt in
self._elements)
if len(self._element_lookup) != len(self._elements):
raise ValueError("two elements with same name")
|
(self)
|
62,472 |
plyfile
|
_parse_header
| null |
@staticmethod
def _parse_header(stream):
parser = _PlyHeaderParser(_PlyHeaderLines(stream))
return PlyData(
[PlyElement(*e) for e in parser.elements],
parser.format == 'ascii',
_byte_order_map[parser.format],
parser.comments,
parser.obj_info
)
|
(stream)
|
62,473 |
plyfile
|
_set_byte_order
| null |
def _set_byte_order(self, byte_order):
if byte_order not in ['<', '>', '=']:
raise ValueError("byte order must be '<', '>', or '='")
self._byte_order = byte_order
|
(self, byte_order)
|
62,474 |
plyfile
|
_set_comments
| null |
def _set_comments(self, comments):
_check_comments(comments)
self._comments = list(comments)
|
(self, comments)
|
62,475 |
plyfile
|
_set_elements
| null |
def _set_elements(self, elements):
self._elements = tuple(elements)
self._index()
|
(self, elements)
|
62,476 |
plyfile
|
_set_obj_info
| null |
def _set_obj_info(self, obj_info):
_check_comments(obj_info)
self._obj_info = list(obj_info)
|
(self, obj_info)
|
62,477 |
plyfile
|
read
|
Read PLY data from a readable file-like object or filename.
Parameters
----------
stream : str or readable open file
mmap : bool, optional (default=True)
Whether to allow element data to be memory-mapped when
possible. The default is `True`, which allows memory
mapping. Using `False` will prevent memory mapping.
known_list_len : dict, optional
Mapping from element names to mappings from list property
names to their fixed lengths. This optional argument is
necessary to enable memory mapping of elements that contain
list properties. (Note that elements with variable-length
list properties cannot be memory-mapped.)
Raises
------
PlyParseError
If the file cannot be parsed for any reason.
ValueError
If `stream` is open in text mode but the PLY header
indicates binary encoding.
|
@staticmethod
def read(stream, mmap=True, known_list_len={}):
"""
Read PLY data from a readable file-like object or filename.
Parameters
----------
stream : str or readable open file
mmap : bool, optional (default=True)
Whether to allow element data to be memory-mapped when
possible. The default is `True`, which allows memory
mapping. Using `False` will prevent memory mapping.
known_list_len : dict, optional
Mapping from element names to mappings from list property
names to their fixed lengths. This optional argument is
necessary to enable memory mapping of elements that contain
list properties. (Note that elements with variable-length
list properties cannot be memory-mapped.)
Raises
------
PlyParseError
If the file cannot be parsed for any reason.
ValueError
If `stream` is open in text mode but the PLY header
indicates binary encoding.
"""
(must_close, stream) = _open_stream(stream, 'read')
try:
data = PlyData._parse_header(stream)
if isinstance(stream.read(0), str):
if data.text:
data_stream = stream
else:
raise ValueError("can't read binary-format PLY "
"from text stream")
else:
if data.text:
data_stream = _io.TextIOWrapper(stream, 'ascii')
else:
data_stream = stream
for elt in data:
elt._read(data_stream, data.text, data.byte_order, mmap,
known_list_len=known_list_len.get(elt.name, {}))
finally:
if must_close:
stream.close()
return data
|
(stream, mmap=True, known_list_len={})
|
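A hedged sketch of reading the file written above; the element and property names ('vertex', 'face', 'vertex_indices') are illustrative and depend on the file being read.
from plyfile import PlyData

ply = PlyData.read('triangle.ply')
vertex = ply['vertex']  # elements are looked up by name via __getitem__
print(len(vertex.data), vertex.data['x'])

# For binary files with fixed-length list properties, supplying the known
# length lets the element data be memory-mapped:
# ply = PlyData.read('mesh.ply',
#                    known_list_len={'face': {'vertex_indices': 3}})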