index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
43,890 |
pybtex
|
format_from_files
|
Read the bigliography data from the given files and produce a formated
bibliography.
This is an abstract method overridden by both
:py:class:`pybtex.PybtexEngine` and :py:class:`pybtex.bibtex.BibTeXEngine`.
|
def format_from_files(*args, **kwargs):
"""
Read the bigliography data from the given files and produce a formated
bibliography.
This is an abstract method overridden by both
:py:class:`pybtex.PybtexEngine` and :py:class:`pybtex.bibtex.BibTeXEngine`.
"""
raise NotImplementedError
|
(*args, **kwargs)
|
43,891 |
pybtex
|
format_from_string
|
Parse the bigliography data from the given string and produce a formated
bibliography using :py:meth:`~.Engine.format_from_files`.
This is a convenience method that calls
:py:meth:`~.Engine.format_from_strings` with a single string.
|
def format_from_string(self, bib_string, *args, **kwargs):
"""
Parse the bigliography data from the given string and produce a formated
bibliography using :py:meth:`~.Engine.format_from_files`.
This is a convenience method that calls
:py:meth:`~.Engine.format_from_strings` with a single string.
"""
return self.format_from_strings([bib_string], *args, **kwargs)
|
(self, bib_string, *args, **kwargs)
|
43,892 |
pybtex
|
format_from_strings
|
Parse the bigliography data from the given strings and produce a formated
bibliography.
This is a convenience method that wraps each string into a StringIO,
then calls :py:meth:`~.Engine.format_from_files`.
|
def format_from_strings(self, bib_strings, *args, **kwargs):
"""
Parse the bigliography data from the given strings and produce a formated
bibliography.
This is a convenience method that wraps each string into a StringIO,
then calls :py:meth:`~.Engine.format_from_files`.
"""
from io import StringIO
inputs = [StringIO(bib_string) for bib_string in bib_strings]
return self.format_from_files(inputs, *args, **kwargs)
|
(self, bib_strings, *args, **kwargs)
|
43,893 |
pybtex
|
make_bibliography
|
Read the given ``.aux`` file and produce a formatted bibliography
using :py:meth:`~.Engine.format_from_files`.
:param style: If not ``None``, use this style instead of specified in the ``.aux`` file.
|
def make_bibliography(self, aux_filename, style=None, output_encoding=None, bib_format=None, **kwargs):
"""
Read the given ``.aux`` file and produce a formatted bibliography
using :py:meth:`~.Engine.format_from_files`.
:param style: If not ``None``, use this style instead of specified in the ``.aux`` file.
"""
from pybtex import auxfile
if bib_format is None:
from pybtex.database.input.bibtex import Parser as bib_format
aux_data = auxfile.parse_file(aux_filename, output_encoding)
if style is None:
style = aux_data.style
base_filename = path.splitext(aux_filename)[0]
bib_filenames = [filename + bib_format.default_suffix for filename in aux_data.data]
return self.format_from_files(
bib_filenames,
style=aux_data.style,
citations=aux_data.citations,
output_encoding=output_encoding,
output_filename=base_filename,
add_output_suffix=True,
**kwargs
)
|
(self, aux_filename, style=None, output_encoding=None, bib_format=None, **kwargs)
|
43,894 |
pybtex
|
PybtexEngine
|
The Python fomatting engine.
See :py:class:`pybtex.Engine` for inherited methods.
|
class PybtexEngine(Engine):
"""
The Python fomatting engine.
See :py:class:`pybtex.Engine` for inherited methods.
"""
def format_from_files(
self,
bib_files_or_filenames,
style,
citations=['*'],
bib_format=None,
bib_encoding=None,
output_backend=None,
output_encoding=None,
min_crossrefs=2,
output_filename=None,
add_output_suffix=False,
**kwargs
):
"""
Read the bigliography data from the given files and produce a formated
bibliography.
:param bib_files_or_filenames: A list of file names or file objects.
:param style: The name of the formatting style.
:param citations: A list of citation keys.
:param bib_format: The name of the bibliography format. The default
format is ``bibtex``.
:param bib_encoding: Encoding of bibliography files.
:param output_backend: Which output backend to use. The default is ``latex``.
:param output_encoding: Encoding that will be used by the output backend.
:param bst_encoding: Encoding of the ``.bst`` file.
:param min_crossrefs: Include cross-referenced entries after this many
crossrefs. See BibTeX manual for details.
:param output_filename: If ``None``, the result will be returned as a
string. Else, the result will be written to the specified file.
:param add_output_suffix: Append default suffix to the output file
name (``.bbl`` for LaTeX, ``.html`` for HTML, etc.).
"""
from pybtex.plugin import find_plugin
bib_parser = find_plugin('pybtex.database.input', bib_format)
bib_data = bib_parser(
encoding=bib_encoding,
wanted_entries=citations,
min_crossrefs=min_crossrefs,
).parse_files(bib_files_or_filenames)
style_cls = find_plugin('pybtex.style.formatting', style)
style = style_cls(
label_style=kwargs.get('label_style'),
name_style=kwargs.get('name_style'),
sorting_style=kwargs.get('sorting_style'),
abbreviate_names=kwargs.get('abbreviate_names'),
min_crossrefs=min_crossrefs,
)
formatted_bibliography = style.format_bibliography(bib_data, citations)
output_backend = find_plugin('pybtex.backends', output_backend)
if add_output_suffix:
output_filename = output_filename + output_backend.default_suffix
if not output_filename:
import io
output_filename = io.StringIO()
return output_backend(output_encoding).write_to_file(formatted_bibliography, output_filename)
|
()
|
43,896 |
pybtex
|
format_from_files
|
Read the bigliography data from the given files and produce a formated
bibliography.
:param bib_files_or_filenames: A list of file names or file objects.
:param style: The name of the formatting style.
:param citations: A list of citation keys.
:param bib_format: The name of the bibliography format. The default
format is ``bibtex``.
:param bib_encoding: Encoding of bibliography files.
:param output_backend: Which output backend to use. The default is ``latex``.
:param output_encoding: Encoding that will be used by the output backend.
:param bst_encoding: Encoding of the ``.bst`` file.
:param min_crossrefs: Include cross-referenced entries after this many
crossrefs. See BibTeX manual for details.
:param output_filename: If ``None``, the result will be returned as a
string. Else, the result will be written to the specified file.
:param add_output_suffix: Append default suffix to the output file
name (``.bbl`` for LaTeX, ``.html`` for HTML, etc.).
|
def format_from_files(
self,
bib_files_or_filenames,
style,
citations=['*'],
bib_format=None,
bib_encoding=None,
output_backend=None,
output_encoding=None,
min_crossrefs=2,
output_filename=None,
add_output_suffix=False,
**kwargs
):
"""
Read the bigliography data from the given files and produce a formated
bibliography.
:param bib_files_or_filenames: A list of file names or file objects.
:param style: The name of the formatting style.
:param citations: A list of citation keys.
:param bib_format: The name of the bibliography format. The default
format is ``bibtex``.
:param bib_encoding: Encoding of bibliography files.
:param output_backend: Which output backend to use. The default is ``latex``.
:param output_encoding: Encoding that will be used by the output backend.
:param bst_encoding: Encoding of the ``.bst`` file.
:param min_crossrefs: Include cross-referenced entries after this many
crossrefs. See BibTeX manual for details.
:param output_filename: If ``None``, the result will be returned as a
string. Else, the result will be written to the specified file.
:param add_output_suffix: Append default suffix to the output file
name (``.bbl`` for LaTeX, ``.html`` for HTML, etc.).
"""
from pybtex.plugin import find_plugin
bib_parser = find_plugin('pybtex.database.input', bib_format)
bib_data = bib_parser(
encoding=bib_encoding,
wanted_entries=citations,
min_crossrefs=min_crossrefs,
).parse_files(bib_files_or_filenames)
style_cls = find_plugin('pybtex.style.formatting', style)
style = style_cls(
label_style=kwargs.get('label_style'),
name_style=kwargs.get('name_style'),
sorting_style=kwargs.get('sorting_style'),
abbreviate_names=kwargs.get('abbreviate_names'),
min_crossrefs=min_crossrefs,
)
formatted_bibliography = style.format_bibliography(bib_data, citations)
output_backend = find_plugin('pybtex.backends', output_backend)
if add_output_suffix:
output_filename = output_filename + output_backend.default_suffix
if not output_filename:
import io
output_filename = io.StringIO()
return output_backend(output_encoding).write_to_file(formatted_bibliography, output_filename)
|
(self, bib_files_or_filenames, style, citations=['*'], bib_format=None, bib_encoding=None, output_backend=None, output_encoding=None, min_crossrefs=2, output_filename=None, add_output_suffix=False, **kwargs)
|
43,900 |
pybtex
|
format_from_file
|
A convenience function that calls :py:meth:`.PybtexEngine.format_from_file`.
|
def format_from_file(*args, **kwargs):
"""A convenience function that calls :py:meth:`.PybtexEngine.format_from_file`."""
return PybtexEngine().format_from_file(*args, **kwargs)
|
(*args, **kwargs)
|
43,901 |
pybtex
|
format_from_files
|
A convenience function that calls :py:meth:`.PybtexEngine.format_from_files`.
|
def format_from_files(*args, **kwargs):
"""A convenience function that calls :py:meth:`.PybtexEngine.format_from_files`."""
return PybtexEngine().format_from_files(*args, **kwargs)
|
(*args, **kwargs)
|
43,902 |
pybtex
|
format_from_string
|
A convenience function that calls :py:meth:`.PybtexEngine.format_from_string`.
|
def format_from_string(*args, **kwargs):
"""A convenience function that calls :py:meth:`.PybtexEngine.format_from_string`."""
return PybtexEngine().format_from_string(*args, **kwargs)
|
(*args, **kwargs)
|
43,903 |
pybtex
|
format_from_strings
|
A convenience function that calls :py:meth:`.PybtexEngine.format_from_strings`.
|
def format_from_strings(*args, **kwargs):
"""A convenience function that calls :py:meth:`.PybtexEngine.format_from_strings`."""
return PybtexEngine().format_from_strings(*args, **kwargs)
|
(*args, **kwargs)
|
43,904 |
pybtex
|
make_bibliography
|
A convenience function that calls :py:meth:`.PybtexEngine.make_bibliography`.
|
def make_bibliography(*args, **kwargs):
"""A convenience function that calls :py:meth:`.PybtexEngine.make_bibliography`."""
return PybtexEngine().make_bibliography(*args, **kwargs)
|
(*args, **kwargs)
|
43,906 |
iso4217
|
Currency
|
ISO 4217 currency. Its enumerants are ISO 4217 currencies except for
some special currencies like ```XXX``. Enumerants names are lowercase
cureency code e.g. :attr:`Currency.eur`, :attr:`Currency.usd`.
|
class Currency(enum.Enum):
"""ISO 4217 currency. Its enumerants are ISO 4217 currencies except for
some special currencies like ```XXX``. Enumerants names are lowercase
cureency code e.g. :attr:`Currency.eur`, :attr:`Currency.usd`.
"""
update_enum_dict(locals(), raw_table)
@property
def code(self):
"""(:class:`str`) The currency code which consist of 3 uppercase
characters e.g. ``'USD'``, ``'EUR'``.
"""
return self.value
@property
def number(self):
"""(:class:`int`) The currency number."""
return int(raw_table[self.value]['CcyNbr'])
@property
def currency_name(self):
"""(:class:`str`) The human-readable name of the currency e.g.
``'US Dollar'``, ``'Euro'``.
"""
return raw_table[self.value]['CcyNm']
@property
def country_names(self):
return frozenset(raw_table[self.value]['CtryNm'])
@property
def exponent(self):
"""(:class:`int`) The treatment of minor currency unit, in exponent
where base is 10. For example, a U.S. dollar is 100 cents,
so ``Currency.usd.exponent == 2``.
There are also currencies that have no minor ucrrency unit.
These are represented as 0.
Minor currency unit cannot be applicable for some `X currencies`__.
These include, for instance, precious metal gold ``XAU`` or ``XTS``
which is for testing purpose.
__ https://en.wikipedia.org/wiki/ISO_4217#X_currencies
"""
minor_units = raw_table[self.value]['CcyMnrUnts']
return None if minor_units == 'N.A.' else int(minor_units)
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
43,910 |
importlib._common
|
files
|
Get a Traversable resource from a package
|
def files(package):
# type: (Package) -> Traversable
"""
Get a Traversable resource from a package
"""
return from_package(get_package(package))
|
(package)
|
43,912 |
iso4217
|
parse_published
| null |
def parse_published(pblshd):
if '-' in pblshd and ',' not in pblshd:
return datetime.date(*map(int, raw_xml.attrib['Pblshd'].split('-')))
lc, enc = locale.getlocale(locale.LC_TIME)
lc_time = "{0}.{1}".format(lc, enc) if enc else lc
locale.setlocale(locale.LC_TIME, 'C')
published = datetime.datetime.strptime(pblshd, "%B %d, %Y").date()
locale.setlocale(locale.LC_TIME, lc_time)
return published
|
(pblshd)
|
43,913 |
iso4217
|
parse_table
|
Parse an ISO 4217 XML table data and then return raw table as
a dictionary.
|
def parse_table(tree):
"""Parse an ISO 4217 XML table data and then return raw table as
a dictionary.
"""
table = {}
for node in tree.findall('CcyTbl/CcyNtry'):
ctry_nm = node.find('CtryNm')
if ctry_nm is not None:
ctry_nm = ctry_nm.text.strip()
ccy_nm = node.find('CcyNm')
if ccy_nm is not None:
ccy_nm = ccy_nm.text.strip()
ccy = node.find('Ccy')
if ccy is not None:
ccy = ccy.text.strip()
ccy_nbr = node.find('CcyNbr')
if ccy_nbr is not None:
ccy_nbr = int(ccy_nbr.text.strip())
ccy_mnr_unts = node.find('CcyMnrUnts')
if ccy_mnr_unts is not None:
ccy_mnr_unts = ccy_mnr_unts.text.strip()
try:
ccy_dict = table[ccy]
except KeyError:
table[ccy] = {
'CtryNm': set([ctry_nm]),
'CcyNm': ccy_nm,
'Ccy': ccy,
'CcyNbr': ccy_nbr,
'CcyMnrUnts': ccy_mnr_unts,
}
else:
ccy_dict['CtryNm'].add(ctry_nm)
return table
|
(tree)
|
43,914 |
iso4217
|
update_enum_dict
|
Since :mod:`enum` module's class-level locals dictionary is
not an ordinary Python :class:`dict`, so leaking local variables
make unexpected behaviors.
This function takes a ``locals_`` dictionary and then add
all available enumerants to this.
|
def update_enum_dict(locals_, raw_table):
"""Since :mod:`enum` module's class-level locals dictionary is
not an ordinary Python :class:`dict`, so leaking local variables
make unexpected behaviors.
This function takes a ``locals_`` dictionary and then add
all available enumerants to this.
"""
enumerants = {}
for code, _ccy_ntry in raw_table.items():
if _ccy_ntry['CcyNbr'] is None:
continue
minor_units = _ccy_ntry.get('CcyMnrUnts', '')
if not minor_units.isdigit() and minor_units != 'N.A.':
continue
lcode = code.lower()
if lcode in ('mro',):
lcode += '_'
enumerants[lcode] = code
for code, enumerant in enumerants.items():
locals_[code] = enumerant
|
(locals_, raw_table)
|
43,916 |
jsons.exceptions
|
DecodeError
|
Raised when decoding a string or bytes to Python types failed. This error
is actually a wrapper around `json.JSONDecodeError`.
|
class DecodeError(DeserializationError, JSONDecodeError):
"""
Raised when decoding a string or bytes to Python types failed. This error
is actually a wrapper around `json.JSONDecodeError`.
"""
def __init__(self, message: str, source: object, target: type,
error: JSONDecodeError):
"""
Constructor.
:param message: the message of this error.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
:param error: the wrapped `JSONDecodeError`.
"""
DeserializationError.__init__(self, message, source, target)
JSONDecodeError.__init__(self, message, error.doc, error.pos)
|
(message: str, source: object, target: type, error: json.decoder.JSONDecodeError)
|
43,917 |
jsons.exceptions
|
__init__
|
Constructor.
:param message: the message of this error.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
:param error: the wrapped `JSONDecodeError`.
|
def __init__(self, message: str, source: object, target: type,
error: JSONDecodeError):
"""
Constructor.
:param message: the message of this error.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
:param error: the wrapped `JSONDecodeError`.
"""
DeserializationError.__init__(self, message, source, target)
JSONDecodeError.__init__(self, message, error.doc, error.pos)
|
(self, message: str, source: object, target: type, error: json.decoder.JSONDecodeError)
|
43,919 |
jsons.exceptions
|
DeserializationError
|
Raised when deserialization failed for some reason.
|
class DeserializationError(JsonsError):
"""
Raised when deserialization failed for some reason.
"""
def __init__(self, message: str, source: object, target: Optional[type]):
"""
Constructor.
:param message: the message describing the problem.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
"""
JsonsError.__init__(self, message)
self._source = source
self._target = target
@property
def source(self) -> object:
"""
The object that was to be deserialized.
:return: the object that was to be deserialized.
"""
return self._source
@property
def target(self) -> Optional[type]:
"""
The target type to which `source` was to be deserialized.
:return: the type to which `source` was to be deserialized.
"""
return self._target
|
(message: str, source: object, target: Optional[type])
|
43,920 |
jsons.exceptions
|
__init__
|
Constructor.
:param message: the message describing the problem.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
|
def __init__(self, message: str, source: object, target: Optional[type]):
"""
Constructor.
:param message: the message describing the problem.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
"""
JsonsError.__init__(self, message)
self._source = source
self._target = target
|
(self, message: str, source: object, target: Optional[type])
|
43,922 |
enum
|
IntEnum
|
Enum where members are also (and must be) ints
|
class IntEnum(int, Enum):
"""Enum where members are also (and must be) ints"""
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
43,923 |
jsons.exceptions
|
InvalidDecorationError
|
Raised when a jsons decorator was wrongly used.
|
class InvalidDecorationError(JsonsError):
"""
Raised when a jsons decorator was wrongly used.
"""
def __init__(self, message: str):
"""
Constructor.
:param message: the message of this error.
"""
JsonsError.__init__(self, message)
|
(message: str)
|
43,924 |
jsons.exceptions
|
__init__
|
Constructor.
:param message: the message of this error.
|
def __init__(self, message: str):
"""
Constructor.
:param message: the message of this error.
"""
JsonsError.__init__(self, message)
|
(self, message: str)
|
43,925 |
jsons.classes.json_serializable
|
JsonSerializable
|
This class offers an alternative to using the ``jsons.load`` and
``jsons.dump`` methods. An instance of a class that inherits from
``JsonSerializable`` has the ``json`` property, which value is equivalent
to calling ``jsons.dump`` on that instance. Furthermore, you can call
``from_json`` on that class, which is equivalent to calling ``json.load``
with that class as an argument.
|
class JsonSerializable(StateHolder):
"""
This class offers an alternative to using the ``jsons.load`` and
``jsons.dump`` methods. An instance of a class that inherits from
``JsonSerializable`` has the ``json`` property, which value is equivalent
to calling ``jsons.dump`` on that instance. Furthermore, you can call
``from_json`` on that class, which is equivalent to calling ``json.load``
with that class as an argument.
"""
@classmethod
def fork(cls, name: Optional[str] = None) -> Type['JsonSerializable']:
"""
Create a 'fork' of ``JsonSerializable``: a new ``type`` with a separate
configuration of serializers and deserializers.
:param name: the ``__name__`` of the new ``type``.
:return: a new ``type`` based on ``JsonSerializable``.
"""
return fork(cls, name=name)
@classmethod
def with_dump(cls, fork: Optional[bool] = False, **kwargs) \
-> Type['JsonSerializable']:
"""
Return a class (``type``) that is based on JsonSerializable with the
``dump`` method being automatically provided the given ``kwargs``.
**Example:**
>>> custom_serializable = JsonSerializable\
.with_dump(key_transformer=KEY_TRANSFORMER_CAMELCASE)
>>> class Person(custom_serializable):
... def __init__(self, my_name):
... self.my_name = my_name
>>> p = Person('John')
>>> p.json
{'myName': 'John'}
:param kwargs: the keyword args that are automatically provided to the
``dump`` method.
:param fork: determines that a new fork is to be created.
:return: a class with customized behavior.
"""
def _wrapper(inst, **kwargs_):
return dump(inst, **{**kwargs_, **kwargs})
type_ = cls.fork() if fork else cls
type_.dump = _wrapper
return type_
@classmethod
def with_load(cls, fork: Optional[bool] = False, **kwargs) \
-> Type['JsonSerializable']:
"""
Return a class (``type``) that is based on JsonSerializable with the
``load`` method being automatically provided the given ``kwargs``.
**Example:**
>>> custom_serializable = JsonSerializable\
.with_load(key_transformer=KEY_TRANSFORMER_SNAKECASE)
>>> class Person(custom_serializable):
... def __init__(self, my_name):
... self.my_name = my_name
>>> p_json = {'myName': 'John'}
>>> p = Person.from_json(p_json)
>>> p.my_name
'John'
:param kwargs: the keyword args that are automatically provided to the
``load`` method.
:param fork: determines that a new fork is to be created.
:return: a class with customized behavior.
"""
@classmethod
def _wrapper(cls_, inst, **kwargs_):
return load(inst, cls_, fork_inst=cls_, **{**kwargs_, **kwargs})
type_ = cls.fork() if fork else cls
type_.load = _wrapper
return type_
@property
def json(self) -> object:
"""
See ``jsons.dump``.
:return: this instance in a JSON representation (dict).
"""
return self.dump()
def __str__(self) -> str:
"""
See ``jsons.dumps``.
:return: this instance as a JSON string.
"""
return self.dumps()
@classmethod
def from_json(cls: Type[T], json_obj: object, **kwargs) -> T:
"""
See ``jsons.load``.
:param json_obj: a JSON representation of an instance of the inheriting
class
:param kwargs: the keyword args are passed on to the deserializer
function.
:return: an instance of the inheriting class.
"""
return cls.load(json_obj, **kwargs)
def dump(self, **kwargs) -> object:
"""
See ``jsons.dump``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance in a JSON representation (dict).
"""
return dump(self, fork_inst=self.__class__, **kwargs)
@classmethod
def load(cls: Type[T], json_obj: object, **kwargs) -> T:
"""
See ``jsons.load``.
:param kwargs: the keyword args are passed on to the serializer
function.
:param json_obj: the object that is loaded into an instance of `cls`.
:return: this instance in a JSON representation (dict).
"""
return load(json_obj, cls, fork_inst=cls, **kwargs)
def dumps(self, **kwargs) -> str:
"""
See ``jsons.dumps``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance as a JSON string.
"""
return dumps(self, fork_inst=self.__class__, **kwargs)
@classmethod
def loads(cls: Type[T], json_obj: str, **kwargs) -> T:
"""
See ``jsons.loads``.
:param kwargs: the keyword args are passed on to the serializer
function.
:param json_obj: the object that is loaded into an instance of `cls`.
:return: this instance in a JSON representation (dict).
"""
return loads(json_obj, cls, fork_inst=cls, **kwargs)
def dumpb(self, **kwargs) -> bytes:
"""
See ``jsons.dumpb``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance as a JSON string.
"""
return dumpb(self, fork_inst=self.__class__, **kwargs)
@classmethod
def loadb(cls: Type[T], json_obj: bytes, **kwargs) -> T:
"""
See ``jsons.loadb``.
:param kwargs: the keyword args are passed on to the serializer
function.
:param json_obj: the object that is loaded into an instance of `cls`.
:return: this instance in a JSON representation (dict).
"""
return loadb(json_obj, cls, fork_inst=cls, **kwargs)
@classmethod
def set_serializer(cls: Type[T],
func: callable,
cls_: type,
high_prio: Optional[bool] = True,
fork: Optional[bool] = False) -> T:
"""
See ``jsons.set_serializer``.
:param func: the serializer function.
:param cls_: the type this serializer can handle.
:param high_prio: determines the order in which is looked for the
callable.
:param fork: determines that a new fork is to be created.
:return: the type on which this method is invoked or its fork.
"""
type_ = cls.fork() if fork else cls
set_serializer(func, cls_, high_prio, type_)
return type_
@classmethod
def set_deserializer(cls: Type[T],
func: callable,
cls_: type,
high_prio: Optional[bool] = True,
fork: Optional[bool] = False) -> T:
"""
See ``jsons.set_deserializer``.
:param func: the deserializer function.
:param cls_: the type this serializer can handle.
:param high_prio: determines the order in which is looked for the
callable.
:param fork: determines that a new fork is to be created.
:return: the type on which this method is invoked or its fork.
"""
type_ = cls.fork() if fork else cls
set_deserializer(func, cls_, high_prio, type_)
return type_
|
()
|
43,926 |
jsons.classes.json_serializable
|
__str__
|
See ``jsons.dumps``.
:return: this instance as a JSON string.
|
def __str__(self) -> str:
"""
See ``jsons.dumps``.
:return: this instance as a JSON string.
"""
return self.dumps()
|
(self) -> str
|
43,927 |
jsons.classes.json_serializable
|
dump
|
See ``jsons.dump``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance in a JSON representation (dict).
|
def dump(self, **kwargs) -> object:
"""
See ``jsons.dump``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance in a JSON representation (dict).
"""
return dump(self, fork_inst=self.__class__, **kwargs)
|
(self, **kwargs) -> object
|
43,928 |
jsons.classes.json_serializable
|
dumpb
|
See ``jsons.dumpb``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance as a JSON string.
|
def dumpb(self, **kwargs) -> bytes:
"""
See ``jsons.dumpb``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance as a JSON string.
"""
return dumpb(self, fork_inst=self.__class__, **kwargs)
|
(self, **kwargs) -> bytes
|
43,929 |
jsons.classes.json_serializable
|
dumps
|
See ``jsons.dumps``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance as a JSON string.
|
def dumps(self, **kwargs) -> str:
"""
See ``jsons.dumps``.
:param kwargs: the keyword args are passed on to the serializer
function.
:return: this instance as a JSON string.
"""
return dumps(self, fork_inst=self.__class__, **kwargs)
|
(self, **kwargs) -> str
|
43,930 |
jsons.exceptions
|
JsonsError
|
Base class for all `jsons` errors.
|
class JsonsError(Exception):
"""
Base class for all `jsons` errors.
"""
def __init__(self, message: str):
"""
Constructor.
:param message: the message describing the problem.
"""
Exception.__init__(self, message)
self._message = message
@property
def message(self):
return self._message
|
(message: str)
|
43,931 |
jsons.exceptions
|
__init__
|
Constructor.
:param message: the message describing the problem.
|
def __init__(self, message: str):
"""
Constructor.
:param message: the message describing the problem.
"""
Exception.__init__(self, message)
self._message = message
|
(self, message: str)
|
43,932 |
jsons._key_transformers
|
camelcase
|
Return ``s`` in camelCase.
:param str_: the string that is to be transformed.
:return: a string in camelCase.
|
def camelcase(str_: str) -> str:
"""
Return ``s`` in camelCase.
:param str_: the string that is to be transformed.
:return: a string in camelCase.
"""
str_ = str_.replace('-', '_')
splitted = str_.split('_')
if len(splitted) > 1:
str_ = ''.join([x.title() for x in splitted])
return str_[0].lower() + str_[1:]
|
(str_: str) -> str
|
43,933 |
jsons._key_transformers
|
lispcase
|
Return ``s`` in lisp-case.
:param str_: the string that is to be transformed.
:return: a string in lisp-case.
|
def lispcase(str_: str) -> str:
"""
Return ``s`` in lisp-case.
:param str_: the string that is to be transformed.
:return: a string in lisp-case.
"""
return snakecase(str_).replace('_', '-')
|
(str_: str) -> str
|
43,934 |
jsons._key_transformers
|
pascalcase
|
Return ``s`` in PascalCase.
:param str_: the string that is to be transformed.
:return: a string in PascalCase.
|
def pascalcase(str_: str) -> str:
"""
Return ``s`` in PascalCase.
:param str_: the string that is to be transformed.
:return: a string in PascalCase.
"""
camelcase_str = camelcase(str_)
return camelcase_str[0].upper() + camelcase_str[1:]
|
(str_: str) -> str
|
43,935 |
jsons._key_transformers
|
snakecase
|
Return ``s`` in snake_case.
:param str_: the string that is to be transformed.
:return: a string in snake_case.
|
def snakecase(str_: str) -> str:
"""
Return ``s`` in snake_case.
:param str_: the string that is to be transformed.
:return: a string in snake_case.
"""
str_ = str_.replace('-', '_')
str_ = str_[0].lower() + str_[1:]
return re.sub(r'([a-z])([A-Z])', '\\1_\\2', str_).lower()
|
(str_: str) -> str
|
43,947 |
pathlib
|
PurePath
|
Base class for manipulating paths without I/O.
PurePath represents a filesystem path and offers operations which
don't imply any actual filesystem I/O. Depending on your system,
instantiating a PurePath will return either a PurePosixPath or a
PureWindowsPath object. You can also instantiate either of these classes
directly, regardless of your system.
|
class PurePath(object):
"""Base class for manipulating paths without I/O.
PurePath represents a filesystem path and offers operations which
don't imply any actual filesystem I/O. Depending on your system,
instantiating a PurePath will return either a PurePosixPath or a
PureWindowsPath object. You can also instantiate either of these classes
directly, regardless of your system.
"""
__slots__ = (
'_drv', '_root', '_parts',
'_str', '_hash', '_pparts', '_cached_cparts',
)
def __new__(cls, *args):
    """Construct a PurePath from one or several strings and or existing
    PurePath objects. The strings and path objects are combined so as
    to yield a canonicalized path, which is incorporated into the
    new PurePath object.
    """
    # Instantiating the abstract PurePath picks the concrete flavour
    # matching the host OS.
    if cls is PurePath:
        cls = PureWindowsPath if os.name == 'nt' else PurePosixPath
    return cls._from_parts(args)
def __reduce__(self):
    """Support pickling by reconstructing from the class and parts."""
    # Using the parts tuple helps share interned path parts
    # when pickling related paths.
    return (self.__class__, tuple(self._parts))
@classmethod
def _parse_args(cls, args):
    """Canonicalize constructor arguments into (drive, root, parts)."""
    # This is useful when you don't want to create an instance, just
    # canonicalize some constructor arguments.
    parts = []
    for a in args:
        if isinstance(a, PurePath):
            parts += a._parts
        else:
            a = os.fspath(a)
            if isinstance(a, str):
                # Force-cast str subclasses to str (issue #21127)
                parts.append(str(a))
            else:
                raise TypeError(
                    "argument should be a str object or an os.PathLike "
                    "object returning str, not %r"
                    % type(a))
    return cls._flavour.parse_parts(parts)
@classmethod
def _from_parts(cls, args):
    """Build a new path instance from raw constructor arguments."""
    # We need to call _parse_args on the instance, so as to get the
    # right flavour.
    self = object.__new__(cls)
    drv, root, parts = self._parse_args(args)
    self._drv = drv
    self._root = root
    self._parts = parts
    return self
@classmethod
def _from_parsed_parts(cls, drv, root, parts):
    """Build a new path instance from already-parsed components."""
    self = object.__new__(cls)
    self._drv = drv
    self._root = root
    self._parts = parts
    return self
@classmethod
def _format_parsed_parts(cls, drv, root, parts):
    """Join parsed components back into a single path string."""
    if drv or root:
        # parts[0] already contains drv + root, so skip it when joining.
        return drv + root + cls._flavour.join(parts[1:])
    else:
        return cls._flavour.join(parts)
def _make_child(self, args):
    """Return a new path combining self with extra path arguments."""
    drv, root, parts = self._parse_args(args)
    drv, root, parts = self._flavour.join_parsed_parts(
        self._drv, self._root, self._parts, drv, root, parts)
    return self._from_parsed_parts(drv, root, parts)
def __str__(self):
    """Return the string representation of the path, suitable for
    passing to system calls."""
    try:
        return self._str
    except AttributeError:
        # Lazily compute and cache; an empty path stringifies as '.'.
        self._str = self._format_parsed_parts(self._drv, self._root,
                                              self._parts) or '.'
        return self._str
def __fspath__(self):
    """os.PathLike interface: the filesystem-path string."""
    return str(self)
def as_posix(self):
    """Return the string representation of the path with forward (/)
    slashes."""
    f = self._flavour
    return str(self).replace(f.sep, '/')
def __bytes__(self):
    """Return the bytes representation of the path. This is only
    recommended to use under Unix."""
    return os.fsencode(self)
def __repr__(self):
    """Unambiguous representation using the POSIX form of the path."""
    return "{}({!r})".format(self.__class__.__name__, self.as_posix())
def as_uri(self):
    """Return the path as a 'file' URI."""
    if not self.is_absolute():
        raise ValueError("relative path can't be expressed as a file URI")
    return self._flavour.make_uri(self)
@property
def _cparts(self):
    # Cached casefolded parts, for hashing and comparison
    try:
        return self._cached_cparts
    except AttributeError:
        self._cached_cparts = self._flavour.casefold_parts(self._parts)
        return self._cached_cparts
def __eq__(self, other):
    # Paths of different flavours never compare equal.
    if not isinstance(other, PurePath):
        return NotImplemented
    return self._cparts == other._cparts and self._flavour is other._flavour
def __hash__(self):
    # Cache the hash; consistent with __eq__ via the casefolded parts.
    try:
        return self._hash
    except AttributeError:
        self._hash = hash(tuple(self._cparts))
        return self._hash
def __lt__(self, other):
    # Ordering is only defined between paths of the same flavour.
    if not isinstance(other, PurePath) or self._flavour is not other._flavour:
        return NotImplemented
    return self._cparts < other._cparts
def __le__(self, other):
    if not isinstance(other, PurePath) or self._flavour is not other._flavour:
        return NotImplemented
    return self._cparts <= other._cparts
def __gt__(self, other):
    if not isinstance(other, PurePath) or self._flavour is not other._flavour:
        return NotImplemented
    return self._cparts > other._cparts
def __ge__(self, other):
    if not isinstance(other, PurePath) or self._flavour is not other._flavour:
        return NotImplemented
    return self._cparts >= other._cparts
def __class_getitem__(cls, type):
    # Allow subscription (e.g. PurePath[str]) for typing purposes.
    return cls
drive = property(attrgetter('_drv'),
                 doc="""The drive prefix (letter or UNC path), if any.""")
root = property(attrgetter('_root'),
                doc="""The root of the path, if any.""")
@property
def anchor(self):
    """The concatenation of the drive and root, or ''."""
    anchor = self._drv + self._root
    return anchor
@property
def name(self):
    """The final path component, if any."""
    parts = self._parts
    # When only the drive/root is present there is no final component.
    if len(parts) == (1 if (self._drv or self._root) else 0):
        return ''
    return parts[-1]
@property
def suffix(self):
    """
    The final component's last suffix, if any.
    This includes the leading period. For example: '.txt'
    """
    name = self.name
    i = name.rfind('.')
    # A leading or trailing dot does not count as a suffix separator.
    if 0 < i < len(name) - 1:
        return name[i:]
    else:
        return ''
@property
def suffixes(self):
    """
    A list of the final component's suffixes, if any.
    These include the leading periods. For example: ['.tar', '.gz']
    """
    name = self.name
    if name.endswith('.'):
        return []
    # Leading dots (hidden files) are not suffix separators.
    name = name.lstrip('.')
    return ['.' + suffix for suffix in name.split('.')[1:]]
@property
def stem(self):
    """The final path component, minus its last suffix."""
    name = self.name
    i = name.rfind('.')
    if 0 < i < len(name) - 1:
        return name[:i]
    else:
        return name
def with_name(self, name):
    """Return a new path with the file name changed."""
    if not self.name:
        raise ValueError("%r has an empty name" % (self,))
    drv, root, parts = self._flavour.parse_parts((name,))
    # Reject empty names, trailing separators, drives, roots and
    # multi-component names.
    if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep]
        or drv or root or len(parts) != 1):
        raise ValueError("Invalid name %r" % (name))
    return self._from_parsed_parts(self._drv, self._root,
                                   self._parts[:-1] + [name])
def with_stem(self, stem):
    """Return a new path with the stem changed."""
    return self.with_name(stem + self.suffix)
def with_suffix(self, suffix):
    """Return a new path with the file suffix changed. If the path
    has no suffix, add given suffix. If the given suffix is an empty
    string, remove the suffix from the path.
    """
    f = self._flavour
    if f.sep in suffix or f.altsep and f.altsep in suffix:
        raise ValueError("Invalid suffix %r" % (suffix,))
    # A non-empty suffix must start with a dot and be more than just a dot.
    if suffix and not suffix.startswith('.') or suffix == '.':
        raise ValueError("Invalid suffix %r" % (suffix))
    name = self.name
    if not name:
        raise ValueError("%r has an empty name" % (self,))
    old_suffix = self.suffix
    if not old_suffix:
        name = name + suffix
    else:
        name = name[:-len(old_suffix)] + suffix
    return self._from_parsed_parts(self._drv, self._root,
                                   self._parts[:-1] + [name])
def relative_to(self, *other):
    """Return the relative path to another path identified by the passed
    arguments. If the operation is not possible (because this is not
    a subpath of the other path), raise ValueError.
    """
    # For the purpose of this method, drive and root are considered
    # separate parts, i.e.:
    # Path('c:/').relative_to('c:') gives Path('/')
    # Path('c:/').relative_to('/') raise ValueError
    if not other:
        raise TypeError("need at least one argument")
    parts = self._parts
    drv = self._drv
    root = self._root
    if root:
        abs_parts = [drv, root] + parts[1:]
    else:
        abs_parts = parts
    to_drv, to_root, to_parts = self._parse_args(other)
    if to_root:
        to_abs_parts = [to_drv, to_root] + to_parts[1:]
    else:
        to_abs_parts = to_parts
    n = len(to_abs_parts)
    # Compare casefolded prefixes so the check honours the flavour's
    # case-sensitivity rules.
    cf = self._flavour.casefold_parts
    if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
        formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
        raise ValueError("{!r} is not in the subpath of {!r}"
                " OR one path is relative and the other is absolute."
                         .format(str(self), str(formatted)))
    return self._from_parsed_parts('', root if n == 1 else '',
                                   abs_parts[n:])
def is_relative_to(self, *other):
    """Return True if the path is relative to another path or False.
    """
    # Delegates to relative_to and interprets failure as False.
    try:
        self.relative_to(*other)
        return True
    except ValueError:
        return False
@property
def parts(self):
    """An object providing sequence-like access to the
    components in the filesystem path."""
    # We cache the tuple to avoid building a new one each time .parts
    # is accessed. XXX is this necessary?
    try:
        return self._pparts
    except AttributeError:
        self._pparts = tuple(self._parts)
        return self._pparts
def joinpath(self, *args):
    """Combine this path with one or several arguments, and return a
    new path representing either a subpath (if all arguments are relative
    paths) or a totally different path (if one of the arguments is
    anchored).
    """
    return self._make_child(args)
def __truediv__(self, key):
    """Implement path / other."""
    try:
        return self._make_child((key,))
    except TypeError:
        return NotImplemented
def __rtruediv__(self, key):
    """Implement other / path, with *key* as the left-hand operand."""
    try:
        return self._from_parts([key] + self._parts)
    except TypeError:
        return NotImplemented
@property
def parent(self):
    """The logical parent of the path."""
    drv = self._drv
    root = self._root
    parts = self._parts
    # An anchor-only path (just drive and/or root) is its own parent.
    if len(parts) == 1 and (drv or root):
        return self
    return self._from_parsed_parts(drv, root, parts[:-1])
@property
def parents(self):
    """A sequence of this path's logical parents."""
    return _PathParents(self)
def is_absolute(self):
    """True if the path is absolute (has both a root and, if applicable,
    a drive)."""
    if not self._root:
        return False
    return not self._flavour.has_drv or bool(self._drv)
def is_reserved(self):
    """Return True if the path contains one of the special names reserved
    by the system, if any."""
    return self._flavour.is_reserved(self._parts)
def match(self, path_pattern):
    """
    Return True if this path matches the given pattern.
    """
    cf = self._flavour.casefold
    path_pattern = cf(path_pattern)
    drv, root, pat_parts = self._flavour.parse_parts((path_pattern,))
    if not pat_parts:
        raise ValueError("empty pattern")
    if drv and drv != cf(self._drv):
        return False
    if root and root != cf(self._root):
        return False
    parts = self._cparts
    if drv or root:
        # An anchored pattern must cover the whole path.
        if len(pat_parts) != len(parts):
            return False
        pat_parts = pat_parts[1:]
    elif len(pat_parts) > len(parts):
        return False
    # A relative pattern is matched against the rightmost parts.
    for part, pat in zip(reversed(parts), reversed(pat_parts)):
        if not fnmatch.fnmatchcase(part, pat):
            return False
    return True
|
(*args)
|
43,974 |
jsons.exceptions
|
SerializationError
|
Raised when serialization failed for some reason.
|
class SerializationError(JsonsError):
    """
    Raised when serialization failed for some reason.
    Subclass of ``JsonsError``.
    """
|
(message: str)
|
43,990 |
jsons.exceptions
|
UnfulfilledArgumentError
|
Raised on a deserialization failure when an argument could not be fulfilled
by the given object attr_getter.
|
class UnfulfilledArgumentError(DeserializationError, ArgumentError):
    """
    Raised on a deserialization failure when an argument could not be fulfilled
    by the given object attr_getter.
    """
    def __init__(self,
                 message: str,
                 argument: str,
                 source: object,
                 target: type):
        """
        Constructor.
        :param message: the message of this error.
        :param argument: the argument that was unfulfilled.
        :param source: the object that was to be deserialized.
        :param target: the type to which `source` was to be deserialized.
        """
        # Initialize both bases explicitly (not via super()) because each
        # base takes a different set of constructor arguments.
        DeserializationError.__init__(self, message, source, target)
        ArgumentError.__init__(self, message, argument)
|
(message: str, argument: str, source: object, target: type)
|
43,991 |
jsons.exceptions
|
__init__
|
Constructor.
:param message: the message of this error.
:param argument: the argument that was unfulfilled.
:param source: the object that was to be deserialized.
:param target: the type to which `source` was to be deserialized.
|
def __init__(self,
             message: str,
             argument: str,
             source: object,
             target: type):
    """
    Constructor.
    :param message: the message of this error.
    :param argument: the argument that was unfulfilled.
    :param source: the object that was to be deserialized.
    :param target: the type to which `source` was to be deserialized.
    """
    # Initialize both bases explicitly (not via super()) because each
    # base takes a different set of constructor arguments.
    DeserializationError.__init__(self, message, source, target)
    ArgumentError.__init__(self, message, argument)
|
(self, message: str, argument: str, source: object, target: type)
|
43,992 |
jsons.exceptions
|
ValidationError
|
Raised when the validation of an object failed.
|
class ValidationError(JsonsError):
    """
    Raised when the validation of an object failed.
    Subclass of ``JsonsError``.
    """
|
(message: str)
|
43,994 |
jsons.classes.verbosity
|
Verbosity
|
An enum that defines the level of verbosity of the serialization of an
object.
|
class Verbosity(Flag):
    """
    An enum that defines the level of verbosity of the serialization of an
    object.
    """
    WITH_NOTHING = 0
    WITH_CLASS_INFO = 10
    WITH_DUMP_TIME = 20
    WITH_EVERYTHING = WITH_CLASS_INFO | WITH_DUMP_TIME

    @staticmethod
    def from_value(value: object) -> 'Verbosity':
        """
        Return a ``Verbosity`` instance from the given value.

        ``Verbosity`` instances are returned unchanged; any truthy value
        maps to ``WITH_EVERYTHING`` and any falsy value (``None``,
        ``False``, ``0``, ...) maps to ``WITH_NOTHING``.

        :param value: the value to interpret as a verbosity level.
        :return: a ``Verbosity`` instance corresponding to ``value``.
        """
        if isinstance(value, Verbosity):
            return value
        # The former branch chain (value in (False, None) / value is True /
        # truthiness) collapses to a single truthiness test: no truthy value
        # compares equal to False, and None/False/0 are all falsy.
        return Verbosity.WITH_EVERYTHING if value else Verbosity.WITH_NOTHING
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
43,995 |
zoneinfo
|
ZoneInfo
| null |
from zoneinfo import ZoneInfo
| null |
44,014 |
jsons.deserializers.default_complex
|
default_complex_deserializer
|
Deserialize a dictionary with 'real' and 'imag' keys to a complex number.
:param obj: the dict that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: an instance of ``complex``.
|
def default_complex_deserializer(obj: Dict[str, float],
                                 cls: type = complex,
                                 **kwargs) -> complex:
    """
    Deserialize a dictionary with 'real' and 'imag' keys to a complex number.
    :param obj: the dict that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: an instance of ``complex``.
    :raises AttributeError: if a key is missing or a value cannot be cast
    to ``float``.
    """
    try:
        # Round-trip the two values through load() to coerce them to float.
        clean_obj = load({'real': obj['real'], 'imag': obj['imag']},
                         cls=Dict[str, float])
        return complex(clean_obj['real'], clean_obj['imag'])
    except KeyError as err:
        raise AttributeError("Cannot deserialize {} to a complex number, "
                             "does not contain key '{}'"
                             .format(obj, err.args[0])) from err
    except DeserializationError as err:
        raise AttributeError("Cannot deserialize {} to a complex number, "
                             "cannot cast value {} to float"
                             .format(obj, err.source)) from err
|
(obj: Dict[str, float], cls: type = <class 'complex'>, **kwargs) -> complex
|
44,015 |
jsons.serializers.default_complex
|
default_complex_serializer
|
Serialize a complex as a dict.
:param obj: the complex.
:param _: not used.
:return: a ``dict``.
|
def default_complex_serializer(obj: complex, **_) -> dict:
    """
    Serialize a complex number as a dict holding its real and imaginary
    parts.
    :param obj: the complex.
    :param _: not used.
    :return: a ``dict``.
    """
    real_part = obj.real
    imag_part = obj.imag
    return dict(real=real_part, imag=imag_part)
|
(obj: complex, **_) -> dict
|
44,016 |
jsons.deserializers.default_date
|
default_date_deserializer
|
Deserialize a string with an RFC3339 pattern to a date instance.
:param obj: the string that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: a ``datetime.date`` instance.
|
def default_date_deserializer(obj: str,
                              cls: type = date,
                              **kwargs) -> date:
    """
    Deserialize a string with an RFC3339 pattern to a date instance.
    :param obj: the string that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: a ``datetime.date`` instance.
    """
    # Parse with the date-only pattern, then keep only the date part.
    return get_datetime_inst(obj, RFC3339_DATE_PATTERN).date()
|
(obj: str, cls: type = <class 'datetime.date'>, **kwargs) -> datetime.date
|
44,017 |
jsons.serializers.default_date
|
default_date_serializer
|
Serialize the given date instance to a string. It uses the RFC3339
pattern. If date is a localtime, an offset is provided. If date is
in UTC, the result is suffixed with a 'Z'.
:param obj: the date instance that is to be serialized.
:param kwargs: not used.
:return: ``date`` as an RFC3339 string.
|
def default_date_serializer(obj: date, **kwargs) -> str:
    """
    Serialize the given date instance to a string. It uses the RFC3339
    pattern.
    :param obj: the date instance that is to be serialized.
    :param kwargs: not used, except for ``fork_inst``.
    :return: ``date`` as an RFC3339 string.
    """
    # Delegate formatting to the shared to_str helper; a date has no
    # microseconds, hence the hard-coded False.
    return to_str(obj, False, kwargs['fork_inst'],
                  RFC3339_DATE_PATTERN)
|
(obj: datetime.date, **kwargs) -> str
|
44,018 |
jsons.deserializers.default_datetime
|
default_datetime_deserializer
|
Deserialize a string with an RFC3339 pattern to a datetime instance.
:param obj: the string that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: a ``datetime.datetime`` instance.
|
def default_datetime_deserializer(obj: str,
                                  cls: type = datetime,
                                  **kwargs) -> datetime:
    """
    Deserialize a string with an RFC3339 pattern to a datetime instance.
    :param obj: the string that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: a ``datetime.datetime`` instance.
    """
    pattern = RFC3339_DATETIME_PATTERN
    if '.' in obj:
        pattern += '.%f'
        # strptime allows a fraction of length 6, so strip the rest (if exists).
        # NOTE(review): assumes the '.' is followed by digits; regex search
        # would return None (and .group() raise) otherwise — confirm inputs.
        regex_pattern = re.compile(r'(\.[0-9]+)')
        frac = regex_pattern.search(obj).group()
        # Keep the dot plus at most six fractional digits.
        obj = obj.replace(frac, frac[0:7])
    return get_datetime_inst(obj, pattern)
|
(obj: str, cls: type = <module 'datetime' from '/usr/local/lib/python3.10/datetime.py'>, **kwargs) -> <module 'datetime' from '/usr/local/lib/python3.10/datetime.py'>
|
44,019 |
jsons.serializers.default_datetime
|
default_datetime_serializer
|
Serialize the given datetime instance to a string. It uses the RFC3339
pattern. If datetime is a localtime, an offset is provided. If datetime is
in UTC, the result is suffixed with a 'Z'.
:param obj: the datetime instance that is to be serialized.
:param strip_microseconds: determines whether microseconds should be
omitted.
:param kwargs: not used.
:return: ``datetime`` as an RFC3339 string.
|
def default_datetime_serializer(obj: datetime,
                                *,
                                strip_microseconds: Optional[bool] = False,
                                **kwargs) -> str:
    """
    Serialize the given datetime instance to a string. It uses the RFC3339
    pattern. If datetime is a localtime, an offset is provided. If datetime is
    in UTC, the result is suffixed with a 'Z'.
    :param obj: the datetime instance that is to be serialized.
    :param strip_microseconds: determines whether microseconds should be
    omitted.
    :param kwargs: not used, except for ``fork_inst``.
    :return: ``datetime`` as an RFC3339 string.
    """
    # Delegate the actual formatting to the shared to_str helper.
    return to_str(obj, strip_microseconds, kwargs['fork_inst'],
                  RFC3339_DATETIME_PATTERN)
|
(obj: datetime.datetime, *, strip_microseconds: Optional[bool] = False, **kwargs) -> str
|
44,020 |
jsons.deserializers.default_decimal
|
default_decimal_deserializer
|
Deserialize a Decimal. Expects a string representation of a number, or
the number itself as a float or int.
:param obj: the string float or int that is to be deserialized.
:param cls: not used.
:param kwargs: any keyword arguments.
:return: the deserialized obj.
|
def default_decimal_deserializer(obj: Union[str, float, int],
                                 cls: Optional[type] = None,
                                 **kwargs) -> Decimal:
    """
    Deserialize a ``Decimal``. Accepts a string representation of a number,
    or the number itself as a float or int.
    :param obj: the string, float or int that is to be deserialized.
    :param cls: not used.
    :param kwargs: any keyword arguments.
    :return: the deserialized obj.
    """
    # Decimal's constructor accepts str, int and float directly.
    result = Decimal(obj)
    return result
|
(obj: Union[str, float, int], cls: Optional[type] = None, **kwargs) -> decimal.Decimal
|
44,021 |
jsons.serializers.default_decimal
|
default_decimal_serializer
|
Serialize a Decimal.
:param obj: an instance of a Decimal.
:param kwargs: any keyword arguments.
:return: ``obj`` serialized as a string.
|
def default_decimal_serializer(obj: Decimal, **kwargs) -> str:
    """
    Serialize a ``Decimal`` to its string representation.
    :param obj: an instance of a Decimal.
    :param kwargs: any keyword arguments.
    :return: ``obj`` serialized as a string.
    """
    # An empty format spec delegates to Decimal.__str__.
    return '{}'.format(obj)
|
(obj: decimal.Decimal, **kwargs) -> str
|
44,022 |
jsons.deserializers.default_defaultdict
|
default_defaultdict_deserializer
|
Deserialize a defaultdict.
:param obj: the dict that needs deserializing.
:param key_transformer: a function that transforms the keys to a different
style (e.g. PascalCase).
:param cls: the (optionally generic) defaultdict type; its type arguments
determine the value type and the default factory.
:param kwargs: any keyword arguments.
:return: a deserialized defaultdict instance.
|
def default_defaultdict_deserializer(
        obj: dict,
        cls: type,
        *,
        key_transformer: Optional[Callable[[str], str]] = None,
        **kwargs) -> dict:
    """
    Deserialize a defaultdict.
    :param obj: the dict that needs deserializing.
    :param cls: the (optionally generic) defaultdict type; its type
    arguments determine the value type and the default factory.
    :param key_transformer: a function that transforms the keys to a different
    style (e.g. PascalCase).
    :param kwargs: any keyword arguments.
    :return: a deserialized defaultdict instance.
    """
    args = get_args(cls)
    default_factory = None
    cls_ = Dict
    if args:
        key, value = get_args(cls)
        cls_ = Dict[key, value]
        # The value type doubles as the default factory of the result.
        # NOTE(review): this assumes the value type is callable with no
        # arguments — confirm behavior for parameterized generics.
        default_factory = value
    loaded = load(obj, cls_, key_transformer=key_transformer, **kwargs)
    return defaultdict(default_factory, loaded)
|
(obj: dict, cls: type, *, key_transformer: Optional[Callable[[str], str]] = None, **kwargs) -> dict
|
44,023 |
jsons.deserializers.default_dict
|
default_dict_deserializer
|
Deserialize a dict by deserializing all instances of that dict.
:param obj: the dict that needs deserializing.
:param key_transformer: a function that transforms the keys to a different
style (e.g. PascalCase).
:param cls: the (optionally generic) dict type; only its type arguments
are used.
:param kwargs: any keyword arguments.
:return: a deserialized dict instance.
|
def default_dict_deserializer(
        obj: dict,
        cls: type,
        *,
        key_transformer: Optional[Callable[[str], str]] = None,
        **kwargs) -> dict:
    """
    Deserialize a dict by deserializing all instances of that dict.
    :param obj: the dict that needs deserializing.
    :param cls: the (optionally generic) dict type; only its type
    arguments are used.
    :param key_transformer: a function that transforms the keys to a different
    style (e.g. PascalCase).
    :param kwargs: any keyword arguments.
    :return: a deserialized dict instance.
    """
    cls_args = get_args(cls)
    # Restore any keys that were stored hashed (non-JSON-safe keys)
    # before deserializing the values themselves.
    obj_, keys_were_hashed = _load_hashed_keys(
        obj, cls, cls_args, key_transformer=key_transformer, **kwargs)
    return _deserialize(obj_, cls_args, key_transformer, keys_were_hashed, kwargs)
|
(obj: dict, cls: type, *, key_transformer: Optional[Callable[[str], str]] = None, **kwargs) -> dict
|
44,024 |
jsons.serializers.default_dict
|
default_dict_serializer
|
Serialize the given ``obj`` to a dict of serialized objects.
:param obj: the dict that is to be serialized.
:param cls: not used.
:param strict: if ``True`` the serializer is strict (i.e. only dumping
stuff that is known to ``cls``) and raises upon the failure of any
attribute; otherwise it continues with a warning.
:param strip_nulls: if ``True`` the resulting dict will not contain null
values.
:param key_transformer: a function that will be applied to all keys in the
resulting dict.
:param types: a ``dict`` with attribute names (keys) and their types
(values).
:param kwargs: any keyword arguments that may be given to the serialization
process.
:return: a dict of which all elements are serialized.
|
def default_dict_serializer(
        obj: dict,
        cls: Optional[type] = None,
        *,
        strict: bool = False,
        strip_nulls: bool = False,
        key_transformer: Optional[Callable[[str], str]] = None,
        types: Optional[Dict[str, type]] = None,
        **kwargs) -> dict:
    """
    Serialize the given ``obj`` to a dict of serialized objects.
    :param obj: the dict that is to be serialized.
    :param cls: not used.
    :param strict: if ``True``, only dump attributes that are known to
    ``cls`` and raise upon the failure of any attribute; otherwise
    continue with a warning.
    :param strip_nulls: if ``True`` the resulting dict will not contain null
    values.
    :param key_transformer: a function that will be applied to all keys in the
    resulting dict.
    :param types: a ``dict`` with attribute names (keys) and their types
    (values).
    :param kwargs: any keyword arguments that may be given to the serialization
    process.
    :return: a dict of which all elements are serialized.
    """
    result = dict()
    types = types or dict()
    for key in obj:
        obj_ = obj[key]
        cls_ = types.get(key, None)
        # If key is not a valid json type, use the hash as key and store the
        # original key in a separate section.
        dict_and_key = _store_and_hash(result, key,
                                       key_transformer=key_transformer,
                                       strip_nulls=strip_nulls, strict=strict,
                                       types=types, **kwargs)
        if dict_and_key:
            result, key = dict_and_key
        dumped_elem = dump(obj_,
                           cls=cls_,
                           key_transformer=key_transformer,
                           strip_nulls=strip_nulls,
                           strict=strict,
                           **kwargs)
        # Honour strip_nulls before inserting, transforming the key last.
        if not (strip_nulls and dumped_elem is None):
            if key_transformer:
                key = key_transformer(key)
            result[key] = dumped_elem
    return result
|
(obj: dict, cls: Optional[type] = None, *, strict: bool = False, strip_nulls: bool = False, key_transformer: Optional[Callable[[str], str]] = None, types: Optional[Dict[str, type]] = None, **kwargs) -> dict
|
44,025 |
jsons.deserializers.default_enum
|
default_enum_deserializer
|
Deserialize an enum value to an enum instance. The serialized value can be
either the name or the key of an enum entry. If ``use_enum_name`` is set to
``True``, then the value *must* be the key of the enum entry. If
``use_enum_name`` is set to ``False``, the value *must* be the value of the
enum entry. By default, this deserializer tries both.
:param obj: the serialized enum.
:param cls: the enum class.
:param use_enum_name: determines whether the name or the value of an enum
element should be used.
:param kwargs: not used.
:return: the corresponding enum element instance.
|
def default_enum_deserializer(obj: str,
                              cls: EnumMeta,
                              *,
                              use_enum_name: Optional[bool] = None,
                              **kwargs) -> object:
    """
    Deserialize a value into an enum member of ``cls``.
    The serialized value can be either the name or the value of an enum
    entry. If ``use_enum_name`` is set to ``True``, then the value *must* be
    the key of the enum entry. If ``use_enum_name`` is set to ``False``, the
    value *must* be the value of the enum entry. By default, this
    deserializer tries both (name first, then value).
    :param obj: the serialized enum.
    :param cls: the enum class.
    :param use_enum_name: determines whether the name or the value of an enum
    element should be used.
    :param kwargs: not used.
    :return: the corresponding enum element instance.
    """
    if use_enum_name:
        return cls[obj]
    if use_enum_name is False:
        return cls(obj)
    # use_enum_name is None: try lookup by name first, then by value.
    try:
        return cls[obj]
    except KeyError:
        return cls(obj)  # May raise a ValueError (which is expected).
|
(obj: str, cls: enum.EnumMeta, *, use_enum_name: Optional[bool] = None, **kwargs) -> object
|
44,026 |
jsons.serializers.default_enum
|
default_enum_serializer
|
Serialize the given obj. By default, the name of the enum element is
returned.
:param obj: an instance of an enum.
:param use_enum_name: determines whether the name or the value should be
used for serialization.
:param _: not used.
:return: ``obj`` serialized as a string.
|
def default_enum_serializer(obj: EnumMeta,
                            *,
                            use_enum_name: bool = True,
                            **_) -> str:
    """
    Serialize the given enum member. By default, the name of the enum
    element is returned.
    :param obj: an instance of an enum.
    :param use_enum_name: determines whether the name or the value should be
    used for serialization.
    :param _: not used.
    :return: ``obj`` serialized as a string.
    """
    if use_enum_name:
        return obj.name
    return obj.value
|
(obj: enum.EnumMeta, *, use_enum_name: bool = True, **_) -> str
|
44,027 |
jsons.deserializers.default_iterable
|
default_iterable_deserializer
|
Deserialize a (JSON) list into an ``Iterable`` by deserializing all items
of that list. The given obj is assumed to be homogeneous; if the list has a
generic type (e.g. Set[datetime]) then it is assumed that all elements can
be deserialized to that type.
:param obj: The list that needs deserializing to an ``Iterable``.
:param cls: The type, optionally with a generic (e.g. Deque[str]).
:param kwargs: Any keyword arguments.
:return: A deserialized ``Iterable`` (e.g. ``set``) instance.
|
def default_iterable_deserializer(
        obj: list,
        cls: type,
        **kwargs) -> Iterable:
    """
    Deserialize a (JSON) list into an ``Iterable`` by deserializing all items
    of that list. The given obj is assumed to be homogeneous; if the list has a
    generic type (e.g. Set[datetime]) then it is assumed that all elements can
    be deserialized to that type.
    :param obj: The list that needs deserializing to an ``Iterable``.
    :param cls: The type, optionally with a generic (e.g. Deque[str]).
    :param kwargs: Any keyword arguments.
    :return: A deserialized ``Iterable`` (e.g. ``set``) instance.
    """
    # NOTE(review): the fallback cls_ is Mapping, which looks odd for list
    # deserialization — confirm this is intended.
    cls_ = Mapping
    if hasattr(cls, '__args__'):
        cls_ = IterableType[cls.__args__]
    list_ = default_list_deserializer(obj, cls_, **kwargs)
    result = list_
    # Convert the intermediate list to the requested container type.
    naked_cls = get_naked_class(cls)
    if not isinstance(result, naked_cls):
        result = naked_cls(list_)
    return result
|
(obj: list, cls: type, **kwargs) -> collections.abc.Iterable
|
44,028 |
jsons.serializers.default_iterable
|
default_iterable_serializer
|
Serialize the given ``obj`` to a list of serialized objects.
:param obj: the iterable that is to be serialized.
:param cls: the (subscripted) type of the iterable.
:param strict: a bool to determine if the serializer should be strict
(i.e. only dumping stuff that is known to ``cls``).
:param tasks: the allowed number of tasks (threads or processes).
:param task_type: the type that is used for multitasking.
:param kwargs: any keyword arguments that may be given to the serialization
process.
:return: a list of which all elements are serialized.
|
def default_iterable_serializer(
        obj: Iterable,
        cls: type = None,
        *,
        strict: bool = False,
        tasks: int = 1,
        task_type: type = Process,
        **kwargs) -> list:
    """
    Serialize the given ``obj`` to a list of serialized objects.
    :param obj: the iterable that is to be serialized.
    :param cls: the (subscripted) type of the iterable.
    :param strict: a bool to determine if the serializer should be strict
    (i.e. only dumping stuff that is known to ``cls``).
    :param tasks: the allowed number of tasks (threads or processes).
    :param task_type: the type that is used for multitasking.
    :param kwargs: any keyword arguments that may be given to the serialization
    process.
    :return: a list of which all elements are serialized.
    """
    # The meta kwarg store_cls is filtered out, because an iterable should have
    # its own -meta attribute.
    kwargs_ = {**kwargs, 'strict': strict}
    kwargs_.pop('_store_cls', None)
    # In strict mode, pin down a per-element class for each element.
    if strict:
        cls_ = _determine_cls(obj, cls)
        subclasses = _get_subclasses(obj, cls_)
    else:
        subclasses = _get_subclasses(obj, None)
    if tasks < 2:
        result = [dump(elem, cls=subclasses[i], **kwargs_)
                  for i, elem in enumerate(obj)]
    else:
        # Fan the elements out over multiple threads/processes.
        zipped_objs = list(zip(obj, subclasses))
        result = multi_task(_do_dump, zipped_objs, tasks, task_type, **kwargs_)
    return result
|
(obj: collections.abc.Iterable, cls: Optional[type] = None, *, strict: bool = False, tasks: int = 1, task_type: type = <class 'multiprocessing.context.Process'>, **kwargs) -> list
|
44,029 |
jsons.deserializers.default_list
|
default_list_deserializer
|
Deserialize a list by deserializing all items of that list.
:param obj: the list that needs deserializing.
:param cls: the type optionally with a generic (e.g. List[str]).
:param warn_on_fail: if ``True``, will warn upon any failure and continue.
:param tasks: the allowed number of tasks (threads or processes).
:param task_type: the type that is used for multitasking.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:param kwargs: any keyword arguments.
:return: a deserialized list instance.
|
def default_list_deserializer(
        obj: list,
        cls: type = None,
        *,
        warn_on_fail: bool = False,
        tasks: int = 1,
        task_type: type = Process,
        fork_inst: Type[StateHolder] = StateHolder,
        **kwargs) -> list:
    """
    Deserialize a list by deserializing all items of that list.
    :param obj: the list that needs deserializing.
    :param cls: the type optionally with a generic (e.g. List[str]).
    :param warn_on_fail: if ``True``, will warn upon any failure and continue.
    :param tasks: the allowed number of tasks (threads or processes).
    :param task_type: the type that is used for multitasking.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :param kwargs: any keyword arguments.
    :return: a deserialized list instance.
    :raises JsonsError: if ``tasks`` is less than 1.
    """
    cls_ = None
    kwargs_ = {**kwargs}
    cls_args = get_args(cls)
    if cls_args:
        cls_ = cls_args[0]
        # Mark the cls as 'inferred' so that later it is known where cls came
        # from and the precedence of classes can be determined.
        kwargs_['_inferred_cls'] = True
    if tasks == 1:
        result = _do_load(obj, cls_, warn_on_fail, fork_inst, kwargs_)
    elif tasks > 1:
        # Fan the elements out over multiple threads/processes.
        result = multi_task(load, obj, tasks, task_type, cls_, **kwargs_)
    else:
        raise JsonsError('Invalid number of tasks: {}'.format(tasks))
    return result
|
(obj: list, cls: Optional[type] = None, *, warn_on_fail: bool = False, tasks: int = 1, task_type: type = <class 'multiprocessing.context.Process'>, fork_inst: Type[jsons._common_impl.StateHolder] = <class 'jsons._common_impl.StateHolder'>, **kwargs) -> list
|
44,030 |
jsons.serializers.default_list
|
default_list_serializer
|
Serialize the given ``obj`` to a list of serialized objects.
:param obj: the list that is to be serialized.
:param cls: the (subscripted) type of the list.
:param strict: a bool to determine if the serializer should be strict
(i.e. only dumping stuff that is known to ``cls``).
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:param kwargs: any keyword arguments that may be given to the serialization
process.
:return: a list of which all elements are serialized.
|
def default_list_serializer(
        obj: list,
        cls: type = None,
        *,
        strict: bool = False,
        fork_inst: Optional[type] = StateHolder,
        **kwargs) -> list:
    """
    Serialize the given ``obj`` to a list of serialized objects.
    :param obj: the list that is to be serialized.
    :param cls: the (subscripted) type of the list.
    :param strict: a bool to determine if the serializer should be strict
    (i.e. only dumping stuff that is known to ``cls``).
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :param kwargs: any keyword arguments that may be given to the serialization
    process.
    :return: a list of which all elements are serialized.
    """
    if not obj:
        return []
    sub_kwargs = dict(kwargs, strict=strict)
    # An iterable gets its own -meta attribute, so the inherited _store_cls
    # flag must not propagate to the elements.
    sub_kwargs.pop('_store_cls', None)
    elem_type = None
    serialize = dump
    type_args = get_args(cls)
    if type_args:
        # A subscripted type (e.g. List[str]) fixes the element serializer.
        elem_type = type_args[0]
        serialize = get_serializer(elem_type, fork_inst)
    elif strict:
        # In strict mode without a generic, infer the type from element 0.
        elem_type = type(obj[0])
        serialize = get_serializer(elem_type, fork_inst)
    result = []
    for elem in obj:
        result.append(
            serialize(elem, cls=elem_type, fork_inst=fork_inst, **sub_kwargs))
    return result
|
(obj: list, cls: Optional[type] = None, *, strict: bool = False, fork_inst: Optional[type] = <class 'jsons._common_impl.StateHolder'>, **kwargs) -> list
|
44,031 |
jsons.deserializers.default_mapping
|
default_mapping_deserializer
|
Deserialize a (JSON) dict into a mapping by deserializing all items of that
dict.
:param obj: the dict that needs deserializing.
:param cls: the type, optionally with a generic (e.g. Set[str]).
:param kwargs: any keyword arguments.
:return: a deserialized set instance.
|
def default_mapping_deserializer(obj: dict, cls: type, **kwargs) -> Mapping:
    """
    Deserialize a (JSON) dict into a mapping by deserializing all items of that
    dict.
    :param obj: the dict that needs deserializing.
    :param cls: the type, optionally with a generic (e.g. Set[str]).
    :param kwargs: any keyword arguments.
    :return: a deserialized set instance.
    """
    generic_args = get_args(cls)
    # Re-subscript a generic Mapping so the dict deserializer sees the
    # key/value types; fall back to the bare Mapping ABC otherwise.
    target = MappingType[generic_args] if generic_args else Mapping
    loaded = default_dict_deserializer(obj, target, **kwargs)
    # Strip any generics from cls to allow for an instance check.
    if isinstance(loaded, get_origin(cls)):
        return loaded
    return cls(loaded)
|
(obj: dict, cls: type, **kwargs) -> collections.abc.Mapping
|
44,032 |
jsons.deserializers.default_nonetype
|
default_nonetype_deserializer
|
Deserialize a ``NoneType``.
:param obj: the value that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: ``obj``.
|
def default_nonetype_deserializer(obj: object,
                                  cls: Optional[type] = None,
                                  **kwargs) -> object:
    """
    Deserialize a ``NoneType``.
    :param obj: the value that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :raises DeserializationError: if ``obj`` is not ``None``.
    :return: ``obj``.
    """
    if obj is None:
        return obj
    # Any non-None value cannot possibly be a NoneType.
    raise DeserializationError('Cannot deserialize {} as NoneType'
                               .format(obj), source=obj, target=cls)
|
(obj: object, cls: Optional[type] = None, **kwargs) -> object
|
44,033 |
jsons.deserializers.default_object
|
default_object_deserializer
|
Deserialize ``obj`` into an instance of type ``cls``. If ``obj`` contains
keys with a certain case style (e.g. camelCase) that do not match the style
of ``cls`` (e.g. snake_case), a key_transformer should be used (e.g.
KEY_TRANSFORMER_SNAKECASE).
:param obj: a serialized instance of ``cls``.
:param cls: the type to which ``obj`` should be deserialized.
:param key_transformer: a function that transforms the keys in order to
match the attribute names of ``cls``.
:param strict: deserialize in strict mode.
:param kwargs: any keyword arguments that may be passed to the
deserializers.
:return: an instance of type ``cls``.
|
def default_object_deserializer(
        obj: dict,
        cls: type,
        *,
        key_transformer: Optional[Callable[[str], str]] = None,
        strict: bool = False,
        **kwargs) -> object:
    """
    Deserialize ``obj`` into an instance of type ``cls``. If ``obj`` contains
    keys with a certain case style (e.g. camelCase) that do not match the style
    of ``cls`` (e.g. snake_case), a key_transformer should be used (e.g.
    KEY_TRANSFORMER_SNAKECASE).
    :param obj: a serialized instance of ``cls``.
    :param cls: the type to which ``obj`` should be deserialized.
    :param key_transformer: a function that transforms the keys in order to
    match the attribute names of ``cls``.
    :param strict: deserialize in strict mode.
    :param kwargs: any keyword arguments that may be passed to the
    deserializers.
    :return: an instance of type ``cls``.
    """
    # Normalize the keys first so they line up with cls' attribute names.
    obj, kwargs = _check_and_transform_keys(obj, key_transformer, **kwargs)
    kwargs['strict'] = strict
    init_kwargs = _get_constructor_args(obj, cls, **kwargs)
    # Values not consumed by the constructor are assigned afterwards.
    leftover_attrs = _get_remaining_args(obj, cls, init_kwargs, strict,
                                         kwargs['fork_inst'])
    result = cls(**init_kwargs)
    _set_remaining_attrs(result, leftover_attrs, **kwargs)
    return result
|
(obj: dict, cls: type, *, key_transformer: Optional[Callable[[str], str]] = None, strict: bool = False, **kwargs) -> object
|
44,034 |
jsons.serializers.default_object
|
default_object_serializer
|
Serialize the given ``obj`` to a dict. All values within ``obj`` are also
serialized. If ``key_transformer`` is given, it will be used to transform
the casing (e.g. snake_case) to a different format (e.g. camelCase).
:param obj: the object that is to be serialized.
:param cls: the type of the object that is to be dumped.
:param key_transformer: a function that will be applied to all keys in the
resulting dict.
:param strip_nulls: if ``True`` the resulting dict will not contain null
values.
:param strip_privates: if ``True`` the resulting dict will not contain
private attributes (i.e. attributes that start with an underscore).
:param strip_properties: if ``True`` the resulting dict will not contain
values from @properties.
:param strip_class_variables: if ``True`` the resulting dict will not
contain values from class variables.
:param strip_attr: can be a name or a collection of names of attributes
that are not to be included in the dump.
:param verbose: if ``True`` the resulting dict will contain meta
information (e.g. on how to deserialize).
:param strict: a bool to determine if the serializer should be strict
(i.e. only dumping stuff that is known to ``cls``).
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:param kwargs: any keyword arguments that are to be passed to the
serializer functions.
:return: a Python dict holding the values
of ``obj``.
|
def default_object_serializer(
        obj: object,
        cls: Optional[type] = None,
        *,
        key_transformer: Optional[Callable[[str], str]] = None,
        strip_nulls: bool = False,
        strip_privates: bool = False,
        strip_properties: bool = False,
        strip_class_variables: bool = False,
        strip_attr: Union[str, MutableSequence[str], Tuple[str]] = None,
        verbose: Union[Verbosity, bool] = False,
        strict: bool = False,
        fork_inst: Optional[type] = StateHolder,
        **kwargs) -> Optional[dict]:
    """
    Serialize the given ``obj`` to a dict. All values within ``obj`` are also
    serialized. If ``key_transformer`` is given, it will be used to transform
    the casing (e.g. snake_case) to a different format (e.g. camelCase).
    :param obj: the object that is to be serialized.
    :param cls: the type of the object that is to be dumped.
    :param key_transformer: a function that will be applied to all keys in the
    resulting dict.
    :param strip_nulls: if ``True`` the resulting dict will not contain null
    values.
    :param strip_privates: if ``True`` the resulting dict will not contain
    private attributes (i.e. attributes that start with an underscore).
    :param strip_properties: if ``True`` the resulting dict will not contain
    values from @properties.
    :param strip_class_variables: if ``True`` the resulting dict will not
    contain values from class variables.
    :param strip_attr: can be a name or a collection of names of attributes
    that are not to be included in the dump.
    :param verbose: if ``True`` the resulting dict will contain meta
    information (e.g. on how to deserialize).
    :param strict: a bool to determine if the serializer should be strict
    (i.e. only dumping stuff that is known to ``cls``).
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :param kwargs: any keyword arguments that are to be passed to the
    serializer functions.
    :return: a Python dict holding the values
    of ``obj``.
    """
    strip_attr = _normalize_strip_attr(strip_attr)
    # With both an explicit cls and strict mode, the attribute set comes from
    # the class definition; otherwise it is read from the instance itself and
    # cls falls back to the instance's own class.
    if cls and strict:
        attributes = _get_attributes_from_class(
            cls, strip_privates, strip_properties, strip_class_variables,
            strip_attr, strict)
    else:
        attributes = _get_attributes_from_object(
            obj, strip_privates, strip_properties, strip_class_variables,
            strip_attr, strict)
        cls = obj.__class__
    # Normalize bool/Verbosity input into a Verbosity flag value.
    verbose = Verbosity.from_value(verbose)
    kwargs_ = {
        **kwargs,
        'fork_inst': fork_inst,
        'verbose': verbose,
        'strict': strict,
        # Set a flag in kwargs to temporarily store -cls:
        '_store_cls': Verbosity.WITH_CLASS_INFO in verbose
    }
    result = _do_serialize(obj=obj,
                           cls=cls,
                           attributes=attributes,
                           kwargs=kwargs_,
                           key_transformer=key_transformer,
                           strip_nulls=strip_nulls,
                           strip_privates=strip_privates,
                           strip_properties=strip_properties,
                           strip_class_variables=strip_class_variables,
                           strip_attr=strip_attr,
                           strict=strict,
                           fork_inst=fork_inst)
    cls_name = get_class_name(cls, fully_qualified=True)
    # NOTE(review): this reads the *incoming* kwargs (not kwargs_), so meta
    # info is attached only when the caller did not itself set _store_cls —
    # presumably only on the outermost call; confirm against _do_serialize.
    if not kwargs.get('_store_cls'):
        result = _get_dict_with_meta(result, cls_name, verbose, fork_inst)
    return result
|
(obj: object, cls: Optional[type] = None, *, key_transformer: Optional[Callable[[str], str]] = None, strip_nulls: bool = False, strip_privates: bool = False, strip_properties: bool = False, strip_class_variables: bool = False, strip_attr: Union[str, MutableSequence[str], Tuple[str], NoneType] = None, verbose: Union[jsons.classes.verbosity.Verbosity, bool] = False, strict: bool = False, fork_inst: Optional[type] = <class 'jsons._common_impl.StateHolder'>, **kwargs) -> Optional[dict]
|
44,035 |
jsons.deserializers.default_path
|
default_path_deserializer
|
Deserialize a string to a `pathlib.PurePath` object. Since ``pathlib``
implements ``PurePath``, no filename or existence checks are performed.
:param obj: the string to deserialize.
:param kwargs: not used.
:return: a ``pathlib.PurePath`` instance.
|
def default_path_deserializer(obj: str, cls: type = PurePath, **kwargs) -> PurePath:
    """
    Deserialize a string to a `pathlib.PurePath` object. Since ``pathlib``
    implements ``PurePath``, no filename or existence checks are performed.
    :param obj: the string to deserialize.
    :param cls: the ``PurePath`` (sub)class to instantiate.
    :param kwargs: not used.
    :return: a ``pathlib.PurePath`` instance.
    """
    # cls defaults to PurePath, but any path class constructible from a
    # single string (e.g. Path, PurePosixPath) works.
    return cls(obj)
|
(obj: str, cls: type = <class 'pathlib.PurePath'>, **kwargs) -> pathlib.PurePath
|
44,036 |
jsons.serializers.default_path
|
default_path_serializer
|
Serialize a ``pathlib.PurePath`` object to a ``str``, Posix-style.
Posix-style strings are used as they can be used to create ``pathlib.Path``
objects on both Posix and Windows systems, but Windows-style strings can
only be used to create valid ``pathlib.Path`` objects on Windows.
:param obj: the path to serialize.
:param kwargs: not used.
:return: a ``str``.
|
def default_path_serializer(obj: PurePath, **kwargs) -> str:
    """
    Serialize a ``pathlib.PurePath`` object to a ``str``, Posix-style.
    Posix-style strings are used as they can be used to create ``pathlib.Path``
    objects on both Posix and Windows systems, but Windows-style strings can
    only be used to create valid ``pathlib.Path`` objects on Windows.
    :param obj: the path to serialize.
    :param kwargs: not used.
    :return: a ``str``.
    """
    # Always emit forward slashes for cross-platform round-tripping.
    posix_form = obj.as_posix()
    return posix_form
|
(obj: pathlib.PurePath, **kwargs) -> str
|
44,037 |
jsons.deserializers.default_primitive
|
default_primitive_deserializer
|
Deserialize a primitive: it simply returns the given primitive.
:param obj: the value that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: ``obj``.
|
def default_primitive_deserializer(obj: object,
                                   cls: Optional[type] = None,
                                   **kwargs) -> object:
    """
    Deserialize a primitive: it simply returns the given primitive.
    :param obj: the value that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: ``obj``.
    """
    if obj is None or isinstance(obj, cls):
        return obj
    # The value does not already match cls: attempt a plain cast,
    # e.g. "42" -> 42.
    try:
        return cls(obj)
    except ValueError as err:
        raise DeserializationError(
            'Could not cast "{}" into "{}"'.format(obj, cls.__name__),
            obj, cls) from err
|
(obj: object, cls: Optional[type] = None, **kwargs) -> object
|
44,038 |
jsons.serializers.default_primitive
|
default_primitive_serializer
|
Serialize a primitive; simply return the given ``obj``.
:param obj: the primitive.
:param cls: the type of ``obj``.
:return: ``obj``.
|
def default_primitive_serializer(obj: object,
                                 cls: Optional[type] = None,
                                 **kwargs) -> object:
    """
    Serialize a primitive; simply return the given ``obj``.
    :param obj: the primitive.
    :param cls: the type of ``obj``.
    :return: ``obj``.
    """
    # Unwrap NewType aliases to their underlying supertype first.
    target = cls.__supertype__ if _is_newtype(cls) else cls
    if not target or obj is None or isinstance(obj, target):
        return obj
    # A mismatching primitive is cast to the requested type.
    try:
        return target(obj)
    except ValueError as err:
        raise SerializationError('Could not cast "{}" into "{}"'
                                 .format(obj, target.__name__)) from err
|
(obj: object, cls: Optional[type] = None, **kwargs) -> object
|
44,039 |
jsons.deserializers.default_string
|
default_string_deserializer
|
Deserialize a string. If the given ``obj`` can be parsed to a date, a
``datetime`` instance is returned.
:param obj: the string that is to be deserialized.
:param cls: not used.
:param kwargs: any keyword arguments.
:return: the deserialized obj.
|
def default_string_deserializer(obj: str,
                                cls: Optional[type] = None,
                                **kwargs) -> object:
    """
    Deserialize a string. If the given ``obj`` can be parsed to a date, a
    ``datetime`` instance is returned.
    :param obj: the string that is to be deserialized.
    :param cls: not used.
    :param kwargs: any keyword arguments.
    :return: the deserialized obj.
    """
    # An explicitly requested str (i.e. not merely inferred from a generic)
    # is returned verbatim, never coerced into a datetime.
    if cls is str and not kwargs.get('_inferred_cls'):
        return str(obj)
    # Otherwise prefer a datetime when the string parses as one.
    try:
        return load(obj, datetime, **kwargs)
    except DeserializationError:
        return default_primitive_deserializer(obj, str)
|
(obj: str, cls: Optional[type] = None, **kwargs) -> object
|
44,040 |
jsons.deserializers.default_time
|
default_time_deserializer
|
Deserialize a string with an RFC3339 pattern to a time instance.
:param obj: the string that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: a ``datetime.time`` instance.
|
def default_time_deserializer(obj: str,
                              cls: type = time,
                              **kwargs) -> time:
    """
    Deserialize a string with an RFC3339 pattern to a time instance.
    :param obj: the string that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: a ``datetime.time`` instance.
    """
    # Parse the full datetime, then keep only its time component.
    parsed = get_datetime_inst(obj, RFC3339_TIME_PATTERN)
    return parsed.time()
|
(obj: str, cls: type = <class 'datetime.time'>, **kwargs) -> datetime.time
|
44,041 |
jsons.serializers.default_time
|
default_time_serializer
|
Serialize the given time instance to a string. It uses the RFC3339
pattern.
:param obj: the time instance that is to be serialized.
:param kwargs: not used.
:return: ``time`` as an RFC3339 string.
|
def default_time_serializer(obj: date, **kwargs) -> str:
    """
    Serialize the given time instance to a string. It uses the RFC3339
    pattern.
    :param obj: the time instance that is to be serialized.
    :param kwargs: not used.
    :return: ``time`` as an RFC3339 string.
    """
    fork_inst = kwargs['fork_inst']
    return to_str(obj, False, fork_inst, RFC3339_TIME_PATTERN)
|
(obj: datetime.date, **kwargs) -> str
|
44,042 |
jsons.deserializers.default_timedelta
|
default_timedelta_deserializer
|
Deserialize a float to a timedelta instance.
:param obj: the float that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: a ``datetime.timedelta`` instance.
|
def default_timedelta_deserializer(obj: float,
                                   cls: type = float,
                                   **kwargs) -> timedelta:
    """
    Deserialize a float to a timedelta instance.
    :param obj: the float that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: a ``datetime.timedelta`` instance.
    """
    # The numeric value is interpreted as a duration in seconds.
    total_seconds = obj
    return timedelta(seconds=total_seconds)
|
(obj: float, cls: type = <class 'float'>, **kwargs) -> datetime.timedelta
|
44,043 |
jsons.serializers.default_timedelta
|
default_timedelta_serializer
|
Serialize the given timedelta instance to a float holding the total
seconds.
:param obj: the timedelta instance that is to be serialized.
:param kwargs: not used.
:return: ``timedelta`` as a float.
|
def default_timedelta_serializer(obj: timedelta, **kwargs) -> float:
    """
    Serialize the given timedelta instance to a float holding the total
    seconds.
    :param obj: the timedelta instance that is to be serialized.
    :param kwargs: not used.
    :return: ``timedelta`` as a float.
    """
    seconds = obj.total_seconds()
    return seconds
|
(obj: datetime.timedelta, **kwargs) -> float
|
44,044 |
jsons.deserializers.default_timezone
|
default_timezone_deserializer
|
Deserialize a dict to a timezone instance.
:param obj: the dict that is to be deserialized.
:param cls: not used.
:param kwargs: not used.
:return: a ``datetime.timezone`` instance.
|
def default_timezone_deserializer(obj: dict,
                                  cls: type = timezone,
                                  **kwargs) -> timezone:
    """
    Deserialize a dict to a timezone instance.
    :param obj: the dict that is to be deserialized.
    :param cls: not used.
    :param kwargs: not used.
    :return: a ``datetime.timezone`` instance.
    """
    # The serialized form holds a UTC offset (as a timedelta) and a name.
    offset = load(obj['offset'], timedelta)
    return timezone(offset, obj['name'])
|
(obj: dict, cls: type = <class 'datetime.timezone'>, **kwargs) -> datetime.timezone
|
44,045 |
jsons.serializers.default_timezone
|
default_timezone_serializer
|
Serialize the given timezone instance to a dict holding the total
seconds.
:param obj: the timezone instance that is to be serialized.
:param kwargs: not used.
:return: ``timezone`` as a dict.
|
def default_timezone_serializer(obj: timezone, **kwargs) -> dict:
    """
    Serialize the given timezone instance to a dict holding the total
    seconds.
    :param obj: the timezone instance that is to be serialized.
    :param kwargs: not used.
    :return: ``timezone`` as a dict.
    """
    # A fixed-offset timezone ignores the datetime argument, hence None.
    return {
        'name': obj.tzname(None),
        'offset': dump(obj.utcoffset(None), **kwargs)
    }
|
(obj: datetime.timezone, **kwargs) -> dict
|
44,046 |
jsons.deserializers.default_tuple
|
default_tuple_deserializer
|
Deserialize a (JSON) list into a tuple by deserializing all items of that
list.
:param obj: the tuple that needs deserializing.
:param cls: the type optionally with a generic (e.g. Tuple[str, int]).
:param kwargs: any keyword arguments.
:return: a deserialized tuple instance.
|
def default_tuple_deserializer(obj: list,
                               cls: type = None,
                               *,
                               key_transformer: Optional[Callable[[str], str]] = None,
                               **kwargs) -> object:
    """
    Deserialize a (JSON) list into a tuple by deserializing all items of that
    list.
    :param obj: the tuple that needs deserializing.
    :param cls: the type optionally with a generic (e.g. Tuple[str, int]).
    :param kwargs: any keyword arguments.
    :return: a deserialized tuple instance.
    """
    # Named tuples carry a _fields attribute and get dedicated treatment.
    if hasattr(cls, '_fields'):
        return default_namedtuple_deserializer(
            obj, cls, key_transformer=key_transformer, **kwargs)
    subtypes = get_args(cls)
    if not subtypes:
        # Untyped tuple: deserialize each element without a target type.
        return tuple(load(elem, **kwargs) for elem in obj)
    elem_types = getattr(cls, '__tuple_params__', subtypes)
    if tuple_with_ellipsis(cls):
        # Tuple[X, ...]: every element shares the one declared type.
        elem_types = [elem_types[0]] * len(obj)
    return tuple(load(elem, elem_types[i], **kwargs)
                 for i, elem in enumerate(obj))
|
(obj: list, cls: Optional[type] = None, *, key_transformer: Optional[Callable[[str], str]] = None, **kwargs) -> object
|
44,047 |
jsons.serializers.default_tuple
|
default_tuple_serializer
|
Serialize the given ``obj`` to a list of serialized objects.
:param obj: the tuple that is to be serialized.
:param cls: the type of the ``obj``.
:param kwargs: any keyword arguments that may be given to the serialization
process.
:return: a list of which all elements are serialized.
|
def default_tuple_serializer(obj: tuple,
                             cls: Optional[type] = None,
                             **kwargs) -> Union[list, dict]:
    """
    Serialize the given ``obj`` to a list of serialized objects.
    :param obj: the tuple that is to be serialized.
    :param cls: the type of the ``obj``.
    :param kwargs: any keyword arguments that may be given to the serialization
    process.
    :return: a list of which all elements are serialized.
    """
    if hasattr(obj, '_fields'):
        # Named tuples serialize to a dict rather than a list.
        return default_namedtuple_serializer(obj, **kwargs)
    effective_cls = cls
    if cls and tuple_with_ellipsis(cls):
        # Expand Tuple[X, ...] to Tuple[X, X, ...] matching the actual length.
        effective_cls = Tuple[(get_args(cls)[0],) * len(obj)]
    return default_iterable_serializer(obj, effective_cls, **kwargs)
|
(obj: tuple, cls: Optional[type] = None, **kwargs) -> Union[list, dict]
|
44,048 |
jsons.deserializers.default_union
|
default_union_deserializer
|
Deserialize an object to any matching type of the given union. The first
successful deserialization is returned.
:param obj: The object that needs deserializing.
:param cls: The Union type with a generic (e.g. Union[str, int]).
:param kwargs: Any keyword arguments that are passed through the
deserialization process.
:return: An object of the first type of the Union that could be
deserialized successfully.
|
def default_union_deserializer(obj: object, cls: Union, **kwargs) -> object:
    """
    Deserialize an object to any matching type of the given union. The first
    successful deserialization is returned.
    :param obj: The object that needs deserializing.
    :param cls: The Union type with a generic (e.g. Union[str, int]).
    :param kwargs: Any keyword arguments that are passed through the
    deserialization process.
    :return: An object of the first type of the Union that could be
    deserialized successfully.
    """
    sub_types = get_union_params(cls)
    for sub_type in sub_types:
        try:
            return load(obj, sub_type, **kwargs)
        except JsonsError:
            continue  # Try the next candidate type.
    # No candidate matched: report all the types that were attempted.
    args_msg = ', '.join(get_class_name(sub_type) for sub_type in sub_types)
    err_msg = ('Could not match the object of type "{}" to any type of '
               'the Union: {}'.format(type(obj).__name__, args_msg))
    raise DeserializationError(err_msg, obj, cls)
|
(obj: object, cls: Union, **kwargs) -> object
|
44,049 |
jsons.serializers.default_union
|
default_union_serializer
|
Serialize an object to any matching type of the given union. The first
successful serialization is returned.
:param obj: The object that is to be serialized.
:param cls: The Union type with a generic (e.g. Union[str, int]).
:param kwargs: Any keyword arguments that are passed through the
serialization process.
:return: An object of the first type of the Union that could be
serialized successfully.
|
def default_union_serializer(obj: object, cls: Union, **kwargs) -> object:
    """
    Serialize an object to any matching type of the given union. The first
    successful serialization is returned.
    :param obj: The object that is to be serialized.
    :param cls: The Union type with a generic (e.g. Union[str, int]).
    :param kwargs: Any keyword arguments that are passed through the
    serialization process.
    :return: An object of the first type of the Union that could be
    serialized successfully.
    """
    sub_types = get_union_params(cls)
    # Cater for Optional[...]/Union[None, ...] first to avoid blindly
    # string-ifying None in later serializers.
    if obj is None and NoneType in sub_types:
        return obj
    for sub_type in sub_types:
        try:
            return dump(obj, sub_type, **kwargs)
        except JsonsError:
            continue  # Try the next candidate type.
    # No candidate worked: report all the types that were attempted.
    args_msg = ', '.join(get_class_name(sub_type) for sub_type in sub_types)
    err_msg = ('Could not match the object of type "{}" to any type of '
               'the Union: {}'.format(type(obj), args_msg))
    raise SerializationError(err_msg)
|
(obj: object, cls: Union, **kwargs) -> object
|
44,050 |
jsons.deserializers.default_uuid
|
default_uuid_deserializer
|
Deserialize a UUID. Expected format for string is specified in RFC 4122.
e.g. '12345678-1234-1234-1234-123456789abc'
:param obj: the string that is to be deserialized.
:param cls: not used.
:param kwargs: any keyword arguments.
:return: the deserialized obj.
|
def default_uuid_deserializer(obj: str,
                              cls: Optional[type] = None,
                              **kwargs) -> UUID:
    """
    Deserialize a UUID. Expected format for string is specified in RFC 4122.
    e.g. '12345678-1234-1234-1234-123456789abc'
    :param obj: the string that is to be deserialized.
    :param cls: not used.
    :param kwargs: any keyword arguments.
    :return: the deserialized obj.
    """
    # UUID() itself validates the RFC 4122 format.
    return UUID(hex=obj)
|
(obj: str, cls: Optional[type] = None, **kwargs) -> uuid.UUID
|
44,051 |
jsons.serializers.default_uuid
|
default_uuid_serializer
|
Serialize the given obj. By default, it is serialized as specified in RFC 4122.
e.g. '12345678-1234-1234-1234-123456789abc'
:param obj: an instance of an uuid.UUID.
:param kwargs: any keyword arguments.
:return: ``obj`` serialized as a string.
|
def default_uuid_serializer(obj: UUID, **kwargs) -> str:
    """
    Serialize the given obj. By default, it is serialized as specified in RFC 4122.
    e.g. '12345678-1234-1234-1234-123456789abc'
    :param obj: an instance of an uuid.UUID.
    :param kwargs: any keyword arguments.
    :return: ``obj`` serialized as a string.
    """
    # str(UUID) yields the canonical hyphenated hex form.
    return '{}'.format(obj)
|
(obj: uuid.UUID, **kwargs) -> str
|
44,052 |
jsons.deserializers.default_zone_info
|
default_zone_info_deserializer
|
Deserialize a ZoneInfo.
:param obj: a serialized ZoneInfo object.
:return: an instance of ZoneInfo.
|
def default_zone_info_deserializer(obj: ZoneInfo, *_, **__) -> ZoneInfo:
    """
    Deserialize a ZoneInfo.
    :param obj: a serialized ZoneInfo object.
    :return: an instance of ZoneInfo.
    """
    # The serialized form is a dict with the IANA key under 'key'.
    key = obj['key']
    return ZoneInfo(key)
|
(obj: zoneinfo.ZoneInfo, *_, **__) -> zoneinfo.ZoneInfo
|
44,053 |
jsons.serializers.default_zone_info
|
default_zone_info_serializer
|
Serialize a ZoneInfo object.
:return: a serialized ZoneInfo instance.
|
def default_zone_info_serializer(obj: ZoneInfo, *_, **__) -> Dict[str, str]:
    """
    Serialize a ZoneInfo object.
    :return: a serialized ZoneInfo instance.
    """
    # Only the IANA key is needed to reconstruct the zone.
    return dict(key=obj.key)
|
(obj: zoneinfo.ZoneInfo, *_, **__) -> Dict[str, str]
|
44,055 |
jsons._dump_impl
|
dump
|
Serialize the given ``obj`` to a JSON equivalent type (e.g. dict, list,
int, ...).
The way objects are serialized can be finetuned by setting serializer
functions for the specific type using ``set_serializer``.
You can also provide ``cls`` to specify that ``obj`` needs to be serialized
as if it was of type ``cls`` (meaning to only take into account attributes
from ``cls``). The type ``cls`` must have a ``__slots__`` defined. Any type
will do, but in most cases you may want ``cls`` to be a base class of
``obj``.
:param obj: a Python instance of any sort.
:param cls: if given, ``obj`` will be dumped as if it is of type ``type``.
:param strict: a bool to determine if the serializer should be strict
(i.e. only dumping stuff that is known to ``cls``).
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:param kwargs: the keyword args are passed on to the serializer function.
:return: the serialized obj as a JSON type.
|
def dump(obj: object,
         cls: Optional[type] = None,
         *,
         strict: bool = False,
         fork_inst: Optional[type] = StateHolder,
         **kwargs) -> object:
    """
    Serialize the given ``obj`` to a JSON equivalent type (e.g. dict, list,
    int, ...).
    The way objects are serialized can be finetuned by setting serializer
    functions for the specific type using ``set_serializer``.
    You can also provide ``cls`` to specify that ``obj`` needs to be serialized
    as if it was of type ``cls`` (meaning to only take into account attributes
    from ``cls``). The type ``cls`` must have a ``__slots__`` defined. Any type
    will do, but in most cases you may want ``cls`` to be a base class of
    ``obj``.
    :param obj: a Python instance of any sort.
    :param cls: if given, ``obj`` will be dumped as if it is of type ``type``.
    :param strict: a bool to determine if the serializer should be strict
    (i.e. only dumping stuff that is known to ``cls``).
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :param kwargs: the keyword args are passed on to the serializer function.
    :return: the serialized obj as a JSON type.
    """
    target_cls = cls or obj.__class__
    serializer = get_serializer(target_cls, fork_inst)
    # Is this the initial call or a nested one?
    initial = kwargs.get('_initial', True)
    passed_kwargs = dict(fork_inst=fork_inst, _initial=False, strict=strict)
    # Caller-supplied kwargs deliberately take precedence over the defaults.
    passed_kwargs.update(kwargs)
    announce_class(target_cls, fork_inst=fork_inst)
    return _do_dump(obj, serializer, cls, initial, passed_kwargs)
|
(obj: object, cls: Optional[type] = None, *, strict: bool = False, fork_inst: Optional[type] = <class 'jsons._common_impl.StateHolder'>, **kwargs) -> object
|
44,056 |
jsons._dump_impl
|
dumpb
|
Extend ``json.dumps``, allowing any Python instance to be dumped to bytes.
Any extra (keyword) arguments are passed on to ``json.dumps``.
:param obj: the object that is to be dumped to bytes.
:param encoding: the encoding that is used to transform to bytes.
:param jdkwargs: extra keyword arguments for ``json.dumps`` (not
``jsons.dumps``!)
:param args: extra arguments for ``jsons.dumps``.
:param kwargs: Keyword arguments that are passed on through the
serialization process.
passed on to the serializer function.
:return: ``obj`` as ``bytes``.
|
def dumpb(obj: object,
          encoding: str = 'utf-8',
          jdkwargs: Optional[Dict[str, object]] = None,
          *args,
          **kwargs) -> bytes:
    """
    Extend ``json.dumps``, allowing any Python instance to be dumped to bytes.
    Any extra (keyword) arguments are passed on to ``json.dumps``.
    :param obj: the object that is to be dumped to bytes.
    :param encoding: the encoding that is used to transform to bytes.
    :param jdkwargs: extra keyword arguments for ``json.dumps`` (not
    ``jsons.dumps``!)
    :param args: extra arguments for ``jsons.dumps``.
    :param kwargs: Keyword arguments that are passed on through the
    serialization process.
    passed on to the serializer function.
    :return: ``obj`` as ``bytes``.
    """
    serialized = dump(obj, *args, **kwargs)
    text = json.dumps(serialized, **(jdkwargs or {}))
    return text.encode(encoding=encoding)
|
(obj: object, encoding: str = 'utf-8', jdkwargs: Optional[Dict[str, object]] = None, *args, **kwargs) -> bytes
|
44,057 |
jsons._dump_impl
|
dumps
|
Extend ``json.dumps``, allowing any Python instance to be dumped to a
string. Any extra (keyword) arguments are passed on to ``json.dumps``.
:param obj: the object that is to be dumped to a string.
:param jdkwargs: extra keyword arguments for ``json.dumps`` (not
``jsons.dumps``!)
:param args: extra arguments for ``jsons.dumps``.
:param kwargs: Keyword arguments that are passed on through the
serialization process.
passed on to the serializer function.
:return: ``obj`` as a ``str``.
|
def dumps(obj: object,
          jdkwargs: Optional[Dict[str, object]] = None,
          *args,
          **kwargs) -> str:
    """
    Extend ``json.dumps``, allowing any Python instance to be dumped to a
    string. Any extra (keyword) arguments are passed on to ``json.dumps``.
    :param obj: the object that is to be dumped to a string.
    :param jdkwargs: extra keyword arguments for ``json.dumps`` (not
    ``jsons.dumps``!)
    :param args: extra arguments for ``jsons.dumps``.
    :param kwargs: Keyword arguments that are passed on through the
    serialization process.
    passed on to the serializer function.
    :return: ``obj`` as a ``str``.
    """
    # Serialize to JSON-compatible types first, then let json render it.
    serialized = dump(obj, *args, **kwargs)
    return json.dumps(serialized, **(jdkwargs or {}))
|
(obj: object, jdkwargs: Optional[Dict[str, object]] = None, *args, **kwargs) -> str
|
44,059 |
jsons._fork_impl
|
fork
|
Fork from the given ``StateHolder`` to create a separate "branch" of
serializers and deserializers.
:param fork_inst: The ``StateHolder`` on which the new fork is based.
:param name: The ``__name__`` of the new ``type``.
:return: A "fork inst" that can be used to separately store
(de)serializers from the regular ``StateHolder``.
|
def fork(
        fork_inst: Type[T] = StateHolder,
        name: Optional[str] = None) -> Type[T]:
    """Create a separate "branch" of serializers and deserializers.

    The returned type is a subclass of ``fork_inst`` whose registries are
    shallow-copied, so (de)serializers registered on the fork do not leak
    back into the parent.

    :param fork_inst: The ``StateHolder`` on which the new fork is based.
    :param name: The ``__name__`` of the new ``type``.
    :return: A "fork inst" that can be used to separately store
    (de)serializers from the regular ``StateHolder``.
    """
    fork_inst._fork_counter += 1
    if name:
        new_name = name
    else:
        new_name = '{}_fork{}'.format(
            get_class_name(fork_inst),
            fork_inst._fork_counter
        )
    child = type(new_name, (fork_inst,), {})
    # Shallow-copy every registry so parent and child evolve independently.
    child._classes_serializers = fork_inst._classes_serializers.copy()
    child._classes_deserializers = fork_inst._classes_deserializers.copy()
    child._serializers = fork_inst._serializers.copy()
    child._deserializers = fork_inst._deserializers.copy()
    child._fork_counter = 0
    child._suppress_warnings = fork_inst._suppress_warnings
    child._suppressed_warnings = fork_inst._suppressed_warnings.copy()
    return child
|
(fork_inst: Type[~T] = <class 'jsons._common_impl.StateHolder'>, name: Optional[str] = None) -> Type[~T]
|
44,061 |
jsons._load_impl
|
load
|
Deserialize the given ``json_obj`` to an object of type ``cls``. If the
contents of ``json_obj`` do not match the interface of ``cls``, a
DeserializationError is raised.
If ``json_obj`` contains a value that belongs to a custom class, there must
be a type hint present for that value in ``cls`` to let this function know
what type it should deserialize that value to.
**Example**:
>>> from typing import List
>>> import jsons
>>> class Person:
... # No type hint required for name
... def __init__(self, name):
... self.name = name
>>> class Family:
... # Person is a custom class, use a type hint
... def __init__(self, persons: List[Person]):
... self.persons = persons
>>> loaded = jsons.load({'persons': [{'name': 'John'}]}, Family)
>>> loaded.persons[0].name
'John'
If no ``cls`` is given, a dict is simply returned, but contained values
(e.g. serialized ``datetime`` values) are still deserialized.
If `strict` mode is off and the type of `json_obj` exactly matches `cls`
then `json_obj` is simply returned.
:param json_obj: the dict that is to be deserialized.
:param cls: a matching class of which an instance should be returned.
:param strict: a bool to determine if the deserializer should be strict
(i.e. fail on a partially deserialized `json_obj` or on `None`).
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:param attr_getters: a ``dict`` that may hold callables that return values
for certain attributes.
:param kwargs: the keyword args are passed on to the deserializer function.
:return: an instance of ``cls`` if given, a dict otherwise.
|
def load(
        json_obj: object,
        cls: Optional[Type[T]] = None,
        *,
        strict: bool = False,
        fork_inst: Optional[type] = StateHolder,
        attr_getters: Optional[Dict[str, Callable[[], object]]] = None,
        **kwargs) -> T:
    """Deserialize ``json_obj`` into an instance of ``cls``.

    A ``DeserializationError`` is raised when the contents of ``json_obj`` do
    not match the interface of ``cls``. Values of custom classes nested in
    ``json_obj`` require a type hint in ``cls`` so this function knows what to
    deserialize them to:

    >>> from typing import List
    >>> import jsons
    >>> class Person:
    ...     def __init__(self, name):  # No type hint required for name
    ...         self.name = name
    >>> class Family:
    ...     def __init__(self, persons: List[Person]):  # Custom class: hint
    ...         self.persons = persons
    >>> loaded = jsons.load({'persons': [{'name': 'John'}]}, Family)
    >>> loaded.persons[0].name
    'John'

    Without ``cls`` a dict is returned, but contained values (e.g. serialized
    ``datetime`` values) are still deserialized. If `strict` mode is off and
    the type of `json_obj` exactly matches `cls` then `json_obj` is simply
    returned.

    :param json_obj: the dict that is to be deserialized.
    :param cls: a matching class of which an instance should be returned.
    :param strict: a bool to determine if the deserializer should be strict
    (i.e. fail on a partially deserialized `json_obj` or on `None`).
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :param attr_getters: a ``dict`` that may hold callables that return values
    for certain attributes.
    :param kwargs: the keyword args are passed on to the deserializer function.
    :return: an instance of ``cls`` if given, a dict otherwise.
    """
    _check_for_none(json_obj, cls)
    if _should_skip(json_obj, cls, strict):
        # Nothing to deserialize; still validate before returning as-is.
        validate(json_obj, cls, fork_inst)
        return json_obj
    if isinstance(cls, str):
        cls = get_cls_from_str(cls, json_obj, fork_inst)
    requested_cls = cls
    cls, meta_hints = _check_and_get_cls_and_meta_hints(
        json_obj, cls, fork_inst, kwargs.get('_inferred_cls', False))
    deserializer = get_deserializer(cls, fork_inst)
    # Distinguish the user's initial call from internal recursive calls.
    initial_call = kwargs.get('_initial', True)
    passed_kwargs = {
        'meta_hints': meta_hints,  # Placed first so ``kwargs`` can override.
        **kwargs,
        'strict': strict,
        'fork_inst': fork_inst,
        'attr_getters': attr_getters,
        '_initial': False,
        '_inferred_cls': cls is not requested_cls,
    }
    return _do_load(json_obj, deserializer, cls, initial_call, **passed_kwargs)
|
(json_obj: object, cls: Optional[Type[~T]] = None, *, strict: bool = False, fork_inst: Optional[type] = <class 'jsons._common_impl.StateHolder'>, attr_getters: Optional[Dict[str, Callable[[], object]]] = None, **kwargs) -> ~T
|
44,062 |
jsons._load_impl
|
loadb
|
Extend ``json.loads``, allowing bytes to be loaded into a dict or a Python
instance of type ``cls``. Any extra (keyword) arguments are passed on to
``json.loads``.
:param bytes_: the bytes that are to be loaded.
:param cls: a matching class of which an instance should be returned.
:param encoding: the encoding that is used to transform from bytes.
:param jdkwargs: extra keyword arguments for ``json.loads`` (not
``jsons.loads``!)
:param args: extra arguments for ``jsons.loads``.
:param kwargs: extra keyword arguments for ``jsons.loads``.
:return: a JSON-type object (dict, str, list, etc.) or an instance of type
``cls`` if given.
|
def loadb(
        bytes_: bytes,
        cls: Optional[Type[T]] = None,
        encoding: str = 'utf-8',
        jdkwargs: Optional[Dict[str, object]] = None,
        *args,
        **kwargs) -> T:
    """Load ``bytes`` into a dict or an instance of ``cls``.

    The bytes are first decoded with ``encoding`` and then handed over to
    ``jsons.loads``; any extra (keyword) arguments are passed on to
    ``json.loads``.

    :param bytes_: the bytes that are to be loaded.
    :param cls: a matching class of which an instance should be returned.
    :param encoding: the encoding that is used to transform from bytes.
    :param jdkwargs: extra keyword arguments for ``json.loads`` (not
    ``jsons.loads``!)
    :param args: extra arguments for ``jsons.loads``.
    :param kwargs: extra keyword arguments for ``jsons.loads``.
    :return: a JSON-type object (dict, str, list, etc.) or an instance of type
    ``cls`` if given.
    """
    if not isinstance(bytes_, bytes):
        raise DeserializationError('loadb accepts bytes only, "{}" was given'
                                   .format(type(bytes_)), bytes_, cls)
    json_kwargs = jdkwargs or {}
    decoded = bytes_.decode(encoding=encoding)
    return loads(decoded, cls, jdkwargs=json_kwargs, *args, **kwargs)
|
(bytes_: bytes, cls: Optional[Type[~T]] = None, encoding: str = 'utf-8', jdkwargs: Optional[Dict[str, object]] = None, *args, **kwargs) -> ~T
|
44,063 |
jsons._load_impl
|
loads
|
Extend ``json.loads``, allowing a string to be loaded into a dict or a
Python instance of type ``cls``. Any extra (keyword) arguments are passed
on to ``json.loads``.
:param str_: the string that is to be loaded.
:param cls: a matching class of which an instance should be returned.
:param jdkwargs: extra keyword arguments for ``json.loads`` (not
``jsons.loads``!)
:param args: extra arguments for ``jsons.loads``.
:param kwargs: extra keyword arguments for ``jsons.loads``.
:return: a JSON-type object (dict, str, list, etc.) or an instance of type
``cls`` if given.
|
def loads(
        str_: str,
        cls: Optional[Type[T]] = None,
        jdkwargs: Optional[Dict[str, object]] = None,
        *args,
        **kwargs) -> T:
    """Load a JSON string into a dict or an instance of ``cls``.

    This is ``json.loads`` followed by ``jsons.load``; extra (keyword)
    arguments are forwarded to the respective step.

    :param str_: the string that is to be loaded.
    :param cls: a matching class of which an instance should be returned.
    :param jdkwargs: extra keyword arguments for ``json.loads`` (not
    ``jsons.loads``!)
    :param args: extra arguments for ``jsons.loads``.
    :param kwargs: extra keyword arguments for ``jsons.loads``.
    :return: a JSON-type object (dict, str, list, etc.) or an instance of type
    ``cls`` if given.
    """
    json_kwargs = jdkwargs or {}
    try:
        decoded = json.loads(str_, **json_kwargs)
    except JSONDecodeError as err:
        raise DecodeError('Could not load a dict; the given string is not '
                          'valid JSON.', str_, cls, err) from err
    return load(decoded, cls, *args, **kwargs)
|
(str_: str, cls: Optional[Type[~T]] = None, jdkwargs: Optional[Dict[str, object]] = None, *args, **kwargs) -> ~T
|
44,066 |
jsons._lizers_impl
|
set_deserializer
|
Set a deserializer function for the given type. You may override the
default behavior of ``jsons.load`` by setting a custom deserializer.
The ``func`` argument must take two arguments (i.e. the dict containing the
serialized values and the type that the values should be deserialized into)
and also a ``kwargs`` parameter. For example:
>>> def func(dict_, cls, **kwargs):
... return cls()
You may ask additional arguments between ``cls`` and ``kwargs``.
:param func: the deserializer function.
:param cls: the type or sequence of types this serializer can handle.
:param high_prio: determines the order in which is looked for the callable.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:return: None.
|
def set_deserializer(
        func: callable,
        cls: Union[type, Sequence[type]],
        high_prio: bool = True,
        fork_inst: type = StateHolder) -> None:
    """
    Set a deserializer function for the given type. You may override the
    default behavior of ``jsons.load`` by setting a custom deserializer.

    The ``func`` argument must take two arguments (i.e. the dict containing the
    serialized values and the type that the values should be deserialized into)
    and also a ``kwargs`` parameter. For example:

    >>> def func(dict_, cls, **kwargs):
    ...     return cls()

    You may ask additional arguments between ``cls`` and ``kwargs``.

    :param func: the deserializer function.
    :param cls: the type or sequence of types this deserializer can handle.
    :param high_prio: determines the order in which is looked for the callable.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :return: None.
    """
    if isinstance(cls, Sequence):
        # Register the same deserializer once per type in the sequence.
        for cls_ in cls:
            set_deserializer(func, cls_, high_prio, fork_inst)
    elif cls:
        # High-priority deserializers are consulted first during lookup.
        index = 0 if high_prio else len(fork_inst._classes_deserializers)
        fork_inst._classes_deserializers.insert(index, cls)
        cls_name = get_class_name(cls, fully_qualified=True)
        fork_inst._deserializers[cls_name.lower()] = func
    else:
        # ``cls`` is None/falsy: register the deserializer for ``None`` values.
        fork_inst._deserializers['nonetype'] = func
|
(func: <built-in function callable>, cls: Union[type, Sequence[type]], high_prio: bool = True, fork_inst: type = <class 'jsons._common_impl.StateHolder'>) -> NoneType
|
44,067 |
jsons._lizers_impl
|
set_serializer
|
Set a serializer function for the given type. You may override the default
behavior of ``jsons.dump`` by setting a custom serializer.
The ``func`` argument must take one argument (i.e. the object that is to be
serialized) and also a ``kwargs`` parameter. For example:
>>> def func(obj, **kwargs):
... return dict()
You may ask additional arguments between ``cls`` and ``kwargs``.
:param func: the serializer function.
:param cls: the type or sequence of types this serializer can handle.
:param high_prio: determines the order in which is looked for the callable.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:return: None.
|
def set_serializer(
        func: callable,
        cls: Union[type, Sequence[type]],
        high_prio: bool = True,
        fork_inst: type = StateHolder) -> None:
    """
    Set a serializer function for the given type. You may override the default
    behavior of ``jsons.dump`` by setting a custom serializer.

    The ``func`` argument must take one argument (i.e. the object that is to be
    serialized) and also a ``kwargs`` parameter. For example:

    >>> def func(obj, **kwargs):
    ...     return dict()

    You may ask additional arguments between ``cls`` and ``kwargs``.

    :param func: the serializer function.
    :param cls: the type or sequence of types this serializer can handle.
    :param high_prio: determines the order in which is looked for the callable.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :return: None.
    """
    if isinstance(cls, Sequence):
        # Register the same serializer once per type in the sequence.
        for cls_ in cls:
            set_serializer(func, cls_, high_prio, fork_inst)
    elif cls:
        # High-priority serializers are consulted first during lookup.
        index = 0 if high_prio else len(fork_inst._classes_serializers)
        fork_inst._classes_serializers.insert(index, cls)
        cls_name = get_class_name(cls, fully_qualified=True)
        fork_inst._serializers[cls_name.lower()] = func
    else:
        # ``cls`` is None/falsy: register the serializer for ``None`` values.
        fork_inst._serializers['nonetype'] = func
|
(func: <built-in function callable>, cls: Union[type, Sequence[type]], high_prio: bool = True, fork_inst: type = <class 'jsons._common_impl.StateHolder'>) -> NoneType
|
44,068 |
jsons._validation
|
set_validator
|
Set a validator function for the given ``cls``. The function should accept
an instance of the type it should validate and must return ``False`` or
raise any exception in case of a validation failure.
:param func: the function that takes an instance of type ``cls`` and
returns a bool (``True`` if the validation was successful).
:param cls: the type or types that ``func`` is able to validate.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:return: None.
|
def set_validator(
        func: Callable[[object], bool],
        cls: Union[type, Sequence[type]],
        *,
        fork_inst: type = StateHolder) -> None:
    """Register a validator function for ``cls``.

    The function receives an instance of the type it validates and must
    return ``False`` or raise any exception to signal a validation failure.

    :param func: the function that takes an instance of type ``cls`` and
    returns a bool (``True`` if the validation was successful).
    :param cls: the type or types that ``func`` is able to validate.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :return: None.
    """
    if isinstance(cls, Sequence):
        # One registration per type in the sequence.
        for single_cls in cls:
            set_validator(func, cls=single_cls, fork_inst=fork_inst)
        return
    cls_key = get_class_name(cls, fully_qualified=True).lower()
    fork_inst._validators[cls_key] = func
    fork_inst._classes_validators.append(cls)
|
(func: Callable[[object], bool], cls: Union[type, Sequence[type]], *, fork_inst: type = <class 'jsons._common_impl.StateHolder'>) -> NoneType
|
44,070 |
jsons._extra_impl
|
suppress_warning
|
Suppress a specific warning that corresponds to the given code (see the
warning).
:param code: the code of the warning that is to be suppressed.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:return: None.
|
def suppress_warning(
        code: str,
        fork_inst: Optional[type] = StateHolder):
    """Silence the specific warning identified by ``code`` (see the warning).

    :param code: the code of the warning that is to be suppressed.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :return: None.
    """
    # ``|=`` both extends the set and rebinds the attribute on ``fork_inst``.
    fork_inst._suppressed_warnings |= {code}
|
(code: str, fork_inst: Optional[type] = <class 'jsons._common_impl.StateHolder'>)
|
44,071 |
jsons._extra_impl
|
suppress_warnings
|
Suppress (or stop suppressing) warnings altogether.
:param do_suppress: if ``True``, warnings will be suppressed from now on.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:return: None.
|
def suppress_warnings(
        do_suppress: Optional[bool] = True,
        fork_inst: Optional[type] = StateHolder):
    """Turn warning suppression on (or off) altogether for ``fork_inst``.

    :param do_suppress: if ``True``, warnings will be suppressed from now on.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :return: None.
    """
    fork_inst._suppress_warnings = do_suppress
|
(do_suppress: Optional[bool] = True, fork_inst: Optional[type] = <class 'jsons._common_impl.StateHolder'>)
|
44,075 |
jsons._transform_impl
|
transform
|
Transform the given ``obj`` to an instance of ``cls``.
:param obj: the object that is to be transformed into a type of ``cls``.
:param cls: the type that ``obj`` is to be transformed into.
:param mapper: a callable that takes the dumped dict and returns a mapped
dict right before it is loaded into ``cls``.
:param dump_cls: the ``cls`` parameter that is given to ``dump``.
:param dump_args: the ``args`` parameter that is given to ``dump``.
:param dump_kwargs: the ``kwargs`` parameter that is given to ``dump``.
:param kwargs: any keyword arguments that are given to ``load``.
:return: an instance of ``cls``.
|
def transform(
        obj: object,
        cls: Type[T],
        *,
        mapper: Callable[[Dict[str, Any]], Dict[str, Any]] = None,
        dump_cls: type = None,
        dump_args: List[Any] = None,
        dump_kwargs: List[Dict[str, Any]] = None,
        **kwargs) -> T:
    """Turn ``obj`` into an instance of ``cls`` via dump -> map -> load.

    :param obj: the object that is to be transformed into a type of ``cls``.
    :param cls: the type that ``obj`` is to be transformed into.
    :param mapper: a callable that takes the dumped dict and returns a mapped
    dict right before it is loaded into ``cls``.
    :param dump_cls: the ``cls`` parameter that is given to ``dump``.
    :param dump_args: the ``args`` parameter that is given to ``dump``.
    :param dump_kwargs: the ``kwargs`` parameter that is given to ``dump``.
    :param kwargs: any keyword arguments that are given to ``load``.
    :return: an instance of ``cls``.
    """
    dumped = dump(obj, dump_cls, *(dump_args or []), **(dump_kwargs or {}))
    if mapper:
        # Let the caller reshape the intermediate dict before loading.
        dumped = mapper(dumped)
    return load(dumped, cls, **kwargs)
|
(obj: object, cls: Type[~T], *, mapper: Optional[Callable[[Dict[str, Any]], Dict[str, Any]]] = None, dump_cls: Optional[type] = None, dump_args: Optional[List[Any]] = None, dump_kwargs: Optional[List[Dict[str, Any]]] = None, **kwargs) -> ~T
|
44,076 |
jsons._validation
|
validate
|
Validate the given ``obj`` with the validator that was registered for
``cls``. Raises a ``ValidationError`` if the validation failed.
:param obj: the object that is to be validated.
:param cls: the type of which the validator function was registered.
:param fork_inst: if given, it uses this fork of ``JsonSerializable``.
:return: None.
|
def validate(
        obj: object,
        cls: type,
        fork_inst: type = StateHolder) -> None:
    """Run the validator registered for ``cls`` against ``obj``.

    Raises a ``ValidationError`` when the validator returns a falsy value or
    raises; no-op when no validator was registered for ``cls``.

    :param obj: the object that is to be validated.
    :param cls: the type of which the validator function was registered.
    :param fork_inst: if given, it uses this fork of ``JsonSerializable``.
    :return: None.
    """
    validator = get_validator(cls, fork_inst)
    if not validator:
        return
    passed = True
    message = 'Validation failed.'
    try:
        passed = validator(obj)
    except Exception as err:
        # Prefer the validator's own message when it provides one.
        if err.args:
            message = err.args[0]
        passed = False
    if not passed:
        raise ValidationError(message)
|
(obj: object, cls: type, fork_inst: type = <class 'jsons._common_impl.StateHolder'>) -> NoneType
|
44,077 |
dataclass_array.array_dataclass
|
DataclassArray
|
Dataclass which behaves like an array.
Usage:
```python
class Square(DataclassArray):
pos: f32['*shape 2']
scale: f32['*shape']
name: str
# Create 3 squares batched
p = Square(
pos=[[x0, y0], [x1, y1], [x2, y2]],
scale=[scale0, scale1, scale2],
name='my_square',
)
p.shape == (3,)
p.pos.shape == (3, 2)
p[0] == Square(pos=[x0, y0], scale=scale0)
p = p.reshape((3, 1)) # Reshape the inner-shape
p.shape == (3, 1)
p.pos.shape == (3, 1, 2)
p.name == 'my_square'
```
`DataclassArray` has 2 types of fields:
* Array fields: Fields batched like numpy arrays, with reshape, slicing,...
(`pos` and `scale` in the above example).
* Static fields: Other non-numpy field. Are not modified by reshaping,... (
`name` in the above example).
Static fields are also ignored in `jax.tree_map`.
`DataclassArray` detect array fields if either:
* The typing annotation is a `etils.array_types` annotation (in which
case shape/dtype are automatically inferred from the typing annotation)
Example: `x: f32[..., 3]`
* The typing annotation is another `dca.DataclassArray` (in which case
`my_dataclass.field.shape == my_dataclass.shape`)
Example: `x: MyDataclass`
* The field is explicitly defined in `dca.array_field`, in which case
the typing annotation is ignored.
Example: `x: Any = dca.field(shape=(), dtype=np.int64)`
Field which do not satisfy any of the above conditions are static (including
field annotated with `field: np.ndarray` or similar).
|
class DataclassArray(metaclass=MetaDataclassArray):
"""Dataclass which behaves like an array.
Usage:
```python
class Square(DataclassArray):
pos: f32['*shape 2']
scale: f32['*shape']
name: str
# Create 3 squares batched
p = Square(
pos=[[x0, y0], [x1, y1], [x2, y2]],
scale=[scale0, scale1, scale2],
name='my_square',
)
p.shape == (3,)
p.pos.shape == (3, 2)
p[0] == Square(pos=[x0, y0], scale=scale0)
p = p.reshape((3, 1)) # Reshape the inner-shape
p.shape == (3, 1)
p.pos.shape == (3, 1, 2)
p.name == 'my_square'
```
`DataclassArray` has 2 types of fields:
* Array fields: Fields batched like numpy arrays, with reshape, slicing,...
(`pos` and `scale` in the above example).
* Static fields: Other non-numpy field. Are not modified by reshaping,... (
`name` in the above example).
Static fields are also ignored in `jax.tree_map`.
`DataclassArray` detect array fields if either:
* The typing annotation is a `etils.array_types` annotation (in which
case shape/dtype are automatically infered from the typing annotation)
Example: `x: f32[..., 3]`
* The typing annotation is another `dca.DataclassArray` (in which case
`my_dataclass.field.shape == my_dataclass.shape`)
Example: `x: MyDataclass`
* The field is explicitly defined in `dca.array_field`, in which case
the typing annotation is ignored.
Example: `x: Any = dca.field(shape=(), dtype=np.int64)`
Field which do not satisfy any of the above conditions are static (including
field annotated with `field: np.ndarray` or similar).
"""
# Child class inherit the default params by default, but can also
# overwrite them.
__dca_params__: ClassVar[DataclassParams] = DataclassParams()
# TODO(epot): Could be removed with py3.10 and using `kw_only=True`
# Fields defined here will be forwarded with `.replace`
# TODO(py39): Replace Set -> set
__dca_non_init_fields__: ClassVar[Set[str]] = set()
_shape: Shape
_xnp: enp.NpModule
def __init_subclass__(
    cls,
    frozen=True,
    **kwargs,
):
    """One-time subclass setup: turn each subclass into a frozen dataclass.

    Raises:
        ValueError: if a subclass asks for `frozen=False` (not supported).
    """
    super().__init_subclass__(**kwargs)
    if not frozen:
        raise ValueError(f'{cls} cannot be `frozen=False`.')
    # Apply dataclass (in-place)
    if not typing.TYPE_CHECKING:
        # TODO(b/227290126): Create pytype issues
        dataclasses.dataclass(frozen=True)(cls)
    # TODO(epot): Could have smart __repr__ which display types if array have
    # too many values (maybe directly in `edc.field(repr=...)`).
    edc.dataclass(kw_only=True, repr=True, auto_cast=False)(cls)
    # Tree registration is deferred to `__post_init__` (jax/torch may not be
    # importable at class-definition time).
    cls._dca_jax_tree_registered = False
    cls._dca_torch_tree_registered = False
    # Typing annotations have to be lazily evaluated (to support
    # `from __future__ import annotations` and forward reference)
    # To avoid costly `typing.get_type_hints` which perform `eval` and `str`
    # convertions, we cache the type annotations here.
    # `None` means "not computed yet"; filled in by `_init_cls` on first use.
    cls._dca_fields_metadata: Optional[dict[str, _ArrayFieldMetadata]] = None
    # Normalize the `cls.__dca_non_init_fields__`
    # TODO(epot): Support inheritance if the parents also define
    # `__dca_non_init_fields__` (fields should be merged from `.mro()`)
    cls.__dca_non_init_fields__ = set(cls.__dca_non_init_fields__)
if typing.TYPE_CHECKING:
    # Static-analysis-only stub: the real `__init__` is generated by
    # `dataclasses.dataclass` in `__init_subclass__`.
    # TODO(b/242839979): pytype do not support PEP 681 -- Data Class Transforms
    def __init__(self, **kwargs):
        pass
def __post_init__(self) -> None:
    """Validate and normalize inputs.

    Runs after the generated `__init__`: casts/broadcasts all array fields
    and caches the resulting batch `_shape` and numpy backend `_xnp`.
    """
    cls = type(self)

    # First time, we perform additional check & updates
    if cls._dca_fields_metadata is None:  # pylint: disable=protected-access
        _init_cls(self)

    # Register the tree_map here instead of `__init_subclass__` as `jax` may
    # not have been imported yet during import.
    if enp.lazy.has_jax and not cls._dca_jax_tree_registered:  # pylint: disable=protected-access
        enp.lazy.jax.tree_util.register_pytree_node_class(cls)
        cls._dca_jax_tree_registered = True  # pylint: disable=protected-access
    if enp.lazy.has_torch and not cls._dca_torch_tree_registered:  # pylint: disable=protected-access
        # Note: Torch is updating it's tree API to make it public and use `optree`
        # as backend: https://github.com/pytorch/pytorch/issues/65761
        enp.lazy.torch.utils._pytree._register_pytree_node(  # pylint: disable=protected-access
            cls,
            flatten_fn=lambda a: a.tree_flatten(),
            unflatten_fn=lambda vals, ctx: cls.tree_unflatten(ctx, vals),
        )
        cls._dca_torch_tree_registered = True  # pylint: disable=protected-access

    # Validate and normalize array fields
    # * Maybe cast (list, np) -> xnp
    # * Maybe cast dtype
    # * Maybe broadcast shapes
    # Because this is only done inside `__init__`, it is ok to mutate self.

    # Cast and validate the array xnp are consistent
    xnp = self._cast_xnp_dtype_inplace()

    # Validate the batch shape is consistent
    # However, we need to be careful that `_ArrayField` never uses
    # `@epy.cached_property`
    shape = self._broadcast_shape_inplace()

    # TODO(epot): When to validate (`field.validate()`)

    if xnp is None:  # No values
        # Inside `jax.tree_utils`, tree-def can be created with `None` values.
        # Inside `jax.vmap`, tree can be created with `object()` sentinel values.
        assert shape is None
        xnp = None

    # Cache results
    # Should the state be stored in a separate object to avoid collisions ?
    assert shape is None or isinstance(shape, tuple), shape
    # `_setattr` presumably bypasses the frozen-dataclass guard — TODO confirm.
    self._setattr('_shape', shape)
    self._setattr('_xnp', xnp)
# ====== Array functions ======
@property
def shape(self) -> Shape:
    """Returns the batch shape common to all fields."""
    # Computed once in `__post_init__` and cached on the instance.
    return self._shape
@property
def size(self) -> int:
    """Returns the number of elements."""
    # Presumably the product of the batch dims (see `np_utils.size_of`) —
    # TODO confirm.
    return np_utils.size_of(self._shape)
@property
def ndim(self) -> int:
    """Returns the number of dimensions."""
    # Rank of the batch shape only (per-field inner dims are excluded).
    return len(self._shape)
def reshape(self: _DcT, shape: Union[Shape, str], **axes_length: int) -> _DcT:
    """Reshape the batch shape according to the pattern.

    Supports both tuple and einops mode:

    ```python
    rays.reshape('b h w -> b (h w)')
    rays.reshape((128, -1))
    ```

    Args:
        shape: Target shape. Can be string for `einops` support.
        **axes_length: Any additional specifications for dimensions for einops
            support.

    Returns:
        The dataclass array with the new shape
    """
    if isinstance(shape, str):  # Einops support
        # `to_absolute_einops` appends each field's inner dims to the pattern
        # so only the batch dims are rearranged.
        return self._map_field(  # pylint: disable=protected-access
            array_fn=lambda f: einops.rearrange(  # pylint: disable=g-long-lambda
                f.value,
                np_utils.to_absolute_einops(shape, nlastdim=len(f.inner_shape)),
                **axes_length,
            ),
            dc_fn=lambda f: f.value.reshape(  # pylint: disable=g-long-lambda
                np_utils.to_absolute_einops(shape, nlastdim=len(f.inner_shape)),
                **axes_length,
            ),
        )
    else:  # Numpy support
        assert isinstance(shape, tuple)  # For pytest

        def _reshape(f: _ArrayField):
            # Keep each field's inner (per-element) dims unchanged.
            return f.value.reshape(shape + f.inner_shape)

        return self._map_field(array_fn=_reshape, dc_fn=_reshape)  # pylint: disable=protected-access
def flatten(self: _DcT) -> _DcT:
    """Flatten the batch shape."""
    # Collapses all batch dims into a single one, like `np.ravel`.
    return self.reshape((-1,))
def broadcast_to(self: _DcT, shape: Shape) -> _DcT:
    """Broadcast the batch shape.

    Args:
        shape: Target batch shape (per-field inner dims are untouched).

    Returns:
        The dataclass array broadcast to `shape`.
    """
    return self._map_field(  # pylint: disable=protected-access
        array_fn=lambda f: f.broadcast_to(shape),
        dc_fn=lambda f: f.broadcast_to(shape),
    )
def __getitem__(self: _DcT, indices: _IndicesArg) -> _DcT:
    """Slice indexing.

    The same (normalized) index expression is applied to every field, so
    all fields stay batched consistently.
    """
    indices = np.index_exp[indices]  # Normalize indices
    # Replace `...` by explicit shape
    indices = _to_absolute_indices(indices, shape=self.shape)
    return self._map_field(
        array_fn=lambda f: f.value[indices],
        dc_fn=lambda f: f.value[indices],
    )
# _DcT[n *d] -> Iterator[_DcT[*d]]
def __iter__(self: _DcT) -> Iterator[_DcT]:
    """Iterate over the outermost dimension.

    Raises:
        TypeError: when the batch shape is scalar (nothing to iterate).
    """
    if not self.shape:
        raise TypeError(f'iteration over 0-d array: {self!r}')

    # Similar to `etree.unzip(self)` (but work with any backend)
    field_names = [f.name for f in self._array_fields]  # pylint: disable=not-an-iterable
    field_values = [f.value for f in self._array_fields]  # pylint: disable=not-an-iterable
    # Zip the leading dim of every field, rebuilding one instance per step.
    for vals in zip(*field_values):
        yield self.replace(**dict(zip(field_names, vals)))
def __len__(self) -> int:
    """Length of the first array dimension.

    Raises:
        TypeError: when the batch shape is scalar (no leading dimension).
    """
    batch_shape = self.shape
    if batch_shape:
        return batch_shape[0]
    raise TypeError(
        f'len() of unsized {self.__class__.__name__} (shape={self.shape})'
    )
def __bool__(self) -> Literal[True]:
    """`dca.DataclassArray` always evaluates to `True`.

    Like all python objects (including dataclasses), `dca.DataclassArray`
    always evaluates to `True`. So:

    `Ray(pos=None)`, `Ray(pos=0)` all evaluate to `True`.

    This allows constructs like:

    ```python
    def fn(ray: Optional[dca.Ray] = None):
        if ray:
            ...
    ```

    Or:

    ```python
    def fn(ray: Optional[dca.Ray] = None):
        ray = ray or default_ray
    ```

    Only in the very rare case of empty-tensor (`shape=(0, ...)`):

    ```python
    assert ray is not None
    assert len(ray) == 0
    bool(ray)  # ValueError: Truth value is ambiguous
    ```

    Returns:
        True

    Raises:
        ValueError: If `len(self) == 0` to avoid ambiguity.
    """
    # Only a non-scalar array with an empty leading dim is rejected;
    # everything else is truthy.
    if self.shape and not len(self):  # pylint: disable=g-explicit-length-test
        raise ValueError(
            f'The truth value of {self.__class__.__name__} when `len(x) == 0` '
            'is ambiguous. Use `len(x)` or `x is not None`.'
        )
    return True
def map_field(
    self: _DcT,
    fn: Callable[[Array['*din']], Array['*dout']],
) -> _DcT:
    """Apply a transformation on all arrays from the fields.

    Args:
        fn: Function applied to each (leaf) array field's value.

    Returns:
        A new instance with every array field transformed by `fn`.
    """
    # Nested DataclassArray fields recurse via their own `map_field`.
    return self._map_field(  # pylint: disable=protected-access
        array_fn=lambda f: fn(f.value),
        dc_fn=lambda f: f.value.map_field(fn),
    )
# ====== Dataclass/Conversion utils ======
def replace(self: _DcT, **kwargs: Any) -> _DcT:
    """Alias for `dataclasses.replace`.

    Additionally forwards the fields listed in `__dca_non_init_fields__`,
    which `dataclasses.replace` cannot handle (they are `init=False`).
    """
    # Split kwargs between regular `__init__` fields and non-init fields.
    init_kwargs = {
        k: v for k, v in kwargs.items() if k not in self.__dca_non_init_fields__
    }
    non_init_kwargs = {
        k: v for k, v in kwargs.items() if k in self.__dca_non_init_fields__
    }

    # Create the new object
    new_self = dataclasses.replace(self, **init_kwargs)  # pytype: disable=wrong-arg-types  # re-none

    # TODO(epot): Could try to unify logic bellow with `tree_unflatten`

    # Additionally forward the non-init kwargs
    # `dataclasses.field(init=False) kwargs are required because `init=True`
    # creates conflicts:
    # * Inheritance fails with non-default argument 'K' follows default argument
    # * Pytype complains too
    # TODO(py310): Cleanup using `dataclass(kw_only)`
    assert new_self is not self
    for k in self.__dca_non_init_fields__:
        if k in non_init_kwargs:
            v = non_init_kwargs[k]
        else:
            # Not overridden by the caller: carry over the current value.
            v = getattr(self, k)
        new_self._setattr(k, v)  # pylint: disable=protected-access
    return new_self
def as_np(self: _DcT) -> _DcT:
    """Returns the instance as containing `np.ndarray`."""
    # Thin alias around `as_xnp` with the numpy backend (no-op if already np).
    return self.as_xnp(enp.lazy.np)
def as_jax(self: _DcT) -> _DcT:
    """Returns the instance as containing `jnp.ndarray`."""
    # Thin alias around `as_xnp` with the jax backend.
    return self.as_xnp(enp.lazy.jnp)
def as_tf(self: _DcT) -> _DcT:
    """Returns the instance as containing `tf.Tensor`."""
    # Thin alias around `as_xnp` with the tf-numpy backend.
    return self.as_xnp(enp.lazy.tnp)
def as_torch(self: _DcT) -> _DcT:
    """Returns the instance as containing `torch.Tensor`."""
    # Thin alias around `as_xnp` with the torch backend.
    return self.as_xnp(enp.lazy.torch)
def as_xnp(self: _DcT, xnp: enp.NpModule) -> _DcT:
    """Returns the instance as containing `xnp.ndarray`.

    Args:
        xnp: Target numpy-like module (np, jnp, tnp, torch).

    Returns:
        `self` unchanged when the backend already matches, otherwise a new
        instance with every array field converted.
    """
    if xnp is self.xnp:  # No-op
        return self

    # Direct `torch` <> `tf`/`jax` conversion not supported, so convert to
    # `numpy`
    if enp.lazy.is_torch_xnp(xnp) or enp.lazy.is_torch_xnp(self.xnp):

        def _as_torch(f):
            # Round-trip through numpy to bridge torch with the other backends.
            arr = np.asarray(f.value)
            # Torch fail for scalar arrays:
            # https://github.com/pytorch/pytorch/issues/97021
            if enp.lazy.is_torch_xnp(xnp) and not arr.shape:  # Destination is torch
                return xnp.asarray(arr.item(), dtype=lazy.as_torch_dtype(arr.dtype))
            return xnp.asarray(arr)

        array_fn = _as_torch
    else:
        array_fn = lambda f: xnp.asarray(f.value)

    # Update all childs
    new_self = self._map_field(  # pylint: disable=protected-access
        array_fn=array_fn,
        dc_fn=lambda f: f.value.as_xnp(xnp),
    )
    return new_self
# TODO(pytype): Remove hack. Currently, Python does not support typing
# annotations for modules, but pytype auto-infers the correct type.
# So this hack allows auto-completion.
if typing.TYPE_CHECKING:

  @property
  def xnp(self):  # pylint: disable=function-redefined
    """Returns the numpy module of the class (np, jnp, tnp)."""
    # Static-analysis-only branch: returning `np` lets type checkers and
    # IDEs auto-complete the module's attributes. Never executed at runtime.
    return np

else:

  @property
  def xnp(self) -> enp.NpModule:
    """Returns the numpy module of the class (np, jnp, tnp)."""
    # `_xnp` is assigned elsewhere in the class (not visible in this chunk)
    # — NOTE(review): confirm it is always set before first access.
    return self._xnp
# ====== Torch specific methods ======
# Could also add
# * x.detach
# * x.is_cuda
# * x.device
# * x.get_device
def to(self: _DcT, device, **kwargs) -> _DcT:
  """Moves the dataclass array to the given device (torch backend only).

  Raises:
    ValueError: If the instance is not backed by torch.
  """
  if lazy.is_torch_xnp(self.xnp):
    return self.map_field(lambda f: f.to(device, **kwargs))
  raise ValueError('`.to` can only be called when `xnp == torch`')
def cpu(self: _DcT, *args, **kwargs) -> _DcT:
  """Moves the dataclass array to the CPU device (torch backend only).

  Raises:
    ValueError: If the instance is not backed by torch.
  """
  if lazy.is_torch_xnp(self.xnp):
    return self.map_field(lambda f: f.cpu(*args, **kwargs))
  raise ValueError('`.cpu` can only be called when `xnp == torch`')
def cuda(self: _DcT, *args, **kwargs) -> _DcT:
  """Moves the dataclass array to the CUDA device (torch backend only).

  Raises:
    ValueError: If the instance is not backed by torch.
  """
  if lazy.is_torch_xnp(self.xnp):
    return self.map_field(lambda f: f.cuda(*args, **kwargs))
  raise ValueError('`.cuda` can only be called when `xnp == torch`')
# ====== Internal ======
@epy.cached_property
def _all_fields_empty(self) -> bool:
  """Returns True if none of the array fields hold an actual value.

  This happens when tree utilities (jax/tf) substitute placeholder
  sentinels for the leaves, or when no array field is set at all.
  """
  if not self._array_fields:  # All fields are `None` / `object`
    # No fields have been defined.
    # This can be the case internally by jax which apply some
    # `tree_map(lambda x: sentinel)`.
    return True
  # `tf.nest` sometimes replace values by dummy `.` inside
  # `assert_same_structure`
  if enp.lazy.has_tf:
    # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top
    from tensorflow.python.util import nest_util  # pytype: disable=import-error
    # pylint: enable=g-direct-tensorflow-import,g-import-not-at-top
    # `_DOT` is the private sentinel `tf.nest` substitutes for leaves.
    if any(f.value is nest_util._DOT for f in self._array_fields):  # pylint: disable=protected-access,not-an-iterable
      return True
  return False
@epy.cached_property
def _all_array_fields(self) -> dict[str, _ArrayField]:
  """All array fields, including `None` values."""
  fields = {}
  # Wrap each field's metadata into an `_ArrayField` view bound to `self`.
  for name, field_metadata in self._dca_fields_metadata.items():  # pylint: disable=protected-access
    fields[name] = _ArrayField(
        name=name,
        host=self,
        **field_metadata.to_dict(),  # pylint: disable=not-a-mapping
    )
  return fields
@epy.cached_property
def _array_fields(self) -> list[_ArrayField]:
  """All active array fields (non-None), including static ones."""
  active = []
  for field in self._all_array_fields.values():
    if field.is_value_missing:  # Filter `None` values
      continue
    active.append(field)
  return active
def _cast_xnp_dtype_inplace(self) -> Optional[enp.NpModule]:
  """Validate `xnp` are consistent and cast `np` -> `xnp` in-place.

  Mutates `self` (via `_setattr`); intended to be called from `__init__`.

  Returns:
    The single numpy-like backend shared by all array fields, or `None`
    when the instance has no array values to inspect.

  Raises:
    Exception: Re-raised with a `f.qualname` prefix when a field has an
      invalid backend, dtype or shape.
  """
  if self._all_fields_empty:  # pylint: disable=using-constant-test
    return None

  # Validate the dtype
  def _get_xnp(f: _ArrayField) -> enp.NpModule:
    # Infer the backend of a single field value.
    try:
      return np_utils.get_xnp(
          f.value,
          strict=not self.__dca_params__.cast_list,
      )
    except Exception as e:  # pylint: disable=broad-except
      epy.reraise(e, prefix=f'Invalid {f.qualname}: ')

  # Group field names per backend to detect inconsistencies.
  xnps = epy.groupby(
      self._array_fields,
      key=_get_xnp,
      value=lambda f: f.name,
  )
  if not xnps:
    return None
  xnp = _infer_xnp(xnps)

  def _cast_field(f: _ArrayField) -> None:
    # Cast a single field to the resolved backend, mutating `self`.
    try:
      # Supports for TensorSpec (e.g. in `tf.function` signature)
      if enp.lazy.is_tf_xnp(xnp) and isinstance(
          f.value, enp.lazy.tf.TensorSpec
      ):
        # TODO(epot): Actually check the dtype
        new_value = f.value
      else:
        new_value = np_utils.asarray(
            f.value,
            xnp=xnp,
            dtype=f.dtype,
            cast_dtype=self.__dca_params__.cast_dtype,
        )
      self._setattr(f.name, new_value)
      # After the field has been set, we validate the shape
      f.assert_shape()
    except Exception as e:  # pylint: disable=broad-except
      epy.reraise(e, prefix=f'Invalid {f.qualname}: ')

  # `_inplace=True`: `_cast_field` already mutated `self`, so `_map_field`
  # skips building a new instance.
  self._map_field(
      array_fn=_cast_field,
      dc_fn=_cast_field,  # pytype: disable=wrong-arg-types
      _inplace=True,
  )
  return xnp
def _broadcast_shape_inplace(self) -> Optional[Shape]:
  """Validate the shapes are consistent and broadcast values in-place.

  Mutates `self` (via `_setattr`); intended to be called from `__init__`.

  Returns:
    The common batch shape after broadcasting, or `None` when the instance
    has no array values.

  Raises:
    ValueError: If the field batch shapes cannot be broadcast together, if
      the restricted broadcasting rule is violated, or if broadcasting is
      required but disabled (`broadcast=False`).
  """
  if self._all_fields_empty:  # pylint: disable=using-constant-test
    return None

  # First collect all shapes and compute the final shape.
  shape_to_names = epy.groupby(
      self._array_fields,
      key=lambda f: f.host_shape,
      value=lambda f: f.name,
  )
  # Distinct batch ranks present (used by the restriction check below).
  shape_lengths = {len(s) for s in shape_to_names.keys()}

  # Broadcast all shape together
  try:
    final_shape = np.broadcast_shapes(*shape_to_names.keys())
  except ValueError:
    final_shape = None  # Bad broadcast

  # Currently, we restrict broadcasting to either scalar or fixed length.
  # This is to avoid confusion broadcasting vs vectorization rules.
  # This restriction could be lifted if we encounter a use-case.
  if (
      final_shape is None
      or len(shape_lengths) > 2
      or (len(shape_lengths) == 2 and 0 not in shape_lengths)
  ):
    raise ValueError(
        f'Conflicting batch shapes: {shape_to_names}. '
        f'Currently {type(self).__qualname__}.__init__ broadcasting is '
        'restricted to scalar or dim=1 . '
        'Please open an issue if you need more fine-grained broadcasting.'
    )

  def _broadcast_field(f: _ArrayField) -> None:
    # Broadcast a single field to `final_shape`, mutating `self`.
    if f.host_shape == final_shape:  # Already broadcasted
      return
    elif not self.__dca_params__.broadcast:  # Broadcasing disabled
      raise ValueError(
          f'{type(self).__qualname__} has `broadcast=False`. '
          f'Cannot broadcast {f.name} from {f.full_shape} to {final_shape}. '
          'To enable broadcast, use `@dca.dataclass_array(broadcast=True)`.'
      )
    self._setattr(f.name, f.broadcast_to(final_shape))

  # `_inplace=True`: `_broadcast_field` already mutated `self`.
  self._map_field(
      array_fn=_broadcast_field,
      dc_fn=_broadcast_field,  # pytype: disable=wrong-arg-types
      _inplace=True,
  )
  return final_shape
def _to_absolute_axis(self, axis: Axes) -> Axes:
  """Normalize the axis to absolute value.

  Re-raises any normalization error with the class name and shape prefixed
  for easier debugging.
  """
  try:
    return np_utils.to_absolute_axis(axis, ndim=self.ndim)
  except Exception as exc:  # pylint: disable=broad-except
    prefix = f'For {self.__class__.__qualname__} with shape={self.shape}: '
    epy.reraise(exc, prefix=prefix)
def _map_field(
    self: _DcT,
    *,
    array_fn: Callable[[_ArrayField[Array['*din']]], Array['*dout']],
    dc_fn: Optional[Callable[[_ArrayField[_DcT]], _DcT]],
    _inplace: bool = False,
) -> _DcT:
  """Apply a transformation on all array fields structure.

  Args:
    array_fn: Function applied on the `xnp.ndarray` fields
    dc_fn: Function applied on the `dca.DataclassArray` fields (to recurse)
    _inplace: If True, assume the function mutate the object in-place. Should
      only be used inside `__init__` for performances.

  Returns:
    The transformed dataclass array.
  """

  def _transform(field: _ArrayField):
    # Nested dataclass arrays recurse through `dc_fn`; plain arrays go
    # through `array_fn`.
    if field.is_dataclass:
      return dc_fn(field)
    return array_fn(field)

  # Always evaluate: in `_inplace` mode the callbacks mutate `self` as a
  # side effect, so the dict itself is discarded.
  new_values = {f.name: _transform(f) for f in self._array_fields}  # pylint: disable=not-an-iterable,protected-access
  if _inplace:
    return self
  # For performance, only call the constructor when a new object is needed.
  return self.replace(**new_values)
def tree_flatten(self) -> tuple[tuple[DcOrArray, ...], _TreeMetadata]:
  """`jax.tree_utils` support."""
  all_fields = self._all_array_fields
  # Flatten all values (and not just the non-None ones)
  leaves = tuple(f.value for f in all_fields.values())
  # Everything that is not an array field travels as static metadata.
  non_array_kwargs = {}
  for f in dataclasses.fields(self):  # pytype: disable=wrong-arg-types  # re-none
    if f.name not in all_fields:
      non_array_kwargs[f.name] = getattr(self, f.name)
  metadata = _TreeMetadata(
      array_field_names=list(all_fields.keys()),
      non_array_field_kwargs=non_array_kwargs,
  )
  return (leaves, metadata)
@classmethod
def tree_unflatten(
    cls: Type[_DcT],
    metadata: _TreeMetadata,
    array_field_values: list[DcOrArray],
) -> _DcT:
  """`jax.tree_utils` support."""
  array_field_kwargs = dict(
      zip(metadata.array_field_names, array_field_values)
  )
  fields_by_name = {f.name: f for f in dataclasses.fields(cls)}  # pytype: disable=wrong-arg-types  # re-none
  # Split non-array kwargs between constructor args and `init=False` fields.
  init_fields = {}
  non_init_fields = {}
  for name, value in metadata.non_array_field_kwargs.items():
    if fields_by_name[name].init:
      init_fields[name] = value
    else:
      non_init_fields[name] = value
  self = cls(**array_field_kwargs, **init_fields)
  if non_init_fields:
    # Currently it's not clear how to handle non-init fields so raise an error
    if set(non_init_fields) - self.__dca_non_init_fields__:
      raise ValueError(
          '`dca.DataclassArray` field with init=False should be explicitly '
          'specified in `__dca_non_init_fields__` for them to be '
          'propagated by `tree_map`.'
      )
    # TODO(py310): Delete once dataclass supports `kw_only=True`
    for name, value in non_init_fields.items():
      self._setattr(name, value)  # pylint: disable=protected-access
  return self
def __tf_flatten__(self) -> tuple[_TreeMetadata, tuple[DcOrArray, ...]]:
  """`tf.nest` support: like `tree_flatten`, but metadata comes first."""
  leaves, meta = self.tree_flatten()
  return (meta, leaves)
@classmethod
def __tf_unflatten__(
    cls: Type[_DcT],
    metadata: _TreeMetadata,
    components: list[DcOrArray],
) -> _DcT:
  """`tf.nest` support: rebuild the instance from `__tf_flatten__` output."""
  return cls.tree_unflatten(metadata, components)
def _setattr(self, name: str, value: Any) -> None:
  """Like setattr, but support `frozen` dataclasses."""
  # `object.__setattr__` bypasses the `__setattr__` guard that frozen
  # dataclasses install, allowing controlled internal mutation.
  object.__setattr__(self, name, value)
def assert_same_xnp(self, x: Union[Array[...], DataclassArray]) -> None:
  """Assert the given array uses the same backend as the current object.

  Raises:
    ValueError: If the backends differ.
  """
  other_xnp = np_utils.get_xnp(x)
  if other_xnp is self.xnp:
    return
  raise ValueError(
      f'{self.__class__.__name__} is {self.xnp.__name__} but got input '
      f'{other_xnp.__name__}. Please cast input first.'
  )
|
()
|
44,078 |
dataclass_array.array_dataclass
|
__bool__
|
`dca.DataclassArray` always evaluate to `True`.
Like all Python objects (including dataclasses), `dca.DataclassArray` always
evaluates to `True`. So:
`Ray(pos=None)`, `Ray(pos=0)` all evaluate to `True`.
This allows constructs like:
```python
def fn(ray: Optional[dca.Ray] = None):
if ray:
...
```
Or:
```python
def fn(ray: Optional[dca.Ray] = None):
ray = ray or default_ray
```
Only in the very rare case of an empty tensor (`shape=(0, ...)`) is the truth value ambiguous:
```python
assert ray is not None
assert len(ray) == 0
bool(ray) # TypeError: Truth value is ambigous
```
Returns:
True
Raises:
ValueError: If `len(self) == 0` to avoid ambiguity.
|
def __bool__(self) -> Literal[True]:
  """`dca.DataclassArray` always evaluates to `True`.

  Like all Python objects (including dataclasses), a `dca.DataclassArray`
  is always truthy: `Ray(pos=None)`, `Ray(pos=0)` all evaluate to `True`.
  This allows constructs like:

  ```python
  def fn(ray: Optional[dca.Ray] = None):
    if ray:
      ...
  ```

  Or:

  ```python
  def fn(ray: Optional[dca.Ray] = None):
    ray = ray or default_ray
  ```

  Only in the very rare case of an empty tensor (`shape=(0, ...)`) is the
  truth value ambiguous:

  ```python
  assert ray is not None
  assert len(ray) == 0
  bool(ray)  # TypeError: Truth value is ambigous
  ```

  Returns:
    True

  Raises:
    ValueError: If `len(self) == 0` to avoid ambiguity.
  """
  # Only batched-and-empty instances are rejected; scalar instances
  # (`shape == ()`) short-circuit and never call `len`.
  is_batched_and_empty = bool(self.shape) and len(self) == 0
  if is_batched_and_empty:
    raise ValueError(
        f'The truth value of {self.__class__.__name__} when `len(x) == 0` '
        'is ambigous. Use `len(x)` or `x is not None`.'
    )
  return True
|
(self) -> Literal[True]
|
44,079 |
dataclass_array.array_dataclass
|
__getitem__
|
Slice indexing.
|
def __getitem__(self: _DcT, indices: _IndicesArg) -> _DcT:
  """Slice indexing."""
  normalized = np.index_exp[indices]  # Normalize indices to a tuple
  # Replace `...` by the explicit axes (based on the batch shape)
  normalized = _to_absolute_indices(normalized, shape=self.shape)
  # The same indexing applies to plain arrays and nested dataclass arrays.
  take = lambda f: f.value[normalized]
  return self._map_field(array_fn=take, dc_fn=take)
|
(self: ~_DcT, indices: Union[ellipsis, NoneType, int, slice, Any, Tuple[Union[ellipsis, NoneType, int, slice, Any]]]) -> ~_DcT
|
44,080 |
dataclass_array.array_dataclass
|
__iter__
|
Iterate over the outermost dimension.
|
def __iter__(self: _DcT) -> Iterator[_DcT]:
  """Iterate over the outermost dimension."""
  if not self.shape:
    raise TypeError(f'iteration over 0-d array: {self!r}')
  # Similar to `etree.unzip(self)` (but work with any backend)
  names = [f.name for f in self._array_fields]  # pylint: disable=not-an-iterable
  columns = [f.value for f in self._array_fields]  # pylint: disable=not-an-iterable
  # Zip the per-field columns into per-element rows.
  for row in zip(*columns):
    yield self.replace(**dict(zip(names, row)))
|
(self: ~_DcT) -> Iterator[~_DcT]
|
44,081 |
dataclass_array.array_dataclass
|
__len__
|
Length of the first array dimension.
|
def __len__(self) -> int:
  """Length of the first array dimension."""
  if self.shape:
    return self.shape[0]
  # Scalar instances (`shape == ()`) have no length, mirroring numpy.
  raise TypeError(
      f'len() of unsized {self.__class__.__name__} (shape={self.shape})'
  )
|
(self) -> int
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.