python_code
stringlengths 0
108k
|
---|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype validator PEP-compliant type hints** (i.e., hints annotating callables
declared throughout the :mod:`beartype.vale` subpackage, either for compliance
with :pep:`561` or simply for documentation purposes).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import (
Callable,
Union,
)
# ....................{ HINTS }....................
BeartypeValidatorTester = Callable[[object], bool]
'''
PEP-compliant type hint matching a **beartype validator tester** (i.e.,
caller-defined callable accepting a single arbitrary object and returning
either ``True`` if that object satisfies an arbitrary constraint *or* ``False``
otherwise).
Beartype validator testers are suitable for subscripting functional beartype
validator factories (e.g., :attr:`beartype.vale.Is`).
'''
BeartypeValidatorRepresenter = Union[str, Callable[[], str]]
'''
PEP-compliant type hint matching a **beartype validator representer** (i.e.,
either a string *or* caller-defined callable accepting no arguments returning a
machine-readable representation of a beartype validator).
Technically, this representation *could* be passed by the caller rather than
this callable dynamically generating that representation. Pragmatically,
generating this representation is sufficiently slow for numerous types of
validators that deferring their generation until required by a call to the
:meth:`__repr__` dunder method externally called by a call to the :func:`repr`
builtin` on this validator is effectively mandatory. Validators whose
representations are particularly slow to generate include:
* The :class:`Is` class subscripted by a lambda rather than non-lambda
function. Generating the representation of that class subscripted by a
non-lambda function only requires introspecting the name of that function and
is thus trivially fast. However, lambda functions have no names and are thus
*only* distinguishable by their source code; generating the representation of
that class subscripted by a lambda function requires parsing the source code
of the file declaring that lambda for the exact substring of that code
declaring that lambda.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype validator callable utilities** (i.e., callables performing low-level
callable-centric operations on behalf of higher-level beartype validators).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeValeSubscriptionException
from beartype.vale._util._valeutiltyping import BeartypeValidatorTester
from beartype._util.func.arg.utilfuncargtest import (
die_unless_func_args_len_flexible_equal)
# ....................{ FORMATTERS                         }....................
def die_unless_validator_tester(
    validator_tester: BeartypeValidatorTester) -> None:
    '''
    Raise an exception unless the passed object is a **validator tester**
    (i.e., caller-defined callable accepting a single arbitrary object and
    returning either ``True`` if that object satisfies an arbitrary constraint
    *or* ``False`` otherwise).

    Parameters
    ----------
    validator_tester : BeartypeValidatorTester
        Object to be validated.

    Raises
    ----------
    beartype.roar.BeartypeValeSubscriptionException
        If that object is either:

        * *Not* callable.
        * A C-based rather than pure-Python callable.
        * A pure-Python callable accepting either no arguments or two or more
          arguments (i.e., any number of arguments other than exactly one).
    '''

    # If this validator is either uncallable, a C-based callable, *OR* a
    # pure-Python callable accepting more or less than one parameter, raise
    # an exception. All callability and argument-count checks are delegated to
    # this general-purpose tester, which raises the passed exception class.
    die_unless_func_args_len_flexible_equal(
        func=validator_tester,
        func_args_len_flexible=1,
        exception_cls=BeartypeValeSubscriptionException,
    )
    # Else, this validator is a pure-Python callable accepting exactly one
    # parameter. Since no further validation can be performed on this
    # callable without unsafely calling that callable, we accept this
    # callable as is for now.
    #
    # Note that we *COULD* technically inspect annotations if defined on
    # this callable as well. Since this callable is typically defined as a
    # lambda, annotations are typically *NOT* defined on this callable.
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype validator code snippets** (i.e., triple-quoted pure-Python code
constants formatted and concatenated together into wrapper functions
type-checking decorated callables annotated by one or more beartype
validators).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._util.py.utilpyversion import IS_PYTHON_AT_LEAST_3_8
from beartype._util.text.utiltextmagic import CODE_INDENT_1
# ....................{ INDENTATION                        }....................
VALE_CODE_INDENT_1 = f'{{indent}}{CODE_INDENT_1}'
'''
Code snippet prefixed by the placeholder substring ``"{indent}"`` (which the
:func:`beartype._check.expr.exprmake.make_func_wrapper_code` replaces with
the indentation level required by the current beartype validator) followed by a
single level of indentation.
'''
# ....................{ CHECK ~ factory                    }....................
# Note that each snippet below is itself a format template: the doubled
# "{{...}}" braces survive one .format() pass as literal "{...}" placeholders
# replaced by a subsequent formatting pass, whereas the single-braced
# parameters (e.g., "{param_name_obj_value}") are replaced immediately.
VALE_CODE_CHECK_ISEQUAL_TEST = '''
{{indent}}# True only if this pith equals this object.
{{indent}}{{obj}} == {param_name_obj_value}'''
'''
:attr:`beartype.vale.IsEqual`-specific code snippet validating an arbitrary
object to be equal to another arbitrary object.
'''


VALE_CODE_CHECK_ISINSTANCE_TEST = '''
{{indent}}# True only if this pith is an object instancing this superclass.
{{indent}}isinstance({{obj}}, {param_name_types})'''
'''
:attr:`beartype.vale.IsInstance`-specific code snippet validating an arbitrary
object to instance an arbitrary type.
'''


VALE_CODE_CHECK_ISSUBCLASS_TEST = '''
{{indent}}# True only if this pith is a class subclassing this superclass.
{{indent}}(isinstance({{obj}}, type) and issubclass({{obj}}, {param_name_types}))'''
'''
:attr:`beartype.vale.IsSubclass`-specific code snippet validating an arbitrary
type to subclass another arbitrary type.
'''
# ....................{ CHECK ~ factory : isattr }....................
VALE_CODE_CHECK_ISATTR_TEST = '''(
{{indent}} # True only if this pith defines an attribute with this name.
{{indent}} {attr_value_expr}
{{indent}} is not {local_name_sentinel} and {attr_value_is_valid_expr}
{{indent}})'''
'''
:attr:`beartype.vale.IsAttr`-specific code snippet validating an arbitrary
object to define an attribute with an arbitrary name satisfying an arbitrary
expression evaluating to a boolean.
'''
_VALE_CODE_CHECK_ISATTR_VALUE_EXPR_RAW = (
'getattr({{obj}}, {attr_name_expr}, {local_name_sentinel})')
'''
:attr:`beartype.vale.IsAttr`-specific Python expression inefficiently yielding
the value of the attribute with an arbitrary name of an arbitrary object to be
validated.
'''
VALE_CODE_CHECK_ISATTR_VALUE_EXPR = (
# If the active Python interpreter targets Python >= 3.8 and thus supports
# assignment expressions, localize the value of this attribute to optimize
# subsequent access of that value;
f'({{local_name_attr_value}} := {_VALE_CODE_CHECK_ISATTR_VALUE_EXPR_RAW})'
if IS_PYTHON_AT_LEAST_3_8 else
# Else, the active Python interpreter targets Python < 3.8 and thus fails
# to support assignment expressions. In this case, directly access that
# value repeatedly (and thus inefficiently) *WITHOUT* localization.
_VALE_CODE_CHECK_ISATTR_VALUE_EXPR_RAW
)
'''
:attr:`beartype.vale.IsAttr`-specific Python expression efficiently yielding
the value of the attribute with an arbitrary name of an arbitrary object to be
validated.
If the active Python interpreter targets Python >= 3.8 and thus supports
assignment expressions, this expression is optimized to localize this value to
a local variable whose name *must* be uniquified and formatted by the caller
into the ``local_name_attr_value`` format variable.
'''
# ....................{ METHODS                            }....................
# Bound str.format() methods of the code snippets declared above, localized as
# a microoptimization: callers invoke these bound methods directly rather than
# re-resolving the ".format" attribute on each snippet per call.
VALE_CODE_CHECK_ISATTR_TEST_format = VALE_CODE_CHECK_ISATTR_TEST.format
VALE_CODE_CHECK_ISATTR_VALUE_EXPR_format = (
    VALE_CODE_CHECK_ISATTR_VALUE_EXPR.format)
VALE_CODE_CHECK_ISEQUAL_TEST_format = VALE_CODE_CHECK_ISEQUAL_TEST.format
VALE_CODE_CHECK_ISINSTANCE_TEST_format = VALE_CODE_CHECK_ISINSTANCE_TEST.format
VALE_CODE_CHECK_ISSUBCLASS_TEST_format = VALE_CODE_CHECK_ISSUBCLASS_TEST.format
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype validator text utilities** (i.e., callables performing low-level
string-centric operations on behalf of higher-level beartype validators).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar._roarexc import _BeartypeValeUtilException
from beartype.typing import Optional
from beartype._cave._cavemap import NoneTypeOr
# ....................{ FORMATTERS                         }....................
def format_diagnosis_line(
    # Mandatory parameters.
    validator_repr: str,
    indent_level_outer: str,
    indent_level_inner: str,

    # Optional parameters.
    is_obj_valid: Optional[bool] = None,
) -> str:
    '''
    Format and return one line of a larger human-readable **validation failure
    diagnosis** (i.e., substring describing how an arbitrary object either
    satisfies *or* violates an arbitrary validator).

    Parameters
    ----------
    validator_repr : str
        **Validator representation** (i.e., unformatted single line of a larger
        diagnosis report to be formatted by this function).
    indent_level_outer : str
        **Outermost indentation level** (i.e., zero or more adjacent spaces
        prefixing each line of the returned substring).
    indent_level_inner : str
        **Innermost indentation level** (i.e., zero or more adjacent spaces
        delimiting the human-readable representation of the tri-state boolean
        and validator representation in the returned substring).
    is_obj_valid : Optional[bool]
        Tri-state boolean: ``True`` if the object satisfies the validator
        described by this line, ``False`` if it violates that validator, or
        ``None`` if this line is purely syntactic (e.g., a lone ")" delimiter)
        and describes no validation at all. Defaults to ``None``.

    Returns
    ----------
    str
        This diagnosis line formatted with this indentation level.

    Raises
    ----------
    _BeartypeValeUtilException
        If ``is_obj_valid`` is *not* a tri-state boolean (i.e., ``True``,
        ``False``, or ``None``).
    '''

    # Cheap sanity checks on first-party string parameters. Deliberately mere
    # assertions (stripped under "-O"): only internal callers pass these.
    assert isinstance(validator_repr, str), (
        f'{repr(validator_repr)} not string.')
    assert isinstance(indent_level_outer, str), (
        f'{repr(indent_level_outer)} not string.')
    assert isinstance(indent_level_inner, str), (
        f'{repr(indent_level_inner)} not string.')

    # Unlike the above, "is_obj_valid" is validated with a full-blown
    # exception rather than an assertion. A prior assertion here previously
    # surfaced as a non-human-readable failure in an end user issue.
    # *OH, GODS!*
    if not isinstance(is_obj_valid, NoneTypeOr[bool]):
        raise _BeartypeValeUtilException(
            f'beartype.vale._valeutiltext.format_diagnosis_line() parameter '
            f'"is_obj_valid" value {repr(is_obj_valid)} '
            f'not tri-state boolean for '
            f'validator representation: {validator_repr}'
        )
    # Else, "is_obj_valid" is a tri-state boolean.

    # Column-aligning prefix embodying this tri-state boolean, intended to
    # yield diagnoses resembling:
    #     False == (
    #     True == Is[lambda foo: foo.x + foo.y >= 0] &
    #     False == Is[lambda foo: foo.x + foo.y <= 10]
    #     )
    #
    # NOTE(review): the "None" padding below mirrors this source as seen;
    # whitespace appears collapsed in this copy and the upstream padding may
    # be wider for column alignment -- confirm against upstream.
    if is_obj_valid is None:
        tristate_prefix = ' '
    else:
        tristate_prefix = ' True == ' if is_obj_valid else 'False == '

    # Assemble the line in display order: outer indent, tri-state prefix,
    # inner indent, then the validator representation itself.
    return (
        indent_level_outer +
        tristate_prefix +
        indent_level_inner +
        validator_repr
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Core binary beartype validators** (i.e., :class:`BeartypeValidator` subclasses
implementing binary operations on pairs of lower-level beartype validators).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from abc import ABCMeta, abstractmethod
from beartype.roar import BeartypeValeSubscriptionException
from beartype.vale._core._valecore import BeartypeValidator
from beartype.vale._util._valeutiltext import format_diagnosis_line
from beartype._util.kind.utilkinddict import merge_mappings_two
from beartype._util.text.utiltextmagic import CODE_INDENT_1
from beartype._util.text.utiltextrepr import represent_object
# ....................{ SUPERCLASSES                       }....................
class BeartypeValidatorBinaryABC(BeartypeValidator, metaclass=ABCMeta):
    '''
    Abstract base class of all **beartype binary validator** (i.e., validator
    modifying the boolean truthiness returned by the validation performed by a
    pair of lower-level beartype validators) subclasses.

    Attributes
    ----------
    _validator_operand_1 : BeartypeValidator
        First lower-level validator operated upon by this higher-level
        validator.
    _validator_operand_2 : BeartypeValidator
        Second lower-level validator operated upon by this higher-level
        validator.
    '''

    # ..................{ CLASS VARIABLES                  }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Subclasses declaring uniquely subclass-specific instance
    # variables *MUST* additionally slot those variables. Subclasses violating
    # this constraint will be usable but unslotted, which defeats our purposes.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Slot all instance variables defined on this object to minimize the time
    # complexity of both reading and writing variables across frequently called
    # cache dunder methods. Slotting has been shown to reduce read and write
    # costs by approximately ~10%, which is non-trivial.
    __slots__ = (
        '_validator_operand_1',
        '_validator_operand_2',
    )

    # ..................{ INITIALIZERS                     }..................
    def __init__(
        self,
        validator_operand_1: BeartypeValidator,
        validator_operand_2: BeartypeValidator,
        **kwargs
    ) -> None:
        '''
        Initialize this higher-level validator from the passed validators.

        Parameters
        ----------
        validator_operand_1 : BeartypeValidator
            First validator operated upon by this higher-level validator.
        validator_operand_2 : BeartypeValidator
            Second validator operated upon by this higher-level validator.

        All remaining parameters are passed as is to the superclass
        :meth:`BeartypeValidator.__init__` method.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If either of these operands are *not* beartype validators.
        '''

        # Locals safely merging the locals required by the code provided by
        # both validators.
        is_valid_code_locals = merge_mappings_two(
            validator_operand_1._is_valid_code_locals,
            validator_operand_2._is_valid_code_locals,
        )

        # Callable accepting no arguments returning a machine-readable
        # representation of this binary validator.
        #
        # Deliberately a lambda closing over both operands and "self",
        # deferring (possibly slow) representation generation until a caller
        # actually requests repr() of this validator.
        get_repr = lambda: (
            f'{repr(validator_operand_1)} {self._operator_symbol} '
            f'{repr(validator_operand_2)}'
        )

        # Initialize our superclass with all remaining parameters.
        super().__init__(
            is_valid_code_locals=is_valid_code_locals,  # type: ignore[arg-type]
            get_repr=get_repr,
            **kwargs
        )

        # Classify all remaining parameters.
        self._validator_operand_1 = validator_operand_1
        self._validator_operand_2 = validator_operand_2

    # ..................{ GETTERS                          }..................
    #FIXME: Unit test us up, please.
    #FIXME: Overly verbose for conjunctions involving three or more
    #beartype validators. Contemplate compaction schemes, please. Specifically,
    #we need to detect this condition here and then compact based on that:
    #    # If either of these validators are themselves conjunctions...
    #    if isinstance(self._validator_operand_1, BeartypeValidatorConjunction):
    #        ...
    #    if isinstance(self._validator_operand_2, BeartypeValidatorConjunction):
    #        ...
    def get_diagnosis(
        self,
        *,

        # Mandatory keyword-only parameters.
        obj: object,
        indent_level_outer: str,
        indent_level_inner: str,

        # Optional keyword-only parameters.
        is_shortcircuited: bool = False,
    ) -> str:
        '''
        Multi-line human-readable diagnosis describing how the passed object
        either satisfies *or* violates this binary validator: a parenthesized
        block containing one diagnosis line per child operand joined by this
        validator's operator symbol.
        '''

        # Innermost indentation level indented one level deeper than the passed
        # innermost indentation level.
        indent_level_inner_nested = indent_level_inner + CODE_INDENT_1

        # Line diagnosing this object against this parent conjunction.
        line_outer_prefix = format_diagnosis_line(
            validator_repr='(',
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner,
            is_obj_valid=self.is_valid(obj),
        )

        # Line diagnosing this object against this first child validator, with
        # an increased indentation level for readability.
        line_inner_operand_1 = self._validator_operand_1.get_diagnosis(
            obj=obj,
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner_nested,
            is_shortcircuited=is_shortcircuited,
        )

        # If this binary validator has *NOT* already been short-circuited,
        # decide whether this first child validator short-circuits this second
        # child validator with respect to the passed object.
        if not is_shortcircuited:
            is_shortcircuited = self._is_shortcircuited(obj)
        # Else, this binary validator has already been short-circuited (e.g.,
        # due to being embedded in a higher-level parent validator that was
        # short-circuited with respect to the passed object). In this case,
        # preserve this short-circuiting as is.

        # Line diagnosing this object against this second child validator, with
        # an increased indentation level for readability.
        line_inner_operand_2 = self._validator_operand_2.get_diagnosis(
            obj=obj,
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner_nested,
            is_shortcircuited=is_shortcircuited,
        )

        # Line providing the suffixing ")" delimiter for readability.
        line_outer_suffix = format_diagnosis_line(
            validator_repr=')',
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner,
        )

        # Return these lines concatenated.
        return (
            f'{line_outer_prefix}\n'
            f'{line_inner_operand_1} {self._operator_symbol}\n'
            f'{line_inner_operand_2}\n'
            f'{line_outer_suffix}'
        )

    # ..................{ ABSTRACT                         }..................
    # Abstract methods required to be concretely implemented by subclasses.

    @property
    @abstractmethod
    def _operator_symbol(self) -> str:
        '''
        Human-readable string embodying the operation performed by this binary
        validator - typically the single-character mathematical sign
        symbolizing this operation.
        '''

        pass


    @abstractmethod
    def _is_shortcircuited(self, obj: object) -> bool:
        '''
        ``True`` only if the first child validator short-circuits the second
        child validator underlying this parent validator with respect to the
        passed object.

        In this context, "short-circuits" is in the boolean evaluation sense.
        Specifically, short-circuiting:

        * Occurs when the first child validator either fully satisfies or
          violates this parent validator with respect to the passed object.
        * Implies the second child validator to be safely ignorable with
          respect to the passed object.

        Parameters
        ----------
        obj : object
            Arbitrary object to be diagnosed against this validator.

        Returns
        ----------
        bool
            ``True`` only if the passed object short-circuits the second
            child operand validator underlying this parent binary validator.
        '''

        pass
# ....................{ SUBCLASSES ~ &                     }....................
class BeartypeValidatorConjunction(BeartypeValidatorBinaryABC):
    '''
    **Beartype conjunction validator** (i.e., validator conjunctively
    evaluating the boolean truthiness returned by the validation performed by a
    pair of lower-level beartype validators, typically instantiated and
    returned by the :meth:`BeartypeValidator.__and__` dunder method of the
    first validator passed the second).
    '''

    # ..................{ INITIALIZERS                     }..................
    def __init__(
        self,
        validator_operand_1: BeartypeValidator,
        validator_operand_2: BeartypeValidator,
    ) -> None:
        '''
        Initialize this higher-level validator from the passed validators.

        Parameters
        ----------
        validator_operand_1 : BeartypeValidator
            First validator operated upon by this higher-level validator.
        validator_operand_2 : BeartypeValidator
            Second validator operated upon by this higher-level validator.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If either of these operands are *not* beartype validators.
        '''

        # Validate the passed operands as sane.
        _validate_operands(self, validator_operand_1, validator_operand_2)

        # Initialize our superclass with all remaining parameters.
        super().__init__(
            validator_operand_1=validator_operand_1,
            validator_operand_2=validator_operand_2,

            # Lambda function conjunctively performing both validations.
            is_valid=lambda obj: (
                validator_operand_1.is_valid(obj) and
                validator_operand_2.is_valid(obj)
            ),

            # Code expression conjunctively performing both validations.
            is_valid_code=(
                f'({validator_operand_1._is_valid_code} and '
                f'{validator_operand_2._is_valid_code})'
            ),
        )

    # ..................{ PROPERTIES                       }..................
    @property
    def _operator_symbol(self) -> str:
        # "&" is the operator overloaded by "BeartypeValidator.__and__" to
        # produce conjunctions.
        return '&'


    def _is_shortcircuited(self, obj: object) -> bool:
        # Return true only if the passed object violates this first child
        # validator. Why? Because if this first child validator is violated,
        # then this parent validator as a whole is violated; no further
        # validation of this second child validator is required.
        return not self._validator_operand_1.is_valid(obj)
# ....................{ SUBCLASSES ~ |                     }....................
class BeartypeValidatorDisjunction(BeartypeValidatorBinaryABC):
    '''
    **Beartype disjunction validator** (i.e., validator disjunctively
    evaluating the boolean truthiness returned by the validation performed by a
    pair of lower-level beartype validators, typically instantiated and
    returned by the :meth:`BeartypeValidator.__or__` dunder method of the
    first validator passed the second).
    '''

    # ..................{ INITIALIZERS                     }..................
    def __init__(
        self,
        validator_operand_1: BeartypeValidator,
        validator_operand_2: BeartypeValidator,
    ) -> None:
        '''
        Initialize this higher-level validator from the passed validators.

        Parameters
        ----------
        validator_operand_1 : BeartypeValidator
            First validator operated upon by this higher-level validator.
        validator_operand_2 : BeartypeValidator
            Second validator operated upon by this higher-level validator.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If either of these operands are *not* beartype validators.
        '''

        # Validate the passed operands as sane.
        _validate_operands(self, validator_operand_1, validator_operand_2)

        # Initialize our superclass with all remaining parameters.
        super().__init__(
            validator_operand_1=validator_operand_1,
            validator_operand_2=validator_operand_2,

            # Lambda function disjunctively performing both validations.
            is_valid=lambda obj: (
                validator_operand_1.is_valid(obj) or
                validator_operand_2.is_valid(obj)
            ),

            # Code expression disjunctively performing both validations.
            is_valid_code=(
                f'({validator_operand_1._is_valid_code} or '
                f'{validator_operand_2._is_valid_code})'
            ),
        )

    # ..................{ PROPERTIES                       }..................
    @property
    def _operator_symbol(self) -> str:
        # "|" is the operator overloaded by "BeartypeValidator.__or__" to
        # produce disjunctions.
        return '|'


    def _is_shortcircuited(self, obj: object) -> bool:
        # Return true only if the passed object satisfies this first child
        # validator. Why? Because if this first child validator is satisfied,
        # then this parent validator as a whole is satisfied; no further
        # validation of this second child validator is required.
        return self._validator_operand_1.is_valid(obj)
# ....................{ PRIVATE ~ validators               }....................
def _validate_operands(
    self: BeartypeValidatorBinaryABC,
    validator_operand_1: BeartypeValidator,
    validator_operand_2: BeartypeValidator,
) -> None:
    '''
    Validate the passed validator operands as sane.

    Parameters
    ----------
    self : BeartypeValidatorBinaryABC
        Beartype binary validator operating upon these operands.
    validator_operand_1 : BeartypeValidator
        First validator operated upon by this higher-level validator.
    validator_operand_2 : BeartypeValidator
        Second validator operated upon by this higher-level validator.

    Raises
    ----------
    BeartypeValeSubscriptionException
        If either of these operands are *not* beartype validators.
    '''

    # Pair each operand with its human-readable ordinal label and validate
    # each in order, raising on the first non-validator encountered (matching
    # the first-then-second precedence of the original checks).
    for operand_ordinal, operand in (
        ('first', validator_operand_1),
        ('second', validator_operand_2),
    ):
        # If this operand is *NOT* a beartype validator, raise an exception.
        if not isinstance(operand, BeartypeValidator):
            raise BeartypeValeSubscriptionException(
                f'Beartype "{self._operator_symbol}" validator '
                f'{operand_ordinal} operand '
                f'{represent_object(operand)} not beartype '
                f'validator (i.e., "beartype.vale.Is*[...]" object).'
            )
    # Else, both of these operands are beartype validators.
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Core unary beartype validators** (i.e., :class:`BeartypeValidator` subclasses
implementing unary operations on a single lower-level beartype validator).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from abc import ABCMeta, abstractmethod
from beartype.roar import BeartypeValeSubscriptionException
from beartype.vale._core._valecore import BeartypeValidator
from beartype.vale._util._valeutiltext import format_diagnosis_line
from beartype._util.text.utiltextmagic import CODE_INDENT_1
from beartype._util.text.utiltextrepr import represent_object
# ....................{ SUPERCLASSES                       }....................
class BeartypeValidatorUnaryABC(BeartypeValidator, metaclass=ABCMeta):
    '''
    Abstract base class of all **beartype unary validator** (i.e., validator
    modifying the boolean truthiness returned by the validation performed by a
    single lower-level beartype validator) subclasses.

    Attributes
    ----------
    _validator_operand : BeartypeValidator
        Lower-level validator operated upon by this higher-level validator.
    '''

    # ..................{ CLASS VARIABLES                  }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Subclasses declaring uniquely subclass-specific instance
    # variables *MUST* additionally slot those variables. Subclasses violating
    # this constraint will be usable but unslotted, which defeats our purposes.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Slot all instance variables defined on this object to minimize the time
    # complexity of both reading and writing variables across frequently called
    # cache dunder methods. Slotting has been shown to reduce read and write
    # costs by approximately ~10%, which is non-trivial.
    __slots__ = (
        '_validator_operand',
    )

    # ..................{ INITIALIZERS                     }..................
    def __init__(
        self,
        validator_operand: BeartypeValidator,
        **kwargs
    ) -> None:
        '''
        Initialize this validator from the passed metadata.

        Parameters
        ----------
        validator_operand : BeartypeValidator
            Lower-level validator operated upon by this higher-level validator.

        All remaining keyword parameters are passed as is to the superclass
        :meth:`BeartypeValidator.__init__` method.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If this operand is *not* itself a beartype validator.
        '''

        # Callable accepting no arguments returning a machine-readable
        # representation of this unary validator.
        #
        # Deliberately a lambda closing over the operand and "self", deferring
        # (possibly slow) representation generation until a caller actually
        # requests repr() of this validator.
        get_repr = lambda: (
            f'{self._operator_symbol}{repr(validator_operand)}')

        # Initialize our superclass with all remaining parameters.
        super().__init__(
            is_valid_code_locals=validator_operand._is_valid_code_locals,
            get_repr=get_repr,
            **kwargs
        )

        # Classify all remaining passed parameters.
        self._validator_operand = validator_operand

    # ..................{ GETTERS                          }..................
    #FIXME: Unit test us up, please.
    def get_diagnosis(
        self,
        *,

        # Mandatory keyword-only parameters.
        obj: object,
        indent_level_outer: str,
        indent_level_inner: str,
        **kwargs
    ) -> str:
        '''
        Multi-line human-readable diagnosis describing how the passed object
        either satisfies *or* violates this unary validator: a parenthesized
        block containing the child operand's diagnosis, prefixed by this
        validator's operator symbol.
        '''

        # Line diagnosing this object against this negated parent validator.
        line_outer_prefix = format_diagnosis_line(
            validator_repr='(',
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner,
            is_obj_valid=self.is_valid(obj),
        )

        # Line diagnosing this object against this non-negated child validator
        # with an increased indentation level for readability.
        line_inner_operand = self._validator_operand.get_diagnosis(
            obj=obj,
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner + CODE_INDENT_1,
            **kwargs
        )

        # Line providing the suffixing ")" delimiter for readability.
        line_outer_suffix = format_diagnosis_line(
            validator_repr=')',
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner,
        )

        # Return these lines concatenated.
        #
        # NOTE(review): the operator symbol is prefixed *before* the outer
        # indentation embedded in "line_outer_prefix", subtly shifting column
        # alignment of this first line relative to the others -- confirm this
        # is intended.
        return (
            f'{self._operator_symbol}{line_outer_prefix}\n'
            f'{line_inner_operand}\n'
            f'{line_outer_suffix}'
        )

    # ..................{ ABSTRACT                         }..................
    # Abstract methods required to be concretely implemented by subclasses.

    @property
    @abstractmethod
    def _operator_symbol(self) -> str:
        '''
        Human-readable string embodying the operation performed by this unary
        validator - typically the single-character mathematical sign
        symbolizing this operation.
        '''

        pass
# ....................{ SUBCLASSES                         }....................
class BeartypeValidatorNegation(BeartypeValidatorUnaryABC):
    '''
    **Negation beartype validator** (i.e., validator negating the boolean
    truthiness returned by the validation performed by a lower-level beartype
    validator, typically instantiated and returned by the
    :meth:`BeartypeValidator.__invert__` dunder method of that validator).
    '''

    # ..................{ INITIALIZERS                     }..................
    def __init__(self, validator_operand: BeartypeValidator) -> None:
        '''
        Initialize this higher-level validator from the passed validator.

        Parameters
        ----------
        validator_operand : BeartypeValidator
            Validator operated upon by this higher-level validator.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If this operand is *not* a beartype validator.
        '''

        # Validate the passed operand as sane.
        _validate_operand(self, validator_operand)

        # Initialize our superclass with all remaining parameters.
        super().__init__(
            validator_operand=validator_operand,

            # Lambda function negating the validation performed by the child.
            is_valid=lambda obj: not validator_operand.is_valid(obj),

            # Code expression negating the validation performed by the child.
            is_valid_code=f'(not {validator_operand._is_valid_code})',
        )

    # ..................{ PROPERTIES                       }..................
    @property
    def _operator_symbol(self) -> str:
        # "~" is the operator overloaded by "BeartypeValidator.__invert__" to
        # produce negations.
        return '~'
# ....................{ PRIVATE ~ validators               }....................
def _validate_operand(
    self: BeartypeValidatorUnaryABC,
    validator_operand: BeartypeValidator,
) -> None:
    '''
    Validate the passed validator operand as sane.

    Parameters
    ----------
    self : BeartypeValidatorUnaryABC
        Beartype unary validator operating upon this operand.
    validator_operand : BeartypeValidator
        Validator operated upon by this higher-level validator.

    Raises
    ----------
    BeartypeValeSubscriptionException
        If this operand is *not* a beartype validator.
    '''

    #FIXME: Unit test us up, please.
    # Accept any bona fide beartype validator as is via a guard clause.
    if isinstance(validator_operand, BeartypeValidator):
        return
    # Else, this operand is *NOT* a beartype validator. Raise an exception.

    raise BeartypeValeSubscriptionException(
        f'Beartype "{self._operator_symbol}" validator operand '
        f'{represent_object(validator_operand)} not beartype '
        f'validator (i.e., "beartype.vale.Is*[...]" object).'
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Core beartype validator.**
This private submodule defines the core private :class:`BeartypeValidator`
class instantiated by public **beartype validator factories** (i.e., instances
of concrete subclasses of the private
:class:`beartype._vale._factory._valeisabc._BeartypeValidatorFactoryABC`
abstract base class (ABC)).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeValeSubscriptionException
from beartype.vale._util._valeutilfunc import die_unless_validator_tester
from beartype.vale._util._valeutiltext import format_diagnosis_line
from beartype.vale._util._valeutiltyping import (
BeartypeValidatorTester,
BeartypeValidatorRepresenter,
)
from beartype._data.datatyping import LexicalScope
from beartype._util.func.arg.utilfuncargtest import is_func_argless
from beartype._util.text.utiltextrepr import represent_object
# ....................{ CLASSES }....................
class BeartypeValidator(object):
    '''
    **Beartype validator** (i.e., object encapsulating a caller-defined
    validation callable returning ``True`` when an arbitrary object passed to
    that callable satisfies an arbitrary constraint, suitable for subscripting
    (indexing) :pep:`593`-compliant :attr:`typing.Annotated` type hints
    enforcing that validation on :mod:`beartype`-decorated callable parameters
    and returns annotated by those hints).

    Caveats
    ----------
    **This private class is not intended to be externally instantiated** (e.g.,
    by calling the :meth:`__init__` constructor). This class is *only* intended
    to be internally instantiated by subscripting (indexing) various public
    type hint factories (e.g., :class:`beartype.vale.Is`).

    Attributes
    ----------
    _get_repr : BeartypeValidatorRepresenter
        **Representer** (i.e., either a string *or* caller-defined callable
        accepting no arguments returning a machine-readable representation of
        this validator). See the :data:`BeartypeValidatorRepresenter` type hint
        for further details.
    _is_valid : BeartypeValidatorTester
        **Validator tester** (i.e., caller-defined callable accepting a single
        arbitrary object and returning either ``True`` if that object satisfies
        an arbitrary constraint *or* ``False`` otherwise).
    _is_valid_code : str
        **Validator code** (i.e., Python code snippet validating the
        previously localized parameter or return value against the same
        validation performed by the :meth:`is_valid` function). For efficiency,
        callers validating data through dynamically generated code (e.g., the
        :func:`beartype.beartype` decorator) rather than standard function
        calls (e.g., the private :mod:`beartype._decor._hint._pep._error`
        subpackage) should prefer :attr:`is_valid_code` to :meth:`is_valid`.
        Despite performing the same validation as the :meth:`is_valid`
        callable, this code avoids the additional stack frame imposed by
        calling that callable and thus constitutes an optimization.
    _is_valid_code_locals : LexicalScope
        **Validator code local scope** (i.e., dictionary mapping from the name
        to value of each local attribute referenced in :attr:`code`) required
        to dynamically compile this validator code into byte code at runtime.

    See Also
    ----------
    :class:`Is`
        Class docstring for further details.
    '''

    # ..................{ CLASS VARIABLES                    }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Subclasses declaring uniquely subclass-specific instance
    # variables *MUST* additionally slot those variables. Subclasses violating
    # this constraint will be usable but unslotted, which defeats our purposes.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Slot all instance variables defined on this object to minimize the time
    # complexity of both reading and writing variables across frequently called
    # cache dunder methods. Slotting has been shown to reduce read and write
    # costs by approximately ~10%, which is non-trivial.
    __slots__ = (
        '_get_repr',
        '_is_valid',
        '_is_valid_code',
        '_is_valid_code_locals',
    )

    # ..................{ INITIALIZERS                       }..................
    def __init__(
        self,

        # Mandatory keyword-only parameters.
        *,
        is_valid: BeartypeValidatorTester,
        is_valid_code: str,
        is_valid_code_locals: LexicalScope,
        get_repr: BeartypeValidatorRepresenter,
    ) -> None:
        '''
        Initialize this validator from the passed metadata.

        Parameters
        ----------
        is_valid : BeartypeValidatorTester
            **Validator tester** (i.e., caller-defined callable accepting a
            single arbitrary object and returning either ``True`` if that object
            satisfies an arbitrary constraint *or* ``False`` otherwise).
        is_valid_code : str
            **Validator code** (i.e., Python code snippet validating the
            previously localized parameter or return value against the same
            validation performed by the :func:`is_valid` function). This code:

            * *Must* contain one or more ``"{obj}"`` substrings, which external
              code generators (e.g., the :func:`beartype.beartype` decorator)
              will globally replace at evaluation time with the actual test
              subject object to be validated by this code.
            * *May* contain one or more ``"{indent}"`` substrings, which such
              code generators will globally replace at evaluation time with the
              line-oriented indentation required to generate a
              valid Python statement embedding this code. For consistency with
              :pep:`8`-compliant and well-established Python style guides, any
              additional indentation hard-coded into this code should be
              aligned to **four-space indentation.**
        is_valid_code_locals : LexicalScope
            **Validator code local scope** (i.e., dictionary mapping from the
            name to value of each local attribute referenced in
            :attr:`is_valid_code` code) required to dynamically compile this
            validator code into byte code at runtime.
        get_repr : BeartypeValidatorRepresenter
            **Representer** (i.e., either a string *or* caller-defined callable
            accepting no arguments returning a machine-readable representation
            of this validator). See the :data:`BeartypeValidatorRepresenter`
            type hint for further details.

        Raises
        ----------
        beartype.roar.BeartypeValeSubscriptionException
            If either:

            * ``is_valid`` is either:

              * *Not* callable.
              * A C-based rather than pure-Python callable.
              * A pure-Python callable accepting two or more arguments.

            * ``is_valid_code`` is either:

              * *Not* a string.
              * A string either:

                * Empty.
                * Non-empty but **invalid** (i.e., *not* containing the test
                  subject substring ``{obj}``).

            * ``is_valid_code_locals`` is *not* a dictionary.
            * ``get_repr`` is either:

              * *Not* callable.
              * A C-based rather than pure-Python callable.
              * A pure-Python callable accepting one or more arguments.
              * The empty string.
        '''

        # If that callable is *NOT* a validator tester, raise an exception.
        die_unless_validator_tester(is_valid)
        # Else, that callable is a validator tester.

        # If this code is *NOT* a string, raise an exception.
        if not isinstance(is_valid_code, str):
            raise BeartypeValeSubscriptionException(
                f'Validator code not string:\n'
                f'{represent_object(is_valid_code)}'
            )
        # Else, this code is a string.
        #
        # If this code is the empty string, raise an exception.
        elif not is_valid_code:
            raise BeartypeValeSubscriptionException('Validator code empty.')
        # Else, this code is a non-empty string.
        #
        # If this code does *NOT* contain the test subject substring
        # "{obj}" and is invalid, raise an exception.
        elif '{obj}' not in is_valid_code:
            raise BeartypeValeSubscriptionException(
                f'Validator code invalid '
                f'(i.e., test subject substring "{{obj}}" not found):\n'
                f'{is_valid_code}'
            )
        # Else, this code is hopefully valid.
        #
        # If this code is *NOT* explicitly prefixed by "(" and suffixed by
        # ")", do so to ensure this code remains safely evaluable when
        # embedded in parent expressions.
        elif not (
            is_valid_code[ 0] == '(' and
            is_valid_code[-1] == ')'
        ):
            is_valid_code = f'({is_valid_code})'
        # Else, this code is explicitly prefixed by "(" and suffixed by ")".

        # If this dictionary of code locals is *NOT* a dictionary, raise an
        # exception.
        if not isinstance(is_valid_code_locals, dict):
            raise BeartypeValeSubscriptionException(
                f'Validator locals '
                f'{represent_object(is_valid_code_locals)} not dictionary.'
            )
        # Else, this dictionary of code locals is a dictionary.

        # Classify this validator, effectively binding this callable to this
        # object as an object-specific static method.
        self._is_valid = is_valid

        # Classify this representer via a writeable property internally
        # validating this representer. (Embrace the magical, people.)
        self.get_repr = get_repr

        # Classify all remaining parameters.
        self._is_valid_code = is_valid_code
        self._is_valid_code_locals = is_valid_code_locals

    # ..................{ PROPERTIES ~ read-only             }..................
    # Properties with no corresponding setter and thus read-only.

    @property
    def is_valid(self) -> BeartypeValidatorTester:
        '''
        **Validator callable** (i.e., caller-defined callable accepting a
        single arbitrary object and returning either ``True`` if that object
        satisfies an arbitrary constraint *or* ``False`` otherwise).
        '''

        return self._is_valid

    # ..................{ PROPERTIES ~ writeable             }..................
    # Properties with a corresponding setter and thus writeable.

    @property
    def get_repr(self) -> BeartypeValidatorRepresenter:
        '''
        **Representer** (i.e., either a string *or* caller-defined callable
        accepting no arguments returning a machine-readable representation of
        this validator). See the :data:`BeartypeValidatorRepresenter` type hint
        for further details.
        '''

        return self._get_repr


    @get_repr.setter
    def get_repr(self, get_repr: BeartypeValidatorRepresenter) -> None:
        '''
        Override the initial representer for this validator.

        Parameters
        ----------
        get_repr : BeartypeValidatorRepresenter
            **Representer** (i.e., either a string *or* caller-defined callable
            accepting no arguments returning a machine-readable representation
            of this validator). See the :data:`BeartypeValidatorRepresenter`
            type hint for further details.

        Raises
        ----------
        :exc:`BeartypeValeSubscriptionException`
            This representer is either:

            * *Not* callable.
            * A C-based rather than pure-Python callable.
            * A pure-Python callable accepting one or more arguments.
            * The empty string.
        '''

        # If this representer is a string...
        if isinstance(get_repr, str):
            # If this string is empty, raise an exception.
            if not get_repr:
                raise BeartypeValeSubscriptionException(
                    'Representer string empty.')
        # Else, this representer is *NOT* a string.
        #
        # If this representer is *NOT* a pure-Python callable accepting one
        # argument, raise an exception.
        elif not is_func_argless(
            func=get_repr, exception_cls=BeartypeValeSubscriptionException):
            raise BeartypeValeSubscriptionException(
                f'Representer {repr(get_repr)} neither string nor '
                f'argument-less pure-Python callable.'
            )
        # Else, this representer is an argument-less pure-Python callable.

        # Set this representer.
        self._get_repr = get_repr

    # ..................{ DUNDERS ~ str                      }..................
    def __repr__(self) -> str:
        '''
        Machine-readable representation of this validator.

        This function is memoized for efficiency.

        Warns
        ----------
        BeartypeValeLambdaWarning
            If this validator is implemented as a pure-Python lambda function
            whose definition is *not* parsable from the script or module
            defining that lambda.
        '''

        # If the instance variable underlying this dunder method is a callable,
        # reduce this variable to the string returned by this callable.
        if callable(self._get_repr):
            self._get_repr = self._get_repr()

        # In either case, this variable is now a string. Guarantee this.
        assert isinstance(self._get_repr, str), f'{self._get_repr} not string.'

        # Return this string as is.
        return self._get_repr

    # ..................{ GETTERS                            }..................
    def get_diagnosis(
        self,
        *,

        # Mandatory keyword-only parameters.
        obj: object,
        indent_level_outer: str,
        indent_level_inner: str,

        # Optional keyword-only parameters.
        is_shortcircuited: bool = False,
    ) -> str:
        '''
        Human-readable **validation failure diagnosis** (i.e., substring
        describing how the passed object either satisfies *or* violates this
        validator).

        This method is typically called by high-level error-handling logic to
        unambiguously describe the failure of an arbitrary object to satisfy an
        arbitrary validator. Since this validator may be synthesized from one
        or more lower-level validators (e.g., via the :meth:`__and__`,
        :meth:`__or__`, and :meth:`__invert__` dunder methods), the simple
        machine-readable representation of this validator does *not* adequately
        describe how exactly the passed object satisfies or fails to satisfy
        this validator. Only an exhaustive description suffices.

        Parameters
        ----------
        obj : object
            Arbitrary object to be diagnosed against this validator.
        indent_level_outer : str
            **Outermost indentation level** (i.e., zero or more adjacent spaces
            prefixing each line of the returned substring).
        indent_level_inner : str
            **Innermost indentation level** (i.e., zero or more adjacent spaces
            delimiting the human-readable representation of the tri-state
            boolean and validator representation in the returned substring).
        is_shortcircuited : bool, optional
            ``True`` only if this validator has been **short-circuited** (i.e.,
            *not* required to be tested against) by a previously tested sibling
            validator, in which case this method will silently catch and reduce
            exceptions raised by the :meth:`is_valid` method to ``False``.
            Short-circuiting typically arises from binary validators (e.g.,
            :class:`beartype.vale._core._valecore.BeartypeValidatorConjunction`)
            in which a low-level sibling validator, previously tested against by
            the higher-level binary validator encapsulating both this validator
            and that sibling validator, has already either fully satisfied *or*
            failed to satisfy that binary validator; a binary validator
            explicitly sets this parameter to ``True`` for *all* children
            validators except the first child validator when the first child
            validator either fully satisfies *or* fails to satisfy that binary
            validator.

            This is *not* merely an optimization; this is a design requirement.
            External users often chain validators together with set operators
            (e.g., ``&``, ``|``) under the standard expectation of
            short-circuiting, in which later validators are *not* tested when
            earlier validators already satisfy requirements. Violating this
            expectation causes later validators to trivially raise exceptions.
            Without short-circuiting, the otherwise valid following example
            raises a non-human-readable exception. The short-circuited
            ``IsArrayMatrix`` validator expects to be tested *only* when the
            preceding non-short-circuited ``IsArray2D`` validator fails:

            .. code-block:: python

               >>> import numpy as np
               >>> from beartype.vale import Is
               >>> IsArray2D = Is[lambda arr: arr.ndim == 2]
               >>> IsArrayMatrix = Is[lambda arr: arr.shape[0] == arr.shape[1]]
               >>> IsArray2DMatrix = IsArray2D & IsArrayMatrix
               >>> IsArray2DMatrix.get_diagnosis(
               ...     obj=np.zeros((4,)),
               ...     indent_level_outer='',
               ...     indent_level_inner='    ',
               ... )
               Traceback (most recent call last):
                 File "/home/leycec/tmp/mopy.py", line 10, in <module>
                   print(IsArray2DMatrix.get_diagnosis(
                 File "/home/leycec/py/beartype/beartype/vale/_core/_valecorebinary.py", line 149, in get_diagnosis
                   line_inner_operand_2 = self._validator_operand_2.get_diagnosis(
                 File "/home/leycec/py/beartype/beartype/vale/_core/_valecore.py", line 480, in get_diagnosis
                   is_obj_valid = self.is_valid(obj)
                 File "/home/leycec/tmp/mopy.py", line 7, in <lambda>
                   IsArrayMatrix = Is[lambda arr: arr.shape[0] == arr.shape[1]]
               IndexError: tuple index out of range

            Defaults to ``False``.

        Returns
        ----------
        str
            Substring diagnosing this object against this validator.
        '''
        assert isinstance(is_shortcircuited, bool), (
            f'{repr(is_shortcircuited)} not boolean.')

        # True only if the passed object satisfies this validator. Note that
        # this tri-state boolean remains "None" when a short-circuited
        # validator raises an exception below.
        is_obj_valid = None

        # If this validator has been short-circuited by a prior sibling...
        if is_shortcircuited:
            # Attempt to decide whether that object satisfies this validator.
            try:
                is_obj_valid = self.is_valid(obj)
            # If doing so raises an exception, this short-circuited validator
            # was *NOT* intended to be called under short-circuiting. In this
            # case, silently ignore this exception. See the above discussion.
            #
            # Note that only standard exceptions are ignored here. System-level
            # exceptions (e.g., "KeyboardInterrupt", "SystemExit") subclassing
            # only "BaseException" are intentionally permitted to propagate.
            except Exception:
                pass
        # Else, this validator is *NOT* short-circuited. In this case, this
        # validator is *NOT* expected to raise exceptions. Nonetheless, if this
        # validator does so, ensure that exception is propagated up the call
        # stack by *NOT* silently ignoring that exception (as above).
        else:
            is_obj_valid = self.is_valid(obj)

        # Format the validity of this object against this validator for the
        # typical case of a lowest-level beartype validator *NOT* wrapping one
        # or more other even lower-level beartype validators (e.g., via a set
        # theoretic operator).
        return format_diagnosis_line(
            validator_repr=repr(self),
            indent_level_outer=indent_level_outer,
            indent_level_inner=indent_level_inner,
            is_obj_valid=is_obj_valid,
        )

    # ..................{ DUNDERS ~ operator                 }..................
    # Define a domain-specific language (DSL) enabling callers to dynamically
    # synthesize higher-level validators from lower-level validators via
    # overloaded set theoretic operators.

    def __and__(self, other: 'BeartypeValidator') -> 'BeartypeValidator':
        '''
        **Conjunction** (i.e., ``self & other``), synthesizing a new
        :class:`BeartypeValidator` object whose validator returns ``True`` only
        when the validators of both this *and* the passed
        :class:`BeartypeValidator` objects all return ``True``.

        Parameters
        ----------
        other : BeartypeValidator
            Object to conjunctively synthesize with this object.

        Returns
        ----------
        BeartypeValidator
            New object conjunctively synthesized with this object.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If the passed object is *not* also an instance of the same class.
        '''

        # Avoid circular import dependencies.
        from beartype.vale._core._valecorebinary import (
            BeartypeValidatorConjunction)

        # Closures for great justice.
        return BeartypeValidatorConjunction(
            validator_operand_1=self,
            validator_operand_2=other,
        )


    def __or__(self, other: 'BeartypeValidator') -> 'BeartypeValidator':
        '''
        **Disjunction** (i.e., ``self | other``), synthesizing a new
        :class:`BeartypeValidator` object whose validator returns ``True`` only
        when the validators of either this *or* the passed
        :class:`BeartypeValidator` objects return ``True``.

        Parameters
        ----------
        other : BeartypeValidator
            Object to disjunctively synthesize with this object.

        Returns
        ----------
        BeartypeValidator
            New object disjunctively synthesized with this object.
        '''

        # Avoid circular import dependencies.
        from beartype.vale._core._valecorebinary import (
            BeartypeValidatorDisjunction)

        # Closures for great justice.
        return BeartypeValidatorDisjunction(
            validator_operand_1=self,
            validator_operand_2=other,
        )


    #FIXME: Fun optimization: if inverting something that's already been
    #inverted, return the original "BeartypeValidator" object sans inversion.
    def __invert__(self) -> 'BeartypeValidator':
        '''
        **Negation** (i.e., ``~self``), synthesizing a new
        :class:`BeartypeValidator` object whose validator returns ``True`` only
        when the validators of this :class:`BeartypeValidator` object returns
        ``False``.

        Returns
        ----------
        BeartypeValidator
            New object negating this object.
        '''

        # Avoid circular import dependencies.
        from beartype.vale._core._valecoreunary import (
            BeartypeValidatorNegation)

        # Closures for profound lore.
        return BeartypeValidatorNegation(validator_operand=self)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype declarative type validation classes** (i.e.,
:mod:`beartype`-specific classes enabling callers to define PEP-compliant
validators from arbitrary caller-defined classes tested via explicitly
supported object introspectors efficiently generating stack-free code).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeValeSubscriptionException
from beartype.vale._is._valeisabc import _BeartypeValidatorFactoryABC
from beartype.vale._util._valeutilsnip import (
VALE_CODE_CHECK_ISINSTANCE_TEST_format,
VALE_CODE_CHECK_ISSUBCLASS_TEST_format,
)
from beartype.vale._core._valecore import BeartypeValidator
from beartype._data.datatyping import (
LexicalScope,
TypeOrTupleTypes,
)
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.cls.utilclstest import is_type_subclass
from beartype._util.cls.pep.utilpep3119 import (
die_unless_type_isinstanceable,
die_unless_type_or_types_isinstanceable,
die_unless_type_issubclassable,
die_unless_type_or_types_issubclassable,
)
from beartype._util.func.utilfuncscope import add_func_scope_attr
from beartype._util.utilobject import get_object_name
# ....................{ SUBCLASSES ~ instance }....................
class _IsInstanceFactory(_BeartypeValidatorFactoryABC):
    '''
    **Beartype type instance validator factory** (i.e., object creating and
    returning a new beartype validator when subscripted (indexed) by any class,
    validating that :mod:`beartype`-decorated callable parameters and returns
    annotated by :attr:`typing.Annotated` type hints subscripted by that
    validator are objects whose classes subclass that class).

    This class efficiently validates that callable parameters and returns are
    instances of the arbitrary class subscripting (indexing) this factory. Any
    :mod:`beartype`-decorated callable parameter or return annotated by a
    :attr:`typing.Annotated` type hint subscripted by this factory subscripted
    by any class (e.g., ``typing.Annotated[type,
    beartype.vale.IsInstance[{cls}]]`` for any class ``{cls}``)
    validates that parameter or return value to be a subclass of that class.

    This factory generalizes :pep:`484`-compliant **isinstanceable types**
    (i.e., normal pure-Python and C-based classes that may be passed as the
    second parameter to the :func:`isinstance` builtin), because this factory
    does everything those types do and considerably more. Superficially,
    isinstanceable types also validate that callable parameters and returns are
    instances of those types. The similarity ends there, however.
    Isinstanceable types only narrowly apply to callable parameters and
    returns; meanwhile, this factory produces beartype validators universally
    applicable to both:

    * Callable parameters and returns.
    * **Attributes** of callable parameters and returns via the
      :class:`beartype.vale.IsAttr` factory.

    **This factory incurs no time performance penalties at call time.** Whereas
    the general-purpose :class:`beartype.vale.Is` factory necessarily calls
    the caller-defined callable subscripting that factory at call time and thus
    incurs a minor time performance penalty, this factory efficiently reduces
    to one-line tests in :mod:`beartype`-generated wrapper functions *without*
    calling any callables and thus incurs *no* time performance penalties.

    Examples
    ----------
    .. code-block:: python

       # Import the requisite machinery.
       >>> from beartype import beartype
       >>> from beartype.vale import IsInstance
       >>> from math import factorial as loose_factorial
       >>> from typing import Annotated

       # Type hint matching any non-boolean integer, generating code like:
       #     (isinstance(number, int) and not isinstance(number, bool)))
       # Surprisingly, booleans are literally integers in Python (e.g.,
       # ``issubclass(bool, int) is True``). Callable parameters and returns
       # annotated as accepting only integers thus implicitly accept booleans
       # as well by default. This type hint explicitly prevents that ambiguity.
       >>> IntNonbool = Annotated[int, ~IsInstance[bool]]

       # Annotate callables by those type hints.
       >>> @beartype
       ... def strict_factorial(integer: IntNonbool) -> IntNonbool:
       ...     """
       ...     Factorial of the passed integer, explicitly prohibiting booleans
       ...     masquerading as integers.
       ...     """
       ...     return loose_factorial(integer)

       # Call those callables with parameters satisfying those hints.
       >>> strict_factorial(42)
       1405006117752879898543142606244511569936384000000000

       # Call those callables with parameters violating those hints.
       >>> strict_factorial(True)
       beartype.roar.BeartypeCallHintParamViolation: @beartyped
       strict_factorial() parameter integer=True violates type hint
       typing.Annotated[int, ~IsInstance[builtins.bool]], as True violates
       validator ~IsInstance[builtins.bool]:

    See Also
    ----------
    :class:`beartype.vale.Is`
        Further commentary.
    '''

    # ..................{ DUNDERS                            }..................
    @callable_cached
    def __getitem__(self, types: TypeOrTupleTypes) -> BeartypeValidator:  # type: ignore[override]
        '''
        Create and return a new beartype validator validating type instancing
        against at least one of the passed classes, suitable for subscripting
        :pep:`593`-compliant :attr:`typing.Annotated` type hints.

        This method is memoized for efficiency.

        Parameters
        ----------
        types : TypeOrTupleTypes
            One or more arbitrary classes to validate type instancing against.

        Returns
        ----------
        BeartypeValidator
            Beartype validator encapsulating this validation.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If this factory was subscripted by either:

            * *No* arguments.
            * One or more arguments that are *not* **isinstanceable types**
              (i.e., classes passable as the second argument to the
              :func:`isinstance` builtin).

        See Also
        ----------
        :class:`_IsAttrFactory`
            Usage instructions.
        '''

        # Machine-readable string representing this type or tuple of types.
        types_repr = ''

        # If this factory was subscripted by either no arguments *OR* two or
        # more arguments...
        if isinstance(types, tuple):
            # If this factory was subscripted by *NO* arguments, raise an
            # exception.
            if not types:
                raise BeartypeValeSubscriptionException(
                    f'{self._getitem_exception_prefix}empty tuple.')
            # Else, this factory was subscripted by two or more arguments.

            # If any such argument is *NOT* an isinstanceable type, raise an
            # exception.
            die_unless_type_or_types_isinstanceable(
                type_or_types=types,
                exception_cls=BeartypeValeSubscriptionException,
                exception_prefix=self._getitem_exception_prefix,
            )
            # Else, all such arguments are isinstanceable types.

            # Comma-delimited concatenation of the fully-qualified name of
            # each such type, efficiently joined in a single pass.
            types_repr = ', '.join(get_object_name(cls) for cls in types)
        # Else, this factory was subscripted by one argument. In this case...
        else:
            # If this argument is *NOT* an isinstanceable type, raise an
            # exception.
            die_unless_type_isinstanceable(
                cls=types,
                exception_cls=BeartypeValeSubscriptionException,
                exception_prefix=self._getitem_exception_prefix,
            )
            # Else, this argument is an isinstanceable type.

            # Fully-qualified name of this type.
            types_repr = get_object_name(types)

        # Callable inefficiently validating against this type.
        is_valid = lambda pith: isinstance(pith, types)

        # Dictionary mapping from the name to value of each local attribute
        # referenced in the "is_valid_code" snippet defined below.
        is_valid_code_locals: LexicalScope = {}

        # Name of a new parameter added to the signature of wrapper functions
        # whose value is this type or tuple of types, enabling this type or
        # tuple of types to be tested in those functions *WITHOUT* additional
        # stack frames.
        param_name_types = add_func_scope_attr(
            attr=types, func_scope=is_valid_code_locals)

        # Code snippet efficiently validating against this type.
        is_valid_code = VALE_CODE_CHECK_ISINSTANCE_TEST_format(
            param_name_types=param_name_types)

        # Create and return this subscription.
        return BeartypeValidator(
            is_valid=is_valid,
            is_valid_code=is_valid_code,
            is_valid_code_locals=is_valid_code_locals,

            # Intentionally pass this subscription's machine-readable
            # representation as a string rather than lambda function returning
            # a string, as this string is safely, immediately, and efficiently
            # constructable from these arguments' representation.
            get_repr=f'{self._basename}[{types_repr}]',
        )
# ....................{ SUBCLASSES ~ subclass }....................
class _IsSubclassFactory(_BeartypeValidatorFactoryABC):
'''
**Beartype type inheritance validator factory** (i.e., object creating and
returning a new beartype validator when subscripted (indexed) by any class,
validating that :mod:`beartype`-decorated callable parameters and returns
annotated by :attr:`typing.Annotated` type hints subscripted by that
validator subclass that class).
This class efficiently validates that callable parameters and returns are
subclasses of the arbitrary class subscripting (indexing) this factory. Any
:mod:`beartype`-decorated callable parameter or return annotated by a
:attr:`typing.Annotated` type hint subscripted by this factory subscripted
by any class (e.g., ``typing.Annotated[type,
beartype.vale.IsSubclass[{cls}]]`` for any class ``{cls}``)
validates that parameter or return value to be a subclass of that class.
This factory generalizes the :pep:`484`-compliant :attr:`typing.Type` and :
pep:`585`-compliant :class:`type` type hint factories, because this factory
does everything those factories do and substantially more. Superficially, :
attr:`typing.Type` and :class:`type` type hints also validate that callable
parameters and returns are subclasses of the classes subscripting those
hints. The similarity ends there, however. Those hints only narrowly apply
to callable parameters and returns; meanwhile, this factory produces
beartype validators universally applicable to both:
* Callable parameters and returns.
* **Attributes** of callable parameters and returns via the
:class:`beartype.vale.IsAttr` factory.
**This factory incurs no time performance penalties at call time.** Whereas
the general-purpose :class:`beartype.vale.Is` factory necessarily calls
the caller-defined callable subscripting that factory at call time and thus
incurs a minor time performance penalty, this factory efficiently reduces
to one-line tests in :mod:`beartype`-generated wrapper functions *without*
calling any callables and thus incurs *no* time performance penalties.
Examples
----------
.. code-block:: python
# Import the requisite machinery.
>>> from beartype import beartype
>>> from beartype.vale import IsAttr, IsSubclass
>>> from typing import Annotated
>>> import numpy as np
# Type hint matching only NumPy arrays of floats of arbitrary precision,
# generating code resembling:
# (isinstance(array, np.ndarray) and
# np.issubdtype(array.dtype, np.floating))
>>> NumpyFloatArray = Annotated[
... np.ndarray, IsAttr['dtype', IsAttr['type', IsSubclass[np.floating]]]]
# Type hint matching only NumPy arrays of integers of arbitrary
# precision, generating code resembling:
# (isinstance(array, np.ndarray) and
# np.issubdtype(array.dtype, np.integer))
>>> NumpyIntArray = Annotated[
... np.ndarray, IsAttr['dtype', IsAttr['type', IsSubclass[np.integer]]]]
# NumPy arrays of well-known real number series.
>>> E_APPROXIMATIONS = np.array(
... [1+1, 1+1+1/2, 1+1+1/2+1/6, 1+1+1/2+1/6+1/24,])
>>> FACTORIALS = np.array([1, 2, 6, 24, 120, 720, 5040, 40320, 362880,])
# Annotate callables by those type hints.
>>> @beartype
... def round_int(array: NumpyFloatArray) -> NumpyIntArray:
... """
... NumPy array of integers rounded from the passed NumPy array of
... floating-point numbers to the nearest 64-bit integer.
... """
... return np.around(array).astype(np.int64)
# Call those callables with parameters satisfying those hints.
>>> round_int(E_APPROXIMATIONS)
[2, 3, 3, 3]
# Call those callables with parameters violating those hints.
>>> round_int(FACTORIALS)
beartype.roar.BeartypeCallHintParamViolation: @beartyped round_int()
parameter array="array([ 1, 2, 6, 24, 120, 720, 5040, 40320, ...])"
violates type hint typing.Annotated[numpy.ndarray, IsAttr['dtype',
IsAttr['type', IsSubclass[numpy.floating]]]], as "array([ 1, 2, 6, 24,
120, 720, 5040, 40320, ...])" violates validator IsAttr['dtype',
IsAttr['type', IsSubclass[numpy.floating]]]
See Also
----------
:class:`beartype.vale.Is`
Further commentary.
'''
# ..................{ DUNDERS }..................
@callable_cached
def __getitem__(self, types: TypeOrTupleTypes) -> BeartypeValidator: # type: ignore[override]
'''
Create and return a new beartype validator validating type inheritance
against at least one of the passed classes, suitable for subscripting
:pep:`593`-compliant :attr:`typing.Annotated` type hints.
This method is memoized for efficiency.
Parameters
----------
types : TypeOrTupleTypes
One or more arbitrary classes to validate type inheritance against.
Returns
----------
BeartypeValidator
Beartype validator encapsulating this validation.
Raises
----------
BeartypeValeSubscriptionException
If this factory was subscripted by either:
* *No* arguments.
* One or more arguments that are *not* **issubclassable types**
(i.e., classes passable as the second argument to the :func:
`issubclass` builtin).
See Also
----------
:class:`_IsAttrFactory`
Usage instructions.
'''
# Machine-readable string representing this type or tuple of types.
types_repr = ''
# If this factory was subscripted by either no arguments *OR* two or
# more arguments...
if isinstance(types, tuple):
# If this factory was subscripted by *NO* arguments, raise an
# exception.
if not types:
raise BeartypeValeSubscriptionException(
f'{self._getitem_exception_prefix}empty tuple.')
# Else, this factory was subscripted by two or more arguments.
# If any such argument is *NOT* an issubclassable type, raise an
# exception.
die_unless_type_or_types_issubclassable(
type_or_types=types,
exception_cls=BeartypeValeSubscriptionException,
exception_prefix=self._getitem_exception_prefix,
)
# Else, all such arguments are issubclassable types.
# Append the fully-qualified name of each such type to this string.
for cls in types:
types_repr += f'{get_object_name(cls)}, '
# Strip the suffixing ", " from this string for readability.
types_repr = types_repr[:-2]
# Else, this factory was subscripted by one argument. In this case...
else:
# If this argument is *NOT* an issubclassable type, raise an
# exception.
die_unless_type_issubclassable(
cls=types,
exception_cls=BeartypeValeSubscriptionException,
exception_prefix=self._getitem_exception_prefix,
)
# Else, this argument is an issubclassable type.
# Fully-qualified name of this type.
types_repr = get_object_name(types)
# Callable inefficiently validating against this type.
is_valid = lambda pith: is_type_subclass(pith, types)
# Dictionary mapping from the name to value of each local attribute
# referenced in the "is_valid_code" snippet defined below.
is_valid_code_locals: LexicalScope = {}
# Name of a new parameter added to the signature of wrapper functions
# whose value is this type or tuple of types, enabling this type or
# tuple of types to be tested in those functions *WITHOUT* additional
# stack frames.
param_name_types = add_func_scope_attr(
attr=types, func_scope=is_valid_code_locals)
# Code snippet efficiently validating against this type.
is_valid_code = VALE_CODE_CHECK_ISSUBCLASS_TEST_format(
param_name_types=param_name_types)
# Create and return this subscription.
return BeartypeValidator(
is_valid=is_valid,
is_valid_code=is_valid_code,
is_valid_code_locals=is_valid_code_locals,
# Intentionally pass this subscription's machine-readable
# representation as a string rather than lambda function returning
# a string, as this string is safely, immediately, and efficiently
# constructable from these arguments' representation.
get_repr=f'{self._basename}[{types_repr}]',
)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype functional validation classes** (i.e., :mod:`beartype`-specific
classes enabling callers to define PEP-compliant validators from arbitrary
caller-defined callables *not* efficiently generating stack-free code).
This private submodule defines the core low-level class hierarchy driving the
entire :mod:`beartype` validation ecosystem.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeValeLambdaWarning,
BeartypeValeValidationException,
)
from beartype.vale._is._valeisabc import _BeartypeValidatorFactoryABC
from beartype.vale._core._valecore import BeartypeValidator
from beartype.vale._util._valeutilfunc import die_unless_validator_tester
from beartype.vale._util._valeutiltyping import BeartypeValidatorTester
from beartype._data.datatyping import LexicalScope
from beartype._util.func.utilfuncscope import add_func_scope_attr
from beartype._util.py.utilpyversion import IS_PYTHON_AT_LEAST_3_8
from beartype._util.text.utiltextrepr import (
represent_func,
represent_object,
)
# ....................{ PRIVATE ~ protocols                }....................
# If the active Python interpreter targets Python >= 3.8 and thus supports
# PEP 544-compliant protocols...
if IS_PYTHON_AT_LEAST_3_8:
    # Defer version-specific imports.
    from beartype.typing import Protocol

    class _SupportsBool(Protocol):
        '''
        Fast caching protocol matching any object whose class defines the
        :meth:`__bool__` dunder method.
        '''

        def __bool__(self) -> bool: ...

    class _SupportsLen(Protocol):
        '''
        Fast caching protocol matching any object whose class defines the
        :meth:`__len__` dunder method.
        '''

        # Note that the Python data model requires __len__() to return a
        # non-negative integer -- *NOT* a boolean. The prior "-> bool" return
        # annotation here was erroneous. Runtime protocol matching only tests
        # for the method's existence, so this fix is annotation-only.
        def __len__(self) -> int: ...

    _BoolLike = (_SupportsBool, _SupportsLen)
    '''
    :func:`isinstance`-able tuple of fast caching protocols matching any
    **bool-like** (i.e., object whose class defines at least one of the
    :meth:`__bool__` and/or :meth:`__len__` dunder methods).
    '''
# ....................{ PRIVATE ~ subclasses }....................
class _IsFactory(_BeartypeValidatorFactoryABC):
    '''
    **Beartype callable validator factory** (i.e., class that, when subscripted
    (indexed) by an arbitrary callable returning ``True`` when the object
    passed to that callable satisfies a caller-defined constraint, creates a
    new :class:`BeartypeValidator` object encapsulating that callable suitable
    for subscripting (indexing) :attr:`typing.Annotated` type hints, enforcing
    that constraint on :mod:`beartype`-decorated callable parameters and
    returns annotated by those hints).

    This class validates that callable parameters and returns satisfy the
    arbitrary **callable validator** (i.e., callable whose signature satisfies
    ``collections.abc.Callable[[typing.Any], bool]``) subscripting (indexing)
    this class. Callable validators are caller-defined and may thus validate
    the internal integrity, consistency, and structure of arbitrary objects
    ranging from simple builtin scalars like integers and strings to complex
    data structures defined by third-party packages like NumPy arrays and
    Pandas DataFrames.

    This class creates one new :class:`BeartypeValidator` object for each
    callable validator subscripting (indexing) this class. These objects:

    * Are **PEP-compliant** and thus guaranteed to *never* violate existing or
      future standards.
    * Are **safely ignorable** by *all* static and runtime type checkers other
      than :mod:`beartype` itself.
    * **Less efficient** than :class:`BeartypeValidator` objects created by
      subscripting every other :mod:`beartype.vale` class. Specifically:

      * Every :class:`BeartypeValidator` object created by subscripting this
        class necessarily calls a callable validator and thus incurs at least
        one additional call stack frame per :mod:`beartype`-decorated callable
        call.
      * Every :class:`BeartypeValidator` object created by subscripting every
        other :mod:`beartype.vale` class directly calls *no* callable and thus
        incurs additional call stack frames only when the active Python
        interpreter internally calls dunder methods (e.g., ``__eq__()``) to
        satisfy their validation constraint.

    Usage
    ----------
    Any :mod:`beartype`-decorated callable parameter or return annotated by a
    :attr:`typing.Annotated` type hint subscripted (indexed) by this class
    subscripted (indexed) by a callable validator (e.g.,
    ``typing.Annotated[{cls}, beartype.vale.Is[lambda obj: {expr}]]`` for any
    class ``{cls}`` and Python expression ``{expr}`` evaluating to a boolean)
    validates that parameter or return value to be an instance of that class
    satisfying that callable validator.

    Specifically, callers are expected to (in order):

    #. Annotate a callable parameter or return to be validated with a
       :pep:`593`-compliant :attr:`typing.Annotated` type hint.
    #. Subscript that hint with (in order):

       #. The type expected by that parameter or return.
       #. One or more subscriptions (indexations) of this class, each itself
          subscripted (indexed) by a **callable validator** (i.e., callable
          accepting a single arbitrary object and returning either ``True`` if
          that object satisfies an arbitrary constraint *or* ``False``
          otherwise). If that hint is subscripted by:

          * Only one subscription of this class, that parameter or return
            satisfies that hint when both:

            * That parameter or return is an instance of the expected type.
            * That validator returns ``True`` when passed that parameter or
              return.

          * Two or more subscriptions of this class, that parameter or return
            satisfies that hint when both:

            * That parameter or return is an instance of the expected type.
            * *All* callable validators subscripting *all* subscriptions of
              this class return ``True`` when passed that parameter or return.

          Formally, the signature of each callable validator *must* resemble:

          .. code-block:: python

             def is_object_valid(obj) -> bool:
                 return bool(obj)

          Equivalently, each callable validator *must* satisfy the type hint
          ``collections.abc.Callable[[typing.Any,], bool]``. If not the case,
          an exception is raised. Note that:

          * If that parameter or return is *not* an instance of the expected
            type, **no callable validator is called.** Equivalently, each
            callable validator is called *only* when that parameter or return
            is already an instance of the expected type. Callable validators
            need *not* revalidate that type (e.g., by passing that parameter
            or return and type to the :func:`isinstance` builtin).
          * The name of each callable validator is irrelevant. For
            convenience, most callable validators are defined as nameless
            lambda functions.

    For example, the following type hint only accepts non-empty strings:

    .. code-block:: python

       Annotated[str, Is[lambda text: bool(text)]]

    :class:`BeartypeValidator` objects also support an expressive
    domain-specific language (DSL) enabling callers to trivially synthesize
    new objects from existing objects with standard Pythonic math operators:

    * **Negation** (i.e., ``not``). Negating an :class:`BeartypeValidator`
      object with the ``~`` operator synthesizes a new
      :class:`BeartypeValidator` object whose validator returns ``True`` only
      when the validator of the original object returns ``False``. For
      example, the following type hint only accepts strings containing *no*
      periods:

      .. code-block:: python

         Annotated[str, ~Is[lambda text: '.' in text]]

    * **Conjunction** (i.e., ``and``). Conjunctively combining two or more
      :class:`BeartypeValidator` objects with the ``&`` operator synthesizes a
      new :class:`BeartypeValidator` object whose validator returns ``True``
      only when all data validators of the original objects return ``True``.
      For example, the following type hint only accepts non-empty strings
      containing *no* periods:

      .. code-block:: python

         Annotated[str, (
             Is[lambda text: bool(text)] &
             ~Is[lambda text: '.' in text]
         )]

    * **Disjunction** (i.e., ``or``). Disjunctively combining two or more
      :class:`BeartypeValidator` objects with the ``|`` operator synthesizes a
      new :class:`BeartypeValidator` object whose validator returns ``True``
      only when at least one validator of the original objects returns
      ``True``. For example, the following type hint accepts both empty
      strings *and* non-empty strings containing at least one period:

      .. code-block:: python

         Annotated[str, (
             ~Is[lambda text: bool(text)] |
             Is[lambda text: '.' in text]
         )]

    See also the **Examples** subsection below.

    Caveats
    ----------
    **This class is currently only supported by the**
    :func:`beartype.beartype` **decorator.** All other static and runtime type
    checkers silently ignore subscriptions of this class subscripting
    :attr:`typing.Annotated` type hints.

    **This class incurs a minor time performance penalty at call time.**
    Specifically, each type hint of a :mod:`beartype`-decorated callable
    subscripted by a subscription of this class adds one additional stack
    frame to each call of that callable. While negligible (in the average
    case), this cost can become non-negligible when compounded across multiple
    type hints annotating a frequently called :mod:`beartype`-decorated
    callable -- especially when those type hints are subscripted by multiple
    subscriptions of this class at different nesting levels.

    **This class prohibits instantiation.** This class is *only* intended to
    be subscripted. Attempting to instantiate this class into an object will
    raise an :exc:`BeartypeValeSubscriptionException` exception.

    Examples
    ----------
    .. code-block:: python

       # Import the requisite machinery.
       >>> from beartype import beartype
       >>> from beartype.vale import Is
       >>> from typing import Annotated

       # Validator matching only strings with lengths ranging [4, 40].
       >>> IsRangy = Is[lambda text: 4 <= len(text) <= 40]

       # Validator matching only unquoted strings.
       >>> IsUnquoted = Is[lambda text:
       ...     text.count('"') < 2 and text.count("'") < 2]

       # Type hint matching only unquoted strings.
       >>> UnquotedString = Annotated[str, IsUnquoted]

       # Type hint matching only quoted strings.
       >>> QuotedString = Annotated[str, ~IsUnquoted]

       # Type hint matching only unquoted strings with lengths ranging
       # [4, 40].
       >>> UnquotedRangyString = Annotated[str, IsUnquoted & IsRangy]

       # Annotate callables by those type hints.
       >>> @beartype
       ... def doublequote_text(text: UnquotedString) -> QuotedString:
       ...     """
       ...     Double-quote the passed unquoted string.
       ...     """
       ...     return f'"{text}"'  # The best things in life are one-liners.
       >>> @beartype
       ... def singlequote_prefix(text: UnquotedRangyString) -> QuotedString:
       ...     """
       ...     Single-quote the prefix spanning characters ``[0, 3]`` of the
       ...     passed unquoted string with length ranging ``[4, 40]``.
       ...     """
       ...     return f"'{text[:3]}'"  # "Guaranteed to work," says @beartype.

       # Call those callables with parameters satisfying those validators.
       >>> doublequote_text("You know anything about nuclear fusion?")
       "You know anything about nuclear fusion?"
       >>> singlequote_prefix("Not now, I'm too tired. Maybe later.")
       'Not'

       # Call those callables with parameters not satisfying those validators.
       >>> doublequote_text('''"Everybody relax, I'm here."''')
       beartype.roar._roarexc.BeartypeCallHintParamViolation: @beartyped
       doublequote_text() parameter text='"Everybody relax, I\'m here."'
       violates type hint typing.Annotated[str, Is[lambda text:
       text.count('"') < 2 and text.count("'") < 2]], as value '"Everybody
       relax, I\'m here."' violates validator Is[lambda text:
       text.count('"') < 2 and text.count("'") < 2].
    '''

    # ..................{ DUNDERS                            }..................
    def __getitem__(  # type: ignore[override]
        self, is_valid: BeartypeValidatorTester) -> BeartypeValidator:
        '''
        Create and return a new beartype validator from the passed **validator
        callable** (i.e., caller-defined callable accepting a single arbitrary
        object and returning either ``True`` if that object satisfies an
        arbitrary constraint *or* ``False`` otherwise), suitable for
        subscripting :pep:`593`-compliant :attr:`typing.Annotated` type hints.

        This method is intentionally *not* memoized, as this method is usually
        subscripted only by subscription-specific lambda functions uniquely
        defined for each subscription of this class.

        Parameters
        ----------
        is_valid : Callable[[Any,], bool]
            Validator callable to validate parameters and returns against.

        Returns
        ----------
        BeartypeValidator
            New object encapsulating this validator callable.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If either:

            * This class was subscripted by two or more arguments.
            * This class was subscripted by one argument that either:

              * Is *not* callable.
              * Is a C-based rather than pure-Python callable.
              * Is a pure-Python callable accepting two or more arguments.

        See Also
        ----------
        :class:`_IsAttrFactory`
            Usage instructions.
        '''

        # ..................{ VALIDATE                       }..................
        # If this class was subscripted by either no arguments *OR* two or
        # more arguments, raise an exception.
        self._die_unless_getitem_args_1(is_valid)
        # Else, this class was subscripted by exactly one argument.

        # If that callable is *NOT* a validator tester, raise an exception.
        die_unless_validator_tester(is_valid)
        # Else, that callable is a validator tester.

        # Lambda function dynamically generating the machine-readable
        # representation of this validator, deferred due to the computational
        # expense of accurately retrieving the source code for this validator
        # (especially when this validator is itself a lambda function).
        get_repr = lambda: (
            f'{self._basename}['
            f'{represent_func(func=is_valid, warning_cls=BeartypeValeLambdaWarning)}'
            f']'
        )

        # ..................{ CLOSURE                        }..................
        #FIXME: Unit test edge cases extensively, please.
        def _is_valid_bool(obj: object) -> bool:
            '''
            ``True`` only if the passed object satisfies the caller-defined
            validation callable subscripting this :attr:`beartype.vale.Is`
            validator factory.

            This closure wraps that possibly unsafe callable with an implicit
            type cast, guaranteeing that either:

            * If that callable returns a boolean, this closure returns that
              boolean as is.
            * If that callable returns a non-boolean object, either:

              * If that non-boolean is implicitly convertible into a boolean
                (i.e., if passing that non-boolean to the :class:`bool` type
                succeeds *without* raising an exception), this closure coerces
                that non-boolean into a boolean and returns that boolean.
              * Else, this closure raises a human-readable exception.

            This closure is principally intended to massage non-standard
            validation callables defined by popular third-party packages like
            NumPy, which commonly return non-boolean objects that are
            implicitly convertible into boolean objects: e.g.,

            .. code-block::

               >>> import numpy as np
               >>> matrix = np.array([[2, 1], [1, 2]])
               >>> is_all = np.all(matrix > 0)
               >>> type(is_all)
               <class 'numpy.bool_'>
               >>> is_all
               True  # <-- y u lie, numpy
               >>> bool(is_all)
               True

            Caveats
            ----------
            **This closure is comparatively slower than the passed callable.**
            This closure should *never* be called directly from code snippets
            embedded in wrapper functions dynamically generated by the
            :func:`beartype.beartype` decorator. This closure should *only* be
            called indirectly by exception-handling functionality performed by
            those wrapper functions in the event of a type-checking violation,
            at which time efficiency is no longer a driving force.

            This implies that wrapper functions dynamically generated by the
            :func:`beartype.beartype` decorator *could* implicitly coerce
            non-boolean objects returned by the passed callable into the
            ``True`` singleton. Although non-ideal, debugging such concerns is
            squarely the user's concern; attempting to safeguard users from
            semantic issues like this would destroy runtime performance for
            *no* tangible gain in the general case. The best :mod:`beartype`
            can (and should) do is defer validation until a type-checking
            violation.

            Parameters
            ----------
            obj : object
                Object to be validated by that validation callable.

            Returns
            ----------
            bool
                ``True`` only if that object satisfies that validation
                callable.

            Raises
            ----------
            BeartypeValeValidationException
                If that validation callable returns a **non-bool-like**, where
                "non-bool-like" is any object that:

                * Is *not* a **boolean** (i.e., :class:`bool` instance).
                * Is *not* **implicitly convertible** into a boolean (i.e., is
                  an object whose class defines neither the :meth:`__bool__`
                  nor :meth:`__len__` dunder methods).
                * Subscript the :attr:`beartype.vale.Is` factory by a
                  **non-bool-like validator** (i.e., tester function returning
                  an object that is neither a :class:`bool` *nor* implicitly
                  convertible into a :class:`bool`).
            '''

            # Object returned by validating this object against that callable.
            is_obj_valid = is_valid(obj)

            # If that object is a boolean, return that object as is.
            if isinstance(is_obj_valid, bool):
                return is_obj_valid
            # Else, that object is *NOT* a boolean.

            # "True" *ONLY* if that object is a bool-like (i.e., object whose
            # class defines the __bool__() and/or __len__() dunder methods).
            #
            # Note that we intentionally avoid the Easier to Ask for
            # Permission than Forgiveness (EAFP) approach typically favoured
            # by the Python community for coercing types. Namely, we avoid
            # doing this:
            #     # Attempt to coerce this boolean into a non-boolean.
            #     try:
            #         is_obj_valid_bool = bool(is_obj_valid)
            #     except Exception as exception:
            #         raise SomeBeartypeException(...) from exception
            #
            # Why? Because the bool() constructor is overly permissive to the
            # point of being *FRANKLY BROKEN.* Why? Because that constructor
            # *NEVER* raises an exception (unless the class of that object
            # defines a __bool__() dunder method raising an exception). Why?
            # Because that constructor implicitly coerces *ALL* objects whose
            # classes define *NO* __bool__() dunder method to "True" except
            # for the following, which the bool() constructor explicitly
            # detects and hard-codes to be coerced to "False":
            # * The "None" singleton.
            # * The "False" singleton.
            # * Numeric 0 across all numeric types, including:
            #   * Integer 0.
            #   * Floating-point 0.0.
            # * Empty containers across all container types, including:
            #   * The empty tuple singleton (i.e., "()").
            #   * The empty string singleton (i.e., "''").
            #   * Any empty list (e.g., "[]").
            #
            # The proof is in the gelatinous spaghetti code:
            #     >>> class OhMyGods(object): pass
            #     >>> bool(OhMyGods())
            #     True  # <-- WHAT THE HECK IS THIS, GUIDO. SRSLY, BRO. SRSLY.
            #
            # This is, of course, unbelievable. This is, of course, all true.
            # What is this, Guido? Visual Basic in my Python? *facepalm*
            #
            # Note also that there are several means of testing for booliness.
            # The obvious approach of calling getattr() is also the slowest,
            # because getattr() internally performs the EAFP approach and
            # exception handling in Python is known to be an obvious
            # bottleneck. Ergo, we intentionally avoid doing this:
            #     is_obj_valid_bool_method = getattr(
            #         is_obj_valid, '__bool__', None)
            #
            # Ideally, we would instead defer to a beartype-specific fast
            # caching protocol that also internally performs a similar
            # getattr() call wrapped within caching logic that amortizes the
            # cost of that call across all isinstance() calls passed an object
            # of that same type. Since there exists *NO* standard
            # "SupportsBool" protocol, we would then trivially define our own
            # like so:
            #     from beartype.typing import Protocol
            #     class SupportsBool(Protocol):
            #         def __bool__(self) -> bool: ...
            #
            # Surprisingly, that fails. Why? Because the bool() constructor
            # internally coerces objects into booleans like so:
            # * If the passed object defines the __bool__() dunder method,
            #   that constructor defers to that method.
            # * Else if the passed object defines the __len__() dunder method,
            #   that constructor defers to that method.
            # * Else if the passed object is one of several hard-coded objects
            #   evaluating to "False", that constructor returns "False".
            # * Else, that constructor returns "True".
            #
            # To handle the first two cases, we instead:
            # * Under Python >= 3.8, define both our own "SupportsBool" *AND*
            #   "SupportsLen" protocols.
            # * Under Python 3.7, fallback to slower getattr()-based tests.
            is_obj_valid_boollike: bool = None  # type: ignore[assignment]

            # If the active Python interpreter targets Python >= 3.8 and thus
            # supports the PEP 544-compliant protocols defined above, decide
            # whether that object is bool-like by deferring to those
            # protocols.
            if IS_PYTHON_AT_LEAST_3_8:
                is_obj_valid_boollike = isinstance(is_obj_valid, _BoolLike)  # pyright: ignore
            # Else, the active Python interpreter targets Python 3.7 and thus
            # fails to support the PEP 544-compliant protocols defined above.
            # Instead, decide whether that object is bool-like manually.
            else:
                is_obj_valid_boollike = (
                    hasattr(is_obj_valid, '__bool__') or
                    hasattr(is_obj_valid, '__len__')
                )

            # If that object is *NOT* bool-like, raise an exception.
            if not is_obj_valid_boollike:
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                # CAUTION: Synchronize with the exception raised below, please.
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                raise BeartypeValeValidationException(
                    f'Validator {get_repr()} '
                    f'return value {repr(is_obj_valid)} not bool-like '
                    f'(i.e., instance of neither "bool" nor '
                    f'class defining __bool__() or __len__() dunder methods) '
                    f'for subject object:\n{represent_object(obj)}'
                )
            # Else, that object is bool-like.

            # Boolean coerced from this non-boolean via the __bool__() or
            # __len__() dunder methods declared by the type of this
            # non-boolean, initialized to "False" for safety.
            is_obj_valid_bool: bool = False

            # Attempt to perform this coercion.
            try:
                is_obj_valid_bool = bool(is_obj_valid)
            # If whichever of the __bool__() or __len__() dunder methods is
            # called by the above bool() constructor raises an exception, wrap
            # that exception in a higher-level @beartype exception.
            #
            # Note that this is *NOT* simply an uncommon edge case. In
            # particular, the Pandas "DataFrame" type defines a __bool__()
            # dunder method that unconditionally raises an exception.
            # *facepalm*
            except Exception as exception:
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                # CAUTION: Synchronize with the exception raised above, please.
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                raise BeartypeValeValidationException(
                    f'Validator {get_repr()} '
                    f'return value {repr(is_obj_valid)} erroneously bool-like '
                    f'(i.e., instance of class defining __bool__() or __len__() '
                    f'dunder methods raising unexpected exception) '
                    f'for subject object:\n{represent_object(obj)}'
                ) from exception

            # Return this boolean.
            return is_obj_valid_bool

        # ..................{ VALIDATOR                      }..................
        # Dictionary mapping from the name to value of each local attribute
        # referenced in the "is_valid_code" snippet defined below.
        is_valid_code_locals: LexicalScope = {}

        # Name of a new parameter added to the signature of each
        # @beartype-decorated wrapper function whose value is this validator,
        # enabling this validator to be called directly in the body of those
        # functions *WITHOUT* imposing additional stack frames.
        is_valid_attr_name = add_func_scope_attr(
            attr=_is_valid_bool, func_scope=is_valid_code_locals)

        # One one-liner to rule them all and in "pdb" bind them.
        return BeartypeValidator(
            is_valid=_is_valid_bool,
            # Python code snippet calling this validator (via this new
            # parameter), passed an object to be interpolated into this
            # snippet by downstream logic.
            is_valid_code=f'{is_valid_attr_name}({{obj}})',
            is_valid_code_locals=is_valid_code_locals,
            get_repr=get_repr,
        )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype declarative object validation classes** (i.e.,
:mod:`beartype`-specific classes enabling callers to define PEP-compliant
validators from arbitrary caller-defined objects tested via explicitly
supported object introspectors efficiently generating stack-free code).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
# All "FIXME:" comments for this submodule reside in this package's "__init__"
# submodule to improve maintainability and readability here.
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeValeSubscriptionException
from beartype.typing import Any, Tuple
from beartype.vale._is._valeisabc import _BeartypeValidatorFactoryABC
from beartype.vale._util._valeutilsnip import (
VALE_CODE_CHECK_ISATTR_TEST_format,
VALE_CODE_CHECK_ISATTR_VALUE_EXPR_format,
VALE_CODE_INDENT_1,
)
from beartype.vale._core._valecore import BeartypeValidator
from beartype._data.datatyping import LexicalScope
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.kind.utilkinddict import update_mapping
from beartype._util.func.utilfuncscope import add_func_scope_attr
from beartype._util.py.utilpyversion import IS_PYTHON_AT_LEAST_3_8
from beartype._util.text.utiltextrepr import represent_object
from beartype._util.utilobject import SENTINEL
# ....................{ SUBCLASSES ~ attr }....................
class _IsAttrFactory(_BeartypeValidatorFactoryABC):
'''
**Beartype object attribute validator factory** (i.e., object creating and
returning a new beartype validator when subscripted (indexed) by both the
name of any object attribute *and* any **attribute validator** (i.e., other
beartype validator created by subscripting any :mod:`beartype.vale` class),
validating that :mod:`beartype`-decorated callable parameters and returns
annotated by :attr:`typing.Annotated` type hints subscripted by the former
validator define an attribute with that name satisfying that attribute
validator).
This class efficiently validates that callable parameters and returns
define arbitrary object attributes satisfying arbitrary validators
subscripting this factory. Any :mod:`beartype`-decorated callable parameter
or return annotated by a :attr:`typing.Annotated` type hint subscripted by
this factory subscripted by any object attribute name and validator (e.g.,
``typing.Annotated[{cls}, beartype.vale.IsAttr[{attr_name},
{attr_validator}]]`` for any class ``{cls}``, object attribute name
``{attr_name}``, and object attribute validator ``{attr_validator}``)
validates that parameter or return value to be an instance of that class
defining an attribute with that name satisfying that attribute validator.
**This factory incurs no time performance penalties at call time.** Whereas
the general-purpose :class:`beartype.vale.Is` factory necessarily calls
the caller-defined callable subscripting that factory at call time and thus
incurs a minor time performance penalty, this factory efficiently reduces
to one-line tests in :mod:`beartype`-generated wrapper functions *without*
calling any callables and thus incurs *no* time performance penalties.
Examples
----------
.. code-block:: python
# Import the requisite machinery.
>>> from beartype import beartype
>>> from beartype.vale import IsAttr, IsEqual
>>> from typing import Annotated
>>> import numpy as np
# Type hint matching only two-dimensional NumPy arrays of 64-bit floats,
# generating code resembling:
# (isinstance(array, np.ndarray) and
# array.ndim == 2 and
# array.dtype == np.dtype(np.float64))
>>> Numpy2dFloat64Array = Annotated[
... np.ndarray,
... IsAttr['ndim', IsEqual[2]],
... IsAttr['dtype', IsEqual[np.dtype(np.float64)]],
... ]
# Type hint matching only one-dimensional NumPy arrays of 64-bit floats,
# generating code resembling:
#    (isinstance(array, np.ndarray) and
#     array.ndim == 1 and
#     array.dtype.type == np.float64)
>>> Numpy1dFloat64Array = Annotated[
...     np.ndarray,
...     IsAttr['ndim', IsEqual[1]],
...     # Nested attribute validators test equality against a "."-delimited
...     # attribute lookup (e.g., "dtype.type"), as expected.
...     IsAttr['dtype', IsAttr['type', IsEqual[np.float64]]],
... ]
# NumPy arrays of well-known real number series.
>>> FAREY_2D_FLOAT64_ARRAY = np.array(
... [[0/1, 1/8,], [1/7, 1/6,], [1/5, 1/4], [2/7, 1/3], [3/8, 2/5]])
>>> FAREY_1D_FLOAT64_ARRAY = np.array(
... [3/7, 1/2, 4/7, 3/5, 5/8, 2/3, 5/7, 3/4, 4/5, 5/6, 6/7, 7/8])
# Annotate callables by those type hints.
>>> @beartype
... def sqrt_sum_2d(
... array: Numpy2dFloat64Array) -> Numpy1dFloat64Array:
... """
... One-dimensional NumPy array of 64-bit floats produced by first
... summing the passed two-dimensional NumPy array of 64-bit floats
... along its second dimension and then square-rooting those sums.
... """
... return np.sqrt(array.sum(axis=1))
# Call those callables with parameters satisfying those hints.
>>> sqrt_sum_2d(FAREY_2D_FLOAT64_ARRAY)
[0.35355339 0.55634864 0.67082039 0.78679579 0.88034084]
# Call those callables with parameters violating those hints.
>>> sqrt_sum_2d(FAREY_1D_FLOAT64_ARRAY)
beartype.roar.BeartypeCallHintParamViolation: @beartyped
sqrt_sum_2d() parameter array="array([0.42857143, 0.5, 0.57142857, 0.6,
0.625, ...])" violates type hint typing.Annotated[numpy.ndarray,
IsAttr['ndim', IsEqual[2]], IsAttr['dtype', IsEqual[dtype('float64')]]],
as value "array([0.42857143, 0.5, 0.57142857, 0.6, 0.625, ...])"
violates validator IsAttr['ndim', IsEqual[2]].
See Also
----------
:class:`beartype.vale.Is`
Further commentary.
'''
# ..................{ DUNDERS }..................
@callable_cached
def __getitem__( # type: ignore[override]
    self, args: Tuple[str, BeartypeValidator]) -> BeartypeValidator:
    '''
    Create and return a new beartype validator validating object attributes
    with the passed name satisfying the passed validator, suitable for
    subscripting :pep:`593`-compliant :attr:`typing.Annotated` type hints.

    This method is memoized for efficiency.

    Parameters
    ----------
    args : Tuple[str, BeartypeValidator]
        2-tuple ``(attr_name, attr_validator)``, where:

        * ``attr_name`` is the arbitrary attribute name to validate that
          parameters and returns define satisfying the passed validator.
        * ``attr_validator`` is the attribute validator to validate that
          attributes with the passed name of parameters and returns
          satisfy.

    Returns
    ----------
    BeartypeValidator
        Beartype validator encapsulating this validation.

    Raises
    ----------
    BeartypeValeSubscriptionException
        If this factory was subscripted by either:

        * *No* arguments.
        * One argument.
        * Three or more arguments.

    See Also
    ----------
    :class:`_IsAttrFactory`
        Usage instructions.
    '''

    # If this class was subscripted by one non-tuple argument, raise an
    # exception.
    if not isinstance(args, tuple):
        raise BeartypeValeSubscriptionException(
            f'{self._getitem_exception_prefix}non-tuple argument '
            f'{represent_object(args)}.'
        )
    # Else, this class was subscripted by either no *OR* two or more
    # arguments (contained in this tuple).
    #
    # If this class was *NOT* subscripted by two arguments...
    elif len(args) != 2:
        # If this class was subscripted by one or more arguments, then by
        # deduction this class was subscripted by three or more arguments.
        # In this case, raise a human-readable exception.
        #
        # NOTE(review): a single argument subscripted as an explicit
        # 1-tuple (e.g., "IsAttr['name',]") also lands here and is thus
        # reported as "three or more arguments" -- confirm whether that
        # wording is acceptable for the 1-tuple edge case.
        if args:
            raise BeartypeValeSubscriptionException(
                f'{self._getitem_exception_prefix}three or more arguments '
                f'{represent_object(args)}.'
            )
        # Else, this class was subscripted by *NO* arguments. In this case,
        # raise a human-readable exception.
        else:
            raise BeartypeValeSubscriptionException(
                f'{self._getitem_exception_prefix}empty tuple.')
    # Else, this class was subscripted by exactly two arguments.

    # Localize these arguments to human-readable local variables.
    attr_name, attr_validator = args

    # Representer (i.e., callable accepting *NO* arguments returning a
    # machine-readable representation of this validator), defined *AFTER*
    # localizing these validator arguments so this closure captures the
    # unpacked values rather than the raw "args" tuple.
    get_repr = lambda: (
        f'{self._basename}[{repr(attr_name)}, {repr(attr_validator)}]')

    # If this name is *NOT* a string, raise an exception.
    if not isinstance(attr_name, str):
        raise BeartypeValeSubscriptionException(
            f'{get_repr()} first argument '
            f'{represent_object(attr_name)} not string.'
        )
    # Else, this name is a string.
    #
    # If this name is the empty string, raise an exception.
    elif not attr_name:
        raise BeartypeValeSubscriptionException(
            f'{get_repr()} first argument is empty string.')
    # Else, this name is a non-empty string.
    #
    # Note that this name has *NOT* yet been validated to be valid Python
    # identifier. While we could do so here by calling our existing
    # is_identifier() tester, doing so would inefficiently repeat
    # the split on "." characters performed below. Instead, we iteratively
    # validate each split substring to be a valid Python identifier below.

    # Callable inefficiently validating object attributes with this name
    # against this validator.
    # is_valid: BeartypeValidatorTester = None # type: ignore[assignment]

    # Code snippet efficiently validating object attributes with this name
    # against this validator.
    is_valid_code = ''

    # Dictionary mapping from the name to value of each local attribute
    # referenced in the "is_valid_code" snippet defined below.
    is_valid_code_locals: LexicalScope = {}

    # If this attribute name is unqualified (i.e., contains no "."
    # delimiters), prefer an efficient optimization avoiding iteration.
    if '.' not in attr_name:
        # If this name is *NOT* a valid Python identifier, raise an
        # exception.
        if not attr_name.isidentifier():
            raise BeartypeValeSubscriptionException(
                f'{get_repr()} first argument {repr(attr_name)} not '
                f'syntactically valid Python identifier.'
            )
        # Else, this name is a valid Python identifier.

        # Slow-path tester closing over this name and validator, only
        # called for error diagnosis rather than by wrapper functions.
        def is_valid(pith: Any) -> bool:
            f'''
            ``True`` only if the passed object defines an attribute named
            "{attr_name}" whose value satisfies the validator
            {repr(attr_validator)}.
            '''

            # Attribute of this object with this name if this object
            # defines such an attribute *OR* a sentinel placeholder
            # otherwise (i.e., if this object defines *NO* such attribute).
            pith_attr = getattr(pith, attr_name, SENTINEL)

            # Return true only if...
            return (
                # This object defines an attribute with this name *AND*...
                pith_attr is not SENTINEL and
                # This attribute satisfies this validator.
                attr_validator.is_valid(pith_attr)
            )

        # Names of new parameters added to the signature of wrapper
        # functions enabling this validator to be tested in those functions
        # *WITHOUT* additional stack frames whose values are:
        # * The sentinel placeholder.
        #
        # Add these parameters *BEFORE* generating locals.
        local_name_sentinel = add_func_scope_attr(
            attr=SENTINEL, func_scope=is_valid_code_locals)

        # Generate locals safely merging the locals required by both the
        # code generated below *AND* the externally provided code
        # validating this attribute.
        update_mapping(
            mapping_trg=is_valid_code_locals,
            mapping_src=attr_validator._is_valid_code_locals,
        )

        #FIXME: Unfortunately, "local_name_attr_value" still isn't a
        #sufficiently unique name below, because "IsAttr['name',
        #IsAttr['name', IsEqual[True]]]" is a trivial counter-example where
        #the current approach breaks down. For true uniquification here,
        #we're going to need to instead:
        #* Define a global private counter:
        #      _local_name_obj_attr_value_counter = Counter(0)
        #* Replace the assignment below with:
        #      local_name_obj_attr_value = (
        #          f'{{obj}}_isattr_'
        #          f'{next(_local_name_obj_attr_value_counter)}'
        #      )
        #Of course, this assumes "Counter" objects are thread-safe. If
        #they're not, we'll need to further obfuscate all this behind a
        #[R]Lock of some sort. *sigh*
        #FIXME: Oh, right. We mixed up "collections.Counter" with
        #"itertools.count". The former is orthogonal to our interests here;
        #the latter is of interest but *NOT* thread-safe. The solution is
        #for us to implement a new "FastWriteCounter" class resembling that
        #published in this extremely clever (and thus awesome) article:
        #    https://julien.danjou.info/atomic-lock-free-counters-in-python

        # Name of a local variable in this code whose:
        # * Name is sufficiently obfuscated as to be hopefully unique to
        #   the code generated by this validator.
        # * Value is the value of this attribute of the arbitrary object
        #   being validated by this code.
        local_name_attr_value = f'{{obj}}_isattr_{attr_name}'

        # Python expression expanding to the value of this attribute,
        # efficiently optimized under Python >= 3.8 with an assignment
        # expression to avoid inefficient access of this value.
        attr_value_expr = VALE_CODE_CHECK_ISATTR_VALUE_EXPR_format(
            attr_name_expr=repr(attr_name),
            local_name_attr_value=local_name_attr_value,
            local_name_sentinel=local_name_sentinel,
        )

        # Python expression validating the value of this attribute,
        # formatted so as to be safely embeddable in the larger code
        # expression defined below.
        attr_value_is_valid_expr = (
            attr_validator._is_valid_code.format(
                # Replace the placeholder substring "{obj}" in this code
                # with the expression expanding to this attribute's value,
                # defined as either...
                obj=(
                    # If the active Python interpreter targets Python >=
                    # 3.8 and thus supports assignment expressions, the
                    # name of the local variable previously assigned the
                    # value of this attribute by the
                    # "VALE_CODE_CHECK_ISATTR_VALUE_EXPR" code snippet
                    # subsequently embedded in the
                    # "VALE_CODE_CHECK_ISATTR_VALUE_TEST" code snippet;
                    local_name_attr_value
                    if IS_PYTHON_AT_LEAST_3_8 else
                    # Else, the value of this attribute directly accessed.
                    attr_value_expr
                ),
                # Replace the placeholder substring "{indent}" in this code
                # with an indentation increased by one level.
                indent=VALE_CODE_INDENT_1,
            ))

        # Code snippet efficiently validating against this object.
        is_valid_code = VALE_CODE_CHECK_ISATTR_TEST_format(
            attr_value_expr=attr_value_expr,
            attr_value_is_valid_expr=attr_value_is_valid_expr,
            local_name_sentinel=local_name_sentinel,
        )
    # Else, this attribute name is qualified (i.e., contains one or more
    # "." delimiters), fallback to a general solution performing iteration.
    else:
        #FIXME: Implement us up when we find the time, please. We currently
        #raise an exception simply because we ran out of time for this. :{
        raise BeartypeValeSubscriptionException(
            f'{get_repr()} first argument '
            f'{repr(attr_name)} not unqualified Python identifier '
            f'(i.e., contains one or more "." characters).'
        )

    # Create and return this subscription.
    return BeartypeValidator(
        is_valid=is_valid,
        is_valid_code=is_valid_code,
        is_valid_code_locals=is_valid_code_locals,
        get_repr=get_repr,
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype declarative operator validation classes** (i.e.,
:mod:`beartype`-specific classes enabling callers to define PEP-compliant
validators from arbitrary caller-defined objects tested via explicitly
supported operators efficiently generating stack-free code).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: *Useful optimization.* For "_IsEqualFactory", we can (and should)
#directly embed the values of builtins when comparing against builtins (e.g.,
#integers, strings). Specifically, we should only conditionally perform this
#line below:
# param_name_obj_value = add_func_scope_attr(
# attr=obj, func_scope=is_valid_code_locals)
#...when we absolutely must. So when mustn't we? We see two simple approaches
#to detecting builtin objects:
#* Detect the types of those objects. While obvious, this presents several
# subtleties:
# * Fake builtin objects, which would naturally need to be excluded.
# * Subclasses of builtin objects, which would *ALSO* need to be excluded.
# In short, "isinstance(param_name_obj_value, TUPLE_OF_TRUE_BUILTIN_TYPES)"
# fails to suffice -- although something more brute-force like
# "type(param_name_obj_value) in SET_OF_TRUE_BUILTIN_TYPES" might suffice.
#* Detect the first character of their repr() strings as belonging to the set:
# BUILTIN_OBJ_REPR_CHARS_FIRST = {
# "'", '"', 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
# repr(param_name_obj_value) in BUILTIN_OBJ_REPR_CHARS_FIRST
#We like the latter quite a bit more, as it has *NO* obvious edge cases,
#requires *NO* hard-coding of types, and appears to scale gracefully. The only
#downside is that it assumes third-party repr() strings to be sane, but... if
#that *ISN'T* the case, that is a bug in those third-parties. *shrug*
#FIXME: Generalize to support arbitrary binary operators by:
#* Define a new "_IsOperatorBinaryABC(_BeartypeValidatorFactoryABC, metaclass=ABCMeta)" superclass.
#* In that superclass:
# * Define a stock __class_getitem__() method whose implementation is
# sufficiently generic so as to be applicable to all subclasses. To do so,
# this method should access class variables defined by those subclasses.
# * Note that there is absolutely no reason or point to define abstract class
# methods forcing subclasses to define various metadata, for the unfortunate
# reason that abstract class methods do *NOT* actually enforce subclasses
# that aren't instantiable anyway to implement those methods. *sigh*
#* Refactor "_IsEqualFactory" to:
# * Subclass that superclass.
# * Define the following class variables, which the superclass
# __class_getitem__() method will internally access to implement itself:
# from operator import __eq__
#
# class _IsEqualFactory(_IsOperatorBinaryABC):
# _operator = __eq__
# _operator_code = '=='
#
#Ridiculously sweet, eh? We know.
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeValeSubscriptionException
from beartype.typing import Any
from beartype.vale._is._valeisabc import _BeartypeValidatorFactoryABC
from beartype.vale._util._valeutilsnip import (
VALE_CODE_CHECK_ISEQUAL_TEST_format)
from beartype.vale._core._valecore import BeartypeValidator
from beartype._data.datatyping import LexicalScope
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.func.utilfuncscope import add_func_scope_attr
# ....................{ SUBCLASSES ~ equal }....................
class _IsEqualFactory(_BeartypeValidatorFactoryABC):
    '''
    **Beartype object equality validator factory.**

    When subscripted (indexed) by an arbitrary object, this factory creates
    and returns a new beartype validator suitable for subscripting
    :pep:`593`-compliant :attr:`typing.Annotated` type hints. That validator
    accepts a :mod:`beartype`-decorated callable parameter or return value
    only if that value compares equal (under the standard ``==`` operator)
    to the subscripted object.

    This factory generalizes the :pep:`586`-compliant :attr:`typing.Literal`
    type hint factory. Whereas :attr:`typing.Literal` is only subscriptable
    by literal :class:`bool`, :class:`bytes`, :class:`int`, :class:`str`,
    :class:`Enum`, and ``type(None)`` objects, this factory is subscriptable
    by *any* object. Moreover, unlike validators created by the
    general-purpose :class:`beartype.vale.Is` factory (which call a
    caller-defined callable at call time), validators created here reduce to
    one-line tests embedded directly in :mod:`beartype`-generated wrapper
    functions and thus incur *no* call-time performance penalty.

    Caveats
    ----------
    **This factory is intentionally subscriptable by only a single object.**
    The ``__getitem__`` protocol cannot distinguish a subscription by two or
    more objects from a subscription by a single tuple of those objects.
    Since validating equality against tuples is essential -- and since
    equality against multiple independent objects already reduces to the
    existing ``|`` set operator (e.g., ``IsEqual[a] | IsEqual[b]``) -- this
    factory treats multiple subscripted objects as one tuple to compare
    against. Callers should therefore avoid subscripting this factory by
    multiple objects. Don't blame us. We didn't vote for :pep:`560`.

    Examples
    ----------
    .. code-block:: python

       # Import the requisite machinery.
       >>> from beartype import beartype
       >>> from beartype.vale import IsEqual
       >>> from typing import Annotated

       # Lists of the first ten items of well-known whole number series.
       >>> WHOLE_NUMBERS      = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
       >>> WHOLE_NUMBERS_EVEN = [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]

       # Type hint matching only lists equal to one of these lists.
       >>> SimpleWholeNumberSeries = Annotated[
       ...     list[int],
       ...     IsEqual[WHOLE_NUMBERS] | IsEqual[WHOLE_NUMBERS_EVEN]
       ... ]

       # Annotate callables by that type hint.
       >>> @beartype
       ... def guess_next(series: SimpleWholeNumberSeries) -> int:
       ...     """Guess the next whole number in the passed series."""
       ...     if series == WHOLE_NUMBERS: return WHOLE_NUMBERS[-1] + 1
       ...     else: return series[-1] + 2

       # Parameters equal to one of those lists are accepted.
       >>> guess_next(list(range(10)))
       10

       # Parameters unequal to all of those lists raise
       # "beartype.roar.BeartypeCallHintParamViolation".

    See Also
    ----------
    :class:`beartype.vale.Is`
        Further commentary.
    '''

    # ..................{ DUNDERS }..................
    @callable_cached
    def __getitem__(self, obj: Any) -> BeartypeValidator:  # type: ignore[override]
        '''
        Create and return a new beartype validator validating equality
        against the passed object, suitable for subscripting
        :pep:`593`-compliant :attr:`typing.Annotated` type hints.

        This method is memoized for efficiency.

        Parameters
        ----------
        obj : Any
            Arbitrary object to validate equality against.

        Returns
        ----------
        BeartypeValidator
            Beartype validator encapsulating this validation.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If this factory was subscripted by *no* arguments (i.e., the
            empty tuple). Note that subscripting this factory by two or
            more arguments does *not* raise; those arguments are accepted
            as a single tuple to compare against. See the class docstring.

        See Also
        ----------
        :class:`_IsEqualFactory`
            Usage instructions.
        '''

        # Subscripting this factory by *NO* arguments yields the empty
        # tuple -- the only subscription rejected here. Non-empty tuples
        # are deliberately preserved as objects to compare against; see
        # the class docstring for commentary.
        if isinstance(obj, tuple) and not obj:
            raise BeartypeValeSubscriptionException(
                f'{self._getitem_exception_prefix}empty tuple.')

        # Slow-path tester closing over this object, inefficiently
        # validating equality via the standard "==" operator.
        def is_valid(pith: Any) -> bool:
            return pith == obj

        # Local attributes referenced by the code snippet generated below,
        # mapped from name to value.
        code_scope: LexicalScope = {}

        # Expose this object to wrapper functions as a hidden parameter,
        # enabling this object to be tested in those functions *WITHOUT*
        # additional stack frames.
        obj_param_name = add_func_scope_attr(attr=obj, func_scope=code_scope)

        # One-line code snippet efficiently testing equality against this
        # object inside wrapper functions.
        equality_code = VALE_CODE_CHECK_ISEQUAL_TEST_format(
            param_name_obj_value=obj_param_name)

        # Validator encapsulating this equality test.
        return BeartypeValidator(
            is_valid=is_valid,
            is_valid_code=equality_code,
            is_valid_code_locals=code_scope,
            get_repr=lambda: f'{self._basename}[{repr(obj)}]',
        )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype validation superclasses** (i.e., :mod:`beartype`-specific abstract
base classes (ABCs) from which all concrete beartype validation subclasses derive).
This private submodule defines the core low-level class hierarchy driving the
entire :mod:`beartype` data validation ecosystem.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from abc import ABCMeta, abstractmethod
from beartype.roar import BeartypeValeSubscriptionException
from beartype.typing import Any
from beartype.vale._core._valecore import BeartypeValidator
from beartype._util.text.utiltextrepr import represent_object
# ....................{ METACLASSES }....................
class _BeartypeValidatorFactoryABCMeta(ABCMeta):
    '''
    Metaclass of all **beartype validator factory subclasses** (i.e.,
    :class:`_BeartypeValidatorFactoryABC` subclasses).
    '''

    # ..................{ INITIALIZERS }..................
    def __init__(cls, classname, superclasses, attr_name_to_value) -> None:
        super().__init__(classname, superclasses, attr_name_to_value)

        # Masquerade each newly created class as originating from the
        # public "beartype.vale" subpackage rather than the private
        # submodule actually implementing that class, improving end user
        # clarity and usability (e.g., in reprs and error messages).
        cls.__module__ = 'beartype.vale'
# ....................{ SUPERCLASSES }....................
#FIXME: Pyright appears to be extremely confused. It thinks that the
#"_BeartypeValidatorFactoryABCMeta" metaclass is a "generic" (i.e., subclasses
#"typing.Generic"), when in fact that metaclass merely subclasses the standard
#"abc.ABCMeta" metaclass. Consider submitting an upstream pyright issue, please.
class _BeartypeValidatorFactoryABC(
    object, metaclass=_BeartypeValidatorFactoryABCMeta): # pyright: ignore[reportGeneralTypeIssues]
    '''
    Abstract base class of all **beartype validator factories** (i.e.,
    subclasses that, when subscripted (indexed) by subclass-specific
    objects, create new :class:`BeartypeValidator` objects encapsulating
    those objects, themselves suitable for subscripting (indexing)
    :attr:`typing.Annotated` type hints, themselves enforcing
    subclass-specific validation constraints and contracts on
    :mod:`beartype`-decorated callable parameters and returns annotated by
    those hints).

    Attributes
    ----------
    _basename : str
        Machine-readable basename of the public factory singleton
        instantiating this private factory subclass (e.g., ``"IsAttr"``).
    _getitem_exception_prefix : str
        Human-readable substring prefixing exceptions raised by subclass
        implementations of the abstract :meth:`__getitem__` dunder method.
    '''

    # ..................{ INITIALIZERS }..................
    def __init__(self, basename: str) -> None:
        '''
        Initialize this validator factory.

        Parameters
        ----------
        basename : str
            Machine-readable basename of the public factory singleton
            instantiating this private factory subclass (e.g., ``"IsAttr"``).
        '''
        assert isinstance(basename, str), f'{repr(basename)} not string.'

        # Basename of the public singleton instantiating this subclass.
        self._basename = basename

        # Prefix common to all exceptions raised by subclass __getitem__()
        # implementations, derived from this basename.
        self._getitem_exception_prefix = (
            f'Beartype validator factory "{self._basename}" '
            f'subscripted by '
        )

    # ..................{ ABSTRACT ~ dunder }..................
    @abstractmethod
    def __getitem__(self, *args, **kwargs) -> BeartypeValidator:
        '''
        Create and return a new beartype validator validating the
        subclass-specific constraint parametrized by the passed arguments
        subscripting this beartype validator factory.

        Like standard type hints (e.g., :attr:`typing.Union`), instances of
        concrete subclasses of this abstract base class (ABC) are *only*
        intended to be subscripted (indexed). Concrete subclasses are
        required to implement this abstract method and strongly recommended
        (but *not* required) to memoize their implementations with the
        :func:`beartype._util.cache.utilcachecall.callable_cached`
        decorator.

        Returns
        ----------
        BeartypeValidator
            Beartype validator encapsulating this validation.
        '''
        pass

    # ..................{ PRIVATE ~ validator }..................
    #FIXME: Unit test us up, please.
    def _die_unless_getitem_args_1(self, args: Any) -> None:
        '''
        Raise an exception unless this beartype validator factory was
        subscripted (indexed) by exactly one non-tuple argument.

        Intended to be called by concrete subclass implementations of the
        :meth:`__getitem__` dunder method to validate the arguments
        subscripting this beartype validator factory.

        Parameters
        ----------
        args : Any
            Variadic positional arguments to be inspected.

        Raises
        ----------
        BeartypeValeSubscriptionException
            If the caller dunder method was passed either no arguments or
            two or more arguments. Note that a single argument that is
            itself a tuple is also rejected here, since the
            ``__getitem__`` protocol cannot distinguish that case from a
            subscription by multiple arguments.
        '''

        # A non-tuple implies subscription by exactly one (non-tuple)
        # argument; nothing further to validate.
        if not isinstance(args, tuple):
            return
        # Else, this object was subscripted by either no *OR* multiple
        # arguments, both of which arrive as a tuple.

        # If subscripted by one or more arguments, raise an appropriate
        # human-readable exception.
        if args:
            raise BeartypeValeSubscriptionException(
                f'{self._getitem_exception_prefix}two or more arguments '
                f'{represent_object(args)}.'
            )
        # Else, subscripted by *NO* arguments. Raise an appropriate
        # human-readable exception.
        raise BeartypeValeSubscriptionException(
            f'{self._getitem_exception_prefix}empty tuple.')
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype Python Enhancement Proposal (PEP) API.**
This subpackage provides a medley of miscellaneous low-level utility functions
implementing unofficial (albeit well-tested) runtime support for PEPs lacking
official runtime support in CPython's standard library. This subpackage is
intended to be used both by downstream third-party packages and the
:mod:`beartype` codebase itself. Supported PEPs include:
* :pep:`563` (i.e., "Postponed Evaluation of Annotations") via the
:func:`resolve_pep563` function.
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.peps._pep563 import (
resolve_pep563 as resolve_pep563)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`563` **support.** (i.e., low-level functions resolving
stringified PEP-compliant type hints implicitly postponed by the active Python
interpreter via a ``from __future__ import annotations`` statement at the head
of the external user-defined module currently being introspected).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: Conditionally emit a non-fatal PEP 563-specific warning when the active
#Python interpreter targets Python >= 3.10 *AND* the passed callable is nested.
# ....................{ IMPORTS }....................
import __future__
from beartype.roar import BeartypePep563Exception
from beartype.typing import (
Any,
FrozenSet,
Optional,
)
from beartype._data.datatyping import (
LexicalScope,
TypeStack,
)
from beartype._util.cls.utilclsget import get_type_locals
from beartype._util.func.utilfuncscope import (
get_func_globals,
get_func_locals,
is_func_nested,
)
from beartype._util.func.utilfunctest import die_unless_func_python
from beartype._util.text.utiltextident import is_identifier
from beartype._util.text.utiltextlabel import prefix_callable_decorated_pith
from collections.abc import Callable
from sys import modules as sys_modules
# ....................{ CONSTANTS }....................
_FROZEN_SET_EMPTY: FrozenSet[Any] = frozenset()
'''
The empty frozen set, cached at module scope as a mild optimization for the
body of the :func:`resolve_pep563` resolver defined below.
'''
# ....................{ RESOLVERS }....................
def resolve_pep563(
# Mandatory parameters.
func: Callable,
# Optional parameters.
cls_stack: TypeStack = None,
) -> None:
'''
Resolve all :pep:`563`-based **postponed annotations** (i.e., strings that
when dynamically evaluated as Python expressions yield actual annotations)
on the passed callable to their **referents** (i.e., the actual annotations
to which those postponed annotations evaluate) if :pep:`563` is active for
that callable *or* silently reduce to a noop otherwise (i.e., if :pep:`563`
is *not* active for that callable).
:pep:`563` is active for that callable if the module declaring that callable
explicitly enabled :pep:`563` support with a leading dunder importation of
the form ``from __future__ import annotations``. If :pep:`563` is active for
that callable, then for each type-hint annotating that callable:
* If that hint is a string and thus postponed, this function:
#. Dynamically evaluates that string within that callable's globals
context (i.e., set of all global variables defined by the module
declaring that callable).
#. Replaces that hint's string value with the expression produced by this
dynamic evaluation.
* Else, this function preserves that hint as is (e.g., due to that hint
that was previously postponed having already been evaluated by a prior
decorator).
Parameters
----------
func : Callable
Callable to resolve postponed annotations on.
cls_stack : TypeStack
Either:
* If that callable is a method of a class, the **type stack** (i.e.,
tuple of one or more lexically nested classes in descending order of
top- to bottom-most lexically nested) such that:
* The first item of this tuple is expected to be the **root class**
(i.e., top-most class whose lexical scope encloses that callable,
typically declared at module scope and thus global).
* The last item of this tuple is expected to be the **current class**
(i.e., possibly nested class directly containing that method).
* Else, that callable is *not* a method of a class. In this case,
``None``.
Defaults to ``None``.
Note that this function requires *both* the root and current class to
correctly resolve edge cases under :pep:`563`: e.g.,
.. code-block:: python
from __future__ import annotations
from beartype import beartype
@beartype
class Outer(object):
class Inner(object):
# At this time, the "Outer" class has been fully defined but
# is *NOT* yet accessible as a module-scoped attribute. Ergo,
# the *ONLY* means of exposing the "Outer" class to the
# recursive decoration of this get_outer() method is to
# explicitly pass the "Outer" class as the "cls_root"
# parameter to all decoration calls.
def get_outer(self) -> Outer:
return Outer()
Note also that nested classes have *no* implicit access to either their
parent classes *or* to class variables declared by those parent classes.
Nested classes *only* have explicit access to module-scoped classes --
exactly like any other arbitrary objects: e.g.,
.. code-block:: python
class Outer(object):
my_str = str
class Inner(object):
# This induces a fatal compile-time exception resembling:
# NameError: name 'my_str' is not defined
def get_str(self) -> my_str:
return 'Oh, Gods.'
Nonetheless, this tuple *must* contain all of those nested classes
lexically containing the passed method. Why? Because this function
resolves local attributes defined in the body of the callable on the
current call stack lexically containing those nested classes (if any) by
treating the length of this tuple as the total number of classes
lexically nesting the current class. In short, just pass everything.
Raises
----------
beartype.roar.BeartypePep563Exception
If either:
* ``func`` is *not* a pure-Python callable.
* Evaluating a postponed annotation on that callable raises an exception
(e.g., due to that annotation referring to local state inaccessible in
this deferred context).
'''
# ..................{ VALIDATION }..................
# If that callable is *NOT* pure-Python, raise an exception.
die_unless_func_python(
func=func, exception_cls=BeartypePep563Exception)
# Else, that callable is pure-Python.
# ..................{ DETECTION }..................
# Module directly defining that callable if that callable is defined by a
# module that exists *OR* "None" otherwise (i.e., if that callable declared
# itself to be either dynamically defined in memory by setting its
# "__module__" attribute to "None" *OR* statically defined on disk by a
# non-existent module, in which case that callable should have set its
# "__module__" attribute to "None" instead).
#
# Note that shockingly many callables erroneously declare themselves to be
# statically defined on disk by non-existent modules. This includes methods
# synthesized by the standard "typing.NamedTuple" class -- which set their
# "__module__" attributes to the non-existent module "namedtuple_Foo". :O
func_module = sys_modules.get(func.__module__)
# If it is *NOT* the case that...
if not (
# That callable's module exists *AND*...
func_module and
# That callable's module defined an "annotations" attribute to be
# the "__future__.annotations" object, that module enabled PEP 563
# support with a leading statement resembling:
# from __future__ import annotations
getattr(func_module, 'annotations', None) is _FUTURE_ANNOTATIONS
# Then that callable's hints are *NOT* postponed under PEP 563. In this
# case, silently reduce to a noop.
):
return
# Else, these hints are postponed under PEP 563. In this case, resolve these
# hints to their referents.
# ..................{ LOCALS }..................
# Global scope for the decorated callable.
func_globals = get_func_globals(
func=func, exception_cls=BeartypePep563Exception)
# print(f'PEP 563-postponed type hint {repr(func)} globals:\n{repr(func_globals)}\n')
# Dictionary mapping from parameter name to postponed hint for each
# annotated parameter and return value of this callable, localized for
# negligible efficiency gains.
func_hints_postponed = func.__annotations__
# Dictionary mapping from parameter name to resolved hint for each
# annotated parameter and return value of this callable, initialized to a
# shallow copy of the postponed dictionary.
#
# Note that the "func.__annotations__" dictionary *CANNOT* be safely
# directly assigned to below, as the loop performing that assignment below
# necessarily iterates over that dictionary. As with most languages, Python
# containers cannot be safely mutated while being iterated.
func_hints_resolved = func_hints_postponed.copy()
# Local scope for the decorated callable. Since calculating this scope is
# O(n**2) for an arbitrary large integer n, defer doing so until we must
# (i.e., when that callable's postponed annotations are *NOT* resolvable
# given only the global scope of that callable).
func_locals: Optional[LexicalScope] = None
# Non-empty frozen set of the unqualified names of all parent callables
# lexically containing this nested callable (including this nested
# callable itself) if this callable is nested *OR* the empty frozen set
# otherwise (i.e., if this callable is declared at global scope in its
# submodule).
func_scope_names = (
frozenset(func.__qualname__.rsplit(sep='.'))
if is_func_nested(func) else
_FROZEN_SET_EMPTY
)
# ..................{ RESOLUTION }..................
# For the parameter name (or "return" for the return value) and
# corresponding annotation of each of this callable's type hints...
#
# Note that refactoring this iteration into a dictionary comprehension
# would be largely infeasible (e.g., due to the need to raise
# human-readable exceptions on evaluating unevaluatable type hints) as well
# as largely pointless (e.g., due to dictionary comprehensions being either
# no faster or even slower than explicit iteration for small dictionary
# sizes, as "func.__annotations__" usually is).
for pith_name, pith_hint in func_hints_postponed.items():
# If...
if (
# This hint is a string *AND*...
isinstance(pith_hint, str) and
# This string is non-empty...
pith_hint
):
# Then this hint is a PEP 563-compliant postponed hint. Note that this
# test could technically yield a false positive in the unlikely edge
# case that this annotation was previously postponed but has since been
# replaced in-place by its referent that is itself a string matching the
# PEP 563 format without actually being a PEP 563-formatted postponed
# string. Since PEP 563 authors failed to provide solutions to this or
# any other outstanding runtime issues with PEP 563, there's *NOTHING*
# we can do about that. We prefer to pretend everything will be okay.
# print(f'Resolving postponed hint {repr(pith_hint)}...')
#FIXME: Since CPython appears to currently be incapable of even
#defining a deeply nested annotation that would violate this limit,
#we avoid performing this test for the moment. Nonetheless, it's
#likely that CPython will permit such annotations to be defined
#under some *VERY* distant major version. Ergo, we preserve this.
# If this string internally exceeds the child limit (i.e., maximum
# number of nested child type hints listed as subscripted arguments
# of the parent PEP-compliant type hint produced by evaluating this
# string) permitted by the @beartype decorator, raise an exception.
#_die_if_hint_repr_exceeds_child_limit(
# hint_repr=pith_hint, pith_label=pith_label)
# If this hint is the unqualified name of one or more parent
# callables or classes of this callable, then this hint is a
# relative forward reference to a parent callable or class of this
# callable that is currently being defined but has yet to be
# defined in full. By deduction, we can infer this hint *MUST* have
# been a locally or globally scoped attribute of this callable
# before being postponed by PEP 563 into a relative forward
# reference to that attribute: e.g.,
# # If this loop is iterating over a postponed type hint
# # annotating this post-PEP 563 method signature...
# class MuhClass:
# @beartype
# def muh_method(self) -> 'MuhClass': ...
#
# # ...then the original type hints prior to being postponed
# # *MUST* have annotated this pre-PEP 563 method signature.
# class MuhClass:
# @beartype
# def muh_method(self) -> MuhClass: ...
#
# In this case, we absolutely *MUST* avoid attempting to resolve
# this forward reference. Why? Disambiguity. Although the
# "MuhClass" class has yet to be defined at the time @beartype
# decorates the muh_method() method, an attribute of the same name
# may already have been defined at that time: e.g.,
# # While bad form, PEP 563 postpones this valid logic...
# MuhClass = "Just kidding! Had you going there, didn't I?"
# class MuhClass:
# @beartype
# def muh_method(self) -> MuhClass: ...
#
# # ...into this relative forward reference.
# MuhClass = "Just kidding! Had you going there, didn't I?"
# class MuhClass:
# @beartype
# def muh_method(self) -> 'MuhClass': ...
#
# Naively resolving this forward reference would erroneously
# replace this hint with the previously declared attribute rather
# than the class currently being declared: e.g.,
# # Naive PEP 563 resolution would replace the above by this!
# MuhClass = "Just kidding! Had you going there, didn't I?"
# class MuhClass:
# @beartype
# def muh_method(self) -> (
# "Just kidding! Had you going there, didn't I?"): ...
#
# This isn't simply an edge-case disambiguity, however. This exact
# situation commonly arises whenever reloading modules containing
# @beartype-decorated callables annotated with self-references
# (e.g., by passing those modules to the standard
# importlib.reload() function). Why? Because module reloading is
# ill-defined and mostly broken under Python. Since the
# importlib.reload() function fails to delete any of the attributes
# of the module to be reloaded before reloading that module, the
# parent callable or class referred to by this hint will be briefly
# defined for the duration of @beartype's decoration of this
# callable as the prior version of that parent callable or class!
#
# Resolving this hint would thus superficially succeed, while
# actually erroneously replacing this hint with the prior rather
# than current version of that parent callable or class. @beartype
# would then wrap the decorated callable with a wrapper expecting
# the prior rather than current version of that parent callable or
# class. All subsequent calls to that wrapper would then fail.
# Since this actually happened, we ensure it never does again.
#
# Lastly, note that this edge case *ONLY* supports top-level
# relative forward references (i.e., syntactically valid Python
# identifier names subscripting *NO* parent type hints). Child
# relative forward references will continue to raise exceptions. As
# resolving PEP 563-postponed type hints effectively reduces to a
# single "all or nothing" call of the low-level eval() builtin
# accepting *NO* meaningful configuration, there exists *NO* means
# of only partially resolving parent type hints while preserving
# relative forward references subscripting those hints. The
# solution in those cases is for end users to either:
#
# * Decorate classes rather than methods: e.g.,
# # Users should replace this method decoration, which will
# # fail at runtime...
# class MuhClass:
# @beartype
# def muh_method(self) -> list[MuhClass]: ...
#
# # ...with this class decoration, which will work.
# @beartype
# class MuhClass:
# def muh_method(self) -> list[MuhClass]: ...
# * Replace implicit with explicit forward references: e.g.,
# # Users should replace this implicit forward reference, which
# # will fail at runtime...
# class MuhClass:
# @beartype
# def muh_method(self) -> list[MuhClass]: ...
#
# # ...with this explicit forward reference, which will work.
# class MuhClass:
# @beartype
# def muh_method(self) -> list['MuhClass']: ...
#
# Indeed, the *ONLY* reasons we support this common edge case are:
# * This edge case is indeed common.
# * This edge case is both trivial and efficient to support.
#
# tl;dr: Preserve this hint for disambiguity and skip to the next.
if pith_hint in func_scope_names:
continue
# If the local scope of the decorated callable has yet to be
# decided...
if func_locals is None:
# Attempt to resolve this hint against the global scope defined
# by the module declaring the decorated callable.
#
# Note that this first attempt intentionally does *NOT* attempt
# to evaluate this postponed hint against both the global and
# local scope of the decorated callable. Why? Because:
# * The overwhelming majority of real-world type hints are
# imported at module scope (e.g., from "collections.abc" and
# "typing") and thus accessible as global attributes.
# * Deciding the local scope of the decorated callable is an
# O(n**2) operation for an arbitrarily large integer n. Ergo,
# that decision should be deferred as long as feasible to
# minimize space and time costs of the @beartype decorator.
try:
func_hints_resolved[pith_name] = eval(
pith_hint, func_globals)
# If that succeeded, continue to the next postponed hint.
continue
# If that resolution failed, it probably did so due to
# requiring one or more attributes available only in the local
# scope for the decorated callable. In this case...
except Exception:
# print(f'Resolving PEP 563-postponed type hint {repr(pith_hint)} locals...')
# print(f'Ignoring {len(cls_stack or ())} lexical parent class scopes...')
# Local scope for the decorated callable.
func_locals = get_func_locals(
func=func,
# Ignore all lexical scopes in the fully-qualified name
# of the decorated callable corresponding to owner
# classes lexically nesting the current decorated class
# containing that callable (including the current
# decorated class). Why? Because these classes are *ALL*
# currently being decorated and thus have yet to be
# encapsulated by new stack frames on the call stack. If
# these lexical scopes are *NOT* ignored, this call to
# get_func_locals() will fail to find the parent lexical
# scope of the decorated callable and then raise an
# unexpected exception.
#
# Consider, for example, this nested class decoration of
# a fully-qualified "muh_package.Outer" class:
# from beartype import beartype
#
# @beartype
# class Outer(object):
# class Middle(object):
# class Inner(object):
# def muh_method(self) -> str:
# return 'Painful API is painful.'
#
# When @beartype finally recurses into decorating the
# nested muh_package.Outer.Middle.Inner.muh_method()
# method, this call to get_func_locals() if *NOT* passed
# this parameter would naively assume that the parent
# lexical scope of the current muh_method() method on
# the call stack is named "Inner". Instead, the parent
# lexical scope of that method on the call stack is
# named "muh_package" -- the first lexical scope
# enclosing that method that exists on the call stack.
# Ergo, the non-existent "Outer", "Middle", and "Inner"
# lexical scopes must *ALL* be silently ignored here.
func_scope_names_ignore=(
0 if cls_stack is None else len(cls_stack)),
#FIXME: Consider dynamically calculating exactly how
#many additional @beartype-specific frames are ignorable
#on the first call to this function, caching that
#number, and then reusing that cached number on all
#subsequent calls to this function. The current approach
#employed below of naively hard-coding a number of
#frames to ignore was incredibly fragile and had to be
#effectively disabled, which hampers runtime efficiency.
# Ignore additional frames on the call stack embodying:
# * The current call to this function.
#
# Note that, for safety, we currently avoid ignoring
# additional frames that we could technically ignore.
# These include:
# * The call to the parent
# beartype._check.checkcall.BeartypeCall.reinit()
# method.
# * The call to the parent @beartype.beartype()
# decorator.
#
# Why? Because the @beartype codebase has been
# sufficiently refactored so as to render any such
# attempts non-trivial, fragile, and frankly dangerous.
func_stack_frames_ignore=1,
exception_cls=BeartypePep563Exception,
)
# If the decorated callable is a method transitively defined
# by a root decorated class, add a pair of new local
# attributes exposing both:
#
# * The unqualified basename of the root decorated class.
# Why? Because this class may be recursively referenced in
# postponed type hints and *MUST* thus be exposed to *ALL*
# postponed type hints. However, this class is currently
# being decorated and thus has yet to be defined in
# either:
# * If this class is module-scoped, the global attribute
# dictionary of that module and thus the "func_globals"
# dictionary.
# * If this class is closure-scoped, the local attribute
# dictionary of that closure and thus the "func_locals"
# dictionary.
# * The unqualified basename of the current decorated class.
# Why? For similar reasons. Since the current decorated
# class may be lexically nested in the root decorated
# class, the current decorated class is *NOT* already
# accessible as either a global or local; the current
# decorated class is *NOT* already exposed by either the
# "func_globals" or "func_locals" dictionary. Exposing the
# current decorated class to postponed type hints
# referencing that class thus requires adding a local
# attribute exposing that class.
#
# Note that:
# * *ALL* intermediary classes (i.e., excluding the root
# decorated class) lexically nesting the current decorated
# class are irrelevant. Intermediary classes are neither
# module-scoped nor closure-scoped and thus *NOT*
# accessible as either globals or locals to the nested
# lexical scope of the current decorated class: e.g.,
# # This raises a parser error and is thus *NOT* fine:
# # NameError: name 'muh_type' is not defined
# class Outer(object):
# class Middle(object):
# muh_type = str
#
# class Inner(object):
# def muh_method(self) -> muh_type:
# return 'Dumpster fires are all I see.'
# * This implicitly overrides any previously declared locals
# of the same name. Although non-ideal, this constitutes
# syntactically valid Python and is thus *NOT* worth
# emitting even a non-fatal warning over: e.g.,
# # This is fine... technically.
# from beartype import beartype
# def muh_closure() -> None:
# MuhClass = 'This is horrible, yet fine.'
#
# @beartype
# class MuhClass(object):
# def muh_method(self) -> str:
# return 'Look away and cringe, everyone!'
if cls_stack:
# Root and current decorated classes.
cls_root = cls_stack[0]
cls_curr = cls_stack[-1]
# Unqualified basenames of the root and current
# decorated classes.
cls_root_basename = cls_root.__name__
cls_curr_basename = cls_curr.__name__
# Add new locals exposing these classes to type hints,
# implicitly overwriting any locals of the same name in
# the higher-level local scope for any closure declaring
# this class if any. These classes are currently being
# decorated and thus guaranteed to be the most recent
# declarations of local variables by these names.
#
# Note that the current class assumes lexical precedence
# over the root class and is thus intentionally added
# *AFTER* the latter.
func_locals[cls_root_basename] = cls_root
func_locals[cls_curr_basename] = cls_curr
# Local scope for the class directly defining the
# decorated callable.
#
# Note that callables *ONLY* have direct access to
# attributes declared by the classes directly defining
# those callables. Ergo, the local scopes for parent
# classes of this class (including the root decorated
# class) are irrelevant.
cls_curr_locals = get_type_locals(
cls=cls_curr,
exception_cls=BeartypePep563Exception,
)
# Forcefully merge this local scope into the current
# local scope, implicitly overwriting any locals of the
# same name. Class locals necessarily assume lexical
# precedence over:
# * These classes themselves.
# * Locals defined by higher-level parent classes.
# * Locals defined by closures defining these classes.
func_locals.update(cls_curr_locals)
# Else, the decorated callable is *NOT* a method
# transitively declared by a root decorated class.
# In either case, the local scope of the decorated callable has now
# been decided. (Validate this to be the case.)
assert func_locals is not None, (
f'{func.__qualname__}() local scope undecided.')
# Attempt to resolve this hint against both the global and local
# scopes for the decorated callable.
try:
func_hints_resolved[pith_name] = eval(
pith_hint, func_globals, func_locals)
# If that resolution also fails...
except Exception as exception:
# If...
if (
# That resolution fails with a "NameError" *AND*...
isinstance(exception, NameError) and
# This hint is a valid Python identifier...
is_identifier(pith_hint)
):
# This hint is *PROBABLY* a forward reference hinted as a
# string. In this case, defer validation of this string as a
# valid forward reference to a class (which presumably has
# yet to be declared) until call time of the decorated
# callable by preserving this string as is.
#
# PEP 563 prevents runtime type checkers from distinguishing
# between forward references hinted as strings and
# non-forward references postponed under PEP
# 563 as strings. Ideally, PEP 563 would postpone the former
# as machine-readable string representations (e.g.,
# converting "muh.class.name" to "'muh.class.name'"). It
# doesn't. Instead, it simply preserves forward references
# hinted as strings! Who approved this appalling
# abomination that breaks CPython itself?
# print(f'Deferring postponed forward reference hint {repr(pith_hint)}...')
continue
# Else, this hint is *PROBABLY NOT* a forward reference hinted
# as a string.
# Human-readable label describing this pith.
exception_prefix = prefix_callable_decorated_pith(
func=func, pith_name=pith_name)
# Wrap this low-level non-human-readable exception with a
# high-level human-readable beartype-specific exception.
raise BeartypePep563Exception(
f'{exception_prefix}PEP 563-postponed type hint '
f'{repr(pith_hint)} syntactically invalid '
f'(i.e., "{str(exception)}") under:\n'
f'~~~~[ GLOBAL SCOPE ]~~~~\n{repr(func_globals)}\n'
f'~~~~[ LOCAL SCOPE ]~~~~\n{repr(func_locals)}'
) from exception
# Else, this hint is *NOT* a PEP 563-formatted postponed string. Since
# PEP 563 is active for this callable, this implies this hint *MUST*
# have been previously postponed but has since been replaced in-place
# with its referent -- typically due to this callable being decorated
# by @beartype and one or more other hint-based decorators.
#
# In this case, silently preserve this hint as is. Since PEP 563
# provides no means of distinguishing expected from unexpected
# evaluation of a postponed hint, either emitting a non-fatal
# warning *OR* raising a fatal exception here would be overly violent.
# Instead, we conservatively assume this hint was previously
# postponed but has already been properly resolved to its referent by
# external logic elsewhere (e.g., yet another runtime type checker).
#
# Did we mention that PEP 563 is a shambolic cesspit of inelegant
# language design and thus an indictment of Guido himself, who approved
# this festering mess that:
#
# * Critically breaks backward compatibility throughout the
# well-established Python 3 ecosystem.
# * Unhelpfully provides no general-purpose API for either:
# * Detecting postponed hints on arbitrary objects.
# * Resolving those hints.
# * Dramatically reduces the efficiency of hint-based decorators
# for no particularly good reason.
# * Non-orthogonally prohibits hints from accessing local state.
#
# Because we should probably mention those complaints here.
# else:
#FIXME: See above.
# If the machine-readable representation of this annotation (which
# internally encapsulates the same structural metadata as the
# PEP 563-formatted postponed string representation of this
# annotation) internally exceeds the child limit as tested above,
# again raise an exception.
#
# Note that obtaining the machine-readable representation of this
# annotation incurs a minor performance penalty. However, since
# effectively *ALL* annotations will be PEP 563-formatted postponed
# strings once the next major Python version officially instates
# PEP 563 as a mandatory backward compatibility-breaking change,
# this penalty will effectively cease to exist for the
# overwhelming majority of real-world annotations. *shrug*
#_die_if_hint_repr_exceeds_child_limit(
# hint_repr=repr(pith_hint),
# pith_label=pith_label)
# Assert the above resolution resolved the expected number of type hints.
assert len(func_hints_resolved) == len(func_hints_postponed), (
f'{func.__qualname__}() PEP 563-postponed type hint resolution mismatch: '
f'{len(func_hints_resolved)} resolved hints != '
f'{len(func_hints_postponed)} postponed hints.')
# Atomically (i.e., all-at-once) replace this callable's postponed
# annotations with these resolved annotations for safety and efficiency.
#
# While the @beartype decorator goes to great lengths to preserve the
# originating "__annotations__" dictionary as is, PEP 563 is sufficiently
# expensive, non-trivial, and general-purpose to implement that generally
# resolving postponed annotations for downstream third-party callers is
# justified. Everyone benefits from replacing useless postponed annotations
# with useful real annotations; so, we do so.
# print(
# f'{func.__name__}() PEP 563-postponed annotations resolved:'
# f'\n\t------[ POSTPONED ]------\n\t{func_hints_postponed}'
# f'\n\t------[ RESOLVED ]------\n\t{func_hints_resolved}'
# )
func.__annotations__ = func_hints_resolved
# ....................{ PRIVATE ~ constants }....................
# Cached reference to the "__future__.annotations" feature object. The resolver
# above detects PEP 563 activation by testing whether a module's "annotations"
# attribute *is* this exact object (i.e., identity comparison), which is only
# true when that module performed "from __future__ import annotations".
_FUTURE_ANNOTATIONS = __future__.annotations
'''
:attr:`__future__.annotations` object, globalized as a private constant of this
submodule to negligibly optimize repeated lookups of this object.
'''
# ....................{ PRIVATE ~ resolvers }....................
#FIXME: We currently no longer require this. See above for further commentary.
# from beartype.roar import BeartypeDecorHintPepException
# from beartype._util.cache.pool.utilcachepoollistfixed import FIXED_LIST_SIZE_MEDIUM
#
# def _die_if_hint_repr_exceeds_child_limit(
# hint_repr: str, pith_label: str) -> None:
# '''
# Raise an exception if the passed machine-readable representation of an
# arbitrary annotation internally exceeds the **child limit** (i.e., maximum
# number of nested child type hints listed as subscripted arguments of
# PEP-compliant type hints) permitted by the :func:`beartype.beartype`
# decorator.
#
# The :mod:`beartype` decorator internally traverses over these nested child
# types of the parent PEP-compliant type hint produced by evaluating this
# string representation to its referent with a breadth-first search (BFS).
# For efficiency, this search is iteratively implemented with a cached
# **fixed list** (i.e.,
# :class:`beartype._util.cache.pool.utilcachepoollistfixed.FixedList`
# instance) rather than recursively implemented with traditional recursion.
# Since the size of this list is sufficiently large to handle all uncommon
# *and* uncommon edge cases, this list suffices for *all* PEP-compliant type
# hints of real-world interest.
#
# Nonetheless, safety demands that we guarantee this by explicitly raising an
# exception when the internal structure of this string suggests that the
# resulting PEP-compliant type hint will subsequently violate this limit.
# This has the convenient side effect of optimizing that BFS, which may now
# unconditionally insert child hints into arbitrary indices of that cached
# fixed list without having to explicitly test whether each index exceeds the
# fixed length of that list.
#
# Caveats
# ----------
# **This function is currently irrelevant.** Why? Because all existing
# implementations of the :mod:`typing` module are sufficiently
# space-consumptive that they already implicitly prohibit deep nesting of
# PEP-compliant type hints. See commentary in the
# :mod:`beartype_test.a00_unit.data.pep.pep563.data_pep563_poem` submodule for appalling details.
# Ergo, this validator could technically be disabled. Indeed, if this
# validator actually incurred any measurable costs, it *would* be disabled.
# Since it doesn't, this validator has preserved purely for forward
# compatibility with some future revision of the :mod:`typing` module that
# hopefully improves that module's horrid space consumption.
#
# Parameters
# ----------
# hint_repr : str
# Machine-readable representation of this annotation, typically but *not*
# necessarily as a :pep:`563`-formatted postponed string.
# pith_label : str
# Human-readable label describing the callable parameter or return value
# annotated by this string.
#
# Raises
# ----------
# BeartypeDecorHintPepException
# If this representation internally exceeds this limit.
# '''
# assert isinstance(hint_repr, str), f'{repr(hint_repr)} not string.'
#
# # Total number of hints transitively encapsulated in this hint (i.e., the
# # total number of all child hints of this hint as well as this hint
# # itself), defined as the summation of...
# hints_num = (
# # Number of parent PEP-compliant type hints nested in this hint,
# # including this hint itself *AND*...
# hint_repr.count('[') +
# # Number of child type hints (both PEP-compliant type hints and
# # non-"typing" types) nested in this hint, excluding the last child
# # hint subscripting each parent PEP-compliant type hint *AND*...
# hint_repr.count(',') +
# # Number of last child hints subscripting all parent PEP-compliant type
# # hints.
# hint_repr.count(']')
# )
#
# # If this number exceeds the fixed length of the cached fixed list with
# # which the @beartype decorator traverses this hint, raise an exception.
# if hints_num >= FIXED_LIST_SIZE_MEDIUM:
# raise BeartypeDecorHintPepException(
# f'{pith_label} hint representation "{hint_repr}" '
# f'contains {hints_num} subscripted arguments '
# f'exceeding maximum limit {FIXED_LIST_SIZE_MEDIUM-1}.'
# )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype slow cave** (i.e., private subset of the public :mod:`beartype.cave`
subpackage profiled to *not* be efficiently importable at :mod:`beartype`
startup and thus *not* safely importable throughout the internal
:mod:`beartype` codebase).
This submodule currently imports from expensive third-party packages on
importation (e.g., :mod:`numpy`) despite :mod:`beartype` itself *never*
requiring those imports. Until resolved, that subpackage is considered tainted.
'''
# ....................{ TODO }....................
#FIXME: Excise this submodule away, please. This submodule was a horrendous idea
#and has plagued the entire "beartype.cave" subpackage with unnecessary slowdown
#at import time. It's simply time for this to go, please.
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from argparse import (
ArgumentParser,
_SubParsersAction,
)
from weakref import (
ProxyTypes,
ref,
)
# ....................{ TYPES ~ lib }....................
# Types conditionally dependent upon the importability of third-party
# dependencies. These types are subsequently redefined by try-except blocks
# below and initially default to "UnavailableType" for simple types.
#
# NOTE(review): no such try-except redefinitions are visible in this submodule
# as shown here — this header may be a holdover from an earlier revision;
# confirm against the full file.
# ....................{ TYPES ~ stdlib : argparse }....................
ArgParserType = ArgumentParser
'''
Type of argument parsers parsing all command-line arguments for either
top-level commands *or* subcommands of those commands.
'''
# NOTE: "_SubParsersAction" is a private "argparse" class; its name and
# location are implementation details that could change across CPython
# versions, so this alias insulates downstream callers from that instability.
ArgSubparsersType = _SubParsersAction
'''
Type of argument subparser containers parsing subcommands for parent argument
parsers parsing either top-level commands *or* subcommands of those commands.
'''
# ....................{ TYPES ~ stdlib : weakref }....................
WeakRefCType = ref
'''
Type of all **unproxied weak references** (i.e., callable objects yielding
strong references to their referred objects when called).
This type matches both the C-based :class:`weakref.ref` class *and* the
pure-Python :class:`weakref.WeakMethod` class, which subclasses the former.
'''
# ....................{ TUPLES ~ stdlib : weakref }....................
WeakRefProxyCTypes = ProxyTypes
'''
Tuple of all **C-based weak reference proxy classes** (i.e., classes
implemented in low-level C whose instances are weak references to other
instances masquerading as those instances).
This tuple contains classes matching both callable and uncallable weak
reference proxies.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype cave.**
This submodule collects common types (e.g., :class:`NoneType`, the type of the
``None`` singleton) and tuples of common types (e.g., :data:`CallableTypes`, a
tuple of the types of all callable objects).
PEP 484
----------
This module is intentionally *not* compliant with the :pep:`484` standard
implemented by the stdlib :mod:`typing` module, which formalizes type hinting
annotations with a catalogue of generic classes and metaclasses applicable to
common use cases. :mod:`typing` enables end users to enforce contractual
guarantees over the contents of arbitrarily complex data structures with the
assistance of third-party static type checkers (e.g., :mod:`mypy`,
:mod:`pyre`), runtime type checkers (e.g., :mod:`beartype`, :mod:`typeguard`),
and integrated development environments (e.g., PyCharm).
Genericity comes at a cost, though. Deeply type checking a container containing
``n`` items, for example, requires type checking both that container itself
non-recursively *and* each item in that container recursively. Doing so has
time complexity ``O(N)`` for ``N >= n`` the total number of items transitively
contained in this container (i.e., items directly contained in this container
*and* items directly contained in containers contained in this container).
While the cost of this operation can be paid either statically *or* amortized
at runtime over all calls to annotated callables accepting that container, the
underlying cost itself remains the same.
By comparison, this module only contains standard Python classes and tuples of
such classes intended to be passed as is to the C-based :func:`isinstance`
builtin and APIs expressed in terms of that builtin (e.g., :mod:`beartype`).
This module only enables end users to enforce contractual guarantees over the
types but *not* contents of arbitrarily complex data structures. This
intentional tradeoff maximizes runtime performance at a cost of ignoring the
types of items contained in containers.
In summary:
===================== ==================== ====================================
feature set :mod:`beartype.cave` :mod:`typing`
===================== ==================== ====================================
type checking **shallow** **deep**
type check items? **no** **yes**
:pep:`484`-compliant? **no** **yes**
time complexity ``O(1)`` ``O(N)``
performance stupid fast *much* less stupid fast
implementation C-based builtin call pure-Python (meta)class method calls
low-level primitive :func:`isinstance` :mod:`typing.TypingMeta`
===================== ==================== ====================================
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: *NEVER IMPORT FROM THIS SUBPACKAGE FROM WITHIN BEARTYPE ITSELF.*
# This subpackage currently imports from expensive third-party packages on
# importation (e.g., NumPy) despite beartype itself *NEVER* requiring those
# imports. Until resolved, this subpackage is considered tainted.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To prevent "mypy --no-implicit-reexport" from raising literally
# hundreds of errors at static analysis time, *ALL* public attributes *MUST* be
# explicitly reimported under the same names with "{exception_name} as
# {exception_name}" syntax rather than merely "{exception_name}". Yes, this is
# ludicrous. Yes, this is mypy. For posterity, these failures resemble:
# beartype/_cave/_cavefast.py:47: error: Module "beartype.roar" does not
# explicitly export attribute "BeartypeCallUnavailableTypeException";
# implicit reexport disabled [attr-defined]
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.cave._cavelib import (
# Types.
ArgParserType as ArgParserType,
ArgSubparsersType as ArgSubparsersType,
WeakRefCType as WeakRefCType,
# Type tuples.
WeakRefProxyCTypes as WeakRefProxyCTypes,
)
from beartype._cave._caveabc import (
BoolType as BoolType,
)
from beartype._cave._cavefast import (
# Types.
AnyType as AnyType,
AsyncCoroutineCType as AsyncCoroutineCType,
AsyncGeneratorCType as AsyncGeneratorCType,
CallableCodeObjectType as CallableCodeObjectType,
ClassDictType as ClassDictType,
CallableFrameType as CallableFrameType,
CallablePartialType as CallablePartialType,
ClassType as ClassType,
ClosureVarCellType as ClosureVarCellType,
CollectionType as CollectionType,
ContainerType as ContainerType,
EllipsisType as EllipsisType,
EnumType as EnumType,
EnumMemberType as EnumMemberType,
ExceptionTracebackType as ExceptionTracebackType,
FileType as FileType,
FunctionType as FunctionType,
FunctionOrMethodCType as FunctionOrMethodCType,
GeneratorCType as GeneratorCType,
GeneratorType as GeneratorType,
HashableType as HashableType,
HintGenericSubscriptedType as HintGenericSubscriptedType,
IntOrFloatType as IntOrFloatType,
IntType as IntType,
IterableType as IterableType,
IteratorType as IteratorType,
MappingMutableType as MappingMutableType,
MappingType as MappingType,
MethodBoundInstanceDunderCType as MethodBoundInstanceDunderCType,
MethodBoundInstanceOrClassType as MethodBoundInstanceOrClassType,
MethodDecoratorClassType as MethodDecoratorClassType,
MethodDecoratorPropertyType as MethodDecoratorPropertyType,
MethodDecoratorStaticType as MethodDecoratorStaticType,
MethodUnboundClassCType as MethodUnboundClassCType,
MethodUnboundInstanceDunderCType as MethodUnboundInstanceDunderCType,
MethodUnboundInstanceNondunderCType as MethodUnboundInstanceNondunderCType,
MethodUnboundPropertyNontrivialCExtensionType as
MethodUnboundPropertyNontrivialCExtensionType,
MethodUnboundPropertyTrivialCExtensionType as
MethodUnboundPropertyTrivialCExtensionType,
ModuleType as ModuleType,
NoneType as NoneType,
NotImplementedType as NotImplementedType,
NumberRealType as NumberRealType,
NumberType as NumberType,
SizedType as SizedType,
QueueType as QueueType,
RegexCompiledType as RegexCompiledType,
RegexMatchType as RegexMatchType,
SetType as SetType,
SequenceMutableType as SequenceMutableType,
SequenceType as SequenceType,
StrType as StrType,
UnavailableType as UnavailableType,
# Type tuples.
AsyncCTypes as AsyncCTypes,
BoolOrNumberTypes as BoolOrNumberTypes,
CallableCTypes as CallableCTypes,
CallableOrClassTypes as CallableOrClassTypes,
CallableOrStrTypes as CallableOrStrTypes,
CallableTypes as CallableTypes,
DecoratorTypes as DecoratorTypes,
FunctionTypes as FunctionTypes,
ModuleOrStrTypes as ModuleOrStrTypes,
MethodBoundTypes as MethodBoundTypes,
MethodDecoratorBuiltinTypes as MethodDecoratorBuiltinTypes,
MethodUnboundTypes as MethodUnboundTypes,
MethodTypes as MethodTypes,
MappingOrSequenceTypes as MappingOrSequenceTypes,
ModuleOrSequenceTypes as ModuleOrSequenceTypes,
NumberOrIterableTypes as NumberOrIterableTypes,
NumberOrSequenceTypes as NumberOrSequenceTypes,
RegexTypes as RegexTypes,
ScalarTypes as ScalarTypes,
TestableTypes as TestableTypes,
UnavailableTypes as UnavailableTypes,
)
from beartype._cave._cavemap import (
NoneTypeOr as NoneTypeOr,
)
# ....................{ DEPRECATIONS }....................
def __getattr__(attr_deprecated_name: str) -> object:
    '''
    Dynamically retrieve a deprecated attribute with the passed unqualified
    name from this submodule, emitting a non-fatal deprecation warning on each
    retrieval if this submodule defines that attribute *or* raising an
    exception otherwise.

    The Python interpreter implicitly calls this :pep:`562`-compliant module
    dunder function under Python >= 3.7 *after* failing to directly retrieve
    an explicit attribute with this name from this submodule. Since this
    dunder function is only called on failed attribute lookups, neither space
    nor time efficiency are a concern here.

    Parameters
    ----------
    attr_deprecated_name : str
        Unqualified name of the deprecated attribute to be retrieved.

    Returns
    ----------
    object
        Value of this deprecated attribute.

    Warns
    ----------
    :class:`DeprecationWarning`
        If this attribute is deprecated.

    Raises
    ----------
    :exc:`AttributeError`
        If this attribute is unrecognized and thus erroneous.
    '''

    # Defer this import to avoid polluting the module namespace.
    from beartype._util.mod.utilmoddeprecate import deprecate_module_attr

    # Map from the unqualified name of each deprecated attribute formerly
    # declared by this submodule to that of the non-deprecated attribute
    # currently declared by this submodule.
    attr_deprecated_to_nondeprecated = {
        'HintPep585Type': 'HintGenericSubscriptedType',
    }

    # Emit a non-fatal deprecation warning and return the current value of
    # this deprecated attribute.
    return deprecate_module_attr(
        attr_deprecated_name=attr_deprecated_name,
        attr_deprecated_name_to_nondeprecated_name=(
            attr_deprecated_to_nondeprecated),
        attr_nondeprecated_name_to_value=globals(),
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype fast cave** (i.e., private subset of the public :mod:`beartype.cave`
subpackage profiled to be efficiently importable at :mod:`beartype` startup and
thus safely importable throughout the internal :mod:`beartype` codebase).
The public :mod:`beartype.cave` subpackage has been profiled to *not* be
efficiently importable at :mod:`beartype` startup and thus *not* safely
importable throughout the internal :mod:`beartype` codebase. Why? Because
:mod:`beartype.cave` currently imports from expensive third-party packages on
importation (e.g., :mod:`numpy`) despite :mod:`beartype` itself *never*
requiring those imports. Until resolved, that subpackage is considered tainted.
'''
# ....................{ TODO }....................
#FIXME: Add types for all remaining useful "collections.abc" interfaces,
#including:
#* "Reversible".
#* "AsyncIterable".
#* "AsyncIterator".
#* "AsyncGenerator".
#
#There certainly exist other "collections.abc" interfaces as well, but it's
#unclear whether they have any practical real-world utility during type
#checking. These include:
#* "ByteString". (wut)
#* Dictionary-specific views (e.g., "MappingView", "ItemsView").
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
import functools as _functools
import numbers as _numbers
import re as _re
from beartype.roar import BeartypeCallUnavailableTypeException
from beartype._cave._caveabc import BoolType
from beartype._util.py.utilpyversion import (
IS_PYTHON_AT_LEAST_3_9,
IS_PYTHON_AT_LEAST_3_8,
)
from collections import deque as _deque
from collections.abc import (
Collection as _Collection,
Container as _Container,
Generator as _Generator,
Hashable as _Hashable,
Iterable as _Iterable,
Iterator as _Iterator,
Mapping as _Mapping,
MutableMapping as _MutableMapping,
Sequence as _Sequence,
MutableSequence as _MutableSequence,
Set as _Set,
Sized as _Sized,
)
from enum import (
Enum as _Enum,
EnumMeta as _EnumMeta,
)
from io import IOBase as _IOBase
from typing import Any
# Note that:
#
# * "BuiltinMethodType" is intentionally *NOT* imported, as that type is
# exactly synonymous with "BuiltinFunctionType", implying C-based methods are
# indistinguishable from C-based functions. To prevent C-based functions from
# being misidentified as C-based methods, all C-based functions and methods
# are ambiguously identified as C-based callables.
# * "LambdaType" is intentionally *NOT* imported, as that type is exactly
# synonymous with "FunctionType", implying lambdas are indistinguishable from
# pure-Python functions. To prevent pure-Python functions from being
# misidentified as lambdas, all lambdas are currently misidentified as
# pure-Python functions.
#
# These are the lesser of multiple evils.
from types import (
AsyncGeneratorType as _AsyncGeneratorType,
BuiltinFunctionType as _BuiltinFunctionType,
CoroutineType as _CoroutineType,
FrameType as _FrameType,
FunctionType as _FunctionType,
GeneratorType as _GeneratorType,
GetSetDescriptorType as _GetSetDescriptorType,
MemberDescriptorType as _MemberDescriptorType,
MethodType as _MethodType,
ModuleType as _ModuleType,
TracebackType as _TracebackType,
)
# ....................{ IMPORTS ~ conditional }....................
#FIXME: Preserve for when we inevitably require similar logic in the future.
# # Attempt to import types unavailable under Python 3.5, all of which should
# # be passed through the intermediary _get_type_or_unavailable() helper
# # function first before being assigned to module globals below. The
# # docstrings for such globals should contain a sentence resembling:
# # **This type is unavailable under Python 3.5,** where it defaults to
# # :class:`UnavailableType` for safety.
# try:
# _Collection = type(list[str])
# # If this is Python 3.5, define placeholder globals of the same name.
# except ImportError:
# _Collection = None
# ....................{ CLASSES }....................
class UnavailableType(object):
    '''
    **Unavailable type** (i.e., type *not* available under the active Python
    interpreter, typically due to insufficient Python version or non-installed
    third-party dependencies).

    Instances of this type serve as placeholders for unavailable types.
    Because :func:`isinstance` and :func:`issubclass` consult the
    ``__instancecheck__`` and ``__subclasscheck__`` dunder methods defined on
    the *type* of their second argument, passing such an instance as that
    argument raises a
    :exc:`beartype.roar.BeartypeCallUnavailableTypeException` rather than
    silently returning ``False`` -- loudly notifying callers of this
    unavailability.
    '''

    def __instancecheck__(self, obj) -> None:
        # Reject any isinstance() check against this placeholder.
        raise BeartypeCallUnavailableTypeException(
            f'{self} not passable as the second parameter to isinstance().')

    def __subclasscheck__(self, cls) -> None:
        # Reject any issubclass() check against this placeholder.
        raise BeartypeCallUnavailableTypeException(
            f'{self} not passable as the second parameter to issubclass().')
# This is private, as it's unclear whether anyone requires access to this yet.
class _UnavailableTypesTuple(tuple):
'''
Type of any **tuple of unavailable types** (i.e., types *not* available
under the active Python interpreter, typically due to insufficient Python
version or non-installed third-party dependencies).
'''
pass
# ....................{ TYPES ~ core }....................
# NOTE(review): these globals are intentionally left unannotated (e.g., *not*
# declared as ": type"), presumably so that static type checkers continue to
# treat them as type aliases usable in annotations -- confirm before adding
# annotations here.
AnyType = object
'''
Type of all objects regardless of type.
'''


ClassType = type
'''
Type of all types.
'''


FileType = _IOBase
'''
Abstract base class of all **file-like objects** (i.e., objects implementing
the standard ``read()``, ``write()``, and ``close()`` methods).
'''


ModuleType = _ModuleType
'''
Type of all **C- and Python-based modules** (i.e., importable files implemented
either as C extensions or in pure Python).
'''
# ....................{ TYPES ~ core : singleton }....................
EllipsisType: type = type(Ellipsis)
'''
Type of the ``Ellipsis`` singleton.
'''
NoneType: type = type(None)
'''
Type of the ``None`` singleton.
Curiously, although the type of the ``None`` object is a class object whose
``__name__`` attribute is ``NoneType``, there exists no globally accessible
class by that name. To circumvents this obvious oversight, this global globally
exposes this class.
This class is principally useful for annotating both:
* Callable parameters accepting ``None`` as a valid value.
* Callables returning ``None`` as a valid value.
Note that, for obscure and uninteresting reasons, the standard :mod:`types`
module defined the same type with the same name under Python 2.x but *not* 3.x.
Depressingly, this type must now be manually redefined everywhere.
'''
NotImplementedType: type = type(NotImplemented) # type: ignore[misc]
'''
Type of the ``NotImplemented`` singleton.
'''
# ....................{ TYPES ~ call }....................
CallablePartialType = _functools.partial
'''
Type of all **pure-Python partial callables** (i.e., callables dynamically
wrapped by the function-like :class:`functools.partial` class, implemented in
pure Python).
Caveats
----------
This type does *not* distinguish between whether the original callable wrapped
by :class:`functools.partial` is C-based or pure Python -- only that some
callable of indeterminate origin is in fact wrapped.
'''
CallableCodeObjectType: Any = type((lambda: None).__code__)
'''
Type of all **code objects** (i.e., C-based objects underlying all pure-Python
callables to which those callables are compiled for efficiency).
'''
# If the active Python interpreter targets Python >= 3.8...
if IS_PYTHON_AT_LEAST_3_8:
    # Defer version-specific imports.
    from types import CellType as _CellType  # type: ignore[attr-defined]

    # Alias this type to this standard type.
    #
    # Note that this is explicitly required for "nuitka" support, which
    # supports this standard type but *NOT* the non-standard approach used to
    # deduce this type under Python 3.7 leveraged below.
    ClosureVarCellType = _CellType
    '''
    Type of all **pure-Python closure cell variables.**
    '''
# Else, the active Python interpreter only targets Python 3.7. In this case...
else:
    def _closure_varcell_factory():
        '''
        Arbitrary function returning a closure-specific cell variable exposed
        by an arbitrary closure isolated to this function.
        '''

        # Arbitrary outer local variable, closed over by the closure below
        # purely so that this closure owns at least one cell variable.
        cell_variable = 42

        def closure():
            '''
            Arbitrary closure exposing a closure-specific cell variable.
            '''

            nonlocal cell_variable

        # Return this closure's first (and only) cell variable. The pyright
        # suppression is required because "__closure__" is typed as optional.
        return closure.__closure__[0]  # pyright: ignore[reportOptionalSubscript]

    # Although Python >= 3.7 now exposes an explicit closure cell variable type
    # via the standard "types.CellType" object, this is of no benefit to older
    # versions of Python. Ergo, the type of an arbitrary method wrapper
    # guaranteed to *ALWAYS* exist is obtained instead.
    ClosureVarCellType = type(_closure_varcell_factory())  # type: ignore[misc]
    '''
    Type of all **pure-Python closure cell variables.**
    '''

    # Delete this factory function for safety.
    del _closure_varcell_factory
# ....................{ TYPES ~ call : exception }....................
# Alias of the standard "types.TracebackType" type.
ExceptionTracebackType = _TracebackType
'''
Type of all **traceback objects** (i.e., C-based objects comprising the full
stack traces associated with raised exceptions).
'''


# Alias of the standard "types.FrameType" type.
CallableFrameType = _FrameType
'''
Type of all **call stack frame objects** (i.e., C-based objects
encapsulating each call to each callable on the current call stack).
'''
# ....................{ TYPES ~ call : function }....................
FunctionType = _FunctionType
'''
Type of all **pure-Python functions** (i.e., functions implemented in Python
*not* associated with an owning class or instance of a class).
Caveats
----------
**This type ambiguously matches many callables not commonly associated with
standard functions,** including:
* **Lambda functions.** Of course, distinguishing between conventional named
functions and unnamed lambda functions would usually be seen as overly
specific. So, this ambiguity is *not* necessarily a bad thing.
* **Unbound instance methods** (i.e., instance methods accessed on their
declaring classes rather than bound instances).
* **Static methods** (i.e., methods decorated with the builtin
:func:`staticmethod` decorator, regardless of those methods are accessed on
their declaring classes or associated instances).
**This type matches no callables whatsoever under some non-CPython
interpreters,** including:
* PyPy, which unconditionally compiles *all* pure-Python functions into C-based
functions. Ergo, under PyPy, *all* functions are guaranteed to be of the type
:class:`FunctionOrMethodCType` regardless of whether those functions were
initially defined in Python or C.
See Also
----------
:class:`MethodBoundInstanceOrClassType`
Type of all pure-Python bound instance and class methods.
'''
FunctionOrMethodCType = _BuiltinFunctionType
'''
Type of all **C-based callables** (i.e., functions and methods implemented with
low-level C rather than high-level Python, typically either in third-party C
extensions, official stdlib C extensions, or the active Python interpreter
itself).
'''
# ....................{ TYPES ~ call : method : bound }....................
MethodBoundInstanceOrClassType = _MethodType
'''
Type of all **pure-Python bound instance and class methods** (i.e., methods
implemented in pure Python, bound to either instances of classes or classes and
implicitly passed those instances or classes as their first parameters).
Caveats
----------
There exists *no* corresponding :class:`MethodUnboundInstanceType` type, as
unbound pure-Python instance methods are ambiguously implemented as functions
of type :class:`FunctionType` indistinguishable from conventional functions.
Indeed, `official documentation <PyInstanceMethod_Type documentation_>`__ for
the ``PyInstanceMethod_Type`` C type explicitly admits that:
This instance of PyTypeObject represents the Python instance method type.
It is not exposed to Python programs.
.. _PyInstanceMethod_Type documentation:
https://docs.python.org/3/c-api/method.html#c.PyInstanceMethod_Type
'''
# Although Python >= 3.7 now exposes an explicit method wrapper type via the
# standard "types.MethodWrapperType" object, this is of no benefit to older
# versions of Python. Ergo, the type of an arbitrary method wrapper guaranteed
# to *ALWAYS* exist is obtained instead.
MethodBoundInstanceDunderCType: Any = type(''.__add__)
'''
Type of all **C-based bound method wrappers** (i.e., callable objects
implemented in low-level C, associated with special methods of builtin types
when accessed as instance rather than class attributes).
See Also
----------
:class:`MethodUnboundInstanceDunderCType`
Type of all C-based unbound dunder method wrapper descriptors.
'''
# ....................{ TYPES ~ call : method : unbound }....................
# Although Python >= 3.7 now exposes an explicit method wrapper type via the
# standard "types.ClassMethodDescriptorType" object, this is of no benefit to
# older versions of Python. Ergo, the type of an arbitrary method descriptor
# guaranteed to *ALWAYS* exist is obtained instead.
MethodUnboundClassCType: Any = type(dict.__dict__['fromkeys'])
'''
Type of all **C-based unbound class method descriptors** (i.e., callable objects
implemented in low-level C, associated with class methods of builtin types when
accessed with the low-level :attr:`object.__dict__` dictionary rather than as
class or instance attributes).
Despite being unbound, class method descriptors remain callable (e.g., by
explicitly passing the intended ``cls`` objects as their first parameters).
'''
# Although Python >= 3.7 now exposes an explicit method wrapper type via the
# standard "types.WrapperDescriptorType" object, this is of no benefit to older
# versions of Python. Ergo, the type of an arbitrary method descriptor
# guaranteed to *ALWAYS* exist is obtained instead.
MethodUnboundInstanceDunderCType: Any = type(str.__add__)
'''
Type of all **C-based unbound dunder method wrapper descriptors** (i.e.,
callable objects implemented in low-level C, associated with dunder methods of
builtin types when accessed as class rather than instance attributes).
Despite being unbound, method descriptor wrappers remain callable (e.g., by
explicitly passing the intended ``self`` objects as their first parameters).
See Also
----------
:class:`MethodBoundInstanceDunderCType`
Type of all C-based unbound dunder method wrappers.
:class:`MethodUnboundInstanceNondunderCType`
Type of all C-based unbound non-dunder method descriptors.
'''
# Although Python >= 3.7 now exposes an explicit method wrapper type via the
# standard "types.MethodDescriptorType" object, this is of no benefit to older
# versions of Python. Ergo, the type of an arbitrary method descriptor
# guaranteed to *ALWAYS* exist is obtained instead.
MethodUnboundInstanceNondunderCType: Any = type(str.upper)
'''
Type of all **C-based unbound non-dunder method descriptors** (i.e., callable
objects implemented in low-level C, associated with non-dunder methods of
builtin types when accessed as class rather than instance attributes).
Despite being unbound, method descriptors remain callable (e.g., by explicitly
passing the intended ``self`` objects as their first parameters).
See Also
----------
:class:`MethodUnboundInstanceDunderCType`
Type of all C-based unbound dunder method wrapper descriptors.
'''
MethodUnboundPropertyNontrivialCExtensionType = _GetSetDescriptorType
'''
Type of all **C extension-specific unbound non-trivial property method
descriptors** (i.e., uncallable objects implemented in low-level C extensions,
associated with **non-trivial property methods** (i.e., wrapping underlying
attributes that are *not* trivially convertible to C types) of C extensions when
accessed with the low-level :attr:`object.__dict__` dictionary rather than as
class or instance attributes).
'''
MethodUnboundPropertyTrivialCExtensionType = _MemberDescriptorType
'''
Type of all **C extension-specific unbound trivial property method descriptors**
(i.e., uncallable objects implemented in low-level C extensions, associated with
**trivial property methods** (i.e., wrapping underlying attributes that are
trivially convertible to C types) of C extensions when accessed with the
low-level :attr:`object.__dict__` dictionary rather than as class or instance
attributes).
'''
# ....................{ TYPES ~ call : method : decorator }....................
MethodDecoratorClassType = classmethod
'''
Type of all **C-based unbound class method descriptors** (i.e., non-callable
instances of the builtin :class:`classmethod` decorator class implemented in
low-level C, associated with class methods implemented in pure Python, and
accessed with the low-level :attr:`object.__dict__` dictionary rather than as
class or instance attributes).
Caveats
----------
Class method objects are *only* directly accessible via the low-level
:attr:`object.__dict__` dictionary. When accessed as class or instance
attributes, class methods reduce to instances of the standard
:class:`MethodBoundInstanceOrClassType` type.
Class method objects are *not* callable, as their implementations fail to
define the ``__call__`` dunder method.
'''
MethodDecoratorPropertyType = property
'''
Type of all **C-based unbound property method descriptors** (i.e., non-callable
instances of the builtin :class:`property` decorator class implemented in
low-level C, associated with property getter and setter methods implemented in
pure Python, and accessed as class rather than instance attributes).
Caveats
----------
Property objects are directly accessible both as class attributes *and* via the
low-level :attr:`object.__dict__` dictionary. Property objects are *not*
accessible as instance attributes, for hopefully obvious reasons.
Property objects are *not* callable, as their implementations fail to define
the ``__call__`` dunder method.
'''
MethodDecoratorStaticType = staticmethod
'''
Type of all **C-based unbound static method descriptors** (i.e., non-callable
instances of the builtin :class:`classmethod` decorator class implemented in
low-level C, associated with static methods implemented in pure Python, and
accessed with the low-level :attr:`object.__dict__` dictionary rather than as
class or instance attributes).
Caveats
----------
Static method objects are *only* directly accessible via the low-level
:attr:`object.__dict__` dictionary. When accessed as class or instance
attributes, static methods reduce to instances of the standard
:class:`FunctionType` type.
Static method objects are *not* callable, as their implementations fail to
define the ``__call__`` dunder method.
'''
# ....................{ TYPES ~ call : return : async }....................
AsyncGeneratorCType = _AsyncGeneratorType
'''
C-based type returned by all **asynchronous pure-Python generators** (i.e.,
callables implemented in pure Python containing one or more ``yield``
statements whose declaration is preceded by the ``async`` keyword).
Caveats
----------
**This is not the type of asynchronous generator callables** but rather the
type implicitly created and *returned* by these callables. Since these
callables are simply callables subject to syntactic sugar, the type of these
callables is simply :data:`CallableTypes`.
'''
AsyncCoroutineCType = _CoroutineType
'''
C-based type returned by all **asynchronous coroutines** (i.e., callables
implemented in pure Python *not* containing one or more ``yield`` statements
whose declaration is preceded by the ``async`` keyword).
Caveats
----------
**This is not the type of asynchronous coroutine callables** but rather the
type implicitly created and *returned* by these callables. Since these
callables are simply callables subject to syntactic sugar, the type of these
callables is simply :data:`CallableTypes`.
'''
# ....................{ TYPES ~ call : return : generator }....................
GeneratorType = _Generator
'''
Type of all **C- and Python-based generator objects** (i.e., iterators
implementing the :class:`collections.abc.Generator` protocol), including:
* Pure-Python subclasses of the :class:`collections.abc.Generator` superclass.
* C-based generators returned by pure-Python callables containing one or more
``yield`` statements.
* C-based generator comprehensions created by pure-Python syntax delimited by
``(`` and ``)``.
Caveats
----------
**This is not the type of generator callables** but rather the type implicitly
created and *returned* by these callables. Since these callables are simply
callables subject to syntactic sugar, the type of these callables is simply
:data:`CallableTypes`.
See Also
----------
:class:`GeneratorCType`
Subtype of all C-based generators.
'''
GeneratorCType = _GeneratorType
'''
C-based type returned by all **pure-Python generators** (i.e., callables
implemented in pure Python containing one or more ``yield`` statements,
implicitly converted at runtime to return a C-based iterator of this type) as
well as the C-based type of all **pure-Python generator comprehensions** (i.e.,
``(``- and ``)``-delimited syntactic sugar implemented in pure Python, also
implicitly converted at runtime to return a C-based iterator of this type).
Caveats
----------
**This is not the type of generator callables** but rather the type implicitly
created and *returned* by these callables. Since these callables are simply
callables subject to syntactic sugar, the type of these callables is simply
:data:`CallableTypes`.
This special-purpose type is a subtype of the more general-purpose
:class:`GeneratorType`. Whereas the latter applies to *all* generators
implementing the :class:`collections.abc.Generator` protocol, the former only
applies to generators implicitly created by Python itself.
'''
# ....................{ TYPES ~ class }....................
ClassDictType = type(vars(type))
'''
Type of all **pure-Python class dictionaries** (i.e., the immutable mapping
type officially referred to as a "mapping proxy," underlying the ``__dict__``
attribute of all classes and exposing class attributes in a read-only manner).
'''
# ....................{ TYPES ~ data }....................
ContainerType = _Container
'''
Type of all **containers** (i.e., concrete instances of the abstract
:class:`collections.abc.Container` base class as well as arbitrary objects
whose classes implement all abstract methods declared by that base class
regardless of whether those classes actually subclass that base class).
Caveats
----------
This type ambiguously matches both:
* **Explicit container subtypes** (i.e., concrete subclasses of the
:class:`collections.abc.Container` abstract base class (ABC)).
* **Structural container subtypes** (i.e., arbitrary classes implementing the
abstract ``__contains__`` method declared by that ABC *without* subclassing
that ABC), as formalized by :pep:`544`. Notably, since the **NumPy array
type** (i.e., :class:`numpy.ndarray`) defines that method, this type magically
matches the NumPy array type as well.
Of course, distinguishing between explicit and structural subtypes would
usually be seen as overly specific. So, this ambiguity is *not* necessarily a
BadThing™.
What is a BadThing™ is that container ABCs violate the "explicit is better than
implicit" maxim of `PEP 20 -- The Zen of Python <PEP 20_>`__ by intentionally
deceiving you for your own benefit, which you of course appreciate. Thanks to
arcane dunder magics buried in the :class:`abc.ABCMeta` metaclass, the
:func:`isinstance` and :func:`issubclass` builtin functions (which the
:func:`beartype.beartype` decorator internally defers to) ambiguously mistype
structural container subtypes as explicit container subtypes:
.. code-block:: python
>>> from collections.abc import Container
>>> class FakeContainer(object):
... def __contains__(self, obj): return True
>>> FakeContainer.__mro__
... (FakeContainer, object)
>>> issubclass(FakeContainer, Container)
True
>>> isinstance(FakeContainer(), Container)
True
.. _PEP 20:
https://www.python.org/dev/peps/pep-0020
'''
IterableType = _Iterable
'''
Type of all **iterables** (i.e., both concrete and structural instances of the
abstract :class:`collections.abc.Iterable` base class).
Iterables are containers that may be indirectly iterated over by calling the
:func:`iter` builtin, which internally calls the ``__iter__()`` dunder methods
implemented by these containers, which return **iterators** (i.e., instances of
the :class:`IteratorType` type), which directly support iteration.
This type also matches **NumPy arrays** (i.e., instances of the concrete
:class:`numpy.ndarray` class) via structural subtyping.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
:class:`IteratorType`
Further details on iteration.
'''
IteratorType = _Iterator
'''
Type of all **iterators** (i.e., both concrete and structural instances of
the abstract :class:`collections.abc.Iterator` base class; objects iterating
over associated data streams, which are typically containers).
Iterators implement at least two dunder methods:
* ``__next__()``, iteratively returning successive items from associated data
streams (e.g., container objects) until throwing standard
:data:`StopIteration` exceptions on reaching the ends of those streams.
* ``__iter__()``, returning themselves. Since iterables (i.e., instances of the
:class:`IterableType` type) are *only* required to implement the
``__iter__()`` dunder method, all iterators are by definition iterables as
well.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
:class:`IterableType`
Further details on iteration.
'''
SizedType = _Sized
'''
Type of all **sized containers** (i.e., both concrete and structural instances
of the abstract :class:`collections.abc.Sized` base class; containers defining
the ``__len__()`` dunder method internally called by the :func:`len` builtin).
This type also matches **NumPy arrays** (i.e., instances of the concrete
:class:`numpy.ndarray` class) via structural subtyping.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
'''
CollectionType = _Collection
'''
Type of all **collections** (i.e., both concrete and structural instances of
the abstract :class:`collections.abc.Collection` base class; sized iterable
containers defining the ``__contains__()``, ``__iter__()``, and ``__len__()``
dunder methods).
This type also matches **NumPy arrays** (i.e., instances of the concrete
:class:`numpy.ndarray` class) via structural subtyping.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
'''
QueueType = _deque
'''
Type of all **double-ended queues** (i.e., instances of the concrete
:class:`collections.deque` class, the only queue type defined by the Python
stdlib).
Caveats
----------
The :mod:`collections.abc` subpackage currently provides no corresponding
abstract interface to formalize queue types. Double-ended queues are it, sadly.
'''
SetType = _Set
'''
Type of all **set-like containers** (i.e., both concrete and structural
instances of the abstract :class:`collections.abc.Set` base class; containers
guaranteeing uniqueness across all contained items).
This type matches both the standard :class:`set` and :class:`frozenset` types
*and* the types of the :class:`dict`-specific views returned by the
:meth:`dict.items` and :meth:`dict.keys` (but *not* :meth:`dict.values`)
methods.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
'''
# ....................{ TYPES ~ data : mapping }....................
HashableType = _Hashable
'''
Type of all **hashable objects** (i.e., both concrete and structural instances
of the abstract :class:`collections.abc.Hashable` base class; objects
implementing the ``__hash__()`` dunder method required for all dictionary keys
and set items).
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
'''
MappingType = _Mapping
'''
Type of all **mutable** and **immutable mappings** (i.e., both concrete and
structural instances of the abstract :class:`collections.abc.Mapping` base
class; dictionary-like containers containing key-value pairs mapping from
hashable keys to corresponding values).
Caveats
----------
**This type does not guarantee mutability** (i.e., the capacity to modify
instances of this type after instantiation). This type ambiguously matches both
mutable mapping types (e.g., :class:`dict`) and immutable mapping types (e.g.,
:class:`ClassDictType`). Where mutability is required, prefer the non-ambiguous
:class:`MappingMutableType` type instead.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
'''
MappingMutableType = _MutableMapping
'''
Type of all **mutable mappings** (i.e., both concrete and structural instances
of the abstract :class:`collections.abc.MutableMapping` base class;
dictionary-like containers permitting modification of contained key-value
pairs).
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
:class:`MappingType`
Type of all mutable and immutable mappings.
'''
# ....................{ TYPES ~ data : sequence }....................
SequenceType = _Sequence
'''
Type of all **mutable** and **immutable sequences** (i.e., both concrete and
structural instances of the abstract :class:`collections.abc.Sequence` base
class; reversible collections whose items are efficiently accessible but *not*
necessarily modifiable with 0-based integer-indexed lookup).
Caveats
----------
**This type does not guarantee mutability** (i.e., the capacity to modify
instances of this type after instantiation). This type ambiguously matches both
mutable sequence types (e.g., :class:`list`) and immutable sequence types
(e.g., :class:`tuple`). Where mutability is required, prefer the non-ambiguous
:class:`SequenceMutableType` type instead.
**This type matches the string type (i.e., :class:`str`),** which satisfies the
:class:`collections.abc.Sequence` API but *not* the
:class:`collections.abc.MutableSequence` API. Where **non-string sequences**
(i.e., sequences that are anything but strings) are required, prefer the
non-ambiguous :class:`SequenceMutableType` type instead.
**This type does not match NumPy arrays (i.e., instances of the concrete
:class:`numpy.ndarray` class),** which satisfy most but *not* all of the
:class:`collections.abc.Sequence` API. Specifically, NumPy arrays fail to
define:
* The ``__reversed__`` dunder method.
* The ``count`` public method.
* The ``index`` public method.
Most callables accepting sequences *never* invoke these edge-case methods and
should thus be typed to accept NumPy arrays as well. To do so, prefer either:
* The :class:`beartype.cave.SequenceOrNumpyArrayTypes` tuple of types matching
both sequences and NumPy arrays.
* The :class:`beartype.cave.SequenceMutableOrNumpyArrayTypes` tuple of types
matching both mutable sequences and NumPy arrays.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
'''
SequenceMutableType = _MutableSequence
'''
Type of all **mutable sequences** (i.e., both concrete and structural instances
of the abstract :class:`collections.abc.Sequence` base class; reversible
collections whose items are both efficiently accessible *and* modifiable with
0-based integer-indexed lookup).
Caveats
----------
**This type does not match NumPy arrays (i.e., instances of the concrete
:class:`numpy.ndarray` class),** which satisfy most but *not* all of the
:class:`collections.abc.MutableSequence` API. Specifically, NumPy arrays fail
to define:
* The ``__reversed__`` dunder method.
* The ``append`` public method.
* The ``count`` public method.
* The ``extend`` public method.
* The ``index`` public method.
* The ``insert`` public method.
* The ``pop`` public method.
* The ``remove`` public method.
* The ``reverse`` public method.
Most callables accepting mutable sequences *never* invoke these edge-case
methods and should thus be typed to accept NumPy arrays as well. To do so,
prefer the :class:`beartype.cave.SequenceMutableOrNumpyArrayTypes` tuple of
types matching both mutable sequences and NumPy arrays.
See Also
----------
:class:`ContainerType`
Further details on structural subtyping.
:class:`SequenceType`
Further details on sequences.
'''
# ....................{ TYPES ~ enum }....................
# Enumeration types are sufficiently obscure to warrant formalization here.
EnumType = _EnumMeta
'''
Type of all **enumeration types** (i.e., metaclass of all classes containing
all enumeration members comprising those enumerations).
Motivation
----------
This type is commonly used to validate callable parameters as enumerations. In
recognition of its popularity, this type is intentionally named ``EnumType``
rather than ``EnumMetaType``. While the latter *would* technically be less
ambiguous, the former has the advantage of inviting correctness throughout
downstream codebases -- a less abundant resource.
Why? Because *all* enumeration types are instances of this type rather than the
:class:`Enum` class despite being superficially defined as instances of the
:class:`Enum` class. Thanks to metaclass abuse, enumeration types do *not*
adhere to standard Pythonic semantics. Notably, the following non-standard
invariants hold across *all* enumerations:
.. code-block:: python
>>> from enum import Enum
>>> GyreType = Enum(
... 'GyreType', ('THE', 'FALCON', 'CANNOT', 'HEAR', 'THE', 'FALCONER'))
>>> from beartype import cave
>>> isinstance(GyreType, Enum)
False
>>> isinstance(GyreType, cave.EnumType)
True
>>> isinstance(GyreType, cave.ClassType)
True
>>> isinstance(GyreType.FALCON, cave.EnumType)
False
>>> isinstance(GyreType.FALCON, cave.EnumMemberType)
True
>>> isinstance(GyreType.FALCON, cave.ClassType)
False
Yes, this is insane. Yes, this is Python.
'''
EnumMemberType = _Enum
'''
Type of all **enumeration members** (i.e., abstract base class of all
alternative choices defined as enumeration fields).
Caveats
----------
When type checking callable parameters, this class should *only* be referenced
where the callable permissively accepts any enumeration member type rather than
a specific enumeration member type. In the latter case, that type is simply
that enumeration's type and should be directly referenced as such: e.g.,
>>> from enum import Enum
>>> from beartype import beartype
>>> EndymionType = Enum('EndymionType', ('BEAUTY', 'JOY',))
>>> @beartype
... def our_feet_were_soft_in_flowers(superlative: EndymionType) -> str:
... return str(superlative).lower()
'''
# ....................{ TYPES ~ hint }....................
# Define this type as either...
HintGenericSubscriptedType: Any = (
# If the active Python interpreter targets at least Python >= 3.9 and thus
# supports PEP 585, this type;
type(list[str]) # type: ignore[misc]
if IS_PYTHON_AT_LEAST_3_9 else
# Else, a placeholder type.
UnavailableType
)
'''
C-based type of all subscripted generics if the active Python interpreter
targets Python >= 3.9 *or* :class:`UnavailableType` otherwise.
Subscripted generics include:
* :pep:`585`-compliant **builtin type hints** (i.e., C-based type hints
instantiated by subscripting either a concrete builtin container class like
:class:`list` or :class:`tuple` *or* an abstract base class (ABC) declared by
the :mod:`collections.abc` submodule like :class:`collections.abc.Iterable`
or :class:`collections.abc.Sequence`). Since *all* :pep:`585`-compliant
builtin type hints are classes, this C-based type is the class of those
classes and thus effectively itself a metaclass. It's probably best not to
think about that.
* :pep:`484`-compliant **subscripted generics** (i.e., user-defined classes
subclassing one or more :pep:`484`-compliant type hints subsequently
subscripted by one or more PEP-compliant type hints).
* :pep:`585`-compliant **subscripted generics** (i.e., user-defined classes
subclassing one or more :pep:`585`-compliant type hints subsequently
subscripted by one or more PEP-compliant type hints).
Caveats
----------
**This low-level type ambiguously matches semantically unrelated PEP-compliant
type hints,** rendering this type all but useless for most practical purposes.
To distinguish between the various semantic types of hints ambiguously matched
by this type, higher-level PEP-specific functions *must* be called instead.
These include:
* :func:`beartype._util.hint.pep.proposal.pep484.utilpep484.is_hint_pep484_generic`,
detecting :pep:`484`-compliant generic type hints.
* :func:`beartype._util.hint.pep.proposal.utilpep585.is_hint_pep585_builtin`,
detecting :pep:`585`-compliant builtin type hints.
* :func:`beartype._util.hint.pep.proposal.utilpep585.is_hint_pep585_generic`,
detecting :pep:`585`-compliant generic type hints.
'''
# ....................{ TYPES ~ scalar }....................
StrType = str # Well, isn't that special.
'''
Type of all **unencoded Unicode strings** (i.e., instances of the builtin
:class:`str` class; sequences of abstract Unicode codepoints that have yet to
be encoded into physical encoded bytes in encoded byte strings).
This type matches:
* **Builtin Unicode strings** (i.e., :class:`str` instances).
* **NumPy Unicode strings** (i.e., :class:`numpy.str_` instances) if
:mod:`numpy` is importable. Whereas most NumPy scalar types do *not* subclass
builtin scalar types, the :class:`numpy.str_` class *does* subclass the
builtin :class:`str` type. NumPy Unicode strings are thus usable wherever
builtin Unicode strings are usable.
Caveats
----------
This type does *not* match **encoded byte strings** (i.e., sequences of
physical encoded bytes, including the builtin :class:`bytes` type), which
require foreknowledge of the encoding previously used to encode those bytes.
Unencoded Unicode strings require no such foreknowledge and are thus
incompatible with encoded byte strings at the API level.
This type only matches **builtin Unicode strings** (i.e., :class:`str`
instances) and instances of subtypes of that type (e.g., :class:`numpy.str_`,
the NumPy Unicode string type). Whereas the comparable :class:`BoolType`
matches arbitrary objects satisfying the boolean protocol (i.e., ``__bool__()``
dunder method) via structural subtyping, this type does *not* match arbitrary
objects satisfying the string protocol via structural subtyping -- because
there is no string protocol. While Python's data model does define a
``__str__()`` dunder method called to implicitly convert arbitrary objects into
strings, that method is called infrequently. As exhibited by the infamously
rejected :pep:`3140` proposal, the :meth:`list.__str__` implementation
stringifies list items by erroneously calling the unrelated ``__repr__()``
method rather than the expected ``__str__()`` method on those items. Moreover,
``__str__()`` fails to cover common string operations such as string
concatenation and repetition. Covering those operations would require a new
abstract base class (ABC) matching arbitrary objects satisfying the
:class:`Sequence` protocol as well as ``__str__()`` via structural subtyping;
while trivial, that ABC would then ambiguously match all builtin sequence types
(e.g., :class:`list`, :class:`tuple`) as string types, which they clearly are
not. In short, matching only :class:`str` is the *only* unambiguous means of
matching Unicode string types.
'''
# ....................{ TYPES ~ scalar : number }....................
NumberType = _numbers.Number
'''
Type of all **numbers** (i.e., concrete instances of the abstract
:class:`numbers.Number` base class).
This type effectively matches *all* numbers regardless of implementation,
including:
* **Integers** (i.e., real numbers expressible without fractional components),
including:
* **Builtin integers** (i.e., :class:`int` instances).
* **NumPy integers** (e.g., :class:`numpy.int_` instances), whose types are
all implicitly registered at :mod:`numpy` importation time as satisfying
the :class:`numbers.Integral` protocol.
* **SymPy integers** (e.g., :class:`sympy.core.numbers.Integer` instances),
whose type is implicitly registered at :mod:`sympy` importation time as
  satisfying the :class:`numbers.Integral` protocol.
* **Rational numbers** (i.e., real numbers expressible as the ratio of two
integers), including:
* **Builtin floating-point numbers** (i.e., :class:`float` instances).
* **NumPy floating-point numbers** (e.g., :class:`numpy.single` instances),
all of which are implicitly registered at :mod:`numpy` importation time as
:class:`numbers.Rational` subclasses.
* **Stdlib fractions** (i.e., :class:`fractions.Fraction` instances).
* **SymPy floating-point numbers** (e.g., :class:`sympy.core.numbers.Float`
  instances), whose type is implicitly registered at :mod:`sympy` importation
  time as satisfying the :class:`numbers.Real` protocol.
* **SymPy rational numbers** (e.g., :class:`sympy.core.numbers.Rational`
  instances), whose type is implicitly registered at :mod:`sympy` importation
  time as satisfying the :class:`numbers.Rational` protocol.
* **Irrational numbers** (i.e., real numbers *not* expressible as the ratio of
two integers), including:
* **SymPy irrational numbers** (i.e., SymPy-specific symbolic objects whose
``is_irrational`` assumption evaluates to ``True``).
Caveats
----------
This type does *not* match:
* **Stdlib decimals** (i.e., :class:`decimal.Decimal` instances), which support
both unrounded decimal (i.e., fixed-point arithmetic) and rounded
floating-point arithmetic. Despite being strictly rational, the
:class:`decimal.Decimal` class only subclasses the coarse-grained abstract
:class:`numbers.Number` base superclass rather than the fine-grained abstract
:class:`numbers.Rational` base subclass. So it goes.
* **SymPy complex numbers,** which are "non-atomic" (i.e., defined as the
combination of two separate real and imaginary components rather than as one
unified complex number containing these components) and thus incommensurable
with all of the above "atomic" types.
'''
NumberRealType = IntOrFloatType = _numbers.Real
'''
Type of all **real numbers** (i.e., concrete instances of the abstract
:class:`numbers.Real` base class; numbers expressible as linear values on the
real number line).
This type matches all numbers matched by :class:`NumberType` *except* complex
numbers with non-zero imaginary components, which (as the name implies) are
non-real.
Equivalently, this type matches all integers (e.g., :class:`int`,
:class:`numpy.int_`), floating-point numbers (e.g., :class:`float`,
:class:`numpy.single`), rational numbers (e.g., :class:`fractions.Fraction`,
:class:`sympy.core.numbers.Rational`), and irrational numbers. However,
rational and irrational numbers are rarely used in comparison to integers and
floating-point numbers. This type thus reduces to matching all integer and
floating-point types in practice and is thus also accessible under the alias
:class:`IntOrFloatType` -- a less accurate but more readable name than
:class:`NumberRealType`.
See Also
----------
:class:`NumberType`
Further details.
'''
IntType = _numbers.Integral
'''
Type of all **integers** (i.e., concrete instances of the abstract
:class:`numbers.Integral` base class; real numbers expressible without
fractional components).
This type matches all numbers matched by the :class:`NumberType` *except*
complex numbers with non-zero imaginary components, rational numbers with
denominators not equal to one, and irrational numbers.
Equivalently, this type matches all integers (e.g., :class:`int`,
:class:`numpy.int_`).
See Also
----------
:class:`NumberType`
Further details.
'''
# ....................{ TYPES ~ stdlib : re }....................
# Regular expression types are also sufficiently obscure to warrant
# formalization here.
# Since Python 3.8, the stdlib publicly exposes this type as "re.Pattern",
# used below. Under older Python versions, this type was only privately
# accessible via the non-portable "re._pattern_type" attribute; the public
# alias is both reliable and portable across all supported Python versions.
RegexCompiledType: type = _re.Pattern
'''
Type of all **compiled regular expressions** (i.e., objects created and
returned by the stdlib :func:`re.compile` function).
'''
# Yes, this type is required for type validation at module scope elsewhere.
# Since Python 3.8, the stdlib publicly exposes this type as "re.Match",
# obviating the need to indirectly obtain it from the private "_sre" C
# extension (whose classname was historically published as "_sre.SRE_Match"
# despite that extension providing no such type for pure-Python importation).
RegexMatchType: type = _re.Match
'''
Type of all **regular expression match objects** (i.e., objects returned by the
:func:`re.match` function).
'''
# ....................{ TUPLES ~ unavailable }....................
# Unavailable types are defined *BEFORE* any subsequent types, as the latter
# commonly leverage the former.
UnavailableTypes = _UnavailableTypesTuple()
'''
**Tuple of unavailable types** (i.e., types *not* available under the active
Python interpreter, typically due to insufficient Python version or
non-installed third-party dependencies).
Caveats
----------
**This tuple should always be used in lieu of the empty tuple.** Although
technically equivalent to the empty tuple, the :func:`beartype.beartype`
decorator explicitly distinguishes between this tuple and the empty tuple.
Specifically, for any callable parameter or return type annotated with:
* This tuple, :func:`beartype.beartype` emits a non-fatal warning ignorable
with a simple :mod:`warnings` filter.
* The empty tuple, :func:`beartype.beartype` raises a fatal exception.
'''
# ....................{ TUPLES ~ py }....................
ModuleOrStrTypes = (ModuleType, StrType)
'''
Tuple of both the module *and* string type.
'''
#FIXME: This is incomplete under Python >= 3.10, where isinstance() also
#accepts "|"-delimited unions of types (e.g., float | int | str) as formalized
#by PEP 604. Such unions are instances of the "types.UnionType" type, which
#should conditionally be appended to this tuple under Python >= 3.10.
TestableTypes = (ClassType, tuple)
'''
Tuple of all **testable types** (i.e., types suitable for use as the second
parameter passed to the :func:`isinstance` and :func:`issubclass` builtins).
'''
# ....................{ TUPLES ~ call }....................
FunctionTypes = (FunctionType, FunctionOrMethodCType,)
'''
Tuple of all **function types** (i.e., types whose instances are either
built-in or user-defined functions).
Caveats
----------
**This tuple may yield false positives when used to validate types.** Since
Python fails to distinguish between C-based functions and methods, this tuple
is the set of all function types as well as the ambiguous type of all C-based
functions and methods.
'''
# ....................{ TUPLES ~ call : method }....................
MethodBoundTypes = (
MethodBoundInstanceOrClassType, MethodBoundInstanceDunderCType)
'''
Tuple of all **bound method types** (i.e., types whose instances are callable
objects bound to either instances or classes).
'''
MethodUnboundTypes = (
MethodUnboundClassCType,
MethodUnboundInstanceDunderCType,
MethodUnboundInstanceNondunderCType,
)
'''
Tuple of all **unbound method types** (i.e., types whose instances are callable
objects bound to neither instances nor classes).
Unbound decorator objects (e.g., non-callable instances of the builtin
:class:`classmethod`, :class:`property`, or :class:`staticmethod` decorator
classes) are *not* callable and thus intentionally excluded.
'''
MethodDecoratorBuiltinTypes = (
MethodDecoratorClassType,
MethodDecoratorPropertyType,
MethodDecoratorStaticType,
)
'''
Tuple of all **C-based unbound method decorator types** (i.e., builtin decorator
types implemented in low-level C whose instances are typically uncallable,
associated with callable methods implemented in pure Python).
'''
MethodDescriptorTypes = (
# @classmethod, @staticmethod, and @property descriptor types.
MethodDecoratorBuiltinTypes + (
# Method descriptor type.
MethodBoundInstanceOrClassType,
)
)
'''
Tuple of all **C-based unbound method descriptor types** (i.e., builtin types
implemented in low-level C whose instances are typically uncallable, associated
with callable methods implemented in pure Python).
This tuple matches the types of all:
* **Class method descriptors** (i.e., methods decorated by the builtin
:class:`classmethod` decorator).
* Instance method descriptors (i.e., methods *not* decorated by a builtin method
decorator).
* **Property method descriptors** (i.e., methods decorated by the builtin
:class:`property` decorator).
* **Static method descriptors** (i.e., methods decorated by the builtin
:class:`staticmethod` decorator).
'''
MethodTypes = (FunctionOrMethodCType,) + MethodBoundTypes + MethodUnboundTypes
'''
Tuple of all **method types** (i.e., types whose instances are callable objects
associated with methods implemented in either low-level C or pure Python).
Unbound decorator objects (e.g., non-callable instances of the builtin
:class:`classmethod`, :class:`property`, or :class:`staticmethod` decorator
classes) are *not* callable and thus intentionally excluded.
Caveats
----------
**This tuple may yield false positives when used to validate types.** Since
Python fails to distinguish between C-based functions and methods, this tuple
is the set of all pure-Python bound and unbound method types as well as the
ambiguous type of all C-based bound methods and non-method functions.
'''
# ....................{ TUPLES ~ call : callable }....................
# For DRY, this tuple is defined by deduplicating the concatenation of all
# function and method types defined above.
#
# While this tuple could also be defined as the simple concatenation of the
# "FunctionTypes" and "MethodTypes" tuples, doing so would duplicate all types
# ambiguously residing in both tuples (i.e., "FunctionOrMethodCType"). Doing so
# would induce inefficiencies during type checking. That would be bad.
#
# Unlike the obvious set-based alternative (e.g., "tuple(set(...) | set(...))"),
# dict.fromkeys() preserves insertion order, guaranteeing a deterministic tuple
# across interpreter invocations.
CallableTypes = tuple(dict.fromkeys(FunctionTypes + MethodTypes))
'''
Tuple of all **callable types** (i.e., types whose instances are callable
objects implemented in either low-level C or high-level Python, including both
built-in and user-defined functions, lambdas, methods, and method descriptors).
'''
CallableCTypes = (
FunctionOrMethodCType,
MethodBoundInstanceDunderCType,
MethodUnboundInstanceDunderCType,
MethodUnboundInstanceNondunderCType,
MethodUnboundClassCType,
)
'''
Tuple of all **C-based callable types** (i.e., types whose instances are
callable objects implemented in low-level C rather than high-level Python).
'''
CallablePyTypes = (
FunctionType,
MethodBoundInstanceOrClassType,
)
'''
Tuple of all **pure-Python callable types** (i.e., types whose instances are
callable objects implemented in high-level Python rather than low-level C).
**This tuple is empty under PyPy,** which unconditionally compiles *all*
pure-Python callables into C-based callables.
'''
CallableOrClassTypes = CallableTypes + (ClassType,)
'''
Tuple of all callable types as well as the type of all types.
'''
CallableOrStrTypes = CallableTypes + (StrType,)
'''
Tuple of all callable types as well as the string type.
'''
#FIXME: Define a new "CallableClassType" by copying the "BoolType" approach
#except for the __call__() dunder method instead.
#FIXME: Replace "ClassType" below by "CallableClassType".
#FIXME: Add the "CallableClassType" type to the "CallableTypes" tuple as well.
DecoratorTypes = CallableTypes + (ClassType,)
'''
Tuple of all **decorator types** (i.e., both callable classes *and* the type of
those classes).
Caveats
----------
**This tuple may yield false positives when used to validate types.** Since
classes themselves may be callable (i.e., by defining the special ``__call__``
method), this tuple is the set of all standard callable types as well as that
of classes. In particular, this tuple describes all types permissible for use
as decorators. Since most classes are *not* callable, however, this tuple may
yield false positives when passed classes.
'''
# ....................{ TUPLES ~ call : return }....................
AsyncCTypes = (AsyncGeneratorCType, AsyncCoroutineCType)
'''
Tuple of all C-based types returned by all **asynchronous callables** (i.e.,
callables implemented in pure Python whose declaration is preceded by the
``async`` keyword).
'''
# ....................{ TUPLES ~ scalar }....................
BoolOrNumberTypes = (BoolType, NumberType,)
'''
Tuple of all **boolean** and **number types** (i.e., classes whose instances
are either numbers or types trivially convertible into numbers).
This tuple matches booleans, integers, rational numbers, irrational numbers,
real numbers, and complex numbers.
Booleans are trivially convertible into integers. While details differ by
implementation, common implementations in lower-level languages (e.g., C, C++,
Perl) typically implicitly convert:
* ``False`` to ``0`` and vice versa.
* ``True`` to ``1`` and vice versa.
'''
# ....................{ TUPLES ~ post-init : container }....................
# Tuples of types assuming the above initialization to have been performed.
MappingOrSequenceTypes = (MappingType, SequenceType)
'''
Tuple of all container base classes conforming to (but *not* necessarily
subclassing) the canonical :class:`collections.abc.Mapping` *or*
:class:`collections.abc.Sequence` APIs.
'''
ModuleOrSequenceTypes = (ModuleType, SequenceType)
'''
Tuple of the module type *and* all container base classes conforming to (but
*not* necessarily subclassing) the canonical :class:`collections.abc.Sequence`
API.
'''
NumberOrIterableTypes = (NumberType, IterableType,)
'''
Tuple of all numeric types *and* all container base classes conforming to (but
*not* necessarily subclassing) the canonical :class:`collections.abc.Iterable`
API.
'''
NumberOrSequenceTypes = (NumberType, SequenceType,)
'''
Tuple of all numeric types *and* all container base classes conforming to (but
*not* necessarily subclassing) the canonical :class:`collections.abc.Sequence`
API.
'''
# ....................{ TUPLES ~ post-init : scalar }....................
ScalarTypes = BoolOrNumberTypes + (StrType,)
'''
Tuple of all **scalar types** (i.e., classes whose instances are atomic scalar
primitives).
This tuple matches all:
* **Boolean types** (i.e., types satisfying the :class:`BoolType` protocol).
* **Numeric types** (i.e., types satisfying the :class:`NumberType` protocol).
* **Textual types** (i.e., types contained in the :class:`StrTypes` tuple).
'''
# ....................{ TUPLES ~ stdlib }....................
RegexTypes = (RegexCompiledType, StrType)
'''
Tuple of all **regular expression-like types** (i.e., types either defining
regular expressions or losslessly convertible to such types).
This tuple matches:
* The **compiled regular expression type** (i.e., type of all objects created
and returned by the stdlib :func:`re.compile` function).
* All **textual types** (i.e., types contained in the :class:`StrTypes`
tuple).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype cave-specific abstract base classes (ABCs).**
'''
# ....................{ TODO }....................
#FIXME: As with the parallel "beartype._cave.abc" submodule, refactor the
#contents of this private submodule into the newly proposed public
#"beartype.caver" submodule. To do so:
#
#* In the "beartype.caver" submodule:
# * Define a new make_type() function copied from the
# betse.util.type.classes.define_class() function (but renamed, obviously).
# * Define a new make_type_defaultdict() function copied from the
# betse.util.type.iterable.mapping.mapcls.DefaultDict() function, but with
# signature resembling:
# def make_type_defaultdict(
# name: str,
# missing_key_maker: CallableTypes,
# items: (Iterable, type(None)),
# ) -> type:
# Internally, this function should call make_type() to do so.
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeCaveNoneTypeOrKeyException,
BeartypeCaveNoneTypeOrMutabilityException,
)
from beartype._util.hint.nonpep.utilnonpeptest import (
die_unless_hint_nonpep)
from typing import Any, Tuple, Union
# ....................{ CONSTANTS }....................
_NoneType: type = type(None)
'''
Type of the ``None`` singleton, duplicated from the :mod:`beartype.cave`
submodule to prevent cyclic import dependencies.
'''
_NoneTypes: Tuple[type, ...] = (_NoneType,)
'''
Tuple of only the type of the ``None`` singleton.
'''
# ....................{ HINTS }....................
_TypeTuple = Tuple[Union[type, str], ...]
'''
PEP-compliant type hint matching a **type tuple** (i.e., tuple containing only
types and forward references to deferred types specified as the fully-qualified
names of those types).
'''
# ....................{ CLASSES }....................
class _NoneTypeOrType(dict):
    '''
    :class:`NoneType` **tuple factory type** (i.e., :class:`dict` subclass,
    instances of which are dictionaries mapping from arbitrary types or tuples
    of types to the same types or tuples of types concatenated with the type
    of the ``None`` singleton).
    '''

    # ..................{ DUNDERS }..................
    def __setitem__(self, key: Any, value: Any) -> None:
        '''
        Dunder method explicitly called by the superclass on setting the
        passed key-value pair with ``[``- and ``]``-delimited syntax.

        Specifically, this method prohibits external attempts to explicitly
        set key-value pairs on this factory by unconditionally raising an
        exception.

        Parameters
        ----------
        key : object
            Key to map this value to.
        value : object
            Value to be mapped to.

        Raises
        ----------
        BeartypeCaveNoneTypeOrMutabilityException
            Unconditionally.
        '''

        raise BeartypeCaveNoneTypeOrMutabilityException(
            f'{repr(self)} externally immutable (i.e., not settable).')


    def __missing__(self, hint: Union[type, str, _TypeTuple]) -> _TypeTuple:
        '''
        Dunder method explicitly called by the superclass
        :meth:`dict.__getitem__` method implicitly called on getting the
        passed missing key with ``[``- and ``]``-delimited syntax.

        Specifically, this method:

        * If a single type or string is passed:

          #. Creates a new 2-tuple containing only that object and the type of
             the ``None`` singleton.
          #. Maps the passed type to that 2-tuple.
          #. Returns that 2-tuple.

        * Else if a tuple of one or more types and/or strings is passed:

          #. Creates a new tuple appending the type of the ``None`` singleton
             to the passed tuple.
          #. Maps the passed type to the new tuple.
          #. Returns the new tuple.

        * Else, raises an exception.

        Parameters
        ----------
        hint : (type, str, _TypeTuple)
            Type, string, or tuple of one or more types and/or strings *not*
            currently cached by this factory.

        Returns
        ----------
        _TypeTuple
            Tuple of types appending the type of the ``None`` singleton to the
            passed type, string, or tuple of types and/or strings.

        Raises
        ----------
        BeartypeCaveNoneTypeOrKeyException
            If this key is neither:

            * A **string** (i.e., forward reference specified as either a
              fully-qualified or unqualified classname).
            * A **type** (i.e., class).
            * A **non-empty tuple** (i.e., semantic union of types) containing
              only strings and types.
        '''

        # If this missing key is *NOT* a PEP-noncompliant type hint, raise an
        # exception.
        die_unless_hint_nonpep(
            hint=hint,
            exception_prefix='"NoneTypeOr" key',
            exception_cls=BeartypeCaveNoneTypeOrKeyException,
        )

        # Tuple of types to be cached and returned by this call.
        hint_or_none: _TypeTuple = None  # type: ignore[assignment]

        # If this key is a type...
        if isinstance(hint, type):
            # If this type is "NoneType", reuse the existing "_NoneTypes" tuple
            # containing only this type.
            if hint is _NoneType:
                hint_or_none = _NoneTypes
            # Else, this type is *NOT* "NoneType". In this case, instantiate a
            # new tuple of types concatenating this type with "NoneType".
            else:
                hint_or_none = (hint, _NoneType)
        # Else if this key is a non-empty tuple...
        elif isinstance(hint, tuple):
            # If "NoneType" is already in this tuple, reuse this tuple as is.
            if _NoneType in hint:
                hint_or_none = hint
            # Else, "NoneType" is *NOT* already in this tuple. In this case,
            # instantiate a new tuple of types concatenating this tuple with
            # "NoneType".
            else:
                hint_or_none = hint + _NoneTypes
        # Else, this key is invalid. Thanks to the above call to the
        # die_unless_hint_nonpep() function, this should *NEVER* occur.
        # Nonetheless, raise a human-readable exception for sanity.
        else:
            raise BeartypeCaveNoneTypeOrKeyException(
                f'"NoneTypeOr" key {repr(hint)} unsupported.')

        # Explicitly cache this tuple against this key *BEFORE* returning.
        #
        # Note that the superclass dict.__getitem__() dunder method does *NOT*
        # implicitly store the value returned by this __missing__() method.
        # Failing to explicitly cache here would thus silently regenerate this
        # tuple on every access, defeating the purpose of this factory. Since
        # this subclass's __setitem__() implementation unconditionally raises
        # to enforce external immutability, defer directly to the superclass
        # implementation instead.
        super().__setitem__(hint, hint_or_none)

        # Return this new tuple.
        return hint_or_none
# ....................{ SINGLETONS }....................
NoneTypeOr: Any = _NoneTypeOrType()
'''
**:class:``NoneType`` tuple factory** (i.e., dictionary mapping from arbitrary
types or tuples of types to the same types or tuples of types concatenated with
the type of the ``None`` singleton).
This factory efficiently generates and caches tuples of types containing
:class:``NoneType`` from arbitrary user-specified types and tuples of types. To
do so, simply index this factory with any desired type *or* tuple of types; the
corresponding value will then be another tuple containing :class:``NoneType``
and that type *or* those types.
Motivation
----------
This factory is commonly used to type-hint **optional callable parameters**
(i.e., parameters defaulting to ``None`` when *not* explicitly passed by the
caller). Although such parameters may also be type-hinted with a tuple manually
containing :class:``NoneType``, doing so inefficiently recreates these tuples
for each optional callable parameter across the entire codebase.
This factory avoids such inefficient recreation. Instead, when indexed with any
arbitrary key:
* If that key has already been successfully accessed on this factory, this
factory returns the existing value (i.e., tuple containing
:class:``NoneType`` and that key if that key is a type *or* the items of that
key if that key is a tuple) previously mapped and cached to that key.
* Else, if that key is:
* A type, this factory:
#. Creates a new tuple containing that type and :class:``NoneType``.
#. Associates that key with that tuple.
#. Returns that tuple.
* A tuple of types, this factory:
#. Creates a new tuple containing these types and :class:``NoneType``.
#. Associates that key with that tuple.
#. Returns that tuple.
* Any other object, raises a human-readable
:class:`beartype.roar.BeartypeCaveNoneTypeOrKeyException` exception.
This factory is analogous to the :pep:`484`_-compliant :class:`typing.Optional`
type despite otherwise *not* complying with :pep:`484`_.
Examples
----------
# Function accepting an optional parameter with neither
# "beartype.cave" nor "typing".
>>> def to_autumn(season_of_mists: (str, type(None)) = None) -> str
... return season_of_mists if season_of_mists is not None else (
... 'While barred clouds bloom the soft-dying day,')
# Function accepting an optional parameter with "beartype.cave".
>>> from beartype.cave import NoneTypeOr
>>> def to_autumn(season_of_mists: NoneTypeOr[str] = None) -> str
... return season_of_mists if season_of_mists is not None else (
... 'Then in a wailful choir the small gnats mourn')
# Function accepting an optional parameter with "typing".
>>> from typing import Optional
>>> def to_autumn(season_of_mists: Optional[str] = None) -> str
... return season_of_mists if season_of_mists is not None else (
... 'Or sinking as the light wind lives or dies;')
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype cave-specific abstract base classes (ABCs).**
'''
# ....................{ TODO }....................
#FIXME: Refactor this private submodule into a new public "beartype.caver"
#submodule, so-named as it enables users to externally create new ad-hoc
#protocols implementing structural subtyping resembling those predefined by
#"beartype.cave". To do so:
#
#* In the "beartype.caver" submodule:
# * Define a new make_type_structural() function with signature resembling:
# def make_type_structural(name: str, method_names: Iterable) -> type:
# * Implement this function to dynamically create a new type with the passed
# classname defining:
# * Abstract methods with the passed method names.
# * A __subclasshook__() dunder method checking the passed class for
# concrete methods with these names.
# To do so, note that abstract methods *CANNOT* be dynamically
# monkey-patched in after class creation but *MUST* instead be statically
# defined at class creation time (due to metaclass shenanigans).
# Fortunately, doing so is trivial; simply use the three-argument form of
# the type() constructor, as demonstrated by this StackOverflow answer:
# https://stackoverflow.com/a/14219244/2809027
# * *WAIT!* There's no need to call the type() constructor diroctly. Instead,
# define a new make_type() function in this new submodule copied from the
# betse.util.type.classes.define_class() function (but renamed, obviously).
#* Replace the current manual definition of "_BoolType" below with an in-place
# call to that method from the "beartype.cave" submodule: e.g.,
# BoolType = _make_type_structural(
# name='BoolType', method_names=('__bool__',))
#
#Dis goin' be good.
#FIXME: Actually, don't do any of the above. That would simply be reinventing
#the wheel, as the "typing.Protocol" superclass already exists and is more than
#up to the task. In fact, once we drop support for Python < 3.7, we should:
#* Redefine the "_BoolType" class declared below should in terms of the
# "typing.Protocol" superclass.
#* Shift the "_BoolType" class directly into the "beartype.cave" submodule.
#* Refactor away this entire submodule.
# ....................{ IMPORTS }....................
from abc import ABCMeta, abstractmethod
# ....................{ FUNCTIONS }....................
def _check_methods(C: type, *methods: str):
'''
Private utility function called by abstract base classes (ABCs) implementing
structural subtyping by detecting whether the passed class or some
superclass of that class defines all of the methods with the passed method
names.
For safety, this function has been duplicated as is from its eponymous
counterpart in the private stdlib :mod:`_colletions_abc` module.
Parameters
----------
C : type
Class to be validated as defining these methods.
methods : Tuple[str, ...]
Tuple of the names of all methods to validate this class as defining.
Returns
----------
Either:
* ``True`` if this class defines all of these methods.
* ``NotImplemented`` if this class fails to define one or more of these
methods.
'''
mro = C.__mro__
for method in methods:
for B in mro: # pyright: ignore[reportGeneralTypeIssues]
if method in B.__dict__:
if B.__dict__[method] is None:
return NotImplemented
break
else:
return NotImplemented
return True
# ....................{ SUPERCLASSES }....................
class BoolType(object, metaclass=ABCMeta):
    '''
    Type of all **booleans** (i.e., objects defining the ``__bool__()`` dunder
    method; objects reducible in boolean contexts like ``if`` conditionals to
    either ``True`` or ``False``).

    This type matches:

    * **Builtin booleans** (i.e., instances of the standard :class:`bool`
      class implemented in low-level C).
    * **NumPy booleans** (i.e., instances of the :class:`numpy.bool_` class
      implemented in low-level C and Fortran) if :mod:`numpy` is importable.

    Usage
    ----------
    Non-standard boolean types like NumPy booleans are typically *not*
    interoperable with the standard :class:`bool` type. In particular,
    it is typically *not* the case, for any variable ``my_bool`` of
    non-standard boolean type and truthy value, that either ``my_bool is
    True`` or ``my_bool == True`` yield the desired results. Rather, such
    variables should *always* be coerced into the standard :class:`bool` type
    before being compared -- either:

    * Implicitly (e.g., ``if my_bool: pass``).
    * Explicitly (e.g., ``if bool(my_bool): pass``).

    Caveats
    ----------
    **There exists no abstract base class governing booleans in Python.**
    Although various Python Enhancement Proposals (PEPs) were authored on the
    subject, all were rejected as of this writing. Instead, this type
    trivially implements an ad-hoc abstract base class (ABC) detecting objects
    satisfying the boolean protocol via structural subtyping. Although no
    actual real-world classes subclass this :mod:`beartype`-specific ABC, the
    detection implemented by this ABC suffices to match *all* boolean types.

    See Also
    ----------
    :class:`beartype.cave.ContainerType`
        Further details on structural subtyping.
    '''

    # ..................{ DUNDERS }..................
    # This abstract base class (ABC) has been implemented a la standard
    # container ABCs in the private stdlib "_collections_abc" module (e.g.,
    # the trivial "_collections_abc.Sized" type).

    # Prevent instances of structural subclasses from allocating "__dict__"
    # attribute dictionaries, mirroring the stdlib container ABCs.
    __slots__ = ()

    # Abstract dunder method constraining this protocol. The trivial body is
    # never meaningfully called; only the method's *existence* matters to the
    # __subclasshook__() detection below.
    @abstractmethod
    def __bool__(self):
        return False

    @classmethod
    def __subclasshook__(cls, C):
        # Only perform structural detection for this exact ABC; subclasses
        # fall back to standard subclass semantics, per stdlib convention.
        if cls is BoolType:
            return _check_methods(C, '__bool__')
        return NotImplemented
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Unmemoized beartype decorator.**
This private submodule defines all core high-level logic underlying the
:func:`beartype.beartype` decorator, whose implementation in the parent
:mod:`beartype._decor._cache.cachedecor` submodule is a thin wrapper
efficiently memoizing closures internally created and returned by that
decorator. In turn, those closures directly defer to this submodule.
This private submodule is effectively the :func:`beartype.beartype` decorator
despite *not* actually being that decorator (due to being unmemoized).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeException,
BeartypeDecorWrappeeException,
BeartypeDecorWrapperException,
# BeartypeWarning,
)
from beartype.typing import no_type_check
from beartype._cave._cavefast import (
MethodDecoratorBuiltinTypes,
MethodDecoratorClassType,
MethodDecoratorPropertyType,
MethodDecoratorStaticType,
)
from beartype._conf.confcls import BeartypeConf
from beartype._conf.confenum import BeartypeStrategy
from beartype._data.datatyping import (
BeartypeableT,
TypeStack,
TypeWarning,
)
from beartype._data.cls.datacls import TYPES_BEARTYPEABLE
from beartype._decor._wrapper.wrappermain import generate_code
from beartype._check.checkcall import BeartypeCall
from beartype._util.cache.pool.utilcachepoolobjecttyped import (
acquire_object_typed,
release_object_typed,
)
from beartype._util.func.lib.utilbeartypefunc import (
is_func_unbeartypeable,
set_func_beartyped,
)
from beartype._util.func.utilfuncmake import make_func
from beartype._util.mod.utilmodget import get_object_module_line_number_begin
from beartype._util.utilobject import get_object_name
from traceback import format_exc
from warnings import warn
# ....................{ DECORATORS }....................
def beartype_object(
    # Mandatory parameters.
    obj: BeartypeableT,
    conf: BeartypeConf,

    # Optional parameters.
    cls_stack: TypeStack = None,
) -> BeartypeableT:
    '''
    Decorate the passed **beartypeable** (i.e., caller-defined object that may
    be decorated by the :func:`beartype.beartype` decorator) with optimal
    type-checking dynamically generated unique to that beartypeable.

    Parameters
    ----------
    obj : BeartypeableT
        **Beartypeable** (i.e., pure-Python callable or class) to be decorated.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all flags, options, settings, and other metadata configuring the
        current decoration of the decorated callable or class).
    cls_stack : TypeStack
        Either:

        * If this beartypeable is an attribute (e.g., method, nested class) of
          a class currently being decorated by the :func:`beartype.beartype`
          decorator, the **type stack** (i.e., tuple of one or more lexically
          nested classes, ordered from the module-scoped root decorated class
          as the first item to the currently decorated class as the last
          item). Preserving the full stack (rather than only the root class)
          matters under :pep:`563`, where resolution treats the depth of
          lexical class nesting as input metadata.
        * Else, ``None``, signifying that this beartypeable was decorated
          directly by this decorator.

        Defaults to ``None``.

    Returns
    ----------
    BeartypeableT
        Either:

        * If the passed object is a class, this existing class embellished with
          dynamically generated type-checking.
        * If the passed object is a callable, a new callable wrapping that
          callable with dynamically generated type-checking.

    See Also
    ----------
    :func:`beartype._decor.decormain.beartype`
        Memoized parent decorator wrapping this unmemoized child decorator.
    '''

    # Classes are decorated recursively rather than wrapped directly. Note
    # that lexical scoping rules prevent nested class bodies from referring to
    # any enclosing classes other than module-scoped root classes, so the
    # passed "cls_stack" is merely threaded through unchanged here.
    if isinstance(obj, type):
        return _beartype_type(  # type: ignore[return-value]
            cls=obj,
            conf=conf,
            cls_stack=cls_stack,
        )
    # Else, this object is a non-class.

    # Builtin method descriptors (i.e., objects created by the builtin
    # @property, @classmethod, and @staticmethod decorators) arise when
    # @beartype is listed *ABOVE* rather than below one of those builtin
    # decorators in a chain of decorators: e.g.,
    #
    #     # This edge case is detected and reordered...
    #     class MuhClass(object):
    #         @beartype
    #         @classmethod
    #         def muh_classmethod(cls) -> None: pass
    #
    #     # ...to resemble this direct decoration instead.
    #     class MuhClass(object):
    #         @classmethod
    #         @beartype
    #         def muh_classmethod(cls) -> None: pass
    #
    # Such descriptors are unsuitable for direct decoration (most are
    # uncallable outright; @staticmethod descriptors under Python >= 3.10 are
    # technically callable but C-based), so @beartype is transparently
    # reordered onto the pure-Python function each descriptor wraps.
    if type(obj) in MethodDecoratorBuiltinTypes:
        return _beartype_descriptor(  # type: ignore[return-value]
            descriptor=obj,
            conf=conf,
            cls_stack=cls_stack,
        )
    # Else, this object is *NOT* an uncallable builtin method descriptor.

    # All remaining objects must themselves be callable to be decoratable.
    if not callable(obj):
        raise BeartypeDecorWrappeeException(
            f'Uncallable {repr(obj)} not decoratable by @beartype.')
    # Else, this object is callable.

    # Return a new callable decorating that callable with type-checking.
    return _beartype_func(  # type: ignore[return-value]
        func=obj,
        conf=conf,
        cls_stack=cls_stack,
    )
#FIXME: Generalize to accept a "cls_stack" parameter, please.
#FIXME: Unit test us up, please.
def beartype_object_nonfatal(
    obj: BeartypeableT,
    conf: BeartypeConf,
    warning_category: TypeWarning,
) -> BeartypeableT:
    '''
    Decorate the passed **beartypeable** (i.e., pure-Python callable or class)
    with optimal type-checking dynamically generated unique to that
    beartypeable and any otherwise uncaught exception raised by doing so safely
    coerced into a warning instead.

    Motivation
    ----------
    This decorator is principally intended to be called by our **all-at-once
    API** (i.e., the import hooks defined by the :mod:`beartype.claw`
    subpackage). Raising detailed exception tracebacks on unexpected error
    conditions is:

    * The right thing to do for callables and classes manually type-checked
      with the :func:`beartype.beartype` decorator.
    * The wrong thing to do for callables and classes automatically
      type-checked by import hooks installed by public functions exported by
      the :mod:`beartype.claw` subpackage. Why? Because doing so would render
      those import hooks fragile to the point of being practically useless on
      real-world packages and codebases by unexpectedly failing on the first
      callable or class defined *anywhere* under a package that is not
      type-checkable by :func:`beartype.beartype` (whether through our fault
      or that package's). Instead, the right thing to do is to:

      * Emit a warning for each callable or class that
        :func:`beartype.beartype` fails to generate a type-checking wrapper
        for.
      * Proceed to the next callable or class.

    Parameters
    ----------
    obj : BeartypeableT
        **Beartypeable** (i.e., pure-Python callable or class) to be decorated.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all flags, options, settings, and other metadata configuring the
        current decoration of the decorated callable or class).
    warning_category : TypeWarning
        Category of the non-fatal warning to emit if :func:`beartype.beartype`
        fails to generate a type-checking wrapper for this callable or class.

    Returns
    ----------
    BeartypeableT
        Either:

        * If the passed object is a class, this existing class embellished with
          dynamically generated type-checking.
        * If the passed object is a callable, a new callable wrapping that
          callable with dynamically generated type-checking.
        * If :func:`beartype.beartype` fails to decorate this object, this
          object unmodified.

    Warns
    ----------
    warning_category
        If :func:`beartype.beartype` fails to generate a type-checking wrapper
        for this callable or class by raising a fatal exception, this function
        coerces that exception into a non-fatal warning describing that error.

    See Also
    ----------
    :func:`beartype._decor.decormain.beartype`
        Memoized parent decorator wrapping this unmemoized child decorator.
    '''

    # Validate this warning category *BEFORE* attempting decoration, ensuring
    # misuse is reported regardless of whether decoration succeeds. Note that
    # warning categories are Warning *SUBCLASSES* (e.g., "UserWarning") rather
    # than Warning instances, necessitating issubclass() rather than the
    # superficially similar -- but erroneous -- isinstance() here.
    assert isinstance(warning_category, type) and issubclass(
        warning_category, Warning), (
        f'{repr(warning_category)} not warning category.')

    # Attempt to decorate the passed beartypeable.
    try:
        return beartype_object(obj, conf)
    # If doing so unexpectedly raises an exception, coerce that fatal exception
    # into a non-fatal warning for nebulous safety.
    except Exception as exception:
        # Original error message to be embedded in the warning message to be
        # emitted, defined as either...
        error_message = (
            # If this exception is beartype-specific, this exception's message
            # is probably human-readable as is. In this case, coerce only that
            # message directly into a warning for brevity and readability.
            str(exception)
            if isinstance(exception, BeartypeException) else
            # Else, this exception is *NOT* beartype-specific. In this case,
            # this exception's message is probably *NOT* human-readable as is.
            # Prepend that non-human-readable message by this exception's
            # traceback for disambiguity and debuggability. Note that the
            # format_exc() function appends this exception's message to this
            # traceback and thus suffices as is.
            format_exc()
        )

        #FIXME: Improve this warning message by:
        #* Globally replacing *EACH* newline (i.e., "\n" substring) in this
        #  message with a newline followed by four spaces (i.e., "\n    ").
        #* Contextually labelling this beartypeable via new label_callable()
        #  and label_class() functionality embedding both the fully-qualified
        #  name of this beartypeable (e.g., as returned by get_object_name())
        #  and the line number of its first declaration line (e.g., as
        #  returned by get_object_module_line_number_begin()), with the first
        #  character of the resulting label capitalized.

        # Warning message to be emitted, currently only the original error.
        warning_message = error_message

        # Emit this message under this category.
        warn(warning_message, warning_category)

    # Return this object unmodified, as @beartype failed to successfully wrap
    # this object with a type-checking class or callable. So it goes, fam.
    return obj
# ....................{ PRIVATE ~ beartypers : func }....................
def _beartype_descriptor(
    # Mandatory parameters.
    descriptor: BeartypeableT,
    conf: BeartypeConf,

    # Variadic keyword parameters.
    **kwargs
) -> BeartypeableT:
    '''
    Decorate the passed C-based unbound method descriptor with dynamically
    generated type-checking.

    Parameters
    ----------
    descriptor : BeartypeableT
        Descriptor to be decorated by :func:`beartype.beartype`.
    conf : BeartypeConf
        Beartype configuration configuring :func:`beartype.beartype` uniquely
        specific to this descriptor.

    All remaining keyword parameters are passed as is to the
    :func:`_beartype_func` decorator.

    Returns
    ----------
    BeartypeableT
        New pure-Python callable wrapping this descriptor with type-checking.
    '''
    assert isinstance(descriptor, MethodDecoratorBuiltinTypes), (
        f'{repr(descriptor)} not builtin method descriptor.')
    # assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'

    # Class of this C-based descriptor.
    descriptor_cls = type(descriptor)

    # Dispatch on the kind of descriptor. Property method descriptors are
    # intentionally tested first, due to their ubiquity "in the wild." Class
    # and static method descriptors are comparatively rarefied by comparison.
    #
    # If this descriptor is a property method...
    if descriptor_cls is MethodDecoratorPropertyType:
        # Type-check the pure-Python unbound getter function wrapped by this
        # descriptor. Note that *ALL* property method descriptors wrap at
        # least a getter function (but *NOT* necessarily a setter or deleter
        # function); this function is thus guaranteed to be non-"None".
        checked_getter = _beartype_func(  # type: ignore[type-var]
            func=descriptor.fget,  # type: ignore[union-attr] # pyright: ignore[reportGeneralTypeIssues]
            conf=conf,
            **kwargs
        )

        # Pure-Python unbound setter function wrapped by this descriptor if
        # any *OR* "None" otherwise, type-checked only when actually wrapped.
        checked_setter = descriptor.fset  # type: ignore[assignment,union-attr]
        if checked_setter is not None:
            checked_setter = _beartype_func(
                func=checked_setter,
                conf=conf,
                **kwargs
            )

        # Pure-Python unbound deleter function wrapped by this descriptor if
        # any *OR* "None" otherwise, type-checked only when actually wrapped.
        checked_deleter = descriptor.fdel  # type: ignore[assignment,union-attr]
        if checked_deleter is not None:
            checked_deleter = _beartype_func(
                func=checked_deleter,
                conf=conf,
                **kwargs
            )

        # Return a new property method descriptor bundling these type-checked
        # functions, implicitly destroying the prior descriptor.
        #
        # Note that the "property" class interestingly has this signature:
        #     class property(fget=None, fset=None, fdel=None, doc=None): ...
        return property(  # type: ignore[return-value]
            fget=checked_getter,
            fset=checked_setter,
            fdel=checked_deleter,
            doc=descriptor.__doc__,
        )
    # Else, this descriptor is *NOT* a property method.
    #
    # If this descriptor is a class method, return a new class method
    # descriptor decorating the pure-Python unbound function wrapped by the
    # prior descriptor with type-checking, implicitly destroying that prior
    # descriptor.
    elif descriptor_cls is MethodDecoratorClassType:
        return classmethod(_beartype_func(  # type: ignore[return-value]
            func=descriptor.__func__,  # type: ignore[union-attr]
            conf=conf,
            **kwargs
        ))
    # Else, this descriptor is *NOT* a class method.
    #
    # If this descriptor is a static method, return a new static method
    # descriptor decorating the pure-Python unbound function wrapped by the
    # prior descriptor with type-checking, implicitly destroying that prior
    # descriptor.
    elif descriptor_cls is MethodDecoratorStaticType:
        return staticmethod(_beartype_func(  # type: ignore[return-value]
            func=descriptor.__func__,  # type: ignore[union-attr]
            conf=conf,
            **kwargs
        ))
    # Else, this descriptor is *NOT* a static method.
    #FIXME: Instance method descriptors (i.e.,
    #"MethodBoundInstanceOrClassType") were previously considered for support
    #here by type-checking the wrapped "__func__" and rebinding the result to
    #"__self__" via a new descriptor propagating "__doc__". Unconvinced that
    #edge case is required or desired, but possibly worth resurrecting and
    #unit testing if @beartype ever needs to decorate such descriptors.

    # Raise a fallback exception. This should *NEVER* happen. This *WILL*
    # happen.
    raise BeartypeDecorWrappeeException(
        f'Builtin method descriptor {repr(descriptor)} '
        f'not decoratable by @beartype '
        f'(i.e., neither property, class method, nor static method descriptor).'
    )
def _beartype_func(
    # Mandatory parameters.
    func: BeartypeableT,
    conf: BeartypeConf,

    # Variadic keyword parameters.
    **kwargs
) -> BeartypeableT:
    '''
    Decorate the passed callable with dynamically generated type-checking.

    Parameters
    ----------
    func : BeartypeableT
        Callable to be decorated by :func:`beartype.beartype`.
    conf : BeartypeConf
        Beartype configuration configuring :func:`beartype.beartype` uniquely
        specific to this callable.

    All remaining keyword parameters are passed as is to the
    :meth:`BeartypeCall.reinit` method.

    Returns
    ----------
    BeartypeableT
        New pure-Python callable wrapping this callable with type-checking.
    '''
    assert callable(func), f'{repr(func)} uncallable.'
    # assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'

    #FIXME: Uncomment to display all annotations in "pytest" tracebacks.
    # func_hints = func.__annotations__

    # If this configuration enables the no-time strategy performing *NO*
    # type-checking, monkey-patch that callable with the standard
    # @typing.no_type_check decorator detected above by the call to the
    # is_func_unbeartypeable() tester on all subsequent decorations passed the
    # same callable. (Doing so prevents all subsequent decorations from
    # erroneously ignoring this previously applied no-time strategy.)
    if conf.strategy is BeartypeStrategy.O0:
        no_type_check(func)
    # Else, this configuration enables a positive-time strategy performing at
    # least the minimal amount of type-checking.

    # If that callable is unbeartypeable (i.e., if this decorator should
    # preserve that callable as is rather than wrap that callable with
    # constant-time type-checking), silently reduce to the identity decorator.
    #
    # Note that this conditional implicitly handles the prior conditional! :O
    if is_func_unbeartypeable(func):  # type: ignore[arg-type]
        return func  # type: ignore[return-value]
    # Else, that callable is beartypeable. Let's do this, folks.

    # Previously cached callable metadata reinitialized from that callable.
    bear_call = acquire_object_typed(BeartypeCall)
    bear_call.reinit(func, conf, **kwargs)

    # Generate the raw string of Python statements implementing this wrapper.
    func_wrapper_code = generate_code(bear_call)

    # If that callable requires *NO* type-checking, silently reduce to a noop
    # and thus the identity decorator by returning that callable as is.
    #
    # Note that this previously acquired callable metadata *MUST* first be
    # released back to its object pool. Failing to do so would "leak" this
    # metadata on each such reduction, slowly starving that pool of reusable
    # instances and degrading the caching this pool exists to provide.
    if not func_wrapper_code:
        release_object_typed(bear_call)
        return func  # type: ignore[return-value]
    # Else, that callable requires type-checking. Let's *REALLY* do this, fam.

    # Function wrapping that callable with type-checking to be returned.
    #
    # For efficiency, this wrapper accesses *ONLY* local rather than global
    # attributes. The latter incur a minor performance penalty, since local
    # attributes take precedence over global attributes, implying all global
    # attributes are *ALWAYS* first looked up as local attributes before falling
    # back to being looked up as global attributes.
    func_wrapper = make_func(
        func_name=bear_call.func_wrapper_name,
        func_code=func_wrapper_code,
        func_locals=bear_call.func_wrapper_scope,
        #FIXME: String formatting is infamously slow. As an optimization, it'd
        #be strongly preferable to instead pass a lambda function accepting *NO*
        #parameters and returning the desired string, which make_func() should
        #then internally call on an as-needed basis to make this string: e.g.,
        #    func_label_factory=lambda: f'@beartyped {bear_call.func_wrapper_name}() wrapper',
        #
        #This is trivial. The only question then is: "Which is actually faster?"
        #Before finalizing this refactoring, let's profile both, adopt whichever
        #outperforms the other, and then document this choice in make_func().
        func_label=f'@beartyped {bear_call.func_wrapper_name}() wrapper',
        func_wrapped=func,
        is_debug=conf.is_debug,
        exception_cls=BeartypeDecorWrapperException,
    )

    # Declare this wrapper to be generated by @beartype, which tests for the
    # existence of this attribute above to avoid re-decorating callables
    # already decorated by @beartype by efficiently reducing to a noop.
    set_func_beartyped(func_wrapper)

    # Release this callable metadata back to its object pool.
    release_object_typed(bear_call)

    # Return this wrapper.
    return func_wrapper  # type: ignore[return-value]
# ....................{ PRIVATE ~ beartypers : type }....................
def _beartype_type(
    # Mandatory parameters.
    cls: BeartypeableT,
    conf: BeartypeConf,

    # Optional parameters.
    cls_stack: TypeStack = None,
) -> BeartypeableT:
    '''
    Decorate the passed class with dynamically generated type-checking.

    Parameters
    ----------
    cls : BeartypeableT
        Class to be decorated by :func:`beartype.beartype`.
    conf : BeartypeConf
        Beartype configuration configuring :func:`beartype.beartype` uniquely
        specific to this class.
    cls_stack : TypeStack
        **Type stack** (i.e., either tuple of zero or more arbitrary types *or*
        ``None``). Defaults to ``None``. See also :func:`beartype_object`.

    Note this function intentionally accepts *no* ``cls_curr`` parameter,
    unlike most functions defined by this submodule. See
    :func:`beartype_object` for further details.

    Returns
    ----------
    BeartypeableT
        This class decorated by :func:`beartype.beartype`.
    '''
    assert isinstance(cls, type), f'{repr(cls)} not type.'
    # assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'

    #FIXME: Insufficient. We also want to set a beartype-specific dunder
    #attribute (e.g., "__beartyped") on this class *AND* detect that attribute
    #here to avoid re-@beartype-ing already-@beartyped classes. In short,
    #generalize the existing "beartype._util.func.lib.utilbeartypefunc"
    #submodule to support classes as well -- e.g., by renaming that submodule
    #to "beartype._util.check.utilcheckfunc" and defining a sibling
    #"beartype._util.check.utilchecktype" submodule for classes.
    #FIXME: Unit test us up, please. Test against at least: a dataclass (as
    #already done); an uncallable class defining a class variable, a standard
    #instance method, a class method, a static method, and a property getter,
    #setter, and deleter; a callable class (defining __call__()); and a PEP
    #563-fueled self-referential class. See this simple example of the latter:
    #    https://github.com/beartype/beartype/issues/152#issuecomment-1197778501

    # Append this decorated class to the top of the passed class stack (if
    # any), reflecting the fact that this decorated class is now the most
    # deeply lexically nested class for the currently recursive chain of
    # @beartype-decorated classes.
    if cls_stack is None:
        # No class stack was passed, in which case this class is necessarily
        # the first decorated class being decorated directly by @beartype and
        # thus the root decorated class.
        #
        # Note this is the common case and thus tested first. Since nested
        # classes effectively do *NOT* exist in the wild, this comprises
        # 99.999% of all real-world cases.
        cls_stack = (cls,)
    else:
        # Else, the caller passed a class stack comprising at least a root
        # decorated class. Preserve those classes as is to properly expose
        # them elsewhere.
        cls_stack = cls_stack + (cls,)

    # For the unqualified name and value of each direct (i.e., *NOT* indirectly
    # inherited) attribute of this class...
    for attr_name, attr_value in cls.__dict__.items():  # pyright: ignore[reportGeneralTypeIssues]
        # If this attribute is *NOT* beartypeable, silently skip this
        # attribute and continue to the next.
        if not isinstance(attr_value, TYPES_BEARTYPEABLE):
            continue
        # Else, this attribute is beartypeable.

        # This attribute decorated with type-checking configured by this
        # configuration if *NOT* already decorated.
        attr_value_checked = beartype_object(
            obj=attr_value,
            conf=conf,
            cls_stack=cls_stack,
        )

        # Replace this undecorated attribute with this decorated attribute.
        #
        # Note that class attributes are *ONLY* settable by calling the
        # tragically slow setattr() builtin. Attempting to directly set an
        # attribute on the class dictionary raises an exception. Why? Because
        # class dictionaries are actually low-level "mappingproxy" objects
        # that intentionally override the __setattr__() dunder method to
        # unconditionally raise an exception. Why? Because this constraint
        # enables the type.__setattr__() dunder method to enforce critical
        # efficiency constraints on class attributes -- including that class
        # attribute keys are not only strings but valid Python identifiers:
        #     >>> class OhGodHelpUs(object): pass
        #     >>> OhGodHelpUs.__dict__['even_god_cannot_help'] = 2
        #     TypeError: 'mappingproxy' object does not support item assignment
        #
        # See also this relevant StackOverflow answer by Python luminary
        # Raymond Hettinger:
        #     https://stackoverflow.com/a/32720603/2809027
        setattr(cls, attr_name, attr_value_checked)

    # Return this class as is.
    return cls  # type: ignore[return-value]
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
# ....................{ TODO }....................
#FIXME: "typing.LiteralString". We just had a mildly brilliant revelation in
#the "beartype.claw._clawast" submodule as to how we might go about performing
#static analysis at runtime via the third-party "executing" submodule. \o/
#FIXME: [PEP 484]: Support subscripted bounded type variables. We didn't even
#know this was a thing -- but it makes sense. An example is probably the best
#way to explain this madness. Witness!
# from beartype import beartype
# from typing import Iterable
#
# T = TypeVar('T', bound=Iterable)
#
# @beartype
# def stringify_iterable_items(arg: T[int]) -> T[str]:
# return type(arg)(str(item) for item in arg)
#
#Clearly, @beartype should just quietly reduce both the "T[int]" and "T[str]"
#type hints that we can't really do anything with to "Iterable[int]" and
#"Iterable[str]" type hints, which we can. Does @beartype currently do that?
#Probably... not. At the least, we should begin testing this exhaustively.
#FIXME: [PEP 585] It looks like CPython's stdlib quietly extended PEP 585
#support to a variety of undocumented classes, including:
#* "asyncio.Future[T]".
#* "asyncio.Task[T]".
#* "asyncio.Queue[T]".
#* "pathlib.PathLike[T]".
#
#Yes, we can verify that *ALL* of those actually are subscriptable at runtime.
#@beartype will need to add corresponding support for such hints, beginning with
#defining new sign singletons suffixed by the same basenames (e.g.,
#"HintSignFuture", "HintSignTask"). Or... maybe not? Maybe everything just
#behaves as expected as is?
#
#At the least, we'll want to rigorously test *ALL* of the above in our test
#suite to ensure that @beartype does indeed type-check these as expected.
#FIXME: Sadly, we do need to explicitly do *SOMETHING*. @beartype currently
#raises exceptions on callables annotated by any of the above, as the metaclass
#of these hints prohibits isinstance() checks: e.g.,
# asyncio.Task[~T] uncheckable at runtime (i.e., not passable as second
# parameter to isinstance(), due to raising "isinstance() argument 2 cannot
# be a parameterized generic" from metaclass __instancecheck__() method).
#
#Rather than explicitly matching all of the above, we instead want @beartype to
#perform an automated solution implicitly matching all of the above. Notably,
#improve @beartype to:
#
#* Detect parametrized generic hints that are otherwise unrecognized (e.g.,
# "asyncio.Task[~T]").
#* Introspect the origin (i.e., "__origin__" dunder attribute) from these hints.
#* Internally replace each such parametrized generic hint with its origin when
# generating type-checking code. Voila!
#FIXME: [PEP] Add PEP 613 support (i.e., "typing.TypeAlias"). Thankfully, this
#is trivial. "typing.TypeAlias" is prohibited in callable definitions and
#inside the bodies of callables. Ergo, @beartype should just raise a
#decoration-time exception if any parameter or return is annotated as an
#explicit "TypeAlias". That constitutes full support for PEP 613 from our
#side. Good enough! :p
#FIXME: [PEP] Add PEP 673 support (i.e., "typing.Self"). Since "typing.Self" is
#simply a singleton syntactic sugar for "typing.TypeVar('Self', bound={cls})"
#where {cls} is the class containing the "typing.Self" reference, this can be
#trivially achieved with a reduction in "beartype._util.hint.utilhintconv"
#contextually reducing each "typing.Self" type hint in a callable signature to
#the corresponding "typing.TypeVar('Self', bound={cls})" object: e.g.,
# # This canonical PEP 673-specific example...
# from typing import Self
# class Shape:
# def set_scale(self, scale: float) -> Self:
# self.scale = scale
# return self
#
# # ...is semantically identical to this PEP 673-agnostic example.
# from typing import TypeVar
# class Shape:
#     def set_scale(self, scale: float) -> TypeVar('Self', bound=Shape):
# self.scale = scale
# return self
#
#Note that implementing this reduction will require:
#* Adding a new "cls_scope" (or something) instance variable to our
# "beartype._check.checkcall.BeartypeCall" dataclass, defined as either:
# * "None" for non-method callables.
# * The type of the class lexically declaring the current method callable. Note
# that this type is trivially retrievable from C-based bound instance method
# descriptors via the "__self__" dunder variable: e.g.,
# >>> class Yam(object): def yim(self): pass
# >>> Yam().yim.__self__
# <__main__.Yam at 0x7f4f70aad130>
#
# That said, it kinda seems unlikely that our dynamically generated unbound
# pure-Python wrapper functions would have access to the C-based parent
# instance method descriptor. Given that, the only means of supporting this
# is probably to require that users decorate classes rather than class
# methods by @beartype. In the former case, @beartype trivially has access to
# this type and can safely set "BeartypeCall.cls_scope"; in the latter case,
# @beartype does not and *MUST* leave "BeartypeCall.cls_scope" as "None".
#
# Ergo, @beartype should raise a fatal exception when visiting a "Self" hint
# *AND* "BeartypeCall.cls_scope is None" is true.
#* It would be advisable to cache the "TypeVar" objects produced in this
# manner. Or perhaps that's overkill, as the same "utilhintconv" function
# performing this reduction *ALSO* currently reduces "TypeVar" objects to
# their bounds. So... *shrug*
#FIXME: [SPEED] As a useful MACROoptimization, render the entire @beartype
#toolchain thread-safe upfront rather than doing so piecemeal throughout the
#toolchain. While the latter certainly works as well, the former is
#*SUBSTANTIALLY* more efficient due to the non-trivial expense of each
#threadsafe context manager. To do so:
#* Simply wrap the body of the implementation of the @beartype decorator in a
# context manager locking on a globally declared lock: e.g.,
# with lock:
# ...
# Note that an "RLock" is neither needed nor desired here, as @beartype
# *NEVER* invokes itself recursively. A non-reentrant "Lock" suffices.
#* Rip out all now-redundant "with lock:" expressions throughout the codebase.
#FIXME: [SPEED] As a useful microoptimization, consider memoizing "repr(hint)"
#calls. We strongly suspect these calls to be a performance bottleneck, because
#we repeat them so frequently for the same hint throughout the codebase. The
#best approach to doing so is to:
#* Define a new memoized "beartype._util.hint.utilhintget" getter: e.g.,
# @callable_cached
# def get_hint_repr(hint: object) -> str:
# return repr(hint)
#* Globally replace all calls to the repr() builtin throughout the codebase
# passed a hint with calls to get_hint_repr() instead.
#FIXME: [SPEED] As a useful microoptimization, unroll *ALL* calls to the any()
#and all() builtins into equivalent "for" loops in our critical path. Since we
#typically pass these builtins generator comprehensions created and destroyed
#on-the-fly, we've profiled these builtins to incur substantially higher
#runtime costs than equivalent "for" loops. Thanks a lot, CPython. *sigh*
#FIXME: [FEATURE] Plugin architecture. The NumPy type hints use case will come
#up again and again. So, let's get out ahead of that use case rather than
#continuing to reinvent the wheel. Let's begin by defining a trivial plugin API
#enabling users to define their own arbitrary type hint *REDUCTIONS.* Because
#it's capitalized, we know the term "REDUCTIONS" is critical here. We are *NOT*
#(at least, *NOT* initially) defining a full-blown plugin API. We're only
#enabling users to reduce arbitrary type hints:
#* From domain-specific objects they implement and annotate their code with...
#* Into PEP-compliant type hints @beartype already supports.
#Due to their versatility, the standard use case is reducing PEP-noncompliant
#type hints to PEP 593-compliant beartype validators. To do so, consider:
#* Defining a new public "beartype.plug" subpackage, defining:
# * A private "_PLUGIN_NAME_TO_SIGN" dictionary mapping from each "name"
# parameter passed to each prior call of the plug_beartype() function to the
# "HintSign" object that function dynamically creates to represent
# PEP-noncompliant type hints handled by that plugin. This dictionary
# effectively maps from the thing our users care about but we don't (i.e.,
# plugin names) to the thing our users don't care about but we do (i.e.,
# hint signs).
# * A public plug_beartype() function with signature resembling:
# def plug_beartype(
# # Mandatory parameters.
# name: str,
# hint_reduce: Callable[[object,], object],
#
# # Optional parameters.
# hint_detect_from_repr_prefix_args_1_or_more: Optional[str] = None,
# hint_detect_from_type_name: Optional[str] = None,
# ) -> None:
# ...where:
# * The "name" parameter is an arbitrary non-empty string (e.g., "Numpy").
# This function will then synthesize a new hint sign suffixed by this
# substring (e.g., f'HintSign{name}') and map this name to that sign in
# the "_PLUGIN_NAME_TO_SIGN" dictionary.
# * The "hint_detect_from_repr_prefix_args_1_or_more" parameter is an
# arbitrary non-empty string typically corresponding to the
# fully-qualified name of a subclass of "types.GenericAlias" serving as a
#       PEP 585-compliant type hint factory (e.g.,
# "muh_package.MuhTypeHintFactory"), corresponding exactly to the items
# of the "HINT_REPR_PREFIX_ARGS_1_OR_MORE_TO_SIGN" set.
# * The "hint_detect_from_type_name" parameter is the fully-qualified name
# of a caller-defined class (e.g., "muh_package.MuhTypeHintFactoryType"),
# corresponding exactly to the items of the "HINT_TYPE_NAME_TO_SIGN" set.
# * The "hint_reduce" parameter is an arbitrary caller-defined callable
# reducing all type hints identified by one or more of the detection
# schemes below to another arbitrary (but hopefully PEP-compliant and
# beartype-supported) type hint. Again, that will typically be a
# PEP 593-compliant beartype validator.
# * A public unplug_beartype() function with signature resembling:
# def unplug_beartype(name: str) -> None:
# This function simply looks up the passed name in various internal data
# structures (e.g.,"_PLUGIN_NAME_TO_SIGN") to undo the effects of the prior
# plug_beartype() call passed that name.
#
#Given that, we should then entirely reimplement our current strategy for
#handling NumPy type hints into a single call to plug_beartype(): e.g.,
# # Pretty boss, ain't it? Note we intentionally pass
# # "hint_detect_from_repr_prefix_args_1_or_more" here, despite the fact
# # that the unsubscripted "numpy.typing.NDArray" factory is a valid type
# # hint. Yes, this actually works. Why? Because that factory implicitly
# # subscripts itself when unsubscripted. In other words, there is *NO* such
# # thing as an unsubscripted typed NumPy array. O_o
# def plug_beartype(
# name='NumpyArray',
# hint_reduce=reduce_hint_numpy_ndarray,
# hint_detect_from_repr_prefix_args_1_or_more='numpy.ndarray',
# )
#
#Yes, this would then permit us to break backward compatibility by bundling
#that logic into a new external "beartype_numpy" plugin for @beartype -- but we
#absolutely should *NOT* do that, both because it would severely break backward
#compatibility *AND* because everyone (including us) wants NumPy support
#out-of-the-box. We're all data scientists here. Do the right thing.
#FIXME: [FEATURE] Define the following supplementary decorators:
#* @beartype.beartype_O1(), identical to the current @beartype.beartype()
# decorator but provided for disambiguity. This decorator only type-checks
# exactly one item from each container for each call rather than all items.
#* @beartype.beartype_Ologn(), type-checking log(n) random items from each
# container of "n" items for each call.
#* @beartype.beartype_On(), type-checking all items from each container for
# each call. We have various ideas littered about GitHub on how to optimize
# this for various conditions, but this is never going to be ideal and should
# thus never be the default.
#
#To differentiate between these three strategies, consider:
#* Declare an enumeration in "beartype._check.checkcall" resembling:
# from enum import Enum
#     BeartypeStrategyKind = Enum('BeartypeStrategyKind', ('O1', 'Ologn', 'On',))
#* Define a new "BeartypeCall.strategy_kind" instance variable.
#* Set this variable to the corresponding "BeartypeStrategyKind" enumeration
# member based on which of the three decorators listed above was called.
#* Explicitly pass the value of the "BeartypeCall.strategy_kind" instance
# variable to the beartype._check.expr.exprmake.make_func_wrapper_code()
# function as a new memoized "strategy_kind" parameter.
#* Conditionally generate type-checking code throughout that function depending
# on the value of that parameter.
#FIXME: Emit one non-fatal warning for each annotated type that is either:
#
#* "beartype.cave.UnavailableType".
#* "beartype.cave.UnavailableTypes".
#
#Both cases imply user-side misconfiguration, but not sufficiently awful enough
#to warrant fatal exceptions. Moreover, emitting warnings rather than
#exceptions enables end users to unconditionally disable all unwanted warnings,
#whereas no such facilities exist for unwanted exceptions.
#FIXME: Validate all tuple annotations to be non-empty *EXCLUDING*
#"beartype.cave.UnavailableTypes", which is intentionally empty.
#FIXME: Unit test the above edge case.
#FIXME: Add support for all possible kinds of parameters. @beartype currently
#supports most but *NOT* all types. Specifically:
#
#* Type-check variadic keyword arguments. Currently, only variadic positional
# arguments are type-checked. When doing so, remove the
# "Parameter.VAR_KEYWORD" type from the "_PARAM_KIND_IGNORABLE" set.
#* Type-check positional-only arguments under Python >= 3.8. Note that, since
# C-based callables have *ALWAYS* supported positional-only arguments, the
# "Parameter.POSITIONAL_ONLY" type is defined for *ALL* Python versions
# despite only being usable in actual Python from Python >= 3.8. In other
# words, support for type-checking positional-only arguments should be added
# unconditionally without reference to Python version -- we suspect, anyway.
# When doing so, remove the "Parameter.POSITIONAL_ONLY" type from the
# "_PARAM_KIND_IGNORABLE" set.
#* Remove the "_PARAM_KIND_IGNORABLE" set entirely.
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Public beartype decorator.**
This private submodule defines the core :func:`beartype` decorator, which the
:mod:`beartype.__init__` submodule then imports for importation as the public
:mod:`beartype.beartype` decorator by downstream callers -- completing the
virtuous cycle of code life.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
# All "FIXME:" comments for this submodule reside in this package's "__init__"
# submodule to improve maintainability and readability here.
# ....................{ IMPORTS }....................
from beartype.typing import TYPE_CHECKING
from beartype._conf.confcls import (
BEARTYPE_CONF_DEFAULT,
BeartypeConf,
)
from beartype._data.datatyping import (
BeartypeConfedDecorator,
BeartypeReturn,
BeartypeableT,
)
# Intentionally import the standard mypy-friendly @typing.overload decorator
# rather than a possibly mypy-unfriendly @beartype.typing.overload decorator --
# which, in any case, would be needlessly inefficient and thus bad.
from typing import overload
# ....................{ OVERLOADS }....................
# Declare PEP 484-compliant overloads to avoid breaking downstream code
# statically type-checked by a static type checker (e.g., mypy). The concrete
# @beartype decorator declared below is permissively annotated as returning a
# union of multiple types desynchronized from the types of the passed arguments
# and thus fails to accurately convey the actual public API of that decorator.
# See also: https://www.python.org/dev/peps/pep-0484/#function-method-overloading
# Overload matching the decoration use case (i.e., "@beartype" applied
# directly to a beartypeable object), returning that object type-checked.
@overload # type: ignore[misc,no-overload-impl]
def beartype(obj: BeartypeableT) -> BeartypeableT: ...
# Overload matching the configuration use case (i.e., "@beartype(conf=...)"
# called with a keyword-only configuration), returning a new decorator
# specific to that configuration.
@overload
def beartype(*, conf: BeartypeConf) -> BeartypeConfedDecorator: ...
# ....................{ DECORATORS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: *THE ORDER OF CONDITIONAL STATEMENTS BELOW IS SIGNIFICANT.* Notably,
# mypy 0.940 erroneously emits this fatal error when the "TYPE_CHECKING or"
# condition is *NOT* the first condition of this "if" statement:
# beartype/_decor/main.py:294: error: Condition can't be inferred, unable
# to merge overloads [misc]
# See also: https://github.com/python/mypy/issues/12335#issuecomment-1065591703
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# If the active Python interpreter is either...
if (
    # Running under an external static type checker -- in which case there is
    # no benefit to attempting runtime type-checking whatsoever...
    #
    # Note that this test is largely pointless. By definition, static type
    # checkers should *NOT* actually run any code -- merely parse and analyze
    # that code. Ergo, this boolean constant should *ALWAYS* be false from the
    # runtime context under which @beartype is only ever run. Nonetheless, this
    # test is only performed once per process and is thus effectively free.
    TYPE_CHECKING or
    # Optimized (e.g., option "-O" was passed to this interpreter) *OR*...
    not __debug__
):
    # Then unconditionally disable @beartype-based type-checking across the
    # entire codebase by reducing the @beartype decorator to the identity
    # decorator. Ideally, this would have been implemented at the top rather
    # than bottom of this submodule as a conditional resembling:
    #     if __debug__:
    #         def beartype(func: CallableTypes) -> CallableTypes:
    #             return func
    #         return
    #
    # Tragically, Python fails to support module-scoped "return" statements.
    # *sigh*
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize the signature of this identity decorator with the
    # non-identity decorator imported below.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    def beartype( # type: ignore[no-redef]
        obj: BeartypeableT, # pyright: ignore[reportInvalidTypeVarUse]
        # Optional keyword-only parameters.
        #
        # Note that "conf" is accepted (and silently ignored) purely for
        # signature compatibility with the real decorator imported below.
        *,
        conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
    ) -> BeartypeReturn:
        # Identity decoration: return the passed object unmodified.
        return obj
# Else, the active Python interpreter is in a standard runtime state. In this
# case, define the @beartype decorator in the standard way.
else:
    # This is where @beartype *REALLY* lives. Grep here for all the goods.
    from beartype._decor._cache.cachedecor import beartype
# ....................{ DECORATORS ~ doc }....................
# Document the @beartype decorator with the same documentation regardless of
# which of the above implementations currently implements that decorator.
# Document the @beartype decorator with the same documentation regardless of
# which of the above implementations currently implements that decorator.
#
# Note that the only code change below relative to the prior docstring is the
# repair of two unbalanced reStructuredText inline literals (``None` -> ``None``),
# which previously rendered as broken markup in generated documentation.
beartype.__doc__ = (
    '''
    Decorate the passed **beartypeable** (i.e., pure-Python callable or
    class) with optimal type-checking dynamically generated unique to that
    beartypeable under the passed beartype configuration.

    This decorator supports two distinct (albeit equally efficient) modes
    of operation:

    * **Decoration mode.** The caller activates this mode by passing this
      decorator a type-checkable object via the ``obj`` parameter; this
      decorator then creates and returns a new callable wrapping that object
      with optimal type-checking. Specifically:

      * If this object is a callable, this decorator creates and returns a new
        **runtime type-checker** (i.e., pure-Python function validating all
        parameters and returns of all calls to that callable against all
        PEP-compliant type hints annotating those parameters and returns). The
        type-checker returned by this decorator is:

        * Optimized uniquely for the passed callable.
        * Guaranteed to run in ``O(1)`` constant-time with negligible constant
          factors.
        * Type-check effectively instantaneously.
        * Add effectively no runtime overhead to the passed callable.

      * If the passed object is a class, this decorator iteratively applies
        itself to all annotated methods of this class by dynamically wrapping
        each such method with a runtime type-checker (as described previously).

    * **Configuration mode.** The caller activates this mode by passing this
      decorator a beartype configuration via the ``conf`` parameter; this
      decorator then creates and returns a new beartype decorator enabling that
      configuration. That decorator may then be called (in decoration mode) to
      create and return a new callable wrapping the passed type-checkable
      object with optimal type-checking configured by that configuration.

    If optimizations are enabled by the active Python interpreter (e.g., due to
    option ``-O`` passed to this interpreter), this decorator silently reduces
    to a noop.

    Parameters
    ----------
    obj : Optional[BeartypeableT]
        **Beartypeable** (i.e., pure-Python callable or class) to be decorated.
        Defaults to ``None``, in which case this decorator is in configuration
        rather than decoration mode. In configuration mode, this decorator
        creates and returns an efficiently cached private decorator that
        generically applies the passed beartype configuration to any
        beartypeable object passed to that decorator. Look... It just works.
    conf : BeartypeConf, optional
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object). Defaults
        to ``BeartypeConf()``, the default ``O(1)`` constant-time configuration.

    Returns
    ----------
    BeartypeReturn
        Either:

        * If in decoration mode (i.e., ``obj`` is *not* ``None`` while ``conf``
          is ``None``) *and*:

          * If ``obj`` is a callable, a new callable wrapping that callable
            with dynamically generated type-checking.
          * If ``obj`` is a class, this existing class embellished with
            dynamically generated type-checking.

        * If in configuration mode (i.e., ``obj`` is ``None`` while ``conf`` is
          *not* ``None``), a new beartype decorator enabling this
          configuration.

    Raises
    ----------
    BeartypeConfException
        If the passed configuration is *not* actually a configuration (i.e.,
        instance of the :class:`BeartypeConf` class).
    BeartypeDecorHintException
        If any annotation on this callable is neither:

        * A **PEP-compliant type** (i.e., instance or class complying with a
          PEP supported by :mod:`beartype`), including:

          * :pep:`484` types (i.e., instance or class declared by the stdlib
            :mod:`typing` module).

        * A **PEP-noncompliant type** (i.e., instance or class complying with
          :mod:`beartype`-specific semantics rather than a PEP), including:

          * **Fully-qualified forward references** (i.e., strings specified as
            fully-qualified classnames).
          * **Tuple unions** (i.e., tuples containing one or more classes
            and/or forward references).

    BeartypePep563Exception
        If :pep:`563` is active for this callable and evaluating a **postponed
        annotation** (i.e., annotation whose value is a string) on this
        callable raises an exception (e.g., due to that annotation referring to
        local state no longer accessible from this deferred evaluation).
    BeartypeDecorParamNameException
        If the name of any parameter declared on this callable is prefixed by
        the reserved substring ``__beartype_``.
    BeartypeDecorWrappeeException
        If this callable is either:

        * Uncallable.
        * A class, which :mod:`beartype` currently fails to support.
        * A C-based callable (e.g., builtin, third-party C extension).

    BeartypeDecorWrapperException
        If this decorator erroneously generates a syntactically invalid wrapper
        function. This should *never* happen, but here we are, so this probably
        happened. Please submit an upstream issue with our issue tracker if you
        ever see this. (Thanks and abstruse apologies!)
    '''
)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Memoized beartype decorator.**
This private submodule defines the core :func:`beartype.beartype` decorator,
conditionally imported (in order):
#. Into the parent :mod:`beartype._decor.decormain`
submodule if this decorator is *not* currently reducing to a noop (e.g., due
to ``python3 -O`` optimization).
#. Into the root :mod:`beartype.__init__` submodule if the :mod:`beartype`
package is *not* currently being installed by :mod:`setuptools`.
This private submodule is literally the :func:`beartype.beartype` decorator,
despite *not* actually being that decorator (due to being unmemoized).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeConfException
from beartype.typing import (
Dict,
Optional,
)
from beartype._conf.confcls import (
BEARTYPE_CONF_DEFAULT,
BeartypeConf,
)
from beartype._data.datatyping import (
BeartypeConfedDecorator,
BeartypeReturn,
BeartypeableT,
)
from beartype._decor.decorcore import beartype_object
# ....................{ DECORATORS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Synchronize the signature of this non-identity decorator with the
# identity decorator defined by the "beartype._decor.decormain" submodule.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Documentation for this decorator intentionally resides in the parent
# "beartype._decor.decormain" submodule.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
def beartype(
    # Optional positional or keyword parameters.
    obj: Optional[BeartypeableT] = None,

    # Optional keyword-only parameters.
    *,
    conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
) -> BeartypeReturn:
    # Reject anything that is *NOT* a genuine beartype configuration
    # immediately, regardless of which mode the caller intended.
    if not isinstance(conf, BeartypeConf):
        raise BeartypeConfException(
            f'{repr(conf)} not beartype configuration.')
    # Else, "conf" is a genuine configuration.
    #
    # If the caller also passed an object, this is decoration mode: hand the
    # object straight to the core decoration machinery under this
    # configuration and return the decorated result.
    #
    # Note that callers typically reach this branch with the default
    # configuration. Passing a non-default configuration positionally
    # technically violates the overloads declared by the parent submodule,
    # but we permissively honour it anyway.
    elif obj is not None:
        return beartype_object(obj, conf)
    # Else, *NO* object was passed: configuration mode.

    # If a decorator was already manufactured for an equal configuration by a
    # prior call in configuration mode, reuse that cached decorator. Beartype
    # configurations are self-caching dataclasses, so equal configurations
    # compare and hash equally here.
    if conf in _bear_conf_to_decor:
        return _bear_conf_to_decor[conf]
    # Else, this is the first time this configuration has been seen.

    # Manufacture a new private decorator closing over this configuration and
    # generically applying it to any beartypeable object passed to it.
    def beartype_confed(obj: BeartypeableT) -> BeartypeableT:
        '''
        Decorate the passed **beartypeable** (i.e., pure-Python callable or
        class) with optimal type-checking dynamically generated unique to
        that beartypeable under the beartype configuration passed to a
        prior call to the :func:`beartype.beartype` decorator.

        Parameters
        ----------
        obj : BeartypeableT
            Beartypeable to be decorated.

        Returns
        ----------
        BeartypeableT
            Either:

            * If the passed object is a class, this existing class
              embellished with dynamically generated type-checking.
            * If the passed object is a callable, a new callable wrapping
              that callable with dynamically generated type-checking.

        See Also
        ----------
        :func:`beartype.beartype`
            Further details.
        '''

        # Defer to the core decoration machinery under this configuration.
        return beartype_object(obj, conf)

    # Memoize this private decorator against this configuration, then return
    # it for subsequent use in decoration mode.
    _bear_conf_to_decor[conf] = beartype_confed
    return beartype_confed
# ....................{ SINGLETONS }....................
_bear_conf_to_decor: Dict[BeartypeConf, BeartypeConfedDecorator] = {}
'''
Non-thread-safe **beartype decorator cache.**
This cache is implemented as a singleton dictionary mapping from each
**beartype configuration** (i.e., self-caching dataclass encapsulating all
flags, options, settings, and other metadata configuring the current decoration
of the decorated callable or class) to the corresponding **configured beartype
decorator** (i.e., closure created and returned from the
:func:`beartype.beartype` decorator when passed a beartype configuration via
the optional ``conf`` parameter rather than an object to be decorated via
the optional ``obj`` parameter).
Caveats
----------
**This cache is not thread-safe.** Although rendering this cache thread-safe
would be trivial, doing so would needlessly reduce efficiency. This cache is
merely a runtime optimization and thus need *not* be thread-safe.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartypistry** (i.e., singleton dictionary mapping from the fully-qualified
classnames of all type hints annotating callables decorated by the
:func:`beartype.beartype` decorator to those types).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeCallHintForwardRefException,
BeartypeDecorHintForwardRefException,
)
from beartype.roar._roarexc import _BeartypeDecorBeartypistryException
from beartype._check.checkmagic import ARG_NAME_TYPISTRY
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.cls.pep.utilpep3119 import die_unless_type_isinstanceable
from beartype._util.cls.utilclstest import is_type_builtin
from beartype._util.mod.utilmodimport import import_module_attr
from beartype._util.mod.utilmodtest import die_unless_module_attr_name
from beartype._util.utilobject import get_object_type_name
# ....................{ CONSTANTS }....................
TYPISTRY_HINT_NAME_TUPLE_PREFIX = '+'
'''
**Beartypistry tuple key prefix** (i.e., substring prefixing the keys of all
beartypistry key-value pairs whose values are tuples).
Since fully-qualified classnames are guaranteed *not* to be prefixed by this
prefix, this prefix suffices to uniquely distinguish key-value pairs whose
values are types from pairs whose values are tuples.
'''
# ....................{ CONSTANTS ~ code }....................
_CODE_TYPISTRY_HINT_NAME_TO_HINT_PREFIX = f'{ARG_NAME_TYPISTRY}['
'''
Substring prefixing a Python expression mapping from the subsequent string to
an arbitrary object cached by the beartypistry singleton via the private
beartypistry parameter.
'''
_CODE_TYPISTRY_HINT_NAME_TO_HINT_SUFFIX = ']'
'''
Substring prefixing a Python expression mapping from the subsequent string to
an arbitrary object cached by the beartypistry singleton via the private
beartypistry parameter.
'''
# ....................{ REGISTRARS ~ forwardref }....................
#FIXME: Unit test us up.
# Note this function intentionally does *NOT* accept an optional "hint_label"
# parameter as doing so would conflict with memoization.
@callable_cached
def register_typistry_forwardref(hint_classname: str) -> str:
    '''
    Register the passed **fully-qualified forward reference** (i.e., string
    whose value is the fully-qualified name of a user-defined class that
    typically has yet to be defined) with the beartypistry singleton *and*
    return a Python expression evaluating to this class when accessed via the
    private ``__beartypistry`` parameter implicitly passed to all wrapper
    functions generated by the :func:`beartype.beartype` decorator.

    This function is memoized for both efficiency *and* safety, preventing
    accidental reregistration.

    Parameters
    ----------
    hint_classname : str
        Forward reference to be registered, defined as a string whose value
        is the syntactically valid fully-qualified name of a class.

    Returns
    ----------
    str
        Python expression evaluating to the user-defined class referred to by
        this forward reference when accessed via the private
        ``__beartypistry`` parameter implicitly passed to all wrapper
        functions generated by the :func:`beartype.beartype` decorator.

    Raises
    ----------
    BeartypeDecorHintForwardRefException
        If this forward reference is *not* a syntactically valid
        fully-qualified classname.
    '''

    # If this object is *NOT* the syntactically valid fully-qualified name of
    # a module attribute which may *NOT* actually exist, raise an exception.
    die_unless_module_attr_name(
        module_attr_name=hint_classname,
        exception_cls=BeartypeDecorHintForwardRefException,
        exception_prefix='Forward reference ',
    )

    # Return a Python expression evaluating to this type *WITHOUT* explicitly
    # registering this forward reference with the beartypistry singleton. Why?
    # Because the Beartypistry.__missing__() dunder method implicitly handles
    # forward references by dynamically registering types on their first access
    # if *NOT* already registered. Ergo, our job is actually done here.
    return (
        f'{_CODE_TYPISTRY_HINT_NAME_TO_HINT_PREFIX}{repr(hint_classname)}'
        f'{_CODE_TYPISTRY_HINT_NAME_TO_HINT_SUFFIX}'
    )
# ....................{ CLASSES }....................
class Beartypistry(dict):
    '''
    **Beartypistry** (i.e., singleton dictionary mapping from strings uniquely
    identifying PEP-noncompliant type hints annotating callables decorated
    by the :func:`beartype.beartype` decorator to those hints).

    This dictionary implements a global registry for **PEP-noncompliant type
    hints** (i.e., :mod:`beartype`-specific annotations *not* compliant with
    annotation-centric PEPs), including:

    * Non-:mod:`typing` types (i.e., classes *not* defined by the :mod:`typing`
      module, which are PEP-compliant type hints that fail to comply with
      standard type semantics and are thus beyond the limited scope of this
      PEP-noncompliant-specific dictionary).
    * Tuples of non-:mod:`typing` types, commonly referred to as **tuple
      unions** in :mod:`beartype` jargon.

    This dictionary efficiently shares these hints across all type-checking
    wrapper functions generated by this decorator, enabling these functions to:

    * Obtain type and tuple objects at wrapper runtime given only the strings
      uniquely identifying those objects hard-coded into the bodies of those
      wrappers at decoration time.
    * Resolve **forward references** (i.e., type hints whose values are strings
      uniquely identifying type and tuple objects) at wrapper runtime, which
      this dictionary supports by defining a :meth:`__missing__` dunder method
      dynamically adding a new mapping from each such reference to the
      corresponding object on the first attempt to access that reference.
    '''

    # ..................{ DUNDERS }..................
    def __setitem__(self, hint_name: str, hint: object) -> None:
        '''
        Dunder method explicitly called by the superclass on setting the passed
        key-value pair with ``[``- and ``]``-delimited syntax, mapping the
        passed string uniquely identifying the passed PEP-noncompliant type
        hint to that hint.

        Parameters
        ----------
        hint_name: str
            String uniquely identifying this hint in a manner dependent on the
            type of this hint. Specifically, if this hint is:

            * A non-:mod:`typing` type, this is the fully-qualified classname
              of the module attribute defining this type.
            * A tuple of non-:mod:`typing` types, this is a string:

              * Prefixed by the :data:`TYPISTRY_HINT_NAME_TUPLE_PREFIX`
                substring distinguishing this string from fully-qualified
                classnames.
              * Suffixed by the hash of these types (ignoring duplicate types
                and type order in this tuple).

        hint : object
            PEP-noncompliant type hint to be mapped from this string.

        Raises
        ----------
        TypeError
            If this hint is **unhashable** (i.e., *not* hashable by the builtin
            :func:`hash` function and thus unusable in hash-based containers
            like dictionaries and sets). Since *all* supported PEP-noncompliant
            type hints are hashable, this exception should *never* be raised.
        _BeartypeDecorBeartypistryException
            If either:

            * This name is either:

              * *Not* a string.
              * An existing string key of this dictionary, implying this
                name has already been registered, implying a key collision
                between the type or tuple already registered under this key and
                this passed type or tuple to be reregistered under this key.
                Since the high-level :func:`register_typistry_type` and
                :func:`register_typistry_tuple` functions implicitly calling
                this low-level dunder method are memoized *and* since the
                latter function explicitly avoids key collisions by detecting
                and uniquifying colliding keys, every call to this method
                should be passed a unique key.

            * This hint is either:

              * A type but either:

                * This name is *not* the fully-qualified classname of this
                  type.
                * This type is **PEP-compliant** (most of which violate
                  standard type semantics and thus require PEP-specific
                  handling), including either:

                  * A class defined by the :mod:`typing` module.
                  * A subclass of such a class.
                  * A generic alias.

              * A tuple but either:

                * This name is *not* prefixed by the magic substring
                  :data:`TYPISTRY_HINT_NAME_TUPLE_PREFIX`.
                * This tuple contains one or more items that are either:

                  * *Not* types.
                  * PEP-compliant types.
        '''

        # If this name is *NOT* a string, raise an exception.
        if not isinstance(hint_name, str):
            raise _BeartypeDecorBeartypistryException(
                f'Beartypistry key {repr(hint_name)} not string.')
        # Else, this name is a string.
        #
        # If this name is an existing key of this dictionary, this name has
        # already been registered, implying a key collision between the type or
        # tuple already registered under this key and the passed type or
        # tuple to be reregistered under this key. In this case, raise an
        # exception.
        elif hint_name in self:
            raise _BeartypeDecorBeartypistryException(
                f'Beartypistry key "{hint_name}" already registered '
                f'(i.e., key collision between '
                f'prior registered value {repr(self[hint_name])} and '
                f'newly registered value {repr(hint)}).'
            )
        # Else, this name is *NOT* an existing key of this dictionary.
        #
        # If this hint is a class...
        #
        # Note that although *MOST* classes are PEP-noncompliant (e.g., the
        # builtin "str" type), some classes are PEP-compliant (e.g., the
        # stdlib "typing.SupportsInt" protocol). Since both PEP-noncompliant
        # and -compliant classes are shallowly type-checkable via the
        # isinstance() builtin, there exists no demonstrable benefit to
        # distinguishing between either here.
        elif isinstance(hint, type):
            # Fully-qualified classname of this type as declared by this type.
            hint_clsname = get_object_type_name(hint)

            # If...
            if (
                # The passed name is not this classname *AND*...
                hint_name != hint_clsname and
                # This type is *NOT* builtin (and thus requires importation
                # into the body of the current wrapper function)...
                #
                # Note that builtin types are registered under their
                # unqualified basenames (e.g., "list" rather than
                # "builtins.list") for runtime efficiency, a core optimization
                # requiring manual whitelisting here.
                not is_type_builtin(hint)
            # Then raise an exception.
            ):
                raise _BeartypeDecorBeartypistryException(
                    f'Beartypistry key "{hint_name}" not '
                    f'fully-qualified classname "{hint_clsname}" of '
                    f'type {hint}.'
                )
        # Else, this hint is *NOT* a class.
        #
        # If this hint is a tuple...
        elif isinstance(hint, tuple):
            # If this tuple's name is *NOT* prefixed by a magic substring
            # uniquely identifying this hint as a tuple, raise an exception.
            #
            # Ideally, this block would strictly validate this name to be the
            # concatenation of this prefix followed by this tuple's hash.
            # Sadly, Python fails to cache tuple hashes (for largely spurious
            # reasons, like usual):
            #     https://bugs.python.org/issue9685
            #
            # Potentially introducing a performance bottleneck for mostly
            # redundant validation is a bad premise, given that we mostly
            # trust callers to call the higher-level
            # :func:`register_typistry_tuple` function instead, which already
            # guarantees this constraint to be the case.
            if not hint_name.startswith(TYPISTRY_HINT_NAME_TUPLE_PREFIX):
                raise _BeartypeDecorBeartypistryException(
                    f'Beartypistry key "{hint_name}" not '
                    f'prefixed by "{TYPISTRY_HINT_NAME_TUPLE_PREFIX}" for '
                    f'tuple {repr(hint)}.'
                )
        # Else, this hint is neither a class nor a tuple. In this case,
        # something has gone terribly awry. Pour out an exception.
        else:
            raise _BeartypeDecorBeartypistryException(
                f'Beartypistry key "{hint_name}" value {repr(hint)} invalid '
                f'(i.e., neither type nor tuple).'
            )

        # Cache this object under this name.
        super().__setitem__(hint_name, hint)

    def __missing__(self, hint_classname: str) -> type:
        '''
        Dunder method explicitly called by the superclass
        :meth:`dict.__getitem__` method implicitly called on caller attempts to
        access the passed missing key with ``[``- and ``]``-delimited syntax.

        This method treats this attempt to get this missing key as the
        intentional resolution of a forward reference whose fully-qualified
        classname is this key.

        Parameters
        ----------
        hint_classname : str
            **Name** (i.e., fully-qualified name of the user-defined class) of
            this hint to be resolved as a forward reference.

        Returns
        ----------
        type
            User-defined class whose fully-qualified name is this missing key.

        Raises
        ----------
        BeartypeCallHintForwardRefException
            If either:

            * This name is *not* a syntactically valid fully-qualified
              classname.
            * *No* module prefixed this name exists.
            * An importable module prefixed by this name exists *but* this
              module declares no attribute by this name.
            * The module attribute to which this name refers is *not* an
              isinstanceable class.
        '''

        # Module attribute whose fully-qualified name is this forward
        # reference, dynamically imported at callable call time.
        hint_class: type = import_module_attr(
            module_attr_name=hint_classname,
            exception_cls=BeartypeCallHintForwardRefException,
            exception_prefix='Forward reference ',
        )

        # If this attribute is *NOT* an isinstanceable class, raise an
        # exception.
        die_unless_type_isinstanceable(
            cls=hint_class,
            exception_cls=BeartypeCallHintForwardRefException,
            exception_prefix=f'Forward reference "{hint_classname}" referent ',
        )
        # Else, this hint is an isinstanceable class.

        # Return this class. The superclass dict.__getitem__() dunder method
        # then implicitly maps the passed missing key to this class by
        # effectively assigning this name to this class: e.g.,
        #     self[hint_classname] = hint_class
        return hint_class # type: ignore[return-value]
# ....................{ SINGLETONS }....................
bear_typistry = Beartypistry()
'''
**Beartypistry** (i.e., singleton dictionary mapping from the fully-qualified
classnames of all type hints annotating callables decorated by the
:func:`beartype.beartype` decorator to those types).**
See Also
----------
:class:`Beartypistry`
Further details.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype exception raisers** (i.e., high-level callables raising
human-readable exceptions called by :func:`beartype.beartype`-decorated
callables on the first invalid parameter or return value failing a type-check
against the PEP-compliant type hint annotating that parameter or return).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: Generalizing the "random_int" concept (i.e., the optional "random_int"
#parameter accepted by the get_beartype_violation() function) that enables
#O(1) rather than O(n) exception handling to containers that do *NOT* provide
#efficient random access like mappings and sets will be highly non-trivial.
#While there exist a number of alternative means of implementing that
#generalization, the most reasonable *BY FAR* is probably to:
#
#* Embed additional assignment expressions in the type-checking tests generated
# by the make_func_wrapper_code() function that uniquely store the value of each
# item, key, or value returned by each access of a non-indexable container
# iterator into a new unique local variable. Note this unavoidably requires:
# * Adding a new index to the "hint_curr_meta" tuples internally created by
# that function -- named, say, "_HINT_META_INDEX_ITERATOR_NAME". The value
# of the tuple item at this index should either be:
# * If the currently iterated type hint is a non-indexable container, the
# name of the new unique local variable assigned to by this assignment
# expression whose value is obtained from the iterator cached for that
# container.
# * Else, "None".
# Actually... hmm. Perhaps we only need a new local variable
# "iterator_nonsequence_names" whose value is a cached "FixedList" of
# sufficiently large size (so, "FIXED_LIST_SIZE_MEDIUM"?). We could then simply
# iteratively insert the names of the wrapper-specific new unique local
# variables into this list.
# Actually... *WAIT.* Is all we need a single counter initialized to, say:
# iterators_nonsequence_len = 0
# We then both use that counter to:
# * Uniquify the names of these wrapper-specific new unique local variables
# during iteration over type hints.
# * Trivially generate a code snippet passing a list of these names to the
# "iterators_nonsequence" parameter of get_beartype_violation() function
# after iteration over type hints.
# Right. That looks like The Way, doesn't it? This would seem to be quite a
# bit easier than we'd initially thought, which is always nice. Oi!
# * Python >= 3.8, but that's largely fine. Python 3.6 and 3.7 are
# increasingly obsolete in 2021.
#* Add a new optional "iterators_nonsequence" parameter to the
# get_beartype_violation() function, accepting either:
# * If the current parameter or return of the parent wrapper function was
# annotated with one or more non-indexable container type hints, a *LIST* of
# the *VALUES* of all unique local variables assigned to by assignment
# expressions in that parent wrapper function. These values were obtained
# from the iterators cached for those containers. To enable these exception
# handlers to efficiently treat this list like a FIFO stack (e.g., with the
# list.pop() method), this list should be sorted in the reverse order that
# these assignment expressions are defined in.
#* Refactor exception handlers to then preferentially retrieve non-indexable
# container items in O(1) time from this stack rather than simply iterating
# over all container items in O(n) brute-force time. Obviously, extreme care
# must be taken here to ensure that this exception handling algorithm visits
# containers in the exact same order as visited by our testing algorithm.
# ....................{ IMPORTS }....................
from beartype.meta import URL_ISSUES
from beartype.roar._roarexc import (
BeartypeCallHintViolation,
BeartypeCallHintParamViolation,
BeartypeCallHintReturnViolation,
_BeartypeCallHintPepRaiseException,
_BeartypeCallHintPepRaiseDesynchronizationException,
)
from beartype.typing import (
Callable,
Dict,
NoReturn,
Optional,
)
from beartype._cave._cavemap import NoneTypeOr
from beartype._conf.confcls import BeartypeConf
from beartype._data.hint.pep.sign.datapepsigncls import HintSign
from beartype._data.hint.pep.sign.datapepsigns import (
HintSignAnnotated,
HintSignForwardRef,
HintSignGeneric,
HintSignLiteral,
HintSignNoReturn,
HintSignTuple,
HintSignType,
)
from beartype._data.hint.pep.sign.datapepsignset import (
HINT_SIGNS_SEQUENCE_ARGS_1,
HINT_SIGNS_ORIGIN_ISINSTANCEABLE,
HINT_SIGNS_UNION,
)
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._util.errorutilcolor import (
color_hint,
color_repr,
strip_text_ansi_if_configured,
)
from beartype._decor._error._util.errorutiltext import (
prefix_callable_decorated_arg_value,
prefix_callable_decorated_return_value,
)
from beartype._util.hint.utilhinttest import die_unless_hint
from beartype._util.text.utiltextmunge import suffix_unless_suffixed
from beartype._util.text.utiltextrepr import represent_object
from beartype._data.datatyping import TypeException
# ....................{ GLOBALS }....................
# Initialized with automated inspection below in the _init() function.
PEP_HINT_SIGN_TO_GET_CAUSE_FUNC: Dict[
HintSign, Callable[[ViolationCause], ViolationCause]] = {}
'''
Dictionary mapping each **sign** (i.e., arbitrary object uniquely identifying a
PEP-compliant type) to a private getter function defined by this submodule
whose signature matches that of the :func:`_find_cause` function and
which is dynamically dispatched by that function to describe type-checking
failures specific to that unsubscripted :mod:`typing` attribute.
'''
# ....................{ GETTERS }....................
def get_beartype_violation(
    # Mandatory parameters.
    func: Callable,
    conf: BeartypeConf,
    pith_name: str,
    pith_value: object,
    # Optional parameters.
    random_int: Optional[int] = None,
) -> BeartypeCallHintViolation:
    '''
    Human-readable exception detailing the failure of the parameter with the
    passed name *or* return if this name is the magic string ``return`` of the
    passed decorated function to satisfy the PEP-compliant type hint
    annotating this parameter or return.

    This function intentionally returns rather than raises this exception. Why?
    Because the ignorable stack frame encapsulating the call of the parent
    type-checking wrapper function generated by the :func:`beartype.beartype`
    decorator complicates inspection of type-checking violations in tracebacks
    (especially from :mod:`pytest`, which unhelpfully recapitulates the full
    definition of this function including this docstring in those tracebacks).
    Instead, that wrapper function raises this exception directly from itself.

    Design
    ----------
    The :mod:`beartype` package actually implements two parallel PEP-compliant
    runtime type-checkers, each complementing the other by providing
    functionality unsuited for the other. These are:

    * The :mod:`beartype._check.expr` submodule, dynamically generating
      optimized PEP-compliant runtime type-checking code embedded in the body
      of the wrapper function wrapping the decorated callable. For both
      efficiency and maintainability, that code only tests whether or not a
      parameter passed to that callable or value returned from that callable
      satisfies a PEP-compliant annotation on that callable; that code does
      *not* raise human-readable exceptions in the event that value fails to
      satisfy that annotation. Instead, that code defers to...
    * This function, performing unoptimized PEP-compliant runtime type-checking
      generically applicable to all wrapper functions. The aforementioned
      code calls this function only in the event that value fails to satisfy
      that annotation, in which case this function then returns a human-readable
      exception after discovering the underlying cause of this type failure by
      recursively traversing that value and annotation. While efficiency is the
      foremost focus of this package, efficiency is irrelevant during exception
      handling -- which typically only occurs under infrequent edge cases.
      Likewise, while raising this exception *would* technically be feasible
      from the aforementioned code, doing so proved sufficiently non-trivial,
      fragile, and ultimately unmaintainable to warrant offloading to this
      function universally callable from all wrapper functions.

    Parameters
    ----------
    func : CallableTypes
        Decorated callable to raise this exception from.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all flags, options, settings, and other metadata configuring the
        current decoration of the decorated callable or class).
    pith_name : str
        Either:

        * If the object failing to satisfy this hint is a passed parameter, the
          name of this parameter.
        * Else, the magic string ``return`` implying this object to be the
          value returned from this callable.
    pith_value : object
        Passed parameter or returned value violating this hint.
    random_int: Optional[int]
        **Pseudo-random integer** (i.e., unsigned 32-bit integer
        pseudo-randomly generated by the parent :func:`beartype.beartype`
        wrapper function in type-checking randomly indexed container items by
        the current call to that function) if that function generated such an
        integer *or* ``None`` otherwise (i.e., if that function generated *no*
        such integer). Note that this parameter critically governs whether this
        exception handler runs in constant or linear time. Specifically, if
        this parameter is:

        * An integer, this handler runs in **constant time.** Since there
          exists a one-to-one relation between this integer and the random
          container item(s) type-checked by the parent
          :func:`beartype.beartype` wrapper function, receiving this integer
          enables this handler to efficiently re-type-check the same random
          container item(s) type-checked by the parent in constant time rather
          than type-checking all container items in linear time.
        * ``None``, this handler runs in **linear time.**

        Defaults to ``None``, implying this exception handler runs in linear
        time by default.

    Returns
    ----------
    BeartypeCallHintViolation
        Human-readable exception detailing the failure of this parameter or
        return to satisfy the PEP-compliant type hint annotating this parameter
        or return value, guaranteed to be an instance of either:

        * :class:`BeartypeCallHintParamViolation`, if the object failing to
          satisfy this hint is a parameter.
        * :class:`BeartypeCallHintReturnViolation`, if the object failing to
          satisfy this hint is a return.

    Raises
    ----------
    BeartypeDecorHintPepException
        If the type hint annotating this object is *not* PEP-compliant.
    _BeartypeCallHintPepRaiseException
        If the parameter or return value with the passed name is unannotated.
    _BeartypeCallHintPepRaiseDesynchronizationException
        If this pith actually satisfies this hint, implying either:

        * The parent wrapper function generated by the :func:`beartype.beartype`
          decorator type-checking this pith triggered a false negative by
          erroneously misdetecting this pith as failing this type check.
        * This child helper function re-type-checking this pith triggered a
          false positive by erroneously misdetecting this pith as satisfying
          this type check when in fact this pith fails to do so.
    '''
    assert callable(func), f'{repr(func)} uncallable.'
    assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'
    assert isinstance(pith_name, str), f'{repr(pith_name)} not string.'
    assert isinstance(random_int, NoneTypeOr[int]), (
        f'{repr(random_int)} neither integer nor "None".')
    # print('''get_beartype_violation(
    #     func={!r},
    #     conf={!r},
    #     pith_name={!r},
    #     pith_value={!r}',
    # )'''.format(func, conf, pith_name, pith_value))

    # ....................{ LOCALS }....................
    # Type of exception to be raised, deferred until the parameter-or-return
    # disambiguation below.
    exception_cls: TypeException = None  # type: ignore[assignment]

    # Human-readable label describing this parameter or return value.
    exception_prefix: str = None  # type: ignore[assignment]

    # If the name of this parameter is the magic string implying the passed
    # object to be a return value, set the above local variables appropriately.
    if pith_name == 'return':
        exception_cls = BeartypeCallHintReturnViolation
        exception_prefix = prefix_callable_decorated_return_value(
            func=func, return_value=pith_value)
    # Else, the passed object is a parameter. In this case, set the above local
    # variables appropriately.
    else:
        exception_cls = BeartypeCallHintParamViolation
        exception_prefix = prefix_callable_decorated_arg_value(
            func=func, arg_name=pith_name, arg_value=pith_value)

    # ....................{ HINTS }....................
    # If this parameter or return value is unannotated, raise an exception.
    #
    # Note that this should *NEVER* occur, as the caller guarantees this
    # parameter or return value to be annotated. Nonetheless, since callers
    # could deface the "__annotations__" dunder dictionary without our
    # knowledge or permission, precautions are warranted.
    if pith_name not in func.__annotations__:
        raise _BeartypeCallHintPepRaiseException(
            f'{exception_prefix}unannotated.')
    # Else, this parameter or return value is annotated.

    # PEP-compliant type hint annotating this parameter or return value.
    hint = func.__annotations__[pith_name]

    # If this hint is *NOT* the PEP 484-compliant "typing.NoReturn" type hint
    # permitted *ONLY* as a return annotation, this is a standard type hint
    # generally supported by both parameters and return values. In this case...
    if hint is not NoReturn:
        # If this hint is unsupported, raise an exception.
        die_unless_hint(hint=hint, exception_prefix=exception_prefix)
        # Else, this type hint is supported.

    # ....................{ CAUSE }....................
    # Cause describing the failure of this pith to satisfy this hint,
    # recursively found by traversing this pith against this hint.
    violation_cause = ViolationCause(
        func=func,
        conf=conf,
        pith=pith_value,
        hint=hint,
        cause_indent='',
        exception_prefix=exception_prefix,
        random_int=random_int,
    ).find_cause()

    # If this pith satisfies this hint, *SOMETHING HAS GONE TERRIBLY AWRY.* In
    # theory, this should never happen, as the parent wrapper function
    # performing type checking should *ONLY* call this child helper function
    # when this pith does *NOT* satisfy this hint. In this case, raise an
    # exception encouraging the end user to submit an upstream issue with us.
    if not violation_cause.cause_str_or_none:
        pith_value_repr = represent_object(
            obj=pith_value, max_len=_CAUSE_TRIM_OBJECT_REPR_MAX_LEN)
        raise _BeartypeCallHintPepRaiseDesynchronizationException(
            f'{exception_prefix}violates type hint {color_hint(repr(hint))}, '
            f'but utility function get_beartype_violation() '
            f'erroneously suggests this object satisfies this hint. '
            f'Please report this desynchronization failure to '
            f'the beartype issue tracker ({URL_ISSUES}) with '
            f'the accompanying exception traceback and '
            f'the representation of this object:\n'
            f'{color_repr(pith_value_repr)}'
        )
    # Else, this pith violates this hint as expected and as required for sanity.

    # This failure suffixed by a period if *NOT* yet suffixed by a period.
    violation_cause_suffixed = suffix_unless_suffixed(
        text=violation_cause.cause_str_or_none, suffix='.')

    # List of the one or more culprits responsible for this violation,
    # initialized to the passed parameter or returned value violating this hint.
    violation_culprits = [pith_value,]

    # If the actual object directly responsible for this violation is *NOT* the
    # passed parameter or returned value indirectly violating this hint, then
    # the latter is almost certainly a container transitively containing the
    # former as an item. In this case, add this item to this list as well.
    if pith_value is not violation_cause.pith:
        violation_culprits.append(violation_cause.pith)
    # Else, the actual object directly responsible for this violation is the
    # passed parameter or returned value indirectly violating this hint. In this
    # case, avoid adding duplicate items to this list.

    # ....................{ EXCEPTION }....................
    # Exception message embedding this cause.
    exception_message = (
        f'{exception_prefix}violates type hint {color_hint(repr(hint))}, as '
        f'{violation_cause_suffixed}'
    )

    #FIXME: Unit test us up, please.
    # Strip all ANSI escape sequences from this message if requested by this
    # external user-defined configuration.
    exception_message = strip_text_ansi_if_configured(
        text=exception_message, conf=conf)

    #FIXME: Unit test that the caller receives the expected culprit, please.
    # Exception of the desired class embedding this cause.
    exception = exception_cls(  # type: ignore[misc]
        message=exception_message,
        culprits=tuple(violation_culprits),
    )

    # Return this exception to the @beartype-generated type-checking wrapper
    # (which directly calls this function), which will then squelch the
    # ignorable stack frame encapsulating that call to this function by raising
    # this exception directly from that wrapper.
    return exception
# ....................{ PRIVATE ~ constants }....................
# Assuming a line length of 80 characters, this magic number truncates
# arbitrary object representations to 100 lines (i.e., 8000/80), which seems
# more than reasonable and (possibly) not overly excessive.
_CAUSE_TRIM_OBJECT_REPR_MAX_LEN = 8000
'''
Maximum length of arbitrary object representations suffixing human-readable
strings returned by the :func:`_find_cause` getter function, intended to
be sufficiently long to assist in identifying type-check failures but not so
excessively long as to prevent human-readability.
'''
# ....................{ PRIVATE ~ initializers }....................
def _init() -> None:
    '''
    Initialize this submodule by populating the module-scoped
    :data:`PEP_HINT_SIGN_TO_GET_CAUSE_FUNC` dispatch table, mapping each
    supported sign to the getter function describing violations of hints
    identified by that sign.
    '''

    # Defer heavyweight imports until this submodule is actually initialized.
    from beartype._decor._error._errortype import (
        find_cause_instance_type_forwardref,
        find_cause_subclass_type,
        find_cause_type_instance_origin,
    )
    from beartype._decor._error._pep._pep484._errornoreturn import (
        find_cause_noreturn)
    from beartype._decor._error._pep._pep484._errorunion import (
        find_cause_union)
    from beartype._decor._error._pep._pep484585._errorgeneric import (
        find_cause_generic)
    from beartype._decor._error._pep._pep484585._errorsequence import (
        find_cause_sequence_args_1,
        find_cause_tuple,
    )
    from beartype._decor._error._pep._errorpep586 import (
        find_cause_literal)
    from beartype._decor._error._pep._errorpep593 import (
        find_cause_annotated)

    # Generalized fallback: map every sign originating from an isinstanceable
    # type to the shallow origin-type getter *BEFORE* any other mappings, so
    # that the more specific mappings below override this fallback.
    for sign in HINT_SIGNS_ORIGIN_ISINSTANCEABLE:
        PEP_HINT_SIGN_TO_GET_CAUSE_FUNC[sign] = find_cause_type_instance_origin

    # Map every single-argument sequence sign to the sequence-specific getter.
    for sign in HINT_SIGNS_SEQUENCE_ARGS_1:
        PEP_HINT_SIGN_TO_GET_CAUSE_FUNC[sign] = find_cause_sequence_args_1

    # Map every union sign to the union-specific getter.
    for sign in HINT_SIGNS_UNION:
        PEP_HINT_SIGN_TO_GET_CAUSE_FUNC[sign] = find_cause_union

    # Lastly, map each sign handled by a dedicated getter to that getter,
    # intentionally overriding any fallback mappings registered above.
    PEP_HINT_SIGN_TO_GET_CAUSE_FUNC.update({
        HintSignAnnotated: find_cause_annotated,
        HintSignForwardRef: find_cause_instance_type_forwardref,
        HintSignGeneric: find_cause_generic,
        HintSignLiteral: find_cause_literal,
        HintSignNoReturn: find_cause_noreturn,
        HintSignTuple: find_cause_tuple,
        HintSignType: find_cause_subclass_type,
    })

# Initialize this submodule.
_init()
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype type-checking error cause sleuth** (i.e., object recursively
fabricating the human-readable string describing the failure of the pith
associated with this object to satisfy this PEP-compliant type hint also
associated with this object) classes.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: *YIKES.* Tragically, the existing ViolationCause.find_cause() method
#completely fails to suffice to define the "culprit" parameter for the
#"BeartypeCallHintViolation" exception. We'll need to fundamentally refactor
#*ALL* of this as follows:
#* In the get_beartype_violation() function:
# * Set the "culprit" parameter for the "BeartypeCallHintViolation" exception
# as follows:
# cause = ViolationCause(...).find_cause()
# culprit = cause.pith # <-- pretty sure that's right... pretty. sure.
#
#Extremely non-trivial, but extremely necessary. Let's do this, everybody.
#FIXME: The recursive "ViolationCause" class strongly overlaps with the equally
#recursive (and substantially superior) "beartype.door.TypeHint" class. Ideally:
#* Define a new private "beartype.door._doorerror" submodule.
#* Shift the "ViolationCause" class to
# "beartype.door._doorerror._TypeHintUnbearability".
#* Shift the _TypeHintUnbearability.find_cause() method to a new
# *PRIVATE* TypeHint._find_cause() method.
#* Preserve most of the remainder of the "_TypeHintUnbearability" class as a
# dataclass encapsulating metadata describing the current type-checking
# violation. That metadata (e.g., "cause_indent") is inappropriate for
# general-purpose type hints. Exceptions include:
# * "hint", "hint_sign", and "hint_childs" -- all of which are subsumed by the
# "TypeHint" dataclass and should thus be excised.
#* Refactor the TypeHint._find_cause() method to accept an instance of
# the "_TypeHintUnbearability" dataclass: e.g.,
# class TypeHint(...):
# def _get_unbearability_cause_or_none(
# self, unbearability: _TypeHintUnbearability) -> Optional[str]:
# ...
#* Refactor existing find_cause_*() getters (e.g.,
# find_cause_sequence_args_1(), find_cause_union()) into
# _get_unbearability_cause_or_none() methods of the corresponding "TypeHint"
# subclasses, please.
#
#This all seems quite reasonable. Now, let's see whether it is. *gulp*
# ....................{ IMPORTS }....................
from beartype.roar._roarexc import _BeartypeCallHintPepRaiseException
from beartype.typing import (
Any,
Callable,
Optional,
Tuple,
)
from beartype._cave._cavemap import NoneTypeOr
from beartype._conf.confcls import BeartypeConf
from beartype._data.hint.pep.sign.datapepsignset import (
HINT_SIGNS_SUPPORTED_DEEP,
HINT_SIGNS_ORIGIN_ISINSTANCEABLE,
)
from beartype._util.hint.pep.utilpepget import (
get_hint_pep_args,
get_hint_pep_sign,
)
from beartype._util.hint.pep.utilpeptest import (
is_hint_pep,
is_hint_pep_args,
)
from beartype._check.conv.convsanify import sanify_hint_child
from beartype._util.hint.utilhinttest import is_hint_ignorable
# ....................{ CLASSES }....................
class ViolationCause(object):
    '''
    **Type-checking error cause sleuth** (i.e., object recursively fabricating
    the human-readable string describing the failure of the pith associated
    with this object to satisfy this PEP-compliant type hint also associated
    with this object).

    Attributes
    ----------
    cause_indent : str
        **Indentation** (i.e., string of zero or more spaces) preceding each
        line of the string returned by this getter if this string spans
        multiple lines *or* ignored otherwise (i.e., if this string is instead
        embedded in the current line).
    cause_str_or_none : Optional[str]
        If this pith either:

        * Violates this hint, a human-readable string describing this violation.
        * Satisfies this hint, ``None``.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all flags, options, settings, and other metadata configuring the
        current decoration of the decorated callable or class).
    exception_prefix : str
        Human-readable label describing the parameter or return value from
        which this object originates, typically embedded in exceptions raised
        from this getter in the event of unexpected runtime failure.
    func : Callable
        Decorated callable generating this type-checking error.
    hint_sign : Any
        Unsubscripted :mod:`typing` attribute identifying this hint if this hint
        is PEP-compliant *or* ``None`` otherwise.
    hint_childs : Optional[Tuple]
        Either:

        * If this hint is PEP-compliant, the possibly empty tuple of all
          arguments subscripting (indexing) this hint.
        * Else, ``None``.
    pith : Any
        Arbitrary object to be validated.
    random_int: Optional[int]
        **Pseudo-random integer** (i.e., unsigned 32-bit integer
        pseudo-randomly generated by the parent :func:`beartype.beartype`
        wrapper function in type-checking randomly indexed container items by
        the current call to that function) if that function generated such an
        integer *or* ``None`` otherwise (i.e., if that function generated *no*
        such integer). See the same parameter accepted by the higher-level
        :func:`beartype._decor._error.errormain.get_beartype_violation`
        function for further details.

    Attributes (Private)
    ----------
    _hint : Any
        Type hint to validate this object against.
    '''

    # ..................{ CLASS VARIABLES }..................
    # Slot *ALL* instance variables defined on this object to both:
    # * Prevent accidental declaration of erroneous instance variables.
    # * Minimize space and time complexity.
    __slots__ = (
        'cause_indent',
        'cause_str_or_none',
        'conf',
        'exception_prefix',
        'func',
        'hint_sign',
        'hint_childs',
        'pith',
        'random_int',
        '_hint',
    )

    _INIT_PARAM_NAMES = frozenset((
        'cause_indent',
        'cause_str_or_none',
        'conf',
        'exception_prefix',
        'func',
        'hint',
        'pith',
        'random_int',
    ))
    '''
    Frozen set of the names of all parameters accepted by the :meth:`__init__`
    method, defined as a set to enable efficient membership testing. Consulted
    by the :meth:`permute` method to validate passed keyword arguments.
    '''

    # ..................{ INITIALIZERS }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Whenever adding, deleting, or renaming any parameter accepted by
    # this method, make similar changes to the "_INIT_PARAM_NAMES" set above.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    def __init__(
        self,
        # Mandatory parameters.
        func: Callable,
        conf: BeartypeConf,
        pith: Any,
        hint: Any,
        cause_indent: str,
        exception_prefix: str,
        random_int: Optional[int],
        # Optional parameters.
        cause_str_or_none: Optional[str] = None,
    ) -> None:
        '''
        Initialize this object.

        See the class docstring for the semantics of each passed parameter.
        '''
        assert callable(func), f'{repr(func)} not callable.'
        assert isinstance(conf, BeartypeConf), (
            f'{repr(conf)} not configuration.')
        assert isinstance(cause_indent, str), (
            f'{repr(cause_indent)} not string.')
        assert isinstance(exception_prefix, str), (
            f'{repr(exception_prefix)} not string.')
        assert isinstance(random_int, NoneTypeOr[int]), (
            f'{repr(random_int)} not integer or "None".')
        assert isinstance(cause_str_or_none, NoneTypeOr[str]), (
            f'{repr(cause_str_or_none)} not string or "None".')

        # Classify all passed parameters.
        self.func = func
        self.conf = conf
        self.pith = pith
        self.cause_indent = cause_indent
        self.exception_prefix = exception_prefix
        self.random_int = random_int
        self.cause_str_or_none = cause_str_or_none

        # Nullify all remaining parameters for safety.
        self.hint_sign: Any = None
        self.hint_childs: Tuple = None  # type: ignore[assignment]

        # Classify this hint *AFTER* initializing all parameters above, as the
        # "hint" property setter below depends on the "conf" and
        # "exception_prefix" instance variables already being classified.
        self.hint = hint

    # ..................{ PROPERTIES }..................
    @property
    def hint(self) -> Any:
        '''
        Type hint to validate this object against.
        '''
        return self._hint

    @hint.setter
    def hint(self, hint: Any) -> None:
        '''
        Set the type hint to validate this object against, additionally
        refreshing the derived :attr:`hint_sign` and :attr:`hint_childs`
        instance variables as a side effect.
        '''

        # Sanitize this hint if unsupported by @beartype in its current form
        # (e.g., "numpy.typing.NDArray[...]") to another form supported by
        # @beartype (e.g., "typing.Annotated[numpy.ndarray, beartype.vale.*]").
        hint = sanify_hint_child(
            hint=hint,
            conf=self.conf,
            exception_prefix=self.exception_prefix,
        )

        # If this hint is PEP-compliant...
        if is_hint_pep(hint):
            # Arbitrary object uniquely identifying this hint.
            self.hint_sign = get_hint_pep_sign(hint)

            # Tuple of the zero or more arguments subscripting this hint.
            self.hint_childs = get_hint_pep_args(hint)
        # Else, this hint is PEP-noncompliant (e.g., isinstanceable class).

        # Classify this hint *AFTER* all other assignments above.
        self._hint = hint

    # ..................{ GETTERS }..................
    def find_cause(self) -> 'ViolationCause':
        '''
        Output cause describing whether the pith of this input cause either
        satisfies or violates the type hint of this input cause.

        Design
        ----------
        This method is intentionally generalized to support objects both
        satisfying and *not* satisfying hints as equally valid use cases. While
        the parent
        :func:`beartype._decor._error.errormain.get_beartype_violation` function
        calling this method is *always* passed an object *not* satisfying the
        passed hint, this method is under no such constraints. Why? Because this
        method is also called to find which of an arbitrary number of objects
        transitively nested in the object passed to
        :func:`beartype._decor._error.errormain.get_beartype_violation` fails to
        satisfy the corresponding hint transitively nested in the hint passed to
        that function.

        For example, consider the PEP-compliant type hint ``List[Union[int,
        str]]`` describing a list whose items are either integers or strings
        and the list ``list(range(256)) + [False,]`` consisting of the integers
        0 through 255 followed by boolean ``False``. Since that list is a
        standard sequence, the
        :func:`._peperrorsequence.find_cause_sequence_args_1`
        function must decide the cause of this list's failure to comply with
        this hint by finding the list item that is neither an integer nor a
        string, implemented by iteratively passing each list item to the
        :func:`._peperrorunion.find_cause_union` function. Since
        the first 256 items of this list are integers satisfying this hint,
        :func:`._peperrorunion.find_cause_union` returns a dataclass instance
        whose :attr:`cause` field is ``None`` up to
        :func:`._peperrorsequence.find_cause_sequence_args_1`
        before finally finding the non-compliant boolean item and returning the
        human-readable cause.

        Returns
        ----------
        ViolationCause
            Output cause type-checking this pith against this type hint.

        Raises
        ----------
        _BeartypeCallHintPepRaiseException
            If this type hint is either:

            * PEP-noncompliant (e.g., tuple union).
            * PEP-compliant but no getter function has been implemented to
              handle this category of PEP-compliant type hint yet.
        '''

        # If this hint is ignorable, all possible objects satisfy this hint.
        # Since this hint *CANNOT* (by definition) be the cause of this failure,
        # return the same cause as is.
        if is_hint_ignorable(self.hint):
            return self
        # Else, this hint is unignorable.

        # Getter function returning the desired string.
        cause_finder: Callable[[ViolationCause], ViolationCause] = None  # type: ignore[assignment]

        # If *NO* sign uniquely identifies this hint, this hint is either
        # PEP-noncompliant *OR* only contextually PEP-compliant in certain
        # specific use cases. In either case...
        if self.hint_sign is None:
            # If this hint is a tuple union...
            if isinstance(self.hint, tuple):
                # Avoid circular import dependencies.
                from beartype._decor._error._errortype import (
                    find_cause_instance_types_tuple)

                # Defer to the getter function specific to tuple unions.
                cause_finder = find_cause_instance_types_tuple
            # Else, this hint is *NOT* a tuple union. In this case, assume this
            # hint to be an isinstanceable class. If this is *NOT* the case, the
            # getter deferred to below raises a human-readable exception.
            else:
                # Avoid circular import dependencies.
                from beartype._decor._error._errortype import (
                    find_cause_instance_type)

                # Defer to the getter function specific to classes.
                cause_finder = find_cause_instance_type
        # Else, this hint is PEP-compliant.
        #
        # If this hint...
        elif (
            # Originates from an origin type and may thus be shallowly
            # type-checked against that type *AND* is either...
            self.hint_sign in HINT_SIGNS_ORIGIN_ISINSTANCEABLE and (
                # Unsubscripted *OR*...
                not is_hint_pep_args(self.hint) or
                #FIXME: Remove this branch *AFTER* deeply supporting all hints.
                # Currently unsupported with deep type-checking...
                self.hint_sign not in HINT_SIGNS_SUPPORTED_DEEP
            )
        # Then this hint is both unsubscripted and originating from a standard
        # type origin. In this case, this hint was type-checked shallowly.
        ):
            # Avoid circular import dependencies.
            from beartype._decor._error._errortype import (
                find_cause_type_instance_origin)

            # Defer to the getter function supporting hints originating from
            # origin types.
            cause_finder = find_cause_type_instance_origin
        # Else, this hint is either subscripted *OR* unsubscripted but not
        # originating from a standard type origin. In either case, this hint
        # was type-checked deeply.
        else:
            # Avoid circular import dependencies.
            from beartype._decor._error.errormain import (
                PEP_HINT_SIGN_TO_GET_CAUSE_FUNC)

            # Getter function returning the desired string for this attribute
            # if any *OR* "None" otherwise.
            cause_finder = PEP_HINT_SIGN_TO_GET_CAUSE_FUNC.get(
                self.hint_sign, None)  # type: ignore[arg-type]

            # If no such function has been implemented to handle this attribute
            # yet, raise an exception.
            if cause_finder is None:
                raise _BeartypeCallHintPepRaiseException(
                    f'{self.exception_prefix} type hint '
                    f'{repr(self.hint)} unsupported (i.e., no '
                    f'"find_cause_"-prefixed getter function defined '
                    f'for this category of hint).'
                )
            # Else, a getter function has been implemented to handle this
            # attribute.

        # Call this getter function with ourselves and return the string
        # returned by this getter.
        return cause_finder(self)

    # ..................{ PERMUTERS }..................
    def permute(self, **kwargs) -> 'ViolationCause':
        '''
        Shallow copy of this object such that each the passed keyword argument
        overwrites the instance variable of the same name in this copy.

        Parameters
        ----------
        Keyword arguments of the same name and type as instance variables of
        this object (e.g., ``hint``, ``pith``).

        Returns
        ----------
        ViolationCause
            Shallow copy of this object such that each keyword argument
            overwrites the instance variable of the same name in this copy.

        Raises
        ----------
        _BeartypeCallHintPepRaiseException
            If the name of any passed keyword argument is *not* the name of an
            existing instance variable of this object.

        Examples
        ----------
            >>> sleuth = ViolationCause(
            ...     pith=[42,],
            ...     hint=typing.List[int],
            ...     cause_indent='',
            ...     exception_prefix='List of integers',
            ... )
            >>> sleuth_copy = sleuth.permute(pith=[24,])
            >>> sleuth_copy.pith
            [24,]
            >>> sleuth_copy.hint
            typing.List[int]
        '''

        # For the name of each passed keyword argument...
        for arg_name in kwargs.keys():
            # If this name is *NOT* that of a parameter accepted by the
            # __init__() method, raise an exception.
            if arg_name not in self._INIT_PARAM_NAMES:
                raise _BeartypeCallHintPepRaiseException(
                    f'{self.__class__}.__init__() parameter '
                    f'{arg_name} unrecognized.'
                )

        # For the name of each parameter accepted by the __init__() method...
        for arg_name in self._INIT_PARAM_NAMES:
            # If this parameter was *NOT* explicitly passed by the caller,
            # default this parameter to its current value from this object.
            if arg_name not in kwargs:
                kwargs[arg_name] = getattr(self, arg_name)

        # Return a new instance of this class initialized with these arguments.
        return ViolationCause(**kwargs)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype class type hint violation describers** (i.e., functions returning
human-readable strings explaining violations of type hints that are standard
isinstanceable classes rather than PEP-specific objects).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeCallHintForwardRefException
from beartype.roar._roarexc import _BeartypeCallHintPepRaiseException
from beartype._cave._cavefast import TestableTypes
from beartype._data.hint.pep.sign.datapepsigns import (
HintSignForwardRef,
HintSignType,
)
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._util.errorutilcolor import color_hint
from beartype._util.cls.utilclstest import is_type_subclass
from beartype._util.cls.pep.utilpep3119 import (
die_unless_type_isinstanceable,
die_unless_type_issubclassable,
)
from beartype._util.hint.nonpep.utilnonpeptest import (
die_unless_hint_nonpep_tuple)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585ref import (
import_pep484585_forwardref_type_relative_to_object)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585type import (
get_hint_pep484585_subclass_superclass)
from beartype._util.hint.pep.utilpepget import (
get_hint_pep_origin_type_isinstanceable_or_none)
from beartype._util.text.utiltextjoin import join_delimited_disjunction_types
from beartype._util.text.utiltextlabel import label_type
from beartype._decor._error._util.errorutiltext import represent_pith
# ....................{ GETTERS ~ instance : type }....................
def find_cause_instance_type(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either is
    or is not an instance of the isinstanceable type of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'

    # Raise an exception unless this hint is an isinstanceable type.
    die_unless_type_isinstanceable(
        cls=cause.hint,
        exception_cls=_BeartypeCallHintPepRaiseException,
        exception_prefix=cause.exception_prefix,
    )
    # Else, this hint is an isinstanceable type.

    # Justification to be embedded in the output cause, defaulting to "None"
    # (signifying that this pith satisfies this type).
    cause_str = None

    # If this pith fails the isinstance() check against this type, replace
    # that default with a human-readable description of this failure.
    if not isinstance(cause.pith, cause.hint):
        cause_str = (
            f'{represent_pith(cause.pith)} not instance of '
            f'{color_hint(label_type(cause.hint))}'
        )

    # Return an output cause permuted from this input cause with this
    # justification.
    return cause.permute(cause_str_or_none=cause_str)
def find_cause_instance_type_forwardref(
    cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either is
    or is not an instance of the class referred to by the **forward reference
    type hint** (i.e., string whose value is the name of a user-defined class
    which has yet to be defined) of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignForwardRef, (
        f'{cause.hint_sign} not forward reference.')

    # Resolve this forward reference to the class it refers to.
    hint_referee = import_pep484585_forwardref_type_relative_to_object(
        hint=cause.hint,
        obj=cause.func,
        exception_cls=BeartypeCallHintForwardRefException,
        exception_prefix=cause.exception_prefix,
    )

    # Permute this cause to target that class and defer to the finder
    # handling isinstanceable classes. Neato!
    cause_resolved = cause.permute(hint=hint_referee)
    return find_cause_instance_type(cause_resolved)
def find_cause_type_instance_origin(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either is
    or is not an instance of the isinstanceable type underlying the
    **originative type hint** (i.e., PEP-compliant type hint originating from a
    non-:mod:`typing` class) of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'

    # Isinstanceable origin type of this hint if any *OR* "None" otherwise.
    origin_type = get_hint_pep_origin_type_isinstanceable_or_none(cause.hint)

    # This finder is only applicable to originative hints; complain otherwise.
    if origin_type is None:
        raise _BeartypeCallHintPepRaiseException(
            f'{cause.exception_prefix}type hint '
            f'{repr(cause.hint)} not originated from '
            f'isinstanceable origin type.'
        )
    # Else, this hint originates from such a type.

    # Permute this cause to target that origin type and defer to the finder
    # handling non-"typing" classes. Presto!
    return find_cause_instance_type(cause.permute(hint=origin_type))
# ....................{ GETTERS ~ instance : types }....................
def find_cause_instance_types_tuple(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either is
    or is not an instance of one or more isinstanceable types in the tuple of
    these types of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'

    # Raise an exception unless this hint is a tuple union.
    die_unless_hint_nonpep_tuple(
        hint=cause.hint,
        exception_cls=_BeartypeCallHintPepRaiseException,
        exception_prefix=cause.exception_prefix,
    )
    # Else, this hint is a tuple union.

    # If this pith is an instance of one or more types in this tuple union,
    # this pith satisfies this hint; leave the justification as "None".
    if isinstance(cause.pith, cause.hint):
        cause_str = None
    # Else, this pith is an instance of *NO* types in this tuple union. In
    # this case, describe this failure with a substring to be embedded in a
    # longer string.
    else:
        cause_str = (
            f'{represent_pith(cause.pith)} not instance of '
            f'{color_hint(join_delimited_disjunction_types(cause.hint))}'
        )

    # Return an output cause permuted from this input cause with this
    # justification.
    return cause.permute(cause_str_or_none=cause_str)
# ....................{ GETTERS ~ subclass : type }....................
def find_cause_subclass_type(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either is
    or is not a subclass of the issubclassable type of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignType, (
        f'{cause.hint_sign} not HintSignType.')

    # Superclass this pith is required to be a subclass of.
    hint_superclass = get_hint_pep484585_subclass_superclass(
        hint=cause.hint, exception_prefix=cause.exception_prefix)

    # If this superclass is neither a class nor tuple of classes, this
    # superclass *MUST* by process of elimination and the validation already
    # performed above by the get_hint_pep484585_subclass_superclass() getter be
    # a forward reference to a class. In this case...
    if not isinstance(hint_superclass, TestableTypes):
        # Reduce this superclass to the class referred to by this forward
        # reference.
        hint_superclass = import_pep484585_forwardref_type_relative_to_object(
            hint=hint_superclass,  # type: ignore[arg-type]
            obj=cause.func,
            exception_cls=BeartypeCallHintForwardRefException,
            exception_prefix=cause.exception_prefix,
        )

    # If this superclass is *NOT* issubclassable, raise an exception.
    die_unless_type_issubclassable(
        cls=hint_superclass,
        exception_cls=_BeartypeCallHintPepRaiseException,
        exception_prefix=cause.exception_prefix,
    )
    # Else, this superclass is issubclassable.
    # In either case, this superclass is now issubclassable.

    # Output cause to be returned, permuted from this input cause.
    cause_return = cause.permute()

    # If this pith subclasses this superclass, set the output cause
    # justification to "None".
    if is_type_subclass(cause_return.pith, hint_superclass):
        cause_return.cause_str_or_none = None
    # Else, this pith does *NOT* subclass this superclass. In this case...
    else:
        # Description of this superclass, defined as either...
        hint_superclass_label = (
            # If this superclass is a class, a description of this class;
            label_type(hint_superclass)
            if isinstance(hint_superclass, type) else
            # Else, this superclass is a tuple of classes. In this case, a
            # description of these classes...
            join_delimited_disjunction_types(hint_superclass)
        )

        # Human-readable string describing this failure. Note that this
        # superclass description is coloured via color_hint() for consistency
        # with the sibling finders defined above (e.g.,
        # find_cause_instance_type(), find_cause_instance_types_tuple()),
        # which likewise colour their hint descriptions.
        cause_return.cause_str_or_none = (
            f'{represent_pith(cause_return.pith)} not subclass of '
            f'{color_hint(hint_superclass_label)}'
        )

    # Return this cause.
    return cause_return
|
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype exception message color utilities** (i.e., low-level callables
conditionally accenting type-checking violation messages with ANSI escape
sequences colouring those strings when configured to do so by the
:func:`beartype.beartype`-decorated callables raising those violations).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._conf.confcls import BeartypeConf
from beartype._util.os.utilostty import is_stdout_terminal
from beartype._util.text.utiltextansi import (
ANSI_RESET,
COLOR_GREEN,
COLOR_RED,
COLOR_BLUE,
COLOR_YELLOW,
STYLE_BOLD,
strip_text_ansi,
)
# ....................{ COLOURIZERS }....................
def color_error(text: str) -> str:
    '''
    Colour the passed substring as an error.

    Parameters
    ----------
    text : str
        Text to be coloured as an error.

    Returns
    ----------
    str
        This text coloured as an error.
    '''
    assert isinstance(text, str), f'{repr(text)} not string.'

    # Wrap this text in bold red ANSI escape sequences, terminated by a reset.
    return STYLE_BOLD + COLOR_RED + text + ANSI_RESET
def color_hint(text: str) -> str:
    '''
    Colour the passed substring as a PEP-compliant type hint.

    Parameters
    ----------
    text : str
        Text to be coloured as a type hint.

    Returns
    ----------
    str
        This text coloured as a type hint.
    '''
    assert isinstance(text, str), f'{repr(text)} not string.'

    # Wrap this text in bold blue ANSI escape sequences, terminated by a reset.
    return STYLE_BOLD + COLOR_BLUE + text + ANSI_RESET
def color_repr(text: str) -> str:
    '''
    Colour the passed substring as a **representation** (i.e., machine-readable
    string returned by the :func:`repr` builtin).

    Parameters
    ----------
    text : str
        Text to be coloured as a representation.

    Returns
    ----------
    str
        This text coloured as a representation.
    '''
    assert isinstance(text, str), f'{repr(text)} not string.'

    # Wrap this text in a yellow ANSI escape sequence, terminated by a reset.
    return COLOR_YELLOW + text + ANSI_RESET
def color_type(text: str) -> str:
    '''
    Colour the passed substring as a simple class.

    Parameters
    ----------
    text : str
        Text to be coloured as a simple class.

    Returns
    ----------
    str
        This text coloured as a simple class.
    '''
    assert isinstance(text, str), f'{repr(text)} not string.'

    # Wrap this text in bold green ANSI escape sequences, terminated by a
    # reset.
    return STYLE_BOLD + COLOR_GREEN + text + ANSI_RESET
# ....................{ STRIPPERS }....................
#FIXME: Unit test us up, please.
#FIXME: Inefficient and thus non-ideal. Since efficiency isn't a pressing
#concern in an exception raiser, this is more a matter of design purity than
#anything. Still, it would be preferable to avoid embedding ANSI escape
#sequences in the cause when the user requests that rather than forcibly
#stripping those sequences out after the fact via an inefficient regex. To do
#so, we'll want to:
#* Augment the color_*() family of functions with a mandatory "conf:
# BeartypeConf" parameter.
#* Pass that parameter to *EVERY* call to one of those functions.
#* Refactor those functions to respect that parameter. The ideal means of
# doing so would probably be define in the
# "beartype._util.text.utiltextansi" submodule:
# * A new "_BeartypeTheme" dataclass mapping from style names to format
# strings embedding the ANSI escape sequences styling those styles.
# * A new pair of private "_THEME_MONOCHROME" and "_THEME_PRISMATIC"
# instances of that dataclass. The values of the "_THEME_MONOCHROME"
# dictionary should all just be the default format string: e.g.,
# _THEME_MONOCHROME = _BeartypeTheme(
# format_error='{text}',
# ...
# )
#
# _THEME_PRISMATIC = _BeartypeTheme(
# format_error=f'{_STYLE_BOLD}{_COLOUR_RED}{{text}}{_COLOUR_RESET}',
# ...
# )
# * A new "_THEME_DEFAULT" instance of that dataclass conditionally defined
# as either "_THEME_MONOCHROME" or "_THEME_PRISMATIC" depending on
# whether stdout is attached to a TTY or not. Alternately, to avoid
# performing that somewhat expensive logic at module scope (and thus on
# initial beartype importation), it might be preferable to instead define
# a new cached private getter resembling:
#
# @callable_cached
# def _get_theme_default() -> _BeartypeTheme:
# return (
# _THEME_PRISMATIC
# if is_stdout_terminal() else
# _THEME_MONOCHROME
# )
def strip_text_ansi_if_configured(text: str, conf: BeartypeConf) -> str:
    '''
    Strip all ANSI escape sequences from the passed string if the
    :attr:`BeartypeConf.is_color` instance variable of the passed beartype
    configuration instructs this function to do so.

    Specifically:

    * If ``conf.is_color is True``, this function silently reduces to a noop.
    * If ``conf.is_color is False``, this function unconditionally strips all
      ANSI escape sequences from this string.
    * If ``conf.is_color is None``, this function conditionally strips all
      ANSI escape sequences from this string only if standard output is
      currently attached to an interactive terminal.

    Parameters
    ----------
    text : str
        Text to be stripped of ANSI.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all flags, options, settings, and other metadata configuring the
        current decoration of the decorated callable or class).

    Returns
    ----------
    str
        This text stripped of ANSI.
    '''
    assert isinstance(text, str), f'{repr(text)} not string.'
    assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'

    # True only if ANSI should be stripped from this string: namely, when
    # colour is either explicitly disabled *OR* deferred to terminal detection
    # while standard output is *NOT* attached to an interactive terminal.
    is_ansi_stripped = (
        conf.is_color is False or
        (conf.is_color is None and not is_stdout_terminal())
    )

    # Return this string either stripped of ANSI or unmodified as configured.
    return strip_text_ansi(text) if is_ansi_stripped else text
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype error-handling string munging utilities** (i.e., functions returning
substrings intended to be embedded in strings explaining type hint violations).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._util.text.utiltextlabel import (
label_type,
prefix_callable_decorated,
prefix_callable_decorated_return,
)
from beartype._util.text.utiltextrepr import represent_object
from collections.abc import Callable
# ....................{ PREFIXERS }....................
def prefix_callable_decorated_arg_value(
    func: Callable, arg_name: str, arg_value: object) -> str:
    '''
    Human-readable label describing the parameter with the passed name and
    trimmed value of the passed **decorated callable** (i.e., callable wrapped
    by the :func:`beartype.beartype` decorator with a wrapper function
    type-checking that callable) suffixed by delimiting whitespace.

    Parameters
    ----------
    func : Callable
        Decorated callable to be labelled.
    arg_name : str
        Name of the parameter of this callable to be labelled.
    arg_value : object
        Value of the parameter of this callable to be labelled.

    Returns
    ----------
    str
        Human-readable label describing this parameter's name and value.
    '''
    assert isinstance(arg_name, str), f'{repr(arg_name)} not string.'

    # Avoid circular import dependencies.
    from beartype._decor._error._util.errorutilcolor import color_repr

    # "{name}={value}" depiction of this parameter, coloured as a
    # representation.
    arg_repr = color_repr(f'{arg_name}={represent_object(arg_value)}')

    # Label prefixing this depiction, describing this decorated callable.
    func_label = prefix_callable_decorated(func)

    # Create and return this label.
    return f'{func_label}parameter {arg_repr} '
def prefix_callable_decorated_return_value(
    func: Callable, return_value: object) -> str:
    '''
    Human-readable label describing the passed trimmed return value of the
    passed **decorated callable** (i.e., callable wrapped by the
    :func:`beartype.beartype` decorator with a wrapper function type-checking
    that callable) suffixed by delimiting whitespace.

    Parameters
    ----------
    func : Callable
        Decorated callable to be labelled.
    return_value : object
        Value returned by this callable to be labelled.

    Returns
    ----------
    str
        Human-readable label describing this return value.
    '''
    # Avoid circular import dependencies.
    from beartype._decor._error._util.errorutilcolor import color_repr

    # Depiction of this return value, coloured as a representation.
    return_repr = color_repr(represent_object(return_value))

    # Create and return this label.
    return f'{prefix_callable_decorated_return(func)}{return_repr} '
# ....................{ REPRESENTERS }....................
def represent_pith(pith: object) -> str:
    '''
    Human-readable description of the passed **pith** (i.e., arbitrary object
    violating the current type check) intended to be embedded in an exception
    message explaining this violation.

    Parameters
    ----------
    pith : object
        Arbitrary object violating the current type check.

    Returns
    ----------
    str
        Human-readable description of this object.
    '''
    # Avoid circular import dependencies.
    from beartype._decor._error._util.errorutilcolor import (
        color_error,
        color_repr,
    )

    # Label describing the type of this pith, coloured as an error.
    pith_type_label = color_error(label_type(type(pith)))

    # Machine-readable depiction of this pith, coloured as a representation.
    pith_repr = color_repr(represent_object(pith))

    # Create and return this description.
    return f'{pith_type_label} {pith_repr}'
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`586`-compliant **type hint violation describers** (i.e.,
functions returning human-readable strings explaining violations of
:pep:`586`-compliant :attr:`typing.Literal` type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._decor._error._errorcause import ViolationCause
from beartype._data.hint.pep.sign.datapepsigns import HintSignLiteral
from beartype._util.hint.pep.proposal.utilpep586 import (
get_hint_pep586_literals)
from beartype._util.text.utiltextjoin import join_delimited_disjunction
from beartype._decor._error._util.errorutiltext import represent_pith
# ....................{ GETTERS }....................
def find_cause_literal(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the :pep:`586`-compliant :mod:`beartype`-specific
    **literal** (i.e., :attr:`typing.Literal` type hint) of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignLiteral, (
        f'{repr(cause.hint_sign)} not "HintSignLiteral".')

    # Tuple of zero or more literal objects subscripting this hint,
    # intentionally replacing the current such tuple due to the non-standard
    # implementation of the third-party "typing_extensions.Literal" factory.
    hint_literals = get_hint_pep586_literals(
        hint=cause.hint, exception_prefix=cause.exception_prefix)

    # For each literal object subscripting this hint...
    for hint_literal in hint_literals:
        # If this pith is of the same type as *AND* equal to this literal,
        # this pith deeply satisfies this hint; return this cause unmodified.
        #
        # Note that PEP 586 explicitly requires this pith to be validated to
        # be an instance of the same type as this literal *BEFORE* validated
        # as equal to this literal, due to subtle edge cases in equality
        # comparison that could yield false positives.
        if (
            isinstance(cause.pith, type(hint_literal)) and
            cause.pith == hint_literal
        ):
            return cause
    # Else, this pith fails to satisfy this hint.

    # Tuple union of the types of all literals subscripting this hint.
    hint_literal_types = tuple(
        type(hint_literal) for hint_literal in hint_literals)

    # Shallow output cause to be returned, type-checking only whether this pith
    # is an instance of one or more of these types.
    cause_shallow = cause.permute(hint=hint_literal_types).find_cause()

    # If this pith is *NOT* such an instance, return this string.
    if cause_shallow.cause_str_or_none is not None:
        return cause_shallow
    # Else, this pith is such an instance and thus shallowly satisfies this
    # hint. Since this pith fails to satisfy this hint, this pith must by
    # deduction be unequal to all literals subscripting this hint.

    # Human-readable comma-delimited disjunction of the machine-readable
    # representations of all literal objects subscripting this hint.
    literals_repr = join_delimited_disjunction(
        repr(hint_literal) for hint_literal in hint_literals)

    # Return a deep output cause permuted from this input cause such that the
    # justification is a human-readable string describing this failure.
    return cause.permute(cause_str_or_none=(
        f'{represent_pith(cause.pith)} != {literals_repr}.'))
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`593`-compliant **type hint violation describers** (i.e.,
functions returning human-readable strings explaining violations of
:pep:`593`-compliant :attr:`typing.Annotated` type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar._roarexc import _BeartypeCallHintPepRaiseException
from beartype._data.hint.pep.sign.datapepsigns import HintSignAnnotated
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._util.errorutiltext import represent_pith
from beartype._util.hint.pep.proposal.utilpep593 import (
get_hint_pep593_metadata,
get_hint_pep593_metahint,
)
from beartype._util.text.utiltextmagic import CODE_INDENT_1
# ....................{ GETTERS }....................
def find_cause_annotated(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the :pep:`593`-compliant :mod:`beartype`-specific
    **metahint** (i.e., type hint annotating a standard class with one or more
    :class:`beartype.vale._core._valecore.BeartypeValidator` objects, each
    produced by subscripting the :class:`beartype.vale.Is` class or a subclass
    of that class) of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.

    Raises
    ----------
    _BeartypeCallHintPepRaiseException
        If any object annotating this metahint is *not* a beartype validator.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignAnnotated, (
        f'{cause.hint_sign} not "HintSignAnnotated".')

    # Defer heavyweight imports.
    from beartype.vale._core._valecore import BeartypeValidator

    # Type hint annotated by this metahint.
    metahint = get_hint_pep593_metahint(cause.hint)

    # Tuple of zero or more arbitrary objects annotating this metahint.
    hint_validators = get_hint_pep593_metadata(cause.hint)

    # Shallow output cause to be returned, type-checking only whether this pith
    # satisfies this metahint (i.e., the underlying hint stripped of all
    # annotations).
    cause_shallow = cause.permute(hint=metahint).find_cause()

    # If this pith fails to satisfy this metahint, return this cause as is.
    if cause_shallow.cause_str_or_none is not None:
        return cause_shallow
    # Else, this pith satisfies this metahint.

    # Deep output cause to be returned, permuted from this input cause.
    cause_deep = cause.permute()

    # For each beartype validator annotating this metahint...
    for hint_validator in hint_validators:
        # If this is *NOT* a beartype validator, raise an exception.
        #
        # Note that this object should already be a beartype validator, as the
        # @beartype decorator enforces this constraint at decoration time.
        if not isinstance(hint_validator, BeartypeValidator):
            raise _BeartypeCallHintPepRaiseException(
                f'{cause_deep.exception_prefix}PEP 593 type hint '
                f'{repr(cause_deep.hint)} argument {repr(hint_validator)} '
                f'not beartype validator '
                f'(i.e., "beartype.vale.Is*[...]" object).'
            )
        # Else, this is a beartype validator.
        #
        # If this pith fails to satisfy this validator and is thus the cause of
        # this failure...
        elif not hint_validator.is_valid(cause_deep.pith):
            #FIXME: Unit test this up, please.
            # Human-readable string diagnosing this failure.
            hint_diagnosis = hint_validator.get_diagnosis(
                obj=cause_deep.pith,
                indent_level_outer=CODE_INDENT_1,
                indent_level_inner='',
            )

            # Human-readable string describing this failure, embedding that
            # diagnosis on its own line(s) after the validator representation.
            cause_deep.cause_str_or_none = (
                f'{represent_pith(cause_deep.pith)} violates validator '
                f'{repr(hint_validator)}:\n'
                f'{hint_diagnosis}'
            )

            # Immediately halt iteration: only the *first* unsatisfied
            # validator is reported.
            break
        # Else, this pith satisfies this validator. Ergo, this validator is
        # *NOT* the cause of this failure. Silently continue to the next.

    # Return this output cause.
    return cause_deep
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype PEP-compliant generic type hint exception raisers** (i.e., functions
raising human-readable exceptions called by :mod:`beartype`-decorated callables
on the first invalid parameter or return value failing a type-check against the
PEP-compliant generic type hint annotating that parameter or return).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._data.hint.pep.sign.datapepsigns import HintSignGeneric
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._errortype import find_cause_instance_type
from beartype._util.hint.pep.proposal.pep484585.utilpep484585generic import (
get_hint_pep484585_generic_type,
iter_hint_pep484585_generic_bases_unerased_tree,
)
# ....................{ GETTERS }....................
def find_cause_generic(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the :pep:`484`- or :pep:`585`-compliant **generic**
    (i.e., type hint subclassing a combination of one or more of the
    :mod:`typing.Generic` superclass, the :mod:`typing.Protocol` superclass,
    and/or other :mod:`typing` non-class pseudo-superclasses) of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignGeneric, (
        f'{repr(cause.hint_sign)} not generic.')
    # print(f'[find_cause_generic] cause.pith: {cause.pith}')
    # print(f'[find_cause_generic] cause.hint [pre-reduction]: {cause.hint}')

    # Origin type originating this generic, deduced by stripping all child type
    # hints subscripting this hint from this hint.
    hint_type = get_hint_pep484585_generic_type(
        hint=cause.hint, exception_prefix=cause.exception_prefix)

    # Shallow output cause to be returned, type-checking only whether this pith
    # is instance of this origin type.
    cause_shallow = cause.permute(hint=hint_type)
    cause_shallow = find_cause_instance_type(cause_shallow)
    # print(f'[find_cause_generic] cause.hint [post-reduction]: {cause.hint}')

    # If this pith is *NOT* an instance of this type, return this cause.
    if cause_shallow.cause_str_or_none is not None:
        return cause_shallow
    # Else, this pith is an instance of this type.

    # For each unignorable unerased transitive pseudo-superclass originally
    # declared as an erased superclass of this generic...
    for hint_child in iter_hint_pep484585_generic_bases_unerased_tree(
        hint=cause.hint, exception_prefix=cause.exception_prefix):
        # Deep output cause to be returned, recursively type-checking this
        # pith against this pseudo-superclass via find_cause().
        cause_deep = cause.permute(hint=hint_child).find_cause()
        # print(f'tuple pith: {pith_item}\ntuple hint child: {hint_child}')

        # If this pseudo-superclass is the cause of this failure...
        if cause_deep.cause_str_or_none is not None:
            # Human-readable string prefixing this failure with additional
            # metadata describing this pseudo-superclass.
            cause_deep.cause_str_or_none = (
                f'generic base {repr(hint_child)} '
                f'{cause_deep.cause_str_or_none}'
            )

            # Return this cause.
            return cause_deep
        # Else, this pseudo-superclass is *NOT* the cause of this failure.
        # Silently continue to the next.
        # print(f'[find_cause_generic] Ignoring satisfied base {hint_child}...')

    # Return this cause as is. This pith satisfies both this generic itself
    # *AND* all pseudo-superclasses subclassed by this generic, implying this
    # pith to deeply satisfy this hint.
    return cause
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`484`- and :pep:`585`-compliant **sequence type hint
violation describers** (i.e., functions returning human-readable strings
explaining violations of :pep:`484`- and :pep:`585`-compliant sequence type
hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._data.hint.pep.sign.datapepsigns import HintSignTuple
from beartype._data.hint.pep.sign.datapepsignset import (
HINT_SIGNS_SEQUENCE_ARGS_1)
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._errortype import (
find_cause_type_instance_origin)
from beartype._decor._error._util.errorutilcolor import color_type
from beartype._decor._error._util.errorutiltext import represent_pith
from beartype._util.hint.pep.proposal.pep484585.utilpep484585 import (
is_hint_pep484585_tuple_empty)
from beartype._util.hint.utilhinttest import is_hint_ignorable
from beartype._util.text.utiltextlabel import label_obj_type
# ....................{ GETTERS ~ sequence }....................
def find_cause_sequence_args_1(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the **single-argument variadic sequence type hint**
    (i.e., PEP-compliant type hint accepting exactly one subscripted argument
    constraining *all* items of this object, which necessarily satisfies the
    :class:`collections.abc.Sequence` protocol with guaranteed ``O(1)``
    indexation across all sequence items) of that cause.

    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.

    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign in HINT_SIGNS_SEQUENCE_ARGS_1, (
        f'{repr(cause.hint)} not 1-argument sequence hint.')

    # Assert this sequence was subscripted by exactly one argument. Note that
    # the "typing" module should have already guaranteed this on our behalf.
    assert len(cause.hint_childs) == 1, (
        f'1-argument sequence hint {repr(cause.hint)} subscripted by '
        f'{len(cause.hint_childs)} != 1.')

    # Shallow output cause, type-checking only whether this pith is an
    # instance of the type originating this hint (e.g., "list" for
    # "list[str]").
    cause_shallow = find_cause_type_instance_origin(cause)

    # If this pith is *NOT* an instance of this type, this shallow cause
    # already explains the violation; return it.
    if cause_shallow.cause_str_or_none is not None:
        return cause_shallow

    # Else, this pith is an instance of this type and is thus a sequence.
    # Defer to this function supporting arbitrary sequences.
    return _find_cause_sequence(cause)
def find_cause_tuple(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the **tuple type hint** (i.e., PEP-compliant type hint
    accepting either zero or more subscripted arguments iteratively constraining
    each item of this fixed-length tuple *or* exactly one subscripted arguments
    constraining *all* items of this variadic tuple) of that cause.
    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.
    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignTuple, (
        f'{repr(cause.hint_sign)} not "HintSignTuple".')

    # Shallow output cause type-checking *ONLY* whether this pith is an
    # instance of the type originating this hint (i.e., "tuple").
    cause_shallow = find_cause_type_instance_origin(cause)

    # If this pith is not even a tuple, that shallow failure is the whole
    # story. Return it immediately.
    if cause_shallow.cause_str_or_none is not None:
        return cause_shallow
    # Else, this pith is a tuple.

    # True only if this hint is of the variadic form "Tuple[{typename}, ...]"
    # (i.e., subscripted by exactly two children the latter of which is an
    # unquoted ellipsis), typing a tuple accepting any number of items all
    # satisfying the single child hint "{typename}".
    is_hint_variadic = (
        len(cause.hint_childs) == 2 and
        cause.hint_childs[1] is Ellipsis
    )

    # A variadic tuple semantically reduces to a simple sequence. Defer to
    # the generic handler supporting arbitrary variadic sequences.
    if is_hint_variadic:
        return _find_cause_sequence(cause)
    # Else, this hint is of the fixed-length form "Tuple[{typename1}, ...,
    # {typenameN}]", typing a tuple accepting a fixed number of items each
    # satisfying a unique child hint.

    # If this hint is the empty fixed-length tuple hint, this pith satisfies
    # this hint only if it is itself the empty tuple.
    if is_hint_pep484585_tuple_empty(cause.hint):
        # A non-empty pith violates the empty tuple hint. Return a deep
        # cause permuted with a human-readable description of this failure.
        if cause.pith:
            return cause.permute(cause_str_or_none=(
                f'tuple {represent_pith(cause.pith)} non-empty'))
        # Else, this pith is the empty tuple and thus satisfies this hint.
        return cause
    # Else, this hint is a standard fixed-length tuple.

    # A pith whose length differs from this hint's arity fails immediately.
    # Return a deep cause describing that length mismatch.
    if len(cause.pith) != len(cause.hint_childs):
        return cause.permute(cause_str_or_none=(
            f'tuple {represent_pith(cause.pith)} length '
            f'{len(cause.pith)} != {len(cause.hint_childs)}'
        ))
    # Else, this pith and hint are of the same length.

    # Type-check each item of this tuple against its corresponding child
    # hint, which necessarily exists given the matching lengths above.
    for item_index, item in enumerate(cause.pith):
        hint_child = cause.hint_childs[item_index]

        # Ignorable child hints constrain nothing. Skip to the next item.
        if is_hint_ignorable(hint_child):
            continue
        # Else, this child hint is unignorable.

        # Deep output cause type-checking whether this item satisfies this
        # child hint.
        cause_deep = cause.permute(pith=item, hint=hint_child).find_cause()

        # If this item is the cause of this failure, prefix the deeper
        # explanation with metadata locating this item and return it.
        if cause_deep.cause_str_or_none is not None:
            cause_deep.cause_str_or_none = (
                f'tuple index {item_index} item '
                f'{cause_deep.cause_str_or_none}'
            )
            return cause_deep
        # Else, this item is *NOT* the cause of this failure. Silently
        # continue to the next item.

    # All items of this fixed-length tuple are valid, implying this pith to
    # deeply satisfy this hint. Return this cause as is.
    return cause
# ....................{ GETTERS ~ private }....................
def _find_cause_sequence(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the **variadic sequence type hint** (i.e.,
    PEP-compliant type hint accepting one or more subscripted arguments
    constraining *all* items of this object, which necessarily satisfies the
    :class:`collections.abc.Sequence` protocol with guaranteed ``O(1)``
    indexation across all sequence items) of that cause.
    This private getter is shared by both the public
    :func:`find_cause_sequence_args_1` and :func:`find_cause_tuple` getters.
    The caller guarantees this pith to already be an instance of the expected
    sequence type; only item-wise deep type-checking is performed here. When
    the parent wrapper function type-checked this sequence at a pseudo-random
    index in ``O(1)`` time, only that same index is re-checked here;
    otherwise, all items are checked in ``O(n)`` time.
    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.
    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    # Assert this type hint to describe a variadic sequence. See the parent
    # find_cause_sequence_args_1() and find_cause_tuple()
    # functions for derivative logic.
    #
    # Note that this pith need *NOT* be validated to be an instance of the
    # expected variadic sequence, as the caller guarantees this to be the case.
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert (
        cause.hint_sign in HINT_SIGNS_SEQUENCE_ARGS_1 or (
            cause.hint_sign is HintSignTuple and
            len(cause.hint_childs) == 2 and
            cause.hint_childs[1] is Ellipsis
        )
    ), (f'{repr(cause.hint)} neither '
        f'standard sequence nor variadic tuple hint.')
    # If this sequence is non-empty...
    if cause.pith:
        # First child hint of this hint. All remaining child hints if any are
        # ignorable. Specifically, if this hint is:
        # * A standard sequence (e.g., "typing.List[str]"), this hint is
        #   subscripted by only one child hint.
        # * A variadic tuple (e.g., "typing.Tuple[str, ...]"), this hint is
        #   subscripted by only two child hints the latter of which is
        #   ignorable syntactic chuff.
        hint_child = cause.hint_childs[0]
        # If this child hint is *NOT* ignorable...
        if not is_hint_ignorable(hint_child):
            # Arbitrary iterator satisfying the enumerate() protocol, yielding
            # zero or more 2-tuples of the form "(item_index, item)", where:
            # * "item" is an arbitrary item of this sequence.
            # * "item_index" is the 0-based index of this item.
            pith_enumerator = None
            # If this sequence was indexed by the parent @beartype-generated
            # wrapper function by a pseudo-random integer in O(1) time,
            # type-check *ONLY* the same index of this sequence also in O(1)
            # time. Since the current call to that function failed a type-check,
            # either this index is the index responsible for that failure *OR*
            # this sequence is valid and another container is responsible for
            # that failure. In either case, no other indices of this sequence
            # need be checked.
            if cause.random_int is not None:
                # 0-based index of this item calculated from this random
                # integer in the *SAME EXACT WAY* as in the parent
                # @beartype-generated wrapper function. Any divergence here
                # would silently re-check the *WRONG* item, so this modulo
                # expression must be kept in lockstep with that function.
                pith_item_index = cause.random_int % len(cause.pith)
                # Pseudo-random item with this index in this sequence.
                pith_item = cause.pith[pith_item_index]
                # 2-tuple of this index and item in the same order as the
                # 2-tuples returned by the enumerate() builtin.
                pith_enumeratable = (pith_item_index, pith_item)
                # Iterator yielding only this 2-tuple.
                pith_enumerator = iter((pith_enumeratable,))
                # print(f'Checking item {pith_item_index} in O(1) time!')
            # Else, this sequence was iterated by the parent
            # @beartype-generated wrapper function in O(n) time. In this case,
            # type-check *ALL* indices of this sequence in O(n) time as well.
            else:
                # Iterator yielding all indices and items of this sequence.
                pith_enumerator = enumerate(cause.pith)
                # print('Checking sequence in O(n) time!')
            # For each enumerated item of this (sub)sequence...
            for pith_item_index, pith_item in pith_enumerator:
                # Deep output cause, type-checking whether this item satisfies
                # this child hint.
                cause_deep = cause.permute(
                    pith=pith_item, hint=hint_child).find_cause()
                # If this item is the cause of this failure...
                if cause_deep.cause_str_or_none is not None:
                    # Human-readable substring prefixing this failure with
                    # metadata describing this item (e.g., "list index 2
                    # item ...").
                    cause_deep.cause_str_or_none = (
                        f'{color_type(label_obj_type(cause.pith))} '
                        f'index {pith_item_index} item '
                        f'{cause_deep.cause_str_or_none}'
                    )
                    # Return this cause.
                    return cause_deep
                # Else, this item is *NOT* the cause of this failure. Silently
                # continue to the next.
        # Else, this child hint is ignorable.
    # Else, this sequence is empty, in which case all items of this sequence
    # (of which there are none) are valid. Just go with it, people.
    # Return this cause as is; all items of this sequence are valid, implying
    # this sequence to deeply satisfy this hint.
    return cause
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`484`-compliant :attr:`typing.NoReturn` **type hint violation
describers** (i.e., functions returning human-readable strings explaining
violations of :pep:`484`-compliant :attr:`typing.NoReturn` type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._data.hint.pep.sign.datapepsigns import HintSignNoReturn
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._util.errorutiltext import represent_pith
from beartype._util.text.utiltextlabel import prefix_callable
# ....................{ GETTERS }....................
def find_cause_noreturn(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing the failure of the decorated callable to *not*
    return a value in violation of the :pep:`484`-compliant
    :attr:`typing.NoReturn` type hint.
    Since a callable annotated as returning :attr:`typing.NoReturn` violates
    that hint by returning *any* value whatsoever, this getter
    unconditionally produces a violation cause rather than conditionally
    type-checking the returned value.
    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.
    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign is HintSignNoReturn, (
        f'{repr(cause.hint)} not "HintSignNoReturn".')
    # Output cause to be returned, permuted from this input cause such that the
    # justification is a human-readable string describing this failure.
    cause_return = cause.permute(cause_str_or_none=(
        f'{prefix_callable(cause.func)} with PEP 484 return type hint '
        f'"typing.NoReturn" returned {represent_pith(cause.pith)}'
    ))
    # Return this cause.
    return cause_return
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`484`-compliant **union type hint violation describers**
(i.e., functions returning human-readable strings explaining violations of
:pep:`484`-compliant union type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar._roarexc import _BeartypeCallHintPepRaiseException
from beartype._data.hint.pep.sign.datapepsignset import HINT_SIGNS_UNION
from beartype._decor._error._errorcause import ViolationCause
from beartype._decor._error._util.errorutilcolor import color_hint
from beartype._decor._error._util.errorutiltext import represent_pith
from beartype._util.hint.pep.utilpepget import (
get_hint_pep_origin_type_isinstanceable_or_none)
from beartype._util.hint.pep.utilpeptest import is_hint_pep
from beartype._util.hint.utilhinttest import is_hint_ignorable
from beartype._util.text.utiltextjoin import join_delimited_disjunction_types
from beartype._util.text.utiltextmunge import (
suffix_unless_suffixed,
uppercase_char_first,
)
# ....................{ GETTERS }....................
def find_cause_union(cause: ViolationCause) -> ViolationCause:
    '''
    Output cause describing whether the pith of the passed input cause either
    satisfies or violates the PEP-compliant union type hint of that cause.
    A pith satisfies a union if it satisfies *any* unignorable child hint of
    that union; this getter thus short-circuits by returning the input cause
    unmodified as soon as one satisfied child hint is found. Otherwise, the
    failures against *all* child hints are aggregated into a single
    human-readable justification.
    Parameters
    ----------
    cause : ViolationCause
        Input cause providing this data.
    Returns
    ----------
    ViolationCause
        Output cause type-checking this data.
    Raises
    ----------
    _BeartypeCallHintPepRaiseException
        If this pith violates this union but *no* violation causes were
        collected, implying an internal inconsistency.
    '''
    assert isinstance(cause, ViolationCause), f'{repr(cause)} not cause.'
    assert cause.hint_sign in HINT_SIGNS_UNION, (
        f'{repr(cause.hint)} not union sign.')
    # Indentation preceding each line of the strings returned by child getter
    # functions called by this parent getter function, offset to visually
    # demarcate child from parent causes in multiline strings.
    CAUSE_INDENT_CHILD = cause.cause_indent + '  '
    # Subset of all classes shallowly associated with these child hints (i.e.,
    # by being either these child hints in the case of non-"typing" classes
    # *OR* the classes originating these child hints in the case of
    # PEP-compliant type hints) that this pith fails to shallowly satisfy.
    hint_types_violated = set()
    # List of all human-readable strings describing the failure of this pith to
    # satisfy each of these child hints.
    cause_strs = []
    # For each subscripted argument of this union...
    for hint_child in cause.hint_childs:
        # If this child hint is ignorable, continue to the next.
        if is_hint_ignorable(hint_child):
            continue
        # Else, this child hint is unignorable.
        # If this child hint is PEP-compliant...
        if is_hint_pep(hint_child):
            # Non-"typing" class originating this child hint if any *OR* "None"
            # otherwise.
            hint_child_origin_type = (
                get_hint_pep_origin_type_isinstanceable_or_none(hint_child))
            # If...
            if (
                # This child hint originates from a non-"typing" class *AND*...
                hint_child_origin_type is not None and
                # This pith is *NOT* an instance of this class...
                not isinstance(cause.pith, hint_child_origin_type)
            # Then this pith fails to satisfy this child hint. In this case...
            ):
                # Add this class to the subset of all classes this pith does
                # *NOT* satisfy.
                hint_types_violated.add(hint_child_origin_type)
                # Continue to the next child hint.
                continue
            # Else, this pith is an instance of this class and thus shallowly
            # (but *NOT* necessarily deeply) satisfies this child hint.
            # Child hint output cause to be returned, type-checking only whether
            # this pith deeply satisfies this child hint.
            cause_child = cause.permute(
                hint=hint_child,
                cause_indent=CAUSE_INDENT_CHILD,
            ).find_cause()
            # If this pith deeply satisfies this child hint, return this cause
            # as is.
            if cause_child.cause_str_or_none is None:
                # print('Union child {!r} pith {!r} deeply satisfied!'.format(hint_child, pith))
                return cause
            # Else, this pith deeply violates this child hint.
            # Append the cause of this violation to the running list of causes.
            # Bullet-prefixing is deferred until final message assembly below.
            cause_strs.append(cause_child.cause_str_or_none)
        # Else, this child hint is PEP-noncompliant. In this case...
        else:
            # Assert this child hint to be a non-"typing" class. Note that
            # the "typing" module should have already guaranteed that all
            # subscripted arguments of unions are either PEP-compliant type
            # hints or non-"typing" classes.
            assert isinstance(hint_child, type), (
                f'{cause.exception_prefix}union type hint '
                f'{repr(cause.hint)} child hint {repr(hint_child)} invalid '
                f'(i.e., neither type hint nor non-"typing" class).')
            # Else, this child hint is a non-"typing" type.
            # If this pith is an instance of this class, this pith satisfies
            # this hint. In this case, return this cause as is.
            if isinstance(cause.pith, hint_child):
                return cause
            # Else, this pith is *NOT* an instance of this class, implying this
            # pith to *NOT* satisfy this hint. In this case, add this class to
            # the subset of all classes this pith does *NOT* satisfy.
            hint_types_violated.add(hint_child)
    # If this pith fails to shallowly satisfy one or more of the types of this
    # union, concatenate these failures onto one discrete bullet-prefixed line.
    if hint_types_violated:
        # Human-readable comma-delimited disjunction of the names of these
        # classes (e.g., "bool, float, int, or str").
        cause_types_unsatisfied = join_delimited_disjunction_types(
            hint_types_violated)
        # Prepend this cause as a discrete bullet-prefixed line.
        #
        # Note that this cause is intentionally prepended rather than appended
        # to this list. Since this cause applies *ONLY* to the shallow type of
        # the current pith rather than any items contained in this pith,
        # listing this shallow cause *BEFORE* other deeper causes typically
        # applying to items contained in this pith produces substantially more
        # human-readable exception messages: e.g.,
        #     # This reads well.
        #     @beartyped pep_hinted() parameter pep_hinted_param=(1,) violates
        #     PEP type hint typing.Union[int, typing.Sequence[str]], as (1,):
        #     * Not int.
        #     * Tuple item 0 value "1" not str.
        #
        #     # This does not.
        #     @beartyped pep_hinted() parameter pep_hinted_param=(1,) violates
        #     PEP type hint typing.Union[int, typing.Sequence[str]], as (1,):
        #     * Tuple item 0 value "1" not str.
        #     * Not int.
        #
        # Note that prepending to lists is an O(n) operation, but that this
        # cost is negligible in this case both due to the negligible number of
        # child hints of the average "typing.Union" in general *AND* due to the
        # fact that this function is only called when a catastrophic type-check
        # failure has already occurred.
        cause_strs.insert(0, f'not {color_hint(cause_types_unsatisfied)}')
    # Else, this pith shallowly satisfies *ALL* the types of this union.
    # If prior logic appended *NO* causes, raise an exception.
    if not cause_strs:
        raise _BeartypeCallHintPepRaiseException(
            f'{cause.exception_prefix}type hint '
            f'{repr(cause.hint)} failure causes unknown.'
        )
    # Else, prior logic appended one or more strings describing these failures.
    # Truncated object representation of this pith.
    pith_repr = represent_pith(cause.pith)
    # Output cause to be returned, permuted from this input cause such that the
    # output cause justification is either...
    cause_return = cause.permute(cause_str_or_none=(
        # If prior logic appended one cause, a single-line
        # substring intended to be embedded in a longer string;
        f'{pith_repr} {cause_strs[0]}'
        if len(cause_strs) == 1 else
        # Else, prior logic appended two or more causes. In this case, a
        # multiline string comprised of...
        '{}:\n{}'.format(
            # This truncated object representation followed by...
            pith_repr,
            # The newline-delimited concatenation of each cause as a discrete
            # bullet-prefixed line...
            '\n'.join(
                '{}* {}'.format(
                    # Indented by the current indent...
                    cause.cause_indent,
                    # Whose first character is uppercased...
                    uppercase_char_first(
                        # Suffixed by a period if not yet suffixed by a period.
                        suffix_unless_suffixed(text=cause_str, suffix='.')
                    )
                )
                for cause_str in cause_strs
            )
        )
    ))
    # Return this cause.
    return cause_return
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype type-checking wrapper function code factories** (i.e., low-level
callables dynamically generating pure-Python code snippets type-checking
parameters and return values of :mod:`beartype`-decorated callables against the
PEP-compliant type hints annotating those callables).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._check.checkmagic import ARG_NAME_GETRANDBITS
from beartype._check.expr.exprmake import make_check_expr
from beartype._conf.confcls import BeartypeConf
from beartype._data.datatyping import CodeGenerated
from beartype._decor._wrapper.wrappersnip import (
CODE_HINT_ROOT_PREFIX,
CODE_HINT_ROOT_SUFFIX,
CODE_HINT_ROOT_SUFFIX_RANDOM_INT,
)
from beartype._util.cache.utilcachecall import callable_cached
# ....................{ MAKERS }....................
@callable_cached
def make_func_wrapper_code(hint: object, conf: BeartypeConf) -> CodeGenerated:
    '''
    **Type-checking wrapper function code factory** (i.e., low-level callable
    dynamically generating a pure-Python code snippet type-checking the
    previously localized parameter or return value annotated by the passed
    PEP-compliant type hint against that hint of the current
    :mod:`beartype`-decorated callable).
    This code factory is memoized for efficiency.
    Parameters
    ----------
    hint : object
        PEP-compliant type hint to be type-checked.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object).
    Returns
    ----------
    CodeGenerated
        Tuple containing the Python code snippet dynamically generated by this
        code factory and metadata describing that code. See the
        :attr:`beartype._data.datatyping.CodeGenerated` type hint for details.
    Raises
    ----------
    All exceptions raised by the lower-level :func:`make_check_expr` factory.
    Warns
    ----------
    All warnings emitted by the lower-level :func:`make_check_expr` factory.
    See Also
    ----------
    :func:`make_check_expr`
        Further details.
    '''
    # Boolean expression type-checking an arbitrary object against this hint,
    # together with the local scope and forward references required by that
    # expression.
    code_check_expr, code_scope, hint_forwardrefs_class_basename = (
        make_check_expr(hint, conf))

    # Code snippet passing the value of the random integer previously
    # generated for the current call to the exception-handling function call
    # embedded in the "CODE_HINT_ROOT_SUFFIX" snippet. If the generated check
    # never indexes by a pseudo-random integer, *NO* such integer is passed.
    if ARG_NAME_GETRANDBITS in code_scope:
        code_random_int = CODE_HINT_ROOT_SUFFIX_RANDOM_INT
    else:
        code_random_int = ''

    # Code snippet raising a human-readable exception when the root pith
    # violates the root type hint, suffixing the check expression below.
    code_suffix = CODE_HINT_ROOT_SUFFIX.format(
        random_int_if_any=code_random_int)

    # Full code snippet type-checking the root pith against the root hint.
    func_wrapper_code = CODE_HINT_ROOT_PREFIX + code_check_expr + code_suffix

    # Return all metadata required by higher-level callers.
    return (func_wrapper_code, code_scope, hint_forwardrefs_class_basename)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
# ....................{ TODO }....................
#FIXME: Major optimization: duplicate the signature of the decorated callable
#as the signature of our wrapper function. Why? Because doing so obviates the
#need to explicitly test whether each possible parameter was passed and how
#that parameter was passed (e.g., positional, keyword) as well as the need to
#localize "__beartype_args_len" and so on. In short, this is a massive win.
#Again, see the third-party "makefun" package, which purports to already do so.
#FIXME: Cray-cray optimization: don't crucify us here, folks, but eliminating
#the innermost call to the original callable in the generated wrapper may be
#technically feasible. It's probably a BadIdea™, but the idea goes like this:
#
# # Source code for this callable as a possibly multiline string,
# # dynamically parsed at runtime with hacky regular expressions from
# # the physical file declaring this callable if any *OR* "None" otherwise
# # (e.g., if this callable is defined dynamically or cannot be parsed from
# # that file).
# func_source = None
#
# # Attempt to find the source code for this callable.
# try:
# func_source = inspect.getsource(func)
# # If the inspect.getsource() function fails to do so, shrug.
# except OSError:
# pass
#
# # If the source code for this callable cannot be found, fallback to
# # simply calling this callable in the conventional way.
# if func_source is None:
# #FIXME: Do what we currently do here.
# # Else, the source code for this callable was found. In this case,
# # carefully embed this code into the code generated for this wrapper.
# else:
# #FIXME: Do something wild, crazy, and dangerous here.
#
#Extreme care will need to be taken, including:
#
#* Ensuring code is indented correctly.
#* Preserving the signature (especially with respect to passed parameters) of
# the original callable in the wrapper. See the third-party "makefun" package,
# which purports to already do so. So, this is mostly a solved problem --
# albeit still non-trivial, as "beartype" will never have dependencies.
#* Don't bother reading any of this. Just skip to the synopsis below:
# * Preventing local attributes defined by this wrapper as well as global
# attributes imported into this wrapper's namespace from polluting the
# namespace expected by the original callable. The former is trivial; simply
# explicitly "del {attr_name1},...,{attr_nameN}" immediately before
# embedding the source code for that callable. The latter is tricky; we'd
# probably want to stop passing "globals()" to exec() below and instead pass
# a much smaller list of attributes explicitly required by this wrapper.
# Even then, though, there's probably no means of perfectly insulating the
# original code from all wrapper-specific global attributes. Or:
# * Perhaps this isn't an issue? After all, *ALL* locals and globals exposed
# to decorated callables are now guaranteed to be "__bear"-prefixed. This
# implies that searching the body of the decorated callable for the
# substring "\b__bear" and then raising an exception if any such
# substrings are found should suffice to prevent name collision.
# * Rewriting return values and yielded values. Oh, boy. That's the killer,
# honestly. Regular expression-based parsing only gets us so far. We could
# try analyzing the AST for that code, but... yikes. Each "return" and
# "yield" statement would need to be replaced by a beartype-specific
# "return" or "yield" statement checking the types of the values to be
# returned or
# yielded. We can guarantee that that rapidly gets cray-cray, especially
# when implementing non-trivial PEP 484-style type checking requiring
# multiple Python statements and local variables and... yeah. Actually:
# * Why *CAN'T* regex-based parsing suffice? Python's Backus-Naur form (BNF)
# is almost certainly quite constrained. We'll have to check where exactly
# "return" and "yield" statements are permissible, but we're fairly sure
# they're permissible only after newlines followed by sufficient
# indentation.
# * Note that the objects produced by Python's standard "ast" *AND* "dis"
# modules contain line number attributes yielding the line numbers on
#   which those syntactic objects were parsed. Ergo, whichever of these is
#   the more efficient is almost certainly the simplest (and possibly even
#   the fastest) approach. Is this worth benchmarking? Perhaps we should simply
# adopt the "ast" approach, as that's likely to be substantially more
# robust *AND* generalize to the case of annotated local variables, where
# naive regexes (and probably "dis" as well) fall down. Of course, "dis"
# is likely to be *MUCH* more space- and time-performant than "ast".
# * *SIGH.* Yes, absolutely use the standard "ast" module. Absolutely do
# *NOT* use either hand-rolled regexes or the standard "dis" module. Why?
# Because:
# * The low-level internals of the "ast" module are implemented in C. That
# means it's likely to be fast enough for our purposes.
# * CPython *ALREADY* has to do all (or at least, enough) of the AST
# analysis performed by the "ast" module. Since that cost has to be paid
# anyway, we'd might as well avoid paying additional regex or "dis"
# costs by reusing "ast" with @beartype. Oh, wait... No, that's not how
# things work at all. You basically can't reverse-engineer an AST from a
# callable code object. Since Python doesn't preserve the AST it
# internally produces to generate byte-code for a callable on that
# callable, we have no choice but to:
# * Get the source for that callable (e.g., with dill.source.getsource()
# or inspect.getsource()).
# * Pass that source string to ast.parse(). Man, that sure blows chunks.
# * So, ignore the prior point. The only additional meaningful point is
# that, unlike the "dis" module, the "ast" module makes it trivial to:
# * Transform the produced AST by injecting additional nodes (e.g.,
# dynamically generated statements) into the AST.
# * Compile that AST down into a code object.
# Does any of the above help us? Maybe not. All we really need from "ast"
# and "dis" are line numbers and the ability to crudely identify:
# * "return" statements. "dis" trivially does this.
# * "yield" statements. "dis" trivially does this.
# * Local annotated variable assignments. "dis" *PROBABLY* does not
# trivially do this. Indeed, it's not necessarily clear that "ast" does
# this either. Actually, that's absolutely *NOT* true. "ast" appears to
# trivially detect local annotated variable assignments, which is nice.
# Hilariously, regexes *DO* trivially detect local annotated variable
# assignments, because that's just a search for
# r"\n\s*[a-zA-Z_][a-zA-Z0-9_]*\s*:". Like, seriously. That's by far the
# easiest way to do that. Detecting "return" and "yield" statements is
# similarly trivial (we think, anyway) with regexes.
# *WAIT.* Regexes may very well detect the *START* of a local annotated
# variable assignment, but they clearly fail to detect the *END*, as that
# requires context-free parsing. Welp. That's the death-knell for both
# regexes and "dis", then. "ast" is it!
#
#In synopsis, don't bother reading the above. Just know that parsing "return"
#and "yield" statements as well as annotated local variable assignments
#unsurprisingly requires use of the standard "ast" module. Specifically:
#* Get the source for the decorated callable. Ideally, we'll want to do so by
# implementing our own get_callable_source() utility getter inspired by the
# third-party "dill" implementation at dill.source.getsource() rather than the
# standard inspect.getsource().
#* Pass that source string to ast.parse(). Note that the following snippet
# appears to be the most robust means of doing so, as it implicitly accounts
# for encoding issues that we do *NOT* want to concern ourselves with:
# import ast
# import tokenize
#
# def parse_file(filename):
# with tokenize.open(filename) as f:
# return ast.parse(f.read(), filename=filename)
# Please cite the original source for this, which is this blog article:
# https://julien.danjou.info/finding-definitions-from-a-source-file-and-a-line-number-in-python
#* Search the resulting AST for any nodes referencing an object name (e.g.,
# variable, callable, class) prefixed by "__bear" and raise an exception on
# the first such node to prevent name collision.
#* Munge that AST as required.
#* Compile that AST -- ideally directly into a callable (but possibly first
# indirectly into a code object into then that directly into a callable).
#
#I suppose we could gradually roll out support by (in order):
#* Initially duplicating the signature of the decorated callable onto the
# wrapper function. Since this is both a hard prerequisite for all subsequent
# work *AND* yields tangible benefits in and of itself (e.g., for runtime
# introspection), this is absolutely the first big ticket item here. Note that
# several approaches exist here:
# * Programmatically reconstruct this signature. This is almost certainly the
# optimal technique.
# * Use "ast" to find the line interval for the signature of the decorated
# callable in its source file.
# * Use "dis" to find the same.
#
# Note that this is complicated by default values, which will need to be
# propagated from the decorated callable onto the wrapper function. As we
# recall, the "callable.__defaults__" dunder variable contains these defaults,
# so that's probably trivial. Just copy that variable, right? Similarly, the
# "callable.__annotations__" dunder variable should also be propagated.
#
# Actually, just see the standard inspect._signature_from_function() function,
# which implements the core callable signature parsing logic. Alternately, I
# believe we'd previously found a third-party library or two whose sole reason
# for existence was parsing and duplicating callable signatures, wasn't it?
#* Then optimizing callables annotated by either no return type hint *OR* a
# deeply ignorable return hint, which reduces to a significantly simpler edge
# case requiring *NO* "ast" use.
#* Then optimizing callables returning and yielding nothing by falling back to
# the unoptimized approach for callables that do so.
#* Then optimizing callables terminating in a single "return" or "yield"
# statement that *DIRECTLY* return a local or global variable. This is the
# easy common case, as we can then immediately precede that statement with a
# type-check on that variable.
#* Then optimizing callables terminating in a single "return" or "yield"
# statement that return an arbitrary expression. If that expression is *NOT* a
# local or global variable, we need to capture that expression into a new
# local variable *BEFORE* type-checking that variable *BEFORE* returning that
# variable. So it goes.
#* Then optimizing callables containing multiple such statements.
#
#Note lastly that the third-party "dill" package provides a
#dill.source.getsource() function with the same API as the stdlib
#inspect.getsource() function but augmented in various favourable ways. *shrug*
#
#Although this will probably never happen, it's still mildly fun to ponder.
#FIXME: Actually, this should probably happen -- but not necessarily for the
#reasons stipulated above. Don't get us wrong; optimizing away the additional
#stack frame by embedding the body of the decorated callable directly into the
#wrapper function wrapping that callable is a clever (albeit highly
#non-trivial) optimization.
#
#The *REAL* tangible benefit, however, is in type-checking annotated local
#variables. Currently, neither @beartype nor any other runtime type checker has
#the means to check annotated local variables: e.g.,
# @beartype
# def muh_func(muh_list: list[int]) -> int:
# list_item: int = list[0] # <- can't check this
# return list_item
#
#The reason, of course, is that those variables and thus variable annotations
#are effectively "locked" behind the additional stack frame separating the
#decorated callable from its wrapper function. Integrating the former into the
#latter, however, trivially dissolves this barrier; indeed, since Python
#currently has no notion of a variable decorator and prohibits function return
#values from being assigned to as l-values, there is no pragmatic alternative.
#
#The idea here is that we could augment the body of the decorated callable when
#merged into its wrapper function as follows:
#* Iteratively search that body for local annotated variable declarations.
#* For each such declaration:
# * Inject one or more statements after each such declaration type-checking
# that variable against its annotation.
#
#The issue here then becomes: *WHERE* after each such declaration? This is a
#pertinent question, because we could type-check a variable immediately after
#its declaration, only to have a subsequent assignment to that variable later
#in the body of the decorated callable silently invalidate the prior
#type-check. Technically, since @beartype is an O(1) type-checker, we could
#re-perform type-checks after each assignment to an annotated local variable.
#But that seems a bit heavy-handed. Perhaps we should simply inject that code
#at the last possible moment -- which is to say, immediately *BEFORE* each
#"return" or "yield" statement in that callable. We have to inject code there
#anyway to type-check that "return" or "yield" statement, so we'd be hitting
#two birds with one beating stick to additionally type-check annotated local
#variables there as well.
#
#Note that the answer to where we type-check local variables has a profound
#impact on whether we adopt a regex- or "ast"-based solution. If we type-check
#everything before "return" or "yield" statements, regex suffices. If we check
#variables immediately after their declaration or assignment, however, only
#"ast" suffices. This is, of course, yet another point in favour of checking
#everything before "return" or "yield" statements, as regex is likely to be
#substantially faster and more portable (due to changes in "ast" design and
#implementation across Python versions) than the "ast"-based approach.
#
#For example, this regex should (in theory) suffice to detect all annotated
#local variable declarations in a callable: r"\n\s+[a-zA-Z_][a-zA-Z0-9_]*\s*:".
#Oh... wait. No. Even that doesn't generalize. Why? Literal triple-quoted
#strings, obviously. Welp. "ast" it is, then! No point in beating around that
#context-free bush then, is there? Consider using the third-party "astor"
#package if available, which purportedly improves upon the standard "ast"
#module in various ways and is internally leveraged by "pylint" to perform its
#magic. In any case, Relevant articles include:
#* "Static Modification of Python With Python: The AST Module", a well-written
# introduction to the topic:
# https://dzone.com/articles/static-modification-python
#
#Note that we have two significant high-level choices here:
#* Use the "ast" module just to obtain line number intervals for the desired
# statements. Note that the existence of the rarely used optional statement
# terminator ";" makes this less trivial than desired. We can't simply assume
# that statements begin and end on newlines, for example. Instead, we need to
# employ either the Python >= 3.8-specific ast.get_source_segment() function
# *OR* the Python >= 3.8-specific "end_lineno" and "end_col_offset" attributes
# of AST nodes. In either case, Python >= 3.8 will absolutely be required.
#* Use the "ast" to dynamically transform the AST itself. This is considerably
# less trivial *AND* invites significant issues. Sanely transforming the AST
# would probably require refactoring our entire workflow to generate new
# low-level AST nodes rather than new high-level Python code. Issues include:
# * Efficiency. "ast" is both space- and time-inefficient, given both the
# large number of objects it creates *AND* the inherent inefficiency of
# binary trees as O(n log n) structures.
#  * Portability. "ast" commonly changes in significant ways between major Python
# versions, casting doubts on our ability to reasonably port code
# transforming the AST between major Python versions, which is unacceptable.
#
#Actually, we'll probably end up combining the two approaches above. We
#definitely *WILL* want to apply trivial AST transformations, including:
#* For "return" and "yield" statements, we'll need to split the AST nodes
# representing those statements into at least three nodes plus a few new ones:
# * The AST node representing each "return" and "yield" statement should be
# transformed into a node instead localizing that statement's expression
# into a new local variable named "__beartype_pith_0".
# * Adding a new AST node returning or yielding the value of that variable.
#
#We can't reasonably do that transformation by any other means. Note that this
#then requires calling the Python >= 3.9-specific ast.unparse() function to
#losslessly generate source code from that transformed tree, which we then
#split into lines and inject our desired code after the desired line number
#corresponding to each shifted "return" and "yield" statement.
#
#After performing that hopefully simple transform, we then get the line number
#of the new AST node returning or yielding the value of that variable and then
#manually inject our code type-checking "__beartype_pith_0" there. Phew!
#
#Alternately, rather than ast.unparse() AST back into source code, we might
#instead try injecting AST nodes that we auto-generate by:
#* Passing our code type-checking the current "return" or "yield" statement to
# the ast.parse() function.
#* Inject the target sub-AST returned by that call into the desired node of
# the source full AST of the decorated callable. Note that this will probably
# require prefixing the body of the decorated callable with our parameter
# type-checking code *BEFORE* parsing that body with ast.parse(), to ensure
# that references in our code type-checking the current "return" or "yield"
# statement are properly resolved when merged back into the full AST.
#FIXME: Lastly, note that the above is likely to make beartype's
#decoration-time cost prohibitive under CPython, regardless of the call-time
#improvements due to stack frame compaction. Ergo, we may want to adopt the
#following defaults:
#* Under PyPy, *ENABLE* AST modification by default.
#* Under all other interpreters (especially including CPython), *DISABLE* AST
# modification by default.
#
#Naturally, profile this to decide what we should do. To facilitate choice,
#we'll need to refactor the @beartype decorator to support a new optional
#"is_ast" parameter defaulting to something resembling these defaults. When
#this parameter is false, @beartype defaults to the current approach; else,
#@beartype modifies the AST of decorated callables as above.
#FIXME: *AH HA!* We just realized that the prior AST approach can be
#significantly optimized to a degree that might make this reasonably tractable
#under CPython as well. How? As follows (in order):
#* Dynamically synthesize the *PRELIMINARY* body of the wrapper function from
# (in order):
# * Code declaring the signature of the wrapper function. Note that we
# *SHOULD* (in theory) be able to trivially extract this *WITHOUT* needing
#   to programmatically generate this ourselves by performing a
# preliminary walk over the AST of the decorated callable for the node(s)
# responsible for declaring that callable's signature. Hopefully trivial.
# Why? Because AST nodes provide line number ranges, which leads directly to
# trivial extraction of callable signatures. That said... we probably
# already need to programmatically generate signatures ourselves for the
# common edge case in which the decorated callable is *NOT* annotated by a
# return type hint. So, who knows!
#  * Code type-checking all parameters, as above.
#  * Code type-checking the "return" value. Don't worry about "yield"
# statements for now. *YES,* we are intentionally type-checking the "return"
# early in the body of the wrapper function. Why? So that we can have the
# "ast" module generate a full AST tree containing a node performing that
# type-check. Of course, that node will *NOT* be in the correct node
# position. But that's fine. A subsequent step will shift that node to its
# desired final position in the AST. This code should resemble:
# __beartype_pith_0 = True
# if ({code_checking_beartype_pith_0_value_here}):
# raise {code_raising_beartype_pith_0_exception_here}
# This is, of course, valid code that should generate valid AST nodes.
# * The body of the decorated callable.
#* Parse that preliminary body of the wrapper function through the ast.parse()
# function, producing an AST.
#* Transform that AST as follows:
# * Iteratively walk that AST until finding a node assigning "True" to
# "__beartype_pith_0". This shouldn't be troublesome.
# * Extract both that node and the subsequent node subtree consisting of the
# type-check and exception raising out of their current position in the AST.
# Naturally, save these two nodes for subsequent reinsertion back into the
# AST at a different position.
# * Iteratively walk the remainder of the AST until finding a node performing
# a return.
# * Inject the two previously extracted nodes into that node position.
# * Repeat until all "return" statements have been transformed.
# * Voila!
#* Compile that AST directly into a code object by calling the ast.compile()
# function.
#* Evaluate that code object by calling either the exec() or eval() builtin to
# produce the actual wrapper function.
#
#Note that there is a significant annoyance associated with AST
#transformations: *LINE NUMBERS.* Specifically, the ast.compile() function
#called above absolutely requires that line numbers be coherent (i.e.,
#monotonically increase). To ensure this, we'll need to "fix up" line numbers
#for basically *ALL* nodes following those representing the code
#type-checking all parameters (whose line numbers require no modification).
#This is annoying but inexpensive, given that we have to walk all nodes anyway.
#Note that the "ast" modules provides functions for repairing line numbers as
#well (e.g., ast.increment_lineno()), but that those functions are almost
#certainly inefficient and inapplicable for us.
#
#Note that the ast.copy_location() function appears to already do a *BIT* of
#what we need. Since we need cutting instead of copying, however, we'll
#probably just want to use that function's implementation as inspiration rather
#than directly calling that function.
#
#And... don't get us wrong. This is absolutely still going to be expensive. But
#the fact that we can flow directly from:
# decorated callable -> source code -> AST -> code object -> wrapper func
#...does imply that this should be considerably faster than previously thought.
#FIXME: We just realized that there's a significant optimization here that
#renders stack frame reduction unconditionally worthwhile across all Python
#interpreters and versions in a simple common case: callables annotated either
#with no return type hints *OR* deeply ignorable type hints. Why? Because we
#can trivially eliminate the additional stack frame in this edge case by
#unconditionally prefixing the body of the decorated callable by (in order):
#
#1. Code type-checking parameters passed to that callable.
#2. Code deleting *ALL* beartype-specific "__bear"-prefixed locals and globals
# referenced by the code type-checking those parameters. This is essential,
# as it implies that we then no longer need to iteratively search the body of
# the decorated callable for local variables with conflicting names, which
# due to strings we can't reliably do without "ast"- or "dis"-style parsing.
#
#Note this edge case only applies to callables:
#* Whose return hint is either:
# * Unspecified.
# * Deeply ignorable.
# * "None", implying this callable to return nothing. Callables explicitly
# returning a "None" value should instead be annotated with a return hint of
# "beartype.cave.NoneType"; this edge case would *NOT* apply to those.
#* *DIRECTLY* decorated by @beartype: e.g.,
# @beartype
# def muh_func(): pass
# This edge case does *NOT* apply to callables directly decorated by another
# decorator first, as in that case the above procedure would erroneously
# discard the dynamic decoration of that other decorator: e.g.,
# @beartype
# @other_decorator
# def wat_func(): pass
#* *NOT* implicitly transformed by one or more other import hooks. If any other
# import hooks are in effect, this edge case does *NOT* apply, as in that case
# the above procedure could again erroneously discard the dynamic
# transformations applied by those other import hooks.
#FIXME: *GENERALIZATION:* All of the above would seem to pertain to a
#prospective higher-level package, which has yet to be officially named but
#which we are simply referring to as "beartypecache" for now. "beartypecache"
#has one dependency: unsurprisingly, this is "beartype". The principal goal of
#"beartypecache" is *NOT* to perform AST translations as detailed above,
#although that certainly is a laudable secondary goal.
#
#The principal goal of "beartypecache" is, as the name suggests, to cache
#wrapper functions dynamically generated by the @beartype decorator across
#Python processes. This goal succinctly ties in to the above AST transform
#concepts, because the *ONLY* sane means of performing these transforms (even
#under PyPy and similarly fast Python environments) is to cache the results of
#these transformations across Python processes.
#
#The underlying idea here is that the @beartype decorator only needs to be
#applied once to each version of a callable. If that callable has not changed
#since the last application of @beartype to that decorator (or since @beartype
#itself has changed, obviously), then the previously cached application of
#@beartype to the current version of that callable suffices. Naturally, of
#course, there exists *NO* efficient means of deciding when a callable has
#changed over multiple Python invocations. There does, however, exist an
#efficient means of deciding when an on-disk module defining standard callables
#has changed: the "__pycache__" directory formalized by "PEP 3147 -- PYC
#Repository Directories" at:
# https://www.python.org/dev/peps/pep-3147
#
#Ergo, we soften the above idea to the following: "The @beartype decorator only
#needs to be applied once to each callable defined by each version of a
#module." If this sounds like import hooks, you would not be wrong. Sadly,
#there currently exists no public API in the stdlib for generically applying
#AST transformations via import hooks. But all is not lost, since we'll simply
#do it ourselves. In fact, unsurprisingly, this is a sufficiently useful
#concept that it's already been done by a number of third-party projects -- the
#most significant of which is "MacroPy3":
# https://github.com/lihaoyi/macropy
#
#The "MacroPy3" synopsis reads:
# "MacroPy provides a mechanism for user-defined functions (macros) to
# perform transformations on the abstract syntax tree (AST) of a Python
# program at import time."
#
#...which is exactly what we need. We certainly are *NOT* going to depend upon
#"MacroPy3" as a mandatory dependency, however. Like "beartype" before it,
#"beartypecache" should ideally only depend upon "beartype" as a mandatory
#dependency. Ideology aside, however, there exists a more significant reason:
#"beartypecache" is intended to be brutally fast. That's what the "cache"
#means. "MacroPy3" is undoubtedly slow by compare to a highly micro-optimized
#variant of that package, because no one in the Python world cares about
#efficiency -- perhaps justifiably, but perhaps not. Moreover, generalization
#itself incurs space and time efficiency costs. We can eliminate those costs by
#developing our own internal, private, ad-hoc AST-transform-on-import-hook
#implementation micro-optimized for our specific use case.
#
#Amusingly, even the abandoned PEP 511 prominently references "MacroPy3":
# The MacroPy project uses an import hook: it adds its own module finder in
# sys.meta_path to hook its AST transformer.
#
#Note that "sys.meta_path" is *NOT* necessarily the optimum approach for
#"beartypecache". Since the @beartype decorator can only, by definition, be
#applied to third-party user-defined modules, "sys.meta_path" might or might
#not be overkill for us, because "sys.meta_path" even applies to builtin
#stdlib modules. In any case, what we principally care about is the capacity to
#directly feed low-level *CODE OBJECTS* (rather than high-level *SOURCE CODE*)
#from our AST transformations into some sort of import hook machinery.
#
#Note this relevant StackOverflow answer:
# https://stackoverflow.com/a/43573798/2809027
#The synopsis of that answer reads:
# You will also need to examine if you want to use a MetaPathFinder or a
# PathEntryFinder as the system to invoke them is different. That is, the
# meta path finder goes first and can override builtin modules, whereas the
# path entry finder works specifically for modules found on sys.path.
#That answer then goes on to succinctly define example implementations of both,
#which is ludicrously helpful. Again, we should adopt whichever allows us to
#most efficiently generate low-level *CODE OBJECTS* from AST transformations.
#
#Note that the public importlib.util.source_from_cache(path) function trivially
#enables us to obtain the absolute filename of the previously cached byte code
#file if any from the absolute filename of any arbitrary Python module. That's
#nice. Additionally, note this preamble to PEP 3147:
#
# Byte code files [in "__pycache__" directories] contain two 32-bit
# big-endian numbers followed by the marshaled code object. The 32-bit
# numbers represent a magic number and a timestamp. The magic number changes
# whenever Python changes the byte code format, e.g. by adding new byte
# codes to its virtual machine. This ensures that pyc files built for
# previous versions of the VM won't cause problems. The timestamp is used to
# make sure that the pyc file match the py file that was used to create it.
# When either the magic number or timestamp do not match, the py file is
# recompiled and a new pyc file is written.
#
#Presumably, there exists some efficient programmatic means of deciding from
#pure Python whether "the magic number or timestamp do not match" for the byte
#code file cached for an arbitrary module.
#
#We're almost there. We then need some efficient means of deciding whether an
#arbitrary byte code file has been instrumented by "beartypecache" yet.
#That's... a much tougher nut to crack. We can think of two possible approaches
#here, both equally valid but one probably easier to implement than the other.
#For each byte code file cached in a "__pycache__" directory, the
#"beartypecache" package should either:
#* The easiest way *BY FAR* is probably to just emit one 0-byte
# "beartypecache"-specific file named
# "__pycache__/{module_name}.{python_name}.beartypecache" or something.
# There's *NO* way any other package is writing that sort of file, so filename
# collisions should in theory be infeasible. Given such a file, the "mtime" of
# this file should coincide with that of the source module from which this
# file is generated. Indeed, this approach suggests we don't even need to
# extract the magic number and timestamp from the byte code file. Nice! So,
# this is the way... probably.
#* The harder way *BY FAR* is probably to suffix the contents of this file by a
# superfluous byte code statement specific to "beartypecache", effectively the
# equivalent of:
# __beartypecache_is_cached = True
# That's more-or-less a noop and more-or-less trivially generated during our
# AST transformation of this source module from an import hook. Given that,
#  we'd then just need to compare the end of this file with the expected
# byte sequence. This *DOES* entail some I/O overhead and considerably more
# complexity than the prior approach, however.
#
#In any case, the above then enables us to efficiently cache @beartype
#decorations and AST transformations across an entire codebase as follows:
#
#* The root "__init__.py" module of the top-level package for downstream
# third-party consumers should contain the following import:
# import beartypecache.all
# As a side effect, the "beartypecache.all" submodule then installs an import
# hook globally decorating all callables across all subsequently imported
# modules with @beartype as well as applying AST transformations. This is the
# default approach. Of course, subsequent revisions could then provide some
# degree of configurability via different submodules or subpackages.
#* This "beartypecache.all" import hook then confines itself to each
# user-defined submodule *OF THE CALLING PACKAGE THAT IMPORTED*
# "beartypecache.all". This is critical. We can't simply globally apply the
# "beartypecache.all" import hook to *EVERYTHING*, because many callables will
# neither be intended nor able to support decoration by @beartype, which has
# rather firm views on PEP-compliant type hints and so on.
#* For each user-defined submodule of the calling package, this
# "beartypecache.all" import hook then performs the following:
# * Decide whether the previously cached byte code file for this submodule is
# still synchronized with this submodule and has been previously
# instrumented by "beartypecache", using one of the above approaches.
# * If so, avoid uselessly re-instrumenting this file.
# * Else, instrument this file as detailed above. As a first draft
# implementation, "beartypecache" should simply:
# * Replace the name of each function and method defined in this source
# submodule by "__beartype_wrapped_{func_name}". Note this will require a
# trivial sort of AST instrumentation. We can't avoid that.
# * Define the replacement wrapper function with the name "{func_name}",
# thus replacing the original callable with our decorated callable.
# This draft implementation efficiently caches @beartype decorations across
# the entire codebase, thus serving as a pragmatically useful demonstration
# of the underlying concept.
#
#All in all, this requires funding. Technically feasible, but cray-cray.
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype decorator code generator.**
This private submodule dynamically generates both the signature and body of the
wrapper function type-checking all annotated parameters and return value of
the callable currently being decorated by the :func:`beartype.beartype`
decorator in a general-purpose manner. For genericity, this relatively
high-level submodule implements *no* support for annotation-based PEPs (e.g.,
:pep:`484`); other lower-level submodules do so instead.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
# All "FIXME:" comments for this submodule reside in this package's "__init__"
# submodule to improve maintainability and readability here.
#FIXME: Split this large submodule into smaller submodules for maintainability.
#A useful approach might be:
#* Define a new private "_codearg" submodule and shift the _code_check_args()
# function there.
#* Define a new private "_codereturn" submodule and shift the
# _code_check_return() function there.
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeDecorParamNameException,
BeartypeDecorHintPepException,
)
from beartype.typing import NoReturn
from beartype._check.checkmagic import ARG_NAME_TYPISTRY
from beartype._check.expr._exprsnip import (
PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_PREFIX,
PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_SUFFIX,
)
from beartype._check.util.checkutilmake import make_func_signature
from beartype._decor._cache.cachetype import (
bear_typistry,
register_typistry_forwardref,
)
from beartype._check.checkcall import BeartypeCall
from beartype._decor._wrapper.wrappersnip import (
CODE_INIT_ARGS_LEN,
CODE_PITH_ROOT_PARAM_NAME_PLACEHOLDER,
CODE_RETURN_CHECK_PREFIX,
CODE_RETURN_CHECK_SUFFIX,
CODE_RETURN_UNCHECKED,
CODE_SIGNATURE,
PARAM_KIND_TO_CODE_LOCALIZE,
PEP484_CODE_CHECK_NORETURN,
)
from beartype._decor._wrapper._wrappercode import make_func_wrapper_code
from beartype._util.error.utilerror import (
EXCEPTION_PLACEHOLDER,
reraise_exception_placeholder,
)
from beartype._util.func.arg.utilfuncargiter import (
ARG_META_INDEX_KIND,
ARG_META_INDEX_NAME,
ArgKind,
iter_func_args,
)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585func import (
reduce_hint_pep484585_func_return)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585ref import (
get_hint_pep484585_forwardref_classname_relative_to_object)
from beartype._check.conv.convsanify import sanify_func_hint_root
from beartype._util.hint.utilhinttest import is_hint_ignorable
from beartype._util.kind.utilkinddict import update_mapping
from beartype._util.text.utiltextlabel import (
prefix_callable_decorated_arg,
prefix_callable_decorated_return,
)
from beartype._util.text.utiltextmunge import replace_str_substrs
from beartype._util.utilobject import SENTINEL
from collections.abc import (
Callable,
Iterable,
)
# ....................{ GENERATORS }....................
def generate_code(
    bear_call: BeartypeCall,
    # "beartype._decor._wrapper.wrappersnip" string globals required only for
    # their bound "str.format" methods.
    CODE_RETURN_UNCHECKED_format: Callable = CODE_RETURN_UNCHECKED.format,
) -> str:
    '''
    Generate a Python code snippet dynamically defining the wrapper function
    type-checking the passed decorated callable.

    This high-level function implements this decorator's core type-checking by
    converting all unignorable PEP-compliant type hints annotating this
    callable into pure-Python code type-checking the corresponding parameters
    and return values of each call to this callable.

    Parameters
    ----------
    bear_call : BeartypeCall
        Decorated callable to be type-checked.
    CODE_RETURN_UNCHECKED_format : Callable
        Bound :meth:`str.format` method of the
        :data:`CODE_RETURN_UNCHECKED` snippet global, passed as an optional
        parameter purely as a microoptimization. *Not* intended to be passed
        by callers.

    Returns
    ----------
    str
        Generated wrapper function code. Specifically, either:

        * The empty string if the decorated callable requires *no*
          type-checking (e.g., because all hints annotating this callable are
          ignorable), instructing the parent :func:`beartype.beartype`
          decorator to reduce to a noop for this callable.
        * Else, a code snippet declaring (in order) the signature of this
          wrapper, statements type-checking all annotated parameters, a call
          to the decorated callable, and a statement type-checking the value
          returned by that call. Note that the decorated callable is passed
          into this wrapper as the default value of a private
          ``__beartype_func`` parameter rather than as a closure-style local,
          as the latter is *not* reliably accessible from dynamically defined
          wrappers.

    Raises
    ----------
    BeartypeDecorParamNameException
        If the name of any parameter declared on this callable is prefixed by
        the reserved substring ``__bear``.
    BeartypeDecorHintNonpepException
        If any type hint annotating any parameter of this callable is neither
        PEP-compliant nor PEP-noncompliant.
    _BeartypeUtilMappingException
        If generated code erroneously declares an optional private
        beartype-specific parameter of the same name with differing default
        value. Since this should *never* happen, a private non-human-readable
        exception is raised in this case.
    '''
    assert bear_call.__class__ is BeartypeCall, (
        f'{repr(bear_call)} not @beartype call.')

    # Code type-checking all annotated parameters if one or more such
    # parameters are annotated with unignorable hints *OR* "" otherwise.
    args_code = _code_check_args(bear_call)

    # Code type-checking the return if annotated with an unignorable hint
    # *OR* "" otherwise.
    return_code = _code_check_return(bear_call)

    # If the return requires *NO* type-checking...
    #
    # Note that this branch *CANNOT* be embedded in the prior call to the
    # _code_check_return() function, as doing so would prevent us from
    # efficiently reducing to a noop here.
    if not return_code:
        # If *NO* parameters require type-checking either, this callable as a
        # whole requires *NO* type-checking. Return the empty string,
        # instructing the parent @beartype decorator to reduce to a noop
        # (i.e., the identity decorator returning this callable as is).
        if not args_code:
            return ''
        # Else, one or more parameters require type-checking.

        # Call this callable unchecked, returning the value returned by this
        # callable from this wrapper.
        return_code = CODE_RETURN_UNCHECKED_format(
            func_call_prefix=bear_call.func_wrapper_code_call_prefix)
    # Else, the return requires type-checking.

    # Code declaring the signature of this type-checking wrapper function,
    # deferred for efficiency until *AFTER* confirming above that a wrapper
    # function is even required.
    signature_code = make_func_signature(
        func_name=bear_call.func_wrapper_name,
        func_scope=bear_call.func_wrapper_scope,
        code_signature_format=CODE_SIGNATURE,
        code_signature_prefix=bear_call.func_wrapper_code_signature_prefix,
        conf=bear_call.conf,
    )

    # Return code defining the wrapper type-checking this callable.
    #
    # While alternatives to string concatenation exist (e.g., appending to a
    # list or bytearray before joining), they are either slower (list
    # construction overhead) or cumbersome (bytearray). Since string
    # concatenation is heavily optimized by CPython, the simplest approach is
    # also the ideal approach. KISS, bro.
    return signature_code + args_code + return_code
# ....................{ PRIVATE ~ constants }....................
#FIXME: Remove this set *AFTER* handling these kinds of parameters.
_PARAM_KINDS_IGNORABLE = frozenset((
ArgKind.VAR_KEYWORD,
))
'''
Frozen set of all :attr:`ArgKind` enumeration members to be ignored
during annotation-based type checking in the :func:`beartype.beartype`
decorator.
This includes:
* Constants specific to variadic keyword parameters (e.g., ``**kwargs``), which
are currently unsupported by :func:`beartype`.
* Constants specific to positional-only parameters, which apply only to
non-pure-Python callables (e.g., defined by C extensions). The
:func:`beartype` decorator applies *only* to pure-Python callables, which
provide no syntactic means for specifying positional-only parameters.
'''
_PARAM_KINDS_POSITIONAL = frozenset((
ArgKind.POSITIONAL_ONLY,
ArgKind.POSITIONAL_OR_KEYWORD,
))
'''
Frozen set of all **positional parameter kinds** (i.e.,
:attr:`ArgKind` enumeration members signifying that a callable parameter
either may *or* must be passed positionally).
'''
_RETURN_REPR = repr('return')
'''
Object representation of the magic string implying a return value in various
Python objects (e.g., the ``__annotations__`` dunder dictionary of annotated
callables).
'''
# ....................{ PRIVATE ~ args }....................
def _code_check_args(bear_call: BeartypeCall) -> str:
    '''
    Generate a Python code snippet type-checking all annotated parameters of
    the decorated callable if any *or* the empty string otherwise (i.e., if
    these parameters are unannotated).

    Parameters
    ----------
    bear_call : BeartypeCall
        Decorated callable to be type-checked.

    Returns
    ----------
    str
        Code type-checking all annotated parameters of the decorated callable.

    Raises
    ----------
    BeartypeDecorParamNameException
        If the name of any parameter declared on this callable is prefixed by
        the reserved substring ``__bear``.
    BeartypeDecorHintNonpepException
        If any type hint annotating any parameter of this callable is neither:

        * A PEP-noncompliant type hint.
        * A supported PEP-compliant type hint.
    BeartypeDecorHintPepException
        If any annotated parameter of this callable is of a kind unsupported
        by :func:`beartype.beartype` -- an edge case that should already have
        been filtered out by the :data:`_PARAM_KINDS_IGNORABLE` set above.
    '''
    assert bear_call.__class__ is BeartypeCall, (
        f'{repr(bear_call)} not @beartype call.')

    # ..................{ LOCALS ~ func                    }..................
    #FIXME: Unit test this up, please. Specifically, unit test:
    #* A callable annotated with only a single return type hint accepting both:
    #  * *NO* parameters.
    #  * One or more parameters each of which is unannotated.
    #
    #We probably already do this, but let's be double-sure here. Safety first!

    # If *NO* callable parameters are annotated, silently reduce to a noop.
    #
    # Note that this is purely an optimization short-circuit mildly improving
    # efficiency for the common case of callables accepting either no
    # parameters *OR* one or more parameters, all of which are unannotated.
    if (
        # That callable is annotated by only one type hint *AND*...
        len(bear_call.func_arg_name_to_hint) == 1 and
        # That type hint annotates that callable's return rather than a
        # parameter accepted by that callable...
        'return' in bear_call.func_arg_name_to_hint
    ):
        return ''
    # Else, one or more callable parameters are annotated.

    # Python code snippet to be returned.
    func_wrapper_code = ''

    # ..................{ LOCALS ~ parameter               }..................
    #FIXME: Remove this *AFTER* optimizing signature generation, please.
    # True only if this callable possibly accepts one or more positional
    # parameters.
    is_args_positional = False

    # ..................{ LOCALS ~ hint                    }..................
    # Type hint annotating this parameter if any *OR* "_PARAM_HINT_EMPTY"
    # otherwise (i.e., if this parameter is unannotated).
    hint = None

    # ..................{ GENERATE                         }..................
    #FIXME: Locally remove the "arg_index" local variable (and thus avoid
    #calling the enumerate() builtin here) *AFTER* refactoring @beartype to
    #generate callable-specific wrapper signatures.

    # For the name of each parameter accepted by this callable and the
    # "ParameterMeta" object describing this parameter (in declaration order)...
    for arg_index, arg_meta in enumerate(iter_func_args(
        # Possibly lowest-level wrappee underlying the possibly
        # higher-level wrapper currently being decorated by the @beartype
        # decorator. The latter typically fails to convey the same callable
        # metadata conveyed by the former -- including the names and kinds
        # of parameters accepted by the possibly unwrapped callable. This
        # renders the latter mostly useless for our purposes.
        func=bear_call.func_wrappee_wrappee,
        func_codeobj=bear_call.func_wrappee_wrappee_codeobj,
        is_unwrapping=False,
    )):
        # Kind and name of this parameter.
        arg_kind: ArgKind = arg_meta[ARG_META_INDEX_KIND]  # type: ignore[assignment]
        arg_name: str = arg_meta[ARG_META_INDEX_NAME]  # type: ignore[assignment]

        # Type hint annotating this parameter if any *OR* the sentinel
        # placeholder otherwise (i.e., if this parameter is unannotated).
        #
        # Note that "None" is a semantically meaningful PEP 484-compliant type
        # hint equivalent to "type(None)". Ergo, we *MUST* explicitly
        # distinguish between that type hint and unannotated parameters.
        hint = bear_call.func_arg_name_to_hint_get(arg_name, SENTINEL)

        # If this parameter is unannotated, continue to the next parameter.
        if hint is SENTINEL:
            continue
        # Else, this parameter is annotated.

        # Attempt to...
        try:
            # If this parameter's name is reserved for use by the @beartype
            # decorator, raise an exception.
            if arg_name.startswith('__bear'):
                raise BeartypeDecorParamNameException(
                    f'{EXCEPTION_PLACEHOLDER}reserved by @beartype.')
            # Else if this kind of parameter is silently ignorable, continue
            # to the next parameter.
            elif arg_kind in _PARAM_KINDS_IGNORABLE:
                continue
            # Else, this parameter is non-ignorable.

            # Sanitize this hint to either:
            # * If this hint is PEP-noncompliant, the PEP-compliant type hint
            #   converted from this PEP-noncompliant type hint.
            # * Else if this hint is both PEP-compliant and supported, this
            #   hint as is.
            # * Else, raise an exception (i.e., if this hint is neither
            #   PEP-noncompliant nor a supported PEP-compliant hint).
            #
            # Do this first *BEFORE* passing this hint to any further callables.
            hint = sanify_func_hint_root(
                hint=hint, arg_name=arg_name, bear_call=bear_call)

            # If this hint is ignorable, continue to the next parameter.
            #
            # Note that this is intentionally tested *AFTER* this hint has been
            # coerced into a PEP-compliant type hint to implicitly ignore
            # PEP-noncompliant type hints as well (e.g., "(object, int, str)").
            if is_hint_ignorable(hint):
                # print(f'Ignoring {bear_call.func_name} parameter {arg_name} hint {repr(hint)}...')
                continue
            # Else, this hint is unignorable.
            #
            # If this unignorable parameter either may *OR* must be passed
            # positionally, record this fact. Note this conditional branch must
            # be tested after validating this parameter to be unignorable; if
            # this branch were instead nested *BEFORE* validating this
            # parameter to be unignorable, @beartype would fail to reduce to a
            # noop for otherwise ignorable callables -- which would be rather
            # bad, really.
            elif arg_kind in _PARAM_KINDS_POSITIONAL:
                is_args_positional = True

            # Python code template localizing this parameter.
            #
            # Since @beartype now supports *ALL* parameter kinds, we safely
            # assume this behaves as expected without additional validation.
            # PARAM_LOCALIZE_TEMPLATE = PARAM_KIND_TO_CODE_LOCALIZE[arg_kind]

            #FIXME: Preserved in the event of a new future unsupported parameter kind.
            # Python code template localizing this parameter if this kind of
            # parameter is supported *OR* "None" otherwise.
            PARAM_LOCALIZE_TEMPLATE = PARAM_KIND_TO_CODE_LOCALIZE.get(  # type: ignore
                arg_kind, None)

            # If this kind of parameter is unsupported, raise an exception.
            #
            # Note this edge case should *NEVER* occur, as the parent function
            # should have simply ignored this parameter.
            if PARAM_LOCALIZE_TEMPLATE is None:
                raise BeartypeDecorHintPepException(
                    f'{EXCEPTION_PLACEHOLDER}kind {repr(arg_kind)} '
                    f'currently unsupported by @beartype.'
                )
            # Else, this kind of parameter is supported. Ergo, this code is
            # non-"None".

            # Generate a memoized parameter-agnostic code snippet type-checking
            # any parameter or return value with an arbitrary name.
            (
                code_param_check_pith,
                func_wrapper_scope,
                hint_forwardrefs_class_basename,
            ) = make_func_wrapper_code(hint, bear_call.conf)

            # Merge the local scope required to check this parameter into the
            # local scope currently required by the current wrapper function.
            update_mapping(bear_call.func_wrapper_scope, func_wrapper_scope)

            # Python code snippet localizing this parameter.
            code_param_localize = PARAM_LOCALIZE_TEMPLATE.format(
                arg_name=arg_name, arg_index=arg_index)

            # Unmemoize this snippet against the current parameter.
            code_param_check = _unmemoize_func_wrapper_code(
                bear_call=bear_call,
                func_wrapper_code=code_param_check_pith,
                pith_repr=repr(arg_name),
                hint_forwardrefs_class_basename=hint_forwardrefs_class_basename,
            )

            # Append code type-checking this parameter against this hint.
            func_wrapper_code += f'{code_param_localize}{code_param_check}'
        # If any exception was raised, reraise this exception with each
        # placeholder substring (i.e., "EXCEPTION_PLACEHOLDER" instance)
        # replaced by a human-readable description of this callable and
        # annotated parameter.
        except Exception as exception:
            reraise_exception_placeholder(
                exception=exception,
                #FIXME: Embed the kind of parameter as well (e.g.,
                #"positional-only", "keyword-only", "variadic positional"),
                #ideally by improving the existing
                #prefix_callable_decorated_arg() function to introspect this
                #kind from that callable's code object.
                target_str=prefix_callable_decorated_arg(
                    func=bear_call.func_wrappee, arg_name=arg_name),
            )

    # If this callable accepts one or more positional type-checked parameters,
    # prefix this code by a snippet localizing the number of these parameters.
    if is_args_positional:
        func_wrapper_code = f'{CODE_INIT_ARGS_LEN}{func_wrapper_code}'
    # Else, this callable accepts *NO* positional type-checked parameters. In
    # this case, preserve this code as is.

    # Return this code.
    return func_wrapper_code
# ....................{ PRIVATE ~ return }....................
def _code_check_return(bear_call: BeartypeCall) -> str:
    '''
    Generate a Python code snippet type-checking the annotated return declared
    by the decorated callable if any *or* the empty string otherwise (i.e., if
    this return is unannotated).

    Parameters
    ----------
    bear_call : BeartypeCall
        Decorated callable to be type-checked.

    Returns
    ----------
    str
        Code type-checking any annotated return of the decorated callable.

    Raises
    ----------
    BeartypeDecorHintPep484585Exception
        If this callable is either:

        * A coroutine *not* annotated by a :attr:`typing.Coroutine` type hint.
        * A generator *not* annotated by a :attr:`typing.Generator` type hint.
        * An asynchronous generator *not* annotated by a
          :attr:`typing.AsyncGenerator` type hint.
    BeartypeDecorHintNonpepException
        If the type hint annotating this return (if any) of this callable is
        neither:

        * **PEP-compliant** (i.e., :mod:`beartype`-agnostic hint compliant
          with annotation-centric PEPs).
        * **PEP-noncompliant** (i.e., :mod:`beartype`-specific type hint *not*
          compliant with annotation-centric PEPs)).
    '''
    assert bear_call.__class__ is BeartypeCall, (
        f'{repr(bear_call)} not @beartype call.')

    # Type hint annotating this callable's return if any *OR* "SENTINEL"
    # otherwise (i.e., if this return is unannotated).
    #
    # Note that "None" is a semantically meaningful PEP 484-compliant type hint
    # equivalent to "type(None)". Ergo, we *MUST* explicitly distinguish
    # between that type hint and an unannotated return.
    hint = bear_call.func_arg_name_to_hint_get('return', SENTINEL)

    # If this return is unannotated, silently reduce to a noop.
    if hint is SENTINEL:
        return ''
    # Else, this return is annotated.

    # Python code snippet to be returned, defaulting to the empty string
    # implying this callable's return to either be unannotated *OR* annotated
    # by a safely ignorable type hint.
    func_wrapper_code = ''

    # Attempt to...
    try:
        # This hint reduced to a simpler hint if this hint is either PEP 484-
        # *OR* 585-compliant *AND* requires reduction (e.g., from
        # "Coroutine[None, None, str]" to just "str"), raising an exception if
        # this hint is contextually invalid for this callable (e.g., generator
        # whose return is *NOT* annotated as "Generator[...]").
        #
        # Perform this reduction *BEFORE* performing subsequent tests (e.g., to
        # accept "Coroutine[None, None, typing.NoReturn]" as expected).
        hint = reduce_hint_pep484585_func_return(
            func=bear_call.func_wrappee, exception_prefix=EXCEPTION_PLACEHOLDER)

        # If this is the PEP 484-compliant "typing.NoReturn" type hint
        # permitted *ONLY* as a return annotation...
        if hint is NoReturn:
            # Default this snippet to a pre-generated snippet validating this
            # callable to *NEVER* successfully return. Yup!
            func_wrapper_code = PEP484_CODE_CHECK_NORETURN.format(
                func_call_prefix=bear_call.func_wrapper_code_call_prefix)
        # Else, this is *NOT* "typing.NoReturn". In this case...
        else:
            # Sanitize this hint to either:
            # * If this hint is PEP-noncompliant, the PEP-compliant type hint
            #   converted from this PEP-noncompliant type hint.
            # * Else if this hint is both PEP-compliant and supported, this
            #   hint as is.
            # * Else, raise an exception (i.e., if this hint is neither
            #   PEP-noncompliant nor a supported PEP-compliant hint).
            #
            # Do this first *BEFORE* passing this hint to any further callables.
            hint = sanify_func_hint_root(
                hint=hint, arg_name='return', bear_call=bear_call)

            # If this PEP-compliant hint is unignorable, generate and return a
            # snippet type-checking this return against this hint.
            if not is_hint_ignorable(hint):
                # Empty tuple, passed below to satisfy the
                # _unmemoize_func_wrapper_code() API.
                hint_forwardrefs_class_basename = ()

                # Generate a memoized parameter-agnostic code snippet
                # type-checking any parameter or return with any name.
                (
                    code_return_check_pith,
                    func_wrapper_scope,
                    hint_forwardrefs_class_basename,
                ) = make_func_wrapper_code(hint, bear_call.conf)  # type: ignore[assignment]

                # Merge the local scope required to type-check this return into
                # the local scope currently required by the current wrapper
                # function.
                update_mapping(
                    bear_call.func_wrapper_scope, func_wrapper_scope)

                # Unmemoize this snippet against this return.
                code_return_check_pith_unmemoized = _unmemoize_func_wrapper_code(
                    bear_call=bear_call,
                    func_wrapper_code=code_return_check_pith,
                    pith_repr=_RETURN_REPR,
                    hint_forwardrefs_class_basename=(
                        hint_forwardrefs_class_basename),
                )

                # Python code snippet type-checking this return.
                code_return_check_prefix = CODE_RETURN_CHECK_PREFIX.format(
                    func_call_prefix=bear_call.func_wrapper_code_call_prefix)

                # Return a Python code snippet:
                # * Calling the decorated callable and localizing its return
                #   *AND*...
                # * Type-checking this return *AND*...
                # * Returning this return from this wrapper function.
                func_wrapper_code = (
                    f'{code_return_check_prefix}'
                    f'{code_return_check_pith_unmemoized}'
                    f'{CODE_RETURN_CHECK_SUFFIX}'
                )
            # Else, this PEP-compliant hint is ignorable.
            # if not func_wrapper_code: print(f'Ignoring {bear_call.func_name} return hint {repr(hint)}...')
    # If any exception was raised, reraise this exception with each placeholder
    # substring (i.e., "EXCEPTION_PLACEHOLDER" instance) replaced by a
    # human-readable description of this callable and annotated return.
    except Exception as exception:
        reraise_exception_placeholder(
            exception=exception,
            target_str=prefix_callable_decorated_return(
                bear_call.func_wrappee),
        )

    # Return this code.
    return func_wrapper_code
# ....................{ PRIVATE ~ unmemoize }....................
def _unmemoize_func_wrapper_code(
    bear_call: BeartypeCall,
    func_wrapper_code: str,
    pith_repr: str,
    hint_forwardrefs_class_basename: tuple,
) -> str:
    '''
    Specialize the passed memoized code snippet -- type-checking *any*
    parameter or return of *any* callable -- to type-check one specific
    parameter or return of the currently decorated callable.

    Specifically, this function (in order):

    #. Replaces every occurrence of the
       :data:`CODE_PITH_ROOT_PARAM_NAME_PLACEHOLDER` placeholder substring in
       this code with the passed ``pith_repr``.
    #. Replaces every relative forward reference placeholder substring in this
       code with a Python expression resolving the referenced class relative
       to that callable via the hidden ``__beartypistry`` parameter.

    Parameters
    ----------
    bear_call : BeartypeCall
        Decorated callable to be type-checked.
    func_wrapper_code : str
        Memoized callable-agnostic code snippet type-checking any parameter or
        return of the decorated callable.
    pith_repr : str
        Machine-readable representation of the name of this parameter or
        return.
    hint_forwardrefs_class_basename : tuple
        Tuple of the unqualified classnames referred to by all relative forward
        reference type hints visitable from the current root type hint.

    Returns
    ----------
    str
        This memoized code unmemoized by globally resolving all relative
        forward reference placeholder substrings cached into this code relative
        to the currently decorated callable.
    '''
    assert bear_call.__class__ is BeartypeCall, (
        f'{repr(bear_call)} not @beartype call.')
    assert isinstance(func_wrapper_code, str), (
        f'{repr(func_wrapper_code)} not string.')
    assert isinstance(pith_repr, str), f'{repr(pith_repr)} not string.'
    assert isinstance(hint_forwardrefs_class_basename, Iterable), (
        f'{repr(hint_forwardrefs_class_basename)} not iterable.')

    # Specialize the parameter-agnostic pith name placeholder in this snippet
    # to the representation of this specific parameter or return name.
    unmemoized_code = replace_str_substrs(
        text=func_wrapper_code,
        old=CODE_PITH_ROOT_PARAM_NAME_PLACEHOLDER,
        new=pith_repr,
    )

    # If one or more relative forward reference placeholders were memoized
    # into this snippet, resolve each against the decorated callable.
    if hint_forwardrefs_class_basename:
        # Callable currently being decorated by @beartype.
        wrappee = bear_call.func_wrappee

        # Expose the beartypistry singleton to this wrapper function as the
        # hidden "__beartypistry" parameter.
        bear_call.func_wrapper_scope[ARG_NAME_TYPISTRY] = bear_typistry

        # For each unqualified classname referenced by a relative forward
        # reference type hint visitable from the current root type hint...
        for forwardref_basename in hint_forwardrefs_class_basename:
            # Placeholder substring memoized into this snippet for this
            # forward reference.
            forwardref_placeholder = (
                f'{PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_PREFIX}'
                f'{forwardref_basename}'
                f'{PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_SUFFIX}'
            )

            # Fully-qualified classname referred to by this forward reference
            # relative to the decorated callable.
            forwardref_classname = (
                get_hint_pep484585_forwardref_classname_relative_to_object(
                    hint=forwardref_basename, obj=wrappee))

            # Replace this placeholder with a Python expression evaluating to
            # this class when accessed via the private "__beartypistry"
            # parameter.
            unmemoized_code = replace_str_substrs(
                text=unmemoized_code,
                old=forwardref_placeholder,
                new=register_typistry_forwardref(forwardref_classname),
            )

    # Return this unmemoized callable-specific code snippet.
    return unmemoized_code
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Beartype decorator **wrapper function code snippets** (i.e., triple-quoted
pure-Python string constants formatted and concatenated together to dynamically
generate the implementations of wrapper functions type-checking
:func:`beartype.beartype`-decorated callables).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._check.checkmagic import (
VAR_NAME_PITH_ROOT,
VAR_NAME_RANDOM_INT,
)
from beartype._check.checkmagic import (
ARG_NAME_BEARTYPE_CONF,
ARG_NAME_FUNC,
ARG_NAME_RAISE_EXCEPTION,
VAR_NAME_ARGS_LEN,
)
from beartype._util.func.arg.utilfuncargiter import ArgKind
from beartype._util.text.utiltextmagic import CODE_INDENT_1
# ....................{ CODE }....................
CODE_SIGNATURE = f'''{{code_signature_prefix}}def {{func_name}}(
*args,
{{code_signature_args}}{CODE_INDENT_1}**kwargs
):'''
'''
Code snippet declaring the signature of a type-checking callable.
Note that:
* ``code_signature_prefix`` is usually either:
* For synchronous callables, the empty string.
* For asynchronous callables (e.g., asynchronous generators, coroutines),
the space-suffixed keyword ``"async "``.
'''
CODE_INIT_ARGS_LEN = f'''
# Localize the number of passed positional arguments for efficiency.
{VAR_NAME_ARGS_LEN} = len(args)'''
'''
PEP-agnostic code snippet localizing the number of passed positional arguments
for callables accepting one or more such arguments.
'''
# ....................{ CODE ~ check }....................
CODE_PITH_ROOT_PARAM_NAME_PLACEHOLDER = '?|PITH_ROOT_NAME`^'
'''
Placeholder source substring to be globally replaced by the **root pith name**
(i.e., name of the current parameter if called by the
:func:`pep_code_check_param` function *or* ``return`` if called by the
:func:`pep_code_check_return` function) in the parameter- and return-agnostic
code generated by the memoized :func:`make_func_wrapper_code` function.
See Also
----------
:func:`beartype._check.expr.exprmake.make_func_wrapper_code`
:attr:`beartype._util.error.utilerror.EXCEPTION_PLACEHOLDER`
Related commentary.
'''
CODE_HINT_ROOT_PREFIX = '''
# Type-check this passed parameter or return value against this
# PEP-compliant type hint.
if not '''
'''
PEP-compliant code snippet prefixing all code type-checking the **root pith**
(i.e., value of the current parameter or return value) against the root
PEP-compliant type hint annotating that pith.
This prefix is intended to be locally suffixed in the
:func:`beartype._check.expr.exprmake.make_func_wrapper_code` function by:
#. The value of the ``hint_child_placeholder`` local variable.
#. The :data:`CODE_HINT_ROOT_SUFFIX` suffix.
'''
CODE_HINT_ROOT_SUFFIX = f''':
raise {ARG_NAME_RAISE_EXCEPTION}(
func={ARG_NAME_FUNC},
conf={ARG_NAME_BEARTYPE_CONF},
pith_name={CODE_PITH_ROOT_PARAM_NAME_PLACEHOLDER},
pith_value={VAR_NAME_PITH_ROOT},{{random_int_if_any}}
)
'''
'''
PEP-compliant code snippet suffixing all code type-checking the **root pith**
(i.e., value of the current parameter or return value) against the root
PEP-compliant type hint annotating that pith.
This snippet expects to be formatted with these named interpolations:
* ``{random_int_if_any}``, whose value is either:
* If type-checking the current type hint requires a pseudo-random integer,
:data:`CODE_HINT_ROOT_SUFFIX_RANDOM_INT`.
* Else, the empty substring.
Design
----------
**This string is the only code snippet defined by this submodule to raise an
exception.** All other such snippets only test the current pith against the
current child PEP-compliant type hint and are thus intended to be dynamically
embedded in the conditional test initiated by the
:data:`CODE_HINT_ROOT_PREFIX` code snippet.
'''
CODE_HINT_ROOT_SUFFIX_RANDOM_INT = f'''
random_int={VAR_NAME_RANDOM_INT},'''
'''
PEP-compliant code snippet passing the value of the random integer previously
generated for the current call to the exception-handling function call embedded
in the :data:`CODE_HINT_ROOT_SUFFIX` snippet.
'''
# ....................{ CODE ~ arg }....................
PARAM_KIND_TO_CODE_LOCALIZE = {
# Snippet localizing any positional-only parameter (e.g.,
# "{posonlyarg}, /") by lookup in the wrapper's "*args" dictionary.
ArgKind.POSITIONAL_ONLY: f'''
# If this positional-only parameter was passed...
if {VAR_NAME_ARGS_LEN} > {{arg_index}}:
# Localize this positional-only parameter.
{VAR_NAME_PITH_ROOT} = args[{{arg_index}}]''',
# Snippet localizing any positional or keyword parameter as follows:
#
# * If this parameter's 0-based index (in the parameter list of the
# decorated callable's signature) does *NOT* exceed the number of
# positional parameters passed to the wrapper function, localize this
# positional parameter from the wrapper's variadic "*args" tuple.
# * Else if this parameter's name is in the dictionary of keyword
# parameters passed to the wrapper function, localize this keyword
# parameter from the wrapper's variadic "*kwargs" tuple.
# * Else, this parameter is unpassed. In this case, localize this parameter
# as a placeholder value guaranteed to *NEVER* be passed to any wrapper
# function: the private "__beartypistry" singleton passed to this wrapper
# function as a hidden default parameter and thus accessible here. While
# we could pass a "__beartype_sentinel" parameter to all wrapper
# functions defaulting to "object()" and then use that here instead,
# doing so would slightly reduce efficiency for no tangible gain. *shrug*
ArgKind.POSITIONAL_OR_KEYWORD: f'''
# Localize this positional or keyword parameter if passed *OR* to the
# sentinel "__beartype_raise_exception" guaranteed to never be passed.
{VAR_NAME_PITH_ROOT} = (
args[{{arg_index}}] if {VAR_NAME_ARGS_LEN} > {{arg_index}} else
kwargs.get({{arg_name!r}}, {ARG_NAME_RAISE_EXCEPTION})
)
# If this parameter was passed...
if {VAR_NAME_PITH_ROOT} is not {ARG_NAME_RAISE_EXCEPTION}:''',
# Snippet localizing any keyword-only parameter (e.g., "*, {kwarg}") by
# lookup in the wrapper's variadic "**kwargs" dictionary. (See above.)
ArgKind.KEYWORD_ONLY: f'''
# Localize this keyword-only parameter if passed *OR* to the sentinel value
# "__beartype_raise_exception" guaranteed to never be passed.
{VAR_NAME_PITH_ROOT} = kwargs.get({{arg_name!r}}, {ARG_NAME_RAISE_EXCEPTION})
# If this parameter was passed...
if {VAR_NAME_PITH_ROOT} is not {ARG_NAME_RAISE_EXCEPTION}:''',
# Snippet iteratively localizing all variadic positional parameters.
ArgKind.VAR_POSITIONAL: f'''
# For all passed variadic positional parameters...
for {VAR_NAME_PITH_ROOT} in args[{{arg_index!r}}:]:''',
#FIXME: Probably impossible to implement under the standard decorator
#paradigm, sadly. This will have to wait for us to fundamentally revise
#our signature generation algorithm.
# # Snippet iteratively localizing all variadic keyword parameters.
# ArgKind.VAR_KEYWORD: f'''
# # For all passed variadic keyword parameters...
# for {VAR_NAME_PITH_ROOT} in kwargs[{{arg_index!r}}:]:''',
}
'''
Dictionary mapping from the type of each callable parameter supported by the
:func:`beartype.beartype` decorator to a PEP-compliant code snippet localizing
that callable's next parameter to be type-checked.
'''
# ....................{ CODE ~ return ~ check }....................
CODE_RETURN_CHECK_PREFIX = f'''
# Call this function with all passed parameters and localize the value
# returned from this call.
{VAR_NAME_PITH_ROOT} = {{func_call_prefix}}{ARG_NAME_FUNC}(*args, **kwargs)
# Noop required to artificially increase indentation level. Note that
# CPython implicitly optimizes this conditional away. Isn't that nice?
if True:'''
'''
PEP-compliant code snippet calling the decorated callable and localizing the
value returned by that call.
Note that this snippet intentionally terminates on a noop increasing the
indentation level, enabling subsequent type-checking code to effectively ignore
indentation level and thus uniformly operate on both:
* Parameters localized via values of the
:data:`PARAM_KIND_TO_PEP_CODE_LOCALIZE` dictionary.
* Return values localized via this snippet.
See Also
----------
https://stackoverflow.com/a/18124151/2809027
Bytecode disassembly demonstrating that CPython optimizes away the spurious
``if True:`` conditional hardcoded into this snippet.
'''
CODE_RETURN_CHECK_SUFFIX = f'''
return {VAR_NAME_PITH_ROOT}'''
'''
PEP-compliant code snippet returning from the wrapper function the successfully
type-checked value returned from the decorated callable.
'''
# ....................{ CODE ~ return ~ check ~ noreturn }....................
#FIXME: *FALSE.* The following comment is entirely wrong, sadly. Although that
#comment does, in fact, apply to asynchronous generators, that comment does
#*NOT* apply to coroutines. PEP 484 stipulates that the returns of coroutines
#are annotated in the exact same standard way as the returns of synchronous
#callables are annotated: e.g.,
# # This is valid, but @beartype currently fails to support this.
# async def muh_coroutine() -> typing.NoReturn:
# await asyncio.sleep(0)
# raise ValueError('Dude, who stole my standards compliance?')
#
#Generalize this snippet to contain a "{{func_call_prefix}}" substring prefixing
#the "{ARG_NAME_FUNC}(*args, **kwargs)" call, please.
# Unlike above, this snippet intentionally omits the "{{func_call_prefix}}"
# substring prefixing the "{ARG_NAME_FUNC}(*args, **kwargs)" call. Why? Because
# callables whose returns are annotated by "typing.NoReturn" *MUST* necessarily
# be synchronous (rather than asynchronous) and thus require no such prefix.
# Why? Because the returns of asynchronous callables are either unannotated
# *OR* annotated by either "Coroutine[...]" *OR* "AsyncGenerator[...]" type
# hints. Since "typing.NoReturn" is neither, "typing.NoReturn" *CANNOT*
# annotate the returns of asynchronous callables. The implication then follows.
PEP484_CODE_CHECK_NORETURN = f'''
# Call this function with all passed parameters and localize the value
# returned from this call.
{VAR_NAME_PITH_ROOT} = {{func_call_prefix}}{ARG_NAME_FUNC}(*args, **kwargs)
# Since this function annotated by "typing.NoReturn" successfully returned a
# value rather than raising an exception or halting the active Python
# interpreter, unconditionally raise an exception.
raise {ARG_NAME_RAISE_EXCEPTION}(
func={ARG_NAME_FUNC},
conf={ARG_NAME_BEARTYPE_CONF},
pith_name='return',
pith_value={VAR_NAME_PITH_ROOT},
)'''
'''
:pep:`484`-compliant code snippet calling the decorated callable annotated by
the :attr:`typing.NoReturn` singleton and raising an exception if this call
successfully returned a value rather than raising an exception or halting the
active Python interpreter.
'''
# ....................{ CODE ~ return ~ uncheck }....................
CODE_RETURN_UNCHECKED = f'''
# Call this function with all passed parameters and return the value
# returned from this call.
return {{func_call_prefix}}{ARG_NAME_FUNC}(*args, **kwargs)'''
'''
PEP-agnostic code snippet calling the decorated callable *without*
type-checking the value returned by that call (if any).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype type-checking function code factories** (i.e., low-level
callables dynamically generating pure-Python code snippets type-checking
arbitrary objects passed to arbitrary callables against PEP-compliant type hints
passed to those same callables).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: Create a new make_func_raiser_code() factory. After doing so, refactor
#the lower-level beartype._decor._wrapper._wrappercode.make_func_wrapper_code()
#factory in terms of that higher-level make_func_raiser_code() factory, please.
#
#Note that doing so *WILL* prove non-trivial. That's why this submodule has
#currently focused only on the make_func_tester_code() factory. Why the
#non-triviality? Because make_func_raiser_code() will need to embed a substring
#raising an exception by calling a beartype-specific exception handler that does
#*NOT* currently exist. To create that handler, we'll need to:
#* Generalize the existing decoration-specific
# "beartype._decor._error.errormain" submodule into a new general-purpose
# "beartype._check._checkerror" submodule. To do so, initially just copy the
# former to the latter. Do *NOT* bother generalizing any other submodules of
# the "beartype._decor._error" subpackage, for the moment. One thing at a time.
#* Rename the *COPIED* beartype._check._checkerror.get_beartype_violation()
# getter to get_func_raiser_violation().
#* Refactor get_func_raiser_violation() to have a signature resembling:
# def get_func_raiser_violation(
# # Mandatory parameters.
# obj: object,
# hint: object,
# exception_prefix: str,
#
# # Optional parameters.
# random_int: Optional[int] = None,
# ) -> BeartypeCallHintViolation:
#
# Crucially, note the new mandatory "exception_prefix" parameter, enabling
# callers to generate violation exceptions with arbitrary context-specific
# human-readable prefixes.
#* Shift code currently residing in the BeartypeCall.reinit() method that
# adds "ARG_NAME_RAISE_EXCEPTION" to "func_wrapper_scope" into the
# make_func_raiser_code() factory instead.
#* Refactor the original lower-level
# beartype._decor._error.errormain.get_beartype_violation() getter in terms of
# the new higher-level get_func_raiser_violation() getter.
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeConfException,
BeartypeDecorHintForwardRefException,
)
# from beartype.roar._roarexc import _BeartypeCheckException
from beartype._conf.confcls import (
BEARTYPE_CONF_DEFAULT,
BeartypeConf,
)
from beartype._data.datatyping import (
CallableTester,
# TypeException,
)
from beartype._check.checkmagic import (
FUNC_TESTER_NAME_PREFIX,
)
from beartype._check.conv.convsanify import sanify_hint_root
from beartype._check.expr.exprmake import make_check_expr
from beartype._check.util.checkutilmake import make_func_signature
from beartype._check._checksnip import (
FUNC_TESTER_CODE_RETURN,
FUNC_TESTER_CODE_SIGNATURE,
)
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.error.utilerror import EXCEPTION_PLACEHOLDER
from beartype._util.func.utilfuncmake import make_func
from beartype._util.hint.utilhinttest import is_hint_ignorable
from itertools import count
# ....................{ PRIVATE ~ globals }....................
# Monotonically increasing counter guaranteeing a unique integer suffix for
# the name of each tester function generated by the make_func_tester() factory.
_func_tester_name_counter = count(start=0, step=1)
'''
**Type-checking tester function name uniquifier** (i.e., iterator yielding the
next integer incrementation starting at 0, leveraged by the
:func:`make_func_tester` factory to uniquify the names of the tester functions
created by that factory).
'''
# ....................{ PRIVATE ~ testers }....................
def _func_tester_ignorable(obj: object) -> bool:
'''
**Ignorable type-checking tester function singleton** (i.e., function
unconditionally returning ``True``, semantically equivalent to a tester
testing whether an arbitrary object passed to this tester satisfies an
ignorable PEP-compliant type hint).
The :func:`make_func_tester` factory efficiently returns this singleton when
passed an ignorable type hint rather than inefficiently regenerating a
unique ignorable type-checking tester function for that hint.
'''
return True
# ....................{ MAKERS }....................
@callable_cached
def make_func_tester(
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: All calls to this memoized factory pass parameters *POSITIONALLY*
    # rather than by keyword. Care should be taken when refactoring parameters,
    # particularly with respect to parameter position.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Mandatory parameters.
    hint: object,

    # Optional parameters.
    conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
) -> CallableTester:
    '''
    **Type-checking tester function factory** (i.e., low-level callable
    dynamically generating a pure-Python tester function returning ``True``
    only if the single arbitrary object passed to that tester satisfies the
    PEP-compliant type hint passed to this factory).

    This factory is memoized for efficiency.

    Caveats
    ----------
    **This factory intentionally accepts no** ``exception_prefix``
    **parameter.** Such a parameter is typically caller-specific; accepting it
    would prevent memoization on the passed hint alone. This factory instead
    raises generic exceptions embedding the
    :attr:`beartype._util.error.utilerror.EXCEPTION_PLACEHOLDER` substring,
    which the caller is required to catch and replace via the
    :func:`beartype._util.error.utilerror.reraise_exception_placeholder`
    function.

    **This factory intentionally accepts no** ``exception_cls`` **parameter.**
    Doing so would only ambiguously obscure context-sensitive exceptions
    raised by lower-level utility functions called by this factory.

    Parameters
    ----------
    hint : object
        PEP-compliant type hint to be type-checked.
    conf : BeartypeConf, optional
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object). Defaults
        to ``BeartypeConf()``, the default ``O(1)`` constant-time configuration.

    Returns
    ----------
    CallableTester
        Type-checking tester function generated by this factory for this hint.

    Raises
    ----------
    All exceptions raised by the lower-level :func:`make_check_expr` factory,
    as well as:

    BeartypeConfException
        If this configuration is *not* a :class:`BeartypeConf` instance.
    BeartypeDecorHintForwardRefException
        If this hint contains one or more relative forward references, which
        this factory explicitly prohibits to improve both the efficiency and
        portability of calls by users to the resulting type-checker.
    _BeartypeUtilCallableException
        If this factory erroneously generates a syntactically invalid
        type-checking tester function. This should *never* happen.

    Warns
    ----------
    All warnings emitted by the lower-level :func:`make_check_expr` factory.
    '''

    # Reject anything that is *NOT* an actual beartype configuration up front,
    # since all subsequent logic assumes a valid configuration.
    if not isinstance(conf, BeartypeConf):
        raise BeartypeConfException(
            f'{repr(conf)} not beartype configuration.')
    # Else, "conf" is a valid configuration.

    # Coerce this hint into a supported PEP-compliant hint if feasible *OR*
    # raise an exception otherwise -- *BEFORE* passing this hint onward.
    hint = sanify_hint_root(
        hint=hint,
        conf=conf,
        exception_prefix=EXCEPTION_PLACEHOLDER,
    )

    # An ignorable hint is satisfied by literally every object. Short-circuit
    # by returning the shared trivial tester rather than generating a new one.
    if is_hint_ignorable(hint):
        return _func_tester_ignorable
    # Else, this hint is unignorable.

    # Boolean expression type-checking an arbitrary object against this hint,
    # the local scope required by that expression, and the unqualified
    # basenames of all relative forward references embedded in this hint.
    (
        tester_expr,
        tester_scope,
        forwardref_basenames,
    ) = make_check_expr(hint, conf)

    # A hint containing relative forward references is valid only with respect
    # to the caller's current lexical scope -- which the tester function
    # created below is *NOT* guaranteed to share. Even if it did, calls to
    # that tester would tacitly depend on the original scope and thus could
    # not be safely copied into other submodules, classes, or callables.
    # Resolving these references dynamically (by introspecting the call stack
    # for the first non-beartype frame) would be non-trivial, non-portable,
    # and prohibitively slow for hot paths like beartype.door.is_bearable().
    # Ergo, relative forward references are prohibited outright here.
    if forwardref_basenames:
        raise BeartypeDecorHintForwardRefException(
            f'{EXCEPTION_PLACEHOLDER}type hint {repr(hint)} '
            f'contains one or more relative forward references:\n'
            f'\t{repr(forwardref_basenames)}\n'
            f'Beartype prohibits relative forward references outside of '
            f'@beartype-decorated callables. For your own personal safety and '
            f'those of the codebases you love, consider canonicalizing these '
            f'relative forward references into absolute forward references '
            f'(e.g., by replacing "MuhClass" with "muh_module.MuhClass").'
        )
    # Else, this hint contains *NO* relative forward references.

    # Unqualified basename of this tester, uniquified by suffixing the next
    # integer yielded by the module-scoped monotonic counter.
    tester_name = (
        f'{FUNC_TESTER_NAME_PREFIX}{next(_func_tester_name_counter)}')

    # Code snippet declaring this tester's signature.
    tester_signature = make_func_signature(
        func_name=tester_name,
        func_scope=tester_scope,
        code_signature_format=FUNC_TESTER_CODE_SIGNATURE,
        conf=conf,
    )

    # Code snippet returning the boolean result of evaluating this expression
    # against the arbitrary object passed to this tester.
    tester_return = FUNC_TESTER_CODE_RETURN.format(
        code_check_expr=tester_expr)

    # Dynamically declare and return this tester function.
    return make_func(
        func_name=tester_name,
        func_code=f'{tester_signature}{tester_return}',
        func_locals=tester_scope,
        func_label=f'{EXCEPTION_PLACEHOLDER}tester {tester_name}()',
        is_debug=conf.is_debug,
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Beartype decorator **type-checking function code magic** (i.e., global string
constants embedded in the implementations of functions type-checking arbitrary
objects against arbitrary PEP-compliant type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ NAMES ~ parameter }....................
FUNC_TESTER_NAME_PREFIX = '__beartype_tester_'
'''
Substring prefixing the unqualified basenames of all type-checking tester
functions created by the :func:`beartype._check.checkmagic.make_func_tester`
factory.
'''
# ....................{ NAMES ~ parameter }....................
# To avoid colliding with the names of arbitrary caller-defined parameters, the
# beartype-specific parameter names *MUST* be prefixed by "__beartype_".
ARG_NAME_BEARTYPE_CONF = '__beartype_conf'
'''
Name of the **private beartype configuration parameter** (i.e.,
:mod:`beartype`-specific parameter whose default value is the
:class:`beartype.BeartypeConf` instance configuring each wrapper function
generated by the :func:`beartype.beartype` decorator).
'''
ARG_NAME_FUNC = '__beartype_func'
'''
Name of the **private decorated callable parameter** (i.e.,
:mod:`beartype`-specific parameter whose default value is the decorated
callable passed to each wrapper function generated by the
:func:`beartype.beartype` decorator).
'''
ARG_NAME_GETRANDBITS = '__beartype_getrandbits'
'''
Name of the **private getrandbits parameter** (i.e., :mod:`beartype`-specific
parameter whose default value is the highly performant C-based
:func:`random.getrandbits` function conditionally passed to every wrapper
functions generated by the :func:`beartype.beartype` decorator internally
requiring one or more random integers).
'''
ARG_NAME_RAISE_EXCEPTION = '__beartype_get_violation'
'''
Name of the **private exception raising parameter** (i.e.,
:mod:`beartype`-specific parameter whose default value is the
:func:`beartype._decor._error.errormain.get_beartype_violation`
function raising human-readable exceptions on call-time type-checking failures
passed to each wrapper function generated by the :func:`beartype.beartype`
decorator).
'''
ARG_NAME_TYPISTRY = '__beartypistry'
'''
Name of the **private beartypistry parameter** (i.e., :mod:`beartype`-specific
parameter whose default value is the beartypistry singleton conditionally
passed to every wrapper function generated by the :func:`beartype.beartype`
decorator requiring one or more types or tuples of types cached by this
singleton).
'''
# ....................{ NAMES ~ locals }....................
VAR_NAME_ARGS_LEN = '__beartype_args_len'
'''
Name of the local variable providing the **positional argument count** (i.e.,
number of positional arguments passed to the current call).
'''
VAR_NAME_RANDOM_INT = '__beartype_random_int'
'''
Name of the local variable providing a **pseudo-random integer** (i.e.,
unsigned 32-bit integer pseudo-randomly generated for subsequent use in
type-checking randomly indexed container items by the current call).
'''
# ....................{ NAMES ~ locals : pith }....................
VAR_NAME_PREFIX_PITH = '__beartype_pith_'
'''
Substring prefixing all local variables providing a **pith** (i.e., either the
current parameter or return value *or* item contained in the current parameter
or return value being type-checked by the current call).
'''
VAR_NAME_PITH_ROOT = f'{VAR_NAME_PREFIX_PITH}0'
'''
Name of the local variable providing the **root pith** (i.e., value of the
current parameter or return value being type-checked by the current call).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **type-checking function code snippets** (i.e., triple-quoted
pure-Python string constants formatted and concatenated together to dynamically
generate the implementations of functions type-checking arbitrary objects
against arbitrary PEP-compliant type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._check.checkmagic import (
VAR_NAME_PITH_ROOT,
)
# ....................{ CODE ~ signature }....................
FUNC_TESTER_CODE_SIGNATURE = f'''{{code_signature_prefix}}def {{func_name}}(
{VAR_NAME_PITH_ROOT}: object,
{{code_signature_args}}
) -> bool:'''
'''
Code snippet declaring the signature of all type-checking tester functions
created by the :func:`beartype._check.checkmagic.make_func_tester` factory.
Note that:
* This signature intentionally names the single public parameter accepted by
this tester function ``{VAR_NAME_PITH_ROOT}``. Doing so trivially ensures that
the memoized type-checking boolean expression generated by the
:func:`beartype._check.expr.exprmake.make_check_expr` code factory implicitly
type-checks the passed object *without* further modification (e.g., global
search-and-replacement), ensuring that memoized expression may be efficiently
reused as is *without* subsequent unmemoization. Clever, huh? Yeah... we know.
* ``code_signature_prefix`` is usually either:
* For synchronous callables, the empty string.
* For asynchronous callables (e.g., asynchronous generators, coroutines),
the space-suffixed keyword ``"async "``.
'''
# ....................{ CODE ~ return }....................
FUNC_TESTER_CODE_RETURN = '''
# Return true only if the passed object satisfies this type hint.
return {code_check_expr}'''
'''
Code snippet returning the boolean result of type-checking the arbitrary object
passed to a type-checking tester function against the type hint passed to the
factory function creating that tester function.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype dataclass** (i.e., class aggregating *all* metadata for the callable
currently being decorated by the :func:`beartype.beartype` decorator).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.peps import resolve_pep563
from beartype.roar import BeartypeDecorWrappeeException
from beartype.typing import (
Callable,
Dict,
)
from beartype._cave._cavefast import CallableCodeObjectType
from beartype._cave._cavemap import NoneTypeOr
from beartype._check.checkmagic import (
ARG_NAME_BEARTYPE_CONF,
ARG_NAME_FUNC,
ARG_NAME_RAISE_EXCEPTION,
)
from beartype._conf.confcls import BeartypeConf
from beartype._data.datatyping import (
LexicalScope,
TypeStack,
)
from beartype._util.func.utilfunccodeobj import get_func_codeobj
from beartype._util.func.utilfunctest import is_func_coro
from beartype._util.func.utilfuncwrap import unwrap_func
# ....................{ CLASSES }....................
class BeartypeCall(object):
'''
**Beartype data** (i.e., object aggregating *all* metadata for the callable
currently being decorated by the :func:`beartype.beartype` decorator).
Design
----------
This the *only* object instantiated by that decorator for that callable,
substantially reducing both space and time costs. That decorator then
passes this object to most lower-level functions, which then:
#. Access read-only instance variables of this object as input.
#. Modify writable instance variables of this object as output. In
particular, these lower-level functions typically accumulate pure-Python
code comprising the generated wrapper function type-checking the
decorated callable by setting various instance variables of this object.
Caveats
----------
**This object cannot be used to communicate state between low-level
memoized callables** (e.g.,
:func:`beartype._check.expr.exprmake.make_func_wrapper_code`) **and
higher-level callables** (e.g.,
:func:`beartype._decor._wrapper.wrappermain.generate_code`). Instead, memoized
callables *must* return that state as additional return values up the call
stack to those higher-level callables. By definition, memoized callables
are *not* recalled on subsequent calls passed the same parameters. Since
only the first call to those callables passed those parameters would set
the appropriate state on this object intended to be communicated to
higher-level callables, *all* subsequent calls would subtly fail with
difficult-to-diagnose issues. See also `<issue #5_>`__, which exhibited
this very complaint.
.. _issue #5:
https://github.com/beartype/beartype/issues/5
Attributes
----------
cls_stack : TypeStack
**Type stack** (i.e., either tuple of zero or more arbitrary types *or*
``None``). Defaults to ``None``. See also the parameter of the same name
accepted by the
:func:`beartype._decor.decorcore.beartype_object` function for details.
conf : BeartypeConf
**Beartype configuration** (i.e., self-caching dataclass encapsulating
all flags, options, settings, and other metadata configuring the
current decoration of the decorated callable).
func_arg_name_to_hint : dict[str, object]
Dictionary mapping from the name of each annotated parameter accepted
by the decorated callable to the type hint annotating that parameter.
func_arg_name_to_hint_get : Callable[[str, object], object]
:meth:`dict.get` method bound to the :attr:`func_arg_name_to_hint`
dictionary, localized as a negligible microoptimization. Blame Guido.
func_wrappee : Optional[Callable]
Possibly wrapped **decorated callable** (i.e., high-level callable
currently being decorated by the :func:`beartype.beartype` decorator)
if the :meth:`reinit` method has been called *or* ``None`` otherwise.
Note the lower-level :attr:`func_wrappee_wrappee` callable should
*usually* be accessed instead; although higher-level, this callable may
only be a wrapper function and hence yield inaccurate or even erroneous
metadata (especially the code object) for the callable being wrapped.
func_wrappee_codeobj : CallableCodeObjectType
Possibly wrapped **decorated callable wrappee code object** (i.e.,
code object underlying the high-level :attr:`func_wrappee` callable
currently being decorated by the :func:`beartype.beartype` decorator).
For efficiency, this code object should *always* be accessed in lieu of
inefficiently calling the comparatively slower
:func:`beartype._util.func.utilfunccodeobj.get_func_codeobj` getter.
func_wrappee_wrappee : Optional[Callable]
Possibly unwrapped **decorated callable wrappee** (i.e., low-level
callable wrapped by the high-level :attr:`func_wrappee` callable
currently being decorated by the :func:`beartype.beartype` decorator)
if the :meth:`reinit` method has been called *or* ``None`` otherwise.
If the higher-level :attr:`func_wrappee` callable does *not* actually
wrap another callable, this callable is identical to that callable.
func_wrappee_wrappee_codeobj : CallableCodeObjectType
Possibly unwrapped **decorated callable wrappee code object** (i.e.,
code object underlying the low-level :attr:`func_wrappee_wrappee`
callable wrapped by the high-level :attr:`func_wrappee` callable
currently being decorated by the :func:`beartype.beartype` decorator).
For efficiency, this code object should *always* be accessed in lieu of
inefficiently calling the comparatively slower
:func:`beartype._util.func.utilfunccodeobj.get_func_codeobj` getter.
func_wrapper_code_call_prefix : Optional[str]
Code snippet prefixing all calls to the decorated callable in the body
of the wrapper function wrapping that callable with type checking if
the :meth:`reinit` method has been called *or* ``None`` otherwise. If
non-``None``, this string is guaranteed to be either:
* If the decorated callable is synchronous (i.e., neither a coroutine
nor asynchronous generator), the empty string.
* If the decorated callable is asynchronous (i.e., either a coroutine
nor asynchronous generator), the ``"await "`` keyword.
func_wrapper_code_signature_prefix : Optional[str]
Code snippet prefixing the signature declaring the wrapper function
wrapping the decorated callable with type checking if the
:meth:`reinit` method has been called *or* ``None`` otherwise. If
non-``None``, this string is guaranteed to be either:
* If the decorated callable is synchronous (i.e., neither a coroutine
nor asynchronous generator), the empty string.
* If the decorated callable is asynchronous (i.e., either a coroutine
nor asynchronous generator), the ``"async "`` keyword.
func_wrapper_scope : LexicalScope
**Local scope** (i.e., dictionary mapping from the name to value of
each attribute referenced in the signature) of this wrapper function.
func_wrapper_name : Optional[str]
Machine-readable name of the wrapper function to be generated and
returned by this decorator if the :meth:`reinit` method has been called
*or* ``None`` otherwise.
'''
# ..................{ CLASS VARIABLES }..................
# Slot all instance variables defined on this object to minimize the time
# complexity of both reading and writing variables across frequently
# called @beartype decorations. Slotting has been shown to reduce read and
# write costs by approximately ~10%, which is non-trivial.
__slots__ = (
'cls_stack',
'conf',
'func_arg_name_to_hint',
'func_arg_name_to_hint_get',
'func_wrappee_codeobj',
'func_wrappee_wrappee_codeobj',
'func_wrappee',
'func_wrappee_wrappee',
'func_wrapper_code_call_prefix',
'func_wrapper_code_signature_prefix',
'func_wrapper_scope',
'func_wrapper_name',
)
# Coerce instances of this class to be unhashable, preventing spurious
# issues when accidentally passing these instances to memoized callables by
# implicitly raising an "TypeError" exceptions on the first call to such a
# callable. There exists no tangible benefit to permitting these instances
# to be hashed (and thus also cached), since these instances are:
# * Specific to the decorated callable and thus *NOT* safely cacheable
# across functions applying to different decorated callables.
# * Already cached via the acquire_object_typed() function called by the
# "beartype._decor.decormain" submodule.
#
# See also:
# https://docs.python.org/3/reference/datamodel.html#object.__hash__
__hash__ = None # type: ignore[assignment]
# ..................{ INITIALIZERS }..................
    def __init__(self) -> None:
        '''
        Initialize this metadata by nullifying all instance variables.

        Caveats
        ----------
        **This class is not intended to be explicitly instantiated.** Instead,
        callers are expected to (in order):

        #. Acquire cached instances of this class via the
           :mod:`beartype._util.cache.pool.utilcachepoolobjecttyped` submodule.
        #. Call the :meth:`reinit` method on these instances to properly
           initialize these instances.
        '''

        # Nullify instance variables for safety. The "type: ignore" pragmas
        # below quell static type-checker complaints about assigning "None" to
        # non-optional variables; reinit() subsequently overwrites these
        # sentinels with valid values before any other method reads them.
        self.cls_stack: TypeStack = None
        self.conf: BeartypeConf = None  # type: ignore[assignment]
        self.func_arg_name_to_hint: Dict[str, object] = None  # type: ignore[assignment]
        self.func_arg_name_to_hint_get: Callable[[str, object], object] = None  # type: ignore[assignment]
        self.func_wrappee: Callable = None  # type: ignore[assignment]
        self.func_wrappee_codeobj: CallableCodeObjectType = None  # type: ignore[assignment]
        self.func_wrappee_wrappee: Callable = None  # type: ignore[assignment]
        self.func_wrappee_wrappee_codeobj: CallableCodeObjectType = None  # type: ignore[assignment]
        self.func_wrapper_code_call_prefix: str = None  # type: ignore[assignment]
        self.func_wrapper_code_signature_prefix: str = None  # type: ignore[assignment]

        # Mutable wrapper-function scope, cleared in-place (rather than
        # replaced) by each subsequent call to reinit().
        self.func_wrapper_scope: LexicalScope = {}
        self.func_wrapper_name: str = None  # type: ignore[assignment]
def reinit(
self,
# Mandatory parameters.
func: Callable,
conf: BeartypeConf,
# Optional parameters.
cls_stack: TypeStack = None,
) -> None:
'''
Reinitialize this metadata from the passed callable, typically after
acquisition of a previously cached instance of this class from the
:mod:`beartype._util.cache.pool.utilcachepoolobject` submodule.
If :pep:`563` is conditionally active for this callable, this function
additionally resolves all postponed annotations on this callable to
their referents (i.e., the intended annotations to which those
postponed annotations refer).
Parameters
----------
func : Callable
Callable currently being decorated by :func:`beartype.beartype`.
conf : BeartypeConf
Beartype configuration configuring :func:`beartype.beartype`
specific to this callable.
cls_root : Optional[type]
**Root decorated class** if any or ``None`` otherwise. Defaults to
``None``. See the class docstring for further details.
cls_curr : Optional[type]
**Current decorated class** if any or ``None`` otherwise. Defaults
to ``None``. See the class docstring for further details.
Raises
----------
BeartypePep563Exception
If evaluating a postponed annotation on this callable raises an
exception (e.g., due to that annotation referring to local state no
longer accessible from this deferred evaluation).
BeartypeDecorWrappeeException
If either:
* This callable is uncallable.
* This callable is neither a pure-Python function *nor* method;
equivalently, if this callable is either C-based *or* a class or
object defining the ``__call__()`` dunder method.
* This configuration is *not* actually a configuration.
* ``cls_owner`` is neither a class *nor* ``None``.
'''
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Note this method intentionally avoids creating and passing an
# "exception_prefix" substring to callables called below. Why? Because
# exhaustive profiling has shown that creating that substring consumes a
# non-trivial slice of decoration time. In other words, efficiency.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Avoid circular import dependencies.
from beartype._decor._error.errormain import get_beartype_violation
# If this callable is uncallable, raise an exception.
if not callable(func):
raise BeartypeDecorWrappeeException(f'{repr(func)} uncallable.')
# Else, this callable is callable.
#
# If this configuration is *NOT* a configuration, raise an exception.
elif not isinstance(conf, BeartypeConf):
raise BeartypeDecorWrappeeException(
f'"conf" {repr(conf)} not beartype configuration.')
# Else, this configuration is a configuration.
#
# If this class stack is neither a tuple *NOR* "None", raise an
# exception.
elif not isinstance(cls_stack, _TypeStackOrNone):
raise BeartypeDecorWrappeeException(
f'"cls_stack" {repr(cls_stack)} neither tuple nor "None".')
# Else, this class stack is either a tuple *OR* "None".
# If this class stack is *NOT* "None", this class stack is a tuple. In
# this case, for each item of this class stack tuple...
if cls_stack is not None:
for cls_stack_item in cls_stack:
# If this item is *NOT* a type, raise an exception.
if not isinstance(cls_stack_item, type):
raise BeartypeDecorWrappeeException(
f'"cls_stack" item {repr(cls_stack_item)} not type.')
# Else, this class stack is "None".
# Classify all passed parameters.
self.cls_stack = cls_stack
self.conf = conf
# Possibly wrapped callable currently being decorated.
self.func_wrappee = func
# Possibly unwrapped callable unwrapped from that callable.
self.func_wrappee_wrappee = unwrap_func(func)
# Possibly wrapped callable code object.
self.func_wrappee_codeobj = get_func_codeobj(
func=func, exception_cls=BeartypeDecorWrappeeException)
# Possibly unwrapped callable code object.
self.func_wrappee_wrappee_codeobj = get_func_codeobj(
func=self.func_wrappee_wrappee,
exception_cls=BeartypeDecorWrappeeException,
)
# Efficiently reduce this local scope back to the dictionary of all
# parameters unconditionally required by *ALL* wrapper functions.
self.func_wrapper_scope.clear()
self.func_wrapper_scope[ARG_NAME_FUNC] = func
self.func_wrapper_scope[ARG_NAME_BEARTYPE_CONF] = conf
#FIXME: Non-ideal. This should *NOT* be set here but rather in the
#lower-level code generating factory function that actually embeds the
#call to this function (e.g.,
#beartype._check._checkcode.make_func_code()).
self.func_wrapper_scope[ARG_NAME_RAISE_EXCEPTION] = (
get_beartype_violation)
# Machine-readable name of the wrapper function to be generated.
self.func_wrapper_name = func.__name__
# Resolve all postponed hints on this callable if any *BEFORE* parsing
# the actual hints these postponed hints refer to.
resolve_pep563(
func=self.func_wrappee,
cls_stack=self.cls_stack,
)
#FIXME: Globally replace all references to "__annotations__" throughout
#the "beartype._decor" subpackage with references to this instead.
#Since doing so is a negligible optimization, this is fine... for now.
# Annotations dictionary *AFTER* resolving all postponed hints.
#
# The functools.update_wrapper() function underlying the
# @functools.wrap decorator underlying all sane decorators propagates
# this dictionary by default from lower-level wrappees to higher-level
# wrappers. We intentionally classify the annotations dictionary of
# this higher-level wrapper, which *SHOULD* be the superset of that of
# this lower-level wrappee (and thus more reflective of reality).
self.func_arg_name_to_hint = func.__annotations__
# dict.get() method bound to this dictionary.
self.func_arg_name_to_hint_get = self.func_arg_name_to_hint.get
# If this callable is an asynchronous coroutine callable (i.e.,
# callable declared with "async def" rather than merely "def" keywords
# containing *NO* "yield" expressions)...
#
# Note that:
# * The code object of the higher-level wrapper rather than lower-level
# wrappee is passed. Why? Because @beartype directly decorates *ONLY*
# the former, whose asynchronicity has *NO* relation to that of the
# latter. Notably, it is both feasible and (relatively) commonplace
# for third-party decorators to enable:
# * Synchronous callables to be called asynchronously by wrapping
# synchronous callables with asynchronous closures.
# * Asynchronous callables to be called synchronously by wrapping
# asynchronous callables with synchronous closures. Indeed, our
# top-level "conftest.py" pytest plugin does exactly this --
# enabling asynchronous tests to be safely called by pytest's
# currently synchronous framework.
# * The higher-level is_func_async() tester is intentionally *NOT*
# called here, as doing so would also implicitly prefix all calls to
# asynchronous generator callables (i.e., callables also declared
# with the "async def" rather than merely "def" keywords but
# containing one or more "yield" expressions) with the "await"
# keyword. Whereas asynchronous coroutine objects implicitly returned
# by all asynchronous coroutine callables return a single awaitable
# value, asynchronous generator objects implicitly returned by all
# asynchronous generator callables *NEVER* return any awaitable
# value; they instead yield one or more values to external "async
# for" loops.
if is_func_coro(self.func_wrappee_codeobj):
# Code snippet prefixing all calls to this callable.
self.func_wrapper_code_call_prefix = 'await '
# Code snippet prefixing the declaration of the wrapper function
# wrapping this callable with type-checking.
self.func_wrapper_code_signature_prefix = 'async '
# Else, this callable is synchronous (i.e., callable declared with
# "def" rather than "async def"). In this case, reduce these code
# snippets to the empty string.
else:
self.func_wrapper_code_call_prefix = ''
self.func_wrapper_code_signature_prefix = ''
# ....................{ GLOBALS ~ private }....................
_TypeStackOrNone = NoneTypeOr[tuple]
'''
2-tuple ``(type, type(None)``, globally cached for negligible space and time
efficiency gains on validating passed parameters below.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **type-checking function utility code snippets** (i.e.,
triple-quoted pure-Python string constants formatted and concatenated together
to dynamically generate the implementations of functions type-checking arbitrary
objects against arbitrary PEP-compliant type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._check.checkmagic import (
ARG_NAME_GETRANDBITS,
VAR_NAME_RANDOM_INT,
)
from beartype._util.text.utiltextmagic import CODE_INDENT_1
# ....................{ CODE }....................
CODE_SIGNATURE_ARG = (
# Indentation prefixing all wrapper parameters.
f'{CODE_INDENT_1}'
# Default this parameter to the current value of the module-scoped attribute
# of the same name, passed to the make_func() function by the parent
# @beartype decorator. While awkward, this is the optimally efficient means
# of exposing arbitrary attributes to the body of this wrapper function.
f'{{arg_name}}={{arg_name}},{{arg_comment}}'
# Newline for readability.
f'\n'
)
'''
Code snippet declaring a **hidden parameter** (i.e., parameter whose name is
prefixed by ``"__beartype_"`` and whose value is that of an external attribute
internally referenced in the body of a type-checking callable) in the signature
of that callable.
'''
# ....................{ CODE ~ init }....................
#FIXME: Note that NumPy provides an efficient means of generating a large
#number of pseudo-random integers all-at-once. The core issue there, of
#course, is that we then need to optionally depend upon and detect NumPy,
#which then requires us to split our random integer generation logic into two
#parallel code paths that we'll then have to maintain -- and the two will be
#rather different. In any case, here's how one generates a NumPy array
#containing 100 pseudo-random integers in the range [0, 127]:
# random_ints = numpy.random.randint(128, size=100)
#
#To leverage that sanely, we'd need to:
#* Globally cache that array somewhere.
#* Globally cache the current index into that array.
#* When NumPy is unimportable, fallback to generating a Python list containing
# the same number of pseudo-random integers in the same range.
#* In either case, we'd probably want to wrap that logic in a globally
# accessible infinite generator singleton that returns another pseudo-random
# integer every time you iterate it. This assumes, of course, that iterating
# generators is reasonably fast in Python. (If not, just make that a getter
# method of a standard singleton object.)
#* Replace the code snippet below with something resembling:
# '''
# __beartype_random_int = next(__beartype_random_int_generator)
# '''
#Note that thread concurrency issues are probable ignorable here, but that
#there's still a great deal of maintenance and refactoring that would need to
#happen to sanely support this. In other words, ain't happenin' anytime soon.
#FIXME: To support both NumPy and non-NumPy code paths transparently, design a
#novel private data structure named "_BeartypeRNJesus" whose __next__() dunder
#method transparently returns a new random integer. The implementation of that
#method then handles all of the low-level minutiae like:
#* Storing and iterating the 0-based index of the next index into an internally
# cached NumPy array created by calling numpy.random.randint().
#* Creating a new cached NumPy array after exhausting the prior cached array.
CODE_INIT_RANDOM_INT = f'''
# Generate and localize a sufficiently large pseudo-random integer for
# subsequent indexation in type-checking randomly selected container items.
{VAR_NAME_RANDOM_INT} = {ARG_NAME_GETRANDBITS}(32)'''
'''
PEP-specific code snippet generating and localizing a pseudo-random unsigned
32-bit integer for subsequent use in type-checking randomly indexed container
items.
This bit length was intentionally chosen to correspond to the number of bits
generated by each call to Python's C-based Mersenne Twister underlying the
:func:`random.getrandbits` function called here. Exceeding this number of bits
would cause that function to inefficiently call the Twister multiple times.
This bit length produces unsigned 32-bit integers efficiently representable as
C-based atomic integers rather than **big numbers** (i.e., aggregations of
C-based atomic integers) ranging 0–``2**32 - 1`` regardless of the word size of
the active Python interpreter.
Since the cost of generating integers to this maximum bit length is
approximately the same as generating integers of much smaller bit lengths, this
maximum is preferred. Although big numbers transparently support the same
operations as non-big integers, the latter are dramatically more efficient with
respect to both space and time consumption and thus preferred.
Usage
-----
Since *most* containers are likely to contain substantially fewer items than
the maximum integer in this range, pseudo-random container indices are
efficiently selectable by simply taking the modulo of this local variable with
the lengths of those containers.
Any container containing more than this maximum number of items is typically
defined as a disk-backed data structure (e.g., Pandas dataframe) rather than an
in-memory standard object (e.g., :class:`list`). Since :mod:`beartype`
currently ignores the former with respect to deep type-checking, this local
typically suffices for real-world in-memory containers. For edge-case
containers containing more than this maximum number of items, :mod:`beartype`
will only deeply type-check items with indices in this range; all trailing
items will *not* be deeply type-checked, which we consider an acceptable
tradeoff, given the infeasibility of even storing such objects in memory.
Caveats
-------
**The only safely callable function declared by the stdlib** :mod:`random`
**module is** :func:`random.getrandbits`. While that function is efficiently
implemented in C, all other functions declared by that module are inefficiently
implemented in Python. In fact, their implementations are sufficiently
inefficient that there exist numerous online articles lamenting the fact.
See Also
--------
https://stackoverflow.com/a/11704178/2809027
StackOverflow answer demonstrating Python's C-based Mersenne Twister
underlying the :func:`random.getrandbits` function to generate 32 bits of
pseudo-randomness at a time.
https://gist.github.com/terrdavis/1b23b7ff8023f55f627199b09cfa6b24#gistcomment-3237209
Self GitHub comment introducing the core concepts embodied by this snippet.
https://eli.thegreenplace.net/2018/slow-and-fast-methods-for-generating-random-integers-in-python
Authoritative article profiling various :mod:`random` callables.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype type-checking function code utility factories** (i.e., low-level
callables dynamically generating pure-Python code snippets type-checking
arbitrary objects passed to arbitrary callables against PEP-compliant type hints
passed to those same callables).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import Callable
from beartype._check.checkmagic import (
ARG_NAME_GETRANDBITS,
)
from beartype._check.util._checkutilsnip import (
CODE_SIGNATURE_ARG,
CODE_INIT_RANDOM_INT,
)
from beartype._conf.confcls import BeartypeConf
from beartype._data.datatyping import (
LexicalScope,
)
from beartype._util.text.utiltextrepr import represent_object
# ....................{ MAKERS ~ signature }....................
#FIXME: Unit test us up, please.
def make_func_signature(
    # Mandatory parameters.
    func_name: str,
    func_scope: LexicalScope,
    code_signature_format: str,
    conf: BeartypeConf,

    # Optional parameters.
    code_signature_prefix: str = '',

    # String globals required only for their bound str.format() methods.
    CODE_SIGNATURE_ARG_format: Callable = (
        CODE_SIGNATURE_ARG.format),
) -> str:
    '''
    **Type-checking signature factory** (i.e., low-level function dynamically
    generating and returning the **signature** (i.e., callable declaration
    prefixing the body of that callable) of a callable type-checking arbitrary
    objects against arbitrary PEP-compliant type hints to be subsequently
    defined, described by the passed parameters.

    Parameters
    ----------
    func_name : str
        Unqualified basename of the callable declared by this signature.
    func_scope : LexicalScope
        **Local scope** (i.e., dictionary mapping from the name to value of
        each hidden parameter declared in this signature) of that callable,
        where a "hidden parameter" is a parameter whose name is prefixed by
        ``"__beartype_"`` and whose value is that of an external attribute
        internally referenced in the body of that callable.
    code_signature_format : str
        Code snippet declaring the unformatted signature of that callable,
        which this factory then formats by replacing these format variables in
        this code snippet:

        * ``{func_name}``, replaced by the value of the ``func_name``
          parameter.
        * ``{code_signature_prefix}``, replaced by the value of the
          ``code_signature_prefix`` parameter.
        * ``{code_signature_args}``, replaced by the declaration of all hidden
          parameters in the passed ``func_scope`` parameter.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object).
    code_signature_prefix : str, optional
        Code snippet prefixing this signature, typically either:

        * For synchronous callables, the empty string.
        * For asynchronous callables (e.g., asynchronous generators,
          coroutines), the space-suffixed keyword ``"async "``.

        Defaults to the empty string and thus synchronous behaviour.

    Returns
    -------
    str
        Signature of this callable, suffixed by zero or more preliminary
        statements (e.g., local variable assignments) required by the body of
        this callable.
    '''
    assert isinstance(func_name, str), f'{repr(func_name)} not string.'
    assert isinstance(func_scope, dict), f'{repr(func_scope)} not dictionary.'
    assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'
    assert isinstance(code_signature_format, str), (
        f'{repr(code_signature_format)} not string.')
    assert isinstance(code_signature_prefix, str), (
        f'{repr(code_signature_prefix)} not string.')

    # Python code snippet declaring all optional private beartype-specific
    # parameters directly derived from the local scope established by the
    # prior calls to the _code_check_args() and _code_check_return()
    # functions, efficiently joined from one snippet per parameter.
    #
    # Note that the machine-readable representation of each parameter's
    # initial value (stripped of newlines and truncated to a sensible length)
    # is conditionally appended as a human-readable comment to the declaration
    # of that parameter *ONLY* if the caller explicitly requested debugging.
    # Why? Because the represent_object() function sanitizing that value is
    # incredibly slow.
    code_signature_args = ''.join(
        CODE_SIGNATURE_ARG_format(
            arg_name=arg_name,
            arg_comment=(
                f' # is {represent_object(arg_value)}'
                if conf.is_debug else
                ''
            ),
        )
        for arg_name, arg_value in func_scope.items()
    )

    #FIXME: *YIKES.* We need to pass a unique tester function signature here
    #resembling:
    #    def {{func_name}}(obj: object) -> bool:
    #To do so sanely, let's generalize this factory to accept an additional
    #mandatory "func_signature" parameter, please. We'll need to note in the
    #docstring exactly what format variables that parameter is expected to
    #contain, of course.

    # Python code snippet declaring the signature of this wrapper.
    code_signature = code_signature_format.format(
        func_name=func_name,
        code_signature_prefix=code_signature_prefix,
        code_signature_args=code_signature_args,
    )

    # Python code snippet of preliminary statements (e.g., local variable
    # assignments) if any *AFTER* generating snippets type-checking parameters
    # and returns (which modifies dataclass variables tested below).
    code_body_init = (
        # If the body of this wrapper requires a pseudo-random integer, append
        # code generating and localizing such an integer to this signature.
        CODE_INIT_RANDOM_INT
        if ARG_NAME_GETRANDBITS in func_scope else
        # Else, this body requires *NO* such integer. In this case, preserve
        # this signature as is.
        ''
    )

    # Return this signature suffixed by zero or more preliminary statements.
    return f'{code_signature}{code_body_init}'
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Beartype decorator **type-checking expression snippets** (i.e., triple-quoted
pure-Python string constants formatted and concatenated together to dynamically
generate boolean expressions type-checking arbitrary objects against arbitrary
PEP-compliant type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._check.checkmagic import (
VAR_NAME_RANDOM_INT,
)
# ....................{ PITH }....................
PEP_CODE_PITH_ASSIGN_EXPR = '''{pith_curr_var_name} := {pith_curr_expr}'''
'''
Python >= 3.8-specific assignment expression assigning the full Python
expression yielding the value of the current pith to a unique local variable,
enabling PEP-compliant child hints to obtain this pith via this efficient
variable rather than via this inefficient full Python expression.
'''

# ....................{ HINT ~ placeholder : child }....................
PEP_CODE_HINT_CHILD_PLACEHOLDER_PREFIX = '@['
'''
Prefix of each **placeholder hint child type-checking substring** (i.e.,
placeholder to be globally replaced by a Python code snippet type-checking the
current pith expression against the currently iterated child hint of the
currently visited parent hint).
'''


PEP_CODE_HINT_CHILD_PLACEHOLDER_SUFFIX = ')!'
'''
Suffix of each **placeholder hint child type-checking substring** (i.e.,
placeholder to be globally replaced by a Python code snippet type-checking the
current pith expression against the currently iterated child hint of the
currently visited parent hint).
'''

# ....................{ HINT ~ placeholder : forwardref }....................
PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_PREFIX = '${FORWARDREF:'
'''
Prefix of each **placeholder unqualified forward reference classname
substring** (i.e., placeholder to be globally replaced by a Python code snippet
evaluating to the currently visited unqualified forward reference hint
canonicalized into a fully-qualified classname relative to the external
caller-defined module declaring the currently decorated callable).
'''


PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_SUFFIX = ']?'
'''
Suffix of each **placeholder unqualified forward reference classname
substring** (i.e., placeholder to be globally replaced by a Python code snippet
evaluating to the currently visited unqualified forward reference hint
canonicalized into a fully-qualified classname relative to the external
caller-defined module declaring the currently decorated callable).
'''
# ....................{ HINT ~ pep : (484|585) : generic }....................
PEP484585_CODE_HINT_GENERIC_PREFIX = '''(
{indent_curr} # True only if this pith is of this generic type.
{indent_curr} isinstance({pith_curr_assign_expr}, {hint_curr_expr}) and'''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
against each unerased pseudo-superclass subclassed by a :pep:`484`-compliant
**generic** (i.e., PEP-compliant type hint subclassing a combination of one or
more of the :mod:`typing.Generic` superclass, the :mod:`typing.Protocol`
superclass, and/or other :mod:`typing` non-class objects).
Caveats
----------
The ``{indent_curr}`` format variable is intentionally brace-protected to
efficiently defer its interpolation until the complete PEP-compliant code
snippet type-checking the current pith against *all* subscripted arguments of
this parent type has been generated.
'''
PEP484585_CODE_HINT_GENERIC_SUFFIX = '''
{indent_curr})'''
'''
PEP-compliant code snippet suffixing all code type-checking the current pith
against each unerased pseudo-superclass subclassed by a :pep:`484`-compliant
generic.
'''
PEP484585_CODE_HINT_GENERIC_CHILD = '''
{{indent_curr}} # True only if this pith deeply satisfies this unerased
{{indent_curr}} # pseudo-superclass of this generic.
{{indent_curr}} {hint_child_placeholder} and'''
'''
PEP-compliant code snippet type-checking the current pith against the current
unerased pseudo-superclass subclassed by a :pep:`484`-compliant generic.
Caveats
----------
The caller is required to manually slice the trailing suffix ``" and"`` after
applying this snippet to the last unerased pseudo-superclass of such a generic.
While there exist alternate and more readable means of accomplishing this, this
approach is the optimally efficient.
The ``{indent_curr}`` format variable is intentionally brace-protected to
efficiently defer its interpolation until the complete PEP-compliant code
snippet type-checking the current pith against *all* subscripted arguments of
this parent type has been generated.
'''
# ....................{ HINT ~ pep : (484|585) : sequence }....................
PEP484585_CODE_HINT_SEQUENCE_ARGS_1 = '''(
{indent_curr} # True only if this pith is of this sequence type.
{indent_curr} isinstance({pith_curr_assign_expr}, {hint_curr_expr}) and
{indent_curr} # True only if either this pith is empty *OR* this pith is
{indent_curr} # both non-empty and a random item deeply satisfies this hint.
{indent_curr} (not {pith_curr_var_name} or {hint_child_placeholder})
{indent_curr})'''
'''
PEP-compliant code snippet type-checking the current pith against a parent
**standard sequence type** (i.e., PEP-compliant type hint accepting exactly one
subscripted type hint unconditionally constraining *all* items of this pith,
which necessarily satisfies the :class:`collections.abc.Sequence` protocol with
guaranteed ``O(1)`` indexation across all sequence items).
Caveats
----------
**This snippet cannot contain ternary conditionals.** For unknown reasons
suggesting a critical defect in the current implementation of Python 3.8's
assignment expressions, this snippet raises :class:`UnboundLocalError`
exceptions resembling the following when this snippet contains one or more
ternary conditionals:
UnboundLocalError: local variable '__beartype_pith_1' referenced before assignment
In particular, the initial draft of this snippet guarded against empty
sequences with a seemingly reasonable ternary conditional:
.. code-block:: python
PEP484585_CODE_HINT_SEQUENCE_ARGS_1 = \'\'\'(
{indent_curr} isinstance({pith_curr_assign_expr}, {hint_curr_expr}) and
{indent_curr} {hint_child_placeholder} if {pith_curr_var_name} else True
{indent_curr})\'\'\'
That should behave as expected, but doesn't, presumably due to obscure scoping
rules and a non-intuitive implementation of ternary conditionals in CPython.
Ergo, the current version of this snippet guards against empty sequences with
disjunctions and conjunctions (i.e., ``or`` and ``and`` operators) instead.
Happily, the current version is more efficient than the equivalent approach
based on ternary conditional (albeit slightly less intuitive).
'''
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_PITH_CHILD_EXPR = (
f'''{{pith_curr_var_name}}[{VAR_NAME_RANDOM_INT} % len({{pith_curr_var_name}})]''')
'''
PEP-compliant Python expression yielding the value of a randomly indexed item
of the current pith (which, by definition, *must* be a standard sequence).
'''
# ....................{ HINT ~ pep : (484|585) : tuple }....................
PEP484585_CODE_HINT_TUPLE_FIXED_PREFIX = '''(
{indent_curr} # True only if this pith is a tuple.
{indent_curr} isinstance({pith_curr_assign_expr}, tuple) and'''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
against each subscripted child hint of an itemized :class:`typing.Tuple` type
of the form ``typing.Tuple[{typename1}, {typename2}, ..., {typenameN}]``.
'''
PEP484585_CODE_HINT_TUPLE_FIXED_SUFFIX = '''
{indent_curr})'''
'''
PEP-compliant code snippet suffixing all code type-checking the current pith
against each subscripted child hint of an itemized :class:`typing.Tuple` type
of the form ``typing.Tuple[{typename1}, {typename2}, ..., {typenameN}]``.
'''
PEP484585_CODE_HINT_TUPLE_FIXED_EMPTY = '''
{{indent_curr}} # True only if this tuple is empty.
{{indent_curr}} not {pith_curr_var_name} and'''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
to be empty against an itemized :class:`typing.Tuple` type of the non-standard
form ``typing.Tuple[()]``.
See Also
----------
:data:`PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD`
Further details.
'''
PEP484585_CODE_HINT_TUPLE_FIXED_LEN = '''
{{indent_curr}} # True only if this tuple is of the expected length.
{{indent_curr}} len({pith_curr_var_name}) == {hint_childs_len} and'''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
to be of the expected length against an itemized :class:`typing.Tuple` type of
the non-standard form ``typing.Tuple[()]``.
See Also
----------
:data:`PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD`
Further details.
'''
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD = '''
{{indent_curr}} # True only if this item of this non-empty tuple deeply
{{indent_curr}} # satisfies this child hint.
{{indent_curr}} {hint_child_placeholder} and'''
'''
PEP-compliant code snippet type-checking the current pith against the current
child hint subscripting an itemized :class:`typing.Tuple` type of the form
``typing.Tuple[{typename1}, {typename2}, ..., {typenameN}]``.
Caveats
----------
The caller is required to manually slice the trailing suffix ``" and"`` after
applying this snippet to the last subscripted child hint of an itemized
:class:`typing.Tuple` type. While there exist alternate and more readable means
of accomplishing this, this approach is the optimally efficient.
The ``{indent_curr}`` format variable is intentionally brace-protected to
efficiently defer its interpolation until the complete PEP-compliant code
snippet type-checking the current pith against *all* subscripted arguments of
this parent type has been generated.
'''
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_PITH_CHILD_EXPR = (
'''{pith_curr_var_name}[{pith_child_index}]''')
'''
PEP-compliant Python expression yielding the value of the currently indexed
item of the current pith (which, by definition, *must* be a tuple).
'''
# ....................{ HINT ~ pep : (484|585) : subclass }....................
PEP484585_CODE_HINT_SUBCLASS = '''(
{indent_curr} # True only if this pith is a class *AND*...
{indent_curr} isinstance({pith_curr_assign_expr}, type) and
{indent_curr} # True only if this class subclasses this superclass.
{indent_curr} issubclass({pith_curr_var_name}, {hint_curr_expr})
{indent_curr})'''
'''
PEP-compliant code snippet type-checking the current pith to be a subclass of
the subscripted child hint of a :pep:`484`- or :pep:`585`-compliant **subclass
type hint** (e.g., :attr:`typing.Type`, :class:`type`).
'''
# ....................{ HINT ~ pep : 484 : instance }....................
PEP484_CODE_HINT_INSTANCE = (
'''isinstance({pith_curr_expr}, {hint_curr_expr})''')
'''
PEP-compliant code snippet type-checking the current pith against the
current child PEP-compliant type expected to be a trivial non-:mod:`typing`
type (e.g., :class:`int`, :class:`str`).
'''
# ....................{ HINT ~ pep : 484 : union }....................
PEP484_CODE_HINT_UNION_PREFIX = '''('''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
against each subscripted argument of a :class:`typing.Union` type hint.
'''
PEP484_CODE_HINT_UNION_SUFFIX = '''
{indent_curr})'''
'''
PEP-compliant code snippet suffixing all code type-checking the current pith
against each subscripted argument of a :class:`typing.Union` type hint.
'''
PEP484_CODE_HINT_UNION_CHILD_PEP = '''
{{indent_curr}} {hint_child_placeholder} or'''
'''
PEP-compliant code snippet type-checking the current pith against the current
PEP-compliant child argument subscripting a parent :class:`typing.Union` type
hint.
Caveats
----------
The caller is required to manually slice the trailing suffix ``" or"`` after
applying this snippet to the last subscripted argument of such a hint. While
there exist alternate and more readable means of accomplishing this, this
approach is the optimally efficient.
The ``{indent_curr}`` format variable is intentionally brace-protected to
efficiently defer its interpolation until the complete PEP-compliant code
snippet type-checking the current pith against *all* subscripted arguments of
this parent hint has been generated.
'''
PEP484_CODE_HINT_UNION_CHILD_NONPEP = '''
{{indent_curr}} # True only if this pith is of one of these types.
{{indent_curr}} isinstance({pith_curr_expr}, {hint_curr_expr}) or'''
'''
PEP-compliant code snippet type-checking the current pith against the current
PEP-noncompliant child argument subscripting a parent :class:`typing.Union`
type hint.
See Also
----------
:data:`PEP484_CODE_HINT_UNION_CHILD_PEP`
Further details.
'''
# ....................{ HINT ~ pep : 586 }....................
PEP586_CODE_HINT_PREFIX = '''(
{{indent_curr}} # True only if this pith is of one of these literal types.
{{indent_curr}} isinstance({pith_curr_assign_expr}, {hint_child_types_expr}) and ('''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
against a :pep:`586`-compliant :class:`typing.Literal` type hint subscripted by
one or more literal objects.
'''
PEP586_CODE_HINT_SUFFIX = '''
{indent_curr}))'''
'''
PEP-compliant code snippet suffixing all code type-checking the current pith
against a :pep:`586`-compliant :class:`typing.Literal` type hint subscripted by
one or more literal objects.
'''
PEP586_CODE_HINT_LITERAL = '''
{{indent_curr}} # True only if this pith is equal to this literal.
{{indent_curr}} {pith_curr_var_name} == {hint_child_expr} or'''
'''
PEP-compliant code snippet type-checking the current pith against the current
child literal object subscripting a :pep:`586`-compliant
:class:`typing.Literal` type hint.
Caveats
----------
The caller is required to manually slice the trailing suffix ``" and"`` after
applying this snippet to the last subscripted argument of such a
:class:`typing.Literal` type. While there exist alternate and more readable
means of accomplishing this, this approach is the optimally efficient.
The ``{indent_curr}`` format variable is intentionally brace-protected to
efficiently defer its interpolation until the complete PEP-compliant code
snippet type-checking the current pith against *all* subscripted arguments of
this parent hint has been generated.
'''
# ....................{ HINT ~ pep : 593 }....................
PEP593_CODE_HINT_VALIDATOR_PREFIX = '''(
{indent_curr} {hint_child_placeholder} and'''
'''
PEP-compliant code snippet prefixing all code type-checking the current pith
against a :pep:`593`-compliant :class:`typing.Annotated` type hint subscripted
by one or more :class:`beartype.vale.BeartypeValidator` objects.
'''
PEP593_CODE_HINT_VALIDATOR_SUFFIX = '''
{indent_curr})'''
'''
PEP-compliant code snippet suffixing all code type-checking the current pith
against each a :pep:`593`-compliant :class:`typing.Annotated` type hint
subscripted by one or more :class:`beartype.vale.BeartypeValidator` objects.
'''
PEP593_CODE_HINT_VALIDATOR_CHILD = '''
{indent_curr} # True only if this pith satisfies this caller-defined
{indent_curr} # validator of this annotated.
{indent_curr} {hint_child_expr} and'''
'''
PEP-compliant code snippet type-checking the current pith against
:mod:`beartype`-specific **data validator code** (i.e., caller-defined
:meth:`beartype.vale.BeartypeValidator._is_valid_code` string) of the current
child :class:`beartype.vale.BeartypeValidator` argument subscripting a parent `PEP
593`_-compliant :class:`typing.Annotated` type hint.
Caveats
----------
The caller is required to manually slice the trailing suffix ``" and"`` after
applying this snippet to the last subscripted argument of such a
:class:`typing.Annotated` type. While there exist alternate and more readable
means of accomplishing this, this approach is the optimally efficient.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype type-checking code factories** (i.e., low-level callables dynamically
generating pure-Python code snippets type-checking arbitrary objects against
PEP-compliant type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
# All "FIXME:" comments for this submodule reside in this package's "__init__"
# submodule to improve maintainability and readability here.
# ....................{ IMPORTS }....................
from beartype.roar import (
BeartypeDecorHintPepException,
BeartypeDecorHintPepUnsupportedException,
BeartypeDecorHintPep593Exception,
)
from beartype.typing import Optional
from beartype._cave._cavefast import TestableTypes
from beartype._check.checkmagic import (
ARG_NAME_GETRANDBITS,
VAR_NAME_PREFIX_PITH,
VAR_NAME_PITH_ROOT,
)
from beartype._check.expr.exprmagic import (
EXCEPTION_PREFIX_FUNC_WRAPPER_LOCAL,
EXCEPTION_PREFIX_HINT,
HINT_META_INDEX_HINT,
HINT_META_INDEX_PLACEHOLDER,
HINT_META_INDEX_PITH_EXPR,
HINT_META_INDEX_PITH_VAR_NAME,
HINT_META_INDEX_INDENT,
)
from beartype._check.expr._exprscope import (
add_func_scope_type,
add_func_scope_types,
add_func_scope_type_or_types,
express_func_scope_type_forwardref,
)
from beartype._check.expr._exprsnip import (
PEP_CODE_HINT_CHILD_PLACEHOLDER_PREFIX,
PEP_CODE_HINT_CHILD_PLACEHOLDER_SUFFIX,
PEP_CODE_PITH_ASSIGN_EXPR,
PEP484585_CODE_HINT_GENERIC_CHILD,
PEP484585_CODE_HINT_GENERIC_PREFIX,
PEP484585_CODE_HINT_GENERIC_SUFFIX,
PEP484585_CODE_HINT_SEQUENCE_ARGS_1,
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_PITH_CHILD_EXPR,
PEP484585_CODE_HINT_SUBCLASS,
PEP484585_CODE_HINT_TUPLE_FIXED_EMPTY,
PEP484585_CODE_HINT_TUPLE_FIXED_LEN,
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD,
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_PITH_CHILD_EXPR,
PEP484585_CODE_HINT_TUPLE_FIXED_PREFIX,
PEP484585_CODE_HINT_TUPLE_FIXED_SUFFIX,
PEP484_CODE_HINT_INSTANCE,
PEP484_CODE_HINT_UNION_CHILD_PEP,
PEP484_CODE_HINT_UNION_CHILD_NONPEP,
PEP484_CODE_HINT_UNION_PREFIX,
PEP484_CODE_HINT_UNION_SUFFIX,
PEP586_CODE_HINT_LITERAL,
PEP586_CODE_HINT_PREFIX,
PEP586_CODE_HINT_SUFFIX,
PEP593_CODE_HINT_VALIDATOR_CHILD,
PEP593_CODE_HINT_VALIDATOR_PREFIX,
PEP593_CODE_HINT_VALIDATOR_SUFFIX,
)
from beartype._conf.confcls import BeartypeConf
from beartype._data.datatyping import CodeGenerated
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.cache.pool.utilcachepoollistfixed import (
FIXED_LIST_SIZE_MEDIUM,
acquire_fixed_list,
release_fixed_list,
)
from beartype._util.cache.pool.utilcachepoolobjecttyped import (
acquire_object_typed,
release_object_typed,
)
from beartype._data.datatyping import LexicalScope
from beartype._util.error.utilerror import EXCEPTION_PLACEHOLDER
from beartype._data.hint.pep.sign.datapepsigns import (
HintSignAnnotated,
HintSignForwardRef,
HintSignGeneric,
HintSignLiteral,
HintSignNone,
HintSignTuple,
HintSignType,
)
from beartype._data.hint.pep.sign.datapepsignset import (
HINT_SIGNS_SEQUENCE_ARGS_1,
HINT_SIGNS_SUPPORTED_DEEP,
HINT_SIGNS_ORIGIN_ISINSTANCEABLE,
HINT_SIGNS_UNION,
)
from beartype._util.func.utilfuncscope import add_func_scope_attr
from beartype._util.hint.pep.proposal.pep484585.utilpep484585 import (
is_hint_pep484585_tuple_empty)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585arg import (
get_hint_pep484585_args_1)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585generic import (
get_hint_pep484585_generic_type,
iter_hint_pep484585_generic_bases_unerased_tree,
)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585type import (
get_hint_pep484585_subclass_superclass)
from beartype._util.hint.pep.proposal.utilpep586 import (
die_unless_hint_pep586,
get_hint_pep586_literals,
)
from beartype._util.hint.pep.proposal.utilpep593 import (
get_hint_pep593_metadata,
get_hint_pep593_metahint,
)
from beartype._util.hint.pep.utilpepget import (
get_hint_pep_args,
get_hint_pep_sign,
get_hint_pep_origin_type_isinstanceable,
)
from beartype._util.hint.pep.utilpeptest import (
die_if_hint_pep_unsupported,
is_hint_pep,
is_hint_pep_args,
warn_if_hint_pep_deprecated,
)
from beartype._check.conv.convsanify import sanify_hint_child
from beartype._util.hint.utilhinttest import is_hint_ignorable
from beartype._util.kind.utilkinddict import update_mapping
from beartype._util.py.utilpyversion import IS_PYTHON_AT_LEAST_3_8
from beartype._util.text.utiltextmagic import (
CODE_INDENT_1,
CODE_INDENT_2,
LINE_RSTRIP_INDEX_AND,
LINE_RSTRIP_INDEX_OR,
)
from beartype._util.text.utiltextmunge import replace_str_substrs
from beartype._util.text.utiltextrepr import represent_object
from collections.abc import Callable
from random import getrandbits
# ....................{ MAKERS }....................
#FIXME: Attempt to JIT this function with Numba at some point. This will almost
#certainly either immediately blow up or improve nothing, but we're curious to
#see what happens. Make it so, Ensign Numba!
# from numba import jit
# @jit
@callable_cached
def make_check_expr(
# ..................{ PARAMS ~ mandatory }..................
hint: object,
conf: BeartypeConf,
# ..................{ PARAMS ~ optional }..................
# Globals defined above, declared as optional parameters for efficient
# lookup as local attributes. Yes, this is an absurd microoptimization.
# *fight me, github developer community*
# "beartype._check.checkmagic" globals.
_ARG_NAME_GETRANDBITS=ARG_NAME_GETRANDBITS,
_CODE_INDENT_1=CODE_INDENT_1,
_CODE_INDENT_2=CODE_INDENT_2,
_EXCEPTION_PREFIX=EXCEPTION_PLACEHOLDER,
_EXCEPTION_PREFIX_FUNC_WRAPPER_LOCAL=EXCEPTION_PREFIX_FUNC_WRAPPER_LOCAL,
_EXCEPTION_PREFIX_HINT=EXCEPTION_PREFIX_HINT,
# "beartype._check.expr.exprmagic" globals.
_HINT_META_INDEX_HINT=HINT_META_INDEX_HINT,
_HINT_META_INDEX_PLACEHOLDER=HINT_META_INDEX_PLACEHOLDER,
_HINT_META_INDEX_PITH_EXPR=HINT_META_INDEX_PITH_EXPR,
_HINT_META_INDEX_PITH_VAR_NAME=HINT_META_INDEX_PITH_VAR_NAME,
_HINT_META_INDEX_INDENT=HINT_META_INDEX_INDENT,
_LINE_RSTRIP_INDEX_AND=LINE_RSTRIP_INDEX_AND,
_LINE_RSTRIP_INDEX_OR=LINE_RSTRIP_INDEX_OR,
# "beartype._check.expr._exprsnip" string globals required only for
# their bound str.format() methods.
PEP_CODE_PITH_ASSIGN_EXPR_format: Callable = (
PEP_CODE_PITH_ASSIGN_EXPR.format),
PEP484_CODE_HINT_INSTANCE_format: Callable = (
PEP484_CODE_HINT_INSTANCE.format),
PEP484585_CODE_HINT_GENERIC_CHILD_format: Callable = (
PEP484585_CODE_HINT_GENERIC_CHILD.format),
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_format: Callable = (
PEP484585_CODE_HINT_SEQUENCE_ARGS_1.format),
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_PITH_CHILD_EXPR_format: Callable = (
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_PITH_CHILD_EXPR.format),
PEP484585_CODE_HINT_SUBCLASS_format: Callable = (
PEP484585_CODE_HINT_SUBCLASS.format),
PEP484585_CODE_HINT_TUPLE_FIXED_EMPTY_format: Callable = (
PEP484585_CODE_HINT_TUPLE_FIXED_EMPTY.format),
PEP484585_CODE_HINT_TUPLE_FIXED_LEN_format: Callable = (
PEP484585_CODE_HINT_TUPLE_FIXED_LEN.format),
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD_format: Callable = (
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD.format),
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_PITH_CHILD_EXPR_format: Callable = (
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_PITH_CHILD_EXPR.format),
PEP484_CODE_HINT_UNION_CHILD_PEP_format: Callable = (
PEP484_CODE_HINT_UNION_CHILD_PEP.format),
PEP484_CODE_HINT_UNION_CHILD_NONPEP_format: Callable = (
PEP484_CODE_HINT_UNION_CHILD_NONPEP.format),
PEP586_CODE_HINT_LITERAL_format: Callable = (
PEP586_CODE_HINT_LITERAL.format),
PEP586_CODE_HINT_PREFIX_format: Callable = (
PEP586_CODE_HINT_PREFIX.format),
PEP593_CODE_HINT_VALIDATOR_PREFIX_format: Callable = (
PEP593_CODE_HINT_VALIDATOR_PREFIX.format),
PEP593_CODE_HINT_VALIDATOR_SUFFIX_format: Callable = (
PEP593_CODE_HINT_VALIDATOR_SUFFIX.format),
PEP593_CODE_HINT_VALIDATOR_CHILD_format: Callable = (
PEP593_CODE_HINT_VALIDATOR_CHILD.format),
) -> CodeGenerated:
'''
**Type-checking expression factory** (i.e., low-level callable dynamically
generating a pure-Python boolean expression type-checking an arbitrary
object against the passed PEP-compliant type hint).
This code factory performs a breadth-first search (BFS) over the abstract
graph of nested type hints reachable from the subscripted arguments of the
passed root type hint. For each such (possibly nested) hint, this factory
embeds one or more boolean subexpressions validating a (possibly nested
sub)object of an arbitrary object against that hint into the full boolean
expression created and returned by this factory. In short, this factory is
the beating heart of :mod:`beartype`. We applaud you for your perseverance.
You finally found the essence of the Great Bear. You did it!! Now, we clap.
This code factory is memoized for efficiency.
Caveats
----------
**This factory intentionally accepts no** ``exception_prefix``
**parameter.** Why? Since that parameter is typically specific to the
context-sensitive use case of the caller, accepting that parameter would
prevent this factory from memoizing the passed hint with the returned code,
which would rather defeat the point. Instead, this factory only:
* Returns generic non-working code containing the placeholder
:data:`VAR_NAME_PITH_ROOT` substring that the caller is required to
globally replace by either the name of the current parameter *or*
``return`` for return values (e.g., by calling the builtin
:meth:`str.replace` method) to generate the desired non-generic working
code type-checking that parameter or return value.
* Raises generic non-human-readable exceptions containing the placeholder
:attr:`beartype._util.error.utilerror.EXCEPTION_PLACEHOLDER` substring
that the caller is required to explicitly catch and raise non-generic
human-readable exceptions from by calling the
:func:`beartype._util.error.utilerror.reraise_exception_placeholder`
function.
Parameters
----------
hint : object
PEP-compliant type hint to be type-checked.
conf : BeartypeConf
**Beartype configuration** (i.e., self-caching dataclass encapsulating
all settings configuring type-checking for the passed object).
Returns
----------
CodeGenerated
Tuple containing the Python code snippet dynamically generated by this
code generator and metadata describing that code. See the
:attr:`beartype._data.datatyping.CodeGenerated` type hint for details.
Raises
----------
BeartypeDecorHintPepException
If this object is *not* a PEP-compliant type hint.
BeartypeDecorHintPepUnsupportedException
If this object is a PEP-compliant type hint currently unsupported by
the :func:`beartype.beartype` decorator.
BeartypeDecorHintPep484Exception
If one or more PEP-compliant type hints visitable from this object are
nested :attr:`typing.NoReturn` child hints, since
:attr:`typing.NoReturn` is valid *only* as a non-nested return hint.
BeartypeDecorHintPep593Exception
If one or more PEP-compliant type hints visitable from this object
subscript the :pep:`593`-compliant :class:`typing.Annotated` class such
that:
* The second argument subscripting that class is an instance of the
:class:`beartype.vale.Is` class.
* One or more further arguments subscripting that class are *not*
instances of the :class:`beartype.vale.Is` class.
Warns
----------
BeartypeDecorHintPep585DeprecationWarning
If one or more :pep:`484`-compliant type hints visitable from this
object have been deprecated by :pep:`585`.
'''
# ..................{ HINT ~ root }..................
# Top-level hint relocalized for disambiguity.
hint_root = hint
# Delete the passed parameter whose name is ambiguous within the context of
# this function for similar disambiguity.
del hint
# ..................{ HINT ~ current }..................
# Currently visited hint.
hint_curr = None
# Current unsubscripted typing attribute associated with this hint (e.g.,
# "Union" if "hint_curr == Union[int, str]").
hint_curr_sign = None
# Python expression evaluating to an isinstanceable type (e.g., origin
# type) associated with the currently visited type hint if any.
hint_curr_expr = None
# Placeholder string to be globally replaced in the Python code snippet to
# be returned (i.e., "func_wrapper_code") by a Python code snippet
# type-checking the current pith expression (i.e.,
# "pith_curr_var_name") against the currently visited hint (i.e.,
# "hint_curr").
hint_curr_placeholder = None
# Full Python expression evaluating to the value of the current pith (i.e.,
# possibly nested object of the passed parameter or return value to be
# type-checked against the currently visited hint).
#
# Note that this is *NOT* a Python >= 3.8-specific assignment expression
# but rather the original inefficient expression provided by the parent
# PEP-compliant type hint of the currently visited hint.
pith_curr_expr = None
# Name of the current pith variable (i.e., local Python variable in the
# body of the wrapper function whose value is that of the current pith).
# This name is either:
# * Initially, the name of the currently type-checked parameter or return.
# * On subsequently type-checking nested items of the parameter or return
# under Python >= 3.8, the name of the local variable uniquely assigned to
# by the assignment expression defined by "pith_curr_assign_expr" (i.e.,
# the left-hand side (LHS) of that assignment expression).
pith_curr_var_name = VAR_NAME_PITH_ROOT
# Python code snippet expanding to the current level of indentation
# appropriate for the currently visited hint.
indent_curr = _CODE_INDENT_2
# ..................{ HINT ~ child }..................
# Currently iterated PEP-compliant child hint subscripting the currently
# visited hint, initialized to the root hint to enable the subsequently
# called _enqueue_hint_child() function to enqueue the root hint.
hint_child = hint_root
# Python code snippet expanding to the current level of indentation
# appropriate for the currently iterated child hint, initialized to the
# root hint indentation to enable the subsequently called
# _enqueue_hint_child() function to enqueue the root hint.
indent_child = indent_curr
# ..................{ HINT ~ childs }..................
# Current tuple of all PEP-compliant child hints subscripting the currently
# visited hint (e.g., "(int, str)" if "hint_curr == Union[int, str]").
hint_childs: tuple = None # type: ignore[assignment]
# Number of PEP-compliant child hints subscripting the currently visited
# hint.
hint_childs_len: int = None # type: ignore[assignment]
# ..................{ HINT ~ pep 484 : forwardref }..................
# Set of the unqualified classnames referred to by all relative forward
# references visitable from this root hint if any *OR* "None" otherwise
# (i.e., if no such forward references are visitable).
hint_forwardrefs_class_basename: Optional[set] = None
# ..................{ HINT ~ pep 572 }..................
# The following local variables isolated to this subsection are only
# relevant when these conditions hold:
# * The active Python interpreter targets at least Python 3.8, the first
# major Python version to introduce support for "PEP 572 -- Assignment
# Expressions."
# * The currently visited hint is *NOT* the root hint (i.e., "hint_root").
# If the currently visited hint is the root hint, the current pith has
# already been localized to a local variable whose name is the value of
# the "VAR_NAME_PITH_ROOT" string global and thus need *NOT* be
# relocalized to another local variable using an assignment expression.
#
# This is a necessary and sufficient condition for deciding whether a
# Python >= 3.8-specific assignment expression localizing the current pith
# should be embedded in the code generated to type-check this pith against
# this hint. This is a non-trivial runtime optimization eliminating
# repeated computations to obtain this pith from PEP-compliant child hints.
# For example, if this hint constrains this pith to be a standard sequence,
# the child pith of this parent pith is a random item selected from this
# sequence; since obtaining this child pith is non-trivial, the computation
# required to do so is performed only once by assigning this child pith to
# a unique local variable during runtime type-checking and then repeatedly
# type-checking that variable rather than the computation required to
# continually reacquire this child pith: e.g.,
#
# # Type-checking conditional for "List[List[str]]" under Python < 3.8.
# if not (
# isinstance(__beartype_pith_0, list) and
# (
# isinstance(__beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)], list) and
# isinstance(__beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)][__beartype_random_int % len(__beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)])], str) if __beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)] else True
# ) if __beartype_pith_0 else True
# ):
#
# # The same conditional under Python >= 3.8.
# if not (
# isinstance(__beartype_pith_0, list) and
# (
# isinstance(__beartype_pith_1 := __beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)], list) and
# isinstance(__beartype_pith_1[__beartype_random_int % len(__beartype_pith_1)], str) if __beartype_pith_1 else True
# ) if __beartype_pith_0 else True
# ):
#
# Note that:
# * The random item selected from the root pith (i.e., "__beartype_pith_1
# := __beartype_pith_0[__beartype_random_int % len(__beartype_pith_0)")
# only occurs once under Python >= 3.8 but repeatedly under Python < 3.8.
# In both cases, the same semantic type-checking is performed regardless
# of optimization.
# * This optimization implicitly "bottoms out" when the currently visited
# hint is *NOT* subscripted by unignorable PEP-compliant child hint
# arguments. If all child hints of the currently visited hint are either
# ignorable (e.g., "object", "Any") *OR* are unignorable isinstanceable
# types (e.g., "int", "str"), the currently visited hint has *NO*
# meaningful PEP-compliant child hints and is thus effectively a leaf
# node with respect to performing this optimization.
#is_pith_curr_assign_expr = None
# Integer suffixing the name of each local variable assigned the value of
# the current pith in a Python >= 3.8-specific assignment expression, thus
# uniquifying this variable in the body of the current wrapper function.
#
# Note that this integer is intentionally incremented as an efficient
# low-level scalar rather than as an inefficient high-level
# "itertools.Counter" object. Since both are equally thread-safe in the
# internal context of this function body, the former is preferable.
pith_curr_assign_expr_name_counter = 0
# Python >= 3.8-specific assignment expression assigning this full Python
# expression to the local variable assigned the value of this expression.
pith_curr_assign_expr: str = None # type: ignore[assignment]
# ..................{ METADATA }..................
# Tuple of metadata describing the currently visited hint, appended by
# the previously visited parent hint to the "hints_meta" stack.
hint_curr_meta: tuple = None # type: ignore[assignment]
# Fixed list of all metadata describing all visitable hints currently
# discovered by the breadth-first search (BFS) below. This list acts as a
    # standard First In First Out (FIFO) queue, enabling this BFS to be
# implemented as an efficient imperative algorithm rather than an
# inefficient (and dangerous, due to both unavoidable stack exhaustion and
# avoidable infinite recursion) recursive algorithm.
#
# Note that this list is guaranteed by the previously called
# _die_if_hint_repr_exceeds_child_limit() function to be larger than the
# number of hints transitively visitable from this root hint. Ergo, *ALL*
# indexation into this list performed by this BFS is guaranteed to be safe.
# Ergo, avoid explicitly testing below that the "hints_meta_index_last"
# integer maintained by this BFS is strictly less than "FIXED_LIST_SIZE_MEDIUM", as this
# constraint is already guaranteed to be the case.
hints_meta = acquire_fixed_list(FIXED_LIST_SIZE_MEDIUM)
# 0-based index of metadata describing the currently visited hint in the
# "hints_meta" list.
hints_meta_index_curr = 0
# 0-based index of metadata describing the last visitable hint in the
# "hints_meta" list, initialized to "-1" to ensure that the initial
# incrementation of this index by the _enqueue_hint_child() directly called
# below initializes index 0 of the "hints_meta" fixed list.
#
# For efficiency, this integer also uniquely identifies the currently
# iterated child PEP-compliant type hint of the currently visited parent
# PEP-compliant type hint.
hints_meta_index_last = -1
# ..................{ FUNC ~ code }..................
# Python code snippet type-checking the current pith against the currently
# visited hint (to be appended to the "func_wrapper_code" string).
func_curr_code: str = None # type: ignore[assignment]
# ..................{ FUNC ~ code : locals }..................
# Local scope (i.e., dictionary mapping from the name to value of each
# attribute referenced in the signature) of this wrapper function required
# by this Python code snippet.
func_wrapper_scope: LexicalScope = {}
# True only if one or more PEP-compliant type hints visitable from this
# root hint require a pseudo-random integer. If true, the higher-level
# beartype._decor._wrapper.wrappermain.generate_code() function prefixes the body
# of this wrapper function with code generating such an integer.
is_var_random_int_needed = False
# ..................{ CLOSURES }..................
# Closures centralizing frequently repeated logic, addressing Don't Repeat
# Yourself (DRY) concerns during the breadth-first search (BFS) below.
def _enqueue_hint_child(pith_child_expr: str) -> str:
'''
**Enqueue** (i.e., append) a new tuple of metadata describing the
currently iterated child hint to the end of the ``hints_meta`` queue,
enabling this hint to be visited by the ongoing breadth-first search
(BFS) traversing over this queue.
Parameters
----------
pith_child_expr : str
Python code snippet evaluating to the child pith to be
type-checked against the currently iterated child hint.
This closure also implicitly expects the following local variables of
the outer scope to be set to relevant values:
hint_child : object
Currently iterated PEP-compliant child hint subscripting the
currently visited hint.
Returns
----------
str
Placeholder string to be subsequently replaced by code
type-checking this child pith against this child hint.
'''
# Allow these local variables of the outer scope to be modified below.
nonlocal hints_meta_index_last
# Increment both the 0-based index of metadata describing the last
# visitable hint in the "hints_meta" list and the unique identifier of
# the currently iterated child hint *BEFORE* overwriting the existing
# metadata at this index.
#
# Note this index is guaranteed to *NOT* exceed the fixed length of
# this list, by prior validation.
hints_meta_index_last += 1
# Placeholder string to be globally replaced by code type-checking the
# child pith against this child hint, intentionally prefixed and
# suffixed by characters that:
#
# * Are intentionally invalid as Python code, guaranteeing that the
# top-level call to the exec() builtin performed by the @beartype
# decorator will raise a "SyntaxError" exception if the caller fails
# to replace all placeholder substrings generated by this method.
# * Protect the identifier embedded in this substring against ambiguous
# global replacements of larger identifiers containing this
# identifier. If this identifier were *NOT* protected in this manner,
# then the first substring "0" generated by this method would
# ambiguously overlap with the subsequent substring "10" generated by
# this method, which would then produce catastrophically erroneous
# and non-trivial to debug Python code.
hint_child_placeholder = (
f'{PEP_CODE_HINT_CHILD_PLACEHOLDER_PREFIX}'
f'{str(hints_meta_index_last)}'
f'{PEP_CODE_HINT_CHILD_PLACEHOLDER_SUFFIX}'
)
# Create and insert a new tuple of metadata describing this child hint
# at this index of this list.
#
# Note that this assignment is guaranteed to be safe, as "FIXED_LIST_SIZE_MEDIUM" is
# guaranteed to be substantially larger than "hints_meta_index_last".
hints_meta[hints_meta_index_last] = (
hint_child,
hint_child_placeholder,
pith_child_expr,
pith_curr_var_name,
indent_child,
)
# Return this placeholder string.
return hint_child_placeholder
# ..................{ CLOSURES ~ locals }..................
# Local variables calling one or more closures declared above and thus
# deferred until after declaring those closures.
# Placeholder string to be globally replaced in the Python code snippet to
# be returned (i.e., "func_wrapper_code") by a Python code snippet
# type-checking the child pith expression (i.e., "pith_child_expr") against
# the currently iterated child hint (i.e., "hint_child"), initialized to a
# placeholder describing the root hint.
hint_child_placeholder = _enqueue_hint_child(VAR_NAME_PITH_ROOT)
# Python code snippet type-checking the root pith against the root hint,
# localized separately from the "func_wrapper_code" snippet to enable this
# function to validate this code to be valid *BEFORE* returning this code.
func_root_code = hint_child_placeholder
# Python code snippet to be returned, seeded with a placeholder to be
# replaced on the first iteration of the breadth-first search performed
# below with a snippet type-checking the root pith against the root hint.
func_wrapper_code = func_root_code
# ..................{ SEARCH }..................
# While the 0-based index of metadata describing the next visited hint in
# the "hints_meta" list does *NOT* exceed that describing the last
# visitable hint in this list, there remains at least one hint to be
# visited in the breadth-first search performed by this iteration.
while hints_meta_index_curr <= hints_meta_index_last:
# Metadata describing the currently visited hint.
hint_curr_meta = hints_meta[hints_meta_index_curr]
# Assert this metadata is a tuple as expected. This enables us to
# distinguish between proper access of used items and improper access
# of unused items of the parent fixed list containing this tuple, since
# an unused item of this list is initialized to "None" by default.
assert hint_curr_meta.__class__ is tuple, (
f'Current hint metadata {repr(hint_curr_meta)} at '
f'index {hints_meta_index_curr} not tuple.')
# Localize metadatum for both efficiency and f-string purposes.
hint_curr = hint_curr_meta[_HINT_META_INDEX_HINT]
hint_curr_placeholder = hint_curr_meta[_HINT_META_INDEX_PLACEHOLDER]
pith_curr_expr = hint_curr_meta[_HINT_META_INDEX_PITH_EXPR]
pith_curr_var_name = hint_curr_meta[_HINT_META_INDEX_PITH_VAR_NAME]
indent_curr = hint_curr_meta[_HINT_META_INDEX_INDENT]
# If this is a child hint rather than the root hint, sanify (i.e.,
# sanitize) this hint if this hint is reducible *OR* preserve this hint
# otherwise (i.e., if this hint is irreducible).
#
# Note that the root hint has already been permanently sanified by the
# calling "beartype._decor._wrapper.wrappermain" submodule and thus need
# *NOT* be inefficiently resanified here.
if hints_meta_index_curr:
hint_curr = sanify_hint_child(
hint=hint_curr,
conf=conf,
exception_prefix=_EXCEPTION_PREFIX,
)
# Else, this is the already sanified root hint.
#FIXME: Comment this sanity check out after we're sufficiently
#convinced this algorithm behaves as expected. While useful, this check
#requires a linear search over the entire code and is thus costly.
# assert hint_curr_placeholder in func_wrapper_code, (
# '{} {!r} placeholder {} not found in wrapper body:\n{}'.format(
# hint_curr_exception_prefix, hint, hint_curr_placeholder, func_wrapper_code))
# ................{ PEP }................
# If this hint is PEP-compliant...
if is_hint_pep(hint_curr):
#FIXME: Refactor to call warn_if_hint_pep_unsupported() instead.
#Actually...wait. This is probably still a valid test here. We'll
#need to instead augment the is_hint_ignorable() function to
#additionally test whether the passed hint is unsupported, in which
#case that function should return false as well as emit a non-fatal
#warning ala the new warn_if_hint_pep_unsupported() function --
#which should probably simply be removed now. *sigh*
#FIXME: Actually, in that case, we can simply reduce the following
#two calls to simply:
# die_if_hint_pep_ignorable(
# hint=hint_curr, exception_prefix=hint_curr_exception_prefix)
#Of course, this implies we want to refactor the
#die_if_hint_pep_unsupported() function into
#die_if_hint_pep_ignorable()... probably.
# If this hint is currently unsupported, raise an exception.
#
# Note the human-readable label prefixing the representations of
# child PEP-compliant type hints is unconditionally passed. Since
# the root hint has already been validated to be supported by
# the above call to the same function, this call is guaranteed to
# *NEVER* raise an exception for that hint.
die_if_hint_pep_unsupported(
hint=hint_curr, exception_prefix=_EXCEPTION_PREFIX)
# Else, this hint is supported.
# Assert that this hint is unignorable. Iteration below generating
# code for child hints of the current parent hint is *REQUIRED* to
# explicitly ignore ignorable child hints. Since the caller has
# explicitly ignored ignorable root hints, these two guarantees
# together ensure that all hints visited by this breadth-first
# search *SHOULD* be unignorable. Naturally, we validate that here.
assert not is_hint_ignorable(hint_curr), (
f'{_EXCEPTION_PREFIX}ignorable type hint '
f'{repr(hint_curr)} not ignored.')
# Sign uniquely identifying this hint.
hint_curr_sign = get_hint_pep_sign(hint_curr)
# print(f'Type-checking PEP type hint {repr(hint_curr)} sign {repr(hint_curr_sign)}...')
# If this hint is deprecated, emit a non-fatal warning.
# print(f'Testing {hint_curr_exception_prefix} hint {repr(hint_curr)} for deprecation...')
warn_if_hint_pep_deprecated(
hint=hint_curr, warning_prefix=_EXCEPTION_PREFIX)
# Tuple of all arguments subscripting this hint if any *OR* the
# empty tuple otherwise (e.g., if this hint is its own unsubscripted
# "typing" attribute).
#
# Note that the "__args__" dunder attribute is *NOT* guaranteed to
# exist for arbitrary PEP-compliant type hints. Ergo, we obtain
# this attribute via a higher-level utility getter instead.
hint_childs = get_hint_pep_args(hint_curr)
hint_childs_len = len(hint_childs)
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# NOTE: Whenever adding support for (i.e., when generating code
# type-checking) a new "typing" attribute below, similar support
# for that attribute *MUST* also be added to the parallel:
# * "beartype._util.hint.pep.errormain" submodule, which
# raises exceptions on the current pith failing this check.
# * "beartype._data.hint.pep.sign.datapepsignset.HINT_SIGNS_SUPPORTED_DEEP"
# frozen set of all signs for which this function generates
# deeply type-checking code.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#FIXME: Python 3.10 provides proper syntactic support for "case"
#statements, which should allow us to dramatically optimize this
#"if" logic into equivalent "case" logic *AFTER* we drop support
#for Python 3.9. Of course, that will be basically never, so we'll
#have to preserve this for basically forever. What you gonna do?
# Switch on (as in, pretend Python provides a "case" statement)
# the sign identifying this hint to decide which type of code to
# generate to type-check the current pith against the current hint.
#
# This decision is intentionally implemented as a linear series of
# tests ordered in descending likelihood for efficiency. While
# alternative implementations (that are more readily readable and
# maintainable) do exist, these alternatives all appear to be
# substantially less efficient.
#
# Consider the standard alternative of sequestering the body of
# each test implemented below into either:
#
# * A discrete private function called by this function. This
# approach requires maintaining a global private dictionary
# mapping from each supported unsubscripted typing attribute to
# the function generating code for that attribute: e.g.,
# def pep_code_check_union(...): ...
# _HINT_TYPING_ATTR_ARGLESS_TO_CODER = {
# typing.Union: pep_code_check_union,
# }
# Each iteration of this loop then looks up the function
# generating code for the current attribute from this dictionary
# and calls that function to do so. Function calls come with
# substantial overhead in Python, impacting performance more
# than the comparable linear series of tests implemented below.
# Additionally, these functions *MUST* mutate local variables of
# this function by some arcane means -- either:
# * Passing these locals to each such function, returning these
# locals from each such function, and assigning these return
# values to these locals in this function after each such call.
# * Passing a single composite fixed list of these locals to each
# such function, which then mutates these locals in-place,
# which then necessitates this function permanently store these
# locals in such a list rather than as local variables.
# * A discrete closure of this function, which adequately resolves
# the aforementioned locality issue via the "nonlocal" keyword at
# a substantial up-front performance cost of redeclaring these
# closures on each invocation of this function.
#
# ..............{ SHALLOW }..............
# Perform shallow type-checking logic (i.e., logic that does *NOT*
# recurse and thus "bottoms out" at this hint) *BEFORE* deep
# type-checking logic. The latter needs additional setup (e.g.,
# generation of assignment expressions) *NOT* needed by the former,
# whose requirements are more understandably minimalist.
#
# ..............{ ORIGIN }..............
# If this hint both...
if (
# Originates from an origin type and may thus be shallowly
# type-checked against that type *AND* is either...
hint_curr_sign in HINT_SIGNS_ORIGIN_ISINSTANCEABLE and (
# Unsubscripted *OR*...
not is_hint_pep_args(hint_curr) or
#FIXME: Remove this branch *AFTER* deeply supporting all
#hints.
# Currently unsupported with deep type-checking...
hint_curr_sign not in HINT_SIGNS_SUPPORTED_DEEP
)
):
# Then generate trivial code shallowly type-checking the current
# pith as an instance of the origin type originating this sign
# (e.g., "list" for the hint "typing.List[int]").
# Code type-checking the current pith against this origin type.
func_curr_code = PEP484_CODE_HINT_INSTANCE_format(
pith_curr_expr=pith_curr_expr,
# Python expression evaluating to this origin type.
hint_curr_expr=add_func_scope_type(
# Origin type of this hint if any *OR* raise an
# exception -- which should *NEVER* happen, as this
# hint was validated above to be supported.
cls=get_hint_pep_origin_type_isinstanceable(hint_curr),
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
),
)
# Else, this hint is either subscripted, not shallowly
# type-checkable, *OR* deeply type-checkable.
#
# ..............{ FORWARDREF }..............
# If this hint is a forward reference...
elif hint_curr_sign is HintSignForwardRef:
# Render this forward reference accessible to the body of this
# wrapper function by populating:
# * Python expression evaluating to the class referred to by
# this forward reference when accessed via the private
# "__beartypistry" parameter.
# * Set of the unqualified classnames referred to by all
# relative forward references, including this reference if
# relative. If this set was previously uninstantiated (i.e.,
# "None"), this assignment initializes this local to the new
# set instantiated by this call; else, this assignment
# preserves this local set as is.
hint_curr_expr, hint_forwardrefs_class_basename = (
express_func_scope_type_forwardref(
forwardref=hint_curr,
forwardrefs_class_basename=(
hint_forwardrefs_class_basename),
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX,
))
# Code type-checking the current pith against this class.
func_curr_code = PEP484_CODE_HINT_INSTANCE_format(
pith_curr_expr=pith_curr_expr,
hint_curr_expr=hint_curr_expr,
)
# Else, this hint is *NOT* a forward reference.
#
# Since this hint is *NOT* shallowly type-checkable, this hint
# *MUST* be deeply type-checkable. So, we do so now.
#
# ..............{ DEEP }..............
# Perform deep type-checking logic (i.e., logic that recurses and
# thus does *NOT* "bottom out" at this hint).
else:
# Python code snippet expanding to the current level of
# indentation appropriate for the current child hint.
#
# Note that this is almost always but technically *NOT* always
# required below by logic generating code type-checking the
# currently visited parent hint. Naturally, unconditionally
# setting this string here trivially optimizes the common case.
indent_child = f'{indent_curr}{_CODE_INDENT_1}'
# ............{ DEEP ~ expression }............
#FIXME: Unit test that this is behaving as expected. Doing so will
#require further generalizations, including:
#* In the "beartype._decor.decormain" submodule:
# * Detect when running under tests.
# * When running under tests, define a new
# "func_wrapper.__beartype_wrapper_code" attribute added to
# decorated callables to be the "func_wrapper_code" string
# rather than True. Note that this obviously isn't the right way
# to do source code association. Ideally, we'd at least
# interface with the stdlib "linecache" module (e.g., by calling
# the linecache.lazycache() function intended to be used to
# cache the source code for non-file-based modules) and possibly
# even go so far as to define a PEP 302-compatible beartype
# module loader. That's out of scope, so this suffices for now.
#* In the "beartype_test.a00_unit.data._data_hint_pep" submodule:
# * Add a new "_PepHintMetadata.code_str_match_regexes" field,
# defined as an iterable of regular expressions matching
# substrings of the "func_wrapper.__beartype_wrapper_code"
# attribute that are expected to exist.
# * For most "HINTS_PEP_META" entries, default this field to
# merely the empty tuple.
# * For deeply nested "HINTS_PEP_META" entries, define this
# field as follows:
# code_str_match_regexes=(r'\s+:=\s+',)
#* In the "beartype_test.a00_unit.pep.p484.test_p484" submodule:
# * Match the "pep_hinted.__beartype_wrapper_code" string against
# all regular expressions in the "code_str_match_regexes"
# iterable for the currently iterated "pep_hint_meta".
#
#This is fairly important, as we have no other reusable means of
#ascertaining whether this is actually being applied in general.
#FIXME: That's all great, except for the
#"func_wrapper.__beartype_wrapper_code" part. Don't do that,
#please. We really do just want to do this right the first time. As
#expected, the key to doing so is the linecache.lazycache()
#function, whose implementation under Python 3.7 reads:
#
# def lazycache(filename, module_globals):
# """Seed the cache for filename with module_globals.
#
# The module loader will be asked for the source only when getlines is
# called, not immediately.
#
# If there is an entry in the cache already, it is not altered.
#
# :return: True if a lazy load is registered in the cache,
# otherwise False. To register such a load a module loader with a
# get_source method must be found, the filename must be a cachable
# filename, and the filename must not be already cached.
# """
# if filename in cache:
# if len(cache[filename]) == 1:
# return True
# else:
# return False
# if not filename or (filename.startswith('<') and filename.endswith('>')):
# return False
# # Try for a __loader__, if available
# if module_globals and '__loader__' in module_globals:
# name = module_globals.get('__name__')
# loader = module_globals['__loader__']
# get_source = getattr(loader, 'get_source', None)
#
# if name and get_source:
# get_lines = functools.partial(get_source, name)
# cache[filename] = (get_lines,)
# return True
# return False
#
#Given that, what we need to do is:
#* Define a new "beartype._decor._pep302" submodule implementing a
# PEP 302-compatible loader for @beartype-generated wrapper
# functions, enabling external callers (including the stdlib
# "linecache" module) to obtain the source for these functions.
# For space efficiency, this submodule should internally store
# code in a compressed format -- which probably means "gzip" for
# maximal portability. This submodule should at least define these
# attributes:
# * "_FUNC_WRAPPER_MODULE_NAME_TO_CODE", a dictionary mapping from
# the unique fake module names assigned to @beartype-generated
# wrapper functions by the @beartype decorator to the compressed
# source strings for those fake modules.
# * get_source(), a function accepting one unique fake module name
# assigned to an arbitrary @beartype-generated wrapper function
# by the @beartype decorator and returning the uncompressed
# source string for that fake module. Clearly, this function
# should internally access the
# "_FUNC_WRAPPER_MODULE_NAME_TO_CODE" dictionary and either:
# * If the passed module name has *NOT* already been registered
# to that dictionary, raise an exception.
# * Else, uncompress the compressed source string previously
# registered under that module name with that dictionary and
# return that uncompressed string. Don't worry about caching
# uncompressed strings here; that's exactly what the stdlib
# "linecache" module already does on our behalf.
# Ergo, this function should have signature resembling:
# def get_source(func_wrapper_module_name: str) -> str:
# * set_source(), a function accepting one unique fake module name
# assigned to an arbitrary @beartype-generated wrapper function
# by the @beartype decorator as well as the uncompressed
# source string for that fake module. Clearly, this function
# should internally access the
# "_FUNC_WRAPPER_MODULE_NAME_TO_CODE" dictionary and either:
# * If the passed module name has already been registered to
# that dictionary, raise an exception.
# * Else, compress the passed uncompressed source string and
# register that compressed string under that module name with
# that dictionary.
#* In the "beartype._decor.decormain" submodule:
# * Do... something? Oh, boy. Why didn't we finish this comment?
# If the active Python interpreter targets Python >= 3.8 and
# thus supports assignment expressions...
if IS_PYTHON_AT_LEAST_3_8:
# If...
if (
# The current pith is not the root pith *AND*...
#
# Note that we explicitly test against piths rather
# than seemingly equivalent metadata to account for
# edge cases. Notably, child hints of unions (and
# possibly other "typing" objects) do *NOT* narrow the
# current pith and are *NOT* the root hint. Ergo, a
# seemingly equivalent test like "hints_meta_index_curr
# != 0" would generate false positives and thus
# unnecessarily inefficient code.
pith_curr_expr is not VAR_NAME_PITH_ROOT and
#FIXME: Overly ambiguous, unfortunately. This suffices
#for now but absolutely *WILL* fail with inscrutable
#errors under some future release. The issue is that
#this trivial test reports false negatives for
#sufficiently complex "pith_curr_expr" strings.
#
#For example, if "pith_curr_expr ==
#'(yam := yum[0])[1]'", the detection below would
#incorrectly detect that as being an assignment
#expression. It isn't. It *CONTAINS* an embedded
#assignment expression, but it itself is *NOT* an
#assignment expression. Ergo, that "pith_curr_expr"
#should be assigned via an assignment expression here.
#
#To handle embedded assignment expressions like that,
#we'll probably need to generalize this yet again:
#* Define a new "HINT_META_INDEX_IS_PITH_EXPR_ASSIGN"
# global.
#* Define a new "is_pith_curr_expr_assign" local,
# "True" only if "pith_curr_expr" itself is an
# assignment expression, defaulting to "False":
# is_pith_curr_expr_assign = False
#* Assign above:
# is_pith_curr_expr_assign = hint_curr_meta[
# HINT_META_INDEX_IS_PITH_EXPR_ASSIGN]
#* Assign below in the body of this "if" conditional:
# is_pith_curr_expr_assign = True
#* Assign below in the body of this "else" branch:
# is_pith_curr_expr_assign = False
#* Pass "is_pith_curr_expr_assign" in the
# _enqueue_hint_child() closure above.
#* Replace this "':=' not in pith_curr_expr" test here
# with "not is_pith_curr_expr_assign" instead.
#
#Voila! What could be simpler? O_o
# The current pith expression does *NOT* already
# perform an assignment expression...
#
# If the current pith expression already performs an
# assignment expression, there's no benefit to
# assigning that to another local variable via another
# assignment expression, which would just be an alias
# of the existing local variable assigned via the
# existing assignment expression. Moreover, whereas
# chained assignments are syntactically valid, chained
# assignment expressions are syntactically invalid
# unless protected with parens:
# >>> a = b = 'Mother*Teacher*Destroyer' # <-- fine
# >>> (a := "Mother's Abomination") # <-- fine
# >>> (a := (b := "Mother's Illumination")) # <-- fine
# >>> (a := b := "Mother's Illumination") # <-- not fine
# SyntaxError: invalid syntax
':=' not in pith_curr_expr
):
# Then all conditions needed to assign the current pith to a
# unique local variable via a Python >= 3.8-specific
# assignment expression are satisfied. In this case...
# Increment the integer suffixing the name of this
# variable *BEFORE* defining this local variable.
pith_curr_assign_expr_name_counter += 1
# Reduce the current pith expression to the name of
# this local variable.
pith_curr_var_name = (
f'{VAR_NAME_PREFIX_PITH}'
f'{pith_curr_assign_expr_name_counter}'
)
# Python >= 3.8-specific assignment expression
# assigning this full expression to this variable.
pith_curr_assign_expr = (
PEP_CODE_PITH_ASSIGN_EXPR_format(
pith_curr_var_name=pith_curr_var_name,
pith_curr_expr=pith_curr_expr,
))
# Else, one or more of the above conditions have *NOT* been
# satisfied. In this case, preserve the Python code snippet
# evaluating to the current pith as is.
else:
pith_curr_assign_expr = pith_curr_expr
# Else, the active Python interpreter targets Python < 3.8 and
# thus does *NOT* support assignment expressions. In this case,
# assign the variables assigned above to sane expressions.
else:
pith_curr_assign_expr = pith_curr_var_name = pith_curr_expr
# ............{ UNION }............
# If this hint is a union (e.g., "typing.Union[bool, str]",
# typing.Optional[float]")...
#
# Note that unions are non-physical abstractions of physical
# types and thus *NOT* themselves subject to type-checking;
# only the subscripted arguments of unions are type-checked.
# This differs from "typing" pseudo-containers like
# "List[int]", in which both the parent "List" and child "int"
# types represent physical types to be type-checked. Ergo,
# unions themselves impose no narrowing of the current pith
# expression and thus *CANNOT* by definition benefit from
# Python >= 3.8-specific assignment expressions. This differs
# from "typing" pseudo-containers, which narrow the current
# pith expression and thus do benefit from these expressions.
if hint_curr_sign in HINT_SIGNS_UNION:
# Assert this union to be subscripted by one or more child
# hints. Note this should *ALWAYS* be the case, as:
# * The unsubscripted "typing.Union" object is explicitly
# listed in the "HINTS_REPR_IGNORABLE_SHALLOW" set and
# should thus have already been ignored when present.
# * The "typing" module explicitly prohibits empty union
# subscription: e.g.,
# >>> typing.Union[]
# SyntaxError: invalid syntax
# >>> typing.Union[()]
# TypeError: Cannot take a Union of no types.
assert hint_childs, (
f'{_EXCEPTION_PREFIX}union type hint '
f'{repr(hint_curr)} unsubscripted.')
# Else, this union is subscripted by two or more arguments.
# Why two rather than one? Because the "typing" module
# reduces unions of one argument to that argument: e.g.,
# >>> import typing
# >>> typing.Union[int]
# int
# For efficiency, reuse previously created sets of the
# following (when available):
# * "hint_childs_nonpep", the set of all PEP-noncompliant
# child hints subscripting this union.
# * "hint_childs_pep", the set of all PEP-compliant child
# hints subscripting this union.
#
# Since these child hints require fundamentally different
# forms of type-checking, prefiltering child hints into
# these sets *BEFORE* generating code type-checking these
# child hints improves both efficiency and maintainability.
hint_childs_nonpep = acquire_object_typed(set)
hint_childs_pep = acquire_object_typed(set)
# Clear these sets prior to use below.
hint_childs_nonpep.clear()
hint_childs_pep.clear()
# For each subscripted argument of this union...
for hint_child in hint_childs:
#FIXME: Uncomment as desired for debugging. This test is
#currently a bit too costly to warrant uncommenting.
# Assert that this child hint is *NOT* shallowly ignorable.
# Why? Because any union containing one or more shallowly
# ignorable child hints is deeply ignorable and should thus
# have already been ignored after a call to the
# is_hint_ignorable() tester passed this union on handling
# the parent hint of this union.
# assert (
# repr(hint_curr) not in HINTS_REPR_IGNORABLE_SHALLOW), (
# f'{hint_curr_exception_prefix} {repr(hint_curr)} child '
# f'{repr(hint_child)} ignorable but not ignored.')
# If this child hint is PEP-compliant...
if is_hint_pep(hint_child):
# Filter this child hint into the set of
# PEP-compliant child hints.
#
# Note that this PEP-compliant child hint *CANNOT*
# also be filtered into the set of PEP-noncompliant
# child hints, even if this child hint originates
# from a non-"typing" type (e.g., "List[int]" from
# "list"). Why? Because that would then induce
# false positives when the current pith shallowly
# satisfies this non-"typing" type but does *NOT*
# deeply satisfy this child hint.
hint_childs_pep.add(hint_child)
# Else, this child hint is PEP-noncompliant. In this
# case, filter this child hint into the list of
# PEP-noncompliant arguments.
else:
hint_childs_nonpep.add(hint_child)
# Initialize the code type-checking the current pith
# against these arguments to the substring prefixing all
# such code.
func_curr_code = PEP484_CODE_HINT_UNION_PREFIX
# If this union is subscripted by one or more
# PEP-noncompliant child hints, generate and append
# efficient code type-checking these child hints *BEFORE*
# less efficient code type-checking any PEP-compliant child
# hints subscripting this union.
if hint_childs_nonpep:
func_curr_code += (
PEP484_CODE_HINT_UNION_CHILD_NONPEP_format(
# Python expression yielding the value of the
# current pith. Specifically...
pith_curr_expr=(
# If this union is subscripted by one or
# more PEP-compliant child hints, prefer
# the expression assigning this value to a
# local variable efficiently reused by
# subsequent code generated for
# PEP-compliant child hints.
pith_curr_assign_expr
if hint_childs_pep else
# Else, this union is *NOT* subscripted by
# one or more PEP-compliant child hints.
# Since this is the first and only test
# generated for this union, prefer the
# expression yielding the value of the
# current pith *WITHOUT* assigning this
# value to a local variable, which would
# otherwise pointlessly go unused.
pith_curr_expr
),
# Python expression evaluating to a tuple of
# these arguments.
#
# Note that we would ideally avoid coercing
# this set into a tuple when this set only
# contains one type by passing that type
# directly to the
# _add_func_wrapper_local_type() function.
# Sadly, the "set" class defines no convenient
# or efficient means of retrieving the only
# item of a 1-set. Indeed, the most efficient
# means of doing so is to iterate over that set
# and break:
# for first_item in muh_set: break
# While we *COULD* technically leverage that
# approach here, doing so would also mandate
# adding multiple intermediate tests,
# mitigating any performance gains. Ultimately,
# we avoid doing so by falling back to the
# usual approach. See also this relevant
# self-StackOverflow post:
# https://stackoverflow.com/a/40054478/2809027
hint_curr_expr=add_func_scope_types(
types=hint_childs_nonpep,
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
),
))
# For each PEP-compliant child hint of this union, generate
# and append code type-checking this child hint.
for hint_child_index, hint_child in enumerate(
hint_childs_pep):
func_curr_code += (
PEP484_CODE_HINT_UNION_CHILD_PEP_format(
# Python expression yielding the value of the
# current pith.
hint_child_placeholder=_enqueue_hint_child(
# If this union is subscripted by either...
#
# Then prefer the expression efficiently
# reusing the value previously assigned to
# a local variable by either the above
# conditional or prior iteration of the
# current conditional.
pith_curr_var_name
if (
# One or more PEP-noncompliant child
# hints *OR*...
hint_childs_nonpep or
# This is any PEP-compliant child hint
# *EXCEPT* the first...
hint_child_index
) else
# Else, this union is both subscripted by
# no PEP-noncompliant child hints *AND*
# this is the first PEP-compliant child
# hint, prefer the expression assigning
# this value to a local variable
# efficiently reused by code generated by
# subsequent iteration.
#
# Note this child hint is guaranteed to be
# followed by at least one more child hint.
# Why? Because the "typing" module forces
# unions to be subscripted by two or more
# child hints. By deduction, those child
# hints *MUST* be PEP-compliant. Ergo, we
# needn't explicitly validate that
# constraint here.
pith_curr_assign_expr
)))
# If this code is *NOT* its initial value, this union is
# subscripted by one or more unignorable child hints and
# the above logic generated code type-checking these child
# hints. In this case...
if func_curr_code is not (
PEP484_CODE_HINT_UNION_PREFIX):
# Munge this code to...
func_curr_code = (
# Strip the erroneous " or" suffix appended by the
# last child hint from this code.
f'{func_curr_code[:_LINE_RSTRIP_INDEX_OR]}'
# Suffix this code by the substring suffixing all
# such code.
f'{PEP484_CODE_HINT_UNION_SUFFIX}'
# Format the "indent_curr" prefix into this code
# deferred above for efficiency.
).format(indent_curr=indent_curr)
# Else, this snippet is its initial value and thus
# ignorable.
# Release this pair of sets back to their respective pools.
release_object_typed(hint_childs_nonpep)
release_object_typed(hint_childs_pep)
# Else, this hint is *NOT* a union.
#
# ..........{ SEQUENCES ~ variadic }............
# If this hint is either...
elif (
# A standard sequence (e.g., "typing.List[int]") *OR*...
hint_curr_sign in HINT_SIGNS_SEQUENCE_ARGS_1 or (
# A tuple *AND*...
hint_curr_sign is HintSignTuple and
# This tuple is subscripted by exactly two child hints
# *AND*...
hint_childs_len == 2 and
# The second child hint is just an unquoted ellipsis...
hint_childs[1] is Ellipsis
)
# Then this hint is of the form "Tuple[{typename}, ...]",
# typing a tuple accepting a variadic number of items all
# satisfying the "{typename}" child hint. Since this case
# is semantically equivalent to that of standard sequences,
# we transparently handle both here for maintainability.
#
# See below for logic handling fixed-length tuples.
):
# Then this hint is either a single-argument sequence *OR* a
# similar hint semantically resembling a single-argument
# sequence subscripted by one argument and one or more
# ignorable arguments.
# Python expression evaluating to this origin type.
hint_curr_expr = add_func_scope_type(
# Origin type of this sequence.
cls=get_hint_pep_origin_type_isinstanceable(hint_curr),
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
)
# print(f'Sequence type hint {hint_curr} origin type scoped: {hint_curr_expr}')
# If this hint is a fixed-length tuple, the parent "if"
# statement above has already validated the contents of
# this tuple. In this case, efficiently get the lone child
# hint of this parent hint *WITHOUT* validation.
if hint_curr_sign is HintSignTuple:
hint_child = hint_childs[0]
# Else, this hint is a single-argument sequence, in which
# case the contents of this sequence have yet to be
# validated. In this case, inefficiently get the lone child
# hint of this parent hint *WITH* validation.
else:
hint_child = get_hint_pep484585_args_1(
hint=hint_curr,
exception_prefix=_EXCEPTION_PREFIX,
)
# If this child hint is *NOT* ignorable, deeply type-check
# both the type of the current pith *AND* a randomly
# indexed item of this pith. Specifically...
if not is_hint_ignorable(hint_child):
# Record that a pseudo-random integer is now required.
is_var_random_int_needed = True
# Code type-checking this pith against this type.
func_curr_code = (
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_format(
indent_curr=indent_curr,
pith_curr_assign_expr=pith_curr_assign_expr,
pith_curr_var_name=(
pith_curr_var_name),
hint_curr_expr=hint_curr_expr,
hint_child_placeholder=_enqueue_hint_child(
# Python expression yielding the value of a
# randomly indexed item of the current pith
# (i.e., standard sequence) to be
# type-checked against this child hint.
PEP484585_CODE_HINT_SEQUENCE_ARGS_1_PITH_CHILD_EXPR_format(
pith_curr_var_name=(
pith_curr_var_name))),
))
# Else, this child hint is ignorable. In this case,
# fallback to generating trivial code shallowly
# type-checking the current pith as an instance of this
# origin type.
else:
func_curr_code = (
PEP484_CODE_HINT_INSTANCE_format(
pith_curr_expr=pith_curr_expr,
hint_curr_expr=hint_curr_expr,
))
# Else, this hint is neither a standard sequence *NOR* variadic
# tuple.
#
# ............{ SEQUENCES ~ tuple : fixed }............
# If this hint is a tuple, this tuple is *NOT* of the variadic
# form and *MUST* thus be of the fixed-length form.
#
# Note that if this hint is a:
# * PEP 484-compliant "typing.Tuple"-based hint, this hint is
# guaranteed to contain one or more child hints. Moreover, if
# this hint contains exactly one child hint that is the empty
# tuple, this hint is the empty fixed-length form
# "typing.Tuple[()]".
# * PEP 585-compliant "tuple"-based hint, this hint is *NOT*
# guaranteed to contain one or more child hints. If this hint
# contains *NO* child hints, this hint is equivalent to the
# empty fixed-length PEP 484-compliant form
# "typing.Tuple[()]". Yes, PEP 585 even managed to violate
# PEP 484-compliance. UUUURGH!
#
# While tuples are sequences, the "typing.Tuple" singleton that
# types tuples violates the syntactic norms established for
# other standard sequences by concurrently supporting two
# different syntaxes with equally different semantics:
# * "typing.Tuple[{typename}, ...]", typing a tuple whose items
# all satisfy the "{typename}" child hint. Note that the
# "..." substring here is a literal ellipses.
# * "typing.Tuple[{typename1}, {typename2}, ..., {typenameN}]",
# typing a tuple whose:
# * First item satisfies the "{typename1}" child hint.
# * Second item satisfies the "{typename2}" child hint.
# * Last item satisfies the "{typenameN}" child hint.
# Note that the "..." substring here is *NOT* a literal
# ellipses.
#
# This is what happens when non-human-readable APIs are
# promoted.
elif hint_curr_sign is HintSignTuple:
# Assert this tuple is *NOT* of the syntactic form
# "typing.Tuple[{typename}, ...]" handled by prior logic.
assert (
hint_childs_len <= 1 or
hint_childs[1] is not Ellipsis
), (f'{_EXCEPTION_PREFIX}variadic tuple type hint '
f'{repr(hint_curr)} unhandled.')
# Initialize the code type-checking this pith against this
# tuple to the substring prefixing all such code.
func_curr_code = PEP484585_CODE_HINT_TUPLE_FIXED_PREFIX
# If this hint is the empty fixed-length tuple, generate
# and append code type-checking the current pith to be the
# empty tuple. This edge case constitutes a code smell.
if is_hint_pep484585_tuple_empty(hint_curr):
func_curr_code += (
PEP484585_CODE_HINT_TUPLE_FIXED_EMPTY_format(
pith_curr_var_name=(
pith_curr_var_name),
))
# Else, that ridiculous edge case does *NOT* apply. In this
# case...
else:
# Append code type-checking the length of this pith.
func_curr_code += (
PEP484585_CODE_HINT_TUPLE_FIXED_LEN_format(
pith_curr_var_name=(
pith_curr_var_name),
hint_childs_len=hint_childs_len,
))
# For each child hint of this tuple...
for hint_child_index, hint_child in enumerate(
hint_childs):
# If this child hint is ignorable, skip to the
# next.
if is_hint_ignorable(hint_child):
continue
# Else, this child hint is unignorable.
# Append code type-checking this child pith.
func_curr_code += PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_CHILD_format(
hint_child_placeholder=_enqueue_hint_child(
# Python expression yielding the value of
# the currently indexed item of this tuple
# to be type-checked against this child
# hint.
PEP484585_CODE_HINT_TUPLE_FIXED_NONEMPTY_PITH_CHILD_EXPR_format(
pith_curr_var_name=(
pith_curr_var_name),
pith_child_index=hint_child_index,
)
),
)
# Munge this code to...
func_curr_code = (
# Strip the erroneous " and" suffix appended by the
# last child hint from this code.
f'{func_curr_code[:_LINE_RSTRIP_INDEX_AND]}'
# Suffix this code by the substring suffixing all such
# code.
f'{PEP484585_CODE_HINT_TUPLE_FIXED_SUFFIX}'
# Format...
).format(
# Indentation deferred above for efficiency.
indent_curr=indent_curr,
pith_curr_assign_expr=pith_curr_assign_expr,
)
# Else, this hint is *NOT* a tuple.
#
# ............{ ANNOTATED }............
# If this hint is a PEP 593-compliant type metahint, this
# metahint is guaranteed by the reduction performed above to be
# beartype-specific (i.e., metahint whose second argument is a
# beartype validator produced by subscripting a beartype
# validator factory). In this case...
elif hint_curr_sign is HintSignAnnotated:
# Defer heavyweight imports.
from beartype.vale._core._valecore import BeartypeValidator
# PEP-compliant type hint annotated by this metahint,
# localized to the "hint_child" local variable to satisfy
# the public API of the _enqueue_hint_child() closure
# called below.
hint_child = get_hint_pep593_metahint(hint_curr)
# Initialize the code type-checking this pith against this
# metahint to the substring prefixing all such code.
#
# Note that we intentionally do *NOT* defer formatting
# these variables into this string as we do for most other
# kinds of type hints. Why? Safety. Since caller-defined
# code could theoretically embed substrings accidentally
# matching these variable names, we safely (but
# inefficiently) format these variables into the exact
# strings known to embed them.
func_curr_code = (
PEP593_CODE_HINT_VALIDATOR_PREFIX_format(
indent_curr=indent_curr,
hint_child_placeholder=_enqueue_hint_child(
# Python expression yielding the value of the
# current pith assigned to a local variable
# efficiently reused by code generated by the
# following iteration.
#
# Note this child hint is guaranteed to be
# followed by at least one more test expression
# referencing this local variable. Why? Because
# the "typing" module forces metahints to be
# subscripted by one child hint and one or more
# arbitrary objects. Ergo, we needn't
# explicitly validate that here.
pith_curr_assign_expr),
))
# For each beartype validator annotating this metahint...
for hint_child in get_hint_pep593_metadata(hint_curr):
# print(f'Type-checking PEP 593 type hint {repr(hint_curr)} argument {repr(hint_child)}...')
# If this is *NOT* a beartype validator, raise an
# exception.
#
# Note that the previously called sanify_hint_child()
# function validated only the first such to be a
# beartype validator. All remaining arguments have yet
# to be validated, so we do so now for consistency and
# safety.
if not isinstance(hint_child, BeartypeValidator):
raise BeartypeDecorHintPep593Exception(
f'{_EXCEPTION_PREFIX}PEP 593 type hint '
f'{repr(hint_curr)} subscripted by both '
f'@beartype-specific and -agnostic metadata '
f'(i.e., {represent_object(hint_child)} not '
f'beartype validator).'
)
# Else, this argument is beartype-specific.
# Generate and append efficient code type-checking this
# validator by embedding this code as is.
func_curr_code += (
PEP593_CODE_HINT_VALIDATOR_CHILD_format(
indent_curr=indent_curr,
# Python expression formatting the current pith
# into the "{obj}" variable already embedded by
# that class into this code.
hint_child_expr=(
hint_child._is_valid_code.format(
indent=indent_child,
obj=pith_curr_var_name,
)),
))
# Generate locals safely merging the locals required by
# both this validator code *AND* the current code
# type-checking this entire root hint.
update_mapping(
mapping_trg=func_wrapper_scope,
mapping_src=hint_child._is_valid_code_locals,
)
# Munge this code to...
func_curr_code = (
# Strip the erroneous " and" suffix appended by the
# last child hint from this code.
f'{func_curr_code[:_LINE_RSTRIP_INDEX_AND]}'
# Suffix this code by the substring suffixing all such
# code.
f'{PEP593_CODE_HINT_VALIDATOR_SUFFIX_format(indent_curr=indent_curr)}'
)
# Else, this hint is *NOT* a metahint.
#
# ............{ SUBCLASS }............
# If this hint is either a PEP 484- or 585-compliant subclass
# type hint...
elif hint_curr_sign is HintSignType:
#FIXME: Optimization: if the superclass is an ignorable
#class (e.g., "object", "Protocol"), this type hint is
#ignorable (e.g., "Type[object]", "type[Protocol]"). We'll
#thus want to:
#* Add that detection logic to one or more
# is_hint_*_ignorable() testers elsewhere.
#* Call is_hint_ignorable() below.
#* Unit test such type hints to indeed be ignorable.
# Superclass this pith is required to be a subclass of.
hint_child = get_hint_pep484585_subclass_superclass(
hint=hint_curr,
exception_prefix=_EXCEPTION_PREFIX,
)
# If this superclass is either a class *OR* tuple of
# classes...
if isinstance(hint_child, TestableTypes):
# Python expression evaluating to this superclass.
hint_curr_expr = add_func_scope_type_or_types(
type_or_types=hint_child, # type: ignore[arg-type]
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
)
# Else, this superclass is *NOT* actually a class. By
# process of elimination and the validation already
# performed above by the
# get_hint_pep484585_subclass_superclass() getter, this
# superclass *MUST* be a forward reference to a class.
else:
# Render this forward reference accessible to the body
# of this wrapper function. See above for commentary.
hint_curr_expr, hint_forwardrefs_class_basename = (
express_func_scope_type_forwardref(
forwardref=hint_child, # type: ignore[arg-type]
forwardrefs_class_basename=(
hint_forwardrefs_class_basename),
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX,
))
# Code type-checking this pith against this superclass.
func_curr_code = PEP484585_CODE_HINT_SUBCLASS_format(
pith_curr_assign_expr=pith_curr_assign_expr,
pith_curr_var_name=pith_curr_var_name,
hint_curr_expr=hint_curr_expr,
indent_curr=indent_curr,
)
# Else, this hint is neither a PEP 484- nor 585-compliant
# subclass type hint.
#
# ............{ GENERIC or PROTOCOL }............
# If this hint is either a:
# * PEP 484-compliant generic (i.e., user-defined class
# subclassing a combination of one or more of the
# "typing.Generic" superclass and other "typing" non-class
# pseudo-superclasses) *OR*...
# * PEP 544-compliant protocol (i.e., class subclassing a
# combination of one or more of the "typing.Protocol"
# superclass and other "typing" non-class
# pseudo-superclasses) *OR*...
# * PEP 585-compliant generic (i.e., user-defined class
# subclassing at least one non-class PEP 585-compliant
# pseudo-superclasses) *OR*...
# Then this hint is a PEP-compliant generic. In this case...
elif hint_curr_sign is HintSignGeneric:
#FIXME: *THIS IS NON-IDEAL.* Ideally, we should propagate
#*ALL* child type hints subscripting a generic up to *ALL*
#pseudo-superclasses of that generic (e.g., the "int" child
#hint subscripting a parent hint "MuhGeneric[int]" of type
#"class MuhGeneric(list[T]): pass" up to its "list[T]"
#pseudo-superclass).
#
#For now, we just strip *ALL* child type hints subscripting
#a generic with the following call. This suffices, because
#we just need this to work. So it goes, uneasy code
#bedfellows.
# Reduce this hint to the object originating this generic
# (if any) by stripping all child type hints subscripting
# this hint from this hint. Why? Because these child type
# hints convey *NO* meaningful semantics and are thus
# safely ignorable. Consider this simple example, in which
# the subscription "[int]" not only conveys *NO* meaningful
# semantics but actually conveys paradoxically conflicting
# semantics contradicting the original generic declaration:
# class ListOfListsOfStrs(list[list[str]]): pass
# ListOfListsOfStrs[int] # <-- *THIS MEANS NOTHING*
#
# Specifically:
# * If this hint is an unsubscripted generic (e.g.,
# "typing.IO"), preserve this hint as is. In this case,
# this hint is a standard isinstanceable class.
# * If this hint is a subscripted generic (e.g.,
# "typing.IO[str]"), reduce this hint to the object
# originating this generic (e.g., "typing.IO").
hint_curr = get_hint_pep484585_generic_type(
hint=hint_curr, exception_prefix=_EXCEPTION_PREFIX)
# Initialize the code type-checking this pith against this
# generic to the substring prefixing all such code.
func_curr_code = PEP484585_CODE_HINT_GENERIC_PREFIX
# For each unignorable unerased transitive pseudo-superclass
# originally declared as a superclass of this generic...
for hint_child in (
iter_hint_pep484585_generic_bases_unerased_tree(
hint=hint_curr,
exception_prefix=_EXCEPTION_PREFIX,
)):
# Generate and append code type-checking this pith
# against this superclass.
func_curr_code += (
PEP484585_CODE_HINT_GENERIC_CHILD_format(
hint_child_placeholder=(_enqueue_hint_child(
# Python expression efficiently reusing the
# value of this pith previously assigned to
# a local variable by the prior expression.
pith_curr_var_name))))
# Munge this code to...
func_curr_code = (
# Strip the erroneous " and" suffix appended by the
# last child hint from this code.
f'{func_curr_code[:_LINE_RSTRIP_INDEX_AND]}'
# Suffix this code by the substring suffixing all such
# code.
f'{PEP484585_CODE_HINT_GENERIC_SUFFIX}'
# Format...
).format(
# Indentation deferred above for efficiency.
indent_curr=indent_curr,
pith_curr_assign_expr=pith_curr_assign_expr,
# Python expression evaluating to this generic type.
hint_curr_expr=add_func_scope_type(
cls=hint_curr,
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
),
)
# print(f'{hint_curr_exception_prefix} PEP generic {repr(hint)} handled.')
# Else, this hint is *NOT* a generic.
#
# ............{ LITERAL }............
# If this hint is a PEP 586-compliant type hint (i.e., the
# "typing.Literal" singleton subscripted by one or more literal
# objects), this hint is largely useless and thus intentionally
# detected last. Why? Because "typing.Literal" is subscriptable
# by objects that are instances of only *SIX* possible types,
# which is sufficiently limiting as to render this singleton
# patently absurd and a farce that we weep to even implement.
# In this case...
elif hint_curr_sign is HintSignLiteral:
# If this hint does *NOT* comply with PEP 586 despite being
# a "typing.Literal" subscription, raise an exception.
die_unless_hint_pep586(
hint=hint_curr,
exception_prefix=_EXCEPTION_PREFIX,
)
# Else, this hint complies with PEP 586 and is thus
# subscripted by one or more compliant literal objects.
# Tuple of zero or more literal objects subscripting this
# hint, intentionally replacing the current such tuple due
# to the non-standard implementation of the third-party
# "typing_extensions.Literal" type hint factory.
hint_childs = get_hint_pep586_literals(
hint=hint_curr,
exception_prefix=_EXCEPTION_PREFIX,
)
# Initialize the code type-checking this pith against this
# hint to the substring prefixing all such code.
func_curr_code = PEP586_CODE_HINT_PREFIX_format(
pith_curr_assign_expr=pith_curr_assign_expr,
#FIXME: If "typing.Literal" is ever extended to support
#substantially more types (and thus actually becomes
#useful), optimize the construction of the "types" set
#below to instead leverage a similar
#"acquire_object_typed(set)" caching solution as that
#currently employed for unions. For now, we only shrug.
# Python expression evaluating to a tuple of the unique
# types of all literal objects subscripting this hint.
hint_child_types_expr=add_func_scope_types(
# Set comprehension of all unique literal objects
# subscripting this hint, implicitly discarding all
# duplicate such objects.
types={
type(hint_child)
for hint_child in hint_childs
},
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
),
)
# For each literal object subscripting this hint...
for hint_child in hint_childs:
# Generate and append efficient code type-checking
# this data validator by embedding this code as is.
func_curr_code += PEP586_CODE_HINT_LITERAL_format(
pith_curr_var_name=pith_curr_var_name,
# Python expression evaluating to this object.
hint_child_expr=add_func_scope_attr(
attr=hint_child,
func_scope=func_wrapper_scope,
exception_prefix=(
_EXCEPTION_PREFIX_FUNC_WRAPPER_LOCAL),
),
)
# Munge this code to...
func_curr_code = (
# Strip the erroneous " or" suffix appended by the last
# child hint from this code.
f'{func_curr_code[:_LINE_RSTRIP_INDEX_OR]}'
# Suffix this code by the appropriate substring.
f'{PEP586_CODE_HINT_SUFFIX}'
).format(indent_curr=indent_curr)
# Else, this hint is *NOT* a PEP 586-compliant type hint.
# ............{ UNSUPPORTED }............
# Else, this hint is neither shallowly nor deeply supported and
# is thus unsupported. Since an exception should have already
# been raised above in this case, this conditional branch
# *NEVER* be triggered. Nonetheless, raise an exception.
else:
raise BeartypeDecorHintPepUnsupportedException(
f'{_EXCEPTION_PREFIX_HINT}'
f'{repr(hint_curr)} unsupported but '
f'erroneously detected as supported.'
)
# ................{ NON-PEP }................
# Else, this hint is *NOT* PEP-compliant.
#
# ................{ NON-PEP ~ type }................
# If this hint is a non-"typing" class...
#
# Note that:
# * This test is intentionally performed *AFTER* that testing whether
# this hint is PEP-compliant, thus guaranteeing this hint to be a
# PEP-noncompliant non-"typing" class rather than a PEP-compliant
# type hint originating from such a class. Since many hints are both
# PEP-compliant *AND* originate from such a class (e.g., the "List"
# in "List[int]", PEP-compliant but originating from the
# PEP-noncompliant builtin class "list"), testing these hints first
# for PEP-compliance ensures we generate non-trivial code deeply
# type-checking these hints instead of trivial code only shallowly
# type-checking the non-"typing" classes from which they originate.
# * This class is guaranteed to be a subscripted argument of a
# PEP-compliant type hint (e.g., the "int" in "Union[Dict[str, str],
# int]") rather than the root type hint. Why? Because if this class
# were the root type hint, it would have already been passed into a
# faster submodule generating PEP-noncompliant code instead.
elif isinstance(hint_curr, type):
# Code type-checking the current pith against this type.
func_curr_code = PEP484_CODE_HINT_INSTANCE_format(
pith_curr_expr=pith_curr_expr,
# Python expression evaluating to this type.
hint_curr_expr=add_func_scope_type(
cls=hint_curr,
func_scope=func_wrapper_scope,
exception_prefix=_EXCEPTION_PREFIX_HINT,
),
)
# ................{ NON-PEP ~ bad }................
# Else, this hint is neither PEP-compliant *NOR* a class. In this case,
# raise an exception. Note that:
# * This should *NEVER* happen, as the "typing" module goes to great
# lengths to validate the integrity of PEP-compliant types at
# declaration time.
# * The higher-level die_unless_hint_nonpep() validator is
# intentionally *NOT* called here, as doing so would permit both:
# * PEP-noncompliant forward references, which could admittedly be
# disabled by passing "is_str_valid=False" to that call.
# * PEP-noncompliant tuple unions, which currently *CANNOT* be
# disabled by passing such an option to that call.
else:
raise BeartypeDecorHintPepException(
f'{_EXCEPTION_PREFIX_HINT}{repr(hint_curr)} '
f'not PEP-compliant.'
)
# ................{ CLEANUP }................
# Inject this code into the body of this wrapper.
func_wrapper_code = replace_str_substrs(
text=func_wrapper_code,
old=hint_curr_placeholder,
new=func_curr_code,
)
# Nullify the metadata describing the previously visited hint in this
# list for safety.
hints_meta[hints_meta_index_curr] = None
# Increment the 0-based index of metadata describing the next visited
# hint in the "hints_meta" list *BEFORE* visiting that hint but *AFTER*
# performing all other logic for the currently visited hint.
hints_meta_index_curr += 1
# ..................{ CLEANUP }..................
# Release the fixed list of all such metadata.
release_fixed_list(hints_meta)
# If the Python code snippet to be returned remains unchanged from its
# initial value, the breadth-first search above failed to generate code. In
# this case, raise an exception.
#
# Note that this test is inexpensive, as the third character of the
# "func_root_code" code snippet is guaranteed to differ from that of
# "func_wrapper_code" code snippet if this function behaved as expected,
# which it should have... but may not have, which is why we're testing.
if func_wrapper_code == func_root_code:
raise BeartypeDecorHintPepException(
f'{_EXCEPTION_PREFIX_HINT}{repr(hint_root)} unchecked.')
# Else, the breadth-first search above successfully generated code.
# ..................{ CODE ~ locals }..................
# If type-checking the root pith requires a pseudo-random integer...
if is_var_random_int_needed:
# Pass the random.getrandbits() function required to generate this
# integer to this wrapper function as an optional hidden parameter.
func_wrapper_scope[_ARG_NAME_GETRANDBITS] = getrandbits
# ..................{ CODE ~ suffix }..................
# Tuple of the unqualified classnames referred to by all relative forward
# references visitable from this hint converted from that set to reduce
# space consumption after memoization by @callable_cached, defined as...
hint_forwardrefs_class_basename_tuple = (
# If *NO* relative forward references are visitable from this root
# hint, the empty tuple;
()
if hint_forwardrefs_class_basename is None else
# Else, that set converted into a tuple.
tuple(hint_forwardrefs_class_basename)
)
# Return all metadata required by higher-level callers.
return (
func_wrapper_code,
func_wrapper_scope,
hint_forwardrefs_class_basename_tuple,
)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Beartype decorator **type-checking expression magic** (i.e., global string
constants embedded in the implementations of boolean expressions type-checking
arbitrary objects against arbitrary PEP-compliant type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._util.error.utilerror import EXCEPTION_PLACEHOLDER
from itertools import count
# ....................{ EXCEPTION                         }....................
EXCEPTION_PREFIX_FUNC_WRAPPER_LOCAL = (
    EXCEPTION_PLACEHOLDER + 'wrapper parameter ')
'''
Human-readable substring prefixing exception messages describing a new
parameter required by the current root type hint and thus added to the
signature of the current wrapper function.
'''


EXCEPTION_PREFIX_HINT = EXCEPTION_PLACEHOLDER + 'type hint '
'''
Human-readable substring prefixing exception messages describing the current
root type hint generically (i.e., regardless of which specific PEP standard
this hint conforms to).
'''
# ....................{ HINT ~ meta                       }....................
# 0-based indices into each tuple of hint metadata, hard-coded below as
# contiguous integer literals counting up from 0. When extending this
# metadata, append new indices *WITHOUT* reordering existing indices.
HINT_META_INDEX_HINT = 0
'''
0-based index into each tuple of hint metadata providing the currently
visited hint.

For both space and time efficiency, this metadata is intentionally stored as
0-based integer indices of an unnamed tuple rather than:

* Human-readable fields of a named tuple, which incurs space and time costs we
  would rather *not* pay.
* 0-based integer indices of a tiny fixed list. Previously, this metadata was
  actually stored as a fixed list. However, exhaustive profiling demonstrated
  reinitializing each such list by slice-assigning that list's items from a
  tuple to be faster than individually assigning these items:

  .. code-block:: shell-session

     $ echo 'Slice w/ tuple:' && command python3 -m timeit -s \
          'muh_list = ["a", "b", "c", "d",]' \
          'muh_list[:] = ("e", "f", "g", "h",)'
     Slice w/ tuple:
     2000000 loops, best of 5: 131 nsec per loop
     $ echo 'Slice w/o tuple:' && command python3 -m timeit -s \
          'muh_list = ["a", "b", "c", "d",]' \
          'muh_list[:] = "e", "f", "g", "h"'
     Slice w/o tuple:
     2000000 loops, best of 5: 138 nsec per loop
     $ echo 'Separate:' && command python3 -m timeit -s \
          'muh_list = ["a", "b", "c", "d",]' \
          'muh_list[0] = "e"
     muh_list[1] = "f"
     muh_list[2] = "g"
     muh_list[3] = "h"'
     Separate:
     2000000 loops, best of 5: 199 nsec per loop

So, not only is there no performance benefit to flattened fixed lists, there
are demonstrable performance costs.
'''


HINT_META_INDEX_PLACEHOLDER = 1
'''
0-based index into each tuple of hint metadata providing the **current
placeholder type-checking substring** (i.e., placeholder to be globally
replaced by a Python code snippet type-checking the current pith expression
against the hint described by this metadata on visiting that hint).

This substring is the indirection allowing the currently visited parent hint
to defer and delegate the generation of code type-checking each child argument
of that hint until the later time at which that child argument is visited.

Example
----------
For example, the
:func:`beartype._decor._hint._pep._pephint.make_func_wrapper_code` function
might generate intermediary code resembling the following on visiting the
:data:`Union` parent of a ``Union[int, str]`` object *before* visiting either
the :class:`int` or :class:`str` children of that object:

    if not (
        @{0}! or
        @{1}!
    ):
        raise get_beartype_violation(
            func=__beartype_func,
            pith_name=$%PITH_ROOT_NAME/~,
            pith_value=__beartype_pith_root,
        )

Note the unique substrings ``"@{0}!"`` and ``"@{1}!"`` in that code, which that
function iteratively replaces with code type-checking each of the child
arguments of that :data:`Union` parent (i.e., :class:`int`, :class:`str`). The
final code memoized by that function might then resemble:

    if not (
        isinstance(__beartype_pith_root, int) or
        isinstance(__beartype_pith_root, str)
    ):
        raise get_beartype_violation(
            func=__beartype_func,
            pith_name=$%PITH_ROOT_NAME/~,
            pith_value=__beartype_pith_root,
        )
'''


HINT_META_INDEX_PITH_EXPR = 2
'''
0-based index into each tuple of hint metadata providing the **current pith
expression** (i.e., Python code snippet evaluating to the possibly nested
object of the passed parameter or return value currently being type-checked
against the currently visited hint).
'''


HINT_META_INDEX_PITH_VAR_NAME = 3
'''
0-based index into each tuple of hint metadata providing the **current pith
variable name** (i.e., name of the unique local variable assigned the value of
the current pith by either a prior assignment statement or expression).
'''


HINT_META_INDEX_INDENT = 4
'''
0-based index into each tuple of hint metadata providing the **current
indentation** (i.e., Python code snippet expanding to the level of indentation
appropriate for the currently visited hint).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
# ....................{ TODO }....................
#FIXME: Add support for Python 3.10-specific PEPs and thus:
#* PEP 604-compliance (e.g., "def square(number: int | float): pass"). Note
# PEP 604 thankfully preserves backward compatibility with "typing.Union":
# The existing typing.Union and | syntax should be equivalent.
# int | str == typing.Union[int, str]
# This means that we should:
# * Require no changes to the "beartype" package to support PEP 604.
# * Add unit tests explicitly support PEP 604 compliance under Python >= 3.10
# to the "beartype_test" package.
# * Note this support in documentation.
#* PEP 612-compliance. Since we don't currently support callable annotations,
# we probably can't extend that non-existent support to PEP 612. Nonetheless,
# we *ABSOLUTELY* should ensure that we do *NOT* raise exceptions when passed
# the two new "typing" singletons introduced by this:
# * "typing.ParamSpec", documented at:
# https://docs.python.org/3.10/library/typing.html#typing.ParamSpec
# * "typing.Concatenate", documented at:
# https://docs.python.org/3.10/library/typing.html#typing.Concatenate
# Ideally, we should simply ignore these singletons for now in a similar
# manner to how we currently ignore type variables. After all, these
# singletons are actually a new unique category of callable-specific type
# variables. See also:
# https://www.python.org/dev/peps/pep-0612
#* PEP 647-compliance. PEP 647 introduces a silly new subscriptable
# "typing.TypeGuard" attribute. With respect to runtime type-checking, *ALL*
# "typing.TypeGuard" subscriptions unconditionally reduce to "bool": e.g.,
# from typing import TypeGuard, Union
#
# # This...
# def muh_func(muh_param: object) -> TypeGuard[str]:
# return isinstance(muh_param, str) # <-- gods help us
#
# # This conveys the exact same runtime semantics as this.
# def muh_func(muh_param: object) -> bool:
# return isinstance(muh_param, str) # <-- gods help us
# Lastly, note that (much like "typing.NoReturn") "typing.TypeGuard"
# subscriptions are *ONLY* usable as return annotations. Raise exceptions, yo.
#FIXME: *WOOPS.* The "LRUDuffleCacheStrong" class designed below assumes that
#calculating the semantic height of a type hint (e.g., 3 for the complex hint
#Optional[int, dict[Union[bool, tuple[int, ...], Sequence[set]], list[str]]]
#is largely trivial. It isn't -- at all. Computing that without a context-free
#recursion-esque algorithm of some sort is literally infeasible. We absolutely
#*MUST* get that height right, since we'll be exponentiating that height to
#estimate space consumption of arbitrary objects. Off-by-one errors are
#unacceptable when the difference between a height of 2 and a height of 3 means
#tens of thousands in additional estimated space consumption.
#
#So. How do we do this, then? *SIMPLE.* Okay, not simple -- but absolutely
#beneficial for a medley of unrelated pragmatic reasons and thus something we
#need to pursue anyway regardless of the above concerns.
#
#The solution is to make the breadth-first search (BFS) internally performed
#by the make_func_wrapper_code() function below more recursion-esque. We will
#*NOT* be refactoring that function to leverage:
#
#* Recursion rather than iteration for all of the obvious reasons.
#* A stack-like depth-first search (DFS) approach. While implementing a DFS
# with iteration can technically be done, doing so imposes non-trivial
# technical constraints because you then need to store interim results (which
# in a proper recursive function would simply be local variables) as you
# iteratively complete each non-leaf node. That's horrifying. So, we'll be
# preserving our breadth-first search (BFS) approach. The reason a BFS is
# often avoided in the real world is space concerns: a BFS consumes
# significantly more space than a comparable DFS, because:
# * The BFS constructs the entire tree before operating on that tree.
# * The DFS only constructs a vertical slice of the entire tree before
# operating only on that slice.
# In our case, however, space consumption of a BFS versus DFS is irrelevant.
# Why? Because type hints *CANNOT* be deeply nested without raising recursion
# limit errors from deep within the CPython interpreter, as we well know.
# Ergo, a BFS will only consume slightly more temporary space than a DFS. This
# means a "FixedList" of the same size trivially supports both.
#
#First, let's recap what we're currently doing:
#
#* In a single "while ...:" loop, we simultaneously construct the BFS tree
# (stored in a "FixedList" of tuples) *AND* output results from that tree as
# we are dynamically constructing it.
#
#The "simultaneously" is the problem there. We're disappointed we didn't
#realize it sooner, but our attempt to do *EVERYTHING* in a single pass is why
#we had such extraordinary difficulties correctly situating code generated by
#child type hints into the code generated for parent type hints. We
#circumvented the issue by repeatedly performing a global search-and-replace on
#the code being generated, which is horrifyingly inefficient *AND* error-prone.
#We should have known then that something was wrong. Sadly, we proceeded.
#
#Fortunately, this is the perfect moment to correct our wrongs -- before we
#proceed any deeper into a harmful path dependency. How? By splitting our
#current monolithic BFS algorithm into two disparate BFS phases -- each
#mirroring the behaviour of a recursive algorithm:
#
#1. In the first phase, a "while ...:" loop constructs the BFS tree by
# beginning at the root hint, iteratively visiting all child hints, and
# inserting metadata describing those hints into our "hints_meta" list as we
# currently do. That's it. That's all. But that's enough. This construction
# then gives us efficient random access over the entire type hinting
# landscape, which then permits us to implement the next phase -- which does
# the bulk of the work. To do so, we'll add additional metadata to our
# current "hint_meta" tuple: e.g.,
# * "_HINT_META_INDEX_CHILD_FIRST_INDEX", the 0-based index into the
# "hints_meta" FixedList of the first child hint of the current hint if any
# *OR* "None" otherwise. Since this is a BFS, that child hint could appear
# at any 0-based index following the current hint; finding that child hint
# during the second phase thus requires persisting the index of that hint.
# Note that the corresponding index of the last child hint of the current
# hint need *NOT* be stored, as adding the length of the argument list of
# the current hint to the index of the first child hint trivially gives the
# index of the last child hint.
# * "_HINT_META_INDEX_CODE", the Python code snippet type-checking the
# current hint to be generated by the second phase.
# * "_HINT_META_INDEX_HEIGHT", the 1-based height of the current hint in this
# BFS tree. Leaf nodes have a height of 1. All non-leaf nodes have a height
# greater than 1. This height *CANNOT* be defined during the first phase
# but *MUST* instead be deferred to the second phase.
# * ...probably loads more stuff, but that's fine.
#2. In the second phase, another "while ...:" loop generates a Python code
# snippet type-checking the root hint and all child hints visitable from that
# hint in full by beginning *AT THE LAST CHILD HINT ADDED TO THE*
# "hints_meta" FixedList, generating code type-checking that hint,
# iteratively visiting all hints *IN THE REVERSE DIRECTION BACK UP THE TREE*,
# and so on.
#
#That's insanely swag. It shames us that we only thought of it now. *sigh*
#FIXME: Note that this new approach will probably (hopefully only slightly)
#reduce decoration efficiency. This means that we should revert to optimizing
#the common case of PEP-noncompliant classes. Currently, we uselessly iterate
#over these classes with the same BFS below as we do PEP-compliant classes --
#which is extreme overkill. This will be trivial (albeit irksome) to revert,
#but it really is fairly crucial. *sigh*
#FIXME: Now that we actually have an audience (yay!), we *REALLY* need to avoid
#breaking anything. But implementing the above refactoring would absolutely
#break everything for an indeterminate period of time. So how do we do this?
#*SIMPLE*. We leave this submodule as is *UNTIL* our refactoring passes tests.
#In the meanwhile, we safely isolate our refactoring work to the following new
#submodules:
#* "_pephinttree.py", implementing the first phase detailed above.
#* "_pephintgene.py", implementing the second phase detailed above.
#
#To test, we locally change a simple "import" statement in the parent
#"_pepcode" submodule and then revert that import before committing. Rinse
#until tests pass, which will presumably take several weeks at least.
#FIXME: Note that there exists a significant optimization that we *ABSOLUTELY*
#should add to these new modules. Currently, the "hints_meta" data structure is
#represented as a FixedList of size j, each item of which is a k-length tuple.
#If you briefly consider it, however, that structure could equivalently be
#represented as a FixedList of size j * k, where we simply store the items
#previously stored in each k-length tuple directly in that FixedList itself.
#
#Iterating forward and backward by single hints over that FixedList is still
#trivial. Rather than incrementing or decrementing an index by 1, we instead
#increment or decrement an index by k.
#
#The resulting structure is guaranteed to be considerably more space-efficient,
#due to being both contiguous in memory and requiring only a single object
#(and thus object dictionary) to maintain. Cue painless forehead slap.
#FIXME: See additional commentary at this front-facing issue:
# https://github.com/beartype/beartype/issues/31#issuecomment-799938621
#FIXME: Actually, *FORGET EVERYTHING ABOVE.* We actually do want to
#fundamentally refactor this iterative BFS into an iterative DFS. Why? Because
#we increasingly need to guard against both combinatorial explosion *AND*
#recursion. That's imperative -- and we basically *CANNOT* do that with the
#current naive BFS approach. Yes, implementing a DFS is somewhat more work. But
#it's *NOT* infeasible. It's very feasible. More importantly, it's necessary.
#Since @beartype should eventually handle recursive type hints, we'll need
#recursion guards anyway. Specifically:
#* Guarding against recursion would be trivial if we were actually using a
# depth-first algorithm. While delving, you'd just maintain a local set of the
# IDs of all type hints previously visited. You'd then avoid delving into
# anything if the ID of that thing is already in that set. Likewise, after
# delving into that thing, you'd then pop the ID of that thing off that set.
#* Likewise, handling combinatorial explosion would *ALSO* be trivial if we were
# actually using a depth-first algorithm. By "combinatorial explosion," we are
# referring to what happens if we try to type-check dataclass and
# "typing.NamedTuple" instances that are *NOT* decorated by @beartype.
# Type-checking those instances has to happen at @beartype call time,
# obviously. There are actually two kinds of combinatorial explosion at play
# here:
# * Combinatorial explosion while type-checking at @beartype call time. This is
# avoidable by simply type-checking *EXACTLY ONE* random field of each
# "NamedTuple" instance on each call. Simple. "NamedTuple" instances are
# literally just tuples, so random access is trivial. (Type-checking random
# fields of dataclass instances is less trivial but still feasible; just pass
# a list whose values are dataclass field names as a private
# @beartype-specific parameter to type-checking @beartype wrapper functions.
# That list then effectively maps from 0-based indices to dataclass field
# names. We then perform random access on that list to generate random field
# names, which can then be accessed with reasonable efficiency.)
# * Combinatorial explosion while generating type-checking code at @beartype
# decoration time. This is the problem, folks. Why? Because we currently
# employ a breadth-first search (BFS), which requires generating the entire
# tree of all type hints to be visited. Currently, that's fine, because type
# hints are typically compact; exhausting memory is unlikely. But as soon as
# we start generating type-checking code for "NamedTuple" instances *NOT*
# decorated by @beartype, we have to begin visiting *ALL* type hints
# annotating *ALL* fields of those type hints by adding those hints to our
# BFS tree. Suddenly, combinatorial explosion becomes a very real thing.
#
#The solution is to radically transform our existing BFS search into a DFS
#search. Again, this is something we would need to do eventually anyway to
#handle recursive type hints, because how can you guard against recursion in an
#iterative BFS anyway? And... anyway, DFS is simply the right approach. It's
#what we should have done all along, clearly. It's also non-trivial, which is
#why we didn't do it all along.
#
#For example, for each type hint visited by a DFS, we'll need to additionally
#record metadata like:
#* "_HINT_META_INDEX_ARGS_INDEX_NEXT", the 0-based index into the
# "hint.__args__" tuple (listing all child type hints for the currently visited
# type hint "hint") of the next child type hint of the associated parent type
# hint to be visited. When "_HINT_META_INDEX_ARGS_INDEX_NEXT ==
# len(hint.__args__)", the DFS has successfully visited all child type hints of
# the currently visited type hint "hint" and should now iteratively recurse up
# (rather than down) the DFS stack.
#* "_HINT_META_INDEX_CODE", the Python code snippet type-checking the currently
# visited hint. This code snippet will be gradually filled in as child type
# hints of the currently visited type hint are themselves visited. Indeed, this
# implies that the currently visited parent type hint *MUST* always be able to
# access the "_HINT_META_INDEX_CODE" entry of the most recently visited child
# type hint of that parent -- which, in turn, implies that the entire
# "hints_meta" FixedList of each child type hint must be temporarily preserved.
# Specifically, when recursing up the DFS stack, each parent type hint will:
# 1. Access the "hints_meta" FixedList of its most recently visited child type
# to fill in its own "_HINT_META_INDEX_CODE".
# 2. Pop that "hints_meta" FixedList of its most recently visited child type
# hint off the DFS stack.
#
#Some type hints like unions will additionally require hint-specific entries in
#their "hints_meta" FixedList. The code for a union *CANNOT* be efficiently
#generated until *ALL* child type hints of that union have been. Although
#hint-specific entries could be appended to the "hints_meta" FixedList
#structure, doing so would rapidly increase the memory consumption of all other
#types of hints for no particularly good reason. Instead, a single new
#hint-specific entry should be added:
#* "_HINT_META_INDEX_DATA", an arbitrary object required by this kind of hint.
# In the case of unions, this will be an instance of a dataclass resembling:
#     @dataclass
#     class _HintMetaDataUnion(object):
# HINTS_CHILD_NONPEP = set()
# '''
# Set of all previously visited PEP-noncompliant child type hints
# (e.g., isinstanceable classes) of this parent union type hint.
# '''
#
# HINTS_CHILD_PEP = set()
# '''
# Set of all previously visited PEP-compliant child type hints of this
# parent union type hint.
# '''
#
# Naturally, "_HintMetaDataUnion" instances should be cached with the standard
# acquire_object() and release_object() approach. *shrug*
#
#Oh! Wait. Nevermind. We don't actually need "_HINT_META_INDEX_DATA" or
#"_HintMetaDataUnion". It's true that we would need both if we needed to handle
#unions strictly with a classical DFS approach -- but there's *NO* pragmatic
#reason to do so. Instead, we'll just continue handling unions as we currently
#do: by iterating over child type hints of unions and separating them into
#PEP-compliant and PEP-noncompliant sets. So, basically a mini-BFS over unions
#*BEFORE* we then delve into their PEP-compliant child type hints in the
#standard DFS way. That's fine, because we're *NOT* purists here. Whatever is
#fastest and simplest (in that order) is what wins.
#
#Note that a DFS still needs to expensively interpolate code snippets into
#format templates. There's *NO* way around that; since dynamic code generation
#is what we've gotten ourselves into here, string munging is a necessary "good."
#FIXME: Note that there exist four possible approaches to random item selection
#for arbitrary containers depending on container type. Either the actual pith
#object (in descending order of desirability):
#* Satisfies "collections.abc.Sequence" (*NOTE: NOT* "typing.Sequence", as we
# don't particularly care how the pith is type-hinted for this purpose), in
# which case the above approach trivially applies.
#* Else is *NOT* a one-shot container (e.g., generator and... are there any
# other one-shot container types?) and is *NOT* slotted (i.e., has no
# "__slots__" attribute), then generalize the mapping-specific
# _get_dict_nonempty_random_key() approach delineated below.
#* Else is *NOT* a one-shot container (e.g., generator and... are there any
# other one-shot container types?) but is slotted (i.e., has a "__slots__"
# attribute), then the best we can do is the trivial O(1) approach by
# calling "{hint_child_pith} := next({hint_curr_pith})" to unconditionally
# check the first item of this container. What you goin' do? *shrug* (Note
# that we could try getting around this with a global cache of weak references
# to iterators mapped on object ID, but... ain't nobody got time or interest
# for that. Also, prolly plenty dangerous.)
#* Else is a one-shot container, in which case *DO ABSOLUTELY NUTHIN'.*
#FIXME: We should ultimately make this user-configurable (e.g., as a global
#configuration setting). Some users might simply prefer to *ALWAYS* look up a
#fixed 0-based index (e.g., "0", "-1"). For the moment, however, the above
#probably makes the most sense as a reasonably general-purpose default.
#FIXME: [THIS-IS-BOSS] *AH-HA.* First, note that the above
#_get_dict_nonempty_random_key() concept, while clever, is largely useless. Why?
#Because *ALL* builtin C-based reiterables (e.g., dict, set) are slotted. We
#might as well just ignore that and leap straight to a general-purpose answer.
#
#Indeed, we've *FINALLY* realized how to genuinely perform iterative access to
#arbitrary non-sequence containers in an O(1) manner *WITHOUT* introducing
#memory leaks or requiring asynchronous background shenanigans. The core conceit
#is quite simple, really. Internally:
#
#* @beartype maintains two global dictionaries:
# * A global "_REITERABLE_ID_TO_WEAKPROXY" dictionary mapping from the object
#    ID of each reiterable that has been previously type-checked by @beartype
#    at least once to a strong reference to a "weakref.proxy" instance safely
# proxying that reiterable.
# * A global "_REITERABLE_ID_TO_ITER" dictionary mapping from the object ID of
#    each reiterable that has been previously type-checked by @beartype
#    at least once to a strong reference to an iterator over that
# "weakref.proxy" instance safely proxying that reiterable.
#
# This approach substantially reduces the negative harms associated with memory
# leaks -- although one worst-case memory leak *DOES* still remain. Notably,
# since these proxies are themselves discrete objects, storing strong
# references to both these proxies and these iterators could under worst-case
# behaviour consume all available space. Ergo, this dictionary will need to be
# efficiently maintained as a large (but still limited) LRU cache. Ideally, the
# real-world size of this cache should be bound to a maximum of (say) 1MB space
# consumption. Since only proxy shim objects and iterators over those objects
# are stored (and we expect the size of these proxies and iterators to be quite
# small), this cache *SHOULD* be able to support an exceedingly large number of
# proxies before becoming full.
#
# Since this dictionary is only leveraged for type-checking, thread
# synchronization is irrelevant (although, of course, care should be taken to
# ensure that this dictionary remains internally consistent regardless of
# thread preemption).
#* @beartype provides a trivial _get_reiterable_item_next() getter for use in
# dynamically generated type-checking code, which will then call that getter
# rather than iter() on reiterables to retrieve an effectively random item from
# those reiterables. Internally, this getter leverages the above global
# dictionaries as follows:
# # Note that this getter assumes the passed reiterable to be *NON-EMPTY.*
# # It is the caller's responsibility to ensure that (e.g., by explicitly
# # calling "len(reiterable)" or "bool(reiterable)" *BEFORE* calling this
# # getter). Trivial, but worth noting. For efficiency, this getter
# # intentionally does *NOT* explicitly validate that constraint.
# def _get_reiterable_item_next(
# reiterable: _BeartypeReiterableTypes) -> object:
#
# #FIXME: Curiously, some C-based containers *DO* support weak
# #references. Crucially, this includes sets, frozensets, arrays, and
# #deques. Dicts, however, do *NOT*. Are dicts the only notable
# #exceptions? Not quite. *ANY* user-defined reiterable defining
# #"__slots__" that does *NOT* contain the string '__weakref__' also
# #does *NOT* support weak references. In short, we can really only
# #perform the following for a small subset of type hints: e.g.,
# #* "typing.Deque".
# #* "typing.FrozenSet".
# #* "typing.Set".
# #
# #Aaaaaaand... we're pretty sure that's it. Three is better than
# #nothing, of course. But... that's still not that great. We could try
# #to dynamically test for weak-referenceability -- except we're pretty
# #sure that that *CANNOT* be done efficiently in the general case.
# #We'd need to either:
# #* Call dir(), which dynamically creates and returns a new dict.
# # That's right out.
# #* Access "__dict__" directly, which is only defined for pure-Python
# # instances. That attribute does *NOT* exist for C-based instances.
# #
# #Actually, the most efficient detection heuristic would probably be:
# #* Define yet another global
# # "_REITERABLE_TYPE_TO_IS_WEAKREFFABLE" dictionary mapping from
# # reiterable types to boolean "True" only if those types can be
# # weakly referenced. This dictionary can be pre-initialized for
# # efficiency with the most common builtin C-based reiterable types
# # in a global context as follows:
# # _REITERABLE_TYPE_TO_IS_WEAKREFFABLE = {
# # DefaultDict: False,
# # dict: False,
# # deque: True,
# # frozenset: True,
# # set: True,
# # }
# # We don't bother LRU-bounding that. Size is irrelevant here.
#    #* Likewise, define yet another global
# # "_REITERABLE_TYPE_TO_IS_LENGTHHINTED" dictionary mapping from
# # reiterable types to boolean "True" only if those types define
# # iterators defining semantically meaningful __length_hint__()
# # dunder methods. Since detecting that at runtime is infeasible, we
# # simply preallocate that to those we do know about:
# # _REITERABLE_TYPE_TO_IS_LENGTHHINTED = {
# # deque: True, # <-- unsure if true, actually *shrug*
# # frozenset: True,
# # set: True,
# # }
# #* Given that, we then efficiently detect weak-referenceability:
#
# REITERABLE_TYPE = reiterable.__class__
#
# #FIXME: Again, optimize ...get() method access, please.
# reiterable_is_weakreffable = (
# _REITERABLE_TYPE_TO_IS_WEAKREFFABLE.get(REITERABLE_TYPE))
#
# if reiterable_is_weakreffable is None:
# reiterable_dict = getattr(reiterable, '__dict__')
# #FIXME: Alternately, we could try just taking a weak proxy
# #of this reiterable and catching exceptions. Although
# #slower, this caching operation only occurs once per type.
# #For now, let's run with this faster heuristic.
# if not reiterable_dict:
# reiterable_is_weakreffable = False
# else:
# reiterable_slots = reiterable_dict.get('__slots__')
# reiterable_is_weakreffable = (
# reiterable_slots and
# '__weakref__' not in reiterable_slots
# )
# _REITERABLE_TYPE_TO_IS_WEAKREFFABLE[REITERABLE_TYPE] = (
# reiterable_is_weakreffable)
#
# # If this reiterable is a C-based container that does *NOT* support
# # weak references, reduce to simply returning the first item of this
# # reiterable.
# if not reiterable_is_weakreffable:
# return next(iter(reiterable))
#
# REITERABLE_ID = id(reiterable)
#
# #FIXME: Optimize by storing and calling a bound
# #"_REITERABLE_ID_TO_ITER_get" method instead, please.
# reiterable_iter = _REITERABLE_ID_TO_ITER.get(REITERABLE_ID)
#
# if reiterable_iter:
# #FIXME: Note that this can be conditionally optimized for
# #iterators that define the PEP 424-compliant __length_hint__()
# #dunder method -- which thankfully appears to be *ALL* iterators
# #over C-based reiterables (e.g., dicts, sets). For these objects,
# #__length_hint__() provides the number of remaining items in the
# #iterator. Ergo, this can be optimized as follows:
# # if reiterable_iter.__length_hint__():
# # return next(reiterable_iter)
# # else:
# # ...
# #
# #The issue, of course, is that that *ONLY* works for strict type
# #hints constrained to C-based iterables (e.g.,
# #"typing.Dict[...]"). General-purpose type hints like
# #"typing.Mapping[...]" would be inapplicable, sadly. For the
# #latter, dynamically testing for the existence of a semantically
# #meaningful __length_hint__() getter would consume far more time
# #than calling that getter would actually save. *shrug*
# try:
# return next(reiterable_iter)
# except StopIteration:
# #FIXME: Violates DRY a bit, but more efficient. *shrug*
# reiterable_weakproxy = _REITERABLE_ID_TO_WEAKPROXY[
# REITERABLE_ID]
#             reiterable_iter = _REITERABLE_ID_TO_ITER[REITERABLE_ID] = iter(
#                 reiterable_weakproxy)
#             return next(reiterable_iter)
#
# if len(_REITERABLE_ID_TO_WEAKPROXY) >= _REITERABLE_CACHE_MAX_LEN:
# #FIXME: Efficiently kick out the least-used item from both the
# #"_REITERABLE_ID_TO_WEAKPROXY" and "_REITERABLE_ID_TO_ITER"
# #dictionaries here. Research exactly how to do that. Didn't we
# #already implement an efficient LRU somewhere in @beartype?
#
# reiterable_weakproxy = _REITERABLE_ID_TO_WEAKPROXY[REITERABLE_ID] = (
# proxy(reiterable))
#     reiterable_iter = _REITERABLE_ID_TO_ITER[REITERABLE_ID] = iter(
#         reiterable_weakproxy)
#     return next(reiterable_iter)
#FIXME: Pretty boss, if we do say so. And we do. There are also considerable
#opportunities for both macro- and microoptimization. The biggest
#macrooptimization would be doing away entirely with the
#"_REITERABLE_ID_TO_WEAKPROXY" cache. Strictly speaking, we only actually need
#the "_REITERABLE_ID_TO_ITER" cache. Doing away with the
#"_REITERABLE_ID_TO_WEAKPROXY" cache is *PROBABLY* the right thing to do in most
#cases. Why? Because we don't necessarily expect that @beartype type-checkers
#will exhaust all available items for most reiterables. But we *DO* know that
#all reiterables that will be type-checked will be type-checked at least once.
#In other words, the trailing code in _get_reiterable_item_next() is guaranteed
#to *ALWAYS* happen at least once per reiterable (so we should optimize that);
#conversely, the leading code that restarts iteration from the beginning only
#happens in edge cases for smaller reiterables passed or returned frequently
#between @beartype-decorated callables (so we shouldn't bother optimizing that).
#Optimizing away "_REITERABLE_ID_TO_WEAKPROXY" then yields:
#
# #FIXME: Don't even bother calling this getter with "dict" objects. The
#    #caller should explicitly perform a "pith.__class__ is dict" check to
#    #switch to more efficient "next(iter(pith.keys()))" and
#    #"next(iter(pith.values()))" logic when the pith is a "dict" object. Note
# #that user-defined "dict" subclasses are fine, however. *facepalm*
# def _get_reiterable_item_next(
# reiterable: _BeartypeReiterableTypes) -> object:
#
# REITERABLE_TYPE = reiterable.__class__
#
# #FIXME: Again, optimize ...get() method access, please.
# reiterable_is_weakreffable = (
# _REITERABLE_TYPE_TO_IS_WEAKREFFABLE.get(REITERABLE_TYPE))
#
# if reiterable_is_weakreffable is None:
# reiterable_dict = getattr(reiterable, '__dict__')
# #FIXME: Alternately, we could try just taking a weak proxy
# #of this reiterable and catching exceptions. Although
# #slower, this caching operation only occurs once per type.
# #For now, let's run with this faster heuristic.
# if not reiterable_dict:
# reiterable_is_weakreffable = False
# else:
# reiterable_slots = reiterable_dict.get('__slots__')
# reiterable_is_weakreffable = (
# reiterable_slots and
# '__weakref__' not in reiterable_slots
# )
# _REITERABLE_TYPE_TO_IS_WEAKREFFABLE[REITERABLE_TYPE] = (
# reiterable_is_weakreffable)
#
# # If this reiterable is a C-based container that does *NOT* support
# # weak references, reduce to simply returning the first item of this
# # reiterable.
# if not reiterable_is_weakreffable:
# return next(iter(reiterable))
#
# REITERABLE_ID = id(reiterable)
#
# #FIXME: Optimize by storing and calling a bound
# #"_REITERABLE_ID_TO_ITER_get" method instead, please.
# reiterable_iter = _REITERABLE_ID_TO_ITER.get(REITERABLE_ID)
#
# if reiterable_iter:
# #FIXME: Optimize us up, yo!
# if _REITERABLE_TYPE_TO_IS_LENGTHHINTED.get(REITERABLE_TYPE):
# if reiterable_iter.__length_hint__():
# return next(reiterable_iter)
# else:
# try:
# return next(reiterable_iter)
# except StopIteration:
# pass
# elif len(_REITERABLE_ID_TO_ITER) >= _REITERABLE_CACHE_MAX_LEN:
# #FIXME: Efficiently kick out the least-used item from the
# #"_REITERABLE_ID_TO_ITER" dictionary here. Research exactly how
# #to do that. Didn't we already implement an efficient LRU
# #somewhere in @beartype?
# #FIXME: *AH-HA!* Forget LRU. Seriously. LRU would impose too
# #much overhead here, as we'd need to update the LRU on each
# #access. Instead, let's just friggin *CLEAR* the entire cache
# #here. Yes, that's right! Nuke it from orbit, bois! So, what?
# #Right? Who cares if we start over from zero? Nobody! It's
# #minimal overhead to just start iterating things all over again.
# #And if the cache is full up, that's a good indication that the
# #caller has gone off the rails a bit, anyway.
# _REITERABLE_ID_TO_ITER.clear() # <-- *BOOM STICK*
#
# reiterable_iter = _REITERABLE_ID_TO_ITER[REITERABLE_ID] = iter(
# proxy(reiterable))
# return next(reiterable_iter)
#
#Seems legitimately boss, yes? Everything above is feasible and *REASONABLY*
#efficient -- but is that efficient enough? Honestly, that's probably fast
#enough for *MOST* use cases. If users justifiably complain about performance
#degradations, we could always provide a new "BeartypeConf" parameter defaulting
#to enabled to control this behaviour. *shrug*
#FIXME: Note that randomly checking mapping (e.g., "dict") keys and/or values
#will be non-trivial, as there exists no out-of-the-box O(1) approach in either
#the general case or the specific case of a "dict". Actually, there does -- but
#we'll need to either internally or externally maintain one dict.items()
#iterator for each passed mapping. We should probably investigate the space
#costs of that *BEFORE* doing so. Assuming minimal costs, one solution under
#Python >= 3.8 might resemble:
#* Define a new _get_dict_nonempty_random_key() function resembling:
# def _get_dict_nonempty_random_key(mapping: MappingType) -> object:
# '''
# Caveats
# ----------
# **This mapping is assumed to be non-empty.** If this is *not* the
# case, this function raises a :class:`StopIteration` exception.
# '''
# items_iter = getattr(mapping, '__beartype_items_iter', None)
# if items_iter is None:
# #FIXME: This should probably be a weak reference to prevent
# #unwanted reference cycles and hence memory leaks.
# #FIXME: We need to protect this both here and below with a
# #"try: ... except Exception: ..." block, where the body of the
# #"except Exception:" condition should probably just return
# #"beartype._util.utilobject.SENTINEL", as the only type hints
# #that would ever satisfy are type hints *ALL* objects satisfy
# #(e.g., "Any", "object").
# mapping.__beartype_items_iter = iter(mapping.items())
# try:
# return next(mapping.__beartype_items_iter)
# # If we get to the end (i.e., the prior call to next() raises a
# # "StopIteration" exception) *OR* anything else happens (i.e., the
# # prior call to next() raises a "RuntimeError" exception due to the
# # underlying mapping having since been externally mutated), just
# # start over. :p
# except Exception:
# mapping.__beartype_items_iter = None
#
#            # We could also recursively call ourselves: e.g.,
#            #     return _get_dict_nonempty_random_key(mapping)
# # However, that would be both inefficient and dangerous.
# mapping.__beartype_items_iter = iter(mapping.items())
# return next(mapping.__beartype_items_iter)
#* In "beartype._decor._main":
# import _get_dict_nonempty_random_key as __beartype_get_dict_nonempty_random_key
#* In code generated by this submodule, internally call that helper when
# checking keys of non-empty mappings *THAT ARE UNSLOTTED* (for obvious
# reasons) ala:
# (
# {hint_curr_pith} and
# not hasattr({hint_curr_pith}, '__slots__') and
# {!INSERT_CHILD_TEST_HERE@?(
# {hint_child_pith} := __beartype_get_dict_nonempty_random_key({hint_curr_pith}))
# )
# Obviously not quite right, but gives one the general gist of the thing.
#
#We could get around the slots limitation by using an external LRU cache
#mapping from "dict" object ID to items iterator, and maybe that *IS* what we
#should do. Actually... *NO.* We absolutely should *NOT* do that sort of thing
#anywhere in the codebase, as doing so would guaranteeably induce memory leaks
#by preventing "dict" objects cached in that LRU from being garbage collected.
#
#Note that we basically can't do this under Python < 3.8, due to the lack of
#assignment expressions there. Since _get_dict_nonempty_random_key() returns a
#new random key each call, we can't repeatedly call that for each child pith
#and expect the same random key to be returned. So, Python >= 3.8 only. *shrug*
#
#Note that the above applies to both immutable mappings (i.e., objects
#satisfying "Mapping" but *NOT* "MutableMapping"), which is basically none of
#them, and mutable mappings. Why? Because we don't particularly care if the
#caller externally modifies the underlying mapping between type-checks, even
#though the result is the above call to "next(mapping.__beartype_items_iter)"
#raising a "RuntimeError". Who cares? Whenever an exception occurs, we just
#restart iteration over from the beginning and carry on. *GOOD 'NUFF.*
#FIXME: *YIKES.* So, as expected, the above approach fundamentally fails on
#builtin dicts and sets. Why? Because *ALL* builtin types prohibit
#monkey-patching, which the above technically is. Instead, we need a
#fundamentally different approach.
#
#That approach is to globally (but thread-safely, obviously) cache *STRONG*
#references to iterators over dictionary "ItemsView" objects. Note that we
#can't cache weak references, as the garbage collector would almost certainly
#immediately dispose of them, entirely defeating the point. Of course, these
#references implicitly prevent garbage collection of the underlying
#dictionaries, which means we *ALSO* need a means of routinely removing these
#references from our global cache when these references are the only remaining
#references to the underlying dictionaries. Can we do any of this? We can.
#
#First, note that we can trivially obtain the number of live references to any
#arbitrary object by calling "sys.getrefcount(obj)". Note, however, that the
#count returned by this function is mildly non-deterministic. In particular,
#off-by-one issues are not merely edge cases but commonplace. Ergo:
#
# from sys import getrefcount
#
# def is_obj_nearly_dead(obj: object) -> bool:
# '''
# ``True`` only if there only exists one external strong reference to
# the passed object.
# '''
#
# # Note that the integer returned by this getter is intentionally *NOT*
# # tested for equality with "1". Why? Because:
# # * The "obj" parameter passed to this tester is an ignorable strong
# # reference to this object.
# # * The "obj" parameter passed to the getrefcount() getter is yet
# # another strong reference to this object.
# return getrefcount(obj) <= 3
#
#Second, note that neither the iterator API nor the "ItemsView" API provide a
#public means of obtaining a strong reference to the underlying dictionary.
#This means we *MUST* necessarily maintain for each dictionary a 2-tuple
#"(mapping, mapping_iter)", where:
#* "mapping" is a strong reference to that dictionary.
#* "mapping_iter" is an iterator over that dictionary's "ItemsView" object.
#
#This implies that we want to:
#* Define a new "beartype._util.cache.utilcachemapiter" submodule.
#* In that submodule:
# * Define a new global variable resembling:
# # Note that this is unbounded. There's probably no reasonable reason to
# # use an LRU-style bounded cache here... or maybe there is for safety to
# # avoid exhausting memory. Right.
# #
# # So, this should obviously be LRU-bounded at some point. Since Python's
#      # standard @lru_cache decorator is inefficient, we'll need to build that
#      # out ourselves, which means this is *NOT* an immediate priority.
# _MAP_ITER_CACHE = {}
# '''
# Mapping from mapping identifiers to 2-tuples
# ``(mapping: Mapping, mapping_iter: Iterator)``,
# where ``mapping`` is a strong reference to the mapping whose key is that
# mapping's identifier and ``mapping_iter`` is an iterator over that
# mapping's ``ItemsView`` object.
# '''
# * Define a new asynchronous cleanup_cache() function. See the
# cleanup_beartype() function defined below for inspiration.
#* Extensively unit test that submodule.
#
#Third, note that this means the above is_obj_nearly_dead() fails to apply to
#this edge case. In our case, a cached dictionary is nearly dead if and only if
#the following condition applies:
#
# def is_cached_mapping_nearly_dead(mapping: Mapping) -> bool:
# '''
#        ``True`` only if there only exists one external strong reference to
#        the passed mapping internally cached by the :func:`beartype.beartype`
# decorator.
# '''
#
# # Note that the integer returned by this getter is intentionally *NOT*
# # tested for equality with "1". Why? Because ignorable strong
# # references to this mapping include:
# # * The "mapping" parameter passed to this tester.
# # * The "mapping" parameter passed to the getrefcount() getter.
# # * This mapping cached by the beartype-specific global container
# # caching these mappings.
# # * The iterator over this mapping cached by the same container.
# return getrefcount(mapping) <= 5 # <--- yikes!
#
#Fourth, note that there are many different means of routinely removing these
#stale references from our global cache (i.e., references that are the only
#remaining references to the underlying dictionaries). For example, we could
#routinely iterate over our entire cache, find all stale references, and remove
#them. This is the brute-force approach. Of course, this approach is both slow
#and invites needlessly repeated work across repeated routine iterations. Ergo,
#rather than routinely iterating *ALL* cache entries, we instead only want to
#routinely inspect a single *RANDOM* cache entry on each scheduled callback of
#our cleanup routine. This is the O(1) beartype approach and still eventually
#gets us where we want to go (i.e., complete cleanup of all stale references)
#with minimal costs. A random walk wins yet again.
#
#Fifth, note that there are many different means of routinely scheduling work.
#We ignore the existence of the GIL throughout the following discussion, both
#because we have no choice *AND* because the randomized cleanup we need to
#perform on each scheduled callback is an O(1) operation with negligible
#constant factors and thus effectively instantaneous rather than CPU- or
#IO-bound. The antiquated approach is "threading.Timer". The issue with the
#entire "threading" module is that it is implemented with OS-level threads,
#which are ludicrously expensive and thus fail to scale. Our usage of the
#"threading" module in beartype would impose undue costs on downstream apps by
#needlessly consuming a precious thread, preventing apps from doing so. That's
#bad. Instead, we *MUST* use coroutines, which are implemented in Python itself
#rather than exposed to the OS and thus suffer no such scalability concerns,
#declared as either:
#* Old-school coroutines via the @asyncio.coroutine decorator. Yielding under
# this approach is trivial (and possibly more efficient): e.g.,
# yield
#* New-school coroutines via the builtin "async def" syntax. Yielding under
# this approach is non-trivial (and possibly less efficient): e.g.,
# await asyncio.sleep_ms(0)
#
#In general, the "async def" approach is strongly favoured by the community.
#Note that yielding control in the "async def" approach is somewhat more
#cumbersome and possibly less efficient than simply performing a "yield".
#Clearly, a bit of research here is warranted. Note this online commentary:
# In performance-critical code yield does offer a small advantage. There are
# other tricks such as yielding an integer (number of milliseconds to
# pause). In the great majority of cases code clarity trumps the small
# performance gain achieved by these hacks. In my opinion, of course.
#
#In either case, we declare an asynchronous coroutine. We then need to schedule
#that coroutine with the global event loop (if any). The canonical way of doing
#this is to:
#* Pass our "async def" function to the asyncio.create_task() function.
# Although alternatives exist (e.g., futures), this function is officially
# documented as being the preferred approach:
# create_task() (added in Python 3.7) is the preferable way for spawning new
# tasks.
#  Of course, note this requires Python >= 3.7. We couldn't care less. *shrug*
#* Pass that task to the asyncio.run() function... or something, something.
# Clearly, we still need to research how to routinely schedule that task with
# "asyncio" rather than running it only once. In theory, that'll be trivial.
#
#Here's a simple example:
#
# async def cleanup_beartype(event_loop):
# # Disregard how simple this is, it's just for example
# s = await asyncio.create_subprocess_exec("ls", loop=event_loop)
#
# def schedule_beartype_cleanup():
# event_loop = asyncio.get_event_loop()
# event_loop.run_until_complete(asyncio.wait_for(
# cleanup_beartype(event_loop), 1000))
#
#The above example was culled from this StackOverflow post:
# https://stackoverflow.com/questions/45010178/how-to-use-asyncio-event-loop-in-library-function
#Unlike the asyncio.create_task() approach, that works on Python >= 3.6.
#Anyway, extensive research is warranted here.
#
#Sixthly, note that the schedule_beartype_cleanup() function should be called
#only *ONCE* per active Python process by the first call to the @beartype
#decorator passed a callable annotated by one or more "dict" or
#"typing.Mapping" type hints. We don't pay these costs unless we have to. In
#particular, do *NOT* unconditionally call the schedule_beartype_cleanup()
#function on the first importation of the "beartype" package.
#
#Lastly, note there technically exists a trivial alternative to the above
#asynchronous approach: the "gc.callbacks" list, which allows us to schedule
#arbitrary user-defined standard non-asynchronous callback functions routinely
#called by the garbage collector either immediately before or after each
#collection. So what's the issue? Simple: end users are free to either
#explicitly disable the garbage collector *OR* compile or interpret their
#apps under a non-CPython executable that does not perform garbage collection.
#Ergo, this alternative fails to generalize and is thus largely useless.
#FIXME: Actually... let's not do the "asyncio" approach -- at least not
#initially. Why? The simplest reason is that absolutely no one expects a
#low-level decorator to start adding scheduled asynchronous tasks to the global
#event loop. The less simple reason is that doing so would probably have
#negative side effects to at least one downstream consumer, the likes of which
#we could never possibly predict.
#
#So, what can we do instead? Simple. We do this by:
#* If garbage collection is enabled, registering a new cleanup callback with
# "gc.callbacks".
#* Else, we get creative. First, note that garbage collection is really only
# ever disabled in the real world when compiling Python to a lower-level
# language (typically, C). Ergo, efficiency isn't nearly as much of a concern
# in this currently uncommon edge case. So, here's what we do:
# * After the first call to the @beartype decorator passed a callable
# annotated by one or more mapping or set type hints, globally set a private
# "beartype" boolean -- say, "WAS_HINT_CLEANABLE" -- noting this to have
# been the case.
# * In the _code_check_params() function generating code type-checking *ALL*
# annotated non-ignorable parameters:
# * If "WAS_HINT_CLEANABLE" is True, conditionally append code calling our
# cleanup routine *AFTER* code type-checking these parameters. While
# mildly inefficient, function calls incur considerably less overhead
# when compiled away from interpreted Python bytecode.
#FIXME: Note that the above scheme by definition *REQUIRES* assignment
#expressions and thus Python >= 3.8 for general-purpose O(1) type-checking of
#arbitrarily nested dictionaries and sets. Why? Because each time we iterate an
#iterator over those data structures we lose access to the previously iterated
#value, which means there is *NO* sane means of type-checking nested
#dictionaries or sets without assignment expressions. But that's unavoidable
#and Python <= 3.7 is the past, so that's largely fine.
#
#What we can do under Python <= 3.7, however, is the following:
#* If the (possibly nested) type hint is of the form
# "{checkable}[...,{dict_or_set}[{class},{class}],...]" where
# "{checkable}" is an arbitrary parent type hint safely checkable under Python
# <= 3.7 (e.g., lists, unions), "{dict_or_set}" is (wait for it) either "dict"
# or "set", and "{class}" is an arbitrary type, then that hint *IS* safely
# checkable under Python <= 3.7. Note that items (i.e., keys and values) can
# both be checked in O(1) time under Python <= 3.7 by just validating the key
# and value of a different key-value pair (e.g., by iterating once for the key
# and then again for the value). That does have the disadvantage of then
# requiring O(n) iteration to raise a human-readable exception if a dictionary
# value fails a type-check, but we're largely okay with that. Again, this only
# applies to an edge case under obsolete Python versions, so... *shrug*
#* Else, a non-fatal warning should be emitted and the portion of that type
# hint that *CANNOT* be safely checked under Python <= 3.7 should be ignored.
#FIXME: Note that mapping views now provide a "mapping" attribute enabling
#direct access of the mapping mapped by that view under Python >= 3.10:
# The views returned by dict.keys(), dict.values() and dict.items() now all
# have a mapping attribute that gives a types.MappingProxyType object
# wrapping the original dictionary.
#This means that we do *NOT* need to explicitly cache the "mapping" object
#mapped by any cached view under Python >= 3.10, reducing space consumption.
#FIXME: *WOOPS.* The "CacheLruStrong" class is absolutely awesome and we'll
#absolutely be reusing that for various supplementary purposes across the
#codebase (e.g., for perfect O(1) tuple type-checking below). However, this
#class sadly doesn't get us where we need to be for full O(1) dictionary and
#set type-checking. Why? Two main reasons:
#* *ITERATIVE ACCESS.* Our routinely scheduled cleanup function needs to
# iteratively or randomly access arbitrary cache items for inspection to
# decide whether they need to be harvested or not.
#* *VARIABLE OBJECT SIZES.* We gradually realized, given the plethora of
# related "FIXME:" comments below, that we'll eventually want to cache a
# variety of possible types of objects across different caches -- each cache
# caching a unique type of object. This makes less and less sense the more one
# considers, however. For example, why have an LRU cache of default size 256
# specific to iterators for a downstream consumer that only passes one
# iterator to a single @beartype-decorated callable?
#
#The solution to both is simple, but not: we define a new derivative
#"LRUDuffleCacheStrong" class. The motivation for using the term "duffle" is
#that, just like a duffle bag, a duffle cache:
#* Provides random access.
#* Elegantly stretches to contain a variable number of arbitrary objects of
# variable size.
#
#The "LRUDuffleCacheStrong" class satisfies both concerns by caching to a
#maximum *OBJECT SIZE CAPACITY* rather than merely to an *OBJECT NUMBER
#CAPACITY.* Whereas the "CacheLruStrong" class treats all cached objects as
#having a uniform size of 1, the "LRUDuffleCacheStrong" class instead assigns
#each cached object an estimated abstract size (EAS) as a strictly positive
#integer intended to reflect its actual transitive in-memory size -- where a
#cached object of EAS 1 is likely to be the smallest object in that cache.
#While estimating EAS will depend on object type, the following should apply:
#* EAS estimators *MUST* run in O(1) time. That is, estimating the abstract
# size of an object *MUST* be implementable in constant time with negligible
# constant factors. This means that the standard approach of recursively
# inspecting the physical in-memory sizes of all objects visitable from the
# target object should *NOT* be employed.
#* For containers:
# * Note that type hints provide us the expected height
# "sizeable_height" of any data structure, where "sizeable_height" is
# defined as the number of "[" braces in a type hint ignoring those that do
# *NOT* connote semantic depth (e.g., "Optional", "Union", "Annotated"). So:
# * The "sizeable_height" for a type hint "list[list[list[int]]]" is 3.
# * Since any unsubscripted type hint (e.g., "list") is implicitly
# subscripted by "[Any]", the "sizeable_height" for the type hints "list"
#    and "list[int]" is 1 in both cases.
#  * Note also that most containers satisfy the "collections.abc.Sized" ABC.
# * Given that, we can trivially estimate the EAS "sizeable_bigo_size" of any
# type-hinted sizeable object "sizeable" as follows:
# sizeable_bigo_size = len(sizeable) ** sizeable_height
# Ergo, a list of length 100 type-hinted as "list[list[int]]" has a size of:
# sizeable_bigo_size = 100 ** 2 = 10,000
#* For dictionaries, the "sizeable_bigo_size" provided by the equation above
# should be multiplied by two to account for the increased space consumption
# due to storing key-value pairs.
#
#Here's then how the "LRUDuffleCacheStrong" class is implemented:
#* The "LRUDuffleCacheStrong" class should *NOT* subclass the
# "CacheLruStrong" class but copy-and-paste from the latter into the former.
# This is both for efficiency and maintainability; it's likely their
# implementations will mildly diverge.
#* The LRUDuffleCacheStrong.__init__() method should be implemented like this:
# def __init__(
# self,
# bigo_size_max: int,
# value_metadata_len: 'Optional[int]' = 0,
# )
# assert bigo_size_max > 0
# assert value_metadata_len >= 0
#
# # Classify all passed parameters as instance variables.
# self._EAS_MAX = bigo_size_max
# self._FIXED_LIST_SIZE = value_metadata_len + 2
#
# # Initialize all remaining instance variables.
# self._bigo_size_cur = 0
# self._iter = None
#* Note the above assignment of these new instance variables:
# * "_EAS_MAX", the maximum capacity of this LRU cache in EAS units. Note that
# this capacity should ideally default to something that *DYNAMICALLY SCALES
#    WITH THE RAM OF THE LOCAL MACHINE.* Ergo, "_EAS_MAX" should be
# significantly larger in a standard desktop system with 32GB RAM than it is
# on a Raspberry Pi 2 with 1GB RAM: specifically, 32 times larger.
# * "_bigo_size_cur", the current capacity of this LRU cache in EAS units.
# * "_FIXED_LIST_SIZE", the number of additional supplementary objects to
# be cached with each associated value of this LRU cache. The idea here is
# that each key-value pair of this cache is an arbitrary hashable object
# (the key) mapping to a "FixedList(size=self._FIXED_LIST_SIZE)"
# (the value) whose 0-based indices provide (in order):
# 1. The EAS of that object. For completeness, we should also add to the
# "sizeable_bigo_size" estimate given above the additional estimated cost
# of this "FixedList". Since the length of this "FixedList" is guaranteed
# to be exactly "self._value_metadata_len + 2", this then gives a final
# EAS of that object as:
# sizeable_bigo_size = (
# self._value_metadata_len + 2 + len(sizeable) ** sizeable_height)
# 2. A strong reference to the primary object being cached under this key.
# For dictionaries and sets, this is an iterator over those dictionaries
# and sets.
# 3...self._value_metadata_len + 2: Additional supplementary objects to be
# cached along with that object. For dictionaries and sets, exactly one
# supplementary object must be cached, so this is:
# 3. The underlying dictionary or set being iterated over, so we can
# lookup the number of existing strong references to that dictionary
# or set during cleanup and decide whether to uncache that or not.
# * "_iter", an iterator over this dictionary. Yes, we *COULD* implement
# random access (e.g., with a linked list or list), but doing so introduces
# extreme complications and inefficiencies in both space and time. Instead,
# persisting a simple iterator over this dictionary suffices.
#* Allow any "LRUDuffleCacheStrong" instance to be trivially incremented
# (e.g., during garbage collection cleanup) as an iterator by also defining:
# def get_pair_next_or_none(
# self,
# __dict_len = dict.__len__,
# ) -> 'Optional[Tuple[Hashable, FixedList]]':
# '''
# Next most recently used key-value pair of this cache if this cache
# is non-empty *or* ``None`` otherwise (i.e., if this cache is empty).
#
# The first call to this method returns the least recently used
# key-value pair of this cache. Each successive call returns the next
# least recently used key-value pair of this cache until finally
# returning the most recently used key-value pair of this cache, at
# which time the call following that call rewinds time by again
# returning the least recently used key-value pair of this cache.
# '''
#
# #FIXME: Probably nest this in a "with self._thread_lock:" block.
#
# # If this cache is empty, return None.
# if not __dict_len(self):
# return None
# # Else, this cache is non-empty.
#
# # Attempt to...
# try:
# # Return the next recent key-value pair of this cache.
# return self._iter.__next__()
# # If doing so raises *ANY* exception, this iterator has become
# # desynchronized from this cache. In this case...
# #
# # Note this implicitly handles the initial edge case in which this
# # cache has yet to be iterated (i.e., "self._iter == None"). Since
# # this is *ONLY* the case for the first call to this method for the
# # entire lifetime of the active Python process, the negligible
# # overhead of handling this exception is preferable to violating DRY
# # by duplicating this logic with an explicit
# # "if self._iter == None:" block.
# except:
# # Reinitialize this iterator.
#              self._iter = iter(self.items())
#
# # Return the least recent such pair.
# return self._iter.__next__()
#* Refactor the __setitem__() method. Specifically, when caching a new
# key-value pair with EAS "bigo_size_item" such that:
#  while bigo_size_item + self._bigo_size_cur > self._EAS_MAX:
# ...we need to iteratively remove the least recently used key-value pair of
# this cache (which, yes, technically has O(n) worst-case time, which is
# non-ideal, which may be why nobody does this, but that's sort-of okay here,
# since we're doing something monstrously productive each iteration by freeing
# up critical space and avoiding memory leaks, which seems more than worth the
# cost of iteration, especially as we expect O(1) average-case time) until
# this cache can fit that pair into itself. Once it does, we:
# # Bump the current EAS of this cache by the EAS of this pair.
# self._bigo_size_cur += bigo_size_item
# Oh, and there's an obvious edge case here: if "bigo_size_item >
# self._EAS_MAX", we do *NOT* attempt to do anything with that object.
# We don't cache it or an iterator over it. It's too big. Instead, we just
# type-check the first item of that object in O(1) time. *shrug*
#
#Seems sweet to us. We can store arbitrarily large nested containers in our
#duffle cache without exhausting memory, which is actually more than the
#brute-force LRU cache can say. We get trivial iteration persistence. We also
#avoid a proliferation of different LRU caches, because a single
#"LRUDuffleCacheStrong" instance can flexibly store heterogeneous types.
#FIXME: *RIGHT.* So, "LRUDuffleCacheStrong" is mostly awesome as defined above.
#We'd just like to make a few minor tweaks for improved robustness:
#
#* Drop the "value_metadata_len" parameter from the
# LRUDuffleCacheStrong.__init__() method. We'd prefer to have that parameter
# individually passed to each cache_value() call (see below) rather than
# globally, as the former enables different types of cached objects to have a
# different quantity of metadata cached with those objects.
#* Drop the __setitem__() implementation borrow from "CacheLruStrong". Instead,
# defer to the existing dict.__setitem__() implementation. Why? Because we
# need to pass additional cache-specific parameters to our own
# __setitem__()-like non-dunder method, which __setitem__() doesn't support.
#* Define a new cache_value() method resembling CacheLruStrong.__setitem__() but
# even more virile and awesome with signature resembling:
# def cache_value(
# self,
#
# # Mandatory parameters.
# key: 'Hashable',
# value: object,
# *metadata: object,
#
# # Optional parameters.
# value_height: 'Optional[int]' = 1,
# ) -> None:
#FIXME: Here's a reasonably clever idea for perfect O(1) tuple type-checking
#guaranteed to check all n items of an arbitrary tuple in exactly n calls, with
#each subsequent call performing *NO* type-checking by reducing to a noop. How?
#Simple! We:
#* Augment our existing "CacheLruStrong" data structure to optionally accept a
# new initialization-time "value_maker" factory function defaulting to "None".
# If non-"None", "CacheLruStrong" will implicitly call that function on each
# attempt to access a missing key by assigning the object returned by that
# call as the key of a new key-value pair -- or, in other words, by behaving
# exactly like "collections.defaultdict".
#* Globally define a new "_LRU_CACHE_TUPLE_TO_COUNTER" cache somewhere as an
# instance of "CacheLruStrong" whose "value_maker" factory function is
# initialized to a lambda function simply returning a new
# "collections.Counter" object that starts counting at 0. Since tuples
# themselves are hashable and thus permissible for direct use as dictionary
# keys, this cache maps from tuples (recently passed to or returned from
# @beartype-decorated callables) to either:
# * If that tuple has been type-checked to completion, "True" or any other
# arbitrary sentinel placeholder, really. "True" is simpler, however,
# because the resulting object needs to be accessible from dynamically
# generated wrapper functions.
# * Else, a counter such that the non-negative integer returned by
# "next(counter)" is the 0-based index of the next item of that tuple to be
# type-checked.
#
#Given that low-level infrastructure, the make_func_wrapper_code() function below
#then generates code perfectly type-checking arbitrary tuples in O(1) time that
#should ideally resemble (where "__beartype_pith_j" is the current pith
#referring to this tuple):
# (
# _LRU_CACHE_TUPLE_TO_COUNTER[__beartype_pith_j] is True or
# {INSERT_CHILD_TYPE_CHECK_HERE}(
# __beartype_pith_k := __beartype_pith_j[
# next(_LRU_CACHE_TUPLE_TO_COUNTER[__beartype_pith_j])]
# )
# )
#
#Awesome, eh? The same concept trivially generalizes to immutable sequences
#(i.e., "Sequence" type hints that are *NOT* "MutableSequence" type hints).
#Sadly, since many users use "Sequence" to interchangeably denote both
#immutable and mutable sequences, we probably have no means of reliably
#distinguishing the two. So it goes! So, just tuples then in practice. *sigh*
#FIXME: Huzzah! We finally invented a reasonably clever means of (more or less)
#safely type-checking one-shot iterables like generators and iterators in O(1)
#time without destroying those iterables. Yes, doing so requires proxying those
#iterables with iterables of our own. Yes, this is non-ideal but not nearly as
#bad as you might think. Why? Because *NO ONE CARES ABOUT ONE-SHOT ITERABLES.*
#They're one-shot. By definition, you can't really care about them, because
#they don't last long enough. You certainly can't cache them or stash them in
#data structures or really do anything with them beside pass or return them
#between callables until they inevitably get exhausted.
#
#This means that proxying one-shot iterables is almost always safe. Moreover,
#we devised a clever means of proxying that introduces negligible overhead
#while preserving our O(1) guarantee. First, let's examine the standard
#brute-force approach to proxying one-shot iterables:
#
# class BeartypeIteratorProxy(object):
# def __init__(self, iterator: 'Iterator') -> None:
# self._iterator = iterator
#
# def __next__(self) -> object:
# item_next = next(self._iterator)
#
# if not {INSERT_TYPE_CHECKS_HERE}(item_next):
# raise SomeBeartypeException(f'Iterator {item_next} bad!')
#
# return item_next
#
#That's bad, because that's an O(n) type-check run on every single iteration.
#Instead, we do this:
#
# class BeartypeIteratorProxy(object):
# def __init__(self, iterator: 'Iterator') -> None:
# self._iterator = iterator
#
# def __next__(self) -> object:
# # Here is where the magic happens, folks.
# self.__next__ = self._iterator.__next__
#
#         item_next = self.__next__()
#
# if not {INSERT_TYPE_CHECKS_HERE}(item_next):
# raise SomeBeartypeException(f'Iterator {item_next} bad!')
#
# return item_next
#
#See what we did there? We dynamically monkey-patch away the
#BeartypeIteratorProxy.__next__() method by replacing that method with the
#underlying __next__() method of the proxied iterator immediately after
#type-checking one and only one item of that iterator.
#
#The devil, of course, is in that details. Assuming a method can monkey-patch
#itself away (we're pretty sure it can, as that's the basis of most efficient
#decorators that cache property method results, *BUT WE SHOULD ABSOLUTELY
#VERIFY THAT THIS IS THE CASE), the trick is then to gracefully handle
#reentrancy. That is to say, although we have technically monkey-patched away
#the BeartypeIteratorProxy.__next__() method, that object is still a live
#object that *WILL BE RECREATED ON EACH CALL TO THE SAME* @beartype-decorated
#callable. Yikes! So, clearly we yet again cache with a "CacheLruStrong" cache
#specific to iterators... or perhaps something like "CacheLruStrong" that
#provides a callback mechanism to enable arbitrary objects to remove themselves
#from the cache. Yes! Perhaps just augment our existing "CacheLruStrong" class
#with some sort of callback or hook support?
#
#In any case, the idea here is that the "BeartypeIteratorProxy" class defined
#above should internally:
#* Store a weak rather than strong reference to the underlying iterator.
#* Register a callback with that weak reference such that:
# * When the underlying iterator is garbage-collected, the wrapping
# "BeartypeIteratorProxy" proxy removes itself from its "CacheLruStrong"
#    cache.
#
#Of course, we're still not quite done yet. Why? Because we want to avoid
#unnecessarily wrapping "BeartypeIteratorProxy" instances in
#"BeartypeIteratorProxy" instances. This will happen whenever such an instance
#is passed to a @beartype-decorated callable annotated as accepting or
#returning an iterator. How can we avoid that? Simple. Whenever we detect that
#an iterator to be type-checked is already a "BeartypeIteratorProxy" instance,
#we just efficiently restore the __next__() method of that instance to its
#pre-monkey-patched version: e.g.,
# (
# isinstance(__beartype_pith_n, BeartypeIteratorProxy) and
# # Unsure if this sort of assignment expression hack actually works.
# # It probably doesn't. So, this may need to be sealed away into a
# # utility function performing the same operation. *shrug*
# __beartype_pith_n.__next__ = BeartypeIteratorProxy.__next__
# )
#FIXME: Huzzah! The prior commentary on type-checking iterators in O(1) time
#also generalizes to most of the other non-trivial objects we had no idea how
#to type-check -- notably, callables. How? Simple. *WE PROXY CALLABLES WITH
#OBJECTS WHOSE* __call__() methods:
#* Type-check parameters to be passed to the underlying callable.
#* Call the underlying callable.
#* Type-check the return value.
#* Monkey-patch themselves away by replacing themselves (i.e., the __call__()
# methods of that object) with the underlying callable. The only issue here,
# and it might be a deal-breaker, is whether or not a bound method can simply
# be replaced with either an unbound function *OR* a bound method of another
# object entirely. Maybe it can? We suspect it can in both cases, but research
# will certainly be required here.
#
#Again, cache such objects to avoid reentrancy issues. That said, there is a
#significant complication here that one-shot iterables do *NOT* suffer:
#proxying. Unlike one-shot iterables, callables are often expected to retain
#their object identities. Proxying disrupts that. I still believe that we
#should enable proxying across the board by default despite that, because less
#than 1% of our users will manually enable an option enabling proxying, simply
#because they'll never think to go look for it and when they do find it will be
#understandably hesitant to enable it when everything else is working. Users
#(including myself) typically only enable options when they encounter issues
#requiring they do so. Ergo, proxy by default. But we *ABSOLUTELY* need to
#allow users to conditionally disable proxying on a per-decoration basis --
#especially when proxying callables.
#
#So we propose adding a new optional "is_proxying" parameter to the @beartype
#decorator. Unfortunately, doing so in an efficient manner will prove highly
#non-trivial. Why? Because the standard approach of doing so is *PROBABLY*
#extremely inefficient. We need to test that hypothesis, of course, but the
#standard approach to adding optional parameters to decorators is to nest a
#closure in a closure in a function. We don't need the innermost closure, of
#course, because we dynamically generate it at runtime. We would need the
#outermost closure, though, to support optional decorator parameters under the
#standard approach. That seems outrageously expensive, because each call to the
#@beartype decorator would then internally generate and return a new closure!
#Yikes. We can avoid that by instead, on each @beartype call:
#* Create a new functools.partial()-based wrapper decorator passed our
# @beartype decorator and all options passed to the current @beartype call.
#* Cache that wrapper decorator into a new private "CacheLruStrong" instance.
#* Return that decorator.
#* Return the previously cached wrapper decorator on the next @beartype call
# passed the same options (rather than recreating that decorator).
#
#Naturally, this requires these options to be hashable. Certainly, booleans
#are, so this smart approach supports a new optional "is_proxying" parameter.
#FIXME: Note that the above approach should only be employed as a last-ditch
#fallback in the event that the passed callable both:
#* Lacks a non-None "__annotations__" dictionary.
#* Is *not* annotated by the third-party optional "typeshed" dependency.
#
#If the passed callable violates either of those two constraints, the existing
#type hints annotating that callable should be trivially inspected instead in
#O(1) time (e.g., by just performing a brute-force dictionary comparison from
#that callable's "__annotations__" dictionary to a dictionary that we
#internally construct and cache based on the type hints annotating the
#currently decorated callable, except that doesn't quite work because the
#"__annotations__" dictionary maps from parameter and return names whereas the
#"typing.Callable" and "collections.abc.Callable" syntax omits those names,
#which begs the question of how the latter syntax handles positional versus
#keyword arguments anyway)... *OR SOMETHING.*
#
#Fascinatingly, "Callable" syntax supports *NO* distinction between mandatory,
#optional, positional, or keyword arguments, because PEP 484 gonna PEP 484:
# "There is no syntax to indicate optional or keyword arguments; such
# function types are rarely used as callback types."
#
#Note that mapping from the return type hint given by "typing.Callable" syntax
#into the "__annotations__" dictionary is trivial, because the return is always
#unconditionally named "return" in that dictionary. So, we then just have to
#resolve how to ignore parameter names. Actually, handling mandatory positional
#parameters (i.e., positional parameters lacking defaults) on the passed
#callable should also be trivial, because they *MUST* strictly correspond to
#the first n child type hints of the first argument of the expected parent
#"typing.Callable" type hint. It's optional positional parameters and keyword
#arguments that are the rub. *shrug*
#
#Obviously, we'll want to dynamically generate the full test based on the
#expected parent "typing.Callable" type hint. For sanity, do this iteratively
#by generating code testing arbitrary "__annotations__" against a "Callable"
#type hint (in increasing order of complexity):
#* Passed *NO* parameters and returning something typed.
#* Passed *ONE* parameter and returning something typed.
#* Passed *TWO* parameters and returning something typed.
#* Passed an arbitrary number of parameters and returning something typed.
#
#Note that test should ideally avoid iteration. We're fairly certain we can do
#that by mapping various attributes from the code object of the passed callable
#into something that enables us to produce a tuple of type hints matching the
#first argument of the expected parent "Callable" type hint.
#
#*BINGO!* The value of the "func.__code__.co_varnames" attribute is a tuple of
#both parameter names *AND* local variables. Fortunately, the parameter names
#come first. Unfortunately, there are two types: standard and keyword-only.
#Altogether, an expression yielding a tuple of the names of all parameters
#(excluding local variables) is given by:
#
# #FIXME: Insufficient. Variadic parameters also exist. Also, note that this
# #has already been efficiently implemented as get_func_arg_names()!
# func_codeobj = get_func_unwrapped_codeobj(func)
#
# # Tuple of the names of all parameters accepted by this callable.
# func_param_names = func_codeobj.co_varnames[
# :func_codeobj.co_argcount + func_codeobj.co_kwonlyargcount]
#
#Note that "func_param_names" probably excludes variadic positional and keyword
#argument names, but that's probably fine, because "Callable" type hint syntax
#doesn't appear to explicitly support that sort of thing anyway. I mean, how
#would it? Probably using the "..." singleton ellipsis object, I'm sure. But
#that's completely undefined, so it seems doubtful anyone's actually doing it.
#
#We then need to use that tuple to slice "func.__annotations__". Of course, you
#can't slice a dictionary in Python, because Python dictionaries are much less
#useful than they should be. See also:
# https://stackoverflow.com/questions/29216889/slicing-a-dictionary
#
#The simplest and fastest approach we can currently think of is given by:
# func_param_name_to_hint = func.__annotations__
#
# # Generator comprehension producing type hints for this callable's
# # parameters in the same order expected by the first argument of the
# # "Callable" type hint.
# func_param_hints = (
# func_param_name_to_hint[func_param_name]
# for func_param_name in func_param_names
# )
#
#Note that because we know the exact number of expected parameters up front
#(i.e., as the len() of the first argument of the "Callable" type hint), we can
#generate optimal code *WITHOUT* a generator or other comprehension and thus
#*WITHOUT* iteration. Yes, this is literally loop unrolling in Python, which is
#both hilarious and exactly what CPython developers get for failing to support
#generally useful operations on dictionaries and sets: e.g.,
#
# callable_type_hint = ... # Give this a name for reference below.
#
# # Number of non-variadic parameters expected to be accepted by this
# # caller-passed callable.
# FUNC_PARAM_LEN_EXPECTED = len(callable_type_hint[0])
#
# # Generator comprehension producing type hints for this callable's
# # parameters in the same order expected by the first argument of the
# # "Callable" type hint.
# func_param_hints = (
# func_param_name_to_hint[func_param_names[0]],
# func_param_name_to_hint[func_param_names[1]],
# ...
# func_param_name_to_hint[func_param_names[FUNC_PARAM_LEN_EXPECTED]],
# )
#
#Clearly, there's *LOADS* of additional preliminary validation that needs to
#happen here as well. Since "Callable" type hint syntax *REQUIRES* a return
#type hint to be specified (yes, this is absolutely non-optional), we also need
#to ensure that "func_param_name_to_hint" contains the 'return' key.
#
#Given all that, the final test would then resemble something like:
#
# (
# __beartype_pith_n_func_param_name_to_hint := (
# func.__annotations__ or LOOKUP_IN_TYPESHED_SOMEHOW) and
# 'return' in __beartype_pith_n_func_param_name_to_hint and
# __beartype_pith_n_func_codeobj := getattr(
# __beartype_pith_n, '__code__', None) and
# # Just ignore C-based callables and assume they're valid. Unsure what
# # else we can do with them. Okay, we could also proxy them here, but
# # that seems a bit lame. Just accept them as is for now, perhaps?
# __beartype_pith_n_func_codeobj is None or (
# __beartype_pith_n_func_param_names := (
# __beartype_pith_n_func_codeobj.co_varnames) and
# len(__beartype_pith_n_func_param_names) == {FUNC_PARAM_LEN_EXPECTED} and
# (
# __beartype_pith_n_func_param_name_to_hint[__beartype_pith_n_func_param_names[0]],
# __beartype_pith_n_func_param_name_to_hint[__beartype_pith_n_func_param_names[1]],
# ...
# __beartype_pith_n_func_param_name_to_hint[__beartype_pith_n_func_param_names[FUNC_PARAM_LEN_EXPECTED]],
# __beartype_pith_n_func_param_name_to_hint['return']
# ) == {callable_type_hint}
# )
# )
#
#*YUP.* That's super hot, that is. We're sweating.
#
#Note this test is effectively O(1) but really O(FUNC_PARAM_LEN_EXPECTED) where
#FUNC_PARAM_LEN_EXPECTED is sufficiently small that it's basically O(1). That
#said, the constant factors are non-negligible. Fortunately, callables *NEVER*
#change once declared. You should now be thinking what we're thinking:
#*CACHING*. That's right. Just stuff the results of the above test (i.e., a
#boolean) into our duffel LRU cache keyed on the fully-qualified name of that
#callable. We only want to pay the above price once per callable, if we can
#help it, which we absolutely can, so let's do that please.
#
#*NOTE THAT ASSIGNMENT EXPRESSIONS ARE EFFECTIVELY MANDATORY.* I mean, there's
#basically no way we can avoid them, so let's just require them. By the time we
#get here anyway, Python 3.6 will be obsolete, which just leaves Python 3.7. We
#could just emit warnings when decorating callables annotated by "Callable"
#type hints under Python 3.7. </insert_shrug>
#
#*NOTE THAT BUILTINS DO NOT HAVE CODE OBJECTS,* complicating matters. At this
#point, we couldn't care less, but we'll have to care sometime that is not now.
#FIXME: *OH.* Note that things are slightly less trivial than detailed above.
#It's not enough for a callable to be annotated, of course; that callable also
#needs to be annotated *AND* type-checked by a runtime type checker like
#@beartype or @typeguard. The same, of course, does *NOT* apply to "typeshed"
#annotations, because we generally expect stdlib callables to do exactly what
#they say and nothing more or less. This means the above approach should only
#be employed as a last-ditch fallback in the event that the passed callable
#does *NOT* satisfy any of the following:
#* Is decorated by a runtime type checker *AND* has a non-None
# "__annotations__" dictionary.
#* Is annotated by the third-party optional "typeshed" dependency.
#
#Trivial, but worth noting.
#FIXME: Lastly, note that everywhere we say "typeshed" above, we *REALLY* mean
#a PEP 561-compliant search for stub files annotating that callable.
#Unsurprisingly, the search algorithm is non-trivial, which will impact the
#performance gains associated with type-checking annotations in the first
#place. Ergo, we might consider omitting aspects of this search that are both
#highly inefficient *AND* unlikely to yield positive hits. See also:
# https://www.python.org/dev/peps/pep-0561/
#FIXME: *IT'S CONFIGURATION TIME.* So, let's talk about how we efficiently
#handle @beartype configuration like the "is_proxying" boolean introduced
#above. It's worth getting this right the first time. Happily, we got this
#right the first time with a balls-crazy scheme that gives us O(1)
#configurability that supports global defaults that can be both trivially
#changed globally *AND* overridden by passed optional @beartype parameters.
#
#Note this scheme does *NOT* require us to litter the codebase with cumbersome
#and inefficient logic like:
# muh_setting = (
# beartype_params.muh_setting if beartype_params.muh_setting is not None else
# beartype._global_config.muh_setting)
#
#What is this magic we speak of? *SIMPLE.* We twist class variable MRO lookup
#in our favour. Since CPython already efficiently implements such lookup with a
#fast C implementation, we can hijack that implementation for our own sordid
#purposes to do something completely different. Note that only *CLASS* variable
#MRO lookup suffices. Since classes are global singletons, all subclasses will
#implicitly perform efficient lookups for undefined class variables in their
#superclass -- which is exactly what we want and need here.
#
#Specifically:
#* Define a new private "beartype._config" submodule.
#* In that submodule:
# * Define a new public "BeartypeConfigGlobal" class declaring all
# configuration settings as class variables defaulting to their desired
# arbitrary global defaults: e.g.,
# class BeartypeConfigGlobal(object):
# '''
# **Global beartype configuration.**
# '''
#
# is_proxying = True
# ...
#* Publicly expose that class to external users as a new public
# "beartype.config" *PSEUDO-MODULE.* In reality, that object will simply be an
# alias of "beartype._config.BeartypeConfigGlobal". But users shouldn't know
# that. They should just treat that object as if it was a module. To effect
# this, just establish this alias in the "beartype.__init__" submodule: e.g.,
# from beartype._config import BeartypeConfigGlobal
#
# # It really is that simple, folks. Maybe. Gods, let it be that simple.
# config = BeartypeConfigGlobal
#* Privatize the existing public "beartype._decor.decormain" submodule to a new
# "beartype._decor._template" submodule.
#* In that submodule:
# * Rename the existing @beartype decorator to beartype_template(). That
# function will now only be called internally rather than externally.
#* Define a new private "beartype._decor._cache.cachedecor" submodule.
#* In that submodule:
# * Define a new "BEARTYPE_PARAMS_TO_DECOR" dictionary mapping from a *TUPLE*
# of positional arguments listed in the exact same order as the optional
# parameters accepted by the new @beartype decorator discussed below to
# subclasses to dynamically generated @beartype decorators configured by
# those subclasses. This tuple should just literally be the argument tuple
# passed to the @beartype decorator, which is probably easiest to achieve if
# we force @beartype parameters to be passed as keyword-only arguments:
#
# # Keyword-only arguments require Python >= 3.8. Under older Pythons,
# # just drop the "*". Under older Pythons, let's just *NOT ALLOW
# # CONFIGURATION AT ALL.* So, this gives us:
# if IS_PYTHON_AT_LEAST_3_8:
# def beartype(*, is_proxying: bool = None, ...) -> Callable:
# BEARTYPE_PARAMS = (is_proxying, ...)
#
# beartype_decor = BEARTYPE_PARAMS_TO_DECOR.get(BEARTYPE_PARAMS)
# if beartype_decor:
# return beartype_decor
#
# # Else, we need to make a new @beartype decorator passed
# # these parameters, cache that decorator in
# # "BEARTYPE_PARAMS_TO_DECOR", and return that decorator.
# else:
# # Probably not quite right, but close enough.
# beartype = beartype_template
#
# We need a hashable tuple for lookup purposes. That's *ABSOLUTELY* the
# fastest way, given that we expect keyword arguments. So, we're moving on.
# Also, do *NOT* bother with LRU caching here, as the expected size of that
#  dictionary will almost certainly always be less than 10 and surely less
#  than 100.
#* Define a new private "beartype._decor.decormain" submodule.
#* In that submodule:
# * Define a new @beartype decorator accepting *ALL* of the *EXACT* same
# class variables declared by the "BeartypeConfigGlobal" class as optional
# parameters of the same name but *UNCONDITIONALLY* defaulting to "None".
# That last bit is critical. Do *NOT* default them to what the
# "BeartypeConfigGlobal" superclass defaults them to, as that would obstruct
# our purposes, which is to have lookups punted upward to the
# "BeartypeConfigGlobal" superclass only when undefined in a subclass.
# * The purpose of this new @beartype decorator is to (in order):
# * First lookup the passed parameters to get an existing decorator passed
# those parameters, as already implemented above. (This is trivial.)
# * If we need to make a new decorator, this is also mostly trivial. Just:
# * Define a new local dictionary "BEARTYPE_PARAM_NAME_TO_VALUE" bundling
# these optional parameters for efficient lookup: e.g.,
# BEARTYPE_PARAM_NAME_TO_VALUE = {
# 'is_proxying': is_proxying,
# ...
# }
# * Dynamically create a new "BeartypeConfigGlobal" subclass *SETTING THE
# DESIRED CLASS VARIABLES* based on all of the passed optional
# parameters whose values are *NOT* "None". For example, if the only
# passed non-"None" optional parameter was "is_proxying", this would be:
# class _BeartypeConfigDecor{ARBITRARY_NUMBER}(BeartypeConfigGlobal):
# is_proxying = False
# This will probably require a bit of iteration to filter out all
# non-"None" optional parameters. Note that the simplest way to
# implement this would probably be to just dynamically declare an empty
# subclass and then monkey-patch that subclass' dictionary with the
# desired non-"None" optional parameters: e.g.,
# # Pseudo-code, but close enough.
# BeartypeConfigDecor = eval(
# f'''class _BeartypeConfigDecor{ARBITRARY_NUMBER}(BeartypeConfigGlobal): pass''')
#
# # Yes, this is a bit lame, but it suffices for now. Remember,
# # we're caching this class, so the logic constructing this class
# # doesn't need to be lightning fast. It's *FAR* more critical that
# # the logic looking up this class in this class be lightning fast.
# #
# # Do *NOT* try to embed this logic into the above evaluation
# # (e.g., as f-expressions). Yes, that sort of hackery is trivial
# # with booleans but rapidly gets hairy with containers. So, I
# # *GUESS* we could do that for booleans. Just remember that that
# # doesn't generalize to the general case. Actually, don't bother.
# # The following suffices and doesn't violate DRY, which is the
# # only important thing here.
# BeartypeConfigDecor.__dict__.update({
# arg_name: arg_value
#                      for arg_name, arg_value in BEARTYPE_PARAM_NAME_TO_VALUE.items()
# if arg_value is not None
# })
# * Dynamically *COPY* the beartype_template() function into a new
# function specific to that subclass, which means that function is
#        actually just a template. We'll never actually call the original function
# itself; we just use that function as the basis for dynamically
# generating new decorators on-the-fly. Heh! Fortunately, we only need
# a shallow function copy. That is to say, we want the code objects to
# remain the same. Note that the most efficient means of implementing
# this is given directly be this StackOverflow answer:
# https://stackoverflow.com/a/13503277/2809027
# Note that that answer can be slightly improved to resemble:
# WRAPPER_ASSIGNMENTS = functools.WRAPPER_ASSIGNMENTS + ('__kwdefaults__',)
# def copy_func(f):
# g = types.FunctionType(f.__code__, f.__globals__, name=f.__name__,
# argdefs=f.__defaults__,
# closure=f.__closure__)
# g = functools.update_wrapper(g, f, WRAPPER_ASSIGNMENTS)
# return g
# That's the most general form. Of course, we don't particularly care
# about copying metadata, since we don't expect anyone to care about
# these dynamically generated decorators. That means we can reduce the
# above to simply:
# def copy_func(f):
# return types.FunctionType(
# f.__code__,
# f.__globals__,
# name=f.__name__,
# argdefs=f.__defaults__,
# closure=f.__closure__,
# )
# * Monkey-patch the new decorator returned by
# "copy_func(beartype_template)" with the new subclass: e.g.,
# beartype_decor = copy_func(beartype_template)
# beartype_decor.__beartype_config = BeartypeConfigDecor
#        *HMMM.* Minor snag. That doesn't work, because the beartype_template()
# template won't have access to that "__beartype_config". Instead, we'll
# need to:
# * Augment the signature of the beartype_template() template to accept
#          a new optional "config" parameter defaulting to "None": e.g.,
# def beartype_template(
# func: Callable, config: BeartypeConfigGlobal = None) -> Callable:
# * Either refactor the copy_func() function defined above to accept a
# caller-defined "argdefs" parameter *OR* (more reasonably) just
# inline the body of that function in @beartype as:
# beartype_decor = types.FunctionType(
# f.__code__,
# f.__globals__,
# name=f.__name__,
# # Yup. In theory, that should do it, if we recall the internal
# # data structure of this parameter correctly.
# argdefs=(BeartypeConfigDecor,),
# closure=f.__closure__,
# )
# * Cache and return that decorator:
# BEARTYPE_PARAMS_TO_DECOR[BEARTYPE_PARAMS] = beartype_decor
# return beartype_decor
#
#Pretty trivial, honestly. We've basically already implemented all of the hard
#stuff above, which is nice.
#
#Note that the beartype_template() function will now accept an optional
#"config" parameter -- which will, of course, *ALWAYS* be non-"None" by the
#logic above. Assert this, of course. We can then trivially expose that
#"config" to lower-level beartype functions by just stuffing it into the
#existing "BeartypeCall" class: e.g.,
# # Welp, that was trivial.
# func_data.config = config
#
#Since we pass "func_data" everywhere, we get configuration for free. Muhaha!
#FIXME: Propagate generic subscriptions both to *AND* from pseudo-superclasses.
#First, consider the simpler case of propagating a generic subscription to
#pseudo-superclasses: e.g.,
# from typing import List
# class MuhList(List): pass
#
# @beartype
# def muh_lister(muh_list: MuhList[int]) -> None: pass
#
#During internal type hint visitation, @beartype should propagate the "int"
#child type hint subscripting the "MuhList" type hint up to the "List"
#pseudo-superclass under Python >= 3.9. Under older Python versions, leaving
#"List" unsubscripted appears to raise exceptions at parse time. *shrug*
#
#Of the two cases, this first case is *SIGNIFICANTLY* more important than the
#second case documented below. Why? Because mypy (probably) supports this first
#but *NOT* second case, for which mypy explicitly raises an "error". Since
#mypy has effectively defined the standard interpretation of type hints,
#there's little profit in contravening that ad-hoc standard by supporting
#something unsupported under mypy -- especially because doing so would then
#expose end user codebases to mypy errors. Sure, that's "not our problem, man,"
#but it kind of is, because community standards exist for a reason -- even if
#they're ad-hoc community standards we politely disagree with.
#
#Nonetheless, here's the second case. Consider the reverse case of propagating
#a generic subscription from a pseudo-superclass down to its unsubscripted
#generic: e.g.,
# from typing import Generic, TypeVar
#
# T = TypeVar('T')
# class MuhGeneric(Generic[T]):
# def __init__(self, muh_param: T): pass
#
# @beartype
# def muh_genericizer(generic: MuhGeneric, T) -> None: pass
#
#During internal type hint visitation, @beartype should propagate the "T"
#child type hint subscripting the "Generic" pseudo-superclass down to the
#"MuhGeneric" type hint under Python >= 3.9 and possibly older versions. Doing
#so would reduce DRY violations, because there's no tangible reason why users
#should have to perpetually subscript "MuhGeneric" when its pseudo-superclass
#already has been. Of course, mypy doesn't see it that way. *shrug*
#FIXME: When time permits, we can augment the pretty lame approach by
#publishing our own "BeartypeDict" class that supports efficient random access
#of both keys and values. Note that:
#* The existing third-party "randomdict" package provides baseline logic that
# *MIGHT* be useful in getting "BeartypeDict" off the ground. The issue with
# "randomdict", however, is that it internally leverages a "list", which
# probably then constrains key-value pair deletions on the exterior
# "randomdict" object to an O(n) rather than O(1) operation, which is
# absolutely unacceptable.
#* StackOverflow questions provide a number of solutions that appear to be
# entirely O(1), but which require maintaining considerably more internal data
# structures, which is also unacceptable (albeit less so), due to increased
#  space consumption that probably grows unacceptably fast and thus fails to
# generally scale.
#* Since we don't control "typing", we'll also need to augment "BeartypeDict"
# with a "__class_getitem__" dunder method (or whatever that is called) to
# enable that class to be subscripted with "typing"-style types ala:
# def muh_func(muh_mapping: BeartypeDict[str, int]) -> None: pass
#In short, we'll need to conduct considerably more research here.
#FIXME: Actually, none of the above is necessary or desirable. Rather than
#designing a random access "BeartypeDict" class, it would be *FAR* more useful
#to design a series of beartype-specific container types in a new external
#"beartypes" package, each of which performs O(1) type-checking *ON INSERTION
#OF EACH CONTAINER ITEM.* This should be stupidly fast under standard use
#cases, because we typically expect an item to be inserted only once but
#accessed many, many times. By just checking on insertion, we avoid *ALL* of
#the complications of trying to type-check after the fact during sequential
#non-random iteration over items.
#
#Indeed, there appears to be a number of similar projects with the same idea,
#with the caveat that these projects *ALL* leverage package-specific constructs
#rather than PEP-compliant type hints -- a significant negative. The most
#interesting of these are:
#* "typed_python", a fascinating package with a variety of novel ideas at play.
# In addition to providing package-specific container types that perform
# PEP-noncompliant type-checking on item insertion *IMPLEMENTED THAT AT THE C
# LEVEL* rather than in pure Python (which is both horrible and fascinating,
# mainly because... why bother? I mean, PyPy, Nuitka, and Cython already
# exist, so why go to all that trouble to work in C rather than Python?),
# this package also offers:
# * "typed_python.Entrypoint", which looks balls-cray-cray. This is probably
# the most interesting aspect of this package, presuming it actually behaves
# as advertised, which it almost certainly doesn't. Nonetheless, it appears
# to be a bit of a cross between Nuitka and beartype. To quote:
# "Simply stick the @typed_python.Entrypoint decorator around any function
# that uses "typed_python" primitives to get a fast version of it:
# @Entrypoint
# def sum(someList, zero):
# for x in someList:
# zero += x
# return x
# ...will generate specialized code for different data types
# ("ListOf(int)", say, or "ListOf(float)", or even "Dict(int)") that's not
# only many times faster than the python equivalent, but that can operate
# using multiple processors. Compilation occurs each time you call the
# method with a new combination of types." The "that can operate using
# multiple processors" part is particularly novel, as it implies
# circumvention of the GIL. "typed_python" appears to implement this magic
# by leveraging LLVM to compile Python down to C. Again, we strongly doubt
# any of this actually works under real-world industrial constraints, but
# it's still a fascinating thought experiment.
# * "type_python.Class", a generic-style class one subclasses to generate
# "strongly typed class with a packed memory layout." The "strongly typed"
# part isn't terribly interesting, as it's PEP-noncompliant. The "packed
# memory layout" part, however, *IS* interesting. Reducing space consumption
# by presumably compiling to C is intriguing, if tangential to our concerns.
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype decorator PEP-compliant code wrapper scope utilities** (i.e.,
functions handling the possibly nested lexical scopes enclosing wrapper
functions generated by the :func:`beartype.beartype` decorator).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeDecorHintNonpepException
from beartype._cave._cavemap import NoneTypeOr
from beartype._data.datatyping import LexicalScope
from beartype._decor._cache.cachetype import (
TYPISTRY_HINT_NAME_TUPLE_PREFIX,
bear_typistry,
register_typistry_forwardref,
)
from beartype._check.checkmagic import ARG_NAME_TYPISTRY
from beartype._check.expr._exprsnip import (
PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_PREFIX,
PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_SUFFIX,
)
from beartype._util.cls.pep.utilpep3119 import die_unless_type_isinstanceable
from beartype._util.cls.utilclstest import is_type_builtin
from beartype._util.func.utilfuncscope import add_func_scope_attr
from beartype._util.hint.nonpep.utilnonpeptest import (
die_unless_hint_nonpep_type)
from beartype._util.hint.pep.proposal.pep484585.utilpep484585ref import (
HINT_PEP484585_FORWARDREF_UNION,
die_unless_hint_pep484585_forwardref,
get_hint_pep484585_forwardref_classname,
)
from beartype._util.utilobject import get_object_type_basename
from beartype._data.datatyping import (
TypeOrTupleTypes,
TupleTypes,
)
from collections.abc import Set
from typing import AbstractSet, Optional, Tuple, Union
# ....................{ PRIVATE }....................
_SET_OR_TUPLE = (Set, tuple)
'''
2-tuple containing the superclasses of all frozen sets and tuples.
Note that the :class:`Set` abstract base class (ABC) rather than the concrete
:class:`set` subclass is intentionally listed here, as the concrete
:class:`frozenset` subclass subclasses the former but *not* latter: e.g.,
.. code-block:: python
>>> from collections.abc import Set
>>> issubclass(frozenset, Set)
True
>>> issubclass(frozenset, set)
False
'''
_HINT_TYPES_SET_OR_TUPLE = Union[AbstractSet[type], TupleTypes]
'''
PEP-compliant type hint matching a set *or* tuple of zero or more classes.
'''
# ....................{ ADDERS ~ type }....................
#FIXME: Unit test us up, please.
def add_func_scope_type_or_types(
    # Mandatory parameters.
    type_or_types: TypeOrTupleTypes,
    func_scope: LexicalScope,

    # Optional parameters.
    exception_prefix: str = (
        'Globally or locally scoped class or tuple of classes '),
) -> str:
    '''
    Add a new **scoped class or tuple of classes** (i.e., new key-value pair
    of the passed dictionary mapping from the name to value of each globally
    or locally scoped attribute externally accessed elsewhere, whose key is a
    machine-readable name internally generated by this function to uniquely
    refer to the passed class or tuple of classes and whose value is that
    class or tuple) to the passed scope *and* return that name.

    This function additionally caches this tuple with the beartypistry
    singleton to reduce space consumption for tuples duplicated across the
    active Python interpreter.

    Parameters
    ----------
    type_or_types : TypeOrTupleTypes
        Arbitrary class or tuple of classes to be added to this scope.
    func_scope : LexicalScope
        Local or global scope to add this class or tuple of classes to.
    exception_prefix : str, optional
        Human-readable label prefixing the representation of this object in
        the exception message. Defaults to a general-purpose label.

    Returns
    ----------
    str
        Name of this class or tuple in this scope generated by this function.

    Raises
    ----------
    BeartypeDecorHintNonpepException
        If this hint is either:

        * Neither a class nor tuple.
        * A tuple that is empty.
    BeartypeDecorHintPep3119Exception
        If hint is:

        * A class that is *not* isinstanceable (i.e., passable as the second
          argument to the :func:`isinstance` builtin).
        * A tuple of one or more items that are *not* isinstanceable classes.
    _BeartypeUtilCallableException
        If an attribute with the same name as that internally generated by
        this adder but having a different value already exists in this scope.
        This adder uniquifies names by object identifier and should thus
        *never* generate name collisions. This exception is thus intentionally
        raised as a private rather than public exception.
    '''

    # If this hint is a single class, delegate to the type-specific adder.
    if isinstance(type_or_types, type):
        return add_func_scope_type(
            cls=type_or_types,
            func_scope=func_scope,
            exception_prefix=exception_prefix,
        )
    # Else, this hint is *NOT* a class. Delegate to the tuple-specific adder,
    # which raises an exception if this hint is *NOT* a valid tuple of
    # classes.
    return add_func_scope_types(
        types=type_or_types,
        func_scope=func_scope,
        exception_prefix=exception_prefix,
    )
def add_func_scope_type(
    # Mandatory parameters.
    cls: type,
    func_scope: LexicalScope,

    # Optional parameters.
    exception_prefix: str = 'Globally or locally scoped class ',
) -> str:
    '''
    Add a new **scoped class** (i.e., new key-value pair of the passed
    dictionary mapping from the name to value of each globally or locally
    scoped attribute externally accessed elsewhere, whose key is a
    machine-readable name internally generated by this function to uniquely
    refer to the passed class and whose value is that class) to the passed
    scope *and* return that name.

    Parameters
    ----------
    cls : type
        Arbitrary class to be added to this scope.
    func_scope : LexicalScope
        Local or global scope to add this class to.
    exception_prefix : str, optional
        Human-readable label prefixing the representation of this object in
        the exception message. Defaults to a general-purpose label.

    Returns
    ----------
    str
        Name of this class in this scope generated by this function.

    Raises
    ----------
    BeartypeDecorHintPep3119Exception
        If this class is *not* isinstanceable (i.e., passable as the second
        argument to the :func:`isinstance` builtin).
    _BeartypeUtilCallableException
        If an attribute with the same name as that internally generated by
        this adder but having a different value already exists in this scope.
        This adder uniquifies names by object identifier and should thus
        *never* generate name collisions. This exception is thus intentionally
        raised as a private rather than public exception.
    '''

    # Raise an exception unless this object is an isinstanceable class.
    die_unless_type_isinstanceable(cls=cls, exception_prefix=exception_prefix)
    # Else, this object is an isinstanceable class.

    # If this type is a builtin (i.e., globally accessible C-based type
    # requiring *no* explicit importation), this type requires *no*
    # parametrization. In this case, return the unqualified basename of this
    # type as is.
    if is_type_builtin(cls):
        return get_object_type_basename(cls)
    # Else, this type is *NOT* a builtin. Add this type to this scope and
    # return the name of the new parameter passing this type.
    return add_func_scope_attr(
        attr=cls, func_scope=func_scope, exception_prefix=exception_prefix)
def add_func_scope_types(
    # Mandatory parameters.
    types: _HINT_TYPES_SET_OR_TUPLE,
    func_scope: LexicalScope,

    # Optional parameters.
    is_unique: bool = False,
    exception_prefix: str = (
        'Globally or locally scoped set or tuple of classes '),
) -> str:
    '''
    Add a new **scoped tuple of classes** (i.e., new key-value pair of the
    passed dictionary mapping from the name to value of each globally or
    locally scoped attribute externally accessed elsewhere, whose key is a
    machine-readable name internally generated by this function to uniquely
    refer to the passed set or tuple of classes and whose value is that tuple)
    to the passed scope *and* return that machine-readable name.

    This function additionally caches this tuple with the beartypistry
    singleton to reduce space consumption for tuples duplicated across the
    active Python interpreter.

    Design
    ----------
    Unlike types, tuples are commonly dynamically constructed on-the-fly by
    various tuple factories (e.g., :attr:`beartype.cave.NoneTypeOr`,
    :attr:`typing.Optional`) and hence have no reliable fully-qualified names.
    Instead, this function caches this tuple into the beartypistry under a
    string synthesized as the unique concatenation of:

    * The magic substring :data:`TYPISTRY_HINT_NAME_TUPLE_PREFIX`. Since
      fully-qualified classnames uniquely identifying types as beartypistry
      keys are guaranteed to *never* contain this substring, this substring
      prevents collisions between tuple and type names.
    * This tuple's hash. Note that this tuple's object ID is intentionally
      *not* embedded in this string. Two tuples with the same items are
      typically different objects and thus have different object IDs, despite
      producing identical hashes: e.g.,

      >>> ('Das', 'Kapitel',) is ('Das', 'Kapitel',)
      False
      >>> id(('Das', 'Kapitel',)) == id(('Das', 'Kapitel',))
      False
      >>> hash(('Das', 'Kapitel',)) == hash(('Das', 'Kapitel',))
      True

      The exception is the empty tuple, which is a singleton and thus *always*
      has the same object ID and hash: e.g.,

      >>> () is ()
      True
      >>> id(()) == id(())
      True
      >>> hash(()) == hash(())
      True

    Identifying tuples by their hashes enables the beartypistry singleton to
    transparently cache duplicate class tuples with distinct object IDs as the
    same underlying object, reducing space consumption. While hashing tuples
    does impact time performance, the gain in space is worth the cost.

    Parameters
    ----------
    types : _HINT_TYPES_SET_OR_TUPLE
        Set or tuple of arbitrary types to be added to this scope.
    func_scope : LexicalScope
        Local or global scope to add this object to.
    is_unique : bool, optional
        ``True`` only if the caller guarantees this tuple to contain *no*
        duplicate types. This boolean is ignored if ``types`` is a set rather
        than tuple. Defaults to ``False``. If ``False``, this function assumes
        this tuple to contain duplicate types by internally:

        #. Coercing this tuple into a set, thus implicitly ignoring both
           duplicates and ordering of types in this tuple.
        #. Coercing that set back into another tuple.
        #. If these two tuples differ, the passed tuple contains one or more
           duplicates; in this case, the duplicate-free tuple is cached and
           passed.
        #. Else, the passed tuple contains no duplicates; in this case, the
           passed tuple is cached and passed.

        This boolean does *not* simply enable an edge-case optimization,
        though it certainly does that; this boolean enables callers to
        guarantee that this function caches and passes the passed tuple
        rather than a new tuple internally created by this function.
    exception_prefix : str, optional
        Human-readable label prefixing the representation of this object in
        the exception message. Defaults to a general-purpose label.

    Returns
    ----------
    str
        Name of this tuple in this scope generated by this function.

    Raises
    ----------
    BeartypeDecorHintNonpepException
        If this hint is either:

        * Neither a set nor tuple.
        * A set or tuple that is empty.
    BeartypeDecorHintPep3119Exception
        If one or more items of this hint are *not* isinstanceable classes
        (i.e., classes passable as the second argument to the
        :func:`isinstance` builtin).
    _BeartypeUtilCallableException
        If an attribute with the same name as that internally generated by
        this adder but having a different value already exists in this scope.
        This adder uniquifies names by object identifier and should thus
        *never* generate name collisions. This exception is thus intentionally
        raised as a private rather than public exception.
    '''
    # Prefer the idiomatic isinstance() test over comparing "__class__"
    # directly; both reject non-bool objects, but isinstance() is canonical.
    assert isinstance(is_unique, bool), f'{repr(is_unique)} not bool.'

    # If this object is neither a set nor tuple, raise an exception.
    if not isinstance(types, _SET_OR_TUPLE):
        raise BeartypeDecorHintNonpepException(
            f'{exception_prefix}{repr(types)} neither set nor tuple.')
    # Else, this object is either a set or tuple.
    #
    # If this collection is empty, raise an exception.
    elif not types:
        raise BeartypeDecorHintNonpepException(f'{exception_prefix}empty.')
    # Else, this collection is non-empty.

    #FIXME: *EXCEPTIONALLY INEFFICIENT.* Let's optimize this sometime, please.
    # If any item in this collection is *NOT* an isinstanceable class, raise an
    # exception.
    for cls in types:
        die_unless_hint_nonpep_type(
            hint=cls, exception_prefix=exception_prefix)
    # Else, all items of this collection are isinstanceable classes.

    # If this collection only contains one type, a tuple is superfluous.
    # Register only that type instead.
    if len(types) == 1:
        return add_func_scope_type(
            # The first and only item of this collection, accessed as either:
            # * If this collection is a tuple, that item with fast indexing.
            # * If this collection is a set, that item with slow iteration.
            cls=types[0] if isinstance(types, tuple) else next(iter(types)),
            func_scope=func_scope,
            exception_prefix=exception_prefix,
        )
    # Else, this collection contains two or more types.
    #
    # If this collection is a set (including both the concrete "set" and
    # "frozenset" types matched by the "Set" ABC), coerce this set into a
    # tuple. Sets already guarantee uniqueness, so no deduplication is needed.
    elif isinstance(types, Set):
        types = tuple(types)
    # If this collection is a tuple *AND* the caller failed to guarantee this
    # tuple to be duplicate-free, coerce this tuple into (in order):
    # * A set, thus ignoring duplicates and ordering.
    # * Back into a duplicate-free tuple.
    elif isinstance(types, tuple) and not is_unique:
        types = tuple(set(types))
    # In either case, this collection is now guaranteed to be a tuple
    # containing only duplicate-free classes.
    assert isinstance(types, tuple), (
        f'{exception_prefix}{repr(types)} not tuple.')

    # Name uniquely identifying this collection as a beartypistry key.
    tuple_types_name = f'{TYPISTRY_HINT_NAME_TUPLE_PREFIX}{hash(types)}'

    # If this tuple has *NOT* already been cached with the beartypistry
    # singleton, do so.
    if tuple_types_name not in bear_typistry:
        bear_typistry[tuple_types_name] = types
    # Else, this tuple has already been cached with the beartypistry singleton.
    # In this case, reuse the previously cached tuple.
    else:
        types = bear_typistry[tuple_types_name]

    # Return the name of a new parameter passing this tuple.
    return add_func_scope_attr(
        attr=types, func_scope=func_scope, exception_prefix=exception_prefix)
# ....................{ EXPRESSERS ~ type }....................
def express_func_scope_type_forwardref(
    # Mandatory parameters.
    forwardref: HINT_PEP484585_FORWARDREF_UNION,
    forwardrefs_class_basename: Optional[set],
    func_scope: LexicalScope,

    # Optional parameters.
    exception_prefix: str = 'Globally or locally scoped forward reference ',
) -> Tuple[str, Optional[set]]:
    '''
    Python expression evaluating to the class referred to by the passed
    :pep:`484`- or :pep:`585`-compliant **forward reference** (i.e.,
    fully-qualified or unqualified name of a class that typically has yet to
    be declared), expressed relative to the passed lexical scope.

    For a fully-qualified classname, this function also registers the
    beartypistry singleton with this scope under the key
    :attr:`beartype._check.checkmagic.ARG_NAME_TYPISTRY`, through which the
    returned expression resolves that class at call time.

    Parameters
    ----------
    forwardref : HINT_PEP484585_FORWARDREF_UNION
        Forward reference to be expressed relative to this scope.
    forwardrefs_class_basename : Optional[set]
        Either the set of unqualified classnames referred to by all relative
        forward references previously expressed relative to this scope *or*
        ``None`` if no such reference has been expressed yet.
    func_scope : LexicalScope
        Local or global scope to add this forward reference to.
    exception_prefix : str, optional
        Human-readable substring describing this forward reference in
        exception messages. Defaults to a reasonably sane string.

    Returns
    ----------
    Tuple[str, Optional[set]]
        2-tuple ``(forwardref_expr, forwardrefs_class_basename)`` where
        ``forwardref_expr`` is the expression described above and
        ``forwardrefs_class_basename`` is the passed set, possibly newly
        instantiated and/or grown by this reference's unqualified classname.

    Raises
    ----------
    BeartypeDecorHintForwardRefException
        If this forward reference is *not* actually a forward reference.
    '''
    assert isinstance(func_scope, dict), f'{repr(func_scope)} not dictionary.'
    assert isinstance(forwardrefs_class_basename, NoneTypeOr[set]), (
        f'{repr(forwardrefs_class_basename)} neither set nor "None".')

    # Raise an exception unless this object is a genuine forward reference.
    die_unless_hint_pep484585_forwardref(
        hint=forwardref, exception_prefix=exception_prefix)

    # Classname (either fully-qualified or unqualified) referred to by this
    # now-validated forward reference.
    ref_classname = get_hint_pep484585_forwardref_classname(forwardref)

    # If this classname contains *NO* "." delimiter, this classname is
    # unqualified and can only be resolved later by the caller relative to
    # the module of the currently decorated callable. In this case...
    if '.' not in ref_classname:
        # Lazily instantiate the set of relative-reference classnames.
        if forwardrefs_class_basename is None:
            forwardrefs_class_basename = set()

        # Record this unqualified classname for the caller.
        forwardrefs_class_basename.add(ref_classname)

        # Placeholder substring the caller subsequently replaces with a
        # Python expression canonicalizing this classname relative to the
        # module declaring the currently decorated callable.
        ref_expr = (
            f'{PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_PREFIX}'
            f'{ref_classname}'
            f'{PEP_CODE_HINT_FORWARDREF_UNQUALIFIED_PLACEHOLDER_SUFFIX}'
        )
    # Else, this classname is fully-qualified. In this case...
    else:
        #FIXME: Unsafe. Ideally, this assignment would be encapsulated by a
        #new add_func_scope_beartypistry() function validating that either
        #"ARG_NAME_TYPISTRY" is absent from this scope *OR* already maps to
        #the "bear_typistry" singleton -- possibly by generalizing the
        #existing add_func_scope_attr() function with an optional "attr_name"
        #parameter overriding its internally generated attribute name.

        # Expose the beartypistry singleton to this scope under the private
        # "__beartypistry" attribute name.
        func_scope[ARG_NAME_TYPISTRY] = bear_typistry

        # Expression resolving this class through that singleton.
        ref_expr = register_typistry_forwardref(ref_classname)

    # Return this expression and this possibly updated set of classnames.
    return ref_expr, forwardrefs_class_basename
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **PEP-agnostic type hint sanitizers** (i.e., high-level callables
converting type hints from one format into another, either permanently or
temporarily and either losslessly or in a lossy manner).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import Any
from beartype._check.checkcall import BeartypeCall
from beartype._check.conv.convcoerce import (
coerce_func_hint_root,
coerce_hint_any,
coerce_hint_root,
)
from beartype._check.conv.convreduce import reduce_hint
from beartype._conf.confcls import BeartypeConf
from beartype._util.cache.map.utilmapbig import CacheUnboundedStrong
from beartype._util.error.utilerror import EXCEPTION_PLACEHOLDER
from beartype._util.hint.utilhinttest import die_unless_hint
# ....................{ SANIFIERS ~ root }....................
#FIXME: Unit test us up, please.
#FIXME: Revise docstring in accordance with recent dramatic improvements.
def sanify_func_hint_root(
    # Mandatory parameters.
    hint: object,
    arg_name: str,
    bear_call: BeartypeCall,

    # Optional parameters.
    exception_prefix: str = EXCEPTION_PLACEHOLDER,
) -> object:
    '''
    PEP-compliant type hint sanified (i.e., sanitized) from the passed **root
    type hint** (i.e., possibly PEP-noncompliant type hint annotating the
    parameter or return with the passed name of the passed callable) if this
    hint is reducible *or* this hint as is otherwise (i.e., if this hint is
    irreducible).

    This sanification coerces PEP-noncompliant hints (e.g., tuple unions)
    into PEP-compliant equivalents, permanently persists that coercion in the
    ``__annotations__`` dunder dictionary of the decorated callable, and then
    temporarily reduces the coerced hint to a lower-level hint amenable to
    beartype-specific type-checking.

    Caveats
    ----------
    This function *cannot* be meaningfully memoized, since the passed type
    hint is *not* guaranteed to be cached somewhere; moreover, this function
    is only called once per parameter or return of the currently decorated
    callable, rendering memoization pointless.

    This function is intended to be called *after* all possibly
    :pep:`563`-compliant **deferred type hints** (i.e., type hints persisted
    as evaluatable strings rather than actual type hints) annotating this
    callable if any have been evaluated into actual type hints.

    Parameters
    ----------
    hint : object
        Possibly PEP-noncompliant root type hint to be sanified.
    arg_name : str
        Name of the annotated parameter, or ``"return"`` for the return.
    bear_call : BeartypeCall
        Decorated callable directly annotated by this hint.
    exception_prefix : str, optional
        Human-readable label prefixing the representation of this object in
        the exception message. Defaults to :data:`EXCEPTION_PLACEHOLDER`.

    Returns
    ----------
    object
        PEP-compliant type hint sanified from this hint, or this hint as is
        if already sanitary.

    Raises
    ----------
    BeartypeDecorHintNonpepException
        If this object is neither a PEP-noncompliant type hint *nor* a
        supported PEP-compliant type hint.
    '''
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize with the sanify_hint_root() sanitizer, please.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Since the passed hint is *NOT* necessarily PEP-compliant, coerce this
    # hint into a PEP-compliant hint (if coercible) *BEFORE* validating it.
    hint = coerce_func_hint_root(
        hint=hint,
        arg_name=arg_name,
        bear_call=bear_call,
        exception_prefix=exception_prefix,
    )

    # Permanently persist this coercion for the lifetime of the active Python
    # process by replacing the original hint in the annotations dunder
    # dictionary of the decorated callable. Coerced hints are assumed to be
    # harmful in their original form to *ALL* possible consumers (including
    # competing runtime type-checkers), justifying this permanence.
    bear_call.func_wrappee.__annotations__[arg_name] = hint

    # Raise an exception unless this coerced hint is a supported type hint.
    # Note that this call is effectively memoized and thus efficient.
    die_unless_hint(hint=hint, exception_prefix=exception_prefix)

    # Unlike the permanent coercion above, this reduction only temporarily
    # persists for the duration of the current call stack: reduced hints
    # (e.g., "numpy.typing.NDArray[...]" reduced to a beartype validator) are
    # *NOT* harmful in any sense -- merely non-trivial for @beartype to
    # support in their original form.
    #
    # Note that parameters are intentionally passed positionally to both
    # optimize memoization efficiency and circumvent memoization warnings.
    return reduce_hint(hint, bear_call.conf, exception_prefix)
#FIXME: Unit test us up, please.
#FIXME: Revise docstring in accordance with recent dramatic improvements.
def sanify_hint_root(
    hint: object,
    conf: BeartypeConf,
    exception_prefix: str,
) -> object:
    '''
    PEP-compliant type hint sanified (i.e., sanitized) from the passed **root
    type hint** (i.e., possibly PEP-noncompliant type hint that has *no*
    parent type hint) if this hint is reducible *or* this hint as is
    otherwise (i.e., if this hint is irreducible).

    This sanification coerces PEP-noncompliant hints (e.g., tuple unions)
    into PEP-compliant equivalents and then reduces the coerced hint to a
    lower-level hint amenable to beartype-specific type-checking.

    Caveats
    ----------
    This function *cannot* be meaningfully memoized, since the passed type
    hint is *not* guaranteed to be cached somewhere; moreover, this function
    is only called once per root hint, rendering memoization pointless.

    This function is intended to be called *after* all possibly
    :pep:`563`-compliant **deferred type hints** (i.e., type hints persisted
    as evaluatable strings rather than actual type hints) if any have been
    evaluated into actual type hints.

    Parameters
    ----------
    hint : object
        Possibly PEP-noncompliant root type hint to be sanified.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass
        encapsulating all settings configuring type-checking for the passed
        object).
    exception_prefix : str
        Human-readable label prefixing the representation of this object in
        the exception message.

    Returns
    ----------
    object
        PEP-compliant type hint sanified from this hint, or this hint as is
        if already sanitary.

    Raises
    ----------
    BeartypeDecorHintNonpepException
        If this object is neither a PEP-noncompliant type hint *nor* a
        supported PEP-compliant type hint.
    '''
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize with the sanify_func_hint_root() sanitizer, please.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Since the passed hint is *NOT* necessarily PEP-compliant, coerce this
    # hint into a PEP-compliant hint (if coercible) *BEFORE* validating it.
    hint = coerce_hint_root(hint=hint, exception_prefix=exception_prefix)

    # Raise an exception unless this coerced hint is a supported type hint.
    # Note that this call is effectively memoized and thus efficient.
    die_unless_hint(hint=hint, exception_prefix=exception_prefix)

    # Reduce this hint to a lower-level PEP-compliant hint if reducible *OR*
    # preserve this hint as is otherwise. See sanify_func_hint_root() for
    # further commentary.
    return reduce_hint(hint, conf, exception_prefix)
# ....................{ SANIFIERS ~ child }....................
def sanify_hint_child(
    hint: object,
    conf: BeartypeConf,
    exception_prefix: str,
) -> Any:
    '''
    PEP-compliant type hint sanified (i.e., sanitized) from the passed
    **PEP-compliant child type hint** (i.e., hint transitively subscripting
    the root type hint annotating a parameter or return of the currently
    decorated callable) if this hint is reducible *or* this hint as is
    otherwise (i.e., if this hint is *not* irreducible).

    This sanifier covers only the proper subset of logic performed by the
    :func:`sanify_hint_root` sanifier applicable to child type hints: a
    (possibly) self-caching coercion followed by a reduction. Since the
    passed hint is *NOT* necessarily PEP-compliant, the coercion is performed
    *BEFORE* the reduction.

    Parameters
    ----------
    hint : object
        PEP-compliant type hint to be sanified.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass
        encapsulating all settings configuring type-checking for the passed
        object).
    exception_prefix : str
        Human-readable label prefixing the representation of this object in
        the exception message.

    Returns
    ----------
    object
        PEP-compliant type hint sanified from this hint.
    '''
    # Coerce this child hint and then return this hint reduced.
    return reduce_hint(coerce_hint_any(hint), conf, exception_prefix)
# ....................{ PRIVATE ~ mappings }....................
_HINT_REPR_TO_HINT = CacheUnboundedStrong()
'''
**Type hint cache** (i.e., thread-safe cache mapping from the machine-readable
representations of all non-self-cached type hints to those hints).**
This cache caches:
* :pep:`585`-compliant type hints, which do *not* cache themselves.
This cache does *not* cache:
* Type hints declared by the :mod:`typing` module, which implicitly cache
themselves on subscription thanks to inscrutable metaclass magic.
* :pep:`563`-compliant **deferred type hints** (i.e., type hints persisted as
evaluable strings rather than actual type hints). Ideally, this cache would
cache the evaluations of *all* deferred type hints. Sadly, doing so is
infeasible in the general case due to global and local namespace lookups
(e.g., ``Dict[str, int]`` only means what you think it means if an
importation resembling ``from typing import Dict`` preceded that type hint).
Design
--------------
**This dictionary is intentionally thread-safe.** Why? Because this dictionary
is used to modify the ``__attributes__`` dunder variable of arbitrary callables.
Since most of those callables are either module- or class-scoped, that variable
is effectively global. To prevent race conditions between competing threads
contending over that global variable, this dictionary *must* be thread-safe.
This dictionary is intentionally designed as a naive dictionary rather than a
robust LRU cache, for the same reasons that callables accepting hints are
memoized by the :func:`beartype._util.cache.utilcachecall.callable_cached`
rather than the :func:`functools.lru_cache` decorator. Why? Because:
* The number of different type hints instantiated across even worst-case
codebases is negligible in comparison to the space consumed by those hints.
* The :attr:`sys.modules` dictionary persists strong references to all
callables declared by previously imported modules. In turn, the
``func.__annotations__`` dunder dictionary of each such callable persists
strong references to all type hints annotating that callable. In turn, these
two statements imply that type hints are *never* garbage collected but
instead persisted for the lifetime of the active Python process. Ergo,
temporarily caching hints in an LRU cache is pointless, as there are *no*
space savings in dropping stale references to unused hints.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **PEP-agnostic type hint reducers** (i.e., low-level callables
converting type hints from one format into another, either permanently or
temporarily and either losslessly or in a lossy manner).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import Any
from beartype._cave._cavefast import NoneType
from beartype._conf.confcls import (
BEARTYPE_CONF_DEFAULT,
BeartypeConf,
)
from beartype._data.datatyping import (
Pep484TowerComplex,
Pep484TowerFloat,
)
from beartype._data.hint.pep.sign.datapepsigns import (
HintSignAnnotated,
HintSignDataclassInitVar,
HintSignNewType,
HintSignNumpyArray,
HintSignType,
HintSignTypeVar,
HintSignTypedDict,
)
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.hint.pep.proposal.utilpep544 import (
is_hint_pep484_generic_io,
reduce_hint_pep484_generic_io_to_pep544_protocol,
)
from beartype._util.hint.pep.proposal.utilpep557 import (
get_hint_pep557_initvar_arg)
from beartype._util.hint.pep.utilpepget import get_hint_pep_sign_or_none
from collections.abc import Mapping
# ....................{ REDUCERS }....................
#FIXME: Improve documentation to list all reductions performed by this reducer.
#Sadly, this documentation is currently quite out-of-date. What? It happens!
@callable_cached
def reduce_hint(
    hint: Any,
    conf: BeartypeConf,
    exception_prefix: str,
) -> object:
    '''
    Lower-level type hint reduced (i.e., converted) from the passed higher-level
    type hint if this hint is reducible *or* this hint as is otherwise (i.e., if
    this hint is irreducible).
    Specifically, if the passed hint is:
    * *Not* PEP-compliant, this hint is returned as is unmodified.
    * PEP 593-compliant (i.e., :class:`typing.Annotated`) but beartype-agnostic
      (i.e., its second argument is *not* an instance of the
      :class:`beartype.vale._core._valecore.BeartypeValidator` class produced by
      subscripting the :class:`beartype.vale.Is` class), this hint is reduced
      to the first argument subscripting this hint. Doing so ignores *all*
      irrelevant annotations on this hint (e.g., reducing
      ``typing.Annotated[str, 50, False]`` to simply ``str``).
    This function is memoized for efficiency.
    Parameters
    ----------
    hint : Any
        Type hint to be possibly reduced.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object).
    exception_prefix : str
        Human-readable label prefixing the representation of this object in the
        exception message.
    Returns
    ----------
    object
        Either:
        * If the passed higher-level type hint is reducible, a lower-level type
          hint reduced (i.e., converted, extracted) from this hint.
        * Else, this hint as is unmodified.
    Raises
    ----------
    BeartypeDecorHintNonpepNumpyException
        See the
        :func:`beartype._util.hint.pep.mod.utilmodnumpy.reduce_hint_numpy_ndarray`
        function for further details.
    '''
    assert isinstance(conf, BeartypeConf), f'{repr(conf)} not configuration.'
    # NOTE: Since this function is memoized by @callable_cached, callers
    # intentionally pass all parameters positionally to optimize memoization
    # efficiency and circumvent memoization warnings (see callers elsewhere).
    # Sign uniquely identifying this hint if this hint is identifiable *OR*
    # "None" otherwise.
    hint_sign = get_hint_pep_sign_or_none(hint)
    # This reduction is intentionally implemented as a linear series of tests,
    # ordered in descending likelihood of a match for efficiency. While
    # alternatives (that are more readily readable and maintainable) do exist,
    # these alternatives all appear to be substantially less efficient.
    #
    # CAUTION: The ordering of these branches is semantically significant as
    # well as performance-motivated; reorder with extreme care.
    # ..................{ NON-PEP                            }..................
    # If this hint is unidentifiable...
    #
    # Since this includes *ALL* isinstanceable classes (including both
    # user-defined classes and builtin types), this is *ALWAYS* detected first.
    if hint_sign is None:
        # If...
        if (
            # This configuration enables support for the PEP 484-compliant
            # implicit numeric tower *AND*...
            conf.is_pep484_tower and
            # This hint is either the builtin "float" or "complex" classes
            # governed by this tower...
            (hint is float or hint is complex)
        # Then expand this hint to the corresponding numeric tower.
        ):
            # Expand this hint to match...
            hint = (
                # If this hint is the builtin "float" class, both the builtin
                # "float" and "int" classes;
                Pep484TowerFloat
                if hint is float else
                # Else, this hint is the builtin "complex" class by the above
                # condition; in this case, the builtin "complex", "float", and
                # "int" classes.
                Pep484TowerComplex
            )
        # Else, this hint is truly unidentifiable.
        # Return this hint as is unmodified. Note that this early return
        # short-circuits *ALL* subsequent sign-based tests below.
        return hint
    # ..................{ PEP 484                            }..................
    # If this is the PEP 484-compliant "None" singleton, reduce this hint to
    # the type of that singleton. While *NOT* explicitly defined by the
    # "typing" module, PEP 484 explicitly supports this singleton:
    #     When used in a type hint, the expression None is considered
    #     equivalent to type(None).
    #
    # The "None" singleton is used to type callables lacking an explicit
    # "return" statement and thus absurdly common. Ergo, detect this early.
    elif hint is None:
        hint = NoneType
    #FIXME: Remove this branch *AFTER* deeply type-checking type variables.
    # If this is a PEP 484-compliant type variable...
    #
    # Type variables are excruciatingly common and thus detected very early.
    elif hint_sign is HintSignTypeVar:
        # Avoid circular import dependencies.
        from beartype._util.hint.pep.proposal.pep484.utilpep484typevar import (
            get_hint_pep484_typevar_bound_or_none)
        # PEP-compliant type hint synthesized from all bounded constraints
        # parametrizing this type variable if any *OR* "None" otherwise.
        hint_bound = get_hint_pep484_typevar_bound_or_none(hint)
        # print(f'Reducing PEP 484 type variable {repr(hint)} to {repr(hint_bound)}...')
        # If this type variable was parametrized by one or more bounded
        # constraints, reduce this hint to these constraints.
        if hint_bound is not None:
            # print(f'Reducing non-beartype PEP 593 type hint {repr(hint)}...')
            hint = hint_bound
        # Else, this type variable was parametrized by no bounded constraints.
        # In this case, this hint is preserved as is.
    # ..................{ PEP 593                            }..................
    # If this hint is a PEP 593-compliant metahint...
    #
    # Since metahints form the core backbone of our beartype-specific data
    # validation API, metahints are extremely common and thus detected early.
    elif hint_sign is HintSignAnnotated:
        # Avoid circular import dependencies.
        from beartype._util.hint.pep.proposal.utilpep593 import (
            get_hint_pep593_metahint,
            is_hint_pep593_beartype,
        )
        # If this metahint is beartype-agnostic and thus irrelevant to us,
        # ignore all annotations on this hint by reducing this hint to the
        # lower-level hint it annotates.
        if not is_hint_pep593_beartype(hint):
            # print(f'Reducing non-beartype PEP 593 type hint {repr(hint)}...')
            hint = get_hint_pep593_metahint(hint)
        # Else, this metahint is beartype-specific. In this case, preserve
        # this hint as is for subsequent handling elsewhere.
    # ..................{ NON-PEP ~ numpy                    }..................
    # If this hint is a PEP-noncompliant typed NumPy array (e.g.,
    # "numpy.typing.NDArray[np.float64]"), reduce this hint to the equivalent
    # well-supported beartype validator.
    #
    # Typed NumPy arrays are increasingly common and thus detected early.
    elif hint_sign is HintSignNumpyArray:
        # Avoid circular import dependencies.
        from beartype._util.hint.pep.mod.utilmodnumpy import (
            reduce_hint_numpy_ndarray)
        hint = reduce_hint_numpy_ndarray(
            hint=hint, exception_prefix=exception_prefix)
    # ..................{ PEP (484|585) ~ subclass           }..................
    # If this hint is a PEP 484-compliant subclass type hint subscripted by an
    # ignorable child type hint (e.g., "object", "typing.Any"), silently ignore
    # this argument by reducing this hint to the "type" superclass. Although
    # this logic could also be performed elsewhere, doing so here simplifies
    # matters dramatically. Note that this reduction *CANNOT* be performed by
    # the is_hint_ignorable() tester, as subclass type hints subscripted by
    # ignorable child type hints are *NOT* ignorable; they're simply safely
    # reducible to the "type" superclass.
    #
    # Subclass type hints are reasonably uncommon and thus detected late.
    elif hint_sign is HintSignType:
        # Avoid circular import dependencies.
        from beartype._util.hint.pep.proposal.pep484585.utilpep484585type import (
            reduce_hint_pep484585_subclass_superclass_if_ignorable)
        hint = reduce_hint_pep484585_subclass_superclass_if_ignorable(
            hint=hint, exception_prefix=exception_prefix)
    # ..................{ PEP 589                            }..................
    #FIXME: Remove *AFTER* deeply type-checking typed dictionaries. For now,
    #shallowly type-checking such hints by reduction to untyped dictionaries
    #remains the sanest temporary work-around.
    #FIXME: The PEP 589 edict that "any TypedDict type is consistent with
    #"Mapping[str, object]" suggests that we should trivially reduce this hint
    #to "Mapping[str, object]" rather than merely "Mapping" *AFTER* we deeply
    #type-check mappings. Doing so will get us slightly deeper type-checking of
    #typed dictionaries, effectively for free. Note that:
    #* Care should be taken to ensure that the "Mapping" factory appropriate
    #  for the active Python interpreter is used. PEP 585 gonna PEP 585.
    #* We should cache "Mapping[str, object]" to a private global above rather
    #  than return a new "Mapping[str, object]" type hint on each call. Right?
    # If this hint is a PEP 589-compliant typed dictionary (i.e.,
    # "typing.TypedDict" or "typing_extensions.TypedDict" subclass), silently
    # ignore all child type hints annotating this dictionary by reducing this
    # hint to the "Mapping" superclass. Yes, "Mapping" rather than "dict". By
    # PEP 589 edict:
    #     First, any TypedDict type is consistent with Mapping[str, object].
    #
    # Typed dictionaries are largely discouraged in the typing community, due
    # to their non-standard semantics and syntax. Ergo, typed dictionaries are
    # reasonably uncommon and thus detected late.
    elif hint_sign is HintSignTypedDict:
        # Note this early return bypasses the common "return hint" statement
        # concluding this function; the reduction is total (i.e., *ALL* child
        # hints of this typed dictionary are discarded).
        return Mapping
    # ..................{ PEP 484 ~ new type                 }..................
    # If this hint is a PEP 484-compliant new type, reduce this hint to the
    # user-defined class aliased by this hint. Although this logic could also
    # be performed elsewhere, doing so here simplifies matters.
    #
    # New type hints are functionally useless for most meaningful purposes and
    # thus fairly rare in the wild. Ergo, detect these late.
    elif hint_sign is HintSignNewType:
        # Avoid circular import dependencies.
        from beartype._util.hint.pep.proposal.pep484.utilpep484newtype import (
            get_hint_pep484_newtype_class)
        hint = get_hint_pep484_newtype_class(hint)
    # ..................{ PEP 484 ~ io                       }..................
    # If this hint is a PEP 484-compliant IO generic base class *AND* the
    # active Python interpreter targets Python >= 3.8 and thus supports PEP
    # 544-compliant protocols, reduce this functionally useless hint to the
    # corresponding functionally useful beartype-specific PEP 544-compliant
    # protocol implementing this hint.
    #
    # IO generic base classes are extremely rare and thus detected even later.
    #
    # Note that PEP 484-compliant IO generic base classes are technically
    # usable under Python < 3.8 (e.g., by explicitly subclassing those classes
    # from third-party classes). Ergo, we can neither safely emit warnings nor
    # raise exceptions on visiting these classes under *ANY* Python version.
    elif is_hint_pep484_generic_io(hint):
        hint = reduce_hint_pep484_generic_io_to_pep544_protocol(
            hint=hint, exception_prefix=exception_prefix)
    # ..................{ PEP 557                            }..................
    # If this hint is a dataclass-specific initialization-only instance
    # variable (i.e., instance of the PEP 557-compliant "dataclasses.InitVar"
    # class introduced by Python 3.8.0), reduce this functionally useless hint
    # to the functionally useful child type hint subscripting this parent hint.
    #
    # "InitVar" instances are stupefyingly rare and thus detected even later.
    elif hint_sign is HintSignDataclassInitVar:
        hint = get_hint_pep557_initvar_arg(
            hint=hint, exception_prefix=exception_prefix)
    # Return this possibly reduced hint.
    return hint
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **PEP-agnostic type hint coercers** (i.e., mid-level callables
converting type hints from one format into another, either permanently or
temporarily and either losslessly or in a lossy manner).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: coerce_hint() should also rewrite unhashable hints to be hashable *IF
#FEASIBLE.* This isn't always feasible, of course (e.g., "Annotated[[]]",
#"Literal[[]]"). The one notable place where this *IS* feasible is with PEP
#585-compliant type hints subscripted by unhashable rather than hashable
#iterables, which can *ALWAYS* be safely rewritten to be hashable (e.g.,
#coercing "callable[[], None]" to "callable[(), None]").
#FIXME: coerce_hint() should also coerce PEP 544-compatible protocols *NOT*
#decorated by @typing.runtime_checkable to be decorated by that decorator, as
#such protocols are unusable at runtime. Yes, we should always try something
#*REALLY* sneaky and clever.
#
#Specifically, rather than accept "typing" nonsense verbatim, we could instead:
#* Detect PEP 544-compatible protocol type hints *NOT* decorated by
# @typing.runtime_checkable. The existing is_type_or_types_isinstanceable() tester now
# detects whether arbitrary classes are isinstanceable, so just call that.
#* Emit a non-fatal warning advising the end user to resolve this on their end.
#* Meanwhile, beartype can simply:
# * Dynamically fabricate a new PEP 544-compatible protocol decorated by
# @typing.runtime_checkable using the body of the undecorated user-defined
# protocol as its base. Indeed, simply subclassing a new subclass decorated
# by @typing.runtime_checkable from the undecorated user-defined protocol as
# its base with a noop body of "pass" should suffice.
# * Replacing all instances of the undecorated user-defined protocol with that
# decorated beartype-defined protocol in annotations. Note this would
# strongly benefit from some form of memoization or caching. Since this edge
# case should be fairly rare, even a dictionary would probably be overkill.
# Just implementing something resembling the following memoized getter
# in the "utilpep544" submodule would probably suffice:
# @callable_cached
# def get_pep544_protocol_checkable_from_protocol_uncheckable(
# protocol_uncheckable: object) -> Protocol:
# ...
#
#Checkmate, "typing". Checkmate.
# ....................{ IMPORTS }....................
from beartype.typing import (
Any,
Union,
)
from beartype._cave._cavefast import NotImplementedType
from beartype._data.func.datafunc import METHOD_NAMES_BINARY_DUNDER
from beartype._check.checkcall import BeartypeCall
from beartype._util.cache.map.utilmapbig import CacheUnboundedStrong
from beartype._util.hint.utilhinttest import is_hint_uncached
from beartype._util.hint.pep.proposal.pep484.utilpep484union import (
make_hint_pep484_union)
# ....................{ COERCERS ~ root }....................
#FIXME: Document mypy-specific coercion in the docstring as well, please.
def coerce_func_hint_root(
    hint: object,
    arg_name: str,
    bear_call: BeartypeCall,
    exception_prefix: str,
) -> object:
    '''
    Coerce the passed **root type hint** (i.e., possibly PEP-noncompliant
    type hint annotating the parameter or return with the passed name of the
    passed callable) into a PEP-compliant type hint if this hint is coercible
    *or* return this hint as is otherwise (i.e., if this hint is *not*
    coercible).

    This function is intentionally *not* memoized (e.g., by the
    :func:`callable_cached` decorator). Since the hint returned by this
    function conditionally depends upon the passed callable, memoizing this
    function would consume space needlessly with *no* useful benefit.

    Caveats
    ----------
    This function *cannot* be meaningfully memoized, since the passed type
    hint is *not* guaranteed to be cached somewhere. Only functions passed
    cached type hints can be meaningfully memoized. Since this high-level
    function internally defers to unmemoized low-level functions that are
    ``O(n)`` in ``n`` the size of the inheritance hierarchy of this hint,
    this function should be called sparingly. See the
    :mod:`beartype._decor._cache.cachehint` submodule for further details.

    Parameters
    ----------
    hint : object
        Possibly PEP-noncompliant type hint to be possibly coerced.
    arg_name : str
        Either:

        * If this hint annotates a parameter of that callable, the name of
          that parameter.
        * If this hint annotates the return of that callable, ``"return"``.
    bear_call : BeartypeCall
        Decorated callable annotated by this hint.
    exception_prefix : str
        Human-readable label prefixing the representation of this object in
        the exception message.

    Returns
    ----------
    object
        Either:

        * If this possibly PEP-noncompliant hint is coercible, a
          PEP-compliant type hint coerced from this hint.
        * Else, this hint as is unmodified.
    '''
    assert isinstance(arg_name, str), f'{arg_name} not string.'
    assert bear_call.__class__ is BeartypeCall, (
        f'{repr(bear_call)} not @beartype call.')

    # ..................{ MYPY                              }..................
    # True only if this hint annotates the return of a binary dunder method
    # (e.g., __eq__(), __or__()) on the decorated callable.
    hint_is_binary_dunder_return = (
        arg_name == 'return' and
        bear_call.func_wrapper_name in METHOD_NAMES_BINARY_DUNDER
    )

    # If this hint annotates the return of a binary dunder method, expand
    # this hint to additionally accept the "NotImplemented" singleton as a
    # valid return from that method. Mypy codified this expansion as a
    # de-facto typing standard (albeit one currently lacking formal PEP
    # standardization): binary dunder methods conventionally return
    # "NotImplemented" to signal an unsupported operand type, yet annotating
    # that return explicitly is infeasible in practice. Static type checkers
    # currently reject "type(NotImplemented)" in annotations, and the
    # original standard library declared no "types.NotImplementedType"
    # global comparable to "types.NoneType". Rather than expect users to
    # confront these idiosyncrasies merely to annotate an otherwise normal
    # binary dunder method, perform the same silent expansion mypy does.
    if hint_is_binary_dunder_return:
        return Union[hint, NotImplementedType]  # pyright: ignore[reportGeneralTypeIssues]
    # Else, this hint requires *NO* function-specific coercion.

    # Defer to the function-agnostic root hint coercer as a generic fallback.
    return coerce_hint_root(hint=hint, exception_prefix=exception_prefix)
def coerce_hint_root(hint: object, exception_prefix: str) -> object:
    '''
    Coerce the passed **root type hint** (i.e., possibly PEP-noncompliant
    type hint that has *no* parent type hint) into a PEP-compliant type hint
    if this hint is coercible *or* return this hint as is otherwise (i.e., if
    this hint is *not* coercible).

    Specifically, if the passed hint is a **PEP-noncompliant tuple union**
    (i.e., tuple of one or more standard classes and forward references to
    standard classes), this function coerces this tuple union into the
    equivalent :pep:`484`-compliant union and returns that union.

    This function is intentionally *not* memoized (e.g., by the
    :func:`callable_cached` decorator). See caveats that follow.

    Caveats
    ----------
    This function *cannot* be meaningfully memoized, since the passed type
    hint is *not* guaranteed to be cached somewhere. Only functions passed
    cached type hints can be meaningfully memoized. Since this high-level
    function internally defers to unmemoized low-level functions that are
    ``O(n)`` for ``n`` the size of the inheritance hierarchy of this hint,
    this function should be called sparingly. See the
    :mod:`beartype._decor._cache.cachehint` submodule for further details.

    Parameters
    ----------
    hint : object
        Possibly PEP-noncompliant type hint to be possibly coerced.
    exception_prefix : str
        Human-readable label prefixing the representation of this object in
        the exception message.

    Returns
    ----------
    object
        Either:

        * If this possibly PEP-noncompliant hint is coercible, a
          PEP-compliant type hint coerced from this hint.
        * Else, this hint as is unmodified.
    '''
    # ..................{ NON-PEP                           }..................
    # PEP-compliant unions are, by definition, a superset of PEP-noncompliant
    # tuple unions: they accept all child hints accepted by the latter. Ergo,
    # lifting a tuple union into the equivalent PEP 484 union subscripted by
    # the same child hints preserves semantics.
    if isinstance(hint, tuple):
        return make_hint_pep484_union(hint)
    # Else, this hint is *NOT* a PEP-noncompliant tuple union.

    # Although *NO* root-specific coercion applied above, this hint may still
    # be generically coercible as a hint irrespective of its contextual
    # position relative to other type hints. Defer to the context-agnostic
    # coercer for this possibility.
    return coerce_hint_any(hint)
# ....................{ COERCERS ~ any }....................
def coerce_hint_any(hint: object) -> Any:
    '''
    PEP-compliant type hint coerced (i.e., converted) from the passed
    PEP-compliant type hint if this hint is coercible *or* this hint as is
    otherwise (i.e., if this hint is *not* coercible).

    Specifically, if the passed hint is:

    * A **PEP-compliant uncached type hint** (i.e., hint *not* already
      internally cached by its parent class or module), this function:

      * If this hint has already been passed to a prior call of this
        function, returns the semantically equivalent PEP-compliant type
        hint having the same machine-readable representation as this hint
        cached by that call. Doing so deduplicates this hint, which both:

        * Minimizes space complexity across the lifetime of this process.
        * Minimizes time complexity by enabling beartype-specific memoized
          callables to efficiently reduce to constant-time lookup operations
          when repeatedly passed copies of this hint nonetheless sharing the
          same machine-readable representation.

      * Else, internally caches this hint with a thread-safe global cache
        and returns this hint as is.

      Uncached hints include:

      * :pep:`484`-compliant subscripted generics under Python >= 3.9 (e.g.,
        ``from typing import List; class MuhPep484List(List): pass;
        MuhPep484List[int]``). See below for further commentary.
      * :pep:`585`-compliant type hints, including both:

        * Builtin :pep:`585`-compliant type hints (e.g., ``list[int]``).
        * User-defined :pep:`585`-compliant generics (e.g.,
          ``class MuhPep585List(list): pass; MuhPep585List[int]``).

    * Already cached, this hint is already PEP-compliant by definition. In
      this case, this function preserves and returns this hint as is.

    This function is intentionally *not* memoized (e.g., by the
    :func:`callable_cached` decorator). See caveats that follow.

    Design
    ------
    This function does *not* bother caching **self-caching type hints**
    (i.e., type hints that externally cache themselves), as these hints are
    already cached elsewhere. Self-cached type hints include most type hints
    created by subscripting type hint factories declared by the :mod:`typing`
    module, which internally cache their resulting type hints: e.g.,

    .. code-block:: python

       >>> import typing
       >>> typing.List[int] is typing.List[int]
       True

    Equivalently, this function *only* caches **uncached type hints** (i.e.,
    type hints that do *not* externally cache themselves), as these hints
    are *not* already cached elsewhere. Uncached type hints include *all*
    :pep:`585`-compliant type hints produced by subscripting builtin
    container types, which fail to internally cache their resulting type
    hints: e.g.,

    .. code-block:: python

       >>> list[int] is list[int]
       False

    This function enables callers to coerce uncached type hints into
    :mod:`beartype`-cached type hints. :mod:`beartype` effectively requires
    *all* type hints to be cached somewhere before being passed to utility
    functions in the :mod:`beartype` codebase. Why? Because most such
    utility functions are memoized for efficiency by the
    :func:`beartype._util.cache.utilcachecall.callable_cached` decorator,
    which maps passed parameters (typically including the standard ``hint``
    parameter accepting a type hint) based on object identity to previously
    cached return values. Uncached type hints that are otherwise
    semantically equal are nonetheless distinct objects and would thus be
    treated as distinct parameters by memoization decorators, dramatically
    reducing the efficiency of :mod:`beartype` for standard type hints.

    Caveats
    ----------
    This function *cannot* be meaningfully memoized, since the passed type
    hint is *not* guaranteed to be cached somewhere. Only functions passed
    cached type hints can be meaningfully memoized. Since this high-level
    function internally defers to unmemoized low-level functions that are
    ``O(n)`` for ``n`` the size of the inheritance hierarchy of this hint,
    this function should be called sparingly.

    This function intentionally does *not* cache :pep:`484`-compliant
    generics subscripted by type variables under Python < 3.9. Those hints
    are technically uncached but silently treated by this function as
    self-cached and thus preserved as is. Why? Because correctly detecting
    those hints as uncached would require an unmemoized ``O(n)`` search
    across the inheritance hierarchy of *all* passed objects and thus all
    type hints annotating callables decorated by :func:`beartype.beartype`.
    Since this failure only affects obsolete Python versions *and* since the
    only harms induced by this failure are a slight increase in space and
    time consumption for edge-case type hints unlikely to actually be used
    in real-world code, this tradeoff is more than acceptable.

    Parameters
    ----------
    hint : object
        Type hint to be possibly coerced.

    Returns
    ----------
    object
        Either:

        * If this PEP-compliant type hint is coercible, another
          PEP-compliant type hint coerced from this hint.
        * Else, this hint as is unmodified.
    '''
    # ..................{ NON-SELF-CACHING                  }..................
    # If this hint is *NOT* self-caching, this hint *MUST* thus be explicitly
    # cached here. Failing to do so would disable subsequent memoization,
    # reducing decoration- and call-time efficiency when decorating callables
    # repeatedly annotated by copies of this hint.
    #
    # Specifically, deduplicate this hint by either:
    # * If this is the first copy of this hint passed to this function,
    #   caching this hint under its machine-readable representation.
    # * Else, one or more prior copies of this hint have already been passed
    #   to this function. In this case, replace this subsequent copy by the
    #   first copy of this hint originally passed to a prior call.
    if is_hint_uncached(hint):
        return _HINT_REPR_TO_SINGLETON.cache_or_get_cached_value(
            key=repr(hint), value=hint)
    # Else, this hint is (hopefully) self-caching.

    # Return this uncoerced hint as is.
    return hint
# ....................{ PRIVATE ~ mappings }....................
_HINT_REPR_TO_SINGLETON = CacheUnboundedStrong()
'''
**Type hint cache** (i.e., thread-safe cache mapping from the machine-readable
representations of all non-self-cached type hints to cached singleton instances
of those hints).**
This cache caches:
* :pep:`585`-compliant type hints, which do *not* cache themselves.
* :pep:`604`-compliant unions, which do *not* cache themselves.
This cache does *not* cache:
* Type hints declared by the :mod:`typing` module, which implicitly cache
themselves on subscription thanks to inscrutable metaclass magic.
* :pep:`563`-compliant **deferred type hints** (i.e., type hints persisted as
evaluable strings rather than actual type hints). Ideally, this cache would
cache the evaluations of *all* deferred type hints. Sadly, doing so is
infeasible in the general case due to global and local namespace lookups
(e.g., ``Dict[str, int]`` only means what you think it means if an
importation resembling ``from typing import Dict`` preceded that type hint).
Design
--------------
**This dictionary is intentionally thread-safe.** Why? Because this dictionary
is used to modify the ``__attributes__`` dunder variable of arbitrary callables.
Since most such callables are either module- or class-scoped, that variable is
effectively global. To prevent race conditions between competing threads
contending over that variable, this dictionary *must* be thread-safe.
**This dictionary is intentionally designed as a naive dictionary rather than a
robust LRU cache,** for the same reasons that callables accepting hints are
memoized by the :func:`beartype._util.cache.utilcachecall.callable_cached`
rather than the :func:`functools.lru_cache` decorator. Why? Because:
* The number of different type hints instantiated across even worst-case
codebases is negligible in comparison to the space consumed by those hints.
* The :attr:`sys.modules` dictionary persists strong references to all
callables declared by previously imported modules. In turn, the
``func.__annotations__`` dunder dictionary of each such callable persists
strong references to all type hints annotating that callable. In turn, these
two statements imply that type hints are *never* garbage collected but
instead persisted for the lifetime of the active Python process. Ergo,
temporarily caching hints in an LRU cache is pointless, as there are *no*
space savings in dropping stale references to unused hints.
**This dictionary intentionally caches machine-readable representation strings
hashes rather than alternative keys** (e.g., actual hashes). Why? Disambiguity.
Although comparatively less efficient in both space and time to construct than
hashes, the :func:`repr` strings produced for two dissimilar type hints *never*
ambiguously collide unless an external caller maliciously modified one or more
identifying dunder attributes of those hints (e.g., the ``__module__``,
``__qualname__``, and/or ``__name__`` dunder attributes). That should *never*
occur in production code. Meanwhile, the :func:`hash` values produced for two
dissimilar type hints *commonly* ambiguously collide. This is why hashable
containers (e.g., :class:`dict`, :class:`set`) explicitly handle hash table
collisions and why we are *not* going to do so.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :mod:`typing` **compatibility layer.**
This submodule declares the exact same set of **public typing attributes**
(i.e., module-scoped attributes listed by the :attr:`typing.__all__` global) as
declared by the :mod:`typing` module for your current Python version. Although
the attributes declared by this submodule *mostly* share the same values as
the attributes declared by :mod:`typing`, notable differences include:
* :pep:`585`-deprecated typing attributes. :pep:`585` deprecated **38 public
typing attributes** to "...be removed from the typing module in the first
Python version released 5 years after the release of Python 3.9.0." This
submodule preserves those attributes under their original names for the
Python 3.8-specific version of the :mod:`typing` module, thus preserving
forward compatibility with future Python versions. These include:
* :attr:`typing.AbstractSet`.
* :attr:`typing.AsyncContextManager`.
* :attr:`typing.AsyncGenerator`.
* :attr:`typing.AsyncIterable`.
* :attr:`typing.AsyncIterator`.
* :attr:`typing.Awaitable`.
* :attr:`typing.ByteString`.
* :attr:`typing.Callable`.
* :attr:`typing.ChainMap`.
* :attr:`typing.Collection`.
* :attr:`typing.Container`.
* :attr:`typing.ContextManager`.
* :attr:`typing.Coroutine`.
* :attr:`typing.Counter`.
* :attr:`typing.DefaultDict`.
* :attr:`typing.Deque`.
* :attr:`typing.Dict`.
* :attr:`typing.FrozenSet`.
* :attr:`typing.Generator`.
* :attr:`typing.ItemsView`.
* :attr:`typing.Iterable`.
* :attr:`typing.Iterator`.
* :attr:`typing.KeysView`.
* :attr:`typing.List`.
* :attr:`typing.Mapping`.
* :attr:`typing.MappingView`.
* :attr:`typing.Match`.
* :attr:`typing.MutableMapping`.
* :attr:`typing.MutableSequence`.
* :attr:`typing.MutableSet`.
* :attr:`typing.OrderedDict`.
* :attr:`typing.Pattern`.
* :attr:`typing.Reversible`.
* :attr:`typing.Set`.
* :attr:`typing.Tuple`.
* :attr:`typing.Type`.
* :attr:`typing.Sequence`.
* :attr:`typing.ValuesView`.
Usage
----------
:mod:`beartype` users are strongly encouraged to import typing attributes from
this submodule rather than from :mod:`typing` directly: e.g.,
.. code-block:: python
# Instead of this...
from typing import Tuple, List, Dict, Set, FrozenSet, Type
# ...always do this.
from beartype.typing import Tuple, List, Dict, Set, FrozenSet, Type
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# *NOT* intended for public importation should be locally imported at module
# scope *ONLY* under alternate private names (e.g., "import re as _re" rather
# than merely "from re").
# WARNING: To preserve PEP 561 compliance with static type checkers (e.g.,
# mypy), external attributes *MUST* be explicitly imported with standard static
# import machinery rather than non-standard dynamic import shenanigans (e.g.,
# "from typing import Annotated" rather than
# "import_typing_attr_or_none('Annotated')").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype._util.py.utilpyversion import (
IS_PYTHON_AT_LEAST_3_11 as _IS_PYTHON_AT_LEAST_3_11,
IS_PYTHON_AT_LEAST_3_10 as _IS_PYTHON_AT_LEAST_3_10,
IS_PYTHON_AT_LEAST_3_9 as _IS_PYTHON_AT_LEAST_3_9,
IS_PYTHON_AT_LEAST_3_8 as _IS_PYTHON_AT_LEAST_3_8,
IS_PYTHON_AT_LEAST_3_7_2 as _IS_PYTHON_AT_LEAST_3_7_2,
)
# ....................{ IMPORTS ~ all }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To prevent "mypy --no-implicit-reexport" from raising literally
# hundreds of errors at static analysis time, *ALL* public attributes *MUST* be
# explicitly reimported under the same names with "{exception_name} as
# {exception_name}" syntax rather than merely "{exception_name}". Yes, this is
# ludicrous. Yes, this is mypy. For posterity, these failures resemble:
# beartype/_cave/_cavefast.py:47: error: Module "beartype.roar" does not
# explicitly export attribute "BeartypeCallUnavailableTypeException";
# implicit reexport disabled [attr-defined]
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Import all public attributes of the "typing" module both available under all
# supported Python versions and *NOT* deprecated by a subsequent Python version
# under their original names.
from typing import (
TYPE_CHECKING as TYPE_CHECKING,
Any as Any,
AnyStr as AnyStr,
NewType as NewType,
Text as Text,
BinaryIO as BinaryIO,
ClassVar as ClassVar,
ForwardRef as ForwardRef,
Generic as Generic,
Hashable as Hashable,
IO as IO,
Match as Match,
NamedTuple as NamedTuple,
NoReturn as NoReturn,
Optional as Optional,
Pattern as Pattern,
Sized as Sized,
TextIO as TextIO,
TypeVar as TypeVar,
Union as Union,
cast as cast,
get_type_hints as get_type_hints,
no_type_check as no_type_check,
no_type_check_decorator as no_type_check_decorator,
overload as overload,
)
# ....................{ IMPORTS ~ version }....................
# Import all public attributes of the "typing" module both available under a
# subset of supported Python versions and *NOT* deprecated by a subsequent
# Python version under their original names.
# If the active Python interpreter targets Python >= 3.8...
if _IS_PYTHON_AT_LEAST_3_8:
from typing import ( # type: ignore[attr-defined]
Final as Final, # pyright: ignore[reportGeneralTypeIssues]
Literal as Literal, # pyright: ignore[reportGeneralTypeIssues]
Reversible as Reversible, # pyright: ignore[reportGeneralTypeIssues]
SupportsIndex as SupportsIndex, # pyright: ignore[reportGeneralTypeIssues]
TypedDict as TypedDict, # pyright: ignore[reportGeneralTypeIssues]
final as final, # pyright: ignore[reportGeneralTypeIssues]
get_args as get_args, # pyright: ignore[reportGeneralTypeIssues]
get_origin as get_origin, # pyright: ignore[reportGeneralTypeIssues]
)
# If the active Python interpreter targets Python >= 3.10...
if _IS_PYTHON_AT_LEAST_3_10:
from typing import ( # type: ignore[attr-defined]
Concatenate as Concatenate, # pyright: ignore[reportGeneralTypeIssues]
ParamSpec as ParamSpec, # pyright: ignore[reportGeneralTypeIssues]
ParamSpecArgs as ParamSpecArgs, # pyright: ignore[reportGeneralTypeIssues]
ParamSpecKwargs as ParamSpecKwargs, # pyright: ignore[reportGeneralTypeIssues]
TypeAlias as TypeAlias, # pyright: ignore[reportGeneralTypeIssues]
TypeGuard as TypeGuard, # pyright: ignore[reportGeneralTypeIssues]
is_typeddict as is_typeddict, # pyright: ignore[reportGeneralTypeIssues]
)
# If the active Python interpreter targets Python >= 3.11, import all public
# attributes of the "typing" module introduced by Python 3.11 under their
# original names. Note that "reveal_type" is intentionally imported exactly
# once here; a prior revision redundantly imported it twice.
if _IS_PYTHON_AT_LEAST_3_11:
    from typing import (  # type: ignore[attr-defined]
        LiteralString as LiteralString,  # pyright: ignore[reportGeneralTypeIssues]
        Never as Never,  # pyright: ignore[reportGeneralTypeIssues]
        NotRequired as NotRequired,  # pyright: ignore[reportGeneralTypeIssues]
        Required as Required,  # pyright: ignore[reportGeneralTypeIssues]
        Self as Self,  # pyright: ignore[reportGeneralTypeIssues]
        TypeVarTuple as TypeVarTuple,  # pyright: ignore[reportGeneralTypeIssues]
        Unpack as Unpack,  # pyright: ignore[reportGeneralTypeIssues]
        assert_never as assert_never,  # pyright: ignore[reportGeneralTypeIssues]
        assert_type as assert_type,  # pyright: ignore[reportGeneralTypeIssues]
        clear_overloads as clear_overloads,  # pyright: ignore[reportGeneralTypeIssues]
        dataclass_transform as dataclass_transform,  # pyright: ignore[reportGeneralTypeIssues]
        get_overloads as get_overloads,  # pyright: ignore[reportGeneralTypeIssues]
        reveal_type as reveal_type,  # pyright: ignore[reportGeneralTypeIssues]
    )
# ....................{ PEP ~ 544 }....................
# If this interpreter is either performing static type-checking (e.g., via mypy)
# *OR* targets Python < 3.8, defer to the standard library versions of the
# family of "Supports*" protocols available under Python < 3.8.
if TYPE_CHECKING or not _IS_PYTHON_AT_LEAST_3_8:
from typing import ( # type: ignore[attr-defined]
SupportsAbs as SupportsAbs,
SupportsBytes as SupportsBytes,
SupportsComplex as SupportsComplex,
SupportsFloat as SupportsFloat,
SupportsInt as SupportsInt,
SupportsRound as SupportsRound,
)
# If this interpreter targets Python >= 3.8 and thus fully supports PEP 544...
if _IS_PYTHON_AT_LEAST_3_8:
# If this interpreter is performing static type-checking, defer to the
# standard library versions of all remaining PEP 544 attributes.
if TYPE_CHECKING:
from typing import ( # type: ignore[attr-defined]
Protocol as Protocol, # pyright: ignore[reportGeneralTypeIssues]
SupportsIndex as SupportsIndex, # pyright: ignore[reportGeneralTypeIssues]
runtime_checkable as runtime_checkable, # pyright: ignore[reportGeneralTypeIssues]
)
# Else, this interpreter is *NOT* performing static type-checking. In this
# case, prefer our optimized PEP 544 attributes.
else:
from beartype.typing._typingpep544 import (
Protocol as Protocol,
SupportsAbs as SupportsAbs,
SupportsBytes as SupportsBytes,
SupportsComplex as SupportsComplex,
SupportsFloat as SupportsFloat,
SupportsIndex as SupportsIndex,
SupportsInt as SupportsInt,
SupportsRound as SupportsRound,
runtime_checkable as runtime_checkable,
)
# ....................{ PEP ~ 585 }....................
# If this interpreter is either performing static type-checking (e.g., via mypy)
# *OR* targets Python < 3.9 and thus fails to support PEP 585, import *ALL*
# public attributes of the "typing" module deprecated by PEP 585 as their
# original values.
#
# This is intentionally performed *BEFORE* the corresponding "else:" branch
# below handling the Python >= 3.9 case. Why? Because mypy. If the order of
# these two branches is reversed, mypy emits errors under Python < 3.9 when
# attempting to subscript any of the builtin types (e.g., "Tuple"): e.g.,
# error: "tuple" is not subscriptable [misc]
if TYPE_CHECKING or not _IS_PYTHON_AT_LEAST_3_9:
from typing import (
AbstractSet as AbstractSet,
AsyncContextManager as AsyncContextManager,
AsyncGenerator as AsyncGenerator,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
ChainMap as ChainMap,
Collection as Collection,
Container as Container,
ContextManager as ContextManager,
Coroutine as Coroutine,
Counter as Counter,
DefaultDict as DefaultDict,
Deque as Deque,
Dict as Dict,
FrozenSet as FrozenSet,
Generator as Generator,
ItemsView as ItemsView,
Iterable as Iterable,
Iterator as Iterator,
KeysView as KeysView,
List as List,
Mapping as Mapping,
MappingView as MappingView,
MutableMapping as MutableMapping,
MutableSequence as MutableSequence,
MutableSet as MutableSet,
Reversible as Reversible,
Set as Set,
Tuple as Tuple,
Type as Type,
Sequence as Sequence,
ValuesView as ValuesView,
)
# If the active Python interpreter targets Python >= 3.7.2, import *ALL*
# public attributes of the "typing" module introduced by Python 3.7.2
# deprecated by PEP 585 as their original values.
if _IS_PYTHON_AT_LEAST_3_7_2:
from typing import ( # type: ignore[attr-defined]
OrderedDict as OrderedDict,
)
# If the active Python interpreter targets Python >= 3.9 and thus supports PEP
# 585, alias *ALL* public attributes of the "typing" module deprecated by PEP
# 585 to their equivalent values elsewhere in the standard library.
else:
from collections import (
ChainMap as ChainMap,
Counter as Counter,
OrderedDict as OrderedDict,
defaultdict as DefaultDict,
deque as Deque,
)
from collections.abc import (
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
AsyncGenerator as AsyncGenerator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
Collection as Collection,
Container as Container,
Coroutine as Coroutine,
Generator as Generator,
ItemsView as ItemsView,
Iterable as Iterable,
Iterator as Iterator,
KeysView as KeysView,
Mapping as Mapping,
MappingView as MappingView,
MutableMapping as MutableMapping,
MutableSequence as MutableSequence,
MutableSet as MutableSet,
Reversible as Reversible,
Sequence as Sequence,
ValuesView as ValuesView,
Set as AbstractSet,
)
from contextlib import (
AbstractContextManager as ContextManager,
AbstractAsyncContextManager as AsyncContextManager,
)
from typing import ( # type: ignore[attr-defined]
Annotated,
)
Dict = dict # type: ignore[misc]
FrozenSet = frozenset # type: ignore[misc]
List = list # type: ignore[misc]
Set = set # type: ignore[misc]
Tuple = tuple # type: ignore[assignment]
Type = type # type: ignore[assignment]
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype typing callable caching** (i.e., general-purpose memoization of
function and method calls intended to be called *only* from submodules of this
subpackage) utilities.
This private submodule implements only a minimal subset of the caching
functionality implemented by the general-purpose
:mod:`beartype._util.cache.utilcachecall` submodule, from which this submodule
was originally derived. Since the latter transitively imports from the
:mod:`beartype.typing` subpackage at module scope, submodules of the
:mod:`beartype.typing` subpackage *cannot* safely import from the
:mod:`beartype._util.cache.utilcachecall` submodule at module scope. Ergo, the
existence of this submodule.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._util.py.utilpyversion import IS_PYTHON_AT_LEAST_3_9
from functools import wraps
# Note that we intentionally:
# * Avoid importing these type hint factories from "beartype.typing", as that
# would induce a circular import dependency. Instead, we manually import the
# relevant type hint factories conditionally depending on the version of the
# active Python interpreter. *sigh*
# * Test the negation of this condition first. Why? Because mypy quietly
# defecates all over itself if the order of these two branches is reversed.
# Yeah. It's as bad as it sounds.
if not IS_PYTHON_AT_LEAST_3_9:
from typing import Callable, Dict # type: ignore[misc]
# Else, the active Python interpreter targets Python >= 3.9 and thus supports
# PEP 585. In this case, embrace non-deprecated PEP 585-compliant type hints.
else:
from collections.abc import Callable
Dict = dict # type: ignore[misc]
# ....................{ CONSTANTS                          }....................
_SENTINEL = object()
'''
Sentinel object of arbitrary value, distinguishable from all possible cached
return values (including ``None``).
'''

# ....................{ DECORATORS                         }....................
def callable_cached_minimal(func: Callable) -> Callable:
    '''
    **Memoize** (i.e., efficiently cache and return all previously returned
    values of the passed callable as well as all previously raised exceptions
    of that callable rather than inefficiently recalling that callable) the
    passed callable.

    Parameters
    ----------
    func : Callable
        Callable to be memoized.

    Returns
    ----------
    Callable
        Closure wrapping this callable with memoization.

    See Also
    ----------
    :func:`beartype._util.cache.utilcachecall.callable_cached`
        Further details.
    '''
    assert callable(func), f'{repr(func)} not callable.'

    # Sentinel placeholder distinguishing "no cached return value" from a
    # cached return value of "None", localized to this closure so that this
    # decorator is self-contained.
    sentinel = _SENTINEL

    # Dictionary mapping a tuple of all flattened parameters passed to each
    # prior call of the decorated callable to the value returned by that
    # call if any (i.e., if that call did *NOT* raise an exception).
    params_flat_to_return_value: Dict[tuple, object] = {}

    # get() method of this dictionary, localized for efficiency.
    params_flat_to_return_value_get = params_flat_to_return_value.get

    # Dictionary mapping a tuple of all flattened parameters passed to each
    # prior call of the decorated callable to the exception raised by that
    # call if any (i.e., if that call raised an exception).
    params_flat_to_exception: Dict[tuple, Exception] = {}

    # get() method of this dictionary, localized for efficiency.
    params_flat_to_exception_get = params_flat_to_exception.get

    @wraps(func)
    def _callable_cached(*args):
        # Note that @wraps above already propagates the name and docstring of
        # the decorated callable onto this closure. Avoid defining an
        # f-string "docstring" here: f-strings are *NOT* valid docstrings and
        # would instead be needlessly evaluated and discarded on every call.

        # If passed only one positional argument, minimize space consumption
        # by flattening this tuple of only that argument into that argument.
        # Since tuple items are necessarily hashable, this argument is
        # necessarily hashable as well and thus permissible as a dictionary
        # key below.
        if len(args) == 1:
            params_flat = args[0]
        # Else, two or more positional arguments were passed. In this case,
        # reuse this tuple as is.
        else:
            params_flat = args

        # Attempt to...
        try:
            # Exception raised by a prior call to the decorated callable when
            # passed these parameters if any *OR* "None" otherwise (i.e., if
            # this callable either has yet to be called with these parameters
            # *OR* has but failed to raise an exception). Since "None" is
            # *NOT* a valid exception, no sentinel placeholder is required
            # for this lookup.
            #
            # Note that this call raises a "TypeError" exception if any item
            # of this flattened tuple is unhashable.
            exception = params_flat_to_exception_get(params_flat)

            # If this callable previously raised an exception when called
            # with these parameters, re-raise the same exception.
            if exception is not None:
                raise exception
            # Else, this callable either has yet to be called with these
            # parameters *OR* has but failed to raise an exception.

            # Value returned by a prior call to the decorated callable when
            # passed these parameters *OR* the sentinel placeholder otherwise
            # (i.e., if this callable has yet to be passed these parameters).
            return_value = params_flat_to_return_value_get(
                params_flat, sentinel)

            # If this callable has already been called with these parameters,
            # return the value returned by that prior call.
            if return_value is not sentinel:
                return return_value
            # Else, this callable has yet to be called with these parameters.

            # Attempt to...
            try:
                # Call this callable with these parameters and cache the
                # value returned by this call to these parameters.
                return_value = params_flat_to_return_value[params_flat] = (
                    func(*args))
            # If this call raises an exception...
            except Exception as exception:
                # Cache this exception to these parameters.
                params_flat_to_exception[params_flat] = exception

                # Re-raise this exception.
                raise exception
        # If one or more objects either passed to *OR* returned from this
        # call are unhashable, perform this call as is *WITHOUT* memoization.
        # While non-ideal, stability is better than raising a fatal exception.
        except TypeError:
            return func(*args)

        # Return this value.
        return return_value

    # Return this wrapper.
    return _callable_cached
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype** :pep:`544` **optimization layer.**
This private submodule implements a :func:`beartype.beartype`-compatible
(i.e., decorated by the :func:`typing.runtime_checkable` decorator) drop-in
replacement for :class:`typing.Protocol` that can lead to significant
performance improvements.
'''
# ....................{ IMPORTS }....................
from beartype._util.py.utilpyversion import (
IS_PYTHON_AT_LEAST_3_8,
IS_PYTHON_AT_LEAST_3_9,
)
# ....................{ PEP 544 }....................
# If the active Python interpreter targets Python >= 3.8 and thus supports PEP
# 544...
#
# This is one of those cases where one pines for a module-scope return
# statement. I seem to remember a bug/feature request about that somewhere,
# but couldn't find it after a brief search.
if IS_PYTHON_AT_LEAST_3_8:
# ..................{ IMPORTS }..................
#FIXME: The ignore[attr-defined] is for Python 3.7 because Mypy doesn't
#understand IS_PYTHON_AT_LEAST_3_8. That ignore should be removable
#when retiring PYTHON 3.7.
# Defer Python version-specific imports, including non-caching
# protocols to be overridden by caching equivalents below (and other
# requirements from various sources, depending on runtime environment).
from beartype.typing._typingcache import callable_cached_minimal
from typing import ( # type: ignore[attr-defined]
EXCLUDED_ATTRIBUTES, # pyright: ignore[reportGeneralTypeIssues]
TYPE_CHECKING,
Any,
Generic,
Protocol as _ProtocolSlow,
SupportsAbs as _SupportsAbsSlow,
SupportsBytes as _SupportsBytesSlow,
SupportsComplex as _SupportsComplexSlow,
SupportsFloat as _SupportsFloatSlow,
SupportsIndex as _SupportsIndexSlow, # pyright: ignore[reportGeneralTypeIssues]
SupportsInt as _SupportsIntSlow,
SupportsRound as _SupportsRoundSlow,
TypeVar,
runtime_checkable,
)
# Note that we intentionally:
# * Avoid importing these type hint factories from "beartype.typing", as
# that would induce a circular import dependency. Instead, we manually
# import the relevant type hint factories conditionally depending on the
# version of the active Python interpreter. *sigh*
# * Test the negation of this condition first. Why? Because mypy quietly
# defecates all over itself if the order of these two branches is
# reversed. Yeah. It's as bad as it sounds.
if not IS_PYTHON_AT_LEAST_3_9:
from typing import Dict, Tuple, Type # type: ignore[misc]
# Else, the active Python interpreter targets Python >= 3.9 and thus
# supports PEP 585. In this case, embrace non-deprecated PEP 585-compliant
# type hints.
else:
Dict = dict # type: ignore[misc]
Tuple = tuple # type: ignore[assignment]
Type = type # type: ignore[assignment]
# If the active Python interpreter was invoked by a static type checker
# (e.g., mypy), violate privacy encapsulation. Doing so invites breakage
# under newer Python releases. Confining any potential breakage to this
# technically optional static type-checking phase minimizes the fallout by
# ensuring that this API continues to behave as expected at runtime.
#
# See also this deep typing voodoo:
# https://github.com/python/mypy/issues/11614
if TYPE_CHECKING:
from abc import ABCMeta as _ProtocolMeta
# Else, this interpreter was *NOT* invoked by a static type checker and is
# thus subject to looser runtime constraints. In this case, access the same
# metaclass *WITHOUT* violating privacy encapsulation.
else:
_ProtocolMeta = type(_ProtocolSlow)
# ..................{ CONSTANTS }..................
_PROTOCOL_ATTR_NAMES_IGNORABLE = frozenset(EXCLUDED_ATTRIBUTES)
'''
Frozen set of the names all **ignorable non-protocol attributes** (i.e.,
attributes *not* considered part of the protocol of a
:class:`beartype.typing.Protocol` subclass when passing that protocol to
the :func:`isinstance` builtin in structural subtyping checks).
'''
_T_co = TypeVar("_T_co", covariant=True)
'''
Arbitrary covariant type variable.
'''
_TT = TypeVar("_TT", bound="_CachingProtocolMeta")
'''
Arbitrary type variable bound (i.e., confined) to classes.
'''
# ..................{ METACLASSES }..................
    class _CachingProtocolMeta(_ProtocolMeta):
        '''
        **Caching protocol metaclass** (i.e., drop-in replacement for the
        private metaclass of the public :class:`typing.Protocol` superclass
        that additionally caches :meth:`class.__instancecheck__` results).

        This metaclass amortizes the `non-trivial time complexity of protocol
        validation <protocol cost_>`__ to a trivial constant-time lookup.

        .. _protocol cost:
           https://github.com/python/mypy/issues/3186#issuecomment-885718629

        Caveats
        ----------
        **This metaclass will yield unpredictable results for any object with
        one or more methods not declared by the class of that object,**
        including objects whose methods are dynamically assembled at runtime.
        This metaclass is ill-suited for such "types."

        Motivation
        ----------
        By default, :class:`typing.Protocol` subclasses are constrained to only
        be checkable by static type checkers (e.g., :mod:`mypy`). Checking a
        protocol with a runtime type checker (e.g., :mod:`beartype`) requires
        explicitly decorating that protocol with the
        :func:`typing.runtime_checkable` decorator. Why? We have no idea.

        For unknown (but probably indefensible) reasons, :pep:`544` authors
        enforced this constraint with a trivial private
        :class:`typing.Protocol` boolean instance variable imposing *no* space
        or time burden set only by the optional
        :func:`typing.runtime_checkable` decorator. Since that's demonstrably
        insane, we pretend :pep:`544` authors chose wisely by unconditionally
        decorating *all* :class:`beartype.typing.Protocol` subclasses by that
        decorator.

        Technically, any non-caching :class:`typing.Protocol` subclass can be
        effectively coerced into a caching :class:`beartype.typing.Protocol`
        protocol through inheritance: e.g.,

        .. code-block:: python

           >>> from abc import abstractmethod
           >>> from typing import Protocol
           >>> from beartype.typing import _CachingProtocolMeta, runtime_checkable
           >>> @runtime_checkable
           ... class _MyProtocol(Protocol):  # plain vanilla protocol
           ...     @abstractmethod
           ...     def myfunc(self, arg: int) -> str:
           ...         pass
           >>> @runtime_checkable  # redundant, but useful for documentation
           ... class MyProtocol(
           ...     _MyProtocol,
           ...     Protocol,
           ...     metaclass=_CachingProtocolMeta,  # caching version
           ... ):
           ...     pass
           >>> class MyImplementation:
           ...     def myfunc(self, arg: int) -> str:
           ...         return str(arg * -2 + 5)
           >>> my_thing: MyProtocol = MyImplementation()
           >>> isinstance(my_thing, MyProtocol)
           True

        Pragmatically, :class:`beartype.typing.Protocol` trivially eliminates
        *all* of the above fragile boilerplate: e.g.,

        .. code-block:: python

           >>> from beartype.typing import Protocol
           >>> class MyBearProtocol(Protocol):
           ...     @abstractmethod
           ...     def myfunc(self, arg: int) -> str:
           ...         pass
           >>> my_thing: MyBearProtocol = MyImplementation()
           >>> isinstance(my_thing, MyBearProtocol)
           True
        '''

        # ................{ CLASS VARIABLES }................
        _abc_inst_check_cache: Dict[type, bool]
        '''
        :func:`isinstance` **cache** (i.e., dictionary mapping from each type
        of any object previously passed as the first parameter to the
        :func:`isinstance` builtin whose second parameter was this protocol
        onto each boolean returned by that call to that builtin).
        '''

        # ................{ DUNDERS }................
        def __new__(
            mcls: Type[_TT],  # pyright: ignore[reportSelfClsParameterName]
            name: str,
            bases: Tuple[type, ...],
            namespace: Dict[str, Any],
            **kw: Any,
        ) -> _TT:
            '''
            Create and return a new caching protocol class with the passed
            name, bases, and namespace, additionally initializing the empty
            :func:`isinstance` cache of this class and (where applicable)
            monkey-patching this class to be runtime-checkable.
            '''

            # See <https://github.com/python/mypy/issues/9282>
            cls = super().__new__(mcls, name, bases, namespace, **kw)

            # If this class is *NOT* the abstract "beartype.typing.Protocol"
            # superclass defined below...
            if name != 'Protocol':
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                # CAUTION: Synchronize this "if" conditional against the
                # standard "typing" module, which defines the exact same logic
                # in the Protocol.__init_subclass__() class method.
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                # If it is unknown whether this class is an abstract protocol
                # directly subclassing the "Protocol" superclass *OR* a concrete
                # subclass of an abstract protocol, decide which applies now.
                # Why? Because upstream performs the same logic. Since this
                # logic tests the non-transitive dunder tuple "__bases__" of all
                # *DIRECT* superclasses of this class rather than the transitive
                # dunder tuple "__mro__" of all direct and indirect superclasses
                # of this class, upstream logic erroneously detects abstract
                # fast @beartype protocols as concrete by unconditionally
                # reducing to:
                #     cls._is_protocol = False
                #
                # Why? Because "beartype.typing.Protocol" subclasses
                # "typing.Protocol", subclasses of "beartype.typing.Protocol"
                # list "beartype.typing.Protocol" rather than "typing.Protocol"
                # in their "__bases__" dunder tuple. Disaster, thy name is
                # "typing"!
                if not cls.__dict__.get('_is_protocol'):
                    # print(f'Protocol {cls} bases: {cls.__bases__}')
                    cls._is_protocol = any(b is Protocol for b in cls.__bases__)  # type: ignore[attr-defined]

                # If this protocol is concrete rather than abstract,
                # monkey-patch this concrete protocol to be implicitly
                # type-checkable at runtime. By default, protocols are *NOT*
                # type-checkable at runtime unless explicitly decorated by this
                # nonsensical decorator.
                #
                # Note that the abstract "beartype.typing.Protocol" superclass
                # *MUST* be explicitly excluded from consideration. Why? For
                # unknown reasons, monkey-patching that superclass as implicitly
                # type-checkable at runtime has extreme consequences throughout
                # the typing ecosystem. In particular, doing so causes *ALL*
                # non-protocol classes to be subsequently erroneously detected
                # as being PEP 544-compliant protocols: e.g.,
                #     # If we monkey-patched the "Protocol" superclass as well, then
                #     # the following snippet would insanely hold true... wat!?!?!?!
                #     >>> from typing import Protocol
                #     >>> class OhBoy(object): pass
                #     >>> issubclass(OhBoy, Protocol)
                #     True  # <-- we have now destroyed the world, folks.
                if cls._is_protocol:  # type: ignore[attr-defined]
                    # print(f'Protocol {cls} mro: {cls.__mro__}')
                    runtime_checkable(cls)  # pyright: ignore[reportGeneralTypeIssues]
            # Else, this class is the abstract "beartype.typing.Protocol"
            # superclass defined below. In this case, avoid dangerously
            # monkey-patching this superclass.

            # Prefixing this class member with "_abc_" is necessary to prevent
            # it from being considered part of the Protocol. See also:
            #     https://github.com/python/cpython/blob/main/Lib/typing.py
            cls._abc_inst_check_cache = {}

            # Return this caching protocol.
            return cls

        def __instancecheck__(cls, inst: Any) -> bool:
            '''
            ``True`` only if the passed object is a **structural subtype**
            (i.e., satisfies the protocol defined by) the passed protocol.

            Parameters
            ----------
            cls : type
                :pep:`544`-compliant protocol to check this object against.
            inst : Any
                Arbitrary object to check against this protocol.

            Returns
            ----------
            bool
                ``True`` only if this object satisfies this protocol.
            '''

            # Attempt to...
            try:
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                # CAUTION: This *MUST* remain *SUPER* tight!! Even adding a
                # mere assertion here can add ~50% to our best-case runtime.
                #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
                # Return a pre-cached boolean indicating whether an object of
                # the same arbitrary type as the object passed to this call
                # satisfied the same protocol in a prior call of this method.
                return cls._abc_inst_check_cache[type(inst)]
            # If this method has yet to be passed the same protocol *AND* an
            # object of the same type as the object passed to this call...
            except KeyError:
                # If you're going to do *anything*, do it here. Try not to
                # expand the rest of this method if you can avoid it.
                inst_t = type(inst)
                bases_pass_muster = True

                # Check this object against each superclass of this protocol
                # that is itself a protocol declaring checkable attributes.
                for base in cls.__bases__:
                    #FIXME: This branch probably erroneously matches unrelated
                    #user-defined types whose names just happen to be "Generic"
                    #or "Protocol". Ideally, we should tighten that up to only
                    #match the actual "{beartype,}.typing.{Generic,Protocol}"
                    #superclasses. Of course, note that
                    #"beartype.typing.Protocol" is *NOT* "typing.Protocol', so
                    #we'll want to explicitly test against both.
                    if base is cls or base.__name__ in (
                        'Protocol',
                        'Generic',
                        'object',
                    ):
                        continue

                    if not isinstance(inst, base):
                        bases_pass_muster = False
                        break

                # Cache and return whether this object satisfies both all
                # superclass protocols *AND* all attributes declared directly
                # by this protocol itself.
                cls._abc_inst_check_cache[inst_t] = bases_pass_muster and (
                    _check_only_my_attrs(cls, inst))
                return cls._abc_inst_check_cache[inst_t]
    def _check_only_my_attrs(cls, inst: Any, _EMPTY_DICT = {}) -> bool:
        '''
        ``True`` only if the passed object defines all attributes declared
        *directly* by the passed protocol class.

        This tester intentionally ignores attributes declared only by
        superclasses of this protocol; the caller (i.e.,
        :meth:`_CachingProtocolMeta.__instancecheck__`) separately checks this
        object against those superclasses.

        Parameters
        ----------
        cls : type
            :pep:`544`-compliant protocol whose directly declared attributes
            are to be checked against this object.
        inst : Any
            Arbitrary object to be checked.
        _EMPTY_DICT : dict
            Empty dictionary shared across calls as the default annotations
            mapping, avoiding a new allocation per call. Never mutated.

        Returns
        ----------
        bool
            ``True`` only if this object defines all such attributes.
        '''

        # Dictionary mapping from the name to value of each attribute (e.g.,
        # method, class variable) declared directly by this protocol class.
        cls_attr_name_to_value = cls.__dict__

        # Dictionary mapping from the name to type hint of each attribute
        # annotated directly on this protocol class, defaulting to the shared
        # empty dictionary if this class is unannotated.
        cls_attr_name_to_hint = cls_attr_name_to_value.get(
            '__annotations__', _EMPTY_DICT)

        # Merger of both dictionaries, whose keys are the names of all
        # attributes declared by this protocol class. Merge via the PEP
        # 584-compliant "|" operator where supported for efficiency.
        cls_attr_names = (
            cls_attr_name_to_value | cls_attr_name_to_hint
            if IS_PYTHON_AT_LEAST_3_9 else
            dict(cls_attr_name_to_value, **cls_attr_name_to_hint)
        )

        # For the name of each attribute declared by this protocol class...
        for cls_attr_name in cls_attr_names:
            # If...
            if (
                # This name implies this attribute to be unignorable *AND*...
                #
                # Specifically, if this name is neither...
                not (
                    # A private attribute defined by dark machinery in the
                    # "ABCMeta" metaclass for abstract base classes *OR*...
                    cls_attr_name.startswith('_abc_') or
                    # That of an ignorable non-protocol attribute...
                    cls_attr_name in _PROTOCOL_ATTR_NAMES_IGNORABLE
                # This attribute is either...
                ) and (
                    # Undefined by the passed object *OR*...
                    not hasattr(inst, cls_attr_name) or
                    # Defined by the passed object as a "blocked" (i.e., omitted
                    # from being type-checked as part of this protocol) method.
                    # For unknown and indefensible reasons, PEP 544 explicitly
                    # supports this fragile, unreadable, and error-prone idiom
                    # enabling objects to leave methods "undefined." What this!?
                    (
                        #FIXME: Unit test this up, please.
                        # A callable *AND*...
                        callable(getattr(cls, cls_attr_name, None)) and
                        # The passed object nullified this method. *facepalm*
                        getattr(inst, cls_attr_name) is None
                    )
                )
            ):
                # Then the passed object violates this protocol. In this case,
                # return false.
                return False

        # Else, the passed object satisfies this protocol. In this case, return
        # true.
        return True
# ..................{ CLASSES }..................
# @runtime_checkable
    class Protocol(
        _ProtocolSlow,
        # Force protocols to be generics. Although the standard
        # "typing.Protocol" superclass already implicitly subclasses from the
        # "typing.Generic" superclass, the non-standard
        # "typing_extensions.Protocol" superclass does *NOT*. Ergo, we force
        # this to be the case.
        Generic,  # pyright: ignore
        metaclass=_CachingProtocolMeta,
    ):
        '''
        :func:`beartype.beartype`-compatible (i.e., decorated by
        :func:`typing.runtime_checkable`) drop-in replacement for
        :class:`typing.Protocol` that can lead to significant performance
        improvements.

        Uses :class:`_CachingProtocolMeta` to cache :func:`isinstance` check
        results.

        Examples
        ----------
        .. code-block:: python

           >>> from abc import abstractmethod
           >>> from beartype import beartype
           >>> from beartype.typing import Protocol
           >>> class MyBearProtocol(Protocol):  # <-- runtime-checkable through inheritance
           ...     @abstractmethod
           ...     def myfunc(self, arg: int) -> str:
           ...         pass
           >>> my_thing: MyBearProtocol = MyImplementation()
           >>> isinstance(my_thing, MyBearProtocol)
           True
           >>> @beartype
           ... def do_something(thing: MyBearProtocol) -> None:
           ...     thing.myfunc(0)
        '''

        # ................{ CLASS VARIABLES }................
        __slots__: Any = ()

        # ................{ DUNDERS }................
        @callable_cached_minimal
        def __class_getitem__(cls, item):
            '''
            Create, memoize, and return the generic alias produced by
            subscripting this protocol class by the passed item (i.e., one or
            more type variables and/or types).
            '''

            # We have to redefine this method because typing.Protocol's version
            # is very persnickety about only working for typing.Generic and
            # typing.Protocol. That's an exclusive club, and we ain't in it.
            # (RIP, GC.) Let's see if we can sneak in, shall we?

            # FIXME: Once <https://bugs.python.org/issue46581> is addressed,
            # consider replacing the madness below with something like:
            #     cached_gen_alias = _ProtocolSlow.__class_getitem__(_ProtocolSlow, params)
            #     our_gen_alias = cached_gen_alias.copy_with(params)
            #     our_gen_alias.__origin__ = cls
            #     return our_gen_alias

            # Superclass __class_getitem__() dunder method, localized for
            # brevity, efficiency, and (most importantly) to squelch false
            # positive "errors" from pyright with a single pragma comment.
            super_class_getitem = super().__class_getitem__  # pyright: ignore[reportGeneralTypeIssues]

            # If the superclass typing.Protocol.__class_getitem__() dunder
            # method has been wrapped as expected with caching by the private
            # (and thus *NOT* guaranteed to exist) @typing._tp_cache decorator,
            # call that unwrapped method directly to obtain the expected
            # generic alias.
            #
            # Note that:
            # * We intentionally call the unwrapped method rather than the
            #   decorated closure wrapping that method with memoization. Why?
            #   Because subsequent logic monkey-patches this generic alias to
            #   refer to this class rather than the standard "typing.Protocol".
            #   However, doing so violates internal expectations of the
            #   @typing._tp_cache decorator performing this memoization.
            # * This method is already memoized by our own @callable_cached
            #   decorator. Calling the decorated closure wrapping that
            #   unwrapped method with memoization would needlessly consume
            #   excess space and time for *NO* additional benefit.
            if hasattr(super_class_getitem, '__wrapped__'):
                # Protocol class to be passed as the "cls" parameter to the
                # unwrapped superclass typing.Protocol.__class_getitem__()
                # dunder method. There exist two unique cases corresponding to
                # two unique branches of an "if" conditional in that method,
                # depending on whether either this "Protocol" superclass or a
                # user-defined subclass of this superclass is being
                # subscripted. Specifically, this class is...
                protocol_cls = (
                    # If this "Protocol" superclass is being directly
                    # subclassed by one or more type variables (e.g.,
                    # "Protocol[S, T]"), the non-caching "typing.Protocol"
                    # superclass underlying this caching protocol superclass.
                    # Since the aforementioned "if" conditional performs an
                    # explicit object identity test for the "typing.Protocol"
                    # superclass, we *MUST* pass that rather than this
                    # superclass to trigger that conditional appropriately.
                    _ProtocolSlow
                    if cls is Protocol else
                    # Else, a user-defined subclass of this "Protocol"
                    # superclass is being subclassed by one or more type
                    # variables *OR* types satisfying the type variables
                    # subscripting the superclass (e.g.,
                    # "UserDefinedProtocol[str]" for a user-defined subclass
                    # class UserDefinedProtocol(Protocol[AnyStr]). In this
                    # case, this subclass as is.
                    cls
                )

                gen_alias = super_class_getitem.__wrapped__(protocol_cls, item)
            # We shouldn't ever be here, but if we are, we're making the
            # assumption that typing.Protocol.__class_getitem__() no longer
            # caches. Heaven help us if that ever uses some proprietary
            # memoization implementation we can't see anymore because it's not
            # based on the standard @functools.wraps decorator.
            else:
                gen_alias = super_class_getitem(item)

            # Switch the origin of this generic alias from its default of
            # "typing.Protocol" to this caching protocol class. If *NOT* done,
            # CPython incorrectly sets the metaclass of subclasses to the
            # non-caching "type(typing.Protocol)" metaclass rather than our
            # caching "_CachingProtocolMeta" metaclass.
            #
            # Luddite alert: we don't fully understand the mechanics here. We
            # suspect no one does.
            gen_alias.__origin__ = cls

            # We're done! Time for a honey brewskie break. We earned it.
            return gen_alias
#FIXME: Ensure that the main @beartype codebase handles protocols whose
#repr() starts with "beartype.typing" as well, please.
# Replace the unexpected (and thus non-compliant) fully-qualified name of
# the module declaring this caching protocol superclass (e.g.,
# "beartype.typing._typingpep544") with the expected (and thus compliant)
# fully-qualified name of the standard "typing" module declaring the
# non-caching "typing.Protocol" superclass.
#
# If this is *NOT* done, then the machine-readable representation of this
# caching protocol superclass when subscripted by one or more type
# variables (e.g., "beartype.typing.Protocol[S, T]") will be differ
# significantly from that of the non-caching "typing.Protocol" superclass
# (e.g., beartype.typing._typingpep544.Protocol[S, T]"). Because
# @beartype (and possibly other third-party packages) expect the two
# representations to comply, this awkward monkey-patch preserves sanity.
Protocol.__module__ = 'beartype.typing'
# ..................{ PROTOCOLS }..................
    class SupportsAbs(_SupportsAbsSlow[_T_co], Protocol, Generic[_T_co]):
        '''
        Caching variant of :class:`typing.SupportsAbs`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
    class SupportsBytes(_SupportsBytesSlow, Protocol):
        '''
        Caching variant of :class:`typing.SupportsBytes`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
    class SupportsComplex(_SupportsComplexSlow, Protocol):
        '''
        Caching variant of :class:`typing.SupportsComplex`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
    class SupportsFloat(_SupportsFloatSlow, Protocol):
        '''
        Caching variant of :class:`typing.SupportsFloat`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
    class SupportsInt(_SupportsIntSlow, Protocol):
        '''
        Caching variant of :class:`typing.SupportsInt`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
    class SupportsIndex(_SupportsIndexSlow, Protocol):
        '''
        Caching variant of :class:`typing.SupportsIndex`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
    class SupportsRound(_SupportsRoundSlow[_T_co], Protocol, Generic[_T_co]):
        '''
        Caching variant of :class:`typing.SupportsRound`.
        '''

        # Masquerade as residing in "beartype.typing", keeping this protocol's
        # repr() consistent with the standard protocol it replaces.
        __module__: str = 'beartype.typing'
        __slots__: Any = ()
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype all-at-once API.**
This subpackage publishes :pep:`302`- and :pep:`451`-compliant import hooks
enabling external callers to automatically decorate well-typed third-party
packages and modules with runtime type-checking dynamically generated by the
:func:`beartype.beartype` decorator in a single line of code.
'''
# ....................{ TODO }....................
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.claw._clawpathhooks import (
beartype_all,
beartype_package,
)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype all-at-once low-level abstract syntax tree (AST) transformation.**
This private submodule defines the low-level abstract syntax tree (AST)
transformation automatically decorating well-typed third-party packages and
modules with runtime type-checking dynamically generated by the
:func:`beartype.beartype` decorator.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: Additionally define a new BeartypeNodeTransformer.visit_ClassDef()
#method modelled after the equivalent TypeguardTransformer.visit_ClassDef()
#method residing at:
# https://github.com/agronholm/typeguard/blob/master/src/typeguard/importhook.py
#FIXME: *OMG.* See also the third-party "executing" Python package:
# https://github.com/alexmojaki/executing
#
#IPython itself internally leverages "executing" via "stack_data" (i.e., a
#slightly higher-level third-party Python package that internally leverages
#"executing") to syntax-highlight the currently executing AST node. Indeed,
#"executing" sports an intense test suite (much like ours) effectively
#guaranteeing a one-to-one mapping between stack frames and AST nodes.
#
#So, what's the Big Idea here? The Big Idea here is that @beartype can
#internally (...possibly only optionally, but possibly mandatorily) leverage
#"stack_data" to begin performing full-blown static type-checking at runtime --
#especially of mission critical type hints like "typing.LiteralString" which can
#*ONLY* be type-checked via static analysis. :o
#
#So, what's the Little Idea here? The Little Idea here is that @beartype can
#generate type-checking wrappers that type-check parameters or returns annotated
#by "typing.LiteralString" by calling an internal private utility function --
#say, "_die_unless_literalstring(func: Callable, arg_name: str) -> None" -- with
#"func" as the current type-checking wrapper and "arg_name" as either the name
#of that parameter or "return". The _die_unless_literalstring() raiser then:
#* Dynamically searches up the call stack for the stack frame encapsulating an
# external call to the passed "func" callable.
#* Passes that stack frame to the "executing" package.
#* "executing" then returns the AST node corresponding to that stack frame.
#* Introspects that node for the passed parameter whose name is "arg_name".
#* Raises an exception unless the value of that parameter is an AST node
# corresponding to a string literal.
#
#Of course, that won't necessarily be fast -- but it will be accurate. Since
#security trumps speed, speed is significantly less of a concern insofar as
#"typing.LiteralString" is concerned. Of course, we should also employ
#significant caching... if we even can.
#FIXME: The above idea generalizes from "typing.LiteralString" to other
#fascinating topics as well. Indeed, given sufficient caching, one could begin
#to internally generate and cache a mypy-like graph network whose nodes are
#typed attributes and whose edges are relations between those typed attributes.
# ....................{ IMPORTS }....................
from ast import (
AST,
AnnAssign,
Call,
Expr,
FunctionDef,
ImportFrom,
Load,
Module,
Name,
NodeTransformer,
Str,
alias,
)
from beartype.typing import (
List,
Union,
)
from beartype._util.py.utilpyversion import IS_PYTHON_AT_LEAST_3_8
# ....................{ SUBCLASSES }....................
#FIXME: Implement us up, please.
#FIXME: Docstring us up, please.
#FIXME: Unit test us up, please.
class BeartypeNodeTransformer(NodeTransformer):
    '''
    **Beartype abstract syntax tree (AST) node transformer** (i.e., visitor
    pattern recursively transforming the AST tree passed to the :meth:`visit`
    method by decorating all typed callables and classes by the
    :func:`beartype.beartype` decorator).

    See Also
    ----------
    * The `comparable "typeguard.importhook" submodule <typeguard import
      hook_>`__ implemented by the incomparable `@agronholm (Alex Grönholm)
      <agronholm_>`__.

    .. _agronholm:
       https://github.com/agronholm
    .. _typeguard import hook:
       https://github.com/agronholm/typeguard/blob/master/src/typeguard/importhook.py
    '''

    # ..................{ VISITORS                           }..................
    def visit_Module(self, node: Module) -> Module:
        '''
        Add new abstract syntax tree (AST) child nodes to the passed AST
        module parent node encapsulating the module currently being loaded by
        the :class:`beartype.claw._clawloader.BeartypeSourceFileLoader` object,
        importing our private
        :func:`beartype._decor.decorcore.beartype_object_nonfatal` decorator
        and public :func:`beartype.door.die_if_unbearable` raiser for
        subsequent use by the other visitor methods defined by this class.

        Parameters
        ----------
        node : Module
            AST module parent node to be transformed.

        Returns
        ----------
        Module
            That same AST module parent node.
        '''

        # 0-based index of the first safe position of the list of all AST
        # child nodes of this AST module parent node at which to insert import
        # statements importing various beartype attributes, initialized to the
        # erroneous index "-1" to enable detection of empty modules (i.e.,
        # modules whose AST module nodes contain *NO* child nodes) below.
        node_import_beartype_attrs_index = -1

        # Efficiently find this index. Since this iteration halts at the first
        # child node that is neither a module docstring nor a future import,
        # this iteration exhibits worst-case O(1) rather than O(n) time
        # complexity for typical modules despite superficially appearing to
        # perform a linear search of all n child nodes.
        #
        # For the 0-based index and value of each direct AST child node of
        # this AST module parent node...
        for node_import_beartype_attrs_index, module_child in enumerate(
            node.body):
            # If this child node signifies either...
            if (
                # A module docstring...
                #
                # If that module defines a docstring, that docstring *MUST* be
                # the first expression of that module. That docstring *MUST*
                # be explicitly found and iterated past to ensure that the
                # import statements added below appear *AFTER* rather than
                # *BEFORE* any docstring. (The latter would destroy the
                # semantics of that docstring by reducing that docstring to an
                # ignorable string.)
                (
                    isinstance(module_child, Expr) and
                    isinstance(module_child.value, Str)
                ) or
                # A future import (i.e., import of the form
                # "from __future__ ...") *OR*...
                #
                # If that module performs one or more future imports, these
                # imports *MUST* necessarily be the first non-docstring
                # statements of that module and thus appear *BEFORE* all
                # import statements that are actually imports -- including
                # the import statements added below.
                (
                    isinstance(module_child, ImportFrom) and
                    module_child.module == '__future__'
                )
            ):
                # Then continue past this child node to the next child node.
                continue
            # Else, this child node is the first child node that is neither a
            # module docstring *NOR* a future import. Import statements may be
            # safely inserted at the current index. Halt iteration.
            #
            # Note that this "break" is *CRITICAL.* Without it, iteration
            # would continue to the last child node of this module, erroneously
            # inserting the imports added below immediately *BEFORE* the last
            # statement of this module rather than at its head.
            break
        # If iteration exhausted all child nodes *WITHOUT* halting, every
        # child node of this module is either a docstring or future import.
        else:
            # If this module is non-empty, insert imports *AFTER* all child
            # nodes rather than before the last docstring or future import.
            if node_import_beartype_attrs_index != -1:
                node_import_beartype_attrs_index += 1

        # If the 0-based index of the first safe position of the list of all
        # AST child nodes of this AST module parent node to insert import
        # statements importing various beartype attributes is *NOT* the
        # erroneous index to which this index was initialized above, this
        # module contains one or more child nodes and is thus non-empty. In
        # this case...
        if node_import_beartype_attrs_index != -1:
            # Tuple of all module-scoped import nodes (i.e., child nodes to be
            # inserted under the parent node encapsulating the currently
            # visited module in the AST for that module).
            nodes_import_beartype_attr = (
                # Our public beartype.door.die_if_unbearable() raiser,
                # intentionally imported from our private
                # "beartype.door._doorcheck" submodule rather than our public
                # "beartype.door" subpackage. Why? Because the former consumes
                # marginally less space and time to import than the latter.
                # Whereas the latter imports the full "TypeHint" hierarchy,
                # the former only imports multiple low-level utility functions.
                ImportFrom(
                    module='beartype.door._doorcheck',
                    names=[alias('die_if_unbearable')],
                ),
                # Our private
                # beartype._decor.decorcore.beartype_object_nonfatal()
                # decorator.
                ImportFrom(
                    module='beartype._decor.decorcore',
                    names=[alias('beartype_object_nonfatal')],
                ),
            )

            # For each module-scoped import node to be inserted...
            for node_import_beartype_attr in nodes_import_beartype_attr:
                # Copy all source code metadata from this AST module parent
                # node onto this AST import child node, guaranteeing this
                # synthetic node to be compilable.
                _copy_node_code_metadata(
                    node_src=node, node_trg=node_import_beartype_attr)

                # Insert this AST import child node at this safe position of
                # the list of all AST child nodes of this AST module parent
                # node.
                node.body.insert(
                    node_import_beartype_attrs_index,
                    node_import_beartype_attr,
                )
        # Else, this module is empty. In this case, silently reduce to a noop.
        # Since this edge case is *EXTREMELY* uncommon, avoid optimizing for
        # this edge case (here or elsewhere).

        # Recursively transform *ALL* AST child nodes of this AST module node.
        self.generic_visit(node)

        # Return this AST module node as is.
        return node

    def visit_FunctionDef(self, node: FunctionDef) -> FunctionDef:
        '''
        Add a new abstract syntax tree (AST) child node to the passed AST
        callable parent node, decorating that callable by our private
        :func:`beartype._decor.decorcore.beartype_object_nonfatal` decorator
        if and only if that callable is **typed** (i.e., annotated by a return
        type hint and/or one or more parameter type hints).

        Parameters
        ----------
        node : FunctionDef
            AST callable parent node to be transformed.

        Returns
        ----------
        FunctionDef
            That same AST callable parent node.
        '''

        # True only if that callable is annotated by a return type hint,
        # trivially decided in O(1) time.
        is_return_typed = bool(node.returns)

        # True only if that callable is annotated by one or more parameter
        # type hints, non-trivially decided in O(n) time for n the number of
        # parameters accepted by that callable.
        is_args_typed = False

        # If that callable is *NOT* annotated by a return type hint, fallback
        # to deciding whether that callable is annotated by one or more
        # parameter type hints. Since doing so is considerably more
        # computationally expensive, do so *ONLY* as needed.
        if not is_return_typed:
            args = node.args

            # Groups of non-variadic parameters accepted by that callable.
            # Note that "posonlyargs" is accessed defensively, as that
            # instance variable only exists under Python >= 3.8.
            arg_groups = (
                getattr(args, 'posonlyargs', ()),
                args.args,
                args.kwonlyargs,
            )

            # That callable is parameter-typed if any non-variadic parameter
            # *OR* either variadic parameter (i.e., "*args", "**kwargs") is
            # annotated by a type hint.
            is_args_typed = (
                any(
                    arg.annotation is not None
                    for arg_group in arg_groups
                    for arg in arg_group
                ) or
                bool(args.vararg and args.vararg.annotation) or
                bool(args.kwarg and args.kwarg.annotation)
            )
        # Else, that callable is annotated by a return type hint. In this
        # case, do *NOT* spend useless time deciding whether that callable is
        # annotated by one or more parameter type hints.

        # If that callable is typed (i.e., annotated by a return type hint
        # and/or one or more parameter type hints)...
        #
        # Note that the former is intentionally tested *BEFORE* the latter, as
        # detecting the former is O(1) time complexity and thus trivial.
        if is_return_typed or is_args_typed:
            #FIXME: Additionally pass the current beartype configuration as a
            #keyword-only "conf={conf}" parameter to this decorator, please.
            #FIXME: Avoid redecorating callables already explicitly decorated
            #by @beartype, as explicit decoration should take precedence over
            #this implicit decoration (due to "conf=BeartypeConf(...)"
            #parametrization). Note that testing "decorator_node.id ==
            #'beartype'" alone is insufficient, as decorators can be trivially
            #renamed or imported under differing names.

            # AST decoration child node decorating that callable by our
            # beartype._decor.decorcore.beartype_object_nonfatal() decorator,
            # previously imported at module scope by visit_Module() above.
            # Note that this syntax derives from the example for the
            # ast.arg() class: https://docs.python.org/3/library/ast.html#ast.arg
            decorate_callable = Name(id='beartype_object_nonfatal', ctx=Load())

            # Copy all source code metadata from this AST callable parent node
            # onto this AST decoration child node.
            _copy_node_code_metadata(node_src=node, node_trg=decorate_callable)

            # Append this AST decoration child node to the end of the list of
            # all AST decoration child nodes for this AST callable parent
            # node. Since this list is "stored outermost first (i.e. the first
            # in the list will be applied last)", appending guarantees that
            # our decorator will be applied first (i.e., *BEFORE* all
            # subsequent decorators). This is *NOT* simply obsequious greed.
            # The @beartype decorator generally requires that it precede other
            # decorators that obfuscate the identity of the original callable,
            # including:
            # * The builtin @property decorator.
            # * The builtin @classmethod decorator.
            # * The builtin @staticmethod decorator.
            node.decorator_list.append(decorate_callable)
        # Else, that callable is untyped. In this case, avoid needlessly
        # decorating that callable by @beartype for efficiency.

        # Recursively transform *ALL* AST child nodes of this AST callable
        # node.
        self.generic_visit(node)

        # Return this AST callable node as is.
        return node

    def visit_AnnAssign(self, node: AnnAssign) -> Union[AST, List[AST]]:
        '''
        Add a new abstract syntax tree (AST) sibling node after the passed AST
        **annotated assignment** (i.e., assignment of an attribute annotated
        by a :pep:`526`-compliant type hint) parent node, inserting a
        subsequent statement following that annotated assignment that
        type-checks that attribute against that type hint by passing both to
        our public :func:`beartype.door.die_if_unbearable` raiser.

        Parameters
        ----------
        node : AnnAssign
            AST annotated assignment parent node to be transformed.

        Returns
        ----------
        Union[AST, List[AST]]
            Either:

            * If this annotated assignment parent node is *not* **simple**
              (i.e., the attribute being assigned to is embedded in
              parentheses and thus denotes a full-blown Python expression
              rather than a simple attribute name) *or* assigns no value, that
              same parent node unmodified.
            * Else, a 2-list comprising both that parent node and a new
              adjacent :class:`Expr` statement node performing this
              type-check.

        See Also
        ----------
        https://github.com/awf/awfutils
            Third-party Python package whose ``@awfutils.typecheck`` decorator
            implements statement-level :func:`isinstance`-based type-checking
            in a similar manner, strongly inspiring this implementation.
            Thanks so much to Cambridge researcher @awf (Andrew Fitzgibbon)
            for the phenomenal inspiration!
        '''

        # Note that the "AnnAssign" node subclass defines these instance
        # variables:
        # * "node.annotation", a child node describing the PEP-compliant type
        #   hint annotating this assignment, typically an instance of either:
        #   * "ast.Name".
        #   * "ast.Str".
        # * "node.simple", a boolean that is true only if "node.target" is an
        #   "ast.Name" node.
        # * "node.target", a child node describing the target attribute
        #   assigned to by this assignment, guaranteed to be an instance of
        #   either "ast.Name" (the "simple" case), "ast.Attribute", or
        #   "ast.Subscript".
        # * "node.value", an optional child node describing the source value
        #   being assigned to this target attribute.
        #FIXME: Can and/or should we also support "node.target" child nodes
        #that are instances of "ast.Attribute" and "ast.Subscript"?

        # If this assignment is *NOT* simple, this assignment is *NOT*
        # assigning to an attribute name. In this case, silently ignore this
        # assignment.
        if not node.simple:
            return node
        # Else, this assignment is simple and assigning to an attribute name.

        # If this annotated assignment assigns *NO* value (e.g., "x: int"
        # rather than "x: int = 0"), this statement is a mere annotation
        # declaration. In this case, silently ignore this assignment: there
        # exists no newly assigned value to type-check, and referencing the
        # target attribute here could raise a spurious "NameError" at runtime
        # if that attribute is currently unbound.
        if node.value is None:
            return node
        # Else, this annotated assignment assigns a value. Validate the
        # target to be a simple attribute name.
        assert isinstance(node.target, Name)

        #FIXME: Additionally pass the current beartype configuration as a
        #keyword-only "conf={conf}" parameter to this raiser, please.

        # Child node referencing the function performing this type-checking,
        # previously imported at module scope by visit_Module() above.
        node_typecheck_function = Name('die_if_unbearable', ctx=Load())

        # Child node re-referencing the attribute newly assigned by this
        # assignment, passed as the first argument to die_if_unbearable().
        node_typecheck_pith = Name(node.target.id, ctx=Load())

        # Expression node type-checking this newly assigned attribute against
        # the PEP-compliant type hint annotating this assignment by deferring
        # to our die_if_unbearable() raiser.
        node_typecheck_call = Call(
            node_typecheck_function,
            [
                # Child node passing the value newly assigned to this
                # attribute by this assignment as the first parameter.
                node_typecheck_pith,
                # Child node passing the type hint annotating this assignment
                # as the second parameter.
                node.annotation,
            ],
            [],
        )

        # Statement node wrapping this call expression. Note that this
        # wrapping is *REQUIRED*: the body of a module, class, or callable is
        # a list of statement nodes, and a bare "Call" node is an expression
        # rather than a statement. Compiling an AST embedding an unwrapped
        # "Call" node in a statement position raises a "TypeError".
        node_typecheck = Expr(node_typecheck_call)

        # Copy all source code metadata from this AST annotated assignment
        # node onto *ALL* AST nodes created above.
        _copy_node_code_metadata(
            node_src=node, node_trg=node_typecheck_function)
        _copy_node_code_metadata(node_src=node, node_trg=node_typecheck_pith)
        _copy_node_code_metadata(node_src=node, node_trg=node_typecheck_call)
        _copy_node_code_metadata(node_src=node, node_trg=node_typecheck)

        # Return a list comprising these two adjacent statement nodes.
        #FIXME: Can we replace this inefficient list with an efficient tuple?
        #Probably not. Let's avoid doing so for the moment, as the "ast" API
        #is obstruse enough as it is.
        return [node, node_typecheck]
# ....................{ PRIVATE ~ copiers }....................
def _copy_node_code_metadata(node_src: AST, node_trg: AST) -> None:
    '''
    Copy all **source code metadata** (i.e., beginning and ending line and
    column numbers) from the passed source abstract syntax tree (AST) node
    onto the passed target AST node, rendering synthetic target nodes
    compilable.

    This copier is a deliberately minimal alternative to both the extremely
    inefficient (albeit convenient) :func:`ast.fix_missing_locations` function
    and the mildly inefficient :func:`ast.copy_location` function. The
    tradeoff: :func:`ast.fix_missing_locations` walks the *entire* tree in
    ``O(n)`` time but needs only one call, whereas this copier runs in
    ``O(1)`` time per call but must be explicitly invoked once for each
    synthetic node inserted by the :class:`BeartypeNodeTransformer` above.

    Parameters
    ----------
    node_src: AST
        Source AST node to copy source code metadata from.
    node_trg: AST
        Target AST node to copy source code metadata onto.

    See Also
    ----------
    :func:`ast.copy_location`
        Stdlib analogue of this copier running in ``O(k)`` time complexity
        for ``k`` the number of metadata attributes (typically, ``k == 4``).
    '''
    assert isinstance(node_src, AST), f'{repr(node_src)} not AST node.'
    assert isinstance(node_trg, AST), f'{repr(node_trg)} not AST node.'

    # Unconditionally copy the starting line and column metadata guaranteed
    # to exist under all supported Python versions.
    node_trg.lineno     = node_src.lineno
    node_trg.col_offset = node_src.col_offset

    # If the active Python interpreter targets Python >= 3.8, also copy the
    # ending line and column metadata first introduced by Python 3.8.
    if IS_PYTHON_AT_LEAST_3_8:
        node_trg.end_lineno     = node_src.end_lineno  # type: ignore[attr-defined]
        node_trg.end_col_offset = node_src.end_col_offset  # type: ignore[attr-defined]
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype all-at-once low-level package name cache.**
This private submodule caches package names on behalf of the higher-level
:func:`beartype.claw.beartype_submodules_on_import` function. Beartype import
path hooks internally created by that function subsequently lookup these package
names from this cache when deciding whether or not (and how) to decorate a
submodule being imported with :func:`beartype.beartype`.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeClawRegistrationException
from beartype.typing import (
Dict,
Iterable,
Iterator,
Optional,
Union,
)
from beartype._conf.confcls import BeartypeConf
from beartype._util.text.utiltextident import is_identifier
from collections.abc import Iterable as IterableABC
from contextlib import contextmanager
# ....................{ TESTERS }....................
#FIXME: Unit test us up, please.
def is_packages_registered_any() -> bool:
    '''
    ``True`` only if one or more packages have been previously registered --
    equivalently, only if the :func:`register_packages` function has been
    called at least once under the active Python interpreter.

    Caveats
    ----------
    **This function is only safely callable in a thread-safe manner within a**
    ``with _claw_lock:`` **context manager.** Equivalently, this global is
    *not* safely accessible outside that manager.

    Returns
    ----------
    bool
        ``True`` only if one or more packages have been previously registered.
    '''

    # A non-empty root package name cache implies at least one registration.
    return len(_package_basename_to_subpackages) > 0
# ....................{ GETTERS }....................
#FIXME: Unit test us up, please.
def get_package_conf_if_registered(package_name: str) -> Optional[BeartypeConf]:
    '''
    Beartype configuration with which to type-check the package with the
    passed name if either that package or a parent package of that package has
    been previously registered by a prior call to the
    :func:`register_packages` function *or* ``None`` otherwise (i.e., if
    neither that package nor a parent package of that package has been
    previously registered by such a call).

    Caveats
    ----------
    **This function is only safely callable in a thread-safe manner within a**
    ``with _claw_lock:`` **context manager.** Equivalently, this global is
    *not* safely accessible outside that manager.

    Parameters
    ----------
    package_name : str
        Fully-qualified name of the package to be inspected.

    Returns
    ----------
    Optional[BeartypeConf]
        Either:

        * If either that package or a parent package of that package has been
          previously registered by a prior call to the
          :func:`register_packages` function, beartype configuration with
          which to type-check that package.
        * Else, ``None``.
    '''
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize logic below with the register_packages() function.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Unqualified basenames comprising this fully-qualified package name,
    # split on "." delimiters. Note that "str.split('.')" and
    # "str.rsplit('.')" produce identical lists under all edge cases; the
    # former is called purely for readability.
    basenames = package_name.split('.')

    # Silently refuse to type-check the top-level "beartype" package or any
    # subpackage thereof. Doing so would be both:
    #
    # * Fundamentally unnecessary. The entire "beartype" codebase already
    #   religiously guards against type violations with a laborious slew of
    #   explicit type checks (e.g., "assert isinstance({arg}, {type}), ...").
    #   Automated decoration would only needlessly reduce runtime efficiency.
    # * Fundamentally dangerous, which is the greater concern. The
    #   beartype.claw._clawast.BeartypeNodeTransformer.visit_Module() method
    #   inserts a module-scoped import of the
    #   beartype._decor.decorcore.beartype_object_nonfatal() decorator at the
    #   head of each module being imported. Were the
    #   "beartype._decor.decorcore" submodule itself being imported, that
    #   insertion would destructively induce an infinite circular import --
    #   conceivable whenever an external caller empties the "sys.modules"
    #   cache, forcing reimportation of all modules.
    #
    # Note this edge case is surprisingly common: the public
    # beartype.claw.beartype_all() function implicitly registers *ALL*
    # packages (including "beartype" itself by default).
    if basenames[0] == 'beartype':
        return None
    # Else, that package resides outside the "beartype" package.

    # Subcache of the global package name cache describing the currently
    # visited basename, seeded with the root cache of all top-level packages.
    subpackages_curr = _package_basename_to_subpackages

    # Most granular configuration found so far, seeded with the global
    # configuration applicable to *ALL* packages registered by a prior call
    # to the public beartype.claw.beartype_all() function if any *OR* "None"
    # otherwise (i.e., if that function has yet to be called).
    conf_found = subpackages_curr.conf_if_registered

    # Walk the cache from the root parent package of that package down
    # towards that package itself...
    for basename in basenames:
        # Subcache describing this parent package if that parent package was
        # registered by a prior call to the register_packages() function *OR*
        # "None" otherwise (i.e., if that parent package has yet to be
        # registered).
        subpackages_next = subpackages_curr.get(basename)

        # If this parent package has yet to be registered, halt the walk at
        # this parent package.
        if subpackages_next is None:
            break
        # Else, this parent package was previously registered.

        # Prefer the more granular (i.e., unique) configuration of this
        # parent package over any configuration inherited from a transitive
        # (and hence less granular) parent package of this parent package.
        conf_found = subpackages_next.conf_if_registered or conf_found

        # Descend one subpackage deeper.
        subpackages_curr = subpackages_next

    # Return this beartype configuration if any *OR* "None" otherwise.
    return conf_found
# ....................{ REGISTRARS }....................
#FIXME: Unit test us up, please.
def register_packages_all(
    # Mandatory keyword-only parameters.
    *,
    conf: BeartypeConf,
) -> None:
    '''
    Register *all* packages as subject to our **beartype import path hook**
    (i.e., callable inserted to the front of the standard
    :mod:`sys.path_hooks` list recursively applying the
    :func:`beartype.beartype` decorator to all well-typed callables and
    classes defined by all submodules of all packages on the first importation
    of those submodules).

    Caveats
    ----------
    **This function is only safely callable in a thread-safe manner within a**
    ``with _claw_lock:`` **context manager.** Equivalently, this global is
    *not* safely accessible outside that manager.

    Parameters
    ----------
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for all packages).

    Raises
    ----------
    BeartypeClawRegistrationException
        If either:

        * The passed ``conf`` parameter is *not* a beartype configuration
          (i.e., :class:`BeartypeConf` instance).
        * All packages have already been registered by a previous call to this
          function under a conflicting configuration differing from the passed
          configuration.
    '''

    # If this configuration is *NOT* a configuration, raise an exception.
    if not isinstance(conf, BeartypeConf):
        raise BeartypeClawRegistrationException(
            f'Beartype configuration {repr(conf)} invalid (i.e., not '
            f'"beartype.BeartypeConf" instance).'
        )
    # Else, this configuration is a configuration.

    # Beartype configuration currently associated with *ALL* packages by a
    # previous call to this function if any *OR* "None" otherwise (i.e., if
    # this function has yet to be called under the active Python interpreter).
    conf_curr = _package_basename_to_subpackages.conf_if_registered

    # If this function has yet to be called, associate *ALL* packages with
    # this configuration by registering this configuration on the root package
    # name cache globally applicable to *ALL* packages.
    if conf_curr is None:
        _package_basename_to_subpackages.conf_if_registered = conf
    # Else if a prior call associated all packages with a different
    # configuration than that passed, raise an exception.
    elif conf_curr is not conf:
        raise BeartypeClawRegistrationException(
            f'All packages previously registered '
            f'with differing beartype configuration:\n'
            f'----------(OLD CONFIGURATION)----------\n'
            f'{repr(conf_curr)}\n'
            f'----------(NEW CONFIGURATION)----------\n'
            f'{repr(conf)}\n'
        )
    # Else, a prior call associated all packages with the same configuration
    # as that passed. In this case, silently ignore this redundant attempt to
    # re-register all packages.
#FIXME: Unit test us up, please.
#FIXME: Define a comparable removal function named either:
#* cancel_beartype_submodules_on_import(). This is ostensibly the most
# unambiguous and thus the best choice of those listed here. Obviously,
# beartype_submodules_on_import_cancel() is a comparable alternative.
#* forget_beartype_on_import().
#* no_beartype_on_import().
def register_packages(
    # Mandatory keyword-only parameters.
    *,
    package_names: Union[str, Iterable[str]],
    conf: BeartypeConf,
) -> None:
    '''
    Register the packages with the passed names as subject to our **beartype
    import path hook** (i.e., callable inserted to the front of the standard
    :mod:`sys.path_hooks` list recursively applying the
    :func:`beartype.beartype` decorator to all well-typed callables and classes
    defined by all submodules of all packages with the passed names on the
    first importation of those submodules).

    Caveats
    ----------
    **This function is only safely callable in a thread-safe manner within a**
    ``with _claw_lock:`` **context manager.** Equivalently, this global is
    *not* safely accessible outside that manager.

    Parameters
    ----------
    package_names : Union[str, Iterable[str]]
        Either:

        * Fully-qualified name of the package to be type-checked.
        * Iterable of the fully-qualified names of one or more packages to be
          type-checked.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed packages).

    Raises
    ----------
    BeartypeClawRegistrationException
        If either:

        * The passed ``package_names`` parameter is either:

          * Neither a string nor an iterable (i.e., fails to satisfy the
            :class:`collections.abc.Iterable` protocol).
          * An empty string or iterable.
          * A non-empty string that is *not* a valid **package name** (i.e.,
            ``"."``-delimited concatenation of valid Python identifiers).
          * A non-empty iterable containing at least one item that is either:

            * *Not* a string.
            * The empty string.
            * A non-empty string that is *not* a valid **package name**
              (i.e., ``"."``-delimited concatenation of valid Python
              identifiers).

        * The passed ``conf`` parameter is *not* a beartype configuration
          (i.e., :class:`BeartypeConf` instance).
        * One or more of the packages with the passed names have already been
          registered by a previous call to this function under a conflicting
          configuration differing from the passed configuration.
    '''

    # ..................{ VALIDATION                         }..................
    # If this configuration is *NOT* a configuration, raise an exception.
    if not isinstance(conf, BeartypeConf):
        raise BeartypeClawRegistrationException(
            f'Beartype configuration {repr(conf)} invalid (i.e., not '
            f'"beartype.BeartypeConf" instance).'
        )
    # Else, this configuration is a configuration.

    # If passed only a single package name *NOT* contained in an iterable, wrap
    # this name in a 1-tuple containing only this name for convenience.
    if isinstance(package_names, str):
        package_names = (package_names,)

    # If this iterable of package names is *NOT* an iterable, raise an
    # exception.
    if not isinstance(package_names, IterableABC):
        raise BeartypeClawRegistrationException(
            f'Package names {repr(package_names)} not iterable.')
    # Else, this iterable of package names is an iterable.
    #
    # If this iterable of package names is empty, raise an exception.
    elif not package_names:
        raise BeartypeClawRegistrationException('Package names empty.')
    # Else, this iterable of package names is non-empty.

    # Validate *ALL* package names up front, *BEFORE* mutating the global
    # package name cache below. Doing so avoids partially registering some
    # packages only to raise a validation error on a later package.
    #
    # For each such package name...
    for package_name in package_names:
        # If this package name is *NOT* a string, raise an exception.
        if not isinstance(package_name, str):
            raise BeartypeClawRegistrationException(
                f'Package name {repr(package_name)} not string.')
        # Else, this package name is a string.
        #
        # If this package name is *NOT* a valid Python identifier, raise an
        # exception.
        elif not is_identifier(package_name):
            raise BeartypeClawRegistrationException(
                f'Package name "{package_name}" invalid '
                f'(i.e., not "."-delimited Python identifier).'
            )
        # Else, this package name is a valid Python identifier.

    # ..................{ REGISTRATION                       }..................
    # For the fully-qualified name of each package to be registered...
    for package_name in package_names:
        #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # CAUTION: Synchronize with the get_package_conf_if_registered() getter.
        # The iteration performed below modifies the global package names cache
        # and thus *CANNOT* simply defer to the same logic.
        #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # List of each unqualified basename comprising this name, split from
        # this fully-qualified name on "." delimiters. Note that the
        # "str.split('.')" and "str.rsplit('.')" calls produce the exact same
        # lists under all possible edge cases. We arbitrarily call the former
        # rather than the latter for simplicity and readability.
        package_basenames = package_name.split('.')

        # Current subdictionary of the global package name cache describing
        # the currently iterated unqualified basename comprising that
        # package's name, initialized to the root dictionary describing all
        # top-level packages.
        package_basename_to_subpackages_curr = _package_basename_to_subpackages

        # For each unqualified basename comprising the directed path from the
        # root parent package of that package to that package...
        for package_basename in package_basenames:
            # Current subdictionary of that cache describing that parent
            # package if that parent package was registered by a prior call to
            # the register_packages() function *OR* "None" otherwise (i.e., if
            # that parent package has yet to be registered).
            package_subpackages = package_basename_to_subpackages_curr.get(
                package_basename)

            # If this is the first registration of that parent package,
            # register a new subcache describing that parent package.
            #
            # Note that this test could be obviated away by refactoring our
            # "_PackageBasenameToSubpackagesDict" subclass from the
            # "collections.defaultdict" superclass rather than the standard
            # "dict" class. Since doing so would obscure erroneous attempts
            # to access non-existing keys, however, this test is preferable
            # to inviting even *MORE* bugs into this bug-riddled codebase.
            # Just kidding! There are absolutely no bugs in this codebase.
            # *wink*
            if package_subpackages is None:
                package_subpackages = \
                    package_basename_to_subpackages_curr[package_basename] = \
                    _PackageBasenameToSubpackagesDict()
            # Else, that parent package was already registered by a prior call
            # to this function.

            # Iterate the currently examined subcache one subpackage deeper.
            package_basename_to_subpackages_curr = package_subpackages
        # Since the "package_basenames" list contains at least one basename,
        # the above iteration set the currently examined subdictionary
        # "package_basename_to_subpackages_curr" to at least one subcache of
        # the global package name cache. Moreover, that subcache is guaranteed
        # to describe the current (sub)package being registered.

        # If that (sub)package has yet to be registered, register that
        # (sub)package with this beartype configuration.
        if package_basename_to_subpackages_curr.conf_if_registered is None:
            package_basename_to_subpackages_curr.conf_if_registered = conf
        # Else, that (sub)package has already been registered by a previous
        # call to this function. In this case...
        else:
            # Beartype configuration previously associated with that
            # (sub)package by the previous call to this function.
            conf_curr = (
                package_basename_to_subpackages_curr.conf_if_registered)

            # If that call associated that (sub)package with a different
            # configuration than that passed, raise an exception.
            if conf_curr is not conf:
                raise BeartypeClawRegistrationException(
                    f'Package name "{package_name}" previously registered '
                    f'with differing beartype configuration:\n'
                    f'----------(OLD CONFIGURATION)----------\n'
                    f'{repr(conf_curr)}\n'
                    f'----------(NEW CONFIGURATION)----------\n'
                    f'{repr(conf)}\n'
                )
            # Else, that call associated that (sub)package with the same
            # configuration to that passed. In this case, silently ignore
            # this redundant attempt to re-register that (sub)package.
#FIXME: Unit test us up, please.
@contextmanager
def packages_unregistered() -> Iterator[None]:
    '''
    Context manager forgetting all previously registered packages on exit.

    Specifically, this context manager first runs the caller-defined body of
    the ``with`` statement executing this context manager and then --
    regardless of whether that body raised an exception or not -- clears the
    global package name cache previously populated by one or more calls to the
    :func:`register_packages` function.

    Caveats
    ----------
    **This context manager is only intended to be invoked by unit and
    integration tests in our test suite.** Nonetheless, this context manager
    necessarily violates privacy encapsulation by accessing private submodule
    globals and is thus declared in this submodule rather than elsewhere.

    **This context manager is non-thread-safe.** Since our test suite is
    intentionally *not* dangerously parallelized across multiple threads, this
    caveat is ignorable with respect to testing.

    Yields
    ----------
    None
        This context manager yields *no* values.
    '''

    # Run the caller-defined body of the parent "with" statement, guaranteeing
    # the global package name cache to be cleared afterward in *ALL* cases --
    # including when that body raises an exception.
    try:
        yield
    finally:
        _package_basename_to_subpackages.clear()
# ....................{ PRIVATE ~ classes                 }....................
class _PackageBasenameToSubpackagesDict(
    Dict[str, Optional['_PackageBasenameToSubpackagesDict']]):
    '''
    **(Sub)package name (sub)cache** (i.e., recursively nested dictionary
    mapping from the unqualified basename of each subpackage of the current
    package to be type-checked on first importation by the
    :func:`beartype.beartype` decorator to another instance of this class
    similarly describing the subsubpackages of that subpackage).

    This (sub)cache is suitable for caching as the values of:

    * The :data:`_package_basename_to_subpackages` global dictionary.
    * Each (sub)value mapped to by that global dictionary.

    Attributes
    ----------
    conf_if_registered : Optional[BeartypeConf]
        Either:

        * If this (sub)package has been explicitly registered by a prior call
          to the :func:`register_package_names` function, the **beartype
          configuration** (i.e., self-caching dataclass encapsulating all
          settings configuring type-checking for this (sub)package).
        * Else, ``None``.
    '''

    # ..................{ CLASS VARIABLES                  }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Subclasses declaring uniquely subclass-specific instance
    # variables *MUST* additionally slot those variables. Subclasses violating
    # this constraint will be usable but unslotted, which defeats our purposes.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # Slot the sole instance variable defined on this dictionary subclass,
    # avoiding the allocation of a per-instance "__dict__" and thus minimizing
    # both the space consumed by and the time required to access instances of
    # this frequently instantiated cache type.
    __slots__ = ('conf_if_registered',)

    # ..................{ INITIALIZERS                     }..................
    def __init__(self, *args, **kwargs) -> None:
        '''
        Initialize this package name (sub)cache by deferring to the standard
        :meth:`dict.__init__` method with all passed positional and keyword
        parameters and then defaulting all subclass-specific instance
        variables.
        '''

        # Defer to our superclass for all dictionary-specific initialization.
        super().__init__(*args, **kwargs)

        # Default this (sub)package to being unregistered (i.e., associated
        # with *NO* beartype configuration) until explicitly registered.
        self.conf_if_registered: Optional[BeartypeConf] = None
# ....................{ PRIVATE ~ globals }....................
#FIXME: Revise docstring in accordance with data structure changes, please.
_package_basename_to_subpackages = _PackageBasenameToSubpackagesDict()
'''
**Package name cache** (i.e., non-thread-safe dictionary mapping in a
recursively nested manner from the unqualified basename of each subpackage to be
possibly type-checked on first importation by the :func:`beartype.beartype`
decorator to either the ``None`` singleton if that subpackage is to be
type-checked *or* a nested dictionary satisfying the same structure otherwise
(i.e., if that subpackage is *not* to be type-checked)).
Motivation
----------
This dictionary is intentionally structured as a non-trivial nested data
structure rather than a trivial non-nested flat dictionary. Why? Efficiency.
Consider this flattened set of package names:
.. code-block:: python
_package_names = {'a.b', 'a.c', 'd'}
Deciding whether an arbitrary package name is in that set or not requires
worst-case ``O(n)`` iteration across the set of ``n`` package names.
Consider instead this nested dictionary whose keys are package names split on
``.`` delimiters and whose values are either recursively nested dictionaries of
the same format *or* the ``None`` singleton (terminating the current package
name):
.. code-block:: python
_package_basename_to_subpackages = {
'a': {'b': None, 'c': None}, 'd': None}
Deciding whether an arbitrary package name is in this dictionary or not requires
worst-case ``O(h)`` iteration across the height ``h`` of this dictionary
(equivalent to the largest number of ``.`` delimiters for any fully-qualified
package name encapsulated by this dictionary). Since ``h <<<<<<<<<< n``, this
dictionary provides substantially faster worst-case lookup than that set.
Moreover, in the worst case:
* That set requires one inefficient string prefix test for each item.
* This dictionary requires *only* one efficient string equality test for each
nested key-value pair while descending towards the target package name.
Let's do this, fam.
Caveats
----------
**This global is only safely accessible in a thread-safe manner from within a**
``with _claw_lock:`` **context manager.** Equivalently, this global is *not*
safely accessible outside that manager.
Examples
----------
Instance of this data structure type-checking on import submodules of the root
``package_z`` package, the child ``package_a.subpackage_k`` submodule, and the
``package_a.subpackage_b.subpackage_c`` and
``package_a.subpackage_b.subpackage_d`` submodules:
>>> _package_basename_to_subpackages = {
... 'package_a': {
... 'subpackage_b': {
... 'subpackage_c': None,
... 'subpackage_d': None,
... },
... 'subpackage_k': None,
... },
... 'package_z': None,
... }
'''
# --------------------( FILE BOUNDARY )--------------------
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype all-at-once low-level** :mod:`importlib` **module loading
machinery.**
This private submodule declares an :mod:`importlib`-compatible module loader
dynamically decorating all typed callables and classes of all submodules of all
packages previously registered with our global package name cache (e.g., by a
prior call to the high-level public :func:`beartype.claw.beartype_all` or
:func:`beartype.claw.beartype_package` functions) with the
:func:`beartype.beartype` decorator with the abstract syntax tree (AST)
transformation defined by the low-level :mod:`beartype.claw._clawast` submodule.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from ast import PyCF_ONLY_AST
from beartype.claw._clawast import BeartypeNodeTransformer
from beartype.claw._clawregistrar import get_package_conf_if_registered
from beartype.meta import VERSION
from beartype.typing import Optional
from beartype._conf.confcls import BeartypeConf
from importlib import ( # type: ignore[attr-defined]
_bootstrap_external, # pyright: ignore[reportGeneralTypeIssues]
)
from importlib.machinery import SourceFileLoader
from importlib.util import decode_source
from types import CodeType
# Original cache_from_source() function defined by the private (*gulp*)
# "importlib._bootstrap_external" submodule, preserved *BEFORE* temporarily
# replacing that function with our beartype-specific variant below.
from importlib.util import cache_from_source as cache_from_source_original
# ....................{ CLASSES }....................
#FIXME: Generalize this class to support stacking. What? Okay, so the core
#issue with the prior approach is that it only works with standard Python
#modules defined as standard files in standard directories. This assumption
#breaks down for Python modules embedded within other files (e.g., as frozen
#archives or zip files). The key insight here is given by Iguananaut in this
#StackOverflow answer:
# https://stackoverflow.com/a/48671982/2809027
#This approach "...installs a special hook in sys.path_hooks that acts almost
#as a sort of middle-ware between the PathFinder in sys.meta_path, and the
#hooks in sys.path_hooks where, rather than just using the first hook that
#says 'I can handle this path!' it tries all matching hooks in order, until it
#finds one that actually returns a useful ModuleSpec from its find_spec
#method."
#Note that "hooks" in "sys.path_hooks" are actually *FACTORY FUNCTIONS*,
#typically defined by calling the FileFinder.path_hook() class method.
#We're unclear whether we want a full "ModuleSpec," however. It seems
#preferable to merely search for a working hook in "sys.path_hooks" that
#applies to the path. Additionally, if that hook defines a get_source() method
#*AND* that method returns a non-empty string (i.e., that is neither "None"
#*NOR* the empty string), then we want to munge that string with our AST
#transformation. The advantages of this approach are multitude:
#* This approach supports pytest, unlike standard "meta_path" approaches.
#* This approach supports embedded files, unlike the first approach above. In
# particular, note that the standard
# "zipimporter.zipimporter(_bootstrap_external._LoaderBasics)" class for
# loading Python modules from arbitrary zip files does *NOT* subclass any of
# the standard superclasses you might expect it to (e.g.,
# "importlib.machinery.SourceFileLoader"). Ergo, a simple inheritance check
# fails to suffice. Thankfully, that class *DOES* define a get_source()
# method resembling that of SourceFileLoader.get_source().
#FIXME: I've confirmed by deep inspection of both the standard "importlib"
#package and the third-party "_pytest.assertion.rewrite" subpackage that the
#above should (but possibly does *NOT*) suffice to properly integrate with
#pytest. Notably, the
#_pytest.assertion.rewrite.AssertionRewritingHook.find_spec() class method
#improperly overwrites the "importlib._bootstrap.ModuleSpec.loader" instance
#variable with *ITSELF* here:
#
# class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader):
# ...
#
# _find_spec = importlib.machinery.PathFinder.find_spec
#
# def find_spec(
# self,
# name: str,
# path: Optional[Sequence[Union[str, bytes]]] = None,
# target: Optional[types.ModuleType] = None,
# ) -> Optional[importlib.machinery.ModuleSpec]:
# ...
#
# # *SO FAR, SO GOOD.* The "spec.loader" instance variable now refers
# # to an instance of our custom "SourceFileLoader" subclass.
# spec = self._find_spec(name, path) # type: ignore
# ...
#
# # *EVEN BETTER.* This might save us. See below.
# if not self._should_rewrite(name, fn, state):
# return None
#
# # And... everything goes to Heck right here. Passing "loader=self"
# # completely replaces the loader that Python just helpfully
# # provided with this "AssertionRewritingHook" instance, which is
# # all manner of wrong.
# return importlib.util.spec_from_file_location(
# name,
# fn,
# loader=self, # <-- *THIS IS THE PROBLEM, BRO.*
# submodule_search_locations=spec.submodule_search_locations,
# )
#
#Ultimately, it's no surprise whatsoever that this brute-force behaviour from
#pytest conflicts with everyone else in the Python ecosystem. That said, this
#might still not be an issue. Why? Because the call to self._should_rewrite()
#*SHOULD* cause "AssertionRewritingHook" to silently reduce to a noop for
#anything that beartype would care about.
#
#If true (which it should be), the above approach *SHOULD* still actually work.
#So why does pytest conflict with other AST transformation approaches? Because
#those other approaches also leverage "sys.meta_path" machinery, typically by
#forcefully prepending their own "MetaPathFinder" instance onto "sys.meta_path",
#which silently overwrites pytest's "MetaPathFinder" instance. Since we're *NOT*
#doing that, we should be fine with our approach. *sweat beads brow*
#FIXME: Unit test us up, please.
class BeartypeSourceFileLoader(SourceFileLoader):
    '''
    **Beartype source file loader** implementing :mod:`importlib` machinery
    loading a **sourceful Python package or module** (i.e., package or module
    backed by a ``.py``-suffixed source file) into a **module spec** (i.e.,
    in-memory :class:`importlib._bootstrap.ModuleSpec` instance describing the
    importation of that package or module, complete with a reference back to
    this originating loader).

    The :func:`beartype_package` function injects a low-level **import path
    hook** (i.e., factory closure instantiating this class as an item of the
    standard :mod:`sys.path_hooks` list) to the front of that list. When called
    by a higher-level parent **import metapath hook** (i.e., object suitable for
    use as an item of the standard :mod:`sys.meta_path` list), that closure:

    #. Instantiates one instance of the standard
       :class:`importlib._bootstrap_external.FileFinder` class for each
       **imported Python package** (i.e., package on the :mod:`sys.path` list).
       The :meth:`importlib._bootstrap_external.FileFinder.find_spec` method of
       that instance then returns this :class:`BeartypeSourceFileLoader` class
       uninstantiated for each **imported Python package submodule** (i.e.,
       submodule directly contained in that package).
    #. Adds a new key-value pair to the standard :mod:`sys.path_importer_cache`
       dictionary, whose:

       * Key is the package of that module.
       * Value is that instance of this class.

    Motivation
    ----------
    This loader was intentionally implemented so as to exclusively leverage the
    lower-level :attr:`sys.path_hooks` mechanism for declaring import hooks
    rather than both that *and* the higher-level :attr:`sys.meta_path`
    mechanism. All prior efforts in the Python ecosystem to transform the
    abstract syntax trees (ASTs) of modules at importation time via import hooks
    leverage both mechanisms. This includes:

    * :mod:`pytest`, which rewrites test assertion statements via import hooks
      leveraging both mechanisms.
    * :mod:`typeguard`, which implicitly applies the runtime type-checking
      :func:`typeguard.typechecked` decorator via import hooks leveraging both
      mechanisms.
    * :mod:`ideas`, which applies arbitrary caller-defined AST transformations
      via (...wait for it) import hooks leveraging both mechanisms.

    Beartype subverts this long-storied tradition by *only* leveraging the
    lower-level :attr:`sys.path_hooks` mechanism. Doing so reduces the
    maintenance burden, code complexity, and inter-package conflicts. The latter
    point is particularly salient. The AST transformations applied by
    both :mod:`typeguard` and :mod:`ideas` accidentally conflict with those
    applied by :mod:`pytest`. Why? Because (in order):

    #. When run as a test suite runner, :mod:`pytest` necessarily runs first and
       thus prepends its import hook as the new first item of the
       :attr:`sys.meta_path` list.
    #. When imported during subsequent unit and/or integration testing under
       that test suite runner, :mod:`typeguard` and :mod:`ideas` then install
       their own import hooks as the new first item of the :attr:`sys.meta_path`
       list. The import hook previously prepended by :mod:`pytest` then becomes
       the second item of the :attr:`sys.meta_path` list. Python consults both
       the :attr:`sys.meta_path` and :attr:`sys.path_hooks` lists in a
       first-come-first-served manner. The first hook on each list satisfying a
       request to find and import a module being imported terminates that
       request; no subsequent hooks are consulted. Both :mod:`typeguard` and
       :mod:`ideas` fail to iteratively consult subsequent hooks (e.g., with a
       piggybacking scheme of some sort). Both squelch the hook previously
       installed by :mod:`pytest` that rewrote assertions. That is bad.

    Attributes
    ----------
    _module_conf_if_registered : Optional[BeartypeConf]
        Either:

        * If the most recent call to the :meth:`get_code` method loading a
          module (i.e., creating and returning the code object underlying that
          module) was passed the fully-qualified name of a module with a
          transitive parent package previously registered by a call to a public
          :mod:`beartype.claw` import hook factory (e.g.,
          :func:`beartype.claw.beartype_package`), the beartype configuration
          with which to type-check that module.
        * Else, ``None``.

        This instance variable enables our override of the parent
        :meth:`get_code` method to communicate this configuration to the child
        :meth:`source_to_code` method, which fails to accept and thus has *no*
        access to this module name. The superclass implementation of the
        :meth:`get_code` method then internally calls our override of the
        :meth:`source_to_code` method, which accesses this instance variable to
        decide whether and how to type-check that module.

        Ordinarily, this approach would be fraught with fragility. For example,
        what if something *other* than the :meth:`get_code` method calls the
        :meth:`source_to_code` method? Thankfully, that is *not* a concern.
        :meth:`source_to_code` is only called by :meth:`get_code` in the
        :mod:`importlib` codebase. Ergo, :meth:`source_to_code` should ideally
        have been privatized (e.g., as ``_source_to_code()``).

    See Also
    ----------
    * The `comparable "typeguard.importhook" submodule <typeguard import
      hook_>`__ implemented by the incomparable `@agronholm (Alex Grönholm)
      <agronholm_>`__, whose intrepid solutions strongly inspired this
      subpackage. `Typeguard's import hook infrastructure <typeguard import
      hook_>`__ is a significant improvement over the prior state of the art in
      Python and a genuine marvel of concise, elegant, and portable abstract
      syntax tree (AST) transformation.

    .. _agronholm:
       https://github.com/agronholm
    .. _typeguard import hook:
       https://github.com/agronholm/typeguard/blob/master/src/typeguard/importhook.py
    '''

    # ..................{ INITIALIZERS                     }..................
    def __init__(self, *args, **kwargs) -> None:
        '''
        Initialize this beartype source file loader.

        All passed parameters are passed as is to the superclass method, which
        then calls our lower-level :meth:`source_to_code` subclass method
        overridden below.
        '''

        # Initialize our superclass with all passed parameters.
        super().__init__(*args, **kwargs)

        # Nullify all subclass-specific instance variables for safety.
        self._module_conf_if_registered: Optional[BeartypeConf] = None

    # ..................{ LOADER API                       }..................
    # The importlib._bootstrap_external.*Loader API declares the low-level
    # exec_module() method, which accepts a "importlib._bootstrap.ModuleSpec"
    # instance created and returned by a prior call to the higher-level
    # find_spec() method documented above; the exec_module() method then uses
    # that module spec to create and return a fully imported module object
    # (i.e., "types.ModuleType" instance). To do so:
    # * The default exec_module() implementation internally calls the
    #   lower-level get_code() method returning an in-memory Python code object
    #   deserialized from the on-disk or in-memory bytes underlying that module.
    # * The default get_code() implementation internally calls the
    #   lower-level source_to_code() method returning an in-memory Python code
    #   object dynamically compiled from the passed in-memory bytes.
    def get_code(self, fullname: str) -> Optional[CodeType]:
        '''
        Create and return the code object underlying the module with the passed
        name.

        This override of the superclass :meth:`SourceLoader.get_code` method
        internally follows one of two distinct code paths, conditionally
        depending on whether a parent package transitively containing that
        module has been previously registered with the
        :mod:`beartype.claw._clawregistrar` submodule (e.g., by a call to the
        :func:`beartype.claw.beartype_package` function). Specifically:

        * If *no* parent package transitively containing that module has been
          registered, this method fully defers to the superclass
          :meth:`SourceLoader.get_code` method.
        * Else, one or more parent packages transitively containing that module
          have been registered. In this case, this method (in order):

          #. Temporarily monkey-patches (i.e., replaces) the
             private :func:`importlib._bootstrap_external.cache_from_source`
             function with our beartype-specific
             :func:`_cache_from_source_beartype` variant.
          #. Calls the superclass :meth:`SourceLoader.get_code` method, which:

             #. Calls our override of the lower-level superclass
                :meth:`SourceLoader.source_to_code` method.

          #. Restores the
             :func:`importlib._bootstrap_external.cache_from_source` function to
             its original implementation.

        Motivation
        ----------
        The temporary monkey-patch applied by this method is strongly inspired
        by a suspiciously similar temporary monkey-patch applied by the external
        :meth:`typeguard.importhook.TypeguardLoader.exec_module` method authored
        by the incomparable @agronholm (Alex Grönholm), who writes:

            Use a custom optimization marker – the import lock should make
            this monkey patch safe

        The aforementioned "custom optimization marker" is, in fact, a
        beartype-specific constant embedded in the filename of the cached Python
        bytecode file to which that module is byte-compiled. This filename
        typically resembles
        ``__pycache__/{module_basename}.{optimization_markers}.pyc``, where:

        * ``{module_basename}`` is the unqualified basename of that module.
        * ``{optimization_markers}`` is a ``"-"``-delimited string of
          **optimization markers** (i.e., arbitrary alphanumeric labels
          uniquifying this bytecode file to various bytecode-specific metadata,
          including the name and version of the active Python interpreter).

        This monkey-patch suffixes ``{optimization_markers}`` with
        the substring ``"-beartype-{BEARTYPE_VERSION}"``, which additionally
        uniquifies the filename of this bytecode file to the abstract syntax
        tree (AST) transformation applied by this version of :mod:`beartype`.
        Why? Because external callers can trivially enable and disable that
        transformation for any module by either calling or not calling the
        :func:`beartype.claw.beartype_package` function with the name of a
        package transitively containing that module. Compiling a @beartyped
        module to the same bytecode file as the non-@beartyped variant of that
        module would erroneously persist @beartyping to that module -- even
        *after* removing the relevant call to the
        :func:`beartype.claw.beartype_package` function! Clearly, that's awful.
        Enter @agronholm's phenomenal patch, stage left.

        We implicitly trust @agronholm to get that right in a popular project
        stress-tested across hundreds of open-source projects over the past
        several decades. So, we avoid explicit thread-safe locking here.

        Lastly, note there appears to be *no* other means of safely
        implementing this behaviour *without* violating Don't Repeat Yourself
        (DRY). Specifically, doing so would require duplicating most of the
        entirety of the *extremely* non-trivial nearly 100 line-long
        :meth:`importlib._bootstrap_external.SourceLoader.get_code` method.
        Since duplicating non-trivial and fragile code inherently tied to a
        specific CPython version is considerably worse than applying a trivial
        one-line monkey-patch, first typeguard and now @beartype strongly prefer
        this monkey-patch. Did we mention that @agronholm is amazing? Because
        that really bears repeating. May the name of Alex Grönholm live eternal!
        '''

        # Fully-qualified name of the parent package of the module with the
        # passed fully-qualified name, defined as either...
        package_name = (
            # If that module is a submodule of a package, then the expression
            # "fullname.rpartition('.')[0]" necessarily yields the
            # fully-qualified name of that package;
            fullname.rpartition('.')[0] or

            #FIXME: Actually, *IS* it feasible for a top-level module to be
            #registered as a package? Certainly, our API permits that -- but how
            #does "importlib" machinery actually import top-level modules? They
            #don't have packages, but *ALL* "importlib" machinery is based on
            #containing packages. This is a fascinating edge case, so let's
            #investigate further if we ever grep up the time.

            # Else, that module is a top-level module with *NO* parent package.
            # In this case, since the above expression
            # "fullname.rpartition('.')[0]" erroneously yields the empty string,
            # fallback to the fully-qualified name of that module as is.
            # Although unlikely, it is feasible for a top-level module to be
            # registered as a package by a prior call resembling:
            #     beartype.claw.beartype_package(fullname)
            fullname
        )

        # Beartype configuration with which to type-check that module if the
        # parent package of that module was previously registered *OR* "None"
        # otherwise (i.e., if this function preserves that module unmodified).
        self._module_conf_if_registered = get_package_conf_if_registered(
            package_name)

        # If that module has *NOT* been registered for type-checking, preserve
        # that module as is by simply deferring to the superclass method
        # *WITHOUT* monkey-patching cache_from_source(). This isn't only an
        # optimization, although it certainly is that as well. This is critical.
        # Why? Because modules *NOT* being @beartyped should remain compiled
        # under their standard non-@beartyped bytecode filenames.
        if self._module_conf_if_registered is None:
            return super().get_code(fullname)
        # Else, that module has been registered for type-checking. In this
        # case...
        #
        # Note that the logic below requires inefficient exception handling (as
        # well as a potentially risky monkey-patch) and is thus performed *ONLY*
        # when absolutely necessary.
        else:
            # Temporarily monkey-patch away the cache_from_source() function.
            #
            # Note that @agronholm (Alex Grönholm) claims that "the import lock
            # should make this monkey patch safe." We're trusting you here, man!
            _bootstrap_external.cache_from_source = _cache_from_source_beartype

            # Attempt to defer to the superclass method.
            try:
                return super().get_code(fullname)
            # After doing so (and regardless of whether doing so raises an
            # exception), restore the original cache_from_source() function.
            finally:
                _bootstrap_external.cache_from_source = (
                    cache_from_source_original)

    # Note that we explicitly ignore mypy override complaints here. For unknown
    # reasons, mypy believes that "importlib.machinery.SourceFileLoader"
    # subclasses comply with the "importlib.abc.InspectLoader" abstract base
    # class (ABC). Naturally, that is *NOT* the case. Ergo, we entirely ignore
    # mypy complaints here with respect to signature matching.
    def source_to_code( # type: ignore[override]
        self,

        # Mandatory parameters.
        data: bytes,
        path: str,

        # Optional keyword-only parameters.
        *,
        _optimize: int =-1,
    ) -> CodeType:
        '''
        Code object dynamically compiled from the **sourceful Python package or
        module** (i.e., package or module backed by a ``.py``-suffixed source
        file) with the passed undecoded contents and filename, efficiently
        transformed in-place by our abstract syntax tree (AST) transformation
        automatically applying the :func:`beartype.beartype` decorator to all
        applicable objects of that package or module.

        The higher-level :meth:`get_code` superclass method internally calls
        this lower-level subclass method.

        Parameters
        ----------
        data : bytes
            **Byte array** (i.e., undecoded list of bytes) of the Python package
            or module to be decoded and dynamically compiled into a code object.
        path : str
            Absolute or relative filename of that Python package or module.
        _optimize : int, optional
            **Optimization level** (i.e., numeric integer signifying increasing
            levels of optimization under which to compile that Python package or
            module). Defaults to -1, implying the current interpreter-wide
            optimization level with which the active Python process was
            initially invoked (e.g., via the ``-O`` command-line option).

        Returns
        ----------
        CodeType
            Code object dynamically compiled from that Python package or module.
        '''

        # If that module has *NOT* been registered for type-checking, preserve
        # that module as is by simply deferring to the superclass method.
        if self._module_conf_if_registered is None:
            return super().source_to_code( # type: ignore[call-arg]
                data=data, path=path, _optimize=_optimize) # pyright: ignore[reportGeneralTypeIssues]
        # Else, that module has been registered for type-checking.

        # Plaintext decoded contents of that module.
        module_source = decode_source(data)

        # Abstract syntax tree (AST) parsed from these contents.
        module_ast = compile(
            module_source,
            path,
            'exec',
            PyCF_ONLY_AST,
            # Prevent these contents from inheriting the effects of any
            # "from __future__ import" statements in effect in beartype itself.
            dont_inherit=True,
            optimize=_optimize,
        )

        # Abstract syntax tree (AST) modified by our AST transformation
        # decorating all typed callables and classes by @beartype.
        module_ast_beartyped = BeartypeNodeTransformer().visit(module_ast)

        #FIXME: *THIS IS BAD, BRO.* For one thing, this is slow. Recursion is
        #slow. It's also dangerous. We shouldn't do it more than we have to. Now
        #we're recursing over the entire AST tree twice: once in our AST
        #transformation above (which is unavoidable) and again in the call to
        #fix_missing_locations() here (which probably is avoidable). Instead, we
        #should fold the logic of fix_missing_locations() directly into our AST
        #transformation. Specifically, our AST transformation should:
        #* For each newly generated AST node, non-recursively propagate the line
        #  and column numbers of that node's parent node onto that node.
        #FIXME: Fascinating. It would seem that we need to propagate these
        #*FOUR* attributes: "lineno", "end_lineno", "col_offset", and
        #"end_col_offset". Note that the "end_"-prefixed attributes may only be
        #available under newer Python versions. Say, Python ≥ 3.9 or 3.10?
        #Further research is required, clearly. Fortunately, the implementation
        #of the ast.fix_missing_locations() function is trivial. It shouldn't be
        #terribly arduous to embed equivalent functionality in our AST
        #transformation, assuming we think we know what we're doing. (We don't.)

        # Recursively propagate the line and column numbers of all parent nodes
        # in this tree to their children that failed to define these numbers
        # *BEFORE* calling the compile() builtin, which requires these numbers
        # to be defined on all AST nodes supporting these numbers. All AST nodes
        # supporting these numbers define these numbers *EXCEPT* those AST nodes
        # programmatically generated by our AST transformation, which currently
        # leaves these numbers undefined due to programmer laziness.
        #fix_missing_locations(module_ast_beartyped)

        # Code object compiled from this transformed AST.
        module_codeobj = compile(
            module_ast_beartyped,
            path,
            'exec',
            # Prevent these contents from inheriting the effects of any
            # "from __future__ import" statements in effect in beartype itself.
            dont_inherit=True,
            optimize=_optimize,
        )

        # Return this code object.
        return module_codeobj
# ....................{ PRIVATE ~ cachers                 }....................
#FIXME: Unit test us up, please.
def _cache_from_source_beartype(*args, **kwargs) -> str:
    '''
    Beartype-specific variant of the
    :func:`importlib._bootstrap_external.cache_from_source` function applying a
    beartype-specific optimization marker to that function.

    This, in turn, ensures that submodules residing in packages registered by a
    prior call to the :func:`beartype.claw.beartype_package` function are
    compiled to files with the filetype ``".pyc{optimization}-beartype-{version}"``,
    where ``{optimization}`` is the original ``optimization`` parameter passed
    to this function call and ``{version}`` is the version of the active
    :mod:`beartype` package.

    Parameters
    ----------
    All passed positional and keyword parameters are passed as is to the
    original :func:`importlib._bootstrap_external.cache_from_source` function,
    except that the ``optimization`` keyword parameter is suffixed with a
    beartype-specific marker as described above.

    Returns
    ----------
    str
        Filename of the cached bytecode file for the passed source filename.
    '''

    # Original optimization parameter passed to this function call if any *OR*
    # the empty string otherwise. Note the "or ''" fallback, which intentionally
    # coerces an explicitly passed "optimization=None" (the documented default
    # of cache_from_source()) to the empty string rather than permitting the
    # f-string below to erroneously embed the substring "None" in this marker.
    optimization_marker_old = kwargs.get('optimization') or ''

    # New optimization parameter applied by this monkey-patch of that function,
    # uniquifying that parameter with a beartype-specific suffix.
    #
    # NOTE(review): CPython's cache_from_source() validates that a non-empty
    # "optimization" string is alphanumeric and raises a "ValueError" otherwise.
    # Confirm that this "-"- and "."-delimited marker is actually accepted by
    # all supported CPython versions.
    kwargs['optimization'] = f'{optimization_marker_old}-beartype-{VERSION}'

    # Defer to the implementation of the original cache_from_source() function.
    return cache_from_source_original(*args, **kwargs)
# --------------------( FILE BOUNDARY )--------------------
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype all-at-once high-level** :mod:`importlib` **machinery.**
This private submodule is the main entry point for this subpackage, defining the
public-facing :func:`beartype_submodules_on_import` function registering new
beartype import path hooks. Notably, this submodule integrates high-level
:mod:`importlib` machinery required to implement :pep:`302`- and
:pep:`451`-compliant import hooks with the abstract syntax tree (AST)
transformation defined by the low-level :mod:`beartype.claw._clawast` submodule.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ TODO }....................
#FIXME: Unit test us up. Specifically, test that this approach successfully:
#* Directly decorates callables declared at:
# * Global scope in an on-disk top-level non-package module embedded in our
# test suite.
# * Class scope in the same module.
# * Closure scope in the same module.
#* Recursively decorates all callables declared by submodules of an on-disk
# top-level package.
#* Does *NOT* conflict with pytest's assertion rewriting mechanism. This will
# be non-trivial. Can we isolate another pytest process within the main
# currently running pytest process? O_o
# ....................{ IMPORTS }....................
from beartype.claw._clawloader import BeartypeSourceFileLoader
from beartype.claw._clawregistrar import (
is_packages_registered_any,
register_packages,
register_packages_all,
)
from beartype.roar import BeartypeClawRegistrationException
from beartype.typing import (
Iterable,
Optional,
Union,
)
from beartype._conf.confcls import (
BEARTYPE_CONF_DEFAULT,
BeartypeConf,
)
from beartype._util.func.utilfunccodeobj import (
FUNC_CODEOBJ_NAME_MODULE,
get_func_codeobj,
)
from beartype._util.func.utilfuncframe import get_frame
from importlib import invalidate_caches
from importlib.machinery import (
SOURCE_SUFFIXES,
FileFinder,
)
from sys import (
path_hooks,
path_importer_cache,
)
from threading import RLock
from types import (
FrameType,
)
# ....................{ HOOKS }....................
#FIXME: Unit test us up, please.
def beartype_all(
    # Optional keyword-only parameters.
    *,
    conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
) -> None:
    '''
    Register a new **universal beartype import path hook** (i.e., callable
    inserted to the front of the standard :mod:`sys.path_hooks` list recursively
    decorating *all* typed callables and classes of *all* submodules of *all*
    packages on the first importation of those submodules with the
    :func:`beartype.beartype` decorator, wrapping those callables and classes
    with performant runtime type-checking).

    This function is the runtime equivalent of a full-blown static type checker
    like ``mypy`` or ``pyright``, enabling full-stack runtime type-checking of
    *all* typed callables and classes across *all* submodules imported from this
    end-user application -- including those defined by both:

    * First-party proprietary packages directly authored for this application.
    * Third-party open-source packages authored and maintained elsewhere.

    Usage
    ----------
    This function is intended to be called (usually without passed parameters)
    from module scope as the first statement of the top-level ``__init__``
    submodule of the top-level package of an end-user application to be fully
    type-checked by :func:`beartype.beartype`. This function then registers an
    import path hook type-checking all typed callables and classes of all
    submodules of all packages on the first importation of those submodules:
    e.g.,

    .. code-block:: python

       # In "muh_package.__init__":
       from beartype.claw import beartype_all
       beartype_all()  # <-- beartype all subsequent imports, yo

       # Import submodules *AFTER* calling beartype_all().
       from muh_package._some_module import muh_function  # <-- @beartyped!
       from yer_package.other_module import muh_class     # <-- @beartyped!

    Caveats
    ----------
    **This function is not intended to be called from intermediary APIs,
    libraries, frameworks, or other middleware.** This function is *only*
    intended to be called from full stack end-user applications as a convenient
    alternative to manually passing the names of all packages to be type-checked
    to the more granular :func:`beartype_package` function. This function
    imposes runtime type-checking on downstream reverse dependencies that may
    not necessarily want, expect, or tolerate runtime type-checking. This
    function should typically *only* be called by proprietary packages not
    expected to be reused by others. Open-source packages are advised to call
    the more granular :func:`beartype_package` function instead.

    **tl;dr:** *Only call this function in proprietary non-reusable packages.*

    Parameters
    ----------
    conf : BeartypeConf, optional
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object). Defaults
        to ``BeartypeConf()``, the default ``O(1)`` constant-time configuration.

    Raises
    ----------
    BeartypeClawRegistrationException
        If either:

        * The passed ``conf`` parameter is *not* a beartype configuration (i.e.,
          :class:`BeartypeConf` instance).
    '''

    # ..................{ PATH HOOK                          }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize with the beartype_package() function.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # With a submodule-specific thread-safe reentrant lock...
    with _claw_lock:
        # True only if the beartype import path hook subsequently added below
        # has already been added by a prior call to this function under the
        # active Python interpreter.
        #
        # Technically, this condition is also decidable by an iterative search
        # over the "sys.path_hooks" list for an item that is an instance of our
        # private "_BeartypeSourceFileLoader" subclass. However, doing so would
        # impose O(n) time complexity for "n" the size of that list.
        #
        # Pragmatically, this condition is trivially decidable by noting that:
        # * This public function performs the *ONLY* call to the private
        #   register_packages() function in this codebase.
        # * The first call of this function under the active Python interpreter:
        #   * Also performs the first call of the register_packages() function.
        #   * Also adds our beartype import path hook.
        #
        # Ergo, deciding this state in O(1) time complexity reduces to deciding
        # whether the register_packages() function has been called already.
        is_path_hook_added = is_packages_registered_any()

        # Register *ALL* packages for subsequent lookup during submodule
        # importation by the beartype import path hook registered below *AFTER*
        # deciding whether this function has been called already.
        #
        # Note this helper function fully validates these parameters. Ergo, we
        # intentionally avoid doing so here in this higher-level function.
        register_packages_all(conf=conf)

        # If our beartype import path hook has yet to be added, do so now.
        if not is_path_hook_added:
            _add_path_hook()
#FIXME: Unit test us up, please.
def beartype_package(
    # Optional parameters.
    package_names: Optional[Union[str, Iterable[str]]] = None,

    # Optional keyword-only parameters.
    *,
    conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
) -> None:
    '''
    Register a new **package-specific beartype import path hook** (i.e.,
    callable inserted to the front of the standard :mod:`sys.path_hooks` list
    recursively applying the :func:`beartype.beartype` decorator to all typed
    callables and classes of all submodules of all packages with the passed
    names on the first importation of those submodules).

    Usage
    ----------
    This function is intended to be called (usually without passed parameters)
    from module scope as the first statement of the top-level ``__init__``
    submodule of any package to be type-checked by :func:`beartype.beartype`.
    This function then registers an import path hook type-checking all
    typed callables and classes of all submodules of that package on the first
    importation of those submodules: e.g.,

    .. code-block:: python

       # In "muh_package.__init__":
       from beartype.claw import beartype_package
       beartype_package()  # <-- beartype all subsequent package imports, yo

       # Import package submodules *AFTER* calling beartype_package().
       from muh_package._some_module import muh_function  # <-- @beartyped!
       from muh_package.other_module import muh_class     # <-- @beartyped!

    Parameters
    ----------
    package_names : Optional[Union[str, Iterable[str]]]
        Either:

        * Fully-qualified name of the package to be type-checked.
        * Iterable of the fully-qualified names of one or more packages to be
          type-checked.

        Defaults to ``None``, in which case this parameter defaults to the
        fully-qualified name of the **calling package** (i.e., external parent
        package of the submodule directly calling this function).
    conf : BeartypeConf, optional
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object). Defaults
        to ``BeartypeConf()``, the default ``O(1)`` constant-time configuration.

    Raises
    ----------
    BeartypeClawRegistrationException
        If either:

        * The passed ``package_names`` parameter is either:

          * Neither a string nor an iterable (i.e., fails to satisfy the
            :class:`collections.abc.Iterable` protocol).
          * An empty string or iterable.
          * A non-empty string that is *not* a valid **package name** (i.e.,
            ``"."``-delimited concatenation of valid Python identifiers).
          * A non-empty iterable containing at least one item that is either:

            * *Not* a string.
            * The empty string.
            * A non-empty string that is *not* a valid **package name** (i.e.,
              ``"."``-delimited concatenation of valid Python identifiers).

        * The passed ``conf`` parameter is *not* a beartype configuration (i.e.,
          :class:`BeartypeConf` instance).

    See Also
    ----------
    https://stackoverflow.com/a/43573798/2809027
        StackOverflow answer strongly inspiring the low-level implementation of
        this function with respect to inscrutable :mod:`importlib` machinery.
    '''

    # ..................{ PACKAGE NAMES                      }..................
    # Note the following logic *CANNOT* reasonably be isolated to a new
    # private helper function. Why? Because this logic itself calls existing
    # private helper functions assuming the caller to be at the expected
    # position on the current call stack.
    if package_names is None:
        #FIXME: *UNSAFE.* get_frame() raises a "ValueError" exception if
        #passed a non-existent frame, which is non-ideal: e.g.,
        #    >>> sys._getframe(20)
        #    ValueError: call stack is not deep enough
        #Since beartype_package() is public, that function can technically be
        #called directly from a REPL. When it is, a human-readable exception
        #should be raised instead. Notably, we instead want to:
        #* Define new utilfuncframe getters resembling:
        #      def get_frame_or_none(ignore_frames: int) -> Optional[FrameType]:
        #          try:
        #              return get_frame(ignore_frames + 1)
        #          except ValueError:
        #              return None
        #      def get_frame_caller_or_none() -> Optional[FrameType]:
        #          return get_frame_or_none(2)
        #* Import "get_frame_caller_or_none" above.
        #* Refactor this logic here to resemble:
        #      frame_caller = get_frame_caller_or_none()
        #      if frame_caller is None:
        #          raise BeartypeClawRegistrationException(
        #              'beartype_package() not callable directly from REPL scope.')
        frame_caller: FrameType = get_frame(1)  # type: ignore[assignment,misc]

        # Code object underlying the caller if that caller is pure-Python *OR*
        # raise an exception otherwise (i.e., if that caller is C-based).
        frame_caller_codeobj = get_func_codeobj(frame_caller)

        # Unqualified basename of that caller.
        frame_caller_basename = frame_caller_codeobj.co_name

        # Fully-qualified name of the module defining that caller.
        frame_caller_module_name = frame_caller.f_globals['__name__']

        #FIXME: Relax this constraint, please. Just iteratively search up the
        #call stack with iter_frames() until stumbling into a frame satisfying
        #this condition.
        # If that name is *NOT* the placeholder string assigned by the active
        # Python interpreter to all scopes encapsulating the top-most lexical
        # scope of a module in the current call stack, the caller is a class or
        # callable rather than a module. In this case, raise an exception.
        if frame_caller_basename != FUNC_CODEOBJ_NAME_MODULE:
            raise BeartypeClawRegistrationException(
                f'beartype_package() '
                f'neither passed "package_names" nor called from module scope '
                f'(i.e., caller scope '
                f'"{frame_caller_module_name}.{frame_caller_basename}" '
                f'either class or callable). '
                f'Please either pass "package_names" or '
                f'call this function from module scope.'
            )

        # If the fully-qualified name of the module defining that caller
        # contains *NO* delimiters, that module is a top-level module defined by
        # *NO* parent package. In this case, raise an exception. Why? Because
        # this function uselessly and silently reduces to a noop when called by
        # a top-level module. Why? Because this function registers an import
        # hook applicable only to subsequently imported submodules of the passed
        # packages. By definition, a top-level module is *NOT* a package and
        # thus has *NO* submodules. To prevent confusion, notify the user here.
        #
        # Note this constraint is also implicitly imposed by the subsequent
        # call to the frame_caller_module_name.rpartition() method: e.g.,
        #     >>> frame_caller_module_name = 'muh_module'
        #     >>> frame_caller_module_name.rpartition('.')
        #     ('', '', 'muh_module')  # <-- we're now in trouble, folks
        if '.' not in frame_caller_module_name:
            raise BeartypeClawRegistrationException(
                f'beartype_package() '
                f'neither passed "package_names" nor called by a submodule '
                f'(i.e., caller module "{frame_caller_module_name}" '
                f'defined by no parent package).'
            )
        # Else, that module is a submodule of some parent package.

        # Fully-qualified name of the parent package defining that submodule,
        # parsed from the name of that submodule via this standard idiom:
        #     >>> frame_caller_module_name = 'muh_package.muh_module'
        #     >>> frame_caller_module_name.rpartition('.')
        #     ('muh_package', '.', 'muh_module')
        #
        # Note that str.rpartition() *MUST* be explicitly passed the "."
        # delimiter here; calling rpartition() argumentlessly raises TypeError.
        frame_caller_package_name = frame_caller_module_name.rpartition('.')[0]

        # Default this iterable to the 1-tuple referencing only this package.
        package_names = (frame_caller_package_name,)

    # ..................{ PATH HOOK                          }..................
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize with the beartype_all() function.
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    # With a submodule-specific thread-safe reentrant lock...
    with _claw_lock:
        # True only if the beartype import path hook subsequently added below
        # has already been added by a prior call to this function under the
        # active Python interpreter.
        #
        # Technically, this condition is also decidable by an iterative search
        # over the "sys.path_hooks" list for an item that is an instance of our
        # private "_BeartypeSourceFileLoader" subclass. However, doing so would
        # impose O(n) time complexity for "n" the size of that list.
        #
        # Pragmatically, this condition is trivially decidable by noting that:
        # * This public function performs the *ONLY* call to the private
        #   register_packages() function in this codebase.
        # * The first call of this function under the active Python interpreter:
        #   * Also performs the first call of the register_packages() function.
        #   * Also adds our beartype import path hook.
        #
        # Ergo, deciding this state in O(1) time complexity reduces to deciding
        # whether the register_packages() function has been called already.
        is_path_hook_added = is_packages_registered_any()

        # Register these packages for subsequent lookup during submodule
        # importation by the beartype import path hook registered below *AFTER*
        # deciding whether this function has been called already.
        #
        # Note this helper function fully validates these parameters. Ergo, we
        # intentionally avoid doing so here in this higher-level function.
        register_packages(package_names=package_names, conf=conf)

        # If our beartype import path hook has yet to be added, do so now.
        if not is_path_hook_added:
            _add_path_hook()
def _add_path_hook() -> None:
    '''
    Add a new **beartype import path hook** (i.e., callable inserted to the
    front of the standard :mod:`sys.path_hooks` list recursively applying the
    :func:`beartype.beartype` decorator to all well-typed callables and classes
    defined by all submodules of all packages with the passed names on the first
    importation of those submodules).

    See Also
    ----------
    https://stackoverflow.com/a/43573798/2809027
        StackOverflow answer strongly inspiring the low-level implementation of
        this function with respect to inscrutable :mod:`importlib` machinery.
    '''

    # Undocumented 2-tuple format expected by the FileFinder.path_hook() class
    # method called below, associating our beartype-specific source file loader
    # with the platform-specific filetypes of all sourceful Python packages and
    # modules. We didn't do it. Don't blame the bear.
    loader_details = (BeartypeSourceFileLoader, SOURCE_SUFFIXES)

    # Closure instantiating a new "FileFinder" instance invoking this loader.
    #
    # Note that we intentionally ignore mypy complaints here. Why? Because mypy
    # erroneously believes this method accepts 2-tuples whose first items are
    # loader *INSTANCES* (e.g., "Tuple[Loader, List[str]]"). In fact, this
    # method accepts 2-tuples whose first items are loader *TYPES* (e.g.,
    # "Tuple[Type[Loader], List[str]]"). This is why we can't have nice.
    finder_factory = FileFinder.path_hook(loader_details)  # type: ignore[arg-type]

    # Prepend this factory closure *BEFORE* all other import path hooks, giving
    # our loader first refusal on every subsequent sourceful import.
    path_hooks.insert(0, finder_factory)

    # Uncache *ALL* competing finders cached by prior importations, so that the
    # hook prepended above also applies to previously visited directories.
    path_importer_cache.clear()
    invalidate_caches()
# ....................{ PRIVATE ~ globals : threading }....................
# Reentrant (rather than non-reentrant) lock, as the import machinery guarded
# by this lock may itself recursively trigger imports under the same thread.
_claw_lock = RLock()
'''
Reentrant reusable thread-safe context manager gating access to otherwise
non-thread-safe private globals defined by both this high-level submodule and
subsidiary lower-level submodules (particularly, the
:attr:`beartype.claw._clawregistrar._package_basename_to_subpackages` cache).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **container singletons** (i.e., instances of data structures
commonly required throughout this codebase, reducing space and time consumption
by preallocating widely used data structures).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import (
Any,
Dict,
)
# ....................{ DICTS }....................
# Note that this exact type annotation is required to avoid mypy complaints. :O
DICT_EMPTY: Dict[Any, Any] = {}
'''
**Empty dictionary singleton.**
Whereas Python guarantees the **empty tuple** (i.e., ``()``) to be a singleton,
Python does *not* extend that guarantee to dictionaries. This empty dictionary
singleton amends that oversight, providing efficient reuse of empty
dictionaries: e.g.,
.. code-block::
>>> () is ()
True # <-- good. this is good.
>>> {} is {}
False # <-- bad. this is bad.
>>> from beartype._data.datakind import DICT_EMPTY
>>> DICT_EMPTY is DICT_EMPTY
True # <-- good. this is good, because we made it so.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **PEP-compliant type hints** (i.e., hints annotating callables
declared throughout this codebase, either for compliance with :pep:`561` or
simply for documentation purposes).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import (
Any,
Callable,
Dict,
Iterable,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from beartype._cave._cavefast import (
MethodDecoratorClassType,
MethodDecoratorPropertyType,
MethodDecoratorStaticType,
)
from types import (
CodeType,
FrameType,
GeneratorType,
)
# ....................{ HINTS ~ callable ~ early }....................
# Callable-specific type hints required by subsequent type hints below.
CallableAny = Callable[..., Any]
'''
PEP-compliant type hint matching any callable in a manner explicitly matching
all possible callable signatures.
'''
# ....................{ HINTS ~ typevar }....................
# Type variables required by subsequent type hints below.
BeartypeableT = TypeVar(
'BeartypeableT',
# The @beartype decorator decorates objects that are either...
bound=Union[
# An arbitrary class *OR*...
type,
# An arbitrary callable *OR*...
CallableAny,
# A C-based unbound class method descriptor (i.e., a pure-Python unbound
# function decorated by the builtin @classmethod decorator) *OR*...
MethodDecoratorClassType,
# A C-based unbound property method descriptor (i.e., a pure-Python
# unbound function decorated by the builtin @property decorator) *OR*...
MethodDecoratorPropertyType,
# A C-based unbound static method descriptor (i.e., a pure-Python
# unbound function decorated by the builtin @staticmethod decorator).
MethodDecoratorStaticType,
],
)
'''
:pep:`484`-compliant **generic beartypeable type variable** (i.e., type hint
matching any arbitrary callable or class).
This type variable notifies static analysis performed by both static type
checkers (e.g., :mod:`mypy`) and type-aware IDEs (e.g., VSCode) that the
:mod:`beartype` decorator preserves:
* Callable signatures by creating and returning callables with the same
signatures as passed callables.
* Class hierarchies by preserving passed classes with respect to inheritance,
including metaclasses and method-resolution orders (MRO) of those classes.
'''
# ....................{ HINTS ~ callable ~ args }....................
CallableMethodGetitemArg = Union[int, slice]
'''
PEP-compliant type hint matching the standard type of the single positional
argument accepted by the ``__getitem__` dunder method.
'''
# ....................{ HINTS ~ callable ~ late }....................
# Callable-specific type hints *NOT* required by subsequent type hints below.
CallableTester = Callable[[object], bool]
'''
PEP-compliant type hint matching a **tester callable** (i.e., arbitrary callable
accepting a single arbitrary object and returning either ``True`` if that object
satisfies an arbitrary constraint *or* ``False`` otherwise).
'''
Codeobjable = Union[Callable, CodeType, FrameType, GeneratorType]
'''
PEP-compliant type hint matching a **codeobjable** (i.e., pure-Python object
directly associated with a code object and thus safely passable as the first
parameter to the :func:`beartype._util.func.utilfunccodeobj.get_func_codeobj`
getter retrieving the code object associated with this codeobjable).
Specifically, this hint matches:
* Code objects.
* Pure-Python callables, including generators (but *not* C-based callables,
which lack code objects).
* Pure-Python callable stack frames.
'''
# ....................{ HINTS ~ callable ~ late : decor }....................
BeartypeConfedDecorator = Callable[[BeartypeableT], BeartypeableT]
'''
PEP-compliant type hint matching a **configured beartype decorator** (i.e.,
closure created and returned from the :func:`beartype.beartype` decorator when
passed a beartype configuration via the optional ``conf`` parameter rather than
an arbitrary object to be decorated via the optional ``obj`` parameter).
'''
BeartypeReturn = Union[BeartypeableT, BeartypeConfedDecorator]
'''
PEP-compliant type hint matching any possible value returned by any invocation
of the :func:`beartype.beartype` decorator, including calls to that decorator
in both configuration and decoration modes.
'''
# ....................{ HINTS ~ code }....................
LexicalScope = Dict[str, Any]
'''
PEP-compliant type hint matching a **lexical scope** (i.e., dictionary mapping
from the name to value of each locally or globally scoped variable accessible
to a callable or class).
'''
CodeGenerated = Tuple[str, LexicalScope, Tuple[str, ...]]
'''
PEP-compliant type hint matching **generated code** (i.e., a tuple containing
a Python code snippet dynamically generated on-the-fly by a
:mod:`beartype`-specific code generator and metadata describing that code).
Specifically, this hint matches a 3-tuple ``(func_wrapper_code,
func_wrapper_scope, hint_forwardrefs_class_basename)``, where:
* ``func_wrapper_code`` is a Python code snippet type-checking an arbitrary
object against this hint. For the common case of code generated for a
:func:`beartype.beartype`-decorated callable, this snippet type-checks a
previously localized parameter or return value against this hint.
* ``func_wrapper_scope`` is the **local scope** (i.e., dictionary mapping from
the name to value of each attribute referenced one or more times in this code)
of the body of the function embedding this code.
* ``hint_forwardrefs_class_basename`` is a tuple of the unqualified classnames
of :pep:`484`-compliant relative forward references visitable from this hint
(e.g., ``('MuhClass', 'YoClass')`` given the hint ``Union['MuhClass',
List['YoClass']]``).
'''
# ....................{ HINTS ~ iterable }....................
IterableStrs = Iterable[str]
'''
PEP-compliant type hint matching *any* iterable of zero or more strings.
'''
# ....................{ HINTS ~ pep : 484 }....................
# Type hints required to fully comply with PEP 484.
Pep484TowerComplex = Union[complex, float, int]
'''
:pep:`484`-compliant type hint matching the **implicit complex tower** (i.e.,
complex numbers, floating-point numbers, and integers).
'''
Pep484TowerFloat = Union[float, int]
'''
:pep:`484`-compliant type hint matching the **implicit floating-point tower**
(i.e., both floating-point numbers and integers).
'''
# ....................{ HINTS ~ type }....................
TypeException = Type[Exception]
'''
PEP-compliant type hint matching *any* exception class.
'''
TypeWarning = Type[Warning]
'''
PEP-compliant type hint matching *any* warning category.
'''
# ....................{ HINTS ~ type : tuple }....................
TupleTypes = Tuple[type, ...]
'''
PEP-compliant type hint matching a tuple of zero or more classes.
Equivalently, this hint matches all tuples passable as the second parameters to
the :func:`isinstance` and :func:`issubclass` builtins.
'''
TypeOrTupleTypes = Union[type, TupleTypes]
'''
PEP-compliant type hint matching either a single class *or* a tuple of zero or
more classes.
Equivalently, this hint matches all objects passable as the second parameters
to the :func:`isinstance` and :func:`issubclass` builtins.
'''
# ....................{ HINTS ~ type : tuple : stack }....................
TypeStack = Optional[Tuple[type, ...]]
'''
PEP-compliant type hint matching a **type stack** (i.e., either tuple of zero or
more arbitrary types *or* ``None``).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **bare PEP-compliant type hint representations** (i.e., global
constants pertaining to machine-readable strings returned by the :func:`repr`
builtin suffixed by *no* "["- and "]"-delimited subscription representations).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import (
Dict,
FrozenSet,
Set,
)
from beartype._data.hint.pep.sign import datapepsigns
from beartype._data.hint.pep.sign.datapepsigncls import HintSign
from beartype._data.hint.pep.sign.datapepsigns import (
HintSignAbstractSet,
# HintSignAnnotated,
# HintSignAny,
HintSignAsyncContextManager,
HintSignDataclassInitVar,
HintSignAsyncIterable,
HintSignAsyncIterator,
HintSignAsyncGenerator,
HintSignAwaitable,
HintSignByteString,
HintSignCallable,
HintSignChainMap,
# HintSignClassVar,
HintSignCollection,
# HintSignConcatenate,
HintSignContainer,
HintSignCoroutine,
HintSignContextManager,
HintSignCounter,
HintSignDefaultDict,
HintSignDeque,
HintSignDict,
# HintSignFinal,
HintSignForwardRef,
HintSignFrozenSet,
HintSignGenerator,
# HintSignGeneric,
# HintSignHashable,
HintSignItemsView,
HintSignIterable,
HintSignIterator,
HintSignKeysView,
HintSignList,
# HintSignLiteral,
HintSignMapping,
HintSignMappingView,
HintSignMatch,
HintSignMutableMapping,
HintSignMutableSequence,
HintSignMutableSet,
# HintSignNamedTuple,
HintSignNewType,
HintSignNone,
HintSignNumpyArray,
# HintSignOptional,
HintSignOrderedDict,
HintSignParamSpec,
# HintSignParamSpecArgs,
# HintSignProtocol,
HintSignReversible,
HintSignSequence,
HintSignSet,
# HintSignSized,
HintSignPattern,
HintSignTuple,
HintSignType,
HintSignTypeVar,
# HintSignTypedDict,
HintSignUnion,
HintSignValuesView,
)
from beartype._util.py.utilpyversion import (
IS_PYTHON_AT_LEAST_3_9,
IS_PYTHON_AT_MOST_3_8,
)
# ....................{ MAPPINGS ~ repr }....................
# The majority of this dictionary is initialized with automated inspection below
# in the _init() function. The *ONLY* key-value pairs explicitly defined here
# are those *NOT* amenable to such inspection.
HINT_REPR_PREFIX_ARGS_0_OR_MORE_TO_SIGN: Dict[str, HintSign] = {
# ..................{ PEP 484 }..................
# All other PEP 484-compliant representation prefixes are defined by
# automated inspection below.
# PEP 484-compliant "None" singleton, which transparently reduces to
# "types.NoneType". While not explicitly defined by the "typing" module,
# PEP 484 explicitly supports this singleton:
# When used in a type hint, the expression None is considered equivalent
# to type(None).
#
# Note that the representation of the type of the "None" singleton (i.e.,
# "<class 'NoneType'>") is intentionally omitted here despite the "None"
# singleton reducing to that type. Indeed, the *ONLY* reason we detect this
# singleton at all is to enable that reduction. Although this singleton
# conveys a PEP-compliant semantic, the type of this singleton explicitly
# conveys *NO* PEP-compliant semantics. That type is simply a standard
# isinstanceable type (like any other). Indeed, attempting to erroneously
# associate the type of the "None" singleton with the same sign here would
# cause that type to be detected as conveying sign-specific PEP-compliant
# semantics rather than *NO* such semantics, which would then substantially
# break and complicate dynamic code generation for no benefit whatsoever.
'None': HintSignNone,
}
'''
Dictionary mapping from the **possibly unsubscripted PEP-compliant type hint
representation prefix** (i.e., unsubscripted prefix of the machine-readable
strings returned by the :func:`repr` builtin for PEP-compliant type hints
permissible in both subscripted and unsubscripted forms) of each hint uniquely
identifiable by that representations to its identifying sign.
Notably, this dictionary maps from the representation prefixes of:
* *All* :pep:`484`-compliant type hints. Whereas *all* :pep:`585`-compliant
type hints (e.g., ``list[str]``) are necessarily subscripted and thus omitted
from this dictionary, *all* :pep:`484`-compliant type hints support at least
unsubscripted form and most :pep:`484`-compliant type hints support
subscription as well. Moreover, the unsubscripted forms of most
:pep:`484`-compliant type hints convey deep semantics and thus require
detection as PEP-compliant (e.g., ``typing.List``, requiring detection and
reduction to ``list``).
'''
# Unlike its sibling mapping above, this dictionary is defined almost entirely
# by the explicit key-value pairs listed here.
HINT_REPR_PREFIX_ARGS_1_OR_MORE_TO_SIGN: Dict[str, HintSign] = {
    # ..................{ PEP 585                            }..................
    # PEP 585-compliant type hints are by definition subscripted (e.g.,
    # "list[str]" rather than "list"). The stdlib types underlying these hints
    # remain usable as unsubscripted type hints (e.g., simply "list"), since
    # those types are isinstanceable classes -- but unsubscripted classes
    # convey *NO* deep semantics and thus require no PEP-compliant detection.
    #
    # For maintainability, these pairs deliberately preserve the ordering of
    # the official listing in PEP 585 itself.
    'tuple': HintSignTuple,
    'list': HintSignList,
    'dict': HintSignDict,
    'set': HintSignSet,
    'frozenset': HintSignFrozenSet,
    'type': HintSignType,
    'collections.deque': HintSignDeque,
    'collections.defaultdict': HintSignDefaultDict,
    'collections.OrderedDict': HintSignOrderedDict,
    'collections.Counter': HintSignCounter,
    'collections.ChainMap': HintSignChainMap,
    'collections.abc.Awaitable': HintSignAwaitable,
    'collections.abc.Coroutine': HintSignCoroutine,
    'collections.abc.AsyncIterable': HintSignAsyncIterable,
    'collections.abc.AsyncIterator': HintSignAsyncIterator,
    'collections.abc.AsyncGenerator': HintSignAsyncGenerator,
    'collections.abc.Iterable': HintSignIterable,
    'collections.abc.Iterator': HintSignIterator,
    'collections.abc.Generator': HintSignGenerator,
    'collections.abc.Reversible': HintSignReversible,
    'collections.abc.Container': HintSignContainer,
    'collections.abc.Collection': HintSignCollection,
    'collections.abc.Callable': HintSignCallable,
    'collections.abc.Set': HintSignAbstractSet,
    'collections.abc.MutableSet': HintSignMutableSet,
    'collections.abc.Mapping': HintSignMapping,
    'collections.abc.MutableMapping': HintSignMutableMapping,
    'collections.abc.Sequence': HintSignSequence,
    'collections.abc.MutableSequence': HintSignMutableSequence,
    'collections.abc.ByteString': HintSignByteString,
    'collections.abc.MappingView': HintSignMappingView,
    'collections.abc.KeysView': HintSignKeysView,
    'collections.abc.ItemsView': HintSignItemsView,
    'collections.abc.ValuesView': HintSignValuesView,
    'contextlib.AbstractContextManager': HintSignContextManager,
    'contextlib.AbstractAsyncContextManager': HintSignAsyncContextManager,
    're.Pattern': HintSignPattern,
    're.Match': HintSignMatch,

    # ..................{ NON-PEP ~ lib : numpy              }..................
    # The PEP-noncompliant "numpy.typing.NDArray" type hint is usable both
    # subscripted and unsubscripted; when unsubscripted, it is implicitly
    # subscripted by generic type variables. In either case its representation
    # is uniformly reliable, dramatically simplifying detection via this
    # common representation prefix:
    #     >>> import numpy as np
    #     >>> import numpy.typing as npt
    #     >>> repr(npt.NDArray)
    #     numpy.ndarray[typing.Any, numpy.dtype[+ScalarType]]
    #     >>> repr(npt.NDArray[np.float64])
    #     numpy.ndarray[typing.Any, numpy.dtype[numpy.float64]]
    #
    # Ergo, even unsubscripted "numpy.typing.NDArray" type hints present
    # themselves as implicitly subscripted through their representation.
    'numpy.ndarray': HintSignNumpyArray,
}
'''
Dictionary mapping from the **necessarily subscripted PEP-compliant type hint
representation prefixes** (i.e., unsubscripted prefix of the machine-readable
strings returned by the :func:`repr` builtin for subscripted PEP-compliant type
hints) of all hints uniquely identifiable by those representations to
their identifying signs.
Notably, this dictionary maps from the representation prefixes of:
* All :pep:`585`-compliant type hints. Whereas all :pep:`484`-compliant type
hints support both subscripted and unsubscripted forms (e.g.,
``typing.List``, ``typing.List[str]``), all :pep:`585`-compliant type hints
necessarily require subscription. While the stdlib types underlying
:pep:`585`-compliant type hints are isinstanceable classes and thus also
permissible as type hints when unsubscripted (e.g., simply :class:`list`),
isinstanceable classes convey *no* deep semantics and thus need *not* be
detected as PEP-compliant.
'''
# ....................{ MAPPINGS ~ type                    }....................
# Most of this dictionary is populated by automated inspection in the _init()
# function below; only key-value pairs *NOT* amenable to that inspection are
# listed explicitly here.
HINT_TYPE_NAME_TO_SIGN: Dict[str, HintSign] = {
    # ..................{ PEP 484                            }..................
    # Strings are implicit PEP 484-compliant forward references. (Explicit
    # forward references -- i.e., "typing.ForwardRef" instances -- are handled
    # by automated inspection in _init() instead.) Note this pair
    # unconditionally matches *ALL* strings, including both:
    # * Invalid Python identifiers (e.g., "0d@yw@r3z").
    # * Absolute forward references (i.e., fully-qualified classnames),
    #   technically non-compliant with PEP 484 but seemingly compliant with
    #   PEP 585.
    # Since the distinction between PEP-compliant and -noncompliant forward
    # references is murky at best and since unconditionally matching *ALL*
    # strings as PEP-compliant substantially simplifies logic throughout the
    # codebase, we (currently) opt to do so.
    'builtins.str': HintSignForwardRef,

    # Under Python >= 3.10, PEP 484-compliant "typing.NewType" type hints are
    # instances of that class. Regardless of version,
    # "typing_extensions.NewType" type hints remain implemented in the manner
    # of Python < 3.10 -- which is to say, as closures of that function --
    # and are thus intentionally omitted here. See also:
    # https://github.com/python/typing/blob/master/typing_extensions/src_py3/typing_extensions.py
    'typing.NewType': HintSignNewType,

    # ..................{ PEP 557                            }..................
    # Under Python >= 3.8, PEP 557-compliant "dataclasses.InitVar" type hints
    # are instances of that class.
    'dataclasses.InitVar': HintSignDataclassInitVar,

    # ..................{ PEP 604                            }..................
    # PEP 604-compliant |-style unions (e.g., "int | float") are instances of
    # the low-level C-based "types.UnionType" type. Thankfully, such unions
    # are semantically interchangeable with comparable PEP 484-compliant
    # unions (e.g., "typing.Union[int, float]"); both kinds expose equivalent
    # dunder attributes (e.g., "__args__", "__parameters__"), enabling
    # subsequent code generation to conflate the two without issue.
    'types.UnionType': HintSignUnion,

    # ..................{ PEP 612                            }..................
    # Under Python >= 3.10, PEP 612-compliant "typing.ParamSpec" type hints
    # are instances of that class.
    'typing.ParamSpec': HintSignParamSpec,
}
'''
Dictionary mapping from the fully-qualified classnames of all PEP-compliant
type hints uniquely identifiable by those classnames to their identifying
signs.
'''
# ....................{ SETS ~ deprecated                  }....................
# Initialized with automated inspection below in the _init() function, which
# first populates this initially empty mutable set and then replaces it with
# an equivalent frozenset. The "type: ignore" pragma quiets the resulting
# transient set-versus-frozenset annotation mismatch.
HINTS_PEP484_REPR_PREFIX_DEPRECATED: FrozenSet[str] = set() # type: ignore[assignment]
'''
Frozen set of all **bare deprecated** :pep:`484`-compliant **type hint
representations** (i.e., machine-readable strings returned by the :func:`repr`
builtin suffixed by *no* "["- and "]"-delimited subscription representations
for all :pep:`484`-compliant type hints obsoleted by :pep:`585`-compliant
subscriptable classes).
'''
# ....................{ SETS ~ ignorable                   }....................
# The bulk of this set is registered by automated inspection in the _init()
# function below (which also freezes the set on completion); only entries
# *NOT* amenable to that inspection appear explicitly here.
HINTS_REPR_IGNORABLE_SHALLOW: FrozenSet[str] = { # type: ignore[assignment]
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # CAUTION: Synchronize changes to this set with the corresponding
    # testing-specific set
    # "beartype_test.a00_unit.data.hint.pep.data_pep.HINTS_PEP_IGNORABLE_SHALLOW".
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # ..................{ NON-PEP                            }..................
    # As the transitive superclass of all classes, the PEP-noncompliant
    # builtin "object" type unconditionally matches *ALL* objects under
    # isinstance()-based type covariance. Parameters and return values
    # annotated as "object" thus semantically reduce to being unannotated.
    # This is literally the "beartype.cave.AnyType" type.
    "<class 'object'>",

    # ..................{ PEP 604                            }..................
    # The low-level C-based "types.UnionType" class underlying PEP
    # 604-compliant |-style unions (e.g., "int | float") imposes no
    # constraints and is thus also semantically synonymous with the ignorable
    # PEP-noncompliant "beartype.cave.AnyType" and hence "object" types.
    # Nonetheless, this class *CANNOT* be instantiated from Python code:
    #     >>> import types
    #     >>> types.UnionType(int, bool)
    #     TypeError: cannot create 'types.UnionType' instances
    #
    # Likewise, this class *CANNOT* be subscripted, so no meaningful
    # equivalent of shallow type-checking exists for these unions. While
    # trivially feasible, listing "<class 'types.UnionType'>" here would only
    # prevent callers from meaningfully type-checking these unions passed as
    # valid parameters or returned as valid returns: e.g.,
    #     @beartype
    #     def muh_union_printer(muh_union: UnionType) -> None:
    #         print(muh_union)
    #
    # Ergo, that type is intentionally omitted from consideration here.
}
'''
Frozen set of all **shallowly ignorable PEP-compliant type hint
representations** (i.e., machine-readable strings returned by the :func:`repr`
builtin for all PEP-compliant type hints that are unconditionally ignorable by
the :func:`beartype.beartype` decorator in *all* possible contexts).
Caveats
----------
**The high-level**
:func:`beartype._util.hint.pep.utilhinttest.is_hint_ignorable` **tester
function should always be called in lieu of testing type hints against this
low-level set.** This set is merely shallow and thus excludes **deeply
ignorable type hints** (e.g., :data:`Union[Any, bool, str]`). Since there exist
a countably infinite number of deeply ignorable type hints, this set is
necessarily constrained to the substantially smaller finite subset of only
shallowly ignorable type hints.
'''
# ....................{ INITIALIZERS                       }....................
def _init() -> None:
    '''
    Initialize this submodule.

    Specifically, this function inspects the signs published by the
    :mod:`beartype._data.hint.pep.sign.datapepsigns` submodule against each
    quasi-standard typing module (e.g., :mod:`typing`,
    :mod:`typing_extensions`) and, on that basis, mutates in-place the
    module-scoped :data:`HINT_REPR_PREFIX_ARGS_0_OR_MORE_TO_SIGN`,
    :data:`HINT_TYPE_NAME_TO_SIGN`,
    :data:`HINTS_PEP484_REPR_PREFIX_DEPRECATED`, and
    :data:`HINTS_REPR_IGNORABLE_SHALLOW` globals -- freezing the latter two
    on completion.
    '''

    # ..................{ EXTERNALS                          }..................
    # Defer initialization-specific imports.
    from beartype._data.mod.datamodtyping import TYPING_MODULE_NAMES

    # Permit redefinition of these globals below (i.e., rebinding these names
    # to the frozensets produced by the "SYNTHESIS" section below).
    global \
        HINTS_PEP484_REPR_PREFIX_DEPRECATED, \
        HINTS_REPR_IGNORABLE_SHALLOW

    # ..................{ HINTS                              }..................
    # Length of the ignorable substring prefixing the name of each sign.
    _HINT_SIGN_PREFIX_LEN = len('HintSign')

    # ..................{ HINTS ~ repr                       }..................
    # Dictionary mapping from the unqualified names of typing attributes whose
    # names are erroneously desynchronized from their bare machine-readable
    # representations to the actual representations of those attributes.
    #
    # The unqualified names and representations of *MOST* typing attributes
    # are rigorously synchronized. However, these two strings are
    # desynchronized for a proper subset of Python versions and typing
    # attributes:
    #     $ ipython3.8
    #     >>> import typing
    #     >>> repr(typing.List[str])
    #     typing.List[str]  # <-- this is good
    #     >>> repr(typing.ContextManager[str])
    #     typing.AbstractContextManager[str]  # <-- this is pants
    #
    # This dictionary enables subsequent logic to transparently resynchronize
    # the unqualified names and representations of pants typing attributes.
    _HINT_TYPING_ATTR_NAME_TO_REPR_PREFIX: Dict[str, str] = {}

    # If the active Python interpreter targets Python >= 3.7.x <= 3.8.x (i.e.,
    # either Python 3.7 or 3.8), resynchronize the unqualified names and
    # representations of desynchronized typing attributes. Bizarrely:
    # * Python 3.7.0 first desynchronized these attributes, despite the
    #   otherwise insane Python 3.6.x series having actually gotten this
    #   right.
    # * Python 3.8.x preserved this bad behaviour.
    # * Python 3.9.0 rectified this issue finally. *sigh*
    if IS_PYTHON_AT_MOST_3_8:
        _HINT_TYPING_ATTR_NAME_TO_REPR_PREFIX.update({
            'AsyncContextManager': 'AbstractAsyncContextManager',
            'ContextManager': 'AbstractContextManager',
        })

    # ..................{ HINTS ~ types                      }..................
    # Dictionary mapping from the unqualified names of all classes defined by
    # typing modules used to instantiate PEP-compliant type hints to their
    # corresponding signs.
    _HINT_TYPE_BASENAMES_TO_SIGN = {
        # ................{ PEP 484                          }................
        # All PEP 484-compliant forward references are necessarily instances
        # of the same class.
        'ForwardRef' : HintSignForwardRef,

        # All PEP 484-compliant type variables are necessarily instances of
        # the same class.
        'TypeVar': HintSignTypeVar,

        #FIXME: "Generic" is ignorable when unsubscripted. Excise this up!
        # The unsubscripted PEP 484-compliant "Generic" superclass is
        # explicitly equivalent under PEP 484 to the "Generic[Any]"
        # subscription and thus slightly conveys meaningful semantics.
        # 'Generic': HintSignGeneric,
    }

    # ..................{ HINTS ~ deprecated                 }..................
    # Set of the unqualified names of all deprecated PEP 484-compliant typing
    # attributes.
    _HINT_PEP484_TYPING_ATTR_NAMES_DEPRECATED: Set[str] = set()

    # If the active Python interpreter targets Python >= 3.9 and thus supports
    # PEP 585, add the names of all deprecated PEP 484-compliant typing
    # attributes (e.g., "typing.List") that have since been obsoleted by
    # equivalent bare PEP 585-compliant builtin classes (e.g., "list").
    if IS_PYTHON_AT_LEAST_3_9:
        _HINT_PEP484_TYPING_ATTR_NAMES_DEPRECATED.update((
            # ..............{ PEP 484                        }..............
            'AbstractSet',
            'AsyncContextManager',
            'AsyncGenerator',
            'AsyncIterable',
            'AsyncIterator',
            'Awaitable',
            'ByteString',
            'Callable',
            'ChainMap',
            'Collection',
            'Container',
            'ContextManager',
            'Coroutine',
            'Counter',
            'DefaultDict',
            'Deque',
            'Dict',
            'FrozenSet',
            'Generator',
            'Hashable',
            'ItemsView',
            'Iterable',
            'Iterator',
            'KeysView',
            'List',
            'MappingView',
            'Mapping',
            'Match',
            'MutableMapping',
            'MutableSequence',
            'MutableSet',
            'OrderedDict',
            'Pattern',
            'Reversible',
            'Sequence',
            'Set',
            'Sized',
            'Tuple',
            'Type',
            'ValuesView',
        ))

    # ..................{ HINTS ~ ignorable                  }..................
    # Set of the unqualified names of all shallowly ignorable typing non-class
    # attributes. Since classes and non-class attributes have incommensurate
    # machine-readable representations, these two types of attributes *MUST*
    # be isolated to distinct sets. See
    # "_HINT_TYPING_CLASSNAMES_IGNORABLE" below.
    _HINT_TYPING_ATTR_NAMES_IGNORABLE = {
        # ................{ PEP 484                          }................
        # The "Any" singleton is semantically synonymous with the ignorable
        # PEP-noncompliant "beartype.cave.AnyType" and hence "object" types.
        'Any',

        # The unsubscripted "Optional" singleton semantically expands to the
        # implicit "Optional[Any]" singleton by the same argument. Since PEP
        # 484 also stipulates that all "Optional[t]" singletons semantically
        # expand to "Union[t, type(None)]" singletons for arbitrary arguments
        # "t", "Optional[Any]" semantically expands to merely "Union[Any,
        # type(None)]". Since all unions subscripted by "Any" semantically
        # reduce to merely "Any", the "Optional" singleton also reduces to
        # merely "Any".
        #
        # This intentionally excludes "Optional[type(None)]", which the
        # "typing" module physically reduces to merely "type(None)". *shrug*
        'Optional',

        # The unsubscripted "Union" singleton semantically expands to the
        # implicit "Union[Any]" singleton by the same argument. Since PEP 484
        # stipulates that a union of one type semantically reduces to only
        # that type, "Union[Any]" semantically reduces to merely "Any".
        # Despite their semantic equivalency, however, these objects remain
        # syntactically distinct with respect to object identification: e.g.,
        #     >>> Union is not Union[Any]
        #     True
        #     >>> Union is not Any
        #     True
        #
        # This intentionally excludes:
        #
        # * The "Union[Any]" and "Union[object]" singletons, since the
        #   "typing" module physically reduces:
        #   * "Union[Any]" to merely "Any" (i.e., "Union[Any] is Any"), which
        #     this frozen set already contains.
        #   * "Union[object]" to merely "object" (i.e., "Union[object] is
        #     object"), which this frozen set also already contains.
        # * "Union" singleton subscripted by one or more ignorable type hints
        #   contained in this set (e.g., "Union[Any, bool, str]"). Since there
        #   exist a countably infinite number of these subscriptions, these
        #   subscriptions *CANNOT* be explicitly listed in this set. Instead,
        #   these subscriptions are dynamically detected by the high-level
        #   beartype._util.hint.pep.utilhinttest.is_hint_ignorable() tester
        #   function and thus referred to as deeply ignorable type hints.
        'Union',
    }

    # Set of the unqualified names of all shallowly ignorable typing classes.
    _HINT_TYPING_CLASSNAMES_IGNORABLE = {
        # The "Generic" superclass imposes no constraints and is thus also
        # semantically synonymous with the ignorable PEP-noncompliant
        # "beartype.cave.AnyType" and hence "object" types. Since PEP 484
        # stipulates that *ANY* unsubscripted subscriptable PEP-compliant
        # singleton including "typing.Generic" semantically expands to that
        # singleton subscripted by an implicit "Any" argument, "Generic"
        # semantically expands to the implicit "Generic[Any]" singleton.
        'Generic',

        # ................{ PEP 544                          }................
        # Note that ignoring the "typing.Protocol" superclass is vital here.
        # For unknown and presumably uninteresting reasons, *ALL* possible
        # objects satisfy this superclass. Ergo, this superclass is synonymous
        # with the "object" root superclass: e.g.,
        #     >>> import typing as t
        #     >>> isinstance(object(), t.Protocol)
        #     True
        #     >>> isinstance('wtfbro', t.Protocol)
        #     True
        #     >>> isinstance(0x696969, t.Protocol)
        #     True
        'Protocol',
    }

    # ..................{ CONSTRUCTION                       }..................
    # For the fully-qualified name of each quasi-standard typing module...
    for typing_module_name in TYPING_MODULE_NAMES:
        # For the name of each sign...
        #
        # Note that:
        # * The inspect.getmembers() getter could also be called here.
        #   However, that getter internally defers to dir() and getattr() with
        #   a considerable increase in runtime complexity.
        # * The "__dict__" dunder attribute should *NEVER* be accessed
        #   directly on a module, as that attribute commonly contains
        #   artificial entries *NOT* explicitly declared by that module (e.g.,
        #   standard dunders resembling "__name__" and "__loader__").
        for hint_sign_name in dir(datapepsigns):
            # If this name is *NOT* prefixed by the substring prefixing the
            # names of all signs, this name is *NOT* the name of a sign. In
            # this case, silently continue to the next sign.
            if not hint_sign_name.startswith('HintSign'):
                continue

            # Sign with this name.
            hint_sign = getattr(datapepsigns, hint_sign_name)

            # Unqualified name of the typing attribute identified by this
            # sign (i.e., this sign's name stripped of its "HintSign" prefix).
            typing_attr_name = hint_sign_name[_HINT_SIGN_PREFIX_LEN:]
            assert typing_attr_name, f'{hint_sign_name} not sign name.'

            # Machine-readable representation prefix of this attribute,
            # conditionally defined as either:
            # * If this name is erroneously desynchronized from this
            #   representation under the active Python interpreter, the actual
            #   representation of this attribute under this interpreter (e.g.,
            #   "AbstractContextManager" for the "typing.ContextManager"
            #   hint).
            # * Else, this name is correctly synchronized with this
            #   representation under the active Python interpreter. In this
            #   case, fallback to this name as is (e.g., "List" for the
            #   "typing.List" hint).
            hint_repr_prefix = _HINT_TYPING_ATTR_NAME_TO_REPR_PREFIX.get(
                typing_attr_name, typing_attr_name)

            # Map from that attribute in this module to this sign.
            # print(f'[datapeprepr] Mapping repr("{typing_module_name}.{hint_repr_prefix}[...]") -> {repr(hint_sign)}...')
            HINT_REPR_PREFIX_ARGS_0_OR_MORE_TO_SIGN[
                f'{typing_module_name}.{hint_repr_prefix}'] = hint_sign

        # For the unqualified classname identifying each sign to that sign...
        for typing_attr_name, hint_sign in (
            _HINT_TYPE_BASENAMES_TO_SIGN.items()):
            # Map from that classname in this module to this sign.
            # print(f'[datapeprepr] Mapping type "{typing_module_name}.{typing_attr_name}" -> {repr(hint_sign)}...')
            HINT_TYPE_NAME_TO_SIGN[
                f'{typing_module_name}.{typing_attr_name}'] = hint_sign

        # For each shallowly ignorable typing non-class attribute name...
        for typing_attr_name in _HINT_TYPING_ATTR_NAMES_IGNORABLE:
            # Add that attribute relative to this module to this set.
            # print(f'[datapeprepr] Registering ignorable non-class "{typing_module_name}.{typing_attr_name}"...')
            HINTS_REPR_IGNORABLE_SHALLOW.add( # type: ignore[attr-defined]
                f'{typing_module_name}.{typing_attr_name}')

        # For each shallowly ignorable typing classname...
        for typing_classname in _HINT_TYPING_CLASSNAMES_IGNORABLE:
            # Add that classname relative to this module to this set.
            # print(f'[datapeprepr] Registering ignorable class "{typing_module_name}.{typing_classname}"...')
            HINTS_REPR_IGNORABLE_SHALLOW.add( # type: ignore[attr-defined]
                f"<class '{typing_module_name}.{typing_classname}'>")

        # For each deprecated PEP 484-compliant typing attribute name...
        for typing_attr_name in _HINT_PEP484_TYPING_ATTR_NAMES_DEPRECATED:
            # Add that attribute relative to this module to this set.
            # print(f'[datapeprepr] Registering deprecated "{typing_module_name}.{typing_attr_name}"...')
            HINTS_PEP484_REPR_PREFIX_DEPRECATED.add( # type: ignore[attr-defined]
                f'{typing_module_name}.{typing_attr_name}')

    # ..................{ SYNTHESIS                          }..................
    # Freeze all relevant global sets for safety, rebinding the module-scoped
    # names declared "global" above.
    HINTS_PEP484_REPR_PREFIX_DEPRECATED = frozenset(
        HINTS_PEP484_REPR_PREFIX_DEPRECATED)
    HINTS_REPR_IGNORABLE_SHALLOW = frozenset(HINTS_REPR_IGNORABLE_SHALLOW)

    # ..................{ DEBUGGING                          }..................
    # Uncomment as needed to display the contents of these objects.
    # from pprint import pformat
    # print(f'HINT_REPR_PREFIX_ARGS_0_OR_MORE_TO_SIGN: {pformat(HINT_REPR_PREFIX_ARGS_0_OR_MORE_TO_SIGN)}')
    # print(f'HINT_REPR_PREFIX_ARGS_1_OR_MORE_TO_SIGN: {pformat(HINT_REPR_PREFIX_ARGS_1_OR_MORE_TO_SIGN)}')
    # print(f'HINT_TYPE_NAME_TO_SIGN: {pformat(HINT_TYPE_NAME_TO_SIGN)}')


# Initialize this submodule.
_init()
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **Python version-agnostic signs** (i.e., instances of the
:class:`beartype._data.hint.pep.sign.datapepsigncls.HintSign` class
uniquely identifying PEP-compliant type hints in a safe, non-deprecated manner
regardless of the Python version targeted by the active Python interpreter).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Attributes imported here at module scope *MUST* be explicitly
# deleted from this module's namespace below.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype._data.hint.pep.sign.datapepsigncls import (
HintSign as _HintSign)
# ....................{ SIGNS ~ explicit                   }....................
# Signs with explicit analogues in the stdlib "typing" module. Each sign below
# is named "HintSign{attr}", where "{attr}" is the unqualified name of the
# corresponding "typing" attribute.
#
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Signs defined by this module are synchronized with the "__all__"
# list global of the "typing" module bundled with the most recent CPython
# release. For that reason, these signs are:
# * Intentionally declared in the exact same order prefixed by the exact same
#   inline comments as for that list global.
# * Intentionally *NOT* commented with docstrings, both because:
#   * These docstrings would all trivially reduce to a single-line sentence
#     fragment resembling "Alias of typing attribute."
#   * These docstrings would inhibit diffing and synchronization by
#     inspection.
# * Intentionally *NOT* conditionally isolated to the specific range of Python
#   versions whose "typing" module lists these attributes. For example, the
#   "HintSignAsyncContextManager" sign identifying the
#   "typing.AsyncContextManager" attribute that only exists under Python >=
#   3.7 could be conditionally isolated to that range of Python versions.
#   Technically, there exists *NO* impediment to doing so; pragmatically,
#   doing so would be ineffectual. Why? Because attributes *NOT* defined by
#   the "typing" module of the active Python interpreter cannot (by
#   definition) be used to annotate callables decorated by the @beartype
#   decorator.
#
# When bumping beartype to support a new CPython release:
# * Declare one new attribute here for each new "typing" attribute added by
#   that CPython release regardless of whether beartype explicitly supports
#   that attribute yet. The subsequently called die_unless_hint_pep_supported()
#   validator will raise exceptions when passed these attributes.
# * Preserve attributes here that have since been removed from the "typing"
#   module in that CPython release to ensure their continued usability when
#   running beartype against older CPython releases.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

# Super-special typing primitives.
HintSignAnnotated = _HintSign(name='Annotated')
HintSignAny = _HintSign(name='Any')
HintSignCallable = _HintSign(name='Callable')
HintSignClassVar = _HintSign(name='ClassVar')
HintSignConcatenate = _HintSign(name='Concatenate')
HintSignFinal = _HintSign(name='Final')
HintSignForwardRef = _HintSign(name='ForwardRef')
HintSignGeneric = _HintSign(name='Generic')
HintSignLiteral = _HintSign(name='Literal')
HintSignOptional = _HintSign(name='Optional')
HintSignParamSpec = _HintSign(name='ParamSpec')
HintSignProtocol = _HintSign(name='Protocol')
HintSignTuple = _HintSign(name='Tuple')
HintSignType = _HintSign(name='Type')
HintSignTypeVar = _HintSign(name='TypeVar')
HintSignTypeVarTuple = _HintSign(name='TypeVarTuple')
HintSignUnion = _HintSign(name='Union')
# ABCs (from collections.abc).
#
# Note that each sign's "name" *MUST* match the unqualified name of the
# corresponding "typing" attribute (i.e., the "HintSign" variable suffix).
# Previously, "HintSignAsyncIterator", "HintSignAsyncIterable", and
# "HintSignAsyncContextManager" were erroneously instantiated with the names
# of their synchronous counterparts (e.g., name='Iterator'), duplicating the
# names of the distinct "HintSignIterator", "HintSignIterable", and
# "HintSignContextManager" signs and violating the one-to-one mapping between
# the attributes of this namespace and sign names (see "CLEANUP" below).
HintSignAbstractSet = _HintSign(name='AbstractSet')
HintSignByteString = _HintSign(name='ByteString')
HintSignContainer = _HintSign(name='Container')
HintSignContextManager = _HintSign(name='ContextManager')
HintSignHashable = _HintSign(name='Hashable')
HintSignItemsView = _HintSign(name='ItemsView')
HintSignIterable = _HintSign(name='Iterable')
HintSignIterator = _HintSign(name='Iterator')
HintSignKeysView = _HintSign(name='KeysView')
HintSignMapping = _HintSign(name='Mapping')
HintSignMappingView = _HintSign(name='MappingView')
HintSignMutableMapping = _HintSign(name='MutableMapping')
HintSignMutableSequence = _HintSign(name='MutableSequence')
HintSignMutableSet = _HintSign(name='MutableSet')
HintSignSequence = _HintSign(name='Sequence')
HintSignSized = _HintSign(name='Sized')
HintSignValuesView = _HintSign(name='ValuesView')
HintSignAwaitable = _HintSign(name='Awaitable')
HintSignAsyncIterator = _HintSign(name='AsyncIterator')
HintSignAsyncIterable = _HintSign(name='AsyncIterable')
HintSignCoroutine = _HintSign(name='Coroutine')
HintSignCollection = _HintSign(name='Collection')
HintSignAsyncGenerator = _HintSign(name='AsyncGenerator')
HintSignAsyncContextManager = _HintSign(name='AsyncContextManager')
# Structural checks, a.k.a. protocols.
HintSignReversible = _HintSign(name='Reversible')
# The "Supports*" attributes require no dedicated signs, as each is already an
# isinstanceable ABC usable as an ordinary class-based type hint:
# SupportsAbs <-- not a useful type hint (already an isinstanceable ABC)
# SupportsBytes <-- not a useful type hint (already an isinstanceable ABC)
# SupportsComplex <-- not a useful type hint (already an isinstanceable ABC)
# SupportsFloat <-- not a useful type hint (already an isinstanceable ABC)
# SupportsIndex <-- not a useful type hint (already an isinstanceable ABC)
# SupportsInt <-- not a useful type hint (already an isinstanceable ABC)
# SupportsRound <-- not a useful type hint (already an isinstanceable ABC)

# Concrete collection types.
HintSignChainMap = _HintSign(name='ChainMap')
HintSignCounter = _HintSign(name='Counter')
HintSignDeque = _HintSign(name='Deque')
HintSignDict = _HintSign(name='Dict')
HintSignDefaultDict = _HintSign(name='DefaultDict')
HintSignList = _HintSign(name='List')
HintSignOrderedDict = _HintSign(name='OrderedDict')
HintSignSet = _HintSign(name='Set')
HintSignFrozenSet = _HintSign(name='FrozenSet')
HintSignNamedTuple = _HintSign(name='NamedTuple')
HintSignTypedDict = _HintSign(name='TypedDict')
HintSignGenerator = _HintSign(name='Generator')

# One-off things.
# AnyStr <-- not a unique type hint (just a constrained "TypeVar")
# cast <-- unusable as a type hint
# final <-- unusable as a type hint
# get_args <-- unusable as a type hint
# get_origin <-- unusable as a type hint
# get_type_hints <-- unusable as a type hint
# is_typeddict <-- unusable as a type hint
HintSignLiteralString = _HintSign(name='LiteralString')
HintSignNever = _HintSign(name='Never')
HintSignNewType = _HintSign(name='NewType')
# no_type_check <-- unusable as a type hint
# no_type_check_decorator <-- unusable as a type hint

# Note that "NoReturn" is contextually valid *ONLY* as a top-level return
# hint. Since this use case is extremely limited, we explicitly generate code
# for this use case outside of the general-purpose code generation pathway for
# standard type hints. Since "NoReturn" is an unsubscriptable singleton, we
# explicitly detect this type hint with an identity test and thus require *NO*
# sign to uniquely identify this type hint.
#
# Theoretically, explicitly defining a sign uniquely identifying this type
# hint could erroneously encourage us to use that sign elsewhere; we should
# avoid that, as "NoReturn" is invalid in almost all possible contexts.
# Pragmatically, doing so nonetheless improves orthogonality when detecting
# and validating PEP-compliant type hints, which ultimately matters more than
# our subjective feelings about the matter. Wisely, we choose pragmatics.
#
# In short, "NoReturn" is insane.
HintSignNoReturn = _HintSign(name='NoReturn')
HintSignNotRequired = _HintSign(name='NotRequired')
# overload <-- unusable as a type hint
HintSignParamSpecArgs = _HintSign(name='ParamSpecArgs')
HintSignParamSpecKwargs = _HintSign(name='ParamSpecKwargs')
HintSignRequired = _HintSign(name='Required')
# runtime_checkable <-- unusable as a type hint
HintSignSelf = _HintSign(name='Self')
# Text <-- not actually a type hint (literal alias for "str")
# TYPE_CHECKING <-- unusable as a type hint
HintSignTypeAlias = _HintSign(name='TypeAlias')
HintSignTypeGuard = _HintSign(name='TypeGuard')
HintSignUnpack = _HintSign(name='Unpack')

# Wrapper namespace for re type aliases.
#
# Note that "typing.__all__" intentionally omits the "Match" and "Pattern"
# attributes, which it oddly considers to comprise another namespace. *shrug*
HintSignMatch = _HintSign(name='Match')
HintSignPattern = _HintSign(name='Pattern')
# ....................{ SIGNS ~ implicit                   }....................
# Signs with *NO* explicit analogues in the stdlib "typing" module but
# nonetheless standardized by one or more PEPs.

# PEP 484 explicitly supports the "None" singleton, albeit implicitly:
#     When used in a type hint, the expression None is considered equivalent
#     to type(None).
# This sign enables detection of the "None" singleton as a type hint (and
# hence its reduction to "type(None)") elsewhere.
HintSignNone = _HintSign(name='None')

# PEP 557 defines the "dataclasses.InitVar" type hint factory for annotating
# class-scoped variable annotations of @dataclass.dataclass-decorated classes.
HintSignDataclassInitVar = _HintSign(name='DataclassInitVar')

# ....................{ SIGNS ~ implicit : lib             }....................
# Signs identifying PEP-noncompliant third-party type hints published by...
#
# ....................{ SIGNS ~ implicit : lib : numpy     }....................
# ...the "numpy.typing" subpackage.
HintSignNumpyArray = _HintSign(name='NumpyArray')  # <-- "numpy.typing.NDArray"

# ....................{ CLEANUP                            }....................
# Prevent all attributes imported above from polluting this namespace. Why?
# Logic elsewhere subsequently assumes a one-to-one mapping between the
# attributes of this namespace and signs.
del _HintSign
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **type hint sign sets** (i.e., frozen set globals aggregating
instances of the :class:`beartype._data.hint.pep.sign.datapepsigncls.HintSign`
class, enabling efficient categorization of signs as belonging to various
categories of type hints).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype._data.hint.pep.sign.datapepsigns import (
HintSignAbstractSet,
HintSignAnnotated,
HintSignAny,
HintSignAsyncContextManager,
HintSignAsyncGenerator,
HintSignAsyncIterator,
HintSignAsyncIterable,
HintSignAwaitable,
HintSignByteString,
HintSignCallable,
HintSignChainMap,
HintSignCollection,
HintSignConcatenate,
HintSignContainer,
HintSignContextManager,
HintSignCoroutine,
HintSignCounter,
HintSignDataclassInitVar,
HintSignDefaultDict,
HintSignDeque,
HintSignDict,
HintSignForwardRef,
HintSignFrozenSet,
HintSignGenerator,
HintSignGeneric,
HintSignHashable,
HintSignItemsView,
HintSignIterable,
HintSignIterator,
HintSignKeysView,
HintSignList,
HintSignLiteral,
HintSignMapping,
HintSignMappingView,
HintSignMatch,
HintSignMutableMapping,
HintSignMutableSequence,
HintSignMutableSet,
HintSignNewType,
HintSignNumpyArray,
HintSignNone,
HintSignOptional,
HintSignOrderedDict,
HintSignParamSpec,
HintSignPattern,
HintSignProtocol,
HintSignReversible,
HintSignSequence,
HintSignSet,
HintSignSized,
HintSignTuple,
HintSignType,
HintSignTypedDict,
HintSignTypeVar,
HintSignUnion,
HintSignValuesView,
)
# ....................{ SIGNS ~ bare }....................
HINT_SIGNS_BARE_IGNORABLE = frozenset((
    # ..................{ PEP 484 }..................
    # The "Any" singleton is semantically synonymous with the ignorable
    # PEP-noncompliant "beartype.cave.AnyType" and hence "object" types.
    HintSignAny,

    # The "Generic" superclass imposes no constraints and is thus also
    # semantically synonymous with the ignorable PEP-noncompliant
    # "beartype.cave.AnyType" and hence "object" types. Since PEP
    # 484 stipulates that *ANY* unsubscripted subscriptable PEP-compliant
    # singleton including "typing.Generic" semantically expands to that
    # singleton subscripted by an implicit "Any" argument, "Generic"
    # semantically expands to the implicit "Generic[Any]" singleton.
    HintSignGeneric,

    # The unsubscripted "Optional" singleton semantically expands to the
    # implicit "Optional[Any]" singleton by the same argument. Since PEP
    # 484 also stipulates that all "Optional[t]" singletons semantically
    # expand to "Union[t, type(None)]" singletons for arbitrary arguments
    # "t", "Optional[Any]" semantically expands to merely "Union[Any,
    # type(None)]". Since all unions subscripted by "Any" semantically
    # reduce to merely "Any", the "Optional" singleton also reduces to
    # merely "Any".
    #
    # This intentionally excludes "Optional[type(None)]", which the
    # "typing" module physically reduces to merely "type(None)". *shrug*
    HintSignOptional,

    # The unsubscripted "Union" singleton semantically expands to the
    # implicit "Union[Any]" singleton by the same argument. Since PEP 484
    # stipulates that a union of one type semantically reduces to only that
    # type, "Union[Any]" semantically reduces to merely "Any". Despite
    # their semantic equivalency, however, these objects remain
    # syntactically distinct with respect to object identification: e.g.,
    #     >>> Union is not Union[Any]
    #     True
    #     >>> Union is not Any
    #     True
    #
    # This intentionally excludes:
    #
    # * The "Union[Any]" and "Union[object]" singletons, since the "typing"
    #   module physically reduces:
    #   * "Union[Any]" to merely "Any" (i.e., "Union[Any] is Any"), which
    #     this frozen set already contains.
    #   * "Union[object]" to merely "object" (i.e., "Union[object] is
    #     object"), which this frozen set also already contains.
    # * "Union" singleton subscripted by one or more ignorable type hints
    #   contained in this set (e.g., "Union[Any, bool, str]"). Since there
    #   exist a countably infinite number of these subscriptions, these
    #   subscriptions *CANNOT* be explicitly listed in this set. Instead,
    #   these subscriptions are dynamically detected by the high-level
    #   beartype._util.hint.pep.utilhinttest.is_hint_ignorable() tester
    #   function and thus referred to as deeply ignorable type hints.
    HintSignUnion,

    # ..................{ PEP 544 }..................
    # Note that ignoring the "typing.Protocol" superclass is vital here. For
    # unknown and presumably uninteresting reasons, *ALL* possible objects
    # satisfy this superclass. Ergo, this superclass is synonymous with the
    # "object" root superclass: e.g.,
    #     >>> import typing as t
    #     >>> isinstance(object(), t.Protocol)
    #     True
    #     >>> isinstance('wtfbro', t.Protocol)
    #     True
    #     >>> isinstance(0x696969, t.Protocol)
    #     True
    HintSignProtocol,
))
'''
Frozen set of all **bare ignorable signs** (i.e., arbitrary objects uniquely
identifying unsubscripted type hints that are unconditionally ignorable by the
:func:`beartype.beartype` decorator).
'''
# ....................{ SETS ~ kind }....................
HINT_SIGNS_CALLABLE_PARAMS = frozenset((
# ..................{ PEP 612 }..................
HintSignConcatenate,
HintSignParamSpec,
))
'''
Frozen set of all **callable argument signs** (i.e., arbitrary objects uniquely
identifying PEP-compliant child type hints typing the argument lists of parent
:class:`collections.abc.Callable` type hints).
This set necessarily excludes:
* **Standard callable argument lists** (e.g., ``Callable[[bool, int], str]``),
which are specified as standard lists and thus identified by *no* signs.
* **Ellipsis callable argument lists** (e.g., ``Callable[..., str]``), which are
specified as the ellipsis singleton and thus identified by *no* signs.
'''
HINT_SIGNS_SEQUENCE_ARGS_1 = frozenset((
# ..................{ PEP (484|585) }..................
HintSignByteString,
HintSignList,
HintSignMutableSequence,
HintSignSequence,
))
'''
Frozen set of all **standard sequence signs** (i.e., arbitrary objects uniquely
identifying PEP-compliant type hints accepting exactly one subscripted type
hint argument constraining *all* items of compliant sequences, which
necessarily satisfy the :class:`collections.abc.Sequence` protocol with
guaranteed ``O(1)`` indexation across all sequence items).
This set intentionally excludes the:
* :attr:`typing.AnyStr` sign, which accepts only the :class:`str` and
:class:`bytes` types as its sole subscripted argument, which does *not*
unconditionally constrain *all* items (i.e., unencoded and encoded characters
respectively) of compliant sequences but instead parametrizes this attribute.
* :attr:`typing.ByteString` sign, which accepts *no* subscripted arguments.
:attr:`typing.ByteString` is simply an alias for the
:class:`collections.abc.ByteString` abstract base class (ABC) and thus
already handled by our fallback logic for supported PEP-compliant type hints.
* :attr:`typing.Deque` sign, whose compliant objects (i.e.,
:class:`collections.deque` instances) only `guarantee O(n) indexation across
all sequence items <collections.deque_>`__:
Indexed access is ``O(1)`` at both ends but slows to ``O(n)`` in the
middle. For fast random access, use lists instead.
* :attr:`typing.NamedTuple` sign, which embeds a variadic number of
PEP-compliant field type hints and thus requires special-cased handling.
* :attr:`typing.Text` sign, which accepts *no* subscripted arguments.
:attr:`typing.Text` is simply an alias for the builtin :class:`str` type and
thus handled elsewhere as a PEP-noncompliant type hint.
* :attr:`typing.Tuple` sign, which accepts a variadic number of subscripted
arguments and thus requires special-cased handling.
.. _collections.deque:
https://docs.python.org/3/library/collections.html#collections.deque
'''
HINT_SIGNS_UNION = frozenset((
# ..................{ PEP 484 }..................
HintSignOptional,
HintSignUnion,
))
'''
Frozen set of all **union signs** (i.e., arbitrary objects uniquely identifying
:pep:`484`-compliant type hints unifying one or more subscripted type hint
arguments into a disjunctive set union of these arguments).
If the active Python interpreter targets:
* Python >= 3.9, the :attr:`typing.Optional` and :attr:`typing.Union`
attributes are distinct.
* Python < 3.9, the :attr:`typing.Optional` attribute reduces to the
:attr:`typing.Union` attribute, in which case this set is technically
semantically redundant. Since tests of both object identity and set
membership are ``O(1)``, this set incurs no significant performance penalty
versus direct usage of the :attr:`typing.Union` attribute and is thus
unconditionally used as is irrespective of Python version.
'''
# ....................{ SIGNS ~ origin }....................
HINT_SIGNS_ORIGIN_ISINSTANCEABLE = frozenset((
# ..................{ PEP (484|585) }..................
HintSignAbstractSet,
HintSignAsyncContextManager,
HintSignAsyncGenerator,
HintSignAsyncIterable,
HintSignAsyncIterator,
HintSignAwaitable,
HintSignByteString,
HintSignCallable,
HintSignChainMap,
HintSignCollection,
HintSignContainer,
HintSignContextManager,
HintSignCoroutine,
HintSignCounter,
HintSignDefaultDict,
HintSignDeque,
HintSignDict,
HintSignFrozenSet,
HintSignGenerator,
HintSignHashable,
HintSignItemsView,
HintSignIterable,
HintSignIterator,
HintSignKeysView,
HintSignList,
HintSignMapping,
HintSignMappingView,
HintSignMatch,
HintSignMutableMapping,
HintSignMutableSequence,
HintSignMutableSet,
HintSignOrderedDict,
HintSignPattern,
HintSignReversible,
HintSignSequence,
HintSignSet,
HintSignSized,
HintSignTuple,
HintSignType,
HintSignValuesView,
))
'''
Frozen set of all signs uniquely identifying PEP-compliant type hints
originating from an **isinstanceable origin type** (i.e., isinstanceable class
such that *all* objects satisfying this hint are instances of this class).
All hints identified by signs in this set are guaranteed to define
``__origin__`` dunder instance variables whose values are the standard origin
types they originate from. Since any object is trivially type-checkable against
such a type by passing that object and type to the :func:`isinstance` builtin,
*all* objects annotated by hints identified by signs in this set are at least
shallowly type-checkable from wrapper functions generated by the
:func:`beartype.beartype` decorator.
'''
# ....................{ SIGNS ~ origin : args }....................
HINT_SIGNS_ORIGIN_ISINSTANCEABLE_ARGS_1 = frozenset((
HintSignAbstractSet,
HintSignAsyncContextManager,
HintSignAsyncIterable,
HintSignAsyncIterator,
HintSignAwaitable,
HintSignCollection,
HintSignContainer,
HintSignContextManager,
HintSignCounter,
HintSignDeque,
HintSignFrozenSet,
HintSignIterable,
HintSignIterator,
HintSignKeysView,
HintSignList,
HintSignMatch,
HintSignMappingView,
HintSignMutableSequence,
HintSignMutableSet,
HintSignPattern,
HintSignReversible,
HintSignSequence,
HintSignSet,
HintSignType,
HintSignValuesView,
))
'''
Frozen set of all signs uniquely identifying **single-argument PEP-compliant
type hints** (i.e., type hints subscriptable by only one child type hint)
originating from an **isinstanceable origin type** (i.e., isinstanceable class
such that *all* objects satisfying this hint are instances of this class).
Note that the corresponding types in the typing module will have an ``_nparams``
instance variable with a value equal to 1.
'''
HINT_SIGNS_ORIGIN_ISINSTANCEABLE_ARGS_2 = frozenset((
HintSignAsyncGenerator,
# HintSignCallable, # defined explicitly below
HintSignChainMap,
HintSignDefaultDict,
HintSignDict,
HintSignItemsView,
HintSignMapping,
HintSignMutableMapping,
HintSignOrderedDict,
))
'''
Frozen set of all signs uniquely identifying **two-argument PEP-compliant
type hints** (i.e., type hints subscriptable by exactly two child type hints)
Note that the corresponding types in the typing module will have an ``_nparams``
instance variable with a value equal to 2.
'''
HINT_SIGNS_ORIGIN_ISINSTANCEABLE_ARGS_3 = frozenset((
HintSignCoroutine,
HintSignGenerator,
))
'''
Frozen set of all signs uniquely identifying **three-argument PEP-compliant
type hints** (i.e., type hints subscriptable by exactly three child type hints)
Note that the corresponding types in the typing module will have an ``_nparams``
instance variable with a value equal to 3.
'''
# ....................{ SIGNS ~ return }....................
HINT_SIGNS_RETURN_GENERATOR_ASYNC = frozenset((
# ..................{ PEP (484|585) }..................
HintSignAsyncGenerator,
HintSignAsyncIterable,
HintSignAsyncIterator,
))
'''
Frozen set of all signs uniquely identifying **PEP-compliant asynchronous
generator return type hints** (i.e., hints permissible as the return
annotations of asynchronous generators).
See Also
----------
:data:`HINT_SIGNS_RETURN_GENERATOR_SYNC`
Further discussion.
'''
HINT_SIGNS_RETURN_GENERATOR_SYNC = frozenset((
# ..................{ PEP (484|585) }..................
HintSignGenerator,
HintSignIterable,
HintSignIterator,
))
'''
Frozen set of all signs uniquely identifying **PEP-compliant synchronous
generator return type hints** (i.e., hints permissible as the return
annotations of synchronous generators).
Generator callables are simply syntactic sugar for non-generator callables
returning generator objects. For this reason, generator callables *must* be
annotated as returning a type compatible with generator objects -- including:
* :data:`HintSignGenerator`, the narrowest abstract base class (ABC) to which
all generator objects necessarily conform.
* :data:`HintSignIterator`, the immediate superclass of
:data:`HintSignGenerator`.
* :data:`HintSignIterable`, the immediate superclass of
:data:`HintSignIterator`.
Technically, :pep:`484` states that generator callables may only be annotated
as only returning a subscription of the :attr:`typing.Generator` factory:
The return type of generator functions can be annotated by the generic type
``Generator[yield_type, send_type, return_type]`` provided by ``typing.py``
module:
Pragmatically, official documentation for the :mod:`typing` module seemingly
*never* standardized by an existing PEP additionally states that generator
callables may be annotated as also returning a subscription of either the
:attr:`typing.Iterable` or :attr:`typing.Iterator` factories:
Alternatively, annotate your generator as having a return type of either
``Iterable[YieldType]`` or ``Iterator[YieldType]``:
See Also
----------
https://github.com/beartype/beartype/issues/65#issuecomment-954468111
Further discussion.
'''
# ....................{ SIGNS ~ type }....................
HINT_SIGNS_TYPE_MIMIC = frozenset((
# ..................{ PEP 484 }..................
HintSignNewType,
# ..................{ PEP 593 }..................
HintSignAnnotated,
))
'''
Frozen set of all signs uniquely identifying **PEP-compliant type hint mimics**
(i.e., hints maliciously masquerading as another type by explicitly overriding
their ``__module__`` dunder instance variable to that of that type).
Notably, this set contains the signs of:
* :pep:`484`-compliant :attr:`typing.NewType` type hints under Python >= 3.10,
which badly masquerade as their first passed argument to such an extreme
degree that they even intentionally prefix their machine-readable
representation by the fully-qualified name of the caller's module: e.g.,
.. code-block:: python
# Under Python >= 3.10:
>>> import typing
>>> new_type = typing.NewType('List', bool)
>>> repr(new_type)
__main__.List # <---- this is genuine bollocks
* :pep:`593`-compliant :attr:`typing.Annotated` type hints, which badly
masquerade as their first subscripted argument (e.g., the :class:`int` in
``typing.Annotated[int, 63]``) such that the value of the ``__module__``
attributes of these hints is that of that argument rather than their own.
Oddly, their machine-readable representation remains prefixed by
``"typing."``, enabling an efficient test that also generalizes to all other
outlier edge cases that are probably lurking about.
I have no code and I must scream.
'''
# ....................{ SETS ~ supported }....................
_HINT_SIGNS_SUPPORTED_SHALLOW = frozenset((
# ..................{ PEP 484 }..................
HintSignTypeVar,
# ..................{ PEP 589 }..................
#FIXME: Shift into "HINT_SIGNS_SUPPORTED_DEEP" *AFTER* deeply type-checking
#typed dictionaries.
HintSignTypedDict,
))
'''
Frozen set of all **shallowly supported non-originative signs** (i.e.,
arbitrary objects uniquely identifying PEP-compliant type hints *not*
originating from an isinstanceable type for which the :func:`beartype.beartype`
decorator generates shallow type-checking code).
'''
HINT_SIGNS_SUPPORTED_DEEP = frozenset((
# ..................{ PEP 484 }..................
# Note that the "NoReturn" type hint is invalid in almost all possible
# syntactic contexts and thus intentionally omitted here. See the
# "datapepsigns" submodule for further commentary.
HintSignAny,
HintSignForwardRef,
HintSignNewType,
HintSignNone,
# Note that "typing.Union" implicitly subsumes "typing.Optional" *ONLY*
# under Python <= 3.9. The implementations of the "typing" module under
# those older Python versions transparently reduced "typing.Optional" to
# "typing.Union" at runtime. Since this reduction is no longer the case,
# both *MUST* now be explicitly listed here.
HintSignOptional,
HintSignUnion,
# ..................{ PEP (484|585) }..................
HintSignByteString,
HintSignGeneric,
HintSignList,
HintSignMutableSequence,
HintSignSequence,
HintSignTuple,
HintSignType,
# ..................{ PEP 544 }..................
HintSignProtocol,
# ..................{ PEP 557 }..................
HintSignDataclassInitVar,
# ..................{ PEP 586 }..................
HintSignLiteral,
# ..................{ PEP 593 }..................
HintSignAnnotated,
# ..................{ NON-PEP ~ package : numpy }..................
HintSignNumpyArray,
))
'''
Frozen set of all **deeply supported signs** (i.e., arbitrary objects uniquely
identifying PEP-compliant type hints for which the :func:`beartype.beartype`
decorator generates deeply type-checking code).
This set contains *every* sign explicitly supported by one or more conditional
branches in the body of the
:func:`beartype._check.expr.exprmake.make_func_wrapper_code` function
generating code deeply type-checking the current pith against the PEP-compliant
type hint annotated by a subscription of that attribute.
'''
# Note that the union of two or more frozen sets is itself a frozen set,
# obviating the need to (re)wrap this union in a redundant frozenset() call
# (which would otherwise needlessly copy the union).
HINT_SIGNS_SUPPORTED = (
    # Set of all deeply supported signs.
    HINT_SIGNS_SUPPORTED_DEEP |
    # Set of all shallowly supported signs *NOT* originating from a class.
    _HINT_SIGNS_SUPPORTED_SHALLOW |
    # Set of all shallowly supported signs originating from a class.
    HINT_SIGNS_ORIGIN_ISINSTANCEABLE
)
'''
Frozen set of all **supported signs** (i.e., arbitrary objects uniquely
identifying PEP-compliant type hints).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **sign classes** (i.e., classes whose instances uniquely
identifying PEP-compliant type hints in a safe, non-deprecated manner
regardless of the Python version targeted by the active Python interpreter).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.typing import Union
# ....................{ CLASSES }....................
class HintSign(object):
    '''
    **Sign** (i.e., object uniquely identifying PEP-compliant type hints in a
    safe, non-deprecated manner regardless of the Python version targeted by
    the active Python interpreter).

    Attributes
    ----------
    name : str
        Unqualified name of the :mod:`typing` attribute uniquely identified by
        this sign (e.g., ``Literal`` for :pep:`586`-compliant type hints).
    '''

    # ..................{ CLASS VARIABLES }..................
    # Slot the sole instance variable defined on this object, avoiding the
    # per-instance "__dict__" and thus minimizing the time complexity of both
    # reading and writing that variable across frequently called @beartype
    # decorations. Slotting has been shown to reduce read and write costs by
    # approximately ~10%, which is non-trivial.
    __slots__ = ('name',)

    # ..................{ DUNDERS }..................
    def __init__(self, name: str) -> None:
        '''
        Initialize this sign with the passed name.

        Parameters
        ----------
        name : str
            Unqualified name of the :mod:`typing` attribute uniquely
            identified by this sign (e.g., ``Literal`` for
            :pep:`586`-compliant type hints).
        '''
        assert isinstance(name, str), f'{repr(name)} not string.'

        # Classify the passed name against this sign.
        self.name = name


    def __repr__(self) -> str:
        '''
        Machine-readable representation of this sign, synthesized by prefixing
        the name of this sign by the substring ``"HintSign"`` (e.g.,
        ``HintSignLiteral`` for the sign whose name is ``Literal``).
        '''

        return 'HintSign' + self.name
# ....................{ HINTS }....................
# Type hint matching either a sign *OR* an isinstanceable class, as accepted
# by callers generically handling both kinds of hint identifiers.
HintSignOrType = Union[HintSign, type]
'''
PEP-compliant type hint matching either a **sign** (i.e., object uniquely
identifying PEP-compliant type hints in a safe, non-deprecated manner
regardless of the Python version targeted by the active Python interpreter) or
**isinstanceable class** (i.e., class safely passable as the second argument to
the :func:`isinstance` builtin).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **class globals** (i.e., global constants describing various
well-known types).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
#FIXME: Export these types from "beartype.cave", please.
from beartype._cave._cavefast import (
AsyncCoroutineCType,
AsyncGeneratorCType,
CallableCodeObjectType,
CallableFrameType,
ClassDictType,
ClassType,
ClosureVarCellType,
EllipsisType,
ExceptionTracebackType,
FunctionType,
FunctionOrMethodCType,
GeneratorCType,
MethodBoundInstanceDunderCType,
MethodBoundInstanceOrClassType,
MethodDecoratorBuiltinTypes,
MethodUnboundClassCType,
MethodUnboundInstanceDunderCType,
MethodUnboundInstanceNondunderCType,
MethodUnboundPropertyNontrivialCExtensionType,
MethodUnboundPropertyTrivialCExtensionType,
ModuleType,
NoneType,
NotImplementedType,
)
# ....................{ SETS }....................
TYPES_BUILTIN_FAKE = frozenset((
AsyncCoroutineCType,
AsyncGeneratorCType,
CallableCodeObjectType,
CallableFrameType,
ClassDictType,
ClosureVarCellType,
EllipsisType,
ExceptionTracebackType,
FunctionType,
FunctionOrMethodCType,
GeneratorCType,
MethodBoundInstanceDunderCType,
MethodBoundInstanceOrClassType,
MethodUnboundClassCType,
MethodUnboundInstanceDunderCType,
MethodUnboundInstanceNondunderCType,
MethodUnboundPropertyNontrivialCExtensionType,
MethodUnboundPropertyTrivialCExtensionType,
ModuleType,
NoneType,
NotImplementedType,
))
'''
Frozen set of all **fake builtin types** (i.e., types that are *not* builtin
but which nonetheless erroneously masquerade as being builtin).
Like all non-builtin types, fake builtin types are globally inaccessible until
explicitly imported into the current lexical variable scope. Unlike all
non-builtin types, however, fake builtin types declare themselves to be
builtin. The standard example is the type of the ``None`` singleton: e.g.,
.. code-block:: python
>>> f'{type(None).__module__}.{type(None).__name__}'
'builtins.NoneType'
>>> NoneType
NameError: name 'NoneType' is not defined # <---- this is balls
These inconsistencies almost certainly constitute bugs in the CPython
interpreter itself, but it seems doubtful CPython developers would see it that
way and almost certain everyone else would defend these edge cases.
We're *not* dying on that lonely hill. We obey the Iron Law of Guido.
See Also
----------
:data:`beartype_test.a00_unit.data.TYPES_BUILTIN_FAKE`
Related test-time set. Whereas this runtime-specific set is efficiently
defined explicitly by listing all non-builtin builtin mimic types, that
test-specific set is inefficiently defined implicitly by introspecting the
:mod:`builtins` module. While less efficient, that test-specific set serves
as an essential sanity check on that runtime-specific set.
'''
# ....................{ TUPLES }....................
# Types of *ALL* objects that may be decorated by @beartype, intentionally
# listed in descending order of real-world prevalence for negligible efficiency
# gains when performing isinstance()-based tests against this tuple. These
# include the types of *ALL*...
TYPES_BEARTYPEABLE = (
    # Pure-Python unbound functions and methods.
    FunctionType,
    # Pure-Python classes.
    ClassType,
    # C-based builtin method descriptors wrapping pure-Python unbound methods,
    # including class methods, static methods, and property methods.
    MethodDecoratorBuiltinTypes,
)
'''
Tuple set of all **beartypeable types** (i.e., types of all objects that may be
decorated by the :func:`beartype.beartype` decorator).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **standard Python module globals** (i.e., global constants
describing modules and packages bundled with CPython's standard library).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
# ....................{ NAMES }....................
BUILTINS_MODULE_NAME = 'builtins'
'''
Fully-qualified name of the **builtins module** (i.e., objects defined by the
standard :mod:`builtins` module and thus globally available by default
*without* requiring explicit importation).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **typing module globals** (i.e., global constants describing
quasi-standard typing modules).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
# ....................{ SETS }....................
TYPING_MODULE_NAMES_STANDARD = frozenset((
# Official typing module bundled with the Python stdlib.
'typing',
# Third-party typing compatibility layer bundled with @beartype itself.
'beartype.typing',
))
'''
Frozen set of the fully-qualified names of all **standard typing modules**
(i.e., modules whose public APIs *exactly* conform to that of the standard
:mod:`typing` module).
This set includes both the standard :mod:`typing` module and comparatively
more standard :mod:`beartype.typing` submodule while excluding the third-party
:mod:`typing_extensions` module, whose runtime behaviour often significantly
diverges in non-standard fashion from that of the aforementioned modules.
'''
TYPING_MODULE_NAMES = TYPING_MODULE_NAMES_STANDARD | frozenset((
# Third-party module backporting "typing" attributes introduced in newer
# Python versions to older Python versions.
'typing_extensions',
))
'''
Frozen set of the fully-qualified names of all **quasi-standard typing
modules** (i.e., modules defining attributes usable for creating PEP-compliant
type hints accepted by both static and runtime type checkers).
'''
TYPING_MODULE_NAMES_DOTTED = frozenset(
f'{typing_module_name}.' for typing_module_name in TYPING_MODULE_NAMES)
'''
Frozen set of the fully-qualified ``.``-suffixed names of all typing modules.
This set is a negligible optimization enabling callers to perform slightly more
efficient testing of string prefixes against items of this specialized set than
those of the more general-purpose :data:`TYPING_MODULE_NAMES` set.
See Also
----------
:data:`TYPING_MODULE_NAMES`
Further details.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **callable globals** (i.e., global constants describing various
well-known functions and methods).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
# ....................{ SETS }....................
#FIXME: Rename to "METHOD_NAMES_DUNDER_BINARY" for clarity.
# NOTE(review): this set includes Python 2-era names (e.g., "__cmp__",
# "__div__", "__idiv__", "__rdiv__") never called by Python 3 itself --
# presumably retained to match third-party classes still defining them;
# confirm before pruning.
METHOD_NAMES_BINARY_DUNDER = frozenset((
    '__add__',
    '__and__',
    '__cmp__',
    '__divmod__',
    '__div__',
    '__eq__',
    '__floordiv__',
    '__ge__',
    '__gt__',
    '__iadd__',
    '__iand__',
    '__idiv__',
    '__ifloordiv__',
    '__ilshift__',
    '__imatmul__',
    '__imod__',
    '__imul__',
    '__ior__',
    '__ipow__',
    '__irshift__',
    '__isub__',
    '__itruediv__',
    '__ixor__',
    '__le__',
    '__lshift__',
    '__lt__',
    '__matmul__',
    '__mod__',
    '__mul__',
    '__ne__',
    '__or__',
    '__pow__',
    '__radd__',
    '__rand__',
    '__rdiv__',
    '__rfloordiv__',
    '__rlshift__',
    '__rmatmul__',
    '__rmod__',
    '__rmul__',
    '__ror__',
    '__rpow__',
    '__rrshift__',
    '__rshift__',
    '__rsub__',
    '__rtruediv__',
    '__rxor__',
    '__sub__',
    '__truediv__',
    '__xor__',
))
'''
Frozen set of the unqualified names of all **binary dunder methods** (i.e.,
methods whose names are both prefixed and suffixed by ``__``, which the active
Python interpreter implicitly calls to perform binary operations on instances
whose first operands are instances of the classes declaring those methods).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype warning hierarchy.**
This private submodule publishes a hierarchy of both public and private
:mod:`beartype`-specific warnings emitted at decoration, call, and usage time.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from abc import ABCMeta as _ABCMeta
# ....................{ SUPERCLASS }....................
class BeartypeWarning(UserWarning, metaclass=_ABCMeta):
    '''
    Abstract base class of all **beartype warnings.**

    Instances of subclasses of this warning are emitted either:

    * At decoration time from the :func:`beartype.beartype` decorator.
    * At call time from the new callable generated by the
      :func:`beartype.beartype` decorator to wrap the original callable.
    * At Sphinx-based documentation building time from Python code invoked by
      the ``doc/Makefile`` file.
    '''

    # ..................{ INITIALIZERS }..................
    def __init__(self, message: str) -> None:
        '''
        Initialize this warning with the passed message.

        To improve the readability of warning messages and discourage end
        users from accessing this private submodule, this constructor also
        sanitizes the fully-qualified module name of this warning's class from
        the private ``"beartype.roar._roarwarn"`` submodule to the public
        ``"beartype.roar"`` subpackage.
        '''
        # Initialize the superclass with the passed message.
        super().__init__(message)

        # Masquerade the class of this warning as residing in the public
        # "beartype.roar" subpackage. See the docstring for justification.
        self.__class__.__module__ = 'beartype.roar'
# ....................{ CLAW }....................
class BeartypeClawWarning(BeartypeWarning):
    '''
    Abstract base class of all **beartype import hook warnings.**

    Instances of subclasses of this warning are emitted at module importation
    time from the import hooks registered by the :func:`beartype.claw`
    subpackage, typically when the :func:`beartype.beartype` decorator fails
    to decorate callables or classes in modules imported by those hooks.
    '''
class BeartypeClawDecorationWarning(BeartypeClawWarning):
    '''
    **Beartype import hook decoration warning.**

    Emitted at module importation time from the import hooks registered by the
    :func:`beartype.claw` subpackage whenever the :func:`beartype.beartype`
    decorator fails to decorate a callable or class declared in a module
    imported by those hooks.
    '''
# ....................{ DECORATOR ~ hint : pep }....................
class BeartypeDecorHintPepWarning(BeartypeWarning):
    '''
    Abstract base class of all **beartype decorator PEP-compliant type hint
    warnings.**

    Subclass instances are emitted at decoration time by the
    :func:`beartype.beartype` decorator when the decorated callable is
    annotated by a suspicious (albeit *not* necessarily erroneous)
    PEP-compliant type hint meriting a non-fatal warning rather than a
    fatal exception.
    '''
#FIXME: Consider removal.
# class BeartypeDecorHintPepIgnorableDeepWarning(BeartypeDecorHintPepWarning):
# '''
# **Beartype decorator deeply ignorable PEP-compliant type hint warning.**
#
# This warning is emitted at decoration time from the
# :func:`beartype.beartype` decorator on receiving a callable annotated by
# one or more **deeply ignorable PEP-compliant type hints** (i.e., instances or classes declared
# by the stdlib :mod:`typing` module) currently unsupported by this
# decorator.
# '''
#
# pass
#FIXME: Consider removal.
# class BeartypeDecorHintPepUnsupportedWarning(BeartypeWarning):
# '''
# **Beartype decorator unsupported PEP-compliant type hint warning.**
#
# This warning is emitted at decoration time from the
# :func:`beartype.beartype` decorator on receiving a callable annotated with
# one or more PEP-compliant type hints (e.g., instances or classes declared
# by the stdlib :mod:`typing` module) currently unsupported by this
# decorator.
# '''
#
# pass
# ....................{ DECORATOR ~ hint : pep : deprecate }....................
class BeartypeDecorHintPepDeprecationWarning(BeartypeDecorHintPepWarning):
    '''
    **Beartype decorator PEP-compliant type hint deprecation warning.**

    Emitted at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more **deprecated
    PEP-compliant type hints** (i.e., hints complying with outdated PEPs
    since obsoleted by more recent PEPs), including:

    * Under Python >= 3.9 (which supports :pep:`585`), outdated
      :pep:`484`-compliant hints (e.g., ``typing.List[int]``) obsoleted by
      their :pep:`585`-compliant equivalents (e.g., ``list[int]``).
    '''
class BeartypeDecorHintPep585DeprecationWarning(
    BeartypeDecorHintPepDeprecationWarning):
    '''
    **Beartype decorator** :pep:`585`-mandated **deprecation of**
    :pep:`484`-compliant **type hint warning.**

    Emitted at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more outdated
    :pep:`484`-compliant type hints (e.g., ``typing.List[int]``) obsoleted
    by their :pep:`585`-compliant equivalents (e.g., ``list[int]``) --
    provided the active Python interpreter targets Python >= 3.9 and thus
    supports :pep:`585`.

    See Also
    ----------
    https://github.com/beartype/beartype#pep-585-deprecations
        Further discussion.
    '''
# ....................{ DECORATOR ~ hint : non-pep }....................
class BeartypeDecorHintNonpepWarning(BeartypeWarning):
    '''
    Abstract base class of all **beartype decorator PEP-noncompliant type
    hint warnings.**

    Subclass instances are emitted at decoration time by the
    :func:`beartype.beartype` decorator when the decorated callable is
    annotated by a suspicious (albeit *not* necessarily erroneous)
    PEP-noncompliant type hint meriting a non-fatal warning rather than a
    fatal exception.
    '''
class BeartypeDecorHintNonpepNumpyWarning(BeartypeDecorHintNonpepWarning):
    '''
    **Beartype decorator PEP-noncompliant NumPy type hint warning.**

    Emitted at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by a suspicious NumPy type
    hint, including:

    * **Typed NumPy arrays** (i.e., ``numpy.typed.NDArray[...]`` type
      hints) under Python < 3.8, which this decorator currently reduces to
      **untyped NumPy arrays** (i.e., :class:`numpy.ndarray`).
    '''
# ....................{ MODULE }....................
class BeartypeModuleWarning(BeartypeWarning):
    '''
    Abstract base class of all **beartype module warnings.**

    Subclass instances are emitted at various times (including at
    decoration time by the :func:`beartype.beartype` decorator) on failure
    to import optional third-party modules, packages, or C extensions --
    situations meriting non-fatal warnings rather than fatal exceptions.
    '''
class BeartypeModuleNotFoundWarning(BeartypeModuleWarning):
    '''
    **Beartype missing optional dependency warning.**

    Emitted at various times to notify the user of a **missing recommended
    optional dependency** (i.e., a third-party Python package that is *not*
    installed under the active Python interpreter but whose installation,
    while technically optional, is recommended).
    '''
class BeartypeModuleUnimportableWarning(BeartypeModuleWarning):
    '''
    **Beartype unimportable optional dependency warning.**

    Emitted at various times to notify the user of an **unimportable
    optional dependency** (i.e., a third-party Python package that is
    installed under the active Python interpreter yet raises unexpected
    exceptions from module scope when imported).
    '''
# ....................{ SPHINX }....................
#FIXME: Consider removal.
# class BeartypeSphinxWarning(BeartypeWarning, metaclass=_ABCMeta):
# '''
# Abstract base class of all **beartype Sphinx warnings.**
#
# Instances of subclasses of this warning are emitted at Sphinx-based
# documentation building time from the ``doc/Makefile`` file in various edge
# cases warranting non-fatal warnings *without* raising fatal exceptions.
# '''
#
# pass
# ....................{ VALE }....................
class BeartypeValeWarning(BeartypeWarning):
    '''
    Abstract base class of all **beartype data validation warnings.**

    Subclass instances are emitted at usage time (e.g., instantiation,
    method call) by the class hierarchy published by the
    :func:`beartype.vale` subpackage for suspicious (albeit *not*
    necessarily erroneous) PEP-compliant type hints meriting non-fatal
    warnings rather than fatal exceptions.
    '''
class BeartypeValeLambdaWarning(BeartypeValeWarning):
    '''
    **Beartype data validation lambda function warning.**

    Emitted when the :func:`repr` builtin is passed an instance of the
    :class:`beartype.vale.Is` class subscripted by a lambda function whose
    definition cannot be parsed from the script or module file defining
    that lambda.
    '''
# ....................{ PRIVATE ~ util }....................
class _BeartypeUtilWarning(BeartypeWarning):
    '''
    Abstract base class of all **beartype private utility warnings.**

    Subclass instances are emitted by *most* (but *not* all) private
    submodules of the private :mod:`beartype._util` subpackage. Such
    warnings signal non-critical internal issues and should therefore
    *never* be emitted -- much less allowed to percolate up the call stack
    to end users.
    '''
# ....................{ PRIVATE ~ util : call }....................
class _BeartypeUtilCallableWarning(_BeartypeUtilWarning):
    '''
    **Beartype decorator memoization decorator keyword argument warnings.**

    Emitted by callables memoized with the
    :func:`beartype._util.cache.utilcachecall.callable_cached` decorator
    when called with one or more keyword arguments. Memoizing keyword
    arguments consumes substantially more space and time than memoizing the
    equivalent positional arguments, partially defeating the point of
    memoization in the first place.

    This warning signals a critical internal issue and should therefore
    *never* be emitted to end users.
    '''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype exception and warning hierarchies.**
This submodule publishes a hierarchy of:
* :mod:`beartype`-specific exceptions raised both by:
* The :func:`beartype.beartype` decorator at decoration and call time.
* Other public submodules and subpackages at usage time, including
user-defined data validators imported from the :mod:`beartype.vale`
subpackage.
* :mod:`beartype`-specific warnings emitted at similar times.
Hear :mod:`beartype` roar as it efficiently checks types, validates data, and
raids native beehives for organic honey.
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To prevent "mypy --no-implicit-reexport" from raising literally
# hundreds of errors at static analysis time, *ALL* public attributes *MUST* be
# explicitly reimported under the same names with "{exception_name} as
# {exception_name}" syntax rather than merely "{exception_name}". Yes, this is
# ludicrous. Yes, this is mypy. For posterity, these failures resemble:
# beartype/_cave/_cavefast.py:47: error: Module "beartype.roar" does not
# explicitly export attribute "BeartypeCallUnavailableTypeException";
# implicit reexport disabled [attr-defined]
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Public exception hierarchy.
from beartype.roar._roarexc import (
# Exceptions.
BeartypeException as BeartypeException,
BeartypeCaveException as BeartypeCaveException,
BeartypeCaveNoneTypeOrException as BeartypeCaveNoneTypeOrException,
BeartypeCaveNoneTypeOrKeyException as BeartypeCaveNoneTypeOrKeyException,
BeartypeCaveNoneTypeOrMutabilityException as BeartypeCaveNoneTypeOrMutabilityException,
BeartypeClawException as BeartypeClawException,
BeartypeClawRegistrationException as BeartypeClawRegistrationException,
BeartypeConfException as BeartypeConfException,
BeartypeDoorException as BeartypeDoorException,
BeartypeDoorNonpepException as BeartypeDoorNonpepException,
BeartypeDoorPepException as BeartypeDoorPepException,
BeartypeDoorPepUnsupportedException as BeartypeDoorPepUnsupportedException,
BeartypeDecorException as BeartypeDecorException,
BeartypeDecorWrappeeException as BeartypeDecorWrappeeException,
BeartypeDecorWrapperException as BeartypeDecorWrapperException,
BeartypeDecorHintException as BeartypeDecorHintException,
BeartypeDecorHintForwardRefException as BeartypeDecorHintForwardRefException,
BeartypeDecorHintNonpepException as BeartypeDecorHintNonpepException,
BeartypeDecorHintNonpepNumpyException as BeartypeDecorHintNonpepNumpyException,
BeartypeDecorHintPepException as BeartypeDecorHintPepException,
BeartypeDecorHintPepSignException as BeartypeDecorHintPepSignException,
BeartypeDecorHintPepUnsupportedException as BeartypeDecorHintPepUnsupportedException,
BeartypeDecorHintPep484Exception as BeartypeDecorHintPep484Exception,
BeartypeDecorHintPep484585Exception as BeartypeDecorHintPep484585Exception,
BeartypeDecorHintPep544Exception as BeartypeDecorHintPep544Exception,
BeartypeDecorHintPep557Exception as BeartypeDecorHintPep557Exception,
BeartypeDecorHintPep585Exception as BeartypeDecorHintPep585Exception,
BeartypeDecorHintPep586Exception as BeartypeDecorHintPep586Exception,
BeartypeDecorHintPep593Exception as BeartypeDecorHintPep593Exception,
BeartypeDecorHintPep3119Exception as BeartypeDecorHintPep3119Exception,
BeartypeDecorHintTypeException as BeartypeDecorHintTypeException,
BeartypeDecorParamException as BeartypeDecorParamException,
BeartypeDecorParamNameException as BeartypeDecorParamNameException,
BeartypeCallException as BeartypeCallException,
BeartypeCallUnavailableTypeException as BeartypeCallUnavailableTypeException,
BeartypeCallHintException as BeartypeCallHintException,
BeartypeCallHintForwardRefException as BeartypeCallHintForwardRefException,
BeartypePepException as BeartypePepException,
BeartypePep563Exception as BeartypePep563Exception,
BeartypeValeException as BeartypeValeException,
BeartypeValeSubscriptionException as BeartypeValeSubscriptionException,
BeartypeValeValidationException as BeartypeValeValidationException,
# Violations (i.e., exceptions raised during runtime type-checking).
BeartypeCallHintViolation as BeartypeCallHintViolation,
BeartypeCallHintParamViolation as BeartypeCallHintParamViolation,
BeartypeCallHintReturnViolation as BeartypeCallHintReturnViolation,
BeartypeDoorHintViolation as BeartypeDoorHintViolation,
)
# Public warning hierarchy.
from beartype.roar._roarwarn import (
BeartypeWarning as BeartypeWarning,
BeartypeClawWarning as BeartypeClawWarning,
BeartypeClawDecorationWarning as BeartypeClawDecorationWarning,
BeartypeDecorHintPepWarning as BeartypeDecorHintPepWarning,
BeartypeDecorHintPepDeprecationWarning as BeartypeDecorHintPepDeprecationWarning,
BeartypeDecorHintPep585DeprecationWarning as BeartypeDecorHintPep585DeprecationWarning,
BeartypeDecorHintNonpepWarning as BeartypeDecorHintNonpepWarning,
BeartypeDecorHintNonpepNumpyWarning as BeartypeDecorHintNonpepNumpyWarning,
BeartypeModuleNotFoundWarning as BeartypeModuleNotFoundWarning,
BeartypeModuleUnimportableWarning as BeartypeModuleUnimportableWarning,
BeartypeValeWarning as BeartypeValeWarning,
BeartypeValeLambdaWarning as BeartypeValeLambdaWarning,
)
# ....................{ DEPRECATIONS }....................
def __getattr__(attr_deprecated_name: str) -> object:
    '''
    Dynamically retrieve a deprecated attribute with the passed unqualified
    name from this submodule, emitting a non-fatal deprecation warning on
    each such retrieval if this submodule defines this attribute *or*
    raising an exception otherwise.

    The Python interpreter implicitly calls this :pep:`562`-compliant
    module dunder function under Python >= 3.7 *after* failing to directly
    retrieve an explicit attribute with this name from this submodule.
    Since this dunder function is only called on error, neither space nor
    time efficiency matter here.

    Parameters
    ----------
    attr_deprecated_name : str
        Unqualified name of the deprecated attribute to be retrieved.

    Returns
    ----------
    object
        Value of this deprecated attribute.

    Warns
    ----------
    :class:`DeprecationWarning`
        If this attribute is deprecated.

    Raises
    ----------
    :exc:`AttributeError`
        If this attribute is unrecognized and thus erroneous.
    '''

    # Isolate imports to avoid polluting the module namespace.
    from beartype._util.mod.utilmoddeprecate import deprecate_module_attr

    # Map from the unqualified name of each deprecated attribute to the
    # unqualified name of the corresponding non-deprecated attribute.
    deprecated_name_to_current_name = {
        'BeartypeAbbyException': (
            'BeartypeDoorException'),
        'BeartypeAbbyHintViolation': (
            'BeartypeDoorHintViolation'),
        'BeartypeAbbyTesterException': (
            'BeartypeDoorException'),
        'BeartypeCallHintPepException': (
            'BeartypeCallHintViolation'),
        'BeartypeCallHintPepParamException': (
            'BeartypeCallHintParamViolation'),
        'BeartypeCallHintPepReturnException': (
            'BeartypeCallHintReturnViolation'),
        'BeartypeDecorHintNonPepException': (
            'BeartypeDecorHintNonpepException'),
        'BeartypeDecorHintNonPepNumPyException': (
            'BeartypeDecorHintNonpepNumpyException'),
        'BeartypeDecorHintPep563Exception': (
            'BeartypePep563Exception'),
        'BeartypeDecorHintPepDeprecatedWarning': (
            'BeartypeDecorHintPepDeprecationWarning'),
        'BeartypeDecorPepException': (
            'BeartypePepException'),
    }

    # Emit a deprecation warning and return this attribute's value.
    return deprecate_module_attr(
        attr_deprecated_name=attr_deprecated_name,
        attr_deprecated_name_to_nondeprecated_name=(
            deprecated_name_to_current_name),
        attr_nondeprecated_name_to_value=globals(),
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype exception hierarchy.**
This private submodule publishes a hierarchy of both public and private
:mod:`beartype`-specific exceptions raised at decoration, call, and usage time.
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from abc import ABCMeta as _ABCMeta
# ....................{ SUPERCLASS }....................
class BeartypeException(Exception, metaclass=_ABCMeta):
    '''
    Root of the beartype exception hierarchy.

    Subclass instances are raised either:

    * At decoration time by the :func:`beartype.beartype` decorator.
    * At call time by the new callable which that decorator generates to
      wrap the original callable.
    '''

    # ..................{ INITIALIZERS }..................
    def __init__(self, message: str) -> None:
        '''
        Initialize this exception with the passed message.

        This initializer (in order):

        #. Forwards all passed arguments as is to the superclass
           initializer.
        #. Rewrites the fully-qualified module name of this exception's
           class from the private ``"beartype.roar._roarexc"`` submodule to
           the public ``"beartype.roar"`` subpackage, both improving the
           readability of exception messages and deterring end users from
           accessing this private submodule. By default, Python would emit
           less readable and dangerous messages resembling:

               beartype.roar._roarexc.BeartypeCallHintParamViolation:
               @beartyped quote_wiggum_safer() parameter lines=[] violates
               type hint typing.Annotated[list[str], Is[lambda lst:
               bool(lst)]], as value [] violates validator Is[lambda lst:
               bool(lst)].

        Parameters
        ----------
        message : str
            Human-readable message describing this exception.
        '''

        # Forward the message to the Exception superclass as is.
        super().__init__(message)

        # Masquerade this class as residing in the public API. See the
        # docstring for justification.
        self.__class__.__module__ = 'beartype.roar'
        # print(f'{self.__class__.__name__}: {message}')
# ....................{ DECORATOR }....................
class BeartypeDecorException(BeartypeException):
    '''
    Abstract base class of all **beartype decorator exceptions.**

    Subclass instances are raised at decoration time by the
    :func:`beartype.beartype` decorator.
    '''
# ....................{ DECORATOR ~ wrapp[ee|er] }....................
class BeartypeDecorWrappeeException(BeartypeDecorException):
    '''
    **Beartype decorator wrappee exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when passed a **wrappee** (i.e., object to be decorated by this
    decorator) of invalid type.
    '''
class BeartypeDecorWrapperException(BeartypeDecorException):
    '''
    **Beartype decorator parse exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator on
    accidentally generating an **invalid wrapper** (i.e., syntactically
    invalid new callable intended to wrap the original callable).
    '''
# ....................{ DECORATOR ~ hint }....................
class BeartypeDecorHintException(BeartypeDecorException):
    '''
    Abstract base class of all **beartype decorator type hint exceptions.**

    Subclass instances are raised at decoration time by the
    :func:`beartype.beartype` decorator when the decorated callable is
    annotated by one or more **invalid type hints** (i.e., annotations that
    are neither PEP-compliant nor PEP-compliant type hints supported by
    this decorator).
    '''
class BeartypeDecorHintForwardRefException(BeartypeDecorHintException):
    '''
    **Beartype decorator forward reference type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by an **invalid forward
    reference type hint** (i.e., string whose value is the name of a
    user-defined class yet to be declared).
    '''
class BeartypeDecorHintTypeException(BeartypeDecorHintException):
    '''
    **Beartype decorator class type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by an **invalid class type
    hint** (i.e., class unusable as a type hint, typically for failing to
    support runtime :func:`isinstance` calls).
    '''
# ....................{ DECORATOR ~ hint : non-pep }....................
class BeartypeDecorHintNonpepException(BeartypeDecorHintException):
    '''
    **Beartype decorator PEP-noncompliant type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by an **invalid
    PEP-noncompliant type hint** (i.e., type hint failing to comply with
    :mod:`beartype`-specific semantics, including tuple unions and
    fully-qualified forward references).

    Tuple unions, for example, must contain *only* PEP-noncompliant
    annotations. This exception is therefore raised for callables
    type-hinted by tuples containing one or more PEP-compliant items (e.g.,
    instances or classes declared by the stdlib :mod:`typing` module) *or*
    arbitrary objects (e.g., dictionaries, lists, numbers, sets).
    '''
class BeartypeDecorHintNonpepNumpyException(BeartypeDecorHintNonpepException):
    '''
    **Beartype decorator PEP-noncompliant NumPy type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by an **invalid NumPy type
    hint** (e.g., a ``numpy.typed.NDArray[...]`` type hint subscripted by
    an invalid number of arguments).
    '''
# ....................{ DECORATOR ~ hint : pep }....................
class BeartypeDecorHintPepException(BeartypeDecorHintException):
    '''
    Abstract base class of all **beartype decorator PEP-compliant type hint
    value exceptions.**

    Subclass instances are raised at decoration time by the
    :func:`beartype.beartype` decorator when the decorated callable is
    annotated by one or more PEP-compliant type hints violating either an
    annotation-centric PEP (e.g., :pep:`484`) *or* this decorator's
    implementation of such a PEP.
    '''
class BeartypeDecorHintPepSignException(BeartypeDecorHintPepException):
    '''
    **Beartype decorator PEP-compliant type hint sign exception.**

    Subclass instances are raised at decoration time by the
    :func:`beartype.beartype` decorator when the decorated callable is
    annotated by one or more PEP-compliant type hints *not* uniquely
    identifiable by a **sign** (i.e., object uniquely identifying a
    category of PEP-compliant type hints).
    '''
class BeartypeDecorHintPepUnsupportedException(BeartypeDecorHintPepException):
    '''
    **Beartype decorator unsupported PEP-compliant type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints (e.g., instances or classes declared by the stdlib
    :mod:`typing` module) currently unsupported by this decorator.
    '''
# ....................{ DECORATOR ~ hint : pep : proposal }....................
class BeartypeDecorHintPep3119Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`3119`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`3119` *or* this decorator's
    implementation of :pep:`3119`, including:

    * Hints that are **non-isinstanceable classes** (i.e., classes
      prohibiting being passed as the second parameter to the
      :func:`isinstance` builtin by leveraging metaclasses overriding the
      ``__instancecheck__()`` dunder method to raise exceptions). Notably,
      this covers most public classes declared by the standard
      :mod:`typing` module.
    '''
class BeartypeDecorHintPep484Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`484`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`484` *or* this decorator's
    implementation of :pep:`484`, including:

    * Hints subscripted by the :attr:`typing.NoReturn` type hint (e.g.,
      ``typing.List[typing.NoReturn]``).
    '''
class BeartypeDecorHintPep484585Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`484`- or :pep:`585`-compliant **dual type
    hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating :pep:`484`, :pep:`585`, *or* this decorator's
    implementation of :pep:`484` or :pep:`585`.
    '''
class BeartypeDecorHintPep544Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`544`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`544` *or* this decorator's
    implementation of :pep:`544`.
    '''
class BeartypeDecorHintPep557Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`557`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`557` *or* this decorator's
    implementation of :pep:`557`.
    '''
class BeartypeDecorHintPep585Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`585`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`585` *or* this decorator's
    implementation of :pep:`585`.
    '''
class BeartypeDecorHintPep586Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`586`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`586` *or* this decorator's
    implementation of :pep:`586`.
    '''
class BeartypeDecorHintPep593Exception(BeartypeDecorHintPepException):
    '''
    **Beartype decorator** :pep:`593`-compliant **type hint exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable is annotated by one or more PEP-compliant
    type hints violating either :pep:`593` *or* this decorator's
    implementation of :pep:`593`.
    '''
# ....................{ DECORATOR ~ param }....................
class BeartypeDecorParamException(BeartypeDecorException):
    '''
    Abstract base class of all **beartype decorator parameter exceptions.**

    Subclass instances are raised at decoration time by the
    :func:`beartype.beartype` decorator when the decorated callable
    declares invalid parameters.
    '''
class BeartypeDecorParamNameException(BeartypeDecorParamException):
    '''
    **Beartype decorator parameter name exception.**

    Raised at decoration time by the :func:`beartype.beartype` decorator
    when the decorated callable declares parameters with **invalid names**
    (i.e., names prefixed by the :mod:`beartype`-reserved substring
    ``"__bear"``).
    '''
# ....................{ CALL }....................
class BeartypeCallException(BeartypeException):
    '''
    Abstract base class of all **beartyped callable exceptions.**

    Subclass instances are raised by wrapper functions generated by the
    :func:`beartype.beartype` decorator, typically on failing a runtime
    type-check at call time.
    '''
class BeartypeCallUnavailableTypeException(BeartypeCallException):
    '''
    **Beartyped callable unavailable type exceptions.**

    Raised by the :class:`beartype.cave.UnavailableType` class when passed
    to either the :func:`isinstance` or :func:`issubclass` builtin
    functions, typically because a type defined by the
    :class:`beartype.cave` submodule is conditionally unavailable under the
    active Python interpreter.
    '''
# ....................{ CALL ~ hint }....................
class BeartypeCallHintException(BeartypeCallException):
    '''
    Abstract base class of all **beartype type-checking exceptions.**

    Subclass instances are raised by wrapper functions generated by the
    :func:`beartype.beartype` decorator on failing a runtime type-check at
    callable call time -- typically because a passed parameter or returned
    value violates the type hint annotating that parameter or return.
    '''
class BeartypeCallHintForwardRefException(BeartypeCallHintException):
    '''
    **Beartype type-checking forward reference exception.**

    Raised by wrapper functions generated by the :func:`beartype.beartype`
    decorator when a **forward reference type hint** (i.e., string whose
    value is the name of a user-defined class yet to be defined)
    erroneously references a module attribute whose value is *not* actually
    a class.
    '''
# ....................{ CALL ~ hint : violation }....................
class BeartypeCallHintViolation(BeartypeCallHintException):
'''
Abstract base class of all **beartype type-checking violations.**
Instances of subclasses of this exception are raised by :mod:`beartype` when
an object to be type-checked violates the type hint annotating that object.
This includes wrapper functions generated by the :func:`beartype.beartype`
decorator when either passed a parameter or returning an object violating
the type hint annotating that parameter or return.
Attributes
----------
_culprits_weakref_and_repr : Tuple[(object, str), ...]
Tuple of 2-tuples (``culprit_weakref``, ``culprit_repr``) weakly
referring to all of the culprits previously passed to the
:meth:`__init__` method, where:
* ``culprits_repr`` is the machine-readable string representation of the
culprit weakly referred to by the ``culprit_weakref`` reference.
* ``culprits_weakref`` is a weak reference to that culprit, defined as
either:
* If that culprit is not ``None`` *and* that culprit can be weakly
referenced, a **weak reference** (i.e., :class:`weakref.ref`
instance) to that culprit.
* If that culprit is ``None``, a singleton non-``None`` placeholder.
Since the :class:`weakref.ref` class ambiguously returns ``None``
when that culprit has already been garbage-collected, this
attribute intentionally substitutes ``None`` for this placeholder.
* If that culprit *cannot* be weakly referenced (e.g., due to being
an instance of a builtin variable-sized C-based type), ``None``.
'''
# ..................{ INITIALIZERS }..................
    def __init__(self, message: str, culprits: tuple) -> None:
        '''
        Initialize this type-checking exception.

        Parameters
        ----------
        message : str
            Human-readable message describing this exception.
        culprits : Tuple[object, ...]
            Tuple of one or more **culprits** (i.e., user-defined objects
            directly responsible for this exception, typically due to violating
            a type hint annotating a parameter passed to *or* object returned
            from the wrapper function generated by the :func:`beartype.beartype`
            decorator raising this exception). This exception internally
            preserves a weak reference to these culprits, which callers may then
            safely retrieve at any time via the :meth:`culprits` property.

        Raises
        ----------
        _BeartypeUtilExceptionException
            If the culprits are either:

            * *Not* a tuple.
            * The empty tuple.
        '''

        # Avoid circular import dependencies.
        from beartype._util.py.utilpyweakref import make_obj_weakref_and_repr

        # Initialize the superclass with the passed message.
        super().__init__(message)

        #FIXME: Unit test us up, please.
        # If the culprits are *NOT* a tuple, raise an exception.
        # NOTE(review): "_BeartypeUtilExceptionException" is declared elsewhere
        # in this submodule (outside this method) — confirm it is in scope.
        if not isinstance(culprits, tuple):
            raise _BeartypeUtilExceptionException(
                f'Culprits {repr(culprits)} not tuple.')
        # Else, the culprits are a tuple.
        #
        # If the culprits are the empty tuple, raise an exception.
        elif not culprits:
            raise _BeartypeUtilExceptionException('Culprits tuple empty.')
        # Else, the culprits are a non-empty tuple.

        # Tuple of 2-tuples ("culprit_weakref", "culprit_repr") weakly referring
        # to all of the passed culprits. Weak (rather than strong) references
        # are preserved here to avoid memory leaks when this exception outlives
        # the culprits themselves.
        self._culprits_weakref_and_repr = tuple(
            make_obj_weakref_and_repr(culprit)
            for culprit in culprits
        )
# ..................{ PROPERTIES }..................
# Read-only properties intentionally providing no corresponding setters.
@property
def culprits(self) -> tuple:
'''
Tuple of one or more **culprits** (i.e., user-defined objects directly
responsible for this exception, typically due to violating a type hint
annotating a parameter passed to *or* object returned from the wrapper
function generated by the :func:`beartype.beartype` decorator raising
this exception).
Specifically, this property returns either:
* If a container (e.g., dictionary, list, set, tuple) is responsible for
this exception, the 2-tuple ``(culprit_root, culprit_leaf)`` where:
* ``culprit_root`` is the outermost such container. Typically, this is
the passed parameter or returned value indirectly violating this
hint.
* ``culprit_leaf`` is the innermost item transitively contained in
``culprit_root`` directly violating this hint.
* If a non-container (e.g., scalar, class instance) is responsible for
this exception, the 1-tuple ``(culprit,)`` where ``culprit`` is that
non-container.
Caveats
----------
**This property is safely accessible from any context.** However, this
property is most usefully accessed *only* from the ``except ...:`` block
directly catching this exception. To avoid memory leaks, this property
only weakly rather than strongly refers to these culprits and is thus
best accessed only where these culprits are accessible. Notably, this
property is guaranteed to refer to these culprits *only* for the
duration of the ``except ...:`` block directly catching this exception.
Since these culprits may be garbage-collected at any time thereafter,
this property *cannot* be guaranteed to refer to these culprits outside
that block. If this property is accessed from *any* other context and
ore or more of these culprits have already been garbage-collected, the
corresponding item(s) of this property are only the machine-readable
representations of those culprits rather than those actual culprits.
**This property returns the machine-readable representation of instances
of builtin variable-sized C-based types** (e.g., :class:`dict`,
:class:`int`, :class:`list`, :class:`tuple`) **rather than those
instances themselves.** Why? Because CPython limitations prevent those
instances from being weakly referred to. Blame Guido and the BDFL!
'''
# Avoid circular import dependencies.
from beartype._util.py.utilpyweakref import get_weakref_obj_or_repr
# Tuple of one or more strong references to the culprits previously
# passed to the __init__() method for those culprits that are alive
# *OR* their representations otherwise.
culprits = tuple(
get_weakref_obj_or_repr(
obj_weakref=culprit_weakref, obj_repr=culprit_repr)
for culprit_weakref, culprit_repr in self._culprits_weakref_and_repr
)
# print(f'culprits_weakref_and_repr: {self._culprits_weakref_and_repr}')
# Return these culprits.
return culprits
class BeartypeCallHintParamViolation(BeartypeCallHintViolation):
    '''
    **Beartyped callable parameter type-checking exception.**

    This exception is raised from a call to a wrapper function generated by
    the :func:`beartype.beartype` decorator type-checking a decorated callable
    when the caller passes that call a parameter violating the type hint
    annotating that parameter of that decorated callable.
    '''

    pass
class BeartypeCallHintReturnViolation(BeartypeCallHintViolation):
    '''
    **Beartyped callable return type-checking exception.**

    This exception is raised from a call to a wrapper function generated by
    the :func:`beartype.beartype` decorator type-checking a decorated callable
    when that call returns an object violating the type hint annotating the
    return of that decorated callable.
    '''

    pass
# ....................{ PEP }....................
class BeartypePepException(BeartypeDecorException):
    '''
    Abstract base class of all **beartype Python Enhancement Proposal (PEP)
    exceptions.**

    Instances of subclasses of this exception are raised at both call time and
    decoration time on receiving a callable or class violating a specific PEP.
    '''

    pass
class BeartypePep563Exception(BeartypePepException):
    '''
    **Beartype** :pep:`563` **exception.**

    This exception is raised at both call time of the
    :func:`beartype.peps.resolve_pep563` function and decoration time of the
    :func:`beartype.beartype` decorator on failing to dynamically evaluate a
    postponed annotation of a callable for which :pep:`563` is active.
    '''

    pass
# ....................{ API ~ cave }....................
class BeartypeCaveException(BeartypeException):
    '''
    Abstract base class of all **beartype cave exceptions.**

    Instances of subclasses of this exception are raised at usage time from
    various types published by the :mod:`beartype.cave` submodule.
    '''

    pass
# ....................{ API ~ cave : nonetypeor }....................
class BeartypeCaveNoneTypeOrException(BeartypeCaveException):
    '''
    Abstract base class of all **beartype cave** ``None`` **tuple factory
    exceptions.**

    Instances of subclasses of this exception are raised at usage time from
    the :attr:`beartype.cave.NoneTypeOr` tuple factory.
    '''

    pass
class BeartypeCaveNoneTypeOrKeyException(BeartypeCaveNoneTypeOrException):
    '''
    **Beartype cave** ``None`` **tuple factory key exception.**

    Instances of this exception are raised when indexing the
    :attr:`beartype.cave.NoneTypeOr` dictionary with an invalid key,
    including:

    * The empty tuple.
    * Arbitrary objects that are neither:

      * **Types** (i.e., :class:`beartype.cave.ClassType` instances).
      * **Tuples of types** (i.e., tuples whose items are all
        :class:`beartype.cave.ClassType` instances).
    '''

    pass
class BeartypeCaveNoneTypeOrMutabilityException(
    BeartypeCaveNoneTypeOrException):
    '''
    **Beartype cave** ``None`` **tuple factory mutability exception.**

    Instances of this exception are raised when attempting to explicitly set a
    key on the :attr:`beartype.cave.NoneTypeOr` dictionary.
    '''

    pass
# ....................{ API ~ claw }....................
class BeartypeClawException(BeartypeException):
    '''
    Abstract base class of all **beartype import hook exceptions.**

    Instances of subclasses of this exception are raised at call time from the
    callables and classes published by the :mod:`beartype.claw` subpackage.
    '''

    pass
class BeartypeClawRegistrationException(BeartypeClawException):
    '''
    **Beartype import hook registration exception.**

    This exception is raised at call time by the
    :func:`beartype.claw.beartype_submodules_on_import` function when passed
    invalid parameters.
    '''

    pass
# ....................{ API ~ conf }....................
class BeartypeConfException(BeartypeException):
    '''
    **Beartype configuration exception.**

    Instances of this exception are raised on either erroneously instantiating
    the :class:`beartype.BeartypeConf` class *or* passing an object that is
    not an instance of that class as the ``conf`` parameter to the
    :func:`beartype.beartype` decorator.
    '''

    pass
# ....................{ API ~ door }....................
class BeartypeDoorException(BeartypeException):
    '''
    Abstract base class of all **Decidedly Object-Oriented Runtime-checking
    (DOOR) exceptions.**

    Instances of subclasses of this exception are raised at call time from
    callables and classes published by the :mod:`beartype.door` subpackage.
    '''

    pass
class BeartypeDoorHintViolation(BeartypeCallHintViolation):
    '''
    **Beartype object-oriented type-checking exception.**

    This exception is raised at call time by the
    :meth:`beartype.door.TypeHint.die_if_unbearable` method when passed an
    object violating the current type hint.
    '''

    pass
# ....................{ API ~ door : pep }....................
class BeartypeDoorNonpepException(BeartypeDoorException):
    '''
    **Decidedly Object-Oriented Runtime-checking (DOOR) PEP-noncompliant type
    hint exception.**

    This exception is raised at call time from :mod:`beartype.door` callables
    and classes on receiving an **invalid PEP-noncompliant type hint** (i.e.,
    type hint failing to comply with PEP standards currently supported by the
    :mod:`beartype.door` API).
    '''

    pass
class BeartypeDoorPepException(BeartypeDoorException):
    '''
    **Decidedly Object-Oriented Runtime-checking (DOOR) PEP-compliant type
    hint exception.**

    This exception is raised at call time from :mod:`beartype.door` callables
    and classes on receiving an **invalid PEP-compliant type hint** (i.e.,
    type hint complying with PEP standards currently supported by the
    :mod:`beartype.door` API but otherwise invalid for various reasons).
    '''

    pass
class BeartypeDoorPepUnsupportedException(BeartypeDoorPepException):
    '''
    **Decidedly Object-Oriented Runtime-checking (DOOR) unsupported
    PEP-compliant type hint exception.**

    This exception is raised at call time from :mod:`beartype.door` callables
    and classes on receiving an **unsupported PEP-compliant type hint** (i.e.,
    type hint complying with PEP standards *not* currently supported by the
    :mod:`beartype.door` API).
    '''

    pass
# ....................{ API ~ vale }....................
class BeartypeValeException(BeartypeException):
    '''
    Abstract base class of all **beartype validator exceptions.**

    Instances of subclasses of this exception are raised at usage (e.g.,
    instantiation, callable call) time from the class hierarchy published by
    the :mod:`beartype.vale` subpackage.
    '''

    pass
class BeartypeValeSubscriptionException(BeartypeValeException):
    '''
    **Beartype validator subscription exception.**

    This exception is raised at instantiation time when subscripting
    (indexing) factories published by the :mod:`beartype.vale` subpackage,
    including attempts to:

    * Instantiate *any* of these factories. Like standard type hints, these
      factories are *only* intended to be subscripted (indexed).
    * Apply the ``&`` or ``|`` operators to *any* subscriptions of these
      factories and *any* other objects (e.g.,
      ``beartype.vale.Is[lambda obj: True] & 'If it seems bad, it is.'``).
    * Subscript the :attr:`beartype.vale.Is` factory by anything other than a
      **validator** (i.e., tester function satisfying the type hint
      ``collections.abc.Callable[[typing.Any,], bool]``).
    '''

    pass
class BeartypeValeValidationException(BeartypeValeException):
    '''
    **Beartype validator validation exception.**

    This exception is raised at validation time (e.g., at call time of a
    :func:`beartype.beartype`-decorated callable annotated by a beartype
    validator) when a beartype validator fails to properly validate an object,
    including attempts to:

    * Subscript the :attr:`beartype.vale.Is` factory by a **non-bool-like
      validator** (i.e., tester function returning an object that is neither a
      :class:`bool` *nor* implicitly convertible into a :class:`bool`).
    '''

    pass
# ....................{ PRIVATE ~ decorator }....................
class _BeartypeDecorBeartypistryException(BeartypeDecorException):
    '''
    **Beartype decorator beartypistry exception.**

    This exception is raised at decoration time from the
    :func:`beartype.beartype` decorator when erroneously accessing the
    **beartypistry** (i.e.,
    :class:`beartype._decor._cache.cachetype.bear_typistry` singleton).

    This private exception denotes a critical internal issue and should thus
    *never* be raised -- let alone exposed to end users.
    '''

    pass
# ....................{ PRIVATE ~ door }..................
class _BeartypeDoorTextException(BeartypeDoorException):
    '''
    **Decidedly Object-Oriented Runtime-checking (DOOR) text exception.**

    This exception is raised at call time from :mod:`beartype.door` callables
    and classes on detecting invalid strings (e.g., on raising an exception
    whose message is *not* prefixed by the expected substring).
    '''

    pass
# ....................{ PRIVATE ~ vale }....................
class _BeartypeValeUtilException(BeartypeValeException):
    '''
    **Beartype validator utility exception.**

    This exception is raised from various submodules of the private
    :mod:`beartype.vale._util` subpackage.
    '''

    pass
# ....................{ PRIVATE ~ util }....................
class _BeartypeUtilException(BeartypeException):
    '''
    Abstract base class of all **beartype private utility exceptions.**

    Instances of subclasses of this exception are raised by *most* (but *not*
    all) private submodules of the private :mod:`beartype._util` subpackage.
    These exceptions denote critical internal issues and should thus *never*
    be raised, let alone allowed to percolate up the call stack to end users.
    '''

    pass
class _BeartypeUtilCallableException(_BeartypeUtilException):
    '''
    **Beartype callable utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype._util.utilfunc` subpackage.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilExceptionException(_BeartypeUtilException):
    '''
    **Beartype exception utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype.roar._roarexc` subpackage.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilMappingException(_BeartypeUtilException):
    '''
    **Beartype mapping utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype._util.kind.utilkinddict` submodule.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilModuleException(_BeartypeUtilException):
    '''
    **Beartype module utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype._util.mod.utilmodget` subpackage when dynamically importing
    an unimportable external user-defined module, typically due to a
    **PEP-compliant forward reference type hint** (i.e., string whose value is
    the name of a user-defined class that has yet to be defined) erroneously
    referencing a non-existent module or module attribute.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilPathException(_BeartypeUtilException):
    '''
    **Beartype path utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype._util.path` subpackage on various fatal edge cases.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilTextException(_BeartypeUtilException):
    '''
    **Beartype text utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype._util.text` subpackage on various fatal edge cases.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilTypeException(_BeartypeUtilException):
    '''
    **Beartype class utility exception.**

    This exception is raised by public functions of the private
    :mod:`beartype._util.cls.utilclstest` subpackage.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
# ....................{ PRIVATE ~ util : call }..................
class _BeartypeCallHintRaiseException(_BeartypeUtilException):
    '''
    Abstract base class of all **beartype human-readable exception raiser
    exceptions.**

    Instances of subclasses of this exception are raised by private utility
    **exception raiser functions** (i.e., functions raising human-readable
    exceptions from wrapper functions when either passed a parameter or
    returning a value annotated by a type hint fails the runtime type-check
    required by that hint) when an unexpected failure occurs.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
# ....................{ PRIVATE ~ util : cache }..................
class _BeartypeUtilCachedException(_BeartypeUtilException):
    '''
    Abstract base class of all **beartype caching utility exceptions.**

    Instances of subclasses of this exception are raised by private submodules
    of the private :mod:`beartype._util.cache` subpackage. These exceptions
    denote critical internal issues and should thus *never* be raised -- let
    alone allowed to percolate up the call stack to end users.
    '''

    pass
class _BeartypeUtilCallableCachedException(_BeartypeUtilCachedException):
    '''
    **Beartype memoization exception.**

    This exception is raised by the
    :func:`beartype._util.cache.utilcache.utilcachecall.callable_cached`
    decorator on various fatal errors (e.g., when the signature of the
    decorated callable is unsupported).

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilCacheLruException(_BeartypeUtilCachedException):
    '''
    **Beartype Least Recently Used (LRU) cache exception.**

    This exception is raised by the
    :class:`beartype._util.cache.utilcache.utilcachelru.CacheLruStrong` class
    on various fatal errors (e.g., when the cache capacity is *not* a positive
    integer).

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
# ....................{ PRIVATE ~ util : cache : pool }..................
class _BeartypeUtilCachedKeyPoolException(_BeartypeUtilException):
    '''
    **Beartype key pool exception.**

    This exception is raised by private functions of the private
    :mod:`beartype._util.cache.pool.utilcachepool` submodule on various fatal
    edge cases.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilCachedFixedListException(_BeartypeUtilCachedException):
    '''
    **Beartype decorator fixed list exception.**

    This exception is raised at decoration time from the
    :func:`beartype.beartype` decorator when an internal callable erroneously
    mutates a **fixed list** (i.e., list constrained to a fixed length defined
    at instantiation time), usually by attempting to modify the length of that
    list.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilCachedObjectTypedException(_BeartypeUtilCachedException):
    '''
    **Beartype decorator typed object exception.**

    This exception is raised at decoration time from the
    :func:`beartype.beartype` decorator when an internal callable erroneously
    acquires a **pooled typed object** (i.e., object internally cached to a
    pool of all objects of that type).

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
# ....................{ PRIVATE ~ util : call }..................
class _BeartypeCallHintPepRaiseException(_BeartypeCallHintRaiseException):
    '''
    **Beartype PEP-compliant human-readable exception raiser exception.**

    This exception is raised by the
    :func:`beartype._decor._error.errormain.get_beartype_violation`
    exception raiser function when an unexpected failure occurs.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeCallHintPepRaiseDesynchronizationException(
    _BeartypeCallHintPepRaiseException):
    '''
    **Beartype human-readable exception raiser desynchronization exception.**

    This exception is raised by the
    :func:`beartype._decor._error.errormain.get_beartype_violation` function
    (which raises human-readable exceptions from wrapper functions when either
    passed a parameter or returning a value, referred to as the "pith" for
    brevity, annotated by a PEP-compliant type hint fails the type-check
    required by that hint) when this pith appears to satisfy this type-check,
    a runtime paradox implying either:

    * The parent wrapper function generated by the :func:`beartype.beartype`
      decorator type-checking this pith triggered a false negative by
      erroneously misdetecting this pith as failing this type check.
    * The
      :func:`beartype._decor._error.errormain.get_beartype_violation`
      function re-type-checking this pith triggered a false positive by
      erroneously misdetecting this pith as satisfying this type check when in
      fact this pith fails to do so.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
# ....................{ PRIVATE ~ util : object }..................
class _BeartypeUtilObjectException(_BeartypeUtilException):
    '''
    Abstract base class of all **beartype object utility exceptions.**

    Instances of subclasses of this exception are raised by private functions
    defined by the private :mod:`beartype._util.utilobject` submodule. These
    exceptions denote critical internal issues and should thus *never* be
    raised -- let alone allowed to percolate up the call stack to end users.
    '''

    pass
class _BeartypeUtilObjectNameException(_BeartypeUtilObjectException):
    '''
    **Beartype object name exception.**

    This exception is raised by the
    :func:`beartype._util.utilobject.get_object_basename_scoped` getter when
    the passed object is **unnamed** (i.e., fails to declare either the
    ``__name__`` or ``__qualname__`` dunder attributes).

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
# ....................{ PRIVATE ~ util : python }..................
class _BeartypeUtilPythonException(_BeartypeUtilException):
    '''
    Abstract base class of all **beartype Python utility exceptions.**

    Instances of subclasses of this exception are raised by private submodules
    of the private :mod:`beartype._util.py` subpackage. These exceptions
    denote critical internal issues and should thus *never* be raised -- let
    alone allowed to percolate up the call stack to end users.
    '''

    pass
class _BeartypeUtilPythonInterpreterException(_BeartypeUtilPythonException):
    '''
    **Beartype Python interpreter utility exception.**

    This exception is raised by private functions of the private
    :mod:`beartype._util.py.utilpyinterpreter` submodule on fatal edge cases.

    This exception denotes a critical internal issue and should thus *never*
    be raised -- let alone allowed to percolate up the call stack to end
    users.
    '''

    pass
class _BeartypeUtilPythonWeakrefException(_BeartypeUtilPythonException):
    '''
    **Beartype Python weak reference utility exception.**

    This exception is raised by private functions of the private
    :mod:`beartype._util.py.utilpyweakref` submodule on fatal edge cases. This
    exception denotes a critical internal issue and should thus *never* be
    raised -- let alone allowed to percolate up the call stack to end users.
    '''

    pass
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype configuration enumerations** (i.e., public enumerations whose members
may be passed as initialization-time parameters to the
:meth:`beartype._conf.confcls.BeartypeConf.__init__` constructor to configure
:mod:`beartype` with optional runtime type-checking behaviours).
Most of the public attributes defined by this private submodule are explicitly
exported to external users in our top-level :mod:`beartype.__init__` submodule.
This private submodule is *not* intended for direct importation by downstream
callers.
'''
# ....................{ IMPORTS }....................
from enum import (
Enum,
auto as next_enum_member_value,
unique as die_unless_enum_member_values_unique,
)
# ....................{ ENUMERATIONS }....................
#FIXME: Document us up in "README.rst", please.
@die_unless_enum_member_values_unique
class BeartypeStrategy(Enum):
    '''
    Enumeration of all **type-checking strategies** (i.e., competing
    procedures for type-checking objects passed to or returned from
    :func:`beartype.beartype`-decorated callables, each with concomitant
    tradeoffs with respect to runtime complexity and quality assurance).

    Members are intentionally named after `conventional Big O notation
    <Big O_>`__ (e.g., :attr:`BeartypeStrategy.On` enables the ``O(n)``
    strategy). A strategy is selected per decoration -- at the fine-grained
    level of an individual callable decorated by :func:`beartype.beartype` --
    by setting the :attr:`beartype.BeartypeConf.strategy` parameter of the
    :class:`beartype.BeartypeConf` object passed as the optional ``conf``
    parameter to that decorator.

    Each strategy enforces its corresponding runtime complexity (e.g.,
    ``O(n)``) across *all* type-checks performed for callables enabling that
    strategy. A callable configured by :attr:`BeartypeStrategy.On`, for
    example, exhibits linear ``O(n)`` overhead when type-checking each nesting
    level of each container passed to and returned from that callable.

    .. _Big O:
       https://en.wikipedia.org/wiki/Big_O_notation

    Attributes
    ----------
    O0 : EnumMemberType
        **No-time strategy**: disable type-checking for a decorated callable
        by reducing :func:`beartype.beartype` to the identity decorator for
        that callable. Although seemingly useless, this strategy lets users
        selectively blacklist (prevent) callables from being type-checked by
        our as-yet-unimplemented import hook, which will type-check all
        callables within a package or module *except* those explicitly
        decorated by this strategy.
    O1 : EnumMemberType
        **Constant-time strategy**: the default ``O(1)`` strategy,
        type-checking a single randomly selected item of each container. As
        the default, this strategy need *not* be explicitly enabled.
    Ologn : EnumMemberType
        **Logarithmic-time strategy**: the ``O(log n)`` strategy,
        type-checking a randomly selected number of items ``log(len(obj))``
        of each container ``obj``. **Currently unimplemented.** (*To be
        implemented by a future beartype release.*)
    On : EnumMemberType
        **Linear-time strategy**: the ``O(n)`` strategy, type-checking *all*
        items of a container. **Currently unimplemented.** (*To be implemented
        by a future beartype release.*)
    '''

    # Member values are auto-incremented and meaningless; only member
    # identity matters to consumers of this enumeration.
    O0 = next_enum_member_value()
    O1 = next_enum_member_value()
    Ologn = next_enum_member_value()
    On = next_enum_member_value()
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype configuration class hierarchy** (i.e., public dataclasses defining
beartype's configuration API, enabling end users to selectively configure
:mod:`beartype` with optional runtime type-checking behaviours).
Most of the public attributes defined by this private submodule are explicitly
exported to external users in our top-level :mod:`beartype.__init__` submodule.
This private submodule is *not* intended for direct importation by downstream
callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeConfException
from beartype.typing import (
TYPE_CHECKING,
Dict,
Optional,
)
from beartype._cave._cavemap import NoneTypeOr
from beartype._conf.confenum import BeartypeStrategy
# ....................{ CLASSES }....................
#FIXME: Document us up in "README.rst", please.
#FIXME: Refactor to use @dataclass.dataclass once we drop Python 3.7 support.
#Note that doing so will require usage of "frozen=True" to prevent unwanted
#modification of read-only properties.
class BeartypeConf(object):
'''
**Beartype configuration** (i.e., self-caching dataclass encapsulating all
flags, options, settings, and other metadata configuring each type-checking
operation performed by :mod:`beartype` -- including each decoration of a
callable or class by the :func:`beartype.beartype` decorator).
Attributes
----------
_is_color : Optional[bool]
Tri-state boolean governing how and whether beartype colours
**type-checking violations** (i.e.,
:class:`beartype.roar.BeartypeCallHintViolation` exceptions) with
POSIX-compliant ANSI escape sequences for readability. Specifically, if
this boolean is:
* ``False``, beartype *never* colours type-checking violations raised by
callables configured with this configuration.
* ``True``, beartype *always* colours type-checking violations raised by
callables configured with this configuration.
* ``None``, beartype conditionally colours type-checking violations
raised by callables configured with this configuration only when
standard output is attached to an interactive terminal.
_is_debug : bool, optional
``True`` only if debugging :mod:`beartype`. See also the :meth:`__new__`
method docstring.
_is_pep484_tower : bool, optional
``True`` only if enabling support for the :pep:`484`-compliant
implicit numeric tower. See also the :meth:`__new__` method docstring.
_strategy : BeartypeStrategy, optional
**Type-checking strategy** (i.e., :class:`BeartypeStrategy` enumeration
member) with which to implement all type-checks in the wrapper function
dynamically generated by the :func:`beartype.beartype` decorator for
the decorated callable.
'''
# ..................{ CLASS VARIABLES }..................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Synchronize this slots list with the implementations of:
# * The __new__() dunder method.
# * The __eq__() dunder method.
# * The __hash__() dunder method.
# * The __repr__() dunder method.
# CAUTION: Subclasses declaring uniquely subclass-specific instance
# variables *MUST* additionally slot those variables. Subclasses violating
# this constraint will be usable but unslotted, which defeats our purposes.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Slot all instance variables defined on this object to minimize the time
# complexity of both reading and writing variables across frequently called
# cache dunder methods. Slotting has been shown to reduce read and write
# costs by approximately ~10%, which is non-trivial.
__slots__ = (
'_is_color',
'_is_debug',
'_is_pep484_tower',
'_strategy',
)
# Squelch false negatives from mypy. This is absurd. This is mypy. See:
# https://github.com/python/mypy/issues/5941
if TYPE_CHECKING:
_is_color: Optional[bool]
_is_debug: bool
_is_pep484_tower: bool
_strategy: BeartypeStrategy
# ..................{ INSTANTIATORS }..................
# Note that this __new__() dunder method implements the superset of the
# functionality typically implemented by the __init__() dunder method. Due
# to Python instantiation semantics, the __init__() dunder method is
# intentionally left undefined. Why? Because Python unconditionally invokes
# __init__() if defined, even when the initialization performed by that
# __init__() has already been performed for the cached instance returned by
# __new__(). In short, __init__() and __new__() are largely mutually
# exclusive; one typically defines one or the other but *NOT* both.
def __new__(
cls,
# Optional keyword-only parameters.
*,
is_color: Optional[bool] = None,
is_debug: bool = False,
is_pep484_tower: bool = False,
strategy: BeartypeStrategy = BeartypeStrategy.O1,
) -> 'BeartypeConf':
'''
Instantiate this configuration if needed (i.e., if *no* prior
configuration with these same parameters was previously instantiated)
*or* reuse that previously instantiated configuration otherwise.
This dunder methods guarantees beartype configurations to be memoized:
.. code-block:: python
>>> from beartype import BeartypeConf
>>> BeartypeConf() is BeartypeConf()
True
This memoization is *not* merely an optimization. The
:func:`beartype.beartype` decorator internally memoizes the private
closure it creates and returns on the basis of this configuration,
which *must* thus also be memoized.
Parameters
----------
is_color : Optional[bool]
Tri-state boolean governing how and whether beartype colours
**type-checking violations** (i.e.,
:class:`beartype.roar.BeartypeCallHintViolation` exceptions) with
POSIX-compliant ANSI escape sequences for readability. Specifically,
if this boolean is:
* ``False``, beartype *never* colours type-checking violations
raised by callables configured with this configuration.
* ``True``, beartype *always* colours type-checking violations
raised by callables configured with this configuration.
* ``None``, beartype conditionally colours type-checking violations
raised by callables configured with this configuration only when
standard output is attached to an interactive terminal.
Defaults to ``None``.
is_debug : bool, optional
``True`` only if debugging :mod:`beartype`. Enabling this boolean:
* Prints the definition (including both the signature and body) of
each type-checking wrapper function dynamically generated by
:mod:`beartype` to standard output.
* Caches the body of each type-checking wrapper function dynamically
generated by :mod:`beartype` with the standard :mod:`linecache`
module, enabling these function bodies to be introspected at
runtime *and* improving the readability of tracebacks whose call
stacks contain one or more calls to these
:func:`beartype.beartype`-decorated functions.
* Appends to the declaration of each **hidden parameter** (i.e.,
whose name is prefixed by ``"__beartype_"`` and whose value is
that of an external attribute internally referenced in the body of
that function) the machine-readable representation of the initial
value of that parameter, stripped of newlines and truncated to a
hopefully sensible length. Since the
:func:`beartype._util.text.utiltextrepr.represent_object` function
called to do so is shockingly slow, these substrings are
conditionally embedded in the returned signature *only* when
enabling this boolean.
Defaults to ``False``.
is_pep484_tower : bool, optional
``True`` only if enabling support for the :pep:`484`-compliant
**implicit numeric tower** (i.e., lossy conversion of integers to
floating-point numbers as well as both integers and floating-point
numbers to complex numbers). Specifically, enabling this instructs
:mod:`beartype` to automatically expand:
* All :class:`float` type hints to ``float | int``, thus implicitly
accepting both integers and floating-point numbers for objects
annotated as only accepting floating-point numbers.
* All :class:`complex` type hints to ``complex | float | int``, thus
implicitly accepting integers, floating-point, and complex numbers
for objects annotated as only accepting complex numbers.
Defaults to ``False`` to minimize precision error introduced by
lossy conversions from integers to floating-point numbers to complex
numbers. Since most integers do *not* have exact representations
as floating-point numbers, each conversion of an integer into a
floating-point number typically introduces a small precision error
that accumulates over multiple conversions and operations into a
larger precision error. Enabling this improves the usability of
public APIs at a cost of introducing precision errors.
strategy : BeartypeStrategy, optional
**Type-checking strategy** (i.e., :class:`BeartypeStrategy`
enumeration member) with which to implement all type-checks in the
wrapper function dynamically generated by the
:func:`beartype.beartype` decorator for the decorated callable.
Defaults to :attr: `BeartypeStrategy.O1`, the ``O(1)`` constant-time
strategy.
Returns
----------
BeartypeConf
Beartype configuration memoized with these parameters.
Raises
----------
BeartypeConfException
If either:
* ``is_color`` is *not* a tri-state boolean.
* ``is_debug`` is *not* a boolean.
* ``is_pep484_tower`` is *not* a boolean.
* ``strategy`` is *not* a :class:`BeartypeStrategy` enumeration
member.
'''
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Synchronize this logic with BeartypeConf.__hash__().
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Efficiently hashable tuple of these parameters (in arbitrary order).
beartype_conf_args = (
is_color,
is_debug,
is_pep484_tower,
strategy,
)
# If this method has already instantiated a configuration with these
# parameters, return that configuration for consistency and efficiency.
if beartype_conf_args in _BEARTYPE_CONF_ARGS_TO_CONF:
return _BEARTYPE_CONF_ARGS_TO_CONF[beartype_conf_args]
# Else, this method has yet to instantiate a configuration with these
# parameters. In this case, do so below (and cache that configuration).
# If "is_color" is *NOT* a tri-state boolean, raise an exception.
if not isinstance(is_color, NoneTypeOr[bool]):
raise BeartypeConfException(
f'Beartype configuration parameter "is_color" '
f'value {repr(is_color)} not tri-state boolean '
f'(i.e., "True", "False", or "None").'
)
# Else, "is_color" is a tri-state boolean, raise an exception.
#
# If "is_debug" is *NOT* a boolean, raise an exception.
elif not isinstance(is_debug, bool):
raise BeartypeConfException(
f'Beartype configuration parameter "is_debug" '
f'value {repr(is_debug)} not boolean.'
)
# Else, "is_debug" is a boolean.
#
# If "is_pep484_tower" is *NOT* a boolean, raise an exception.
elif not isinstance(is_pep484_tower, bool):
raise BeartypeConfException(
f'Beartype configuration parameter "is_pep484_tower" '
f'value {repr(is_debug)} not boolean.'
)
# Else, "is_pep484_tower" is a boolean.
#
# If "strategy" is *NOT* an enumeration member, raise an exception.
elif not isinstance(strategy, BeartypeStrategy):
raise BeartypeConfException(
f'Beartype configuration parameter "strategy" '
f'value {repr(strategy)} not '
f'"beartype.BeartypeStrategy" enumeration member.'
)
# Else, "strategy" is an enumeration member.
# Instantiate a new configuration of this type.
self = super().__new__(cls)
# Classify all passed parameters with this configuration.
self._is_color = is_color
self._is_debug = is_debug
self._is_pep484_tower = is_pep484_tower
self._strategy = strategy
# Cache this configuration.
_BEARTYPE_CONF_ARGS_TO_CONF[beartype_conf_args] = self
# Return this configuration.
return self
# ..................{ PROPERTIES }..................
# Read-only public properties effectively prohibiting mutation of their
# underlying private attributes.
@property
def is_color(self) -> Optional[bool]:
'''
Tri-state boolean governing how and whether beartype colours
**type-checking violations** (i.e.,
:class:`beartype.roar.BeartypeCallHintViolation` exceptions) with
POSIX-compliant ANSI escape sequences for readability. Specifically, if
this boolean is:
* ``False``, beartype *never* colours type-checking violations raised by
callables configured with this configuration.
* ``True``, beartype *always* colours type-checking violations raised by
callables configured with this configuration.
* ``None``, beartype conditionally colours type-checking violations
raised by callables configured with this configuration only when
standard output is attached to an interactive terminal.
'''
return self._is_color
@property
def is_debug(self) -> bool:
'''
``True`` only if debugging :mod:`beartype`.
See Also
----------
:meth:`__new__`
Further details.
'''
return self._is_debug
@property
def is_pep484_tower(self) -> bool:
'''
``True`` only if enabling support for the :pep:`484`-compliant
implicit numeric tower.
See Also
----------
:meth:`__new__`
Further details.
'''
return self._is_pep484_tower
@property
def strategy(self) -> BeartypeStrategy:
'''
**Type-checking strategy** (i.e., :class:`BeartypeStrategy`
enumeration member) with which to implement all type-checks in the
wrapper function dynamically generated by the :func:
`beartype.beartype` decorator for the decorated callable.
'''
return self._strategy
# ..................{ DUNDERS }..................
def __eq__(self, other: object) -> bool:
'''
**Beartype configuration equality comparator.**
Parameters
----------
other : object
Arbitrary object to be compared for equality against this
configuration.
Returns
----------
Union[bool, type(NotImplemented)]
Either:
* If this other object is also a beartype configuration, either:
* If these configurations share the same settings, ``True``.
* Else, ``False``.
* Else, ``NotImplemented``.
See Also
----------
:func:`_hash_beartype_conf`
Further details.
'''
# If this other object is also a beartype configuration...
if isinstance(other, BeartypeConf):
# Return true only if these configurations share the same settings.
return (
self._is_color == other._is_color and
self._is_debug == other._is_debug and
self._is_pep484_tower == other._is_pep484_tower and
self._strategy == other._strategy
)
# Else, this other object is *NOT* also a beartype configuration.
# In this case, return the standard singleton informing Python that
# this equality comparator fails to support this comparison.
return NotImplemented
def __hash__(self) -> int:
'''
**Hash** (i.e., non-negative integer quasi-uniquely identifying this
beartype configuration with respect to hashable container membership).
Returns
----------
int
Hash of this configuration.
'''
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Synchronize this logic with BeartypeConf.__new__().
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Return the hash of a tuple containing these parameters in an
# arbitrary (albeit well-defined) order.
#
# Note this has been profiled to be the optimal means of hashing object
# attributes in Python, where "optimal" means:
# * Optimally fast. CPython in particular optimizes the creation and
# garbage collection of "small" tuples, where "small" is ill-defined
# but almost certainly applies here.
# * Optimally uniformly distributed, thus minimizing the likelihood of
# expensive hash collisions.
return hash((
self._is_color,
self._is_debug,
self._is_pep484_tower,
self._strategy,
))
def __repr__(self) -> str:
'''
**Beartype configuration representation** (i.e., machine-readable
string which, when dynamically evaluated as code, restores access to
this exact configuration object).
Returns
----------
str
Representation of this configuration.
'''
return (
f'{self.__class__.__name__}('
f'is_color={repr(self._is_color)}, '
f'is_debug={repr(self._is_debug)}, '
f'is_pep484_tower={repr(self._is_pep484_tower)}, '
f'strategy={repr(self._strategy)}'
f')'
)
# ....................{ PRIVATE ~ globals }....................
_BEARTYPE_CONF_ARGS_TO_CONF: Dict[tuple, BeartypeConf] = {}
'''
Non-thread-safe **beartype configuration cache** (i.e., dictionary mapping from
the hash of each set of parameters accepted by a prior call of the
:meth:`BeartypeConf.__new__` instantiator to the unique :class:`BeartypeConf`
instance instantiated by that call).
Note that this cache is technically non-thread-safe. Since this cache is only
used as a memoization optimization, the only harmful consequences of a race
condition between threads contending over this cache is a mildly inefficient
(but otherwise harmless) repeated re-memoization of duplicate configurations.
'''
# ....................{ GLOBALS }....................
# This global is intentionally defined *AFTER* all other attributes above, which
# this global implicitly assumes to be defined.
BEARTYPE_CONF_DEFAULT = BeartypeConf()
'''
**Default beartype configuration** (i.e., :class:`BeartypeConf` class
instantiated with *no* parameters and thus default parameters), globalized to
trivially optimize external access to this configuration throughout this
codebase.
Note that this global is *not* publicized to end users, who can simply
instantiate ``BeartypeConf()`` to obtain the same singleton.
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype Decidedly Object-Oriented Runtime-checking (DOOR) procedural
type-checkers** (i.e., high-level functions type-checking arbitrary objects
against PEP-compliant type hints at *any* time during the lifecycle of the
active Python process).
'''
# ....................{ TODO }....................
#FIXME: Consider validating the signatures of both the is_bearable() function
#defined below *AND* TypeHint.is_bearable() method defined elsewhere to have
#returns annotated as "TypeHint[T]" from the perspective of static
#type-checking. Sadly, doing so is extremely non-trivial -- requiring usage of a
#new mandatory "pytest-mypy-plugins" test-time dependency, which itself requires
#definition of mypy-specific cases in a new supplementary top-level directory.
#Honestly, it's all a bit insane. We just wish we could call mypy's fake
#reveal_type() function at runtime, honestly. *sigh*
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: This submodule intentionally does *not* import the
# @beartype.beartype decorator. Why? Because that decorator conditionally
# reduces to a noop under certain contexts (e.g., `python3 -O` optimization),
# whereas the API defined by this submodule is expected to unconditionally
# operate as expected regardless of the current context.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.door._doortyping import (
T,
BeartypeTypeChecker,
)
from beartype.roar import (
BeartypeCallHintReturnViolation,
BeartypeDoorHintViolation,
)
from beartype.roar._roarexc import _BeartypeDoorTextException
from beartype.typing import TYPE_CHECKING
from beartype._check.checkmake import make_func_tester
from beartype._conf.confcls import (
BEARTYPE_CONF_DEFAULT,
BeartypeConf,
)
from beartype._decor._cache.cachedecor import beartype
from beartype._util.cache.utilcachecall import callable_cached
from beartype._util.error.utilerror import reraise_exception_placeholder
from beartype._util.hint.utilhintfactory import TypeHintTypeFactory
from beartype._util.hint.utilhinttest import die_unless_hint
from beartype._util.mod.lib.utiltyping import import_typing_attr_or_fallback
# ....................{ HINTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: Synchronize with similar logic in "beartype.door._cls.doorsuper".
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#FIXME: This approach is *PHENOMENAL.* No. Seriously, We could implement a
#full-blown "beartype.typing" subpackage (or perhaps even separate "beartyping"
#package) extending this core concept to *ALL* type hint factories, enabling
#users to trivially annotate with any type hint factory regardless of the
#current version of Python or whether "typing_extensions" is installed or not.
# Portably import the PEP 647-compliant "typing.TypeGuard" type hint factory
# first introduced by Python >= 3.10, regardless of the current version of
# Python and regardless of whether this submodule is currently being subject to
# static type-checking or not. Praise be to MIT ML guru and stunning Hypothesis
# maintainer @rsokl (Ryan Soklaski) for this brilliant circumvention. \o/
#
# Usage of this factory is a high priority. Hinting the return of the
# is_bearable() tester with a type guard created by this factory effectively
# coerces that tester in an arbitrarily complete type narrower and thus type
# parser at static analysis time, substantially reducing complaints from static
# type-checkers in end user code deferring to that tester.
#
# If this submodule is currently being statically type-checked (e.g., mypy),
# intentionally import from the third-party "typing_extensions" module rather
# than the standard "typing" module. Why? Because doing so eliminates Python
# version complaints from static type-checkers (e.g., mypy, pyright). Static
# type-checkers could care less whether "typing_extensions" is actually
# installed or not; they only care that "typing_extensions" unconditionally
# defines this type factory across all Python versions, whereas "typing" only
# conditionally defines this type factory under Python >= 3.10. *facepalm*
if TYPE_CHECKING:
from typing_extensions import TypeGuard
# Else, this submodule is currently being imported at runtime by Python. In this
# case, dynamically import this factory from whichever of the standard "typing"
# module *OR* the third-party "typing_extensions" module declares this factory,
# falling back to the builtin "bool" type if none do.
else:
TypeGuard = import_typing_attr_or_fallback(
'TypeGuard', TypeHintTypeFactory(bool))
# ....................{ VALIDATORS }....................
def die_if_unbearable(
    # Mandatory flexible parameters.
    obj: object,
    hint: object,

    # Optional keyword-only parameters.
    *,
    conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
) -> None:
    '''
    Raise an exception if the passed arbitrary object violates the passed
    PEP-compliant type hint under the passed beartype configuration.

    Parameters
    ----------
    obj : object
        Arbitrary object to be tested against this hint.
    hint : object
        PEP-compliant type hint to test this object against.
    conf : BeartypeConf, optional
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object). Defaults
        to ``BeartypeConf()``, the default ``O(1)`` constant-time configuration.

    Raises
    ----------
    beartype.roar.BeartypeDecorHintNonpepException
        If this hint is *not* PEP-compliant (i.e., complies with *no* Python
        Enhancement Proposals (PEPs) currently supported by :mod:`beartype`).
    beartype.roar.BeartypeDecorHintPepUnsupportedException
        If this hint is currently unsupported by :mod:`beartype`.
    beartype.roar.BeartypeDoorHintViolation
        If this object violates this hint.

    Examples
    ----------
        >>> from beartype.door import die_if_unbearable
        >>> die_if_unbearable(['And', 'what', 'rough', 'beast,'], list[str])
        >>> die_if_unbearable(['its', 'hour', 'come', 'round'], list[int])
        beartype.roar.BeartypeDoorHintViolation: Object ['its', 'hour', 'come',
        'round'] violates type hint list[int], as list index 0 item 'its' not
        instance of int.
    '''

    # Synthetic runtime type-checker for this hint and configuration: a
    # @beartype-decorated closure raising a "BeartypeCallHintReturnViolation"
    # exception when the object passed to that closure violates this hint.
    _check_object = _get_type_checker(hint, conf)

    # Type-check this object by passing it to this closure, which implicitly
    # type-checks this object as a return value.
    try:
        _check_object(obj)
    # If this closure rejected this object as violating this hint...
    except BeartypeCallHintReturnViolation as exception:
        # Message of this low-level violation.
        violation_message = str(exception)

        # If this message is *NOT* prefixed by the expected substring, raise
        # an exception.
        #
        # This should *NEVER* occur in production releases but could
        # potentially occur during pre-production testing. Explicitly checking
        # is thus worthwhile; investing time and effort in a human-readable
        # exception is not.
        if not violation_message.startswith(
            _TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX):
            raise _BeartypeDoorTextException(
                f'_check_object() exception '
                f'"{violation_message}" not prefixed by '
                f'"{_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX}".'
            ) from exception
        # Else, this message is prefixed by the expected substring.

        # Strip the irrelevant prefix from this message, substituting a
        # relevant prefix applicable to this higher-level function.
        violation_message = (
            'Object ' +
            violation_message[_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX_LEN:]
        )

        # Wrap this low-level violation in a more readable higher-level
        # exception preserving the original culprits.
        raise BeartypeDoorHintViolation(
            message=violation_message,
            culprits=exception.culprits,
        ) from exception
    # Else, this closure either raised no exception (object satisfies hint)
    # or raised another exception, which percolates back up this call stack.
# ....................{ TESTERS }....................
def is_subhint(subhint: object, superhint: object) -> bool:
    '''
    ``True`` only if the first passed hint is a **subhint** of the second
    passed hint, in which case this second hint is a **superhint** of this
    first hint.

    Equivalently, this tester returns ``True`` only if *all* of the following
    conditions apply:

    * These two hints are **semantically related** (i.e., convey broadly
      similar semantics enabling these two hints to be reasonably compared).
      For example:

      * ``callable.abc.Iterable[str]`` and ``callable.abc.Sequence[int]`` are
        semantically related. Both convey container semantics; despite their
        differing child hints, they are broadly similar enough to be
        reasonably comparable.
      * ``callable.abc.Iterable[str]`` and ``callable.abc.Callable[[], int]``
        are *not* semantically related. The first conveys a container
        semantic; the second conveys a callable semantic. Since these
        semantics are unrelated, these hints are dissimilar enough to *not*
        be reasonably comparable.

    * The first hint is **semantically equivalent** to or **narrower** than
      the second hint. Equivalently:

      * The first hint matches less than or equal to the total number of all
        possible objects matched by the second hint.
      * The size of the countably infinite set of all possible objects
        matched by the first hint is less than or equal to that of those
        matched by the second hint.

    * The first hint is **compatible** with the second hint. Since the first
      hint is semantically narrower than the second, APIs annotated by the
      first hint may safely replace that hint with the second hint; doing so
      preserves backward compatibility.

    Parameters
    ----------
    subhint : object
        PEP-compliant type hint or type to be tested as the subhint.
    superhint : object
        PEP-compliant type hint or type to be tested as the superhint.

    Returns
    -------
    bool
        ``True`` only if this first hint is a subhint of this second hint.

    Examples
    --------
        >>> from beartype.door import is_subhint
        >>> is_subhint(int, int)
        True
        >>> is_subhint(Callable[[], list], Callable[..., Sequence[Any]])
        True
        >>> is_subhint(Callable[[], list], Callable[..., Sequence[int]])
        False
    '''

    # Avoid circular import dependencies.
    from beartype.door._cls.doorsuper import TypeHint

    # Wrap both hints in their object-oriented facades...
    subhint_wrapper = TypeHint(subhint)
    superhint_wrapper = TypeHint(superhint)

    # ...and defer to the object-oriented comparison.
    return subhint_wrapper.is_subhint(superhint_wrapper)
# ....................{ TESTERS ~ is_bearable }....................
#FIXME: Improve unit tests to exhaustively exercise edge cases, including:
#* Invalid hints. In this case, test that the raised exception is prefixed by
# the expected substring rather than our exception placeholder.
def is_bearable(
    # Mandatory flexible parameters.
    obj: object,
    hint: T,

    # Optional keyword-only parameters.
    *,
    conf: BeartypeConf = BEARTYPE_CONF_DEFAULT,
) -> TypeGuard[T]:
    '''
    ``True`` only if the passed arbitrary object satisfies the passed
    PEP-compliant type hint under the passed beartype configuration.

    Parameters
    ----------
    obj : object
        Arbitrary object to be tested against this hint.
    hint : object
        PEP-compliant type hint to test this object against.
    conf : BeartypeConf, optional
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all settings configuring type-checking for the passed object). Defaults
        to ``BeartypeConf()``, the default ``O(1)`` constant-time configuration.

    Returns
    ----------
    bool
        ``True`` only if this object satisfies this hint.

    Raises
    ----------
    beartype.roar.BeartypeConfException
        If this configuration is *not* a :class:`BeartypeConf` instance.
    beartype.roar.BeartypeDecorHintForwardRefException
        If this hint contains one or more relative forward references, which
        this tester explicitly prohibits to improve both the efficiency and
        portability of calls to this tester.
    beartype.roar.BeartypeDecorHintNonpepException
        If this hint is *not* PEP-compliant (i.e., complies with *no* Python
        Enhancement Proposals (PEPs) currently supported by :mod:`beartype`).
    beartype.roar.BeartypeDecorHintPepUnsupportedException
        If this hint is currently unsupported by :mod:`beartype`.

    Examples
    ----------
        >>> from beartype.door import is_bearable
        >>> is_bearable(['Things', 'fall', 'apart;'], list[str])
        True
        >>> is_bearable(['the', 'centre', 'cannot', 'hold;'], list[int])
        False
    '''

    # Attempt to dynamically generate a memoized low-level type-checking
    # tester returning true only if the object passed to that tester
    # satisfies the type hint passed to this high-level tester.
    #
    # Note that parameters are intentionally passed positionally for
    # efficiency. Since make_func_tester() is memoized, passing parameters by
    # keyword would raise a non-fatal
    # "_BeartypeUtilCallableCachedKwargsWarning" warning.
    try:
        hint_tester = make_func_tester(hint, conf)
    # If doing so raised *any* exception, reraise this exception with each
    # placeholder substring (i.e., "EXCEPTION_PLACEHOLDER" instance) replaced
    # by a substring identifying this tester.
    except Exception as exception:
        reraise_exception_placeholder(
            exception=exception,
            target_str='is_bearable() ',
        )

    # Return true only if the passed object satisfies this hint.
    return hint_tester(obj)  # pyright: ignore[reportUnboundVariable]
# ....................{ PRIVATE ~ getters }....................
#FIXME: Shift into a more public location for widespread usage elsewhere: e.g.,
#* Define a new "beartype._check" subpackage.
#* Define a new "beartype._check.checkget" submodule.
#* Rename this function to get_object_checker() in that submodule.
#* Shift the "_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX*" globals into that
# submodule as well, probably publicized and renamed to:
# * "CHECKER_EXCEPTION_MESSAGE_PREFIX".
# * "CHECKER_EXCEPTION_MESSAGE_PREFIX_LEN".
#* *COPY* (i.e., do *NOT* move) the die_if_unbearable() into that submodule,
# renamed to either:
# * check_object(). This is the one, due to orthogonality with the
#   get_object_checker() getter.
# * die_if_object_violates_hint(). Fairly long, but disambiguous.
#* Generalize check_object() to accept additional *MANDATORY* "exception_cls"
# and "exception_prefix" parameters. Replace the currently hard-coded 'Object '
# prefix in check_object() by "exception_prefix".
#* Refactor die_if_unbearable() in this submodule to defer entirely to
# check_object() in that submodule.
#* Shift the "BeartypeTypeChecker" type hint into the existing
# "beartype._data.datatyping" submodule, publicized and renamed to
# "BeartypeChecker".
#FIXME: And... the prior "FIXME:" comment is almost certainly obsolete already.
#Eventually, we want to eliminate this getter entirely in favour of dynamically
#generating a full-blown exception raiser specific to the passed hint. *shrig*
@callable_cached
def _get_type_checker(
    hint: object, conf: BeartypeConf) -> BeartypeTypeChecker:
    '''
    Create, cache, and return a **synthetic runtime type-checker** (i.e.,
    function raising a :exc:`BeartypeCallHintReturnViolation` exception when
    the object passed to that function violates the hint passed to this parent
    getter under the passed beartype configuration).

    This factory intentionally raises :exc:`BeartypeCallHintReturnViolation`
    rather than :exc:`BeartypeCallHintParamViolation` exceptions. Since
    type-checking returns is *slightly* faster than type-checking parameters,
    this factory intentionally annotates the return rather than a parameter of
    this checker.

    This factory is memoized (by the :func:`callable_cached` decorator) for
    efficiency.

    Parameters
    ----------
    hint : object
        Type hint to validate *all* objects passed to the checker returned by
        this factory against.
    conf : BeartypeConf
        **Beartype configuration** (i.e., self-caching dataclass encapsulating
        all flags, options, settings, and other metadata configuring how this
        object is type-checked).

    Returns
    ----------
    BeartypeTypeChecker
        Synthetic runtime type-checker specific to this hint and configuration.

    Raises
    ----------
    beartype.roar.BeartypeDecorHintPepUnsupportedException
        If this hint is a PEP-compliant type hint currently unsupported by
        the :func:`beartype.beartype` decorator.
    beartype.roar.BeartypeDecorHintNonpepException
        If this hint is neither a:

        * Supported PEP-compliant type hint.
        * Supported PEP-noncompliant type hint.
    '''

    # If this hint is unsupported, raise an exception.
    #
    # Note that this technically duplicates a similar check performed by the
    # @beartype decorator below except that the exception prefix passed here
    # results in substantially more readable and relevant exceptions.
    die_unless_hint(hint=hint, exception_prefix='Functional ')
    # Else, this hint is supported.

    # @beartype-decorated closure raising an
    # "BeartypeCallHintReturnViolation" exception if the parameter passed to
    # this closure violates the hint passed to this parent tester.
    #
    # NOTE: The qualified names of both this getter *AND* this closure are
    # hard-coded into the "_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX" string
    # constant matched against exception messages by die_if_unbearable().
    # Renaming either silently breaks that prefix matching.
    @beartype(conf=conf)
    def _die_if_unbearable(pith) -> hint:  # type: ignore[valid-type]
        return pith

    # Return this closure.
    return _die_if_unbearable
# ....................{ PRIVATE ~ constants }....................
_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX = (
'@beartyped '
'beartype.door._doorcheck._get_type_checker._die_if_unbearable() '
'return '
)
'''
Irrelevant substring prefixing *all* exception messages raised by *all*
**synthetic runtime type-checkers** (i.e., callables dynamically created and
returned by the :func: `_get_type_checker` getter).
'''
_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX_LEN = (
len(_TYPE_CHECKER_EXCEPTION_MESSAGE_PREFIX))
'''
Length of the irrelevant substring prefixing *all*
exception messages raised by *all* **synthetic runtime type-checkers** (i.e.,
callables dynamically created and returned by the :func: `_get_type_checker`
getter).
'''
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype Decidedly Object-Oriented Runtime-checking (DOOR) API.**
This subpackage provides an object-oriented type hint class hierarchy,
encapsulating the crude non-object-oriented type hint declarative API
standardized by the :mod:`typing` module.
'''
# ....................{ TODO }....................
#FIXME: Create one unique "TypeHint" subclass *FOR EACH UNIQUE KIND OF TYPE
#HINT.* We're currently simply reusing the same
#"_TypeHintOriginIsinstanceableArgs*" family of concrete subclasses to
#transparently handle these unique kinds of type hints. That's fine as an
#internal implementation convenience. Sadly, that's *NOT* fine for users
#actually trying to introspect types. That's the great disadvantage of standard
#"typing" types, after all; they're *NOT* introspectable by type. Ergo, we need
#to explicitly define subclasses like:
#* "beartype.door.ListTypeHint".
#* "beartype.door.MappingTypeHint".
#* "beartype.door.SequenceTypeHint".
#
#And so on. There are a plethora, but ultimately a finite plethora, which is all
#that matters. Do this for our wonderful userbase, please.
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# WARNING: To avoid polluting the public module namespace, external attributes
# should be locally imported at module scope *ONLY* under alternate private
# names (e.g., "from argparse import ArgumentParser as _ArgumentParser" rather
# than merely "from argparse import ArgumentParser").
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.door._cls.doorsuper import (
TypeHint as TypeHint)
from beartype.door._doorcheck import (
die_if_unbearable as die_if_unbearable,
is_bearable as is_bearable,
is_subhint as is_subhint,
)
from beartype.door._cls.pep.doorpep484604 import (
UnionTypeHint as UnionTypeHint)
from beartype.door._cls.pep.doorpep586 import (
LiteralTypeHint as LiteralTypeHint)
from beartype.door._cls.pep.doorpep593 import (
AnnotatedTypeHint as AnnotatedTypeHint)
from beartype.door._cls.pep.pep484.doorpep484class import (
ClassTypeHint as ClassTypeHint)
from beartype.door._cls.pep.pep484.doorpep484newtype import (
NewTypeTypeHint as NewTypeTypeHint)
from beartype.door._cls.pep.pep484.doorpep484typevar import (
TypeVarTypeHint as TypeVarTypeHint)
from beartype.door._cls.pep.pep484585.doorpep484585callable import (
CallableTypeHint as CallableTypeHint)
#FIXME: Actually, let's *NOT* publicly expose this for the moment. Why? Because
#we still need to split this into fixed and variadic tuple subclasses.
# from beartype.door._cls.pep.pep484585.doorpep484585tuple import (
# _TupleTypeHint as _TupleTypeHint)
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype Decidedly Object-Oriented Runtime-checking (DOOR) testers** (i.e.,
callables testing and validating :class:`beartype.door.TypeHint` instances).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
from beartype.roar import BeartypeDoorException
# ....................{ VALIDATORS }....................
def die_unless_typehint(obj: object) -> None:
    '''
    Raise an exception unless the passed object is a **type hint wrapper**
    (i.e., :class:`TypeHint` instance).

    Parameters
    ----------
    obj : object
        Arbitrary object to be validated.

    Raises
    ----------
    beartype.roar.BeartypeDoorException
        If this object is *not* a type hint wrapper.
    '''

    # Import deferred to call time to sidestep circular import dependencies.
    from beartype.door._cls.doorsuper import TypeHint

    # Silently reduce to a noop if this object is a type hint wrapper.
    if isinstance(obj, TypeHint):
        return

    # Else, this object is *NOT* a type hint wrapper. Complain loudly.
    raise BeartypeDoorException(
        f'{repr(obj)} not type hint wrapper '
        f'(i.e., "beartype.door.TypeHint" instance).'
    )
|
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
**Beartype Decidedly Object-Oriented Runtime-checking (DOOR) type hints** (i.e.,
PEP-compliant type hints widely used throughout the :mod:`beartype.door` subpackage).
'''
# ....................{ IMPORTS }....................
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# CAUTION: This submodule intentionally does *not* import the
# @beartype.beartype decorator. Why? Because that decorator conditionally
# reduces to a noop under certain contexts (e.g., `python3 -O` optimization),
# whereas the API defined by this submodule is expected to unconditionally
# operate as expected regardless of the current context.
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
from beartype.typing import (
Callable,
TypeVar,
)
# ....................{ PRIVATE ~ hints }....................
T = TypeVar('T')
'''
PEP-compliant type variable matching an arbitrary PEP-compliant type hint.
'''


BeartypeTypeChecker = Callable[[object], None]
'''
PEP-compliant type hint matching a **runtime type-checker** (i.e., function
dynamically created and returned by the :func:`_get_type_checker` getter,
raising a :exc:`BeartypeCallHintReturnViolation` exception when the object
passed to that function violates a PEP-compliant type hint).
'''
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.