index | package | name | docstring | code | signature |
---|---|---|---|---|---|
41,397 |
pyglove.core.symbolic.base
|
sym_hasattr
|
Returns if a symbolic attribute exists.
|
@abc.abstractmethod
def sym_hasattr(self, key: Union[str, int]) -> bool:
"""Returns if a symbolic attribute exists."""
|
(self, key: Union[str, int]) -> bool
|
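A minimal usage sketch (assuming ``pg.Dict``, a concrete implementation of the symbolic interface)::

  import pyglove as pg

  d = pg.Dict(x=1)
  assert d.sym_hasattr('x')
  assert not d.sym_hasattr('y')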
41,398 |
pyglove.core.symbolic.base
|
sym_hash
|
Computes the symbolic hash of current object.
|
@abc.abstractmethod
def sym_hash(self) -> int:
"""Computes the symbolic hash of current object."""
|
(self) -> int
|
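A brief sketch of the contract (assuming ``pg.Dict`` as the concrete type): symbolically equal objects produce the same symbolic hash::

  import pyglove as pg

  assert pg.Dict(x=1).sym_hash() == pg.Dict(x=1).sym_hash()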
41,401 |
pyglove.core.symbolic.base
|
sym_items
|
Iterates the (key, value) pairs of symbolic attributes.
|
@abc.abstractmethod
def sym_items(self) -> Iterator[Tuple[Union[str, int], Any]]:
"""Iterates the (key, value) pairs of symbolic attributes."""
|
(self) -> Iterator[Tuple[Union[str, int], Any]]
|
41,402 |
pyglove.core.symbolic.base
|
sym_jsonify
|
Converts representation of current object to a plain Python object.
|
@abc.abstractmethod
def sym_jsonify(self,
*,
hide_default_values: bool = False,
**kwargs) -> object_utils.JSONValueType:
"""Converts representation of current object to a plain Python object."""
|
(self, *, hide_default_values: bool = False, **kwargs) -> Union[int, float, bool, str, List[Any], Dict[str, Any]]
|
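A minimal sketch (assuming ``pg.Dict``): ``sym_jsonify`` returns a plain Python object suitable for JSON serialization::

  import pyglove as pg

  d = pg.Dict(x=1, y='foo')
  assert d.sym_jsonify() == {'x': 1, 'y': 'foo'}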
41,403 |
pyglove.core.symbolic.base
|
sym_keys
|
Iterates the keys of symbolic attributes.
|
@abc.abstractmethod
def sym_keys(self) -> Iterator[Union[str, int]]:
"""Iterates the keys of symbolic attributes."""
|
(self) -> Iterator[Union[str, int]]
|
41,413 |
pyglove.core.symbolic.base
|
sym_values
|
Iterates the values of symbolic attributes.
|
@abc.abstractmethod
def sym_values(self) -> Iterator[Any]:
"""Iterates the values of symbolic attributes."""
|
(self) -> Iterator[Any]
|
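A combined sketch for ``sym_keys``, ``sym_items`` and ``sym_values`` (assuming ``pg.Dict`` iterates keys in insertion order)::

  import pyglove as pg

  d = pg.Dict(x=1, y='foo')
  assert list(d.sym_keys()) == ['x', 'y']
  assert list(d.sym_values()) == [1, 'foo']
  assert list(d.sym_items()) == [('x', 1), ('y', 'foo')]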
41,416 |
pyglove.core.symbolic.base
|
TraverseAction
|
Enum for the next action after a symbolic node is visited.
See also: :func:`pyglove.traverse`.
|
class TraverseAction(enum.Enum):
"""Enum for the next action after a symbolic node is visited.
See also: :func:`pyglove.traverse`.
"""
# Traverse should immediately stop.
STOP = 0
# Traverse should enter sub-tree if sub-tree exists and traverse is in
# pre-order. For post-order traverse, it has the same effect as CONTINUE.
ENTER = 1
# Traverse should continue to next node without entering the sub-tree.
CONTINUE = 2
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
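A hedged sketch of how ``TraverseAction`` drives :func:`pyglove.traverse`; the visitor signature ``(key_path, value, parent)`` is an assumption here::

  import pyglove as pg

  d = pg.Dict(a=1, b=pg.Dict(c=2))

  def visit(key_path, value, parent):
    del parent  # Unused in this sketch.
    print(key_path, value)
    # Keep visiting sibling nodes without entering sub-trees.
    return pg.TraverseAction.CONTINUE

  pg.traverse(d, visit)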
41,417 |
pyglove.core.typing.class_schema
|
ValueSpec
|
Interface for value specifications.
A value specification defines what values are acceptable for a symbolic
field (see :class:`pyglove.Field`). When assignments take place on the
attributes for the field, the associated ValueSpec object will kick in to
intercept the process and take care of the following aspects:
* Type check
* Noneable check
* Value validation and transformation
* Default value assignment
See :meth:`.apply` for more details.
Different aspects of assignment interception are handled by the following
methods:
+-----------------------+-------------------------------------------------+
| Aspect name | Property/Method |
+=======================+=================================================+
| Type check | :attr:`.value_type` |
+-----------------------+-------------------------------------------------+
| Noneable check | :attr:`.is_noneable` |
+-----------------------+-------------------------------------------------+
| Type-specific value | :meth:`.apply` |
| validation and | |
| transformation | |
+-----------------------+-------------------------------------------------+
| User transform | :attr:`.transform` |
+-----------------------+-------------------------------------------------+
| Default value lookup | :attr:`.default` |
+-----------------------+-------------------------------------------------+
There are many ``ValueSpec`` subclasses, each corresponding to a commonly used
Python type, e.g. `Bool`, `Int`, `Float`, etc. PyGlove's builtin value
specifications are:
+---------------------------+----------------------------------------------+
| ``ValueSpec`` type | Class |
+===========================+==============================================+
| bool | :class:`pyglove.typing.Bool` |
+---------------------------+----------------------------------------------+
| int | :class:`pyglove.typing.Int` |
+---------------------------+----------------------------------------------+
| float | :class:`pyglove.typing.Float` |
+---------------------------+----------------------------------------------+
| str | :class:`pyglove.typing.Str` |
+---------------------------+----------------------------------------------+
| enum | :class:`pyglove.typing.Enum` |
+---------------------------+----------------------------------------------+
| list | :class:`pyglove.typing.List` |
+---------------------------+----------------------------------------------+
| tuple | :class:`pyglove.typing.Tuple` |
+---------------------------+----------------------------------------------+
| dict | :class:`pyglove.typing.Dict` |
+---------------------------+----------------------------------------------+
| instance of a class | :class:`pyglove.typing.Object` |
+---------------------------+----------------------------------------------+
| callable | :class:`pyglove.typing.Callable` |
+---------------------------+----------------------------------------------+
| functor | :class:`pyglove.typing.Functor` |
+---------------------------+----------------------------------------------+
| type | :class:`pyglove.typing.Type` |
+---------------------------+----------------------------------------------+
| union | :class:`pyglove.typing.Union` |
+---------------------------+----------------------------------------------+
| any | :class:`pyglove.typing.Any` |
+---------------------------+----------------------------------------------+
**Construction**
A value specification is an instance of a ``ValueSpec`` subclass. All
:class:`pyglove.ValueSpec` subclasses follow a common pattern to construct::
pg.typing.<ValueSpecClass>(
[validation-rules],
[default=<default>],
[transform=<transform>])
After creation, a ``ValueSpec`` object can be modified with chaining.
The code below creates an int specification with default value 1 and can
accept None::
pg.typing.Int().noneable().set_default(1)
**Usage**
To apply a value specification on a user input to get the accepted value,
:meth:`pyglove.ValueSpec.apply` shall be used::
value = pg.typing.Int(min_value=1).apply(4)
assert value == 4
**Extension**
Besides, a ``ValueSpec`` object can extend another ``ValueSpec`` object to
obtain a more restricted ``ValueSpec`` object. For example::
pg.typing.Int(min_value=1).extend(pg.typing.Int(max_value=5))
will end up with::
pg.typing.Int(min_value=1, max_value=5)
which is useful when a subclass adds additional restrictions to a
symbolic attribute inherited from its base class. For some use cases, a value
spec can be frozen to avoid subclass extensions::
pg.typing.Int().freeze(1)
``ValueSpec`` objects can be created and modified with chaining. For example::
pg.typing.Int().noneable().set_default(1)
The code above creates an int specification with default value 1 and can
accept None.
A ``ValueSpec`` object can also be derived from annotations.
For example, the annotation below
@pg.members([
('a', pg.typing.List(pg.typing.Str)),
('b', pg.typing.Dict().set_default({'key': 'value'})),
('c', pg.typing.List(pg.typing.Any()).noneable()),
('x', pg.typing.Int()),
('y', pg.typing.Str().noneable()),
('z', pg.typing.Union(pg.typing.Int(), pg.typing.Float()))
])
can be written as
@pg.members([
('a', list[str]),
('b', {'key': 'value'}),
('c', Optional[list]),
('x', int),
('y', Optional[str]),
('z', Union[int, float])
])
|
class ValueSpec(object_utils.Formattable, object_utils.JSONConvertible):
"""Interface for value specifications.
A value specification defines what values are acceptable for a symbolic
field (see :class:`pyglove.Field`). When assignments take place on the
attributes for the field, the associated ValueSpec object will kick in to
intercept the process and take care of the following aspects:
* Type check
* Noneable check
* Value validation and transformation
* Default value assignment
See :meth:`.apply` for more details.
Different aspects of assignment interception are handled by the following
methods:
+-----------------------+-------------------------------------------------+
| Aspect name | Property/Method |
+=======================+=================================================+
| Type check | :attr:`.value_type` |
+-----------------------+-------------------------------------------------+
| Noneable check | :attr:`.is_noneable` |
+-----------------------+-------------------------------------------------+
| Type-specific value | :meth:`.apply` |
| validation and | |
| transformation | |
+-----------------------+-------------------------------------------------+
| User transform | :attr:`.transform` |
+-----------------------+-------------------------------------------------+
| Default value lookup | :attr:`.default` |
+-----------------------+-------------------------------------------------+
There are many ``ValueSpec`` subclasses, each corresponding to a commonly used
Python type, e.g. `Bool`, `Int`, `Float`, etc. PyGlove's builtin value
specifications are:
+---------------------------+----------------------------------------------+
| ``ValueSpec`` type | Class |
+===========================+==============================================+
| bool | :class:`pyglove.typing.Bool` |
+---------------------------+----------------------------------------------+
| int | :class:`pyglove.typing.Int` |
+---------------------------+----------------------------------------------+
| float | :class:`pyglove.typing.Float` |
+---------------------------+----------------------------------------------+
| str | :class:`pyglove.typing.Str` |
+---------------------------+----------------------------------------------+
| enum | :class:`pyglove.typing.Enum` |
+---------------------------+----------------------------------------------+
| list | :class:`pyglove.typing.List` |
+---------------------------+----------------------------------------------+
| tuple | :class:`pyglove.typing.Tuple` |
+---------------------------+----------------------------------------------+
| dict | :class:`pyglove.typing.Dict` |
+---------------------------+----------------------------------------------+
| instance of a class | :class:`pyglove.typing.Object` |
+---------------------------+----------------------------------------------+
| callable | :class:`pyglove.typing.Callable` |
+---------------------------+----------------------------------------------+
| functor | :class:`pyglove.typing.Functor` |
+---------------------------+----------------------------------------------+
| type | :class:`pyglove.typing.Type` |
+---------------------------+----------------------------------------------+
| union | :class:`pyglove.typing.Union` |
+---------------------------+----------------------------------------------+
| any | :class:`pyglove.typing.Any` |
+---------------------------+----------------------------------------------+
**Construction**
A value specification is an instance of a ``ValueSpec`` subclass. All
:class:`pyglove.ValueSpec` subclasses follow a common pattern to construct::
pg.typing.<ValueSpecClass>(
[validation-rules],
[default=<default>],
[transform=<transform>])
After creation, a ``ValueSpec`` object can be modified with chaining.
The code below creates an int specification with default value 1 and can
accept None::
pg.typing.Int().noneable().set_default(1)
**Usage**
To apply a value specification on a user input to get the accepted value,
:meth:`pyglove.ValueSpec.apply` shall be used::
value = pg.typing.Int(min_value=1).apply(4)
assert value == 4
**Extension**
Besides, a ``ValueSpec`` object can extend another ``ValueSpec`` object to
obtain a more restricted ``ValueSpec`` object. For example::
pg.typing.Int(min_value=1).extend(pg.typing.Int(max_value=5))
will end up with::
pg.typing.Int(min_value=1, max_value=5)
which is useful when a subclass adds additional restrictions to a
symbolic attribute inherited from its base class. For some use cases, a value
spec can be frozen to avoid subclass extensions::
pg.typing.Int().freeze(1)
``ValueSpec`` objects can be created and modified with chaining. For example::
pg.typing.Int().noneable().set_default(1)
The code above creates an int specification with default value 1 and can
accept None.
A ``ValueSpec`` object can also be derived from annotations.
For example, the annotation below
@pg.members([
('a', pg.typing.List(pg.typing.Str)),
('b', pg.typing.Dict().set_default({'key': 'value'})),
('c', pg.typing.List(pg.typing.Any()).noneable()),
('x', pg.typing.Int()),
('y', pg.typing.Str().noneable()),
('z', pg.typing.Union(pg.typing.Int(), pg.typing.Float()))
])
can be written as
@pg.members([
('a', list[str]),
('b', {'key': 'value'}),
('c', Optional[list]),
('x', int),
('y', Optional[str]),
('z', Union[int, float])
])
"""
# pylint: disable=invalid-name
# List-type value spec class.
ListType: Type['ValueSpec']
# Dict-type value spec class.
DictType: Type['ValueSpec']
# Object-type value spec class.
ObjectType: Type['ValueSpec']
# pylint: enable=invalid-name
@property
@abc.abstractmethod
def value_type(self) -> Union[
Type[Any],
Tuple[Type[Any], ...]]: # pyformat: disable
"""Returns acceptable (resolved) value type(s)."""
@property
@abc.abstractmethod
def forward_refs(self) -> Set[ForwardRef]:
"""Returns forward referenes used by the value spec."""
@abc.abstractmethod
def noneable(self) -> 'ValueSpec':
"""Marks none-able and returns `self`."""
@property
@abc.abstractmethod
def is_noneable(self) -> bool:
"""Returns True if current value spec accepts None."""
@abc.abstractmethod
def set_default(self,
default: Any,
use_default_apply: bool = True) -> 'ValueSpec':
"""Sets the default value and returns `self`.
Args:
default: Default value.
use_default_apply: If True, invoke `apply` on the value, otherwise use
the default value as is.
Returns:
ValueSpec itself.
Raises:
ValueError: If default value cannot be applied when use_default_apply
is set to True.
"""
@property
@abc.abstractmethod
def default(self) -> Any:
"""Returns the default value.
If no default is provided, MISSING_VALUE will be returned for non-dict
types. For the Dict type, a dict that may contain nested MISSING_VALUE
will be returned.
"""
@property
def has_default(self) -> bool:
"""Returns True if the default value is provided."""
return self.default != object_utils.MISSING_VALUE
@abc.abstractmethod
def freeze(
self,
permanent_value: Any = object_utils.MISSING_VALUE,
apply_before_use: bool = True) -> 'ValueSpec':
"""Sets the default value using a permanent value and freezes current spec.
A frozen value spec will not accept any value that is not the default
value. A frozen value spec is useful when a subclass fixes the value of a
symbolic attribute and wants to prevent it from being modified.
Args:
permanent_value: A permanent value used for current spec.
If MISSING_VALUE, freeze the value spec with current default value.
apply_before_use: If True, invoke `apply` on permanent value
when permanent_value is provided, otherwise use it as is.
Returns:
ValueSpec itself.
Raises:
ValueError: If the current default value is MISSING_VALUE and the permanent
value is not specified.
"""
@property
@abc.abstractmethod
def frozen(self) -> bool:
"""Returns True if current value spec is frozen."""
@property
@abc.abstractmethod
def annotation(self) -> Any:
"""Returns PyType annotation. MISSING_VALUE if annotation is absent."""
@property
@abc.abstractmethod
def transform(self) -> Optional[Callable[[Any], Any]]:
"""Returns a transform that will be applied on the input before apply."""
@abc.abstractmethod
def is_compatible(self, other: 'ValueSpec') -> bool:
"""Returns True if values acceptable to `other` is acceptable to this spec.
Args:
other: Other value spec.
Returns:
True if values that are applicable to the other value spec can be applied
to the current spec. Otherwise False.
"""
@abc.abstractmethod
def extend(self, base: 'ValueSpec') -> 'ValueSpec':
"""Extends a base spec with current spec's rules.
Args:
base: Base ValueSpec to extend.
Returns:
ValueSpec itself.
Raises:
TypeError: When this value spec cannot extend from base.
"""
@abc.abstractmethod
def apply(
self,
value: Any,
allow_partial: bool = False,
child_transform: Optional[Callable[
[object_utils.KeyPath, 'Field', Any], Any]] = None,
root_path: Optional[object_utils.KeyPath] = None,
) -> Any:
"""Validates, completes and transforms the input value.
Here is the procedure of ``apply``::
(1). Choose the default value if the input value is ``MISSING_VALUE``
(2). Check whether the input value is None.
(2.a) Input value is None and ``value_spec.is_noneable()`` is False,
raises Error.
(2.b) Input value is not None or ``value_spec.is_noneable()`` is True,
goto step (3).
(3). Run ``value_spec.custom_apply`` if the input value is a
``CustomTyping`` instance.
(3.a). If ``value_spec.custom_apply`` returns a value that indicates to
proceed with standard apply, goto step (4).
(3.b). Else goto step (6)
(4). Check the input value type against the ``value_spec.value_type``.
(4.a). If their value type matches, go to step (5)
(4.b). Else if there is a converter registered between input value type
and the value spec's value type, perform the conversion, and go
to step (5). (see pg.typing.register_converter)
(4.c) Otherwise raises type mismatch.
(5). Perform type-specific and user validation and transformation.
For complex types such as Dict, List, Tuple, call `child_spec.apply`
recursively on the child fields.
(6). Perform user transform and returns final value
(invoked at Field.apply.)
Args:
value: Input value to apply.
allow_partial: If True, partial value is allowed. This is useful for
container types (dict, list, tuple).
child_transform: Function to transform child node values into final
values.
(NOTE: This transform will not be performed on the current value. Instead,
the transform on the current value is done by Field.apply, which has adequate
information to call the transform with both KeySpec and ValueSpec).
root_path: Key path of current node.
Returns:
Final value:
* When allow_partial is set to False (default), only input value that
has no missing values can be applied.
* When allow_partial is set to True, missing fields will be filled with
MISSING_VALUE as placeholders.
Raises:
KeyError: If additional key is found in value, or required key is missing
and allow_partial is set to False.
TypeError: If type of value is not the same as spec required.
ValueError: If value is not acceptable, or value is MISSING_VALUE while
allow_partial is set to False.
"""
@property
def type_resolved(self) -> bool:
"""Returns True if all forward references are resolved."""
return all(ref.resolved for ref in self.forward_refs)
def __ne__(self, other: Any) -> bool:
"""Operator !=."""
return not self.__eq__(other)
@classmethod
def from_annotation(
cls,
annotation: Any,
auto_typing=False,
accept_value_as_annotation=False) -> 'ValueSpec':
"""Gets a concrete ValueSpec from annotation."""
del annotation
assert False, 'Overridden in `annotation_conversion.py`.'
|
()
|
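The construction, usage and extension patterns from the docstring, assembled into one runnable sketch::

  import pyglove as pg

  # Construction with chaining: an int spec that accepts None and defaults to 1.
  spec = pg.typing.Int(min_value=1).noneable().set_default(1)

  # Usage: `apply` validates and returns the accepted value.
  assert spec.apply(4) == 4
  assert spec.apply(None) is None

  # Extension: merge the restrictions of two specs.
  combined = pg.typing.Int(min_value=1).extend(pg.typing.Int(max_value=5))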
41,423 |
pyglove.core.typing.class_schema
|
apply
|
Validates, completes and transforms the input value.
Here is the procedure of ``apply``::
(1). Choose the default value if the input value is ``MISSING_VALUE``
(2). Check whether the input value is None.
(2.a) Input value is None and ``value_spec.is_noneable()`` is False,
raises Error.
(2.b) Input value is not None or ``value_spec.is_noneable()`` is True,
goto step (3).
(3). Run ``value_spec.custom_apply`` if the input value is a
``CustomTyping`` instance.
(3.a). If ``value_spec.custom_apply`` returns a value that indicates to
proceed with standard apply, goto step (4).
(3.b). Else goto step (6)
(4). Check the input value type against the ``value_spec.value_type``.
(4.a). If their value type matches, go to step (5)
(4.b). Else if there is a converter registered between input value type
and the value spec's value type, perform the conversion, and go
to step (5). (see pg.typing.register_converter)
(4.c) Otherwise raises type mismatch.
(5). Perform type-specific and user validation and transformation.
For complex types such as Dict, List, Tuple, call `child_spec.apply`
recursively on the child fields.
(6). Perform user transform and returns final value
(invoked at Field.apply.)
Args:
value: Input value to apply.
allow_partial: If True, partial value is allowed. This is useful for
container types (dict, list, tuple).
child_transform: Function to transform child node values into final
values.
(NOTE: This transform will not be performed on the current value. Instead,
the transform on the current value is done by Field.apply, which has adequate
information to call the transform with both KeySpec and ValueSpec).
root_path: Key path of current node.
Returns:
Final value:
* When allow_partial is set to False (default), only input value that
has no missing values can be applied.
* When allow_partial is set to True, missing fields will be filled with
MISSING_VALUE as placeholders.
Raises:
KeyError: If additional key is found in value, or required key is missing
and allow_partial is set to False.
TypeError: If type of value is not the same as spec required.
ValueError: If value is not acceptable, or value is MISSING_VALUE while
allow_partial is set to False.
|
@abc.abstractmethod
def apply(
self,
value: Any,
allow_partial: bool = False,
child_transform: Optional[Callable[
[object_utils.KeyPath, 'Field', Any], Any]] = None,
root_path: Optional[object_utils.KeyPath] = None,
) -> Any:
"""Validates, completes and transforms the input value.
Here is the procedure of ``apply``::
(1). Choose the default value if the input value is ``MISSING_VALUE``
(2). Check whether the input value is None.
(2.a) Input value is None and ``value_spec.is_noneable()`` is False,
raises Error.
(2.b) Input value is not None or ``value_spec.is_noneable()`` is True,
goto step (3).
(3). Run ``value_spec.custom_apply`` if the input value is a
``CustomTyping`` instance.
(3.a). If ``value_spec.custom_apply`` returns a value that indicates to
proceed with standard apply, goto step (4).
(3.b). Else goto step (6)
(4). Check the input value type against the ``value_spec.value_type``.
(4.a). If their value type matches, go to step (5)
(4.b). Else if there is a converter registered between input value type
and the value spec's value type, perform the conversion, and go
to step (5). (see pg.typing.register_converter)
(4.c) Otherwise raises type mismatch.
(5). Perform type-specific and user validation and transformation.
For complex types such as Dict, List, Tuple, call `child_spec.apply`
recursively on the child fields.
(6). Perform user transform and returns final value
(invoked at Field.apply.)
Args:
value: Input value to apply.
allow_partial: If True, partial value is allowed. This is useful for
container types (dict, list, tuple).
child_transform: Function to transform child node values into final
values.
(NOTE: This transform will not be performed on the current value. Instead,
the transform on the current value is done by Field.apply, which has adequate
information to call the transform with both KeySpec and ValueSpec).
root_path: Key path of current node.
Returns:
Final value:
* When allow_partial is set to False (default), only input value that
has no missing values can be applied.
* When allow_partial is set to True, missing fields will be filled with
MISSING_VALUE as placeholders.
Raises:
KeyError: If additional key is found in value, or required key is missing
and allow_partial is set to False.
TypeError: If type of value is not the same as spec required.
ValueError: If value is not acceptable, or value is MISSING_VALUE while
allow_partial is set to False.
"""
|
(self, value: Any, allow_partial: bool = False, child_transform: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None) -> Any
|
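A small sketch of `allow_partial` from the procedure above (assuming ``pg.typing.Dict`` accepts a list of field tuples and that ``pg.MISSING_VALUE`` is the placeholder for missing fields)::

  import pyglove as pg

  spec = pg.typing.Dict([('x', pg.typing.Int())])

  # With allow_partial=True, the missing field `x` is filled with MISSING_VALUE.
  partial = spec.apply({}, allow_partial=True)
  assert partial['x'] == pg.MISSING_VALUE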
41,424 |
pyglove.core.typing.class_schema
|
extend
|
Extends a base spec with current spec's rules.
Args:
base: Base ValueSpec to extend.
Returns:
ValueSpec itself.
Raises:
TypeError: When this value spec cannot extend from base.
|
@abc.abstractmethod
def extend(self, base: 'ValueSpec') -> 'ValueSpec':
"""Extends a base spec with current spec's rules.
Args:
base: Base ValueSpec to extend.
Returns:
ValueSpec itself.
Raises:
TypeError: When this value spec cannot extend from base.
"""
|
(self, base: pyglove.core.typing.class_schema.ValueSpec) -> pyglove.core.typing.class_schema.ValueSpec
|
41,426 |
pyglove.core.typing.class_schema
|
freeze
|
Sets the default value using a permanent value and freezes current spec.
A frozen value spec will not accept any value that is not the default
value. A frozen value spec is useful when a subclass fixes the value of a
symbolic attribute and wants to prevent it from being modified.
Args:
permanent_value: A permanent value used for current spec.
If MISSING_VALUE, freeze the value spec with current default value.
apply_before_use: If True, invoke `apply` on permanent value
when permanent_value is provided, otherwise use it as is.
Returns:
ValueSpec itself.
Raises:
ValueError: If the current default value is MISSING_VALUE and the permanent
value is not specified.
|
@abc.abstractmethod
def freeze(
self,
permanent_value: Any = object_utils.MISSING_VALUE,
apply_before_use: bool = True) -> 'ValueSpec':
"""Sets the default value using a permanent value and freezes current spec.
A frozen value spec will not accept any value that is not the default
value. A frozen value spec is useful when a subclass fixes the value of a
symbolic attribute and wants to prevent it from being modified.
Args:
permanent_value: A permanent value used for current spec.
If MISSING_VALUE, freeze the value spec with current default value.
apply_before_use: If True, invoke `apply` on permanent value
when permanent_value is provided, otherwise use it as is.
Returns:
ValueSpec itself.
Raises:
ValueError: If the current default value is MISSING_VALUE and the permanent
value is not specified.
"""
|
(self, permanent_value: Any = MISSING_VALUE, apply_before_use: bool = True) -> pyglove.core.typing.class_schema.ValueSpec
|
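A runnable sketch grounded in the docstring: a frozen spec only accepts its default::

  import pyglove as pg

  spec = pg.typing.Int().freeze(1)
  assert spec.frozen
  assert spec.apply(1) == 1
  # spec.apply(2) would raise ValueError: a frozen spec rejects non-default values.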
41,427 |
pyglove.core.typing.annotation_conversion
|
_value_spec_from_annotation
|
Creates a value spec from annotation.
|
def _value_spec_from_annotation(
annotation: typing.Any,
auto_typing=False,
accept_value_as_annotation=False
) -> class_schema.ValueSpec:
"""Creates a value spec from annotation."""
if isinstance(annotation, class_schema.ValueSpec):
return annotation
elif annotation == inspect.Parameter.empty:
return vs.Any()
elif annotation is None:
if accept_value_as_annotation:
return vs.Any().noneable()
else:
return vs.Any().freeze(None)
if auto_typing:
return _value_spec_from_type_annotation(
annotation, accept_value_as_annotation)
else:
value_spec = None
if accept_value_as_annotation:
# Accepting default values as annotations applies only when auto typing is off.
value_spec = _value_spec_from_default_value(annotation)
return value_spec or _any_spec_with_annotation(annotation)
|
(annotation: Any, auto_typing=False, accept_value_as_annotation=False) -> pyglove.core.typing.class_schema.ValueSpec
|
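This private helper backs the public ``ValueSpec.from_annotation`` classmethod (see the ValueSpec entry above). A hedged sketch of the public entry point; the exact spec types produced are assumptions::

  import pyglove as pg

  # With auto_typing, a type annotation maps to a concrete spec (e.g. Int).
  spec = pg.typing.ValueSpec.from_annotation(int, auto_typing=True)

  # Without auto_typing, the spec falls back to Any with the annotation attached.
  loose = pg.typing.ValueSpec.from_annotation(int, auto_typing=False)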
41,428 |
pyglove.core.typing.class_schema
|
is_compatible
|
Returns True if values acceptable to `other` are acceptable to this spec.
Args:
other: Other value spec.
Returns:
True if values that are applicable to the other value spec can be applied
to the current spec. Otherwise False.
|
@abc.abstractmethod
def is_compatible(self, other: 'ValueSpec') -> bool:
"""Returns True if values acceptable to `other` is acceptable to this spec.
Args:
other: Other value spec.
Returns:
True if values that are applicable to the other value spec can be applied
to the current spec. Otherwise False.
"""
|
(self, other: pyglove.core.typing.class_schema.ValueSpec) -> bool
|
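A short sketch of the compatibility direction: per the docstring, ``a.is_compatible(b)`` asks whether values acceptable to ``b`` are acceptable to ``a``::

  import pyglove as pg

  broader = pg.typing.Int()
  narrower = pg.typing.Int(min_value=1, max_value=5)

  # Every value the narrower spec accepts is also acceptable to the broader one.
  assert broader.is_compatible(narrower)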
41,429 |
pyglove.core.typing.class_schema
|
noneable
|
Marks none-able and returns `self`.
|
@abc.abstractmethod
def noneable(self) -> 'ValueSpec':
"""Marks none-able and returns `self`."""
|
(self) -> pyglove.core.typing.class_schema.ValueSpec
|
41,430 |
pyglove.core.typing.class_schema
|
set_default
|
Sets the default value and returns `self`.
Args:
default: Default value.
use_default_apply: If True, invoke `apply` on the value, otherwise use
the default value as is.
Returns:
ValueSpec itself.
Raises:
ValueError: If default value cannot be applied when use_default_apply
is set to True.
|
@abc.abstractmethod
def set_default(self,
default: Any,
use_default_apply: bool = True) -> 'ValueSpec':
"""Sets the default value and returns `self`.
Args:
default: Default value.
use_default_apply: If True, invoke `apply` on the value, otherwise use
the default value as is.
Returns:
ValueSpec itself.
Raises:
ValueError: If default value cannot be applied when use_default_apply
is set to True.
"""
|
(self, default: Any, use_default_apply: bool = True) -> pyglove.core.typing.class_schema.ValueSpec
|
41,432 |
pyglove.core.symbolic.base
|
WritePermissionError
|
Exception raised when write access to object fields is not allowed.
|
class WritePermissionError(Exception):
"""Exception raisen when write access to object fields is not allowed."""
| null |
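A hedged sketch of when this exception surfaces (assuming sealed symbolic objects raise it on attribute writes, per the `as_sealed` entry below, and that the class is exported as ``pg.WritePermissionError``)::

  import pyglove as pg

  d = pg.Dict(a=1).seal()
  try:
    d.a = 2
  except pg.WritePermissionError:
    print('write access to a sealed object is not allowed')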
41,433 |
pyglove.core.symbolic.flags
|
allow_empty_field_description
|
Allow empty field description, which is useful for testing purposes.
|
def allow_empty_field_description(allow: bool = True) -> None:
"""Allow empty field description, which is useful for testing purposes."""
global _ALLOW_EMPTY_FIELD_DESCRIPTION
_ALLOW_EMPTY_FIELD_DESCRIPTION = allow
|
(allow: bool = True) -> NoneType
|
41,434 |
pyglove.core.symbolic.flags
|
allow_partial
|
Returns a context manager that allows partial values in scope.
This function is thread-safe and can be nested. In the nested use case, the
allow flag of the immediate parent context is effective.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Int())
])
class A(pg.Object):
pass
with pg.allow_partial(True):
a = A(x=1) # Missing `y`, but OK
with pg.allow_partial(False):
a.rebind(x=pg.MISSING_VALUE) # NOT OK
a.rebind(x=pg.MISSING_VALUE) # OK
Args:
allow: If True, allow partial symbolic values in scope.
If False, do not allow partial symbolic values in scope even if
individual objects allow it. If None, honor the object-level
`allow_partial` property.
Returns:
A context manager that allows/disallows partial symbolic values in scope.
After leaving the scope, the `allow_partial` state of individual objects
will remain intact.
|
def allow_partial(allow: Optional[bool] = True) -> ContextManager[None]:
"""Returns a context manager that allows partial values in scope.
This function is thread-safe and can be nested. In the nested use case, the
allow flag of the immediate parent context is effective.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Int())
])
class A(pg.Object):
pass
with pg.allow_partial(True):
a = A(x=1) # Missing `y`, but OK
with pg.allow_partial(False):
a.rebind(x=pg.MISSING_VALUE) # NOT OK
a.rebind(x=pg.MISSING_VALUE) # OK
Args:
allow: If True, allow partial symbolic values in scope.
If False, do not allow partial symbolic values in scope even if
individual objects allow it. If None, honor the object-level
`allow_partial` property.
Returns:
A context manager that allows/disallows partial symbolic values in scope.
After leaving the scope, the `allow_partial` state of individual objects
will remain intact.
"""
return thread_local.thread_local_value_scope(_TLS_ALLOW_PARTIAL, allow, None)
|
(allow: Optional[bool] = True) -> ContextManager[NoneType]
|
41,435 |
pyglove.core.symbolic.flags
|
allow_repeated_class_registration
|
Allow repeated class registration, which is useful for testing purposes.
|
def allow_repeated_class_registration(allow: bool = True) -> None:
"""Allow repeated class registration, which is useful for testing purposes."""
global _ALLOW_REPEATED_CLASS_REGISTRATION
_ALLOW_REPEATED_CLASS_REGISTRATION = allow
|
(allow: bool = True) -> NoneType
|
41,436 |
pyglove.core.symbolic.flags
|
allow_writable_accessors
|
Returns a context manager that makes accessor writable in scope.
This function is thread-safe and can be nested. In the nested use case, the
writable flag of the immediate parent context is effective.
Example::
sd1 = pg.Dict()
sd2 = pg.Dict(accessor_writable=False)
with pg.allow_writable_accessors(False):
sd1.a = 2 # NOT OK
sd2.a = 2 # NOT OK
with pg.allow_writable_accessors(True):
sd1.a = 2 # OK
sd2.a = 2 # OK
with pg.allow_writable_accessors(None):
sd1.a = 1 # OK
sd2.a = 1 # NOT OK
Args:
writable: If True, allow write access with accessors (__setattr__,
__setitem__) for all symbolic values in scope.
If False, disallow write access via accessors for all symbolic values
in scope, even if individual objects allow it.
If None, honor object-level `accessor_writable` flag.
Returns:
A context manager that allows/disallows writable accessors of all
symbolic values in scope. After leaving the scope, the
`accessor_writable` flag of individual objects will remain intact.
|
def allow_writable_accessors(
writable: Optional[bool] = True) -> ContextManager[None]:
"""Returns a context manager that makes accessor writable in scope.
This function is thread-safe and can be nested. In the nested use case, the
writable flag of the immediate parent context is effective.
Example::
sd1 = pg.Dict()
sd2 = pg.Dict(accessor_writable=False)
with pg.allow_writable_accessors(False):
sd1.a = 2 # NOT OK
sd2.a = 2 # NOT OK
with pg.allow_writable_accessors(True):
sd1.a = 2 # OK
sd2.a = 2 # OK
with pg.allow_writable_accessors(None):
sd1.a = 1 # OK
sd2.a = 1 # NOT OK
Args:
writable: If True, allow write access with accessors (__setattr__,
__setitem__) for all symbolic values in scope.
If False, disallow write access via accessors for all symbolic values
in scope, even if individual objects allow it.
If None, honor object-level `accessor_writable` flag.
Returns:
A context manager that allows/disallows writable accessors of all
symbolic values in scope. After leaving the scope, the
`accessor_writable` flag of individual objects will remain intact.
"""
return thread_local.thread_local_value_scope(
_TLS_ACCESSOR_WRITABLE, writable, None
)
|
(writable: Optional[bool] = True) -> ContextManager[NoneType]
|
41,437 |
pyglove.core.symbolic.class_wrapper
|
apply_wrappers
|
Context manager for swapping user classes with their class wrappers.
This helper method is a handy tool to swap user classes with their wrappers
within a code block, without modifying existing code.
For example::
def foo():
return A()
APrime = pg.wrap(A)
with pg.apply_wrappers([APrime]):
# Direct creation of an instance of `A` will be detoured to `APrime`.
assert isinstance(A(), APrime)
# Indirect creation of an instance of `A` will be detoured too.
assert isinstance(foo(), APrime)
# Out of the scope, direct/indirect creation of `A` will be restored.
assert not isinstance(A(), APrime)
assert not isinstance(foo(), APrime)
``pg.apply_wrappers`` can be nested, under which the inner context will apply
the wrappers from the outer context. ``pg.apply_wrappers`` is NOT
thread-safe.
Args:
wrapper_classes: Wrapper classes to use. If None, sets it to all registered
wrapper classes.
where: An optional filter function in signature (wrapper_class) -> bool.
If not None, only filtered `wrapper_class` will be swapped.
Returns:
A context manager that detours the original classes to the wrapper classes.
|
def apply_wrappers(
wrapper_classes: Optional[Sequence[Type['ClassWrapper']]] = None,
where: Optional[Callable[[Type['ClassWrapper']], bool]] = None):
"""Context manager for swapping user classes with their class wrappers.
This helper method is a handy tool to swap user classes with their wrappers
within a code block, without modifying existing code.
For example::
def foo():
return A()
APrime = pg.wrap(A)
with pg.apply_wrappers([APrime]):
# Direct creation of an instance of `A` will be detoured to `APrime`.
assert isinstance(A(), APrime)
# Indirect creation of an instance of `A` will be detoured too.
assert isinstance(foo(), APrime)
# Out of the scope, direct/indirect creation of `A` will be restored.
assert not isinstance(A(), APrime)
assert not isinstance(foo(), APrime)
``pg.apply_wrappers`` can be nested, under which the inner context will apply
the wrappers from the outer context. ``pg.apply_wrappers`` is NOT
thread-safe.
Args:
wrapper_classes: Wrapper classes to use. If None, sets it to all registered
wrapper classes.
where: An optional filter function in signature (wrapper_class) -> bool.
If not None, only filtered `wrapper_class` will be swapped.
Returns:
A context manager that detours the original classes to the wrapper classes.
"""
if not wrapper_classes:
wrapper_classes = []
for _, c in object_utils.JSONConvertible.registered_types():
if (issubclass(c, ClassWrapper)
and c not in (ClassWrapper, _SubclassedWrapperBase)
and (not where or where(c))
and c.sym_wrapped_cls is not None):
wrapper_classes.append(c)
return detouring.detour([(c.sym_wrapped_cls, c) for c in wrapper_classes])
|
(wrapper_classes: Optional[Sequence[Type[pyglove.core.symbolic.class_wrapper.ClassWrapper]]] = None, where: Optional[Callable[[Type[pyglove.core.symbolic.class_wrapper.ClassWrapper]], bool]] = None)
|
41,438 |
pyglove.core.symbolic.flags
|
as_sealed
|
Returns a context manager to treat symbolic values as sealed/unsealed.
While the user can use `Symbolic.seal` to seal or unseal an individual object,
this context manager is useful to create a readonly zone for operations on
all existing symbolic objects.
This function is thread-safe and can be nested. In the nested use case, the
sealed flag of the immediate parent context is effective.
Example::
sd1 = pg.Dict()
sd2 = pg.Dict().seal()
with pg.as_sealed(True):
sd1.a = 2 # NOT OK
sd2.a = 2 # NOT OK
with pg.as_sealed(False):
sd1.a = 2 # OK
sd2.a = 2 # OK
with pg.as_sealed(None):
sd1.a = 1 # OK
sd2.a = 1 # NOT OK
Args:
sealed: If True, treats all symbolic values as sealed in scope.
If False, treats all as unsealed.
If None, honor object-level `sealed` state.
Returns:
A context manager that treats all symbolic values as sealed/unsealed
in scope. After leaving the scope, the sealed state of individual objects
will remain intact.
|
def as_sealed(sealed: Optional[bool] = True) -> ContextManager[None]:
"""Returns a context manager to treat symbolic values as sealed/unsealed.
While the user can use `Symbolic.seal` to seal or unseal an individual object,
this context manager is useful to create a readonly zone for operations on
all existing symbolic objects.
This function is thread-safe and can be nested. In the nested use case, the
sealed flag of the immediate parent context is effective.
Example::
sd1 = pg.Dict()
sd2 = pg.Dict().seal()
with pg.as_sealed(True):
sd1.a = 2 # NOT OK
sd2.a = 2 # NOT OK
with pg.as_sealed(False):
sd1.a = 2 # OK
sd2.a = 2 # OK
with pg.as_sealed(None):
sd1.a = 1 # OK
sd2.a = 1 # NOT OK
Args:
sealed: If True, treats all symbolic values as sealed in scope.
If False, treats all as unsealed.
If None, honor object-level `sealed` state.
Returns:
A context manager that treats all symbolic values as sealed/unsealed
in scope. After leaving the scope, the sealed state of individual objects
will remain intact.
"""
return thread_local.thread_local_value_scope(_TLS_SEALED, sealed, None)
|
(sealed: Optional[bool] = True) -> ContextManager[NoneType]
|
41,439 |
pyglove.core.symbolic.flags
|
auto_call_functors
|
Returns a context manager to enable or disable auto call for functors.
`auto_call_functors` is thread-safe and can be nested. For example::
@pg.symbolize
def foo(x, y):
return x + y
with pg.auto_call_functors(True):
a = foo(1, 2)
assert a == 3
with pg.auto_call_functors(False):
b = foo(1, 2)
assert isinstance(b, foo)
Args:
enabled: If True, enable auto call for functors.
Otherwise, auto call will be disabled.
Returns:
A context manager for enabling/disabling auto call for functors.
|
def auto_call_functors(enabled: bool = True) -> ContextManager[None]:
"""Returns a context manager to enable or disable auto call for functors.
`auto_call_functors` is thread-safe and can be nested. For example::
@pg.symbolize
def foo(x, y):
return x + y
with pg.auto_call_functors(True):
a = foo(1, 2)
assert a == 3
with pg.auto_call_functors(False):
b = foo(1, 2)
assert isinstance(b, foo)
Args:
enabled: If True, enable auto call for functors.
Otherwise, auto call will be disabled.
Returns:
A context manager for enabling/disabling auto call for functors.
"""
return thread_local.thread_local_value_scope(
_TLS_AUTO_CALL_FUNCTORS, enabled, False
)
|
(enabled: bool = True) -> ContextManager[NoneType]
|
41,440 |
pyglove.core.symbolic.boilerplate
|
boilerplate_class
|
Create a boilerplate class using a symbolic object.
As the name indicates, a boilerplate class is a class that can be used
as a boilerplate to create objects.
Implementation-wise it's a class that extends the type of input value, while
setting the default values of its (inherited) schema using the value from
input.
An analogy to a boilerplate class is a prebound function.
For example::
# A regular function: corresponds to a pg.Object subclass.
def f(a, b, c):
return a + b + c
# A partially bound function: corresponds to a boilerplate class created
# from a partially bound object.
def g(c):
return f(1, 2, c)
# A fully bound function: corresponds to a boilerplate class created from
# a fully bound object.
def h():
return f(1, 2, 3)
Boilerplate class can be created with a value that is fully bound
(like function `h` above), or partially bound (like function `g` above).
Since boilerplate class extends the type of the input, we can rebind members
of its instances as we modify the input.
Here are a few examples::
@pg.members([
('a', pg.typing.Str(), 'Field A.'),
('b', pg.typing.Int(), 'Field B.'),
])
class A(pg.Object):
pass
A1 = pg.boilerplate_class('A1', A.partial(a='foo'))
assert A1(b=1) == A(a='foo', b=1)
A2 = pg.boilerplate_class('A2', A(a='bar', b=2))
assert A2() == A(a='bar', b=2)
Args:
cls_name: Name of the boilerplate class.
value: Value that is used as the default value of the boilerplate class.
init_arg_list: An optional list of strings as __init__ positional arguments
names.
**kwargs: Keyword arguments for infrequently used options. Acceptable
keywords are: * `serialization_key`: An optional string to be used as the
serialization key for the class during `sym_jsonify`. If None,
`cls.__type_name__` will be used. This is introduced for scenarios where we
want to relocate a class: before the downstream can recognize the new
location, we need the class to serialize using the previous key. *
`additional_keys`: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
Returns:
A class which extends the input value's type, with its schema's default
values set from the input value.
Raises:
TypeError: Keyword argument provided is not supported.
|
def boilerplate_class(
cls_name: str,
value: pg_object.Object,
init_arg_list: Optional[List[str]] = None,
**kwargs) -> Type[pg_object.Object]:
"""Create a boilerplate class using a symbolic object.
As the name indicates, a boilerplate class is a class that can be used
as a boilerplate to create objects.
Implementation-wise it's a class that extends the type of input value, while
setting the default values of its (inherited) schema using the value from
input.
An analogy to a boilerplate class is a prebound function.
For example::
# A regular function: corresponds to a pg.Object subclass.
def f(a, b, c):
return a + b + c
# A partially bound function: corresponds to a boilerplate class created
# from a partially bound object.
def g(c):
return f(1, 2, c)
# A fully bound function: corresponds to a boilerplate class created from
# a fully bound object.
def h():
return f(1, 2, 3)
Boilerplate class can be created with a value that is fully bound
(like function `h` above), or partially bound (like function `g` above).
Since boilerplate class extends the type of the input, we can rebind members
of its instances as we modify the input.
Here are a few examples::
@pg.members([
('a', pg.typing.Str(), 'Field A.'),
('b', pg.typing.Int(), 'Field B.'),
])
class A(pg.Object):
pass
A1 = pg.boilerplate_class('A1', A.partial(a='foo'))
assert A1(b=1) == A(a='foo', b=1)
A2 = pg.boilerplate_class('A2', A(a='bar', b=2))
assert A2() == A(a='bar', b=2)
Args:
cls_name: Name of the boilerplate class.
value: Value that is used as the default value of the boilerplate class.
init_arg_list: An optional list of strings as __init__ positional arguments
names.
**kwargs: Keyword arguments for infrequently used options. Acceptable
keywords are: * `serialization_key`: An optional string to be used as the
serialization key for the class during `sym_jsonify`. If None,
`cls.__type_name__` will be used. This is introduced for scenarios where we
want to relocate a class: before the downstream can recognize the new
location, we need the class to serialize using the previous key. *
`additional_keys`: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
Returns:
A class which extends the input value's type, with its schema's default
values set from the input value.
Raises:
TypeError: Keyword argument provided is not supported.
"""
if not isinstance(value, pg_object.Object):
raise ValueError('Argument \'value\' must be an instance of '
'symbolic.Object subclass.')
serialization_key = kwargs.pop('serialization_key', None)
additional_keys = kwargs.pop('additional_keys', None)
if kwargs:
raise TypeError(
f'Unsupported keyword arguments: {list(kwargs.keys())!r}.')
base_cls = value.__class__
class _BoilerplateClass(base_cls):
"""Boilerplate class."""
auto_register = False
is_boilerplate = True
caller_module = inspect.getmodule(inspect.stack()[1][0])
cls_module = caller_module.__name__ if caller_module else '__main__'
cls = _BoilerplateClass
cls.__name__ = cls_name
cls.__qualname__ = cls.__qualname__.replace(
'boilerplate_class.<locals>._BoilerplateClass', cls_name)
cls.__module__ = cls_module
# Enable automatic registration for subclass.
cls.auto_register = True
allow_partial = value.allow_partial
def _freeze_field(path: object_utils.KeyPath,
field: pg_typing.Field,
value: Any) -> Any:
# We do not do validation since Object is already in valid form.
del path
if not isinstance(field.key, pg_typing.ListKey):
# Recursively freeze dict field.
if isinstance(field.value, pg_typing.Dict) and field.value.schema:
field.value.schema.apply(
value, allow_partial=allow_partial, child_transform=_freeze_field)
field.value.set_default(value)
if all(f.frozen for f in field.value.schema.values()):
field.value.freeze()
else:
if value != pg_typing.MISSING_VALUE:
field.value.freeze(copy.deepcopy(value), apply_before_use=False)
else:
field.value.set_default(
pg_typing.MISSING_VALUE, use_default_apply=False)
return value
# NOTE(daiyip): we call `cls.__schema__.apply` to freeze fields that have
# default values. But we no longer need to formalize `cls.__schema__`, since
# it's copied from the boilerplate object's class which was already
# formalized.
with flags.allow_writable_accessors():
cls.__schema__.apply(
value._sym_attributes, # pylint: disable=protected-access
allow_partial=allow_partial,
child_transform=_freeze_field,
)
if init_arg_list is not None:
schema_utils.validate_init_arg_list(init_arg_list, cls.__schema__)
cls.__schema__.metadata['init_arg_list'] = init_arg_list
cls.register_for_deserialization(serialization_key, additional_keys)
return cls
|
(cls_name: str, value: pyglove.core.symbolic.object.Object, init_arg_list: Optional[List[str]] = None, **kwargs) -> Type[pyglove.core.symbolic.object.Object]
|
41,441 |
pyglove.core.object_utils.error_utils
|
catch_errors
|
Context manager for catching user-specified exceptions.
Examples::
with pg.object_utils.catch_errors(
[
RuntimeError,
(ValueError, 'Input is wrong.')
],
) as error_context:
do_something()
if error_context.error:
# Error branch.
handle_error(error_context.error)
Args:
errors: A sequence of exception types or tuples of exception type and error
messages (described in regular expression) as the desired exception types
to catch. If an error is raised within the scope that does not match
the specification, it will be propagated to the outer scope.
error_handler: An optional callable object to handle the error on failure.
It's usually provided if the user wants to create a context manager based
on `pg.catch_errors` with specific error-handling logic.
Yields:
A CatchErrorsContext object.
| null |
(errors: Union[Type[Exception], Tuple[Exception, str], Sequence[Union[Type[Exception], Tuple[Exception, str]]]], error_handler: Optional[Callable[[Exception], NoneType]] = None)
|
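A self-contained sketch of the usage described above; the ``error`` attribute on the yielded context comes from the docstring::

  import pyglove as pg

  with pg.object_utils.catch_errors([ValueError]) as context:
    raise ValueError('Input is wrong.')

  if context.error:
    print(f'caught: {context.error}')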
41,442 |
pyglove.core.symbolic.base
|
clone
|
Clones a value. Use symbolic clone if possible.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Any())
])
class A(pg.Object):
pass
# B is not a symbolic object.
class B:
pass
# Shallow copy on non-symbolic values (by reference).
a = A(1, B())
b = pg.clone(a)
assert pg.eq(a, b)
assert a.y is b.y
# Deepcopy on non-symbolic values.
c = pg.clone(a, deep=True)
assert pg.ne(a, c)
assert a.y is not c.y
# Copy with override
d = pg.clone(a, override={'x': 2})
assert d.x == 2
assert d.y is a.y
Args:
x: value to clone.
deep: If True, use deep clone, otherwise use shallow clone.
memo: Optional memo object for deep clone.
override: Value to override if value is symbolic.
Returns:
Cloned instance.
|
def clone(
x: Any,
deep: bool = False,
memo: Optional[Any] = None,
override: Optional[Dict[str, Any]] = None
) -> Any:
"""Clones a value. Use symbolic clone if possible.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Any())
])
class A(pg.Object):
pass
# B is not a symbolic object.
class B:
pass
# Shallow copy on non-symbolic values (by reference).
a = A(1, B())
b = pg.clone(a)
assert pg.eq(a, b)
assert a.y is b.y
# Deepcopy on non-symbolic values.
c = pg.clone(a, deep=True)
assert pg.ne(a, c)
assert a.y is not c.y
# Copy with override
d = pg.clone(a, override={'x': 2})
assert d.x == 2
assert d.y is a.y
Args:
x: value to clone.
deep: If True, use deep clone, otherwise use shallow clone.
memo: Optional memo object for deep clone.
override: Value to override if value is symbolic.
Returns:
Cloned instance.
"""
if isinstance(x, Symbolic):
return x.sym_clone(deep, memo, override)
else:
assert not override
return copy.deepcopy(x, memo) if deep else copy.copy(x)
|
(x: Any, deep: bool = False, memo: Optional[Any] = None, override: Optional[Dict[str, Any]] = None) -> Any
|
41,443 |
pyglove.core.symbolic.compounding
|
compound
|
Function decorator to create compound class.
Example::
@dataclasses.dataclass
class Foo:
x: int
y: int
def sum(self):
return self.x + self.y
@pg.compound
def foo_with_equal_x_y(v: int) -> Foo:
return Foo(v, v)
f = foo_with_equal_x_y(1)
# First of all, the objects of compound classes can be used as an in-place
# replacement for the objects of regular classes, as they are subclasses of
# the regular classes.
assert issubclass(foo_with_equal_x_y, Foo)
assert isinstance(f, Foo)
# We can access symbolic attributes of the compound object.
assert f.v == 1
# We can also access the public APIs of the decomposed object.
assert f.x == 1
assert f.y == 1
assert f.sum() == 2
# Or explicitly access the decomposed object.
assert f.decomposed == Foo(1, 1)
# Moreover, symbolic power is fully unleashed to the compound class.
f.rebind(v=2)
assert f.x == 2
assert f.y == 2
assert f.sum() == 4
# Errors with runtime type check: 2.5 is not an integer.
f.rebind(v=2.5)
Args:
base_class: The base class of the compound class, which should be a
``pg.Object`` type. If None, it will be inferred from the return
annotation of `factory_fn`. If the annotation is not present or
`auto_typing` is set to False, `base_class` must be present.
args: Symbolic args specification. See :class:`pg.compound_class` for
details.
**kwargs: Keyword arguments. See :class:`pg.compound_class` for details.
Returns:
A symbolic compound class that subclasses `base_class`.
|
def compound(
base_class: Optional[Type[Object]] = None,
args: Optional[
List[
Union[
Tuple[Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str],
Tuple[
Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str, Any
],
]
]
] = None, # pylint: disable=bad-continuation
**kwargs
):
"""Function decorator to create compound class.
Example::
@dataclasses.dataclass
class Foo:
x: int
y: int
def sum(self):
return self.x + self.y
@pg.compound
def foo_with_equal_x_y(v: int) -> Foo:
return Foo(v, v)
f = foo_with_equal_x_y(1)
# First of all, the objects of compound classes can be used as an in-place
# replacement for the objects of regular classes, as they are subclasses of
# the regular classes.
assert issubclass(foo_with_equal_x_y, Foo)
assert isinstance(f, Foo)
# We can access symbolic attributes of the compound object.
assert f.v == 1
# We can also access the public APIs of the decomposed object.
assert f.x == 1
assert f.y == 1
assert f.sum() == 2
# Or explicitly access the decomposed object.
assert f.decomposed == Foo(1, 1)
# Moreover, symbolic power is fully unleashed to the compound class.
f.rebind(v=2)
assert f.x == 2
assert f.y == 2
assert f.sum() == 4
# Errors with runtime type check: 2.5 is not an integer.
f.rebind(v=2.5)
Args:
base_class: The base class of the compound class, which should be a
``pg.Object`` type. If None, it will be inferred from the return
annotation of `factory_fn`. If the annotation is not present or
`auto_typing` is set to False, `base_class` must be present.
args: Symbolic args specification. See :class:`pg.compound_class` for
details.
**kwargs: Keyword arguments. See :class:`pg.compound_class` for details.
Returns:
A symbolic compound class that subclasses `base_class`.
"""
if inspect.isfunction(base_class):
assert args is None
return compound_class(base_class, add_to_registry=True, **kwargs)
return lambda fn: compound_class( # pylint: disable=g-long-lambda # pytype: disable=wrong-arg-types
fn, base_class, args, add_to_registry=True, **kwargs
)
|
(base_class: Optional[Type[pyglove.core.symbolic.object.Object]] = None, args: Optional[List[Union[Tuple[Tuple[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str], Tuple[Tuple[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str, Any]]]] = None, **kwargs)
|
41,444 |
pyglove.core.symbolic.compounding
|
compound_class
|
Creates a compound class from a factory function.
Args:
factory_fn: A function that produces a compound object.
base_class: The base class of the compound class, which should be a
``pg.Object`` type. If None, it will be inferred from the return
annotation of `factory_fn`. If the annotation is not present or
`auto_typing` is set to False, `base_class` must be present.
args: Symbolic args specification. `args` is a list of tuples, each
describes an argument from the input function. Each tuple is in the format of
(<argument-name>, <value-spec>, [description], [metadata-objects]).
`argument-name` - a `str` or `pg_typing.StrKey` object. When
`pg_typing.StrKey` is used, it describes the wildcard keyword argument.
`value-spec` - a `pg_typing.ValueSpec` object or equivalent, e.g.
primitive values which will be converted to ValueSpec implementation
according to its type and used as its default value. `description` - a
string to describe the argument. `metadata-objects` - an optional list of
any type, which can be used to generate code according to the schema.
There are notable rules in filling the `args`: 1) When `args` is None or
arguments from the function signature are missing from it, `schema.Field`
for these fields will be automatically generated and inserted into `args`.
That being said, every argument in the input function will have a
`schema.Field` counterpart in `Functor.schema.fields`, sorted by the
declaration order of each argument in the function signature (rather than
the order in `args`). 2) Default argument values are specified along with
the function definition, as in regular Python functions, instead of being set at
`schema.Field` level. But validation rules can be set using `args` and
apply to argument values.
lazy_build: If True, `factory_fn` will be called upon first use. Otherwise,
it will be called at construction.
auto_doc: If True, the descriptions of argument fields will be inherited
from the `factory_fn` docstring if they are not explicitly specified
through ``args``.
auto_typing: If True, the value spec for constraining each argument will be
inferred from its annotation. Otherwise the value specs for all arguments
will be ``pg.typing.Any()``.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios when we want to relocate a class:
before the downstream can recognize the new location, we need the class to
serialize using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
add_to_registry: If True, the newly created functor class will be added to
the registry for deserialization.
Returns:
A callable that converts a factory function into a subclass of the base
class.
|
def compound_class(
factory_fn: types.FunctionType,
base_class: Optional[Type[Object]] = None,
args: Optional[
List[
Union[
Tuple[Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str],
Tuple[
Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str, Any
],
]
]
] = None, # pylint: disable=bad-continuation
*,
lazy_build: bool = True,
auto_doc: bool = True,
auto_typing: bool = True,
serialization_key: Optional[str] = None,
additional_keys: Optional[List[str]] = None,
add_to_registry: bool = False
) -> Type[Compound]:
"""Creates a compound class from a factory function.
Args:
factory_fn: A function that produces a compound object.
base_class: The base class of the compound class, which should be a
``pg.Object`` type. If None, it will be inferred from the return
annotation of `factory_fn`. If the annotation is not present or
`auto_typing` is set to False, `base_class` must be present.
args: Symbolic args specification. `args` is a list of tuples, each of
which describes an argument from the input function. Each tuple is in the
format of (<argument-name>, <value-spec>, [description],
[metadata-objects]). `argument-name` - a `str` or `pg_typing.StrKey`
object. When `pg_typing.StrKey` is used, it describes the wildcard keyword
argument. `value-spec` - a `pg_typing.ValueSpec` object or equivalent,
e.g. primitive values which will be converted to a ValueSpec
implementation according to its type and used as its default value.
`description` - a string to describe the argument. `metadata-objects` - an
optional list of objects of any type, which can be used to generate code
according to the schema. There are two notable rules in filling `args`:
1) When `args` is None or arguments from the function signature are
missing from it, a `schema.Field` for each of these fields will be
automatically generated and inserted into `args`. That being said, every
argument of the input function will have a `schema.Field` counterpart in
`Functor.schema.fields`, sorted by the declaration order of each argument
in the function signature (rather than the order in `args`). 2) Default
argument values are specified along with the function definition as in
regular Python functions, instead of being set at the `schema.Field`
level. But validation rules can be set using `args` and apply to argument
values.
lazy_build: If True, `factory_fn` will be called upon first use. Otherwise,
it will be called at construction.
auto_doc: If True, the descriptions of argument fields will be inherited
from the `factory_fn` docstring if they are not explicitly specified
through ``args``.
auto_typing: If True, the value spec for constraining each argument will be
inferred from its annotation. Otherwise the value specs for all arguments
will be ``pg.typing.Any()``.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios when we want to relocate a class:
before the downstream can recognize the new location, we need the class to
serialize using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
add_to_registry: If True, the newly created functor class will be added to
the registry for deserialization.
Returns:
A callable that converts a factory function into a subclass of the base
class.
"""
if not inspect.isfunction(factory_fn):
raise TypeError('Decorator `compound` is only applicable to functions.')
schema = schema_utils.function_schema(
factory_fn,
args=args,
returns=pg_typing.Object(base_class) if base_class else None,
auto_doc=auto_doc,
auto_typing=auto_typing,
)
# Infer the base_class from the schema.
return_spec = schema.metadata.get('returns', None)
if isinstance(return_spec, pg_typing.Object):
base_class = return_spec.cls
else:
raise ValueError(
'Cannot infer the base class from the return value annotation. '
'Please either add an annotation for the return value or provide a '
'value for the `base_class` argument.'
)
class _Compound(Compound, base_class):
"""The compound class bound to a factory function."""
# Disable auto register so we can use function module and name
# for registration later.
auto_register = False
# The compound class uses the function signature to decide its
# schema, thus we do not infer its schema from the class annotations.
auto_schema = False
def _on_bound(self):
# NOTE(daiyip): Do not call `super()._on_bound()` to avoid side effect.
# This is okay since all states are delegated to `self.decomposed`.
Compound._on_bound(self) # pylint: disable=protected-access
self._sym_decomposed = None
if not lazy_build:
# Trigger build.
_ = self.decomposed
@property
def decomposed(self):
if self._sym_decomposed is None:
# Build the compound object.
self._sym_decomposed = factory_fn(**self.sym_init_args)
# This allows the decomposed symbolic object to access the parent chain
# for value retrieval of inferred attributes.
if isinstance(self._sym_decomposed, Symbolic):
self._sym_decomposed.sym_setparent(self.sym_parent)
return self._sym_decomposed
def _on_parent_change(self, old_parent, new_parent):
"""Override to allow decomposed object to follow parent chain change."""
super()._on_parent_change(old_parent, new_parent)
if isinstance(self._sym_decomposed, Symbolic):
self._sym_decomposed.sym_setparent(new_parent)
def _sym_inferred(self, key: str, **kwargs) -> Any:
# Bypass the user base's `_sym_inferred` if it's overridden.
return Compound._sym_inferred(self, key, **kwargs)
def __getattribute__(self, name: str):
if (
name.startswith('_')
or name in _COMPOUND_OWNED_ATTR_NAMES
or name in self.sym_init_args
):
return Compound.__getattribute__(self, name)
# Redirect attribute to the compound object.
return getattr(self.decomposed, name)
cls = _Compound
cls.__name__ = factory_fn.__name__
cls.__qualname__ = factory_fn.__qualname__
cls.__module__ = factory_fn.__module__
cls.__doc__ = factory_fn.__doc__
# Enable automatic registration of subclass.
cls.auto_register = True
cls.apply_schema(schema)
# Support abstract classes as compound base classes.
# This feature requires Python 3.10 and above.
if sys.version_info >= (3, 10):
# NOTE(daiyip): Override abstract methods as non-ops, so `cls` could
# have an abstract class as its base. We don't need to worry about the
# implementation of the abstract method, since it will be detoured to the
# decomposed object at runtime via `__getattribute__`.
for key in dir(cls):
attr = getattr(cls, key)
if getattr(attr, '__isabstractmethod__', False):
noop = lambda self, *args, **kwargs: None
if isinstance(attr, property):
noop = property(noop)
else:
assert inspect.isfunction(attr), (key, attr)
setattr(cls, key, noop)
abc.update_abstractmethods(cls)
if add_to_registry:
cls.register_for_deserialization(serialization_key, additional_keys)
return cls
|
(factory_fn: function, base_class: Optional[Type[pyglove.core.symbolic.object.Object]] = None, args: Optional[List[Union[Tuple[Tuple[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str], Tuple[Tuple[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str, Any]]]] = None, *, lazy_build: bool = True, auto_doc: bool = True, auto_typing: bool = True, serialization_key: Optional[str] = None, additional_keys: Optional[List[str]] = None, add_to_registry: bool = False) -> Type[pyglove.core.symbolic.compounding.Compound]
|
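For comparison, a hedged sketch of calling `compound_class` directly instead of using the decorator form, reusing the hypothetical `Foo` class from the sketch above::

def make_foo(v: int) -> Foo:
  return Foo(v, v)

# Build the compound class explicitly and register it for deserialization.
FooMaker = pg.compound_class(make_foo, add_to_registry=True)
f = FooMaker(3)
assert f.sum() == 6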
41,445 |
pyglove.core.symbolic.base
|
contains
|
Returns if a value contains a specific value or values of a specific type.
Example::
@pg.members([
('x', pg.typing.Any()),
('y', pg.typing.Any())
])
class A(pg.Object):
pass
# Test if a symbolic tree contains a value.
assert pg.contains(A('a', 'b'), 'a')
assert not pg.contains(A('a', 'b'), A)
# Test if a symbolic tree contains a type.
assert pg.contains({'x': A(1, 2)}, type=A)
assert pg.contains({'x': A(1, 2)}, type=int)
assert pg.contains({'x': A(1, 2)}, type=(int, float))
Args:
x: The source value to query against.
value: The value of a sub-node to match. Applicable when `type` is None.
type: A type or a tuple of types to match against sub-nodes. Takes
precedence over `value` when not None.
Returns:
True if `x` itself or any of its sub-nodes equals `value` or
is an instance of `type`.
|
def contains(
x: Any,
value: Any = None,
type: Optional[Union[ # pylint: disable=redefined-builtin
Type[Any],
Tuple[Type[Any]]]]=None
) -> bool:
"""Returns if a value contains values of specific type.
Example::
@pg.members([
('x', pg.typing.Any()),
('y', pg.typing.Any())
])
class A(pg.Object):
pass
# Test if a symbolic tree contains a value.
assert pg.contains(A('a', 'b'), 'a')
assert not pg.contains(A('a', 'b'), A)
# Test if a symbolic tree contains a type.
assert pg.contains({'x': A(1, 2)}, type=A)
assert pg.contains({'x': A(1, 2)}, type=int)
assert pg.contains({'x': A(1, 2)}, type=(int, float))
Args:
x: The source value to query against.
value: The value of a sub-node to match. Applicable when `type` is None.
type: A type or a tuple of types to match against sub-nodes. Takes
precedence over `value` when not None.
Returns:
True if `x` itself or any of its sub-nodes equals `value` or
is an instance of `type`.
"""
if type is not None:
def _contains(k, v, p):
del k, p
if isinstance(v, type):
return TraverseAction.STOP
return TraverseAction.ENTER
else:
def _contains(k, v, p):
del k, p
if v == value:
return TraverseAction.STOP
return TraverseAction.ENTER
return not traverse(x, _contains)
|
(x: Any, value: Optional[Any] = None, type: Union[NoneType, Type[Any], Tuple[Type[Any]]] = None) -> bool
|
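A small supplementary sketch (assuming class `A` from the docstring above): `pg.contains` also traverses lists and nested containers::

assert pg.contains([A('a', 'b')], 'b')
assert pg.contains([{'x': A(1, 2)}], type=A)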
41,447 |
pyglove.core.detouring.class_detour
|
detour
|
Context manager for detouring object creation.
At times, we want to replace an object of a class to an object of a different
class. Usually, we do so by passing the object as a function argument using
dependency injection. However, it's not always possible to expose those
internal objects as parameters to the class, as we cannot predict what needs
to be customized in the future. Also, exposing too many arguments hurts
usability; it's a big burden for a user to figure out 20 arguments of a
function just to get started.
`pg.detour` provides another option for object replacement in Python, which
creates a context in which some source classes can be detoured to specified
destination classes or functions. For example, the code snippet below will
detour instantiation of class A to class B, and vice versa::
class A:
pass
class B:
pass
# Exchange class A and class B.
with pg.detour([(A, B), (B, A)]):
a = A() # a is a B object.
b = B() # b is an A object.
Detour destination can be a function, which allows users to intercept the
arguments passed to the class constructor. For example::
class Foo:
def __init__(self, value):
self.value = value
class Bar:
def __init__(self, value):
self.value = value
def detoured_foo(cls, value):
# cls is the original class before detour.
return Bar(value + 1)
with pg.detour([(Foo, detoured_foo)]):
f = Foo(1) # f will be Bar(2).
Detour can be nested. The outer scope mappings take precedence over the
mappings from the inner scope, allowing users to change object creation
behaviors from the outside. For example, the following code will detour
class A to class C::
with pg.detour([(A, C)]):
with pg.detour([(A, B)]):
a = A() # a is a C object.
Detour is transitive across the inner and outer scopes. For example, the code
below will detour class A to class C through B::
with pg.detour([(B, C)]):
a1 = A() # a1 is an A object.
with pg.detour([(A, B)]):
a2 = A() # a2 is a C object. (A -> B -> C)
Detour is thread-safe.
Args:
mappings: A sequence of tuple (src_cls, dest_cls_or_fn) as mappings for the
detour - 'src_cls' is the source class to be detoured, while
'dest_cls_or_fn' is the destination class or function. When it's a class,
its `__init__` method should have the same signature as the `__init__` of
the original class. When it's a function, it should accept a positional
argument `cls`, for passing the original class that is being detoured,
followed by all the arguments that the original class should accept. For
example, a class with `__init__(self, x, *args, y, **kwargs)` can be
detoured to a function with signature `(cls, x, *args, y, **kwargs)`.
Yields:
Resolved detour mappings.
Raises:
TypeError: If the first item in each mapping is not a class, or the second
item in each mapping is neither a class nor a function.
|
def get_original_new(self, src_cls):
"""Returns the original new method of source cls."""
if not _is_detoured_new(src_cls.__new__):
orig_new = src_cls.__new__
else:
# NOTE(daiyip): there are usually 3 patterns in implementing __new__.
# 1) call super.__new__ to return an instance.
# 2) explicitly call object.__new__ to return an instance.
# 3) return an instance from another class.
#
# The following code aims to support case #1 by mimicking the call
# convention of super.__new__ without access to the super object.
# We implement this by maintaining a call history of `__new__` method
# returned by `get_original_new` for each top-most call to
# `_maybe_detour_new`. Based on the history, we always return the next
# __new__ along the inheritance hierarchy. For example, for code:
#
# ```
# class A:
# def __new__(cls, *args, **kwargs):
# return super(A, cls).__new__(cls, *args, **kwargs)
#
# class B:
# def __new__(cls, *args, **kwargs):
# return super(B, cls).__new__(cls, *args, **kwargs)
#
# class C(A, B):
# pass
# ```
# when we detour A and B to other classes, their `__new__` method will be
# replaced with `_maybe_detoured_new`. As we create an object of C, it
# will call `C.__new__`, which inherits the `_maybe_detoured_new` assigned
# to `A.__new__`. `_maybe_detoured_new` calls `get_original_new` on class
# C, which should return the original `A.__new__`. It then executes
# `super(A, cls).__new__`, which triggers `_maybe_detoured_new` method
# again assigned to `B.__new__`. In such case, we cannot differentiate the
# first call to `_maybe_detoured_new` (C.__new__) from this call, since
# both take class C as the cls argument. However, by assuming that nested
# `_maybe_detoured_new` call should always reflect the `super.__new__`
# call convention, we can store the call history for these invoked __new__
# methods, and return the one that is one-step closer to `object.__new__`.
# This may not work for the most complicated __new__ customization, but
# should work well for most __new__ implementations.
orig_new = self._original_new.get(src_cls, object.__new__)
if orig_new is object.__new__ or orig_new in self._new_stack:
for base in src_cls.__bases__:
base_new = self.get_original_new(base)
if base_new is not object.__new__ and base_new not in self._new_stack:
orig_new = base_new
break
return orig_new
|
(mappings: Sequence[Tuple[Type[Any], Union[Type[Any], function]]])
|
41,536 |
pyglove.core.symbolic.diff
|
diff
|
Inspect the symbolic diff between two objects.
For example::
@pg.members([
('x', pg.Any()),
('y', pg.Any())
])
class A(pg.Object):
pass
@pg.members([
('z', pg.Any().noneable())
])
class B(A):
pass
# Diff the same object.
pg.diff(A(1, 2), A(1, 2))
>> No diff
# Diff the same object with mode 'same'.
pg.diff(A(1, 2), A(1, 2), mode='same')
>> A(1, 2)
# Diff different objects of the same type.
pg.diff(A(1, 2), A(1, 3))
>> A(
>> y = Diff(
>> left=2,
>> right=3
>> )
>> )
# Diff objects of different type.
pg.diff(A(1, 2), B(1, 3))
>> Diff(
>> left = A(
>> x = 1,
>> y = 2
>> ),
>> right = B(
>> x = 1,
>> y = 3,
>> z = None
>> )
>> )
# Diff objects of different type with collapse.
pg.diff(A(1, 2), B(1, 3), collapse=True)
>> A|B (
>> y = Diff(
>> left = 2,
>> right = 3,
>> ),
>> z = Diff(
>> left = MISSING,
>> right = None
>> )
>> )
# Diff objects of different type with collapse and flatten.
# Object type is included in key '_type'.
pg.diff(A(1, pg.Dict(a=1)), B(1, pg.Dict(a=2)), collapse=True, flatten=True)
>> {
>> 'y.a': Diff(1, 2),
>> 'z': Diff(MISSING, None),
>> '_type': Diff(A, B)
>> }
Args:
left: The left object to compare.
right: The right object to compare.
flatten: If True, returns a level-1 dict with diff keys flattened. Otherwise
preserve the hierarchy of the diff result.
collapse: One of a boolean value, string or a callable object that indicates
whether to collapse two different values. The default value 'same_type'
means only collapse when the two values are of the same type.
mode: Diff mode, should be one of ['diff', 'same', 'both']. For 'diff' mode
(the default), the return value contains only different values. For 'same'
mode, the return value contains only same values. For 'both', the return
value contains both different and same values.
Returns:
A `Diff` object when flatten is False. Otherwise a dict of string (key path)
to `Diff`.
|
def diff(
left: Any,
right: Any,
flatten: bool = False,
collapse: Union[bool, str, Callable[[Any, Any], bool]] = 'same_type',
mode: str = 'diff') -> object_utils.Nestable[Diff]:
"""Inspect the symbolic diff between two objects.
For example::
@pg.members([
('x', pg.Any()),
('y', pg.Any())
])
class A(pg.Object):
pass
@pg.members([
('z', pg.Any().noneable())
])
class B(A):
pass
# Diff the same object.
pg.diff(A(1, 2), A(1, 2))
>> No diff
# Diff the same object with mode 'same'.
pg.diff(A(1, 2), A(1, 2), mode='same')
>> A(1, 2)
# Diff different objects of the same type.
pg.diff(A(1, 2), A(1, 3))
>> A(
>> y = Diff(
>> left=2,
>> right=3
>> )
>> )
# Diff objects of different type.
pg.diff(A(1, 2), B(1, 3))
>> Diff(
>> left = A(
>> x = 1,
>> y = 2
>> ),
>> right = B(
>> x = 1,
>> y = 3,
>> z = None
>> )
>> )
# Diff objects of different type with collapse.
pg.diff(A(1, 2), B(1, 3), collapse=True)
>> A|B (
>> y = Diff(
>> left = 2,
>> right = 3,
>> ),
>> z = Diff(
>> left = MISSING,
>> right = None
>> )
>> )
# Diff objects of different type with collapse and flatten.
# Object type is included in key '_type'.
pg.diff(A(1, pg.Dict(a=1)), B(1, pg.Dict(a=2)), collapse=True, flatten=True)
>> {
>> 'y.a': Diff(1, 2),
>> 'z': Diff(MISSING, None),
>> '_type': Diff(A, B)
>> }
Args:
left: The left object to compare.
right: The right object to compare.
flatten: If True, returns a level-1 dict with diff keys flattened. Otherwise
preserve the hierarchy of the diff result.
collapse: One of a boolean value, string or a callable object that indicates
whether to collapse two different values. The default value 'same_type'
means only collapse when the two values are of the same type.
mode: Diff mode, should be one of ['diff', 'same', 'both']. For 'diff' mode
(the default), the return value contains only different values. For 'same'
mode, the return value contains only same values. For 'both', the return
value contains both different and same values.
Returns:
A `Diff` object when flatten is False. Otherwise a dict of string (key path)
to `Diff`.
"""
def _should_collapse(left, right):
if isinstance(left, dict):
if isinstance(right, dict):
return True
elif isinstance(left, list):
return isinstance(right, list)
if (isinstance(left, (dict, base.Symbolic))
and isinstance(right, (dict, base.Symbolic))):
if collapse == 'same_type':
return type(left) is type(right)
elif callable(collapse):
return collapse(left, right)
elif isinstance(collapse, bool):
return collapse
else:
raise ValueError(f'Unsupported `collapse` value: {collapse!r}')
else:
return False
def _add_child_diff(diff_container, key, value, child_has_diff):
if ((mode != 'same' and child_has_diff)
or (mode != 'diff' and not child_has_diff)):
diff_container[key] = value
def _get_container_ops(container):
if isinstance(container, dict):
return container.__contains__, container.__getitem__, container.items
else:
assert isinstance(container, base.Symbolic)
return container.sym_hasattr, container.sym_getattr, container.sym_items
def _diff(x, y) -> Tuple[object_utils.Nestable[Diff], bool]:
if x is y or x == y:
return (Diff(x, y), False)
if not _should_collapse(x, y):
return (Diff(x, y), True)
diff_value, has_diff = {}, False
if isinstance(x, list):
assert isinstance(y, list)
def _child(l, index):
return l[index] if index < len(l) else Diff.MISSING
for i in range(max(len(x), len(y))):
child_diff, child_has_diff = _diff(_child(x, i), _child(y, i))
has_diff = has_diff or child_has_diff
_add_child_diff(diff_value, str(i), child_diff, child_has_diff)
diff_value = Diff(pg_list.List, pg_list.List, children=diff_value)
else:
assert isinstance(x, (dict, base.Symbolic))
assert isinstance(y, (dict, base.Symbolic))
x_haskey, _, x_items = _get_container_ops(x)
y_haskey, y_getitem, y_items = _get_container_ops(y)
for k, xv in x_items():
yv = y_getitem(k) if y_haskey(k) else Diff.MISSING
child_diff, child_has_diff = _diff(xv, yv)
has_diff = has_diff or child_has_diff
_add_child_diff(diff_value, k, child_diff, child_has_diff)
for k, yv in y_items():
if not x_haskey(k):
child_diff, _ = _diff(Diff.MISSING, yv)
has_diff = True
_add_child_diff(diff_value, k, child_diff, True)
xt, yt = type(x), type(y)
same_type = xt is yt
if not same_type:
has_diff = True
if flatten:
# Put the type difference under key '_type'. Since symbolic
# field names will not start with an underscore, there should be
# no clash.
if not same_type or mode != 'diff':
diff_value['_type'] = Diff(xt, yt)
else:
diff_value = Diff(xt, yt, children=diff_value)
return diff_value, has_diff
diff_value, has_diff = _diff(left, right)
if not has_diff and mode == 'diff':
diff_value = Diff()
if flatten:
diff_value = object_utils.flatten(diff_value)
return diff_value
|
(left: Any, right: Any, flatten: bool = False, collapse: Union[bool, str, Callable[[Any, Any], bool]] = 'same_type', mode: str = 'diff') -> Union[Any, pyglove.core.symbolic.diff.Diff]
|
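The `collapse` argument also accepts a callable. A hedged sketch, reusing classes `A` and `B` from the docstring above (`B` subclasses `A`)::

# Collapse whenever both sides are instances of A, so the result
# reports per-field diffs instead of one whole-object Diff.
result = pg.diff(
    A(1, 2), B(1, 3),
    collapse=lambda l, r: isinstance(l, A) and isinstance(r, A))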
41,537 |
pyglove.core.hyper.object_template
|
dna_spec
|
Returns the DNASpec from a (maybe) hyper value.
Example::
hyper = pg.Dict(x=pg.oneof([1, 2, 3]), y=pg.oneof(['a', 'b']))
spec = pg.dna_spec(hyper)
assert spec.space_size == 6
assert len(spec.decision_points) == 2
print(spec.decision_points)
# Select a partial space with `where` argument.
spec = pg.dna_spec(hyper, where=lambda x: len(x.candidates) == 2)
assert spec.space_size == 2
assert len(spec.decision_points) == 1
See also:
* :class:`pyglove.DNASpec`
* :class:`pyglove.DNA`
Args:
value: A (maybe) hyper value.
where: Function to filter hyper primitives. If None, all hyper primitives
from `value` will be included in the encoding/decoding process. Otherwise
only the hyper primitives on which 'where' returns True will be included.
`where` can be very useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
Returns:
A DNASpec object, which represents the search space from algorithm's view.
|
def dna_spec(
value: Any,
where: Optional[Callable[[base.HyperPrimitive], bool]] = None
) -> geno.DNASpec:
"""Returns the DNASpec from a (maybe) hyper value.
Example::
hyper = pg.Dict(x=pg.oneof([1, 2, 3]), y=pg.oneof(['a', 'b']))
spec = pg.dna_spec(hyper)
assert spec.space_size == 6
assert len(spec.decision_points) == 2
print(spec.decision_points)
# Select a partial space with `where` argument.
spec = pg.dna_spec(hyper, where=lambda x: len(x.candidates) == 2)
assert spec.space_size == 2
assert len(spec.decision_points) == 1
See also:
* :class:`pyglove.DNASpec`
* :class:`pyglove.DNA`
Args:
value: A (maybe) hyper value.
where: Function to filter hyper primitives. If None, all hyper primitives
from `value` will be included in the encoding/decoding process. Otherwise
only the hyper primitives on which 'where' returns True will be included.
`where` can be very useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
Returns:
A DNASpec object, which represents the search space from algorithm's view.
"""
return template(value, where).dna_spec()
|
(value: Any, where: Optional[Callable[[pyglove.core.hyper.base.HyperPrimitive], bool]] = None) -> pyglove.core.geno.base.DNASpec
|
41,538 |
pyglove.core.object_utils.docstr_utils
|
docstr
|
Gets the structured docstring of a Python symbol.
|
def docstr(symbol: Any) -> Optional[DocStr]:
"""Gets structure docstring of a Python symbol."""
docstr_text = getattr(symbol, '__doc__', None)
return DocStr.parse(docstr_text) if docstr_text else None
|
(symbol: Any) -> Optional[pyglove.core.object_utils.docstr_utils.DocStr]
|
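A usage sketch (the `args` and `description` accessors below are assumptions based on `pg.DocStr`, not confirmed by the record above)::

def add(x, y):
  """Returns the sum.

  Args:
    x: The first operand.
    y: The second operand.
  """
  return x + y

ds = pg.docstr(add)
assert ds is not None
# Assumed accessors on the parsed DocStr object.
assert ds.args['x'].description == 'The first operand.'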
41,540 |
pyglove.core.symbolic.flags
|
enable_type_check
|
Returns a context manager to enable or disable runtime type check.
`enable_type_check` is thread-safe and can be nested. For example,
in the following code, runtime type check will be applied to `a` but not to `b`::
with pg.enable_type_check(False):
with pg.enable_type_check(True):
a = pg.Dict(x=1, value_spec=pg.typing.Dict([('x', pg.typing.Int())]))
b = pg.Dict(y=1, value_spec=pg.typing.Dict([('x', pg.typing.Int())]))
Args:
enabled: If True, enable runtime type check in current scope.
Otherwise, disable runtime type check.
Returns:
A context manager for allowing/disallowing runtime type check.
|
def enable_type_check(enabled: bool = True) -> ContextManager[None]:
"""Returns a context manager to enable or disable runtime type check.
`enable_type_check` is thread-safe and can be nested. For example,
in the following code, runtime type check will be applied to `a` but not to `b`::
with pg.enable_type_check(False):
with pg.enable_type_check(True):
a = pg.Dict(x=1, value_spec=pg.typing.Dict([('x', pg.typing.Int())]))
b = pg.Dict(y=1, value_spec=pg.typing.Dict([('x', pg.typing.Int())]))
Args:
enabled: If True, enable runtime type check in current scope.
Otherwise, disable runtime type check.
Returns:
A context manager for allowing/disallowing runtime type check.
"""
return thread_local.thread_local_value_scope(
_TLS_ENABLE_TYPE_CHECK, enabled, True
)
|
(enabled: bool = True) -> ContextManager[NoneType]
|
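A short sketch of the disabled scope, following the docstring example above::

spec = pg.typing.Dict([('x', pg.typing.Int())])
with pg.enable_type_check(False):
  # No error raised: runtime type check is disabled in this scope.
  d = pg.Dict(x='not-an-int', value_spec=spec)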
41,541 |
pyglove.core.symbolic.base
|
eq
|
Compares whether two values are equal, using symbolic equality if possible.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
def sym_eq(self, right):
if super().sym_eq(right):
return True
return pg.eq(self.x, right)
class B:
pass
assert pg.eq(1, 1)
assert pg.eq(A(1), A(1))
# This is True since A has overridden `sym_eq`.
assert pg.eq(A(1), 1)
# Objects of B are compared by references.
assert not pg.eq(A(B()), A(B()))
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if left and right are equal or symbolically equal; otherwise False.
|
def eq(left: Any, right: Any) -> bool:
"""Compares if two values are equal. Use symbolic equality if possible.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
def sym_eq(self, right):
if super().sym_eq(right):
return True
return pg.eq(self.x, right)
class B:
pass
assert pg.eq(1, 1)
assert pg.eq(A(1), A(1))
# This is True since A has overridden `sym_eq`.
assert pg.eq(A(1), 1)
# Objects of B are compared by references.
assert not pg.eq(A(B()), A(B()))
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if left and right are equal or symbolically equal; otherwise False.
"""
# NOTE(daiyip): the default behavior for dict/list/tuple comparison is that
# it compares the elements using __eq__, __ne__. For symbolic comparison on
# these container types, we need to change the behavior by using symbolic
# comparison on their items.
if left is right:
return True
if ((isinstance(left, list) and isinstance(right, list))
or (isinstance(left, tuple) and isinstance(right, tuple))):
if len(left) != len(right):
return False
for x, y in zip(left, right):
if ne(x, y):
return False
return True
elif isinstance(left, dict):
if (not isinstance(right, dict)
or len(left) != len(right)
or set(left.keys()) != set(right.keys())):
return False
# NOTE(daiyip): pg.Dict.__getitem__ will trigger inferred value
# evaluation, therefore we always get its symbolic form during traversal.
left_items = left.sym_items if isinstance(left, Symbolic) else left.items
right_item = (
right.sym_getattr if isinstance(right, Symbolic) else right.__getitem__)
for k, v in left_items():
if ne(v, right_item(k)):
return False
return True
# We compare sym_eq with Symbolic.sym_eq to avoid endless recursion.
elif (hasattr(left, 'sym_eq')
and not inspect.isclass(left)
and left.sym_eq.__code__ is not Symbolic.sym_eq.__code__):
return left.sym_eq(right)
elif (hasattr(right, 'sym_eq')
and not inspect.isclass(right)
and right.sym_eq.__code__ is not Symbolic.sym_eq.__code__):
return right.sym_eq(left)
# Compare two maybe callable objects.
return pg_typing.callable_eq(left, right)
|
(left: Any, right: Any) -> bool
|
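As the implementation above shows, plain containers are compared element-wise with symbolic equality. A hedged sketch, reusing class `A` from the docstring::

assert pg.eq([A(1)], [A(1)])
assert pg.eq({'x': A(1)}, {'x': A(1)})
assert pg.eq((A(1),), (A(1),))
# `pg.ne` is the negation of `pg.eq`.
assert pg.ne(A(1), A(2))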
41,543 |
pyglove.core.hyper.evolvable
|
evolve
|
An evolvable symbolic value.
Example::
@pg.symbolize
@dataclasses.dataclass
class Foo:
x: int
y: int
@pg.symbolize
@dataclasses.dataclass
class Bar:
a: int
b: int
# Defines possible transitions.
def node_transform(location, value, parent):
if isinstance(value, Foo):
return Bar(value.x, value.y)
if location.key == 'x':
return random.choice([1, 2, 3])
if location.key == 'y':
return random.choice([3, 4, 5])
v = pg.evolve(Foo(1, 3), node_transform)
See also:
* :class:`pyglove.hyper.Evolvable`
* :func:`pyglove.oneof`
* :func:`pyglove.manyof`
* :func:`pyglove.permutate`
* :func:`pyglove.floatv`
Args:
initial_value: The initial value to evolve.
node_transform: A callable object that takes information of the value to
operate (e.g. location, old value, parent node) and returns a new value as
a replacement for the node. Such information allows users to not only
access the mutation node, but the entire symbolic tree if needed, allowing
complex mutation rules to be written with ease - for example - check
adjacent nodes while modifying a list element. This function is designed
to take care of both node replacements and node insertions. When insertion
happens, the old value for the location will be `pg.MISSING_VALUE`. See
`pg.composing.SeenObjectReplacer` as an example.
weights: An optional callable object that returns the unnormalized mutation
probabilities (i.e. they don't have to sum to 1.0) for all the nodes in
the symbolic tree, based on (mutation type, location, old value, parent
node). If None, all the locations and mutation types will be sampled
uniformly.
name: An optional name of the decision point.
hints: An optional hint object for the decision point.
Returns:
A `pg.hyper.Evolvable` object.
|
def evolve(
initial_value: symbolic.Symbolic,
node_transform: Callable[
[
object_utils.KeyPath, # Location.
Any, # Old value.
# pg.MISSING_VALUE for insertion.
symbolic.Symbolic, # Parent node.
],
Any # Replacement.
],
*,
weights: Optional[Callable[
[
MutationType, # Mutation type.
object_utils.KeyPath, # Location.
Any, # Value.
symbolic.Symbolic, # Parent.
],
float # Mutation weight.
]] = None, # pylint: disable=bad-whitespace
name: Optional[str] = None,
hints: Optional[Any] = None) -> Evolvable:
"""An evolvable symbolic value.
Example::
@pg.symbolize
@dataclasses.dataclass
class Foo:
x: int
y: int
@pg.symbolize
@dataclasses.dataclass
class Bar:
a: int
b: int
# Defines possible transitions.
def node_transform(location, value, parent):
if isinstance(value, Foo):
return Bar(value.x, value.y)
if location.key == 'x':
return random.choice([1, 2, 3])
if location.key == 'y':
return random.choice([3, 4, 5])
v = pg.evolve(Foo(1, 3), node_transform)
See also:
* :class:`pyglove.hyper.Evolvable`
* :func:`pyglove.oneof`
* :func:`pyglove.manyof`
* :func:`pyglove.permutate`
* :func:`pyglove.floatv`
Args:
initial_value: The initial value to evolve.
node_transform: A callable object that takes information of the value to
operate (e.g. location, old value, parent node) and returns a new value as
a replacement for the node. Such information allows users to not only
access the mutation node, but the entire symbolic tree if needed, allowing
complex mutation rules to be written with ease - for example - check
adjacent nodes while modifying a list element. This function is designed
to take care of both node replacements and node insertions. When insertion
happens, the old value for the location will be `pg.MISSING_VALUE`. See
`pg.composing.SeenObjectReplacer` as an example.
weights: An optional callable object that returns the unnormalized mutation
probabilities (i.e. they don't have to sum to 1.0) for all the nodes in
the symbolic tree, based on (mutation type, location, old value, parent
node). If None, all the locations and mutation types will be sampled
uniformly.
name: An optional name of the decision point.
hints: An optional hint object for the decision point.
Returns:
A `pg.hyper.Evolvable` object.
"""
return Evolvable(
initial_value=initial_value, node_transform=node_transform,
weights=weights, name=name, hints=hints)
|
(initial_value: pyglove.core.symbolic.base.Symbolic, node_transform: Callable[[pyglove.core.object_utils.value_location.KeyPath, Any, pyglove.core.symbolic.base.Symbolic], Any], *, weights: Optional[Callable[[pyglove.core.hyper.evolvable.MutationType, pyglove.core.object_utils.value_location.KeyPath, Any, pyglove.core.symbolic.base.Symbolic], float]] = None, name: Optional[str] = None, hints: Optional[Any] = None) -> pyglove.core.hyper.evolvable.Evolvable
|
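The `weights` argument has no example in the docstring; a hedged sketch, reusing `Foo` and `node_transform` from the example above::

def weights(mutation_type, location, value, parent):
  # Bias mutation towards integer leaves; any positive
  # unnormalized weight is acceptable.
  del mutation_type, location, parent
  return 2.0 if isinstance(value, int) else 1.0

v = pg.evolve(Foo(1, 3), node_transform, weights=weights)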
41,544 |
pyglove.core.object_utils.common_traits
|
explicit_method_override
|
Decorator that marks a member method as explicitly overridden.
In PyGlove, many methods are managed by the framework - for example -
``pg.Object.__init__``. It's easy for users to override these methods
unconsciously. Therefore, we introduce this decorator to catch errors
when such overrides accidentally take place, while still allowing
advanced users to override them deliberately.
Usage::
class Foo(pg.Object):
@pg.explicit_method_override
def __init__(self, *args, **kwargs):
...
Args:
method: The method to be explicitly overridden.
Returns:
The original method with an explicit override stamp.
|
def explicit_method_override(method):
"""Decorator that marks a member method as explicitly overridden.
In PyGlove, many methods are managed by the framework - for example -
``pg.Object.__init__``. It's easy for users to override these methods
unconsciously. Therefore, we introduce this decorator to catch errors
when such overrides accidentally take place, while still allowing
advanced users to override them deliberately.
Usage::
class Foo(pg.Object):
@pg.explicit_method_override
def __init__(self, *args, **kwargs):
...
Args:
method: The method to be explicitly overridden.
Returns:
The original method with an explicit override stamp.
"""
setattr(method, '__explicit_override__', True)
return method
|
(method)
|
41,546 |
pyglove.core.hyper.numerical
|
floatv
|
A continuous value within a range.
Example::
# A continuous value within [0.0, 1.0]
v = pg.floatv(0.0, 1.0)
See also:
* :class:`pyglove.hyper.Float`
* :func:`pyglove.oneof`
* :func:`pyglove.manyof`
* :func:`pyglove.permutate`
* :func:`pyglove.evolve`
.. note::
Under symbolic mode (by default), `pg.floatv` returns a ``pg.hyper.Float``
object. Under dynamic evaluate mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
Args:
min_value: Minimum acceptable value (inclusive).
max_value: Maximum acceptable value (inclusive).
scale: An optional string as the scale of the range. Supported values
are None, 'linear', 'log', and 'rlog'.
If None, the feasible space is unscaled.
If `linear`, the feasible space is mapped to [0, 1] linearly.
If `log`, the feasible space is mapped to [0, 1] logarithmically with
formula `x -> log(x / min) / log(max / min)`.
If `rlog`, the feasible space is mapped to [0, 1] "reverse"
logarithmically, so that values close to `max_value` are spread
out more than points near `min_value`, with formula:
x -> 1.0 - log((max + min - x) / min) / log(max / min).
`min_value` must be positive if `scale` is not None.
Also, it depends on the search algorithm to decide whether this
information is used or not.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
Returns:
In symbolic mode, this function returns a `Float`.
In dynamic evaluate mode, this function returns a float value that is no
less than the `min_value` and no greater than the `max_value`.
If evaluated under an `pg.DynamicEvaluationContext.apply` scope,
this function will return a chosen float value from the controller
decisions.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return `min_value`.
|
def floatv(min_value: float,
max_value: float,
scale: Optional[str] = None,
*,
name: Optional[str] = None,
hints: Optional[Any] = None) -> Any:
"""A continuous value within a range.
Example::
# A continuous value within [0.0, 1.0]
v = pg.floatv(0.0, 1.0)
See also:
* :class:`pyglove.hyper.Float`
* :func:`pyglove.oneof`
* :func:`pyglove.manyof`
* :func:`pyglove.permutate`
* :func:`pyglove.evolve`
.. note::
Under symbolic mode (by default), `pg.floatv` returns a ``pg.hyper.Float``
object. Under dynamic evaluate mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
Args:
min_value: Minimum acceptable value (inclusive).
max_value: Maximum acceptable value (inclusive).
scale: An optional string as the scale of the range. Supported values
are None, 'linear', 'log', and 'rlog'.
If None, the feasible space is unscaled.
If `linear`, the feasible space is mapped to [0, 1] linearly.
If `log`, the feasible space is mapped to [0, 1] logarithmically with
formula `x -> log(x / min) / log(max / min)`.
If `rlog`, the feasible space is mapped to [0, 1] "reverse"
logarithmically, so that values close to `max_value` are spread
out more than points near `min_value`, with formula:
x -> 1.0 - log((max + min - x) / min) / log(max / min).
`min_value` must be positive if `scale` is not None.
Also, it depends on the search algorithm to decide whether this
information is used or not.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
Returns:
In symbolic mode, this function returns a `Float`.
In dynamic evaluate mode, this function returns a float value that is no
less than the `min_value` and no greater than the `max_value`.
If evaluated under an `pg.DynamicEvaluationContext.apply` scope,
this function will return a chosen float value from the controller
decisions.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return `min_value`.
"""
return Float(
min_value=min_value, max_value=max_value,
scale=scale, name=name, hints=hints)
|
(min_value: float, max_value: float, scale: Optional[str] = None, *, name: Optional[str] = None, hints: Optional[Any] = None) -> Any
|
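A numeric illustration of the 'log' scale formula described above (plain math, not a pyglove API)::

import math

lo, hi, x = 1e-4, 1e-1, 1e-2
# log(x / min) / log(max / min) maps [min, max] onto [0, 1];
# here x sits two thirds of the way along the log range.
assert abs(math.log(x / lo) / math.log(hi / lo) - 2 / 3) < 1e-9
v = pg.floatv(lo, hi, scale='log')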
41,548 |
pyglove.core.object_utils.formatting
|
format
|
Formats a (maybe) hierarchical value with flags.
Args:
value: The value to format.
compact: If True, this object will be formatted into a single line.
verbose: If True, this object will be formatted with verbosity.
Subclasses should define `verbosity` on their own.
root_indent: The start indent level for this object if the output is a
multi-line string.
list_wrap_threshold: A threshold in number of characters for wrapping a
list value in a single line.
strip_object_id: If True, format object as '<class-name>(...)' rather than
'object at <address>'.
include_keys: A set of keys to include from the top-level dict or object.
exclude_keys: A set of keys to exclude from the top-level dict or object.
Applicable only when `include_keys` is set to None.
**kwargs: Keyword arguments that will be passed through to child
``Formattable`` objects.
Returns:
A string representation for `value`.
|
def format(value: Any, # pylint: disable=redefined-builtin
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
list_wrap_threshold: int = 80,
strip_object_id: bool = False,
include_keys: Optional[Set[str]] = None,
exclude_keys: Optional[Set[str]] = None,
**kwargs) -> str:
"""Formats a (maybe) hierarchical value with flags.
Args:
value: The value to format.
compact: If True, this object will be formatted into a single line.
verbose: If True, this object will be formatted with verbosity.
Subclasses should define `verbosity` on their own.
root_indent: The start indent level for this object if the output is a
multi-line string.
list_wrap_threshold: A threshold in number of characters for wrapping a
list value in a single line.
strip_object_id: If True, format object as '<class-name>(...)' rather than
'object at <address>'.
include_keys: A set of keys to include from the top-level dict or object.
exclude_keys: A set of keys to exclude from the top-level dict or object.
Applicable only when `include_keys` is set to None.
**kwargs: Keyword arguments that will be passed through to child
``Formattable`` objects.
Returns:
A string representation for `value`.
"""
exclude_keys = exclude_keys or set()
def _indent(text, indent: int) -> str:
return ' ' * 2 * indent + text
def _should_include_key(key: str) -> bool:
if include_keys:
return key in include_keys
return key not in exclude_keys
def _format_child(v):
return format(v, compact=compact, verbose=verbose,
root_indent=root_indent + 1,
list_wrap_threshold=list_wrap_threshold,
strip_object_id=strip_object_id,
**kwargs)
if isinstance(value, common_traits.Formattable):
return value.format(compact=compact,
verbose=verbose,
root_indent=root_indent,
list_wrap_threshold=list_wrap_threshold,
strip_object_id=strip_object_id,
include_keys=include_keys,
exclude_keys=exclude_keys,
**kwargs)
elif isinstance(value, (list, tuple)):
# Always try compact representation if length is not too long.
open_bracket, close_bracket = bracket_chars(
BracketType.SQUARE if isinstance(value, list) else BracketType.ROUND)
s = [open_bracket]
s.append(', '.join([_format_child(elem) for elem in value]))
s.append(close_bracket)
s = [''.join(s)]
if not compact and len(s[-1]) > list_wrap_threshold:
s = [f'{open_bracket}\n']
s.append(',\n'.join([
_indent(_format_child(elem), root_indent + 1)
for elem in value
]))
s.append('\n')
s.append(_indent(close_bracket, root_indent))
elif isinstance(value, dict):
if compact or not value:
s = ['{']
s.append(', '.join([
f'{k!r}: {_format_child(v)}'
for k, v in value.items() if _should_include_key(k)
]))
s.append('}')
else:
s = ['{\n']
s.append(',\n'.join([
_indent(f'{k!r}: {_format_child(v)}', root_indent + 1)
for k, v in value.items() if _should_include_key(k)
]))
s.append('\n')
s.append(_indent('}', root_indent))
else:
if isinstance(value, str):
s = [repr(value)]
else:
s = [repr(value) if compact else str(value)]
if strip_object_id and 'object at 0x' in s[-1]:
s = [f'{value.__class__.__name__}(...)']
return ''.join(s)
|
(value: Any, compact: bool = False, verbose: bool = True, root_indent: int = 0, list_wrap_threshold: int = 80, strip_object_id: bool = False, include_keys: Optional[Set[str]] = None, exclude_keys: Optional[Set[str]] = None, **kwargs) -> str
|
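A couple of usage sketches matching the compact code paths above::

assert pg.format([1, 'foo'], compact=True) == "[1, 'foo']"
assert pg.format({'x': 1}, compact=True) == "{'x': 1}"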
41,549 |
pyglove.core.symbolic.base
|
from_json
|
Deserializes a (maybe) symbolic value from JSON value.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json = a1.to_json()
a2 = pg.from_json(json)
assert pg.eq(a1, a2)
Args:
json_value: Input JSON value.
allow_partial: Whether to allow the deserialized value to be partial.
root_path: KeyPath of loaded object in its object tree.
force_dict: If True, "_type" keys will be stripped before loading. As a
result, JSONConvertible objects will be returned as dict.
**kwargs: Allow passing through keyword arguments to from_json of specific
types.
Returns:
Deserialized value, which is
* pg.Dict for dict.
* pg.List for list.
* symbolic.Object for dict with '_type' property.
* value itself.
|
def from_json(json_value: Any,
*,
allow_partial: bool = False,
root_path: Optional[object_utils.KeyPath] = None,
force_dict: bool = False,
**kwargs) -> Any:
"""Deserializes a (maybe) symbolic value from JSON value.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json = a1.to_json()
a2 = pg.from_json(json)
assert pg.eq(a1, a2)
Args:
json_value: Input JSON value.
allow_partial: Whether to allow the deserialized value to be partial.
root_path: KeyPath of loaded object in its object tree.
force_dict: If True, "_type" keys will be stripped before loading. As a
result, JSONConvertible objects will be returned as dict.
**kwargs: Allow passing through keyword arguments to from_json of specific
types.
Returns:
Deserialized value, which is
* pg.Dict for dict.
* pg.List for list.
* symbolic.Object for dict with '_type' property.
* value itself.
"""
assert Symbolic.DictType is not None
if isinstance(json_value, Symbolic):
return json_value
if force_dict:
json_value = object_utils.json_conversion.strip_types(json_value)
kwargs.update({
'allow_partial': allow_partial,
'root_path': root_path,
})
if isinstance(json_value, list):
if (json_value
and json_value[0] == object_utils.JSONConvertible.TUPLE_MARKER):
if len(json_value) < 2:
raise ValueError(
object_utils.message_on_path(
f'Tuple should have at least one element '
f'besides \'{object_utils.JSONConvertible.TUPLE_MARKER}\'. '
f'Encountered: {json_value}', root_path))
return tuple([
from_json(v, allow_partial=allow_partial,
root_path=object_utils.KeyPath(i, root_path))
for i, v in enumerate(json_value[1:])
])
return Symbolic.ListType(json_value, **kwargs) # pytype: disable=not-callable # pylint: disable=not-callable
elif isinstance(json_value, dict):
if object_utils.JSONConvertible.TYPE_NAME_KEY not in json_value:
return Symbolic.DictType.from_json(json_value, **kwargs)
return object_utils.from_json(json_value, **kwargs)
return json_value
|
(json_value: Any, *, allow_partial: bool = False, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None, force_dict: bool = False, **kwargs) -> Any
|
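A round-trip sketch for the tuple handling shown in the code above (tuples are encoded as lists led by a marker element)::

t = pg.from_json(pg.to_json((1, 2)))
assert t == (1, 2)
assert isinstance(t, tuple)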
41,550 |
pyglove.core.symbolic.base
|
from_json_str
|
Deserializes a (maybe) symbolic object from a JSON string.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json_str = a1.to_json_str()
a2 = pg.from_json_str(json_str)
assert pg.eq(a1, a2)
Args:
json_str: JSON string.
allow_partial: If True, allow a partial symbolic object to be created.
Otherwise error will be raised on partial value.
root_path: The symbolic path used for the deserialized root object.
force_dict: If True, "_type" keys will be stripped before loading. As a
result, JSONConvertible objects will be returned as dict.
**kwargs: Additional keyword arguments that will be passed to
``pg.from_json``.
Returns:
A deserialized value.
|
def from_json_str(json_str: str,
*,
allow_partial: bool = False,
root_path: Optional[object_utils.KeyPath] = None,
force_dict: bool = False,
**kwargs) -> Any:
"""Deserialize (maybe) symbolic object from JSON string.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json_str = a1.to_json_str()
a2 = pg.from_json_str(json_str)
assert pg.eq(a1, a2)
Args:
json_str: JSON string.
allow_partial: If True, allow a partial symbolic object to be created.
Otherwise error will be raised on partial value.
root_path: The symbolic path used for the deserialized root object.
force_dict: If True, "_type" keys will be stripped before loading. As a
result, JSONConvertible objects will be returned as dict.
**kwargs: Additional keyword arguments that will be passed to
``pg.from_json``.
Returns:
A deserialized value.
"""
return from_json(
json.loads(json_str),
allow_partial=allow_partial,
root_path=root_path,
force_dict=force_dict,
**kwargs)
|
(json_str: str, *, allow_partial: bool = False, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None, force_dict: bool = False, **kwargs) -> Any
|
41,551 |
pyglove.core.symbolic.functor
|
functor
|
Function/Decorator for creating a symbolic function from a regular function.
Example::
# Create a symbolic function without specifying the
# validation rules for arguments.
@pg.functor
def foo(x, y):
return x + y
f = foo(1, 2)
assert f() == 3
# Create a symbolic function, specifying the validation
# rules for arguments 'a', 'b', 'args', and 'kwargs'.
@pg.functor([
('a', pg.typing.Int()),
('b', pg.typing.Float()),
('args', pg.typing.List(pg.typing.Int())),
(pg.typing.StrKey(), pg.typing.Int())
])
def bar(a, b, c, *args, **kwargs):
return a * b / c + sum(args) + sum(kwargs.values())
See :class:`pyglove.Functor` for more details on symbolic function.
Args:
args: A list of tuples that defines the schema for function arguments.
Please see `functor_class` for a detailed explanation of `args`.
returns: Optional value spec for return value.
base_class: Optional base class derived from `symbolic.Functor`. If None,
the returned functor class will inherit from `symbolic.Functor`.
**kwargs: Keyword arguments for infrequently used options. Acceptable
keywords are: * `serialization_key`: An optional string to be used as the
serialization key for the class during `sym_jsonify`. If None,
`cls.__type_name__` will be used. This is introduced for scenarios when we
want to relocate a class: before the downstream can recognize the new
location, we need the class to serialize using the previous key. *
`additional_keys`: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while still being able to
load existing serialized JSON values.
Returns:
A function that converts a regular function into a symbolic function.
|
def functor(
args: Optional[List[Union[
Tuple[Union[str, pg_typing.KeySpec], pg_typing.ValueSpec, str],
Tuple[Union[str, pg_typing.KeySpec], pg_typing.ValueSpec, str, Any]]]
] = None, # pylint: disable=bad-continuation
returns: Optional[pg_typing.ValueSpec] = None,
base_class: Optional[Type[Functor]] = None,
**kwargs):
"""Function/Decorator for creating symbolic function from regular function.
Example::
# Create a symbolic function without specifying the
# validation rules for arguments.
@pg.functor
def foo(x, y):
return x + y
f = foo(1, 2)
assert f() == 3
# Create a symbolic function, specifying the validation
# rules for arguments 'a', 'b', 'args', and 'kwargs'.
@pg.functor([
('a', pg.typing.Int()),
('b', pg.typing.Float()),
('args', pg.typing.List(pg.typing.Int())),
(pg.typing.StrKey(), pg.typing.Int())
])
def bar(a, b, c, *args, **kwargs):
return a * b / c + sum(args) + sum(kwargs.values())
See :class:`pyglove.Functor` for more details on symbolic function.
Args:
args: A list of tuples that defines the schema for function arguments.
Please see `functor_class` for a detailed explanation of `args`.
returns: Optional value spec for return value.
base_class: Optional base class derived from `symbolic.Functor`. If None,
the returned functor class will inherit from `symbolic.Functor`.
**kwargs: Keyword arguments for infrequently used options. Acceptable
keywords are: * `serialization_key`: An optional string to be used as the
serialization key for the class during `sym_jsonify`. If None,
`cls.__type_name__` will be used. This is introduced for scenarios when we
want to relocate a class: before the downstream can recognize the new
location, we need the class to serialize using the previous key. *
`additional_keys`: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while still being able to
load existing serialized JSON values.
Returns:
A function that converts a regular function into a symbolic function.
"""
if inspect.isfunction(args):
assert returns is None
return functor_class(
typing.cast(Callable[..., Any], args),
base_class=base_class,
add_to_registry=True,
**kwargs,
)
return lambda fn: functor_class( # pylint: disable=g-long-lambda # pytype: disable=wrong-arg-types
fn, args, returns,
base_class=base_class,
add_to_registry=True,
**kwargs)
|
(args: Optional[List[Union[Tuple[Union[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str], Tuple[Union[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str, Any]]]] = None, returns: Optional[pyglove.core.typing.class_schema.ValueSpec] = None, base_class: Optional[Type[pyglove.core.symbolic.functor.Functor]] = None, **kwargs)
|
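A supplementary sketch showing that functor arguments stay symbolic and can be rebound before the call::

@pg.functor
def scale(x, k=2):
  return x * k

s = scale(3)
assert s() == 6
# Rebind the symbolic argument and call again.
s.rebind(k=10)
assert s() == 30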
41,552 |
pyglove.core.symbolic.functor
|
functor_class
|
Returns a functor class from a function.
Args:
func: Function to be wrapped into a functor.
args: Symbolic args specification. `args` is a list of tuples, each of
which describes an argument from the input function. Each tuple is in the
format of (<argument-name>, <value-spec>, [description],
[metadata-objects]). `argument-name` - a `str` or `pg_typing.StrKey`
object. When `pg_typing.StrKey` is used, it describes the wildcard keyword
argument. `value-spec` - a `pg_typing.ValueSpec` object or equivalent,
e.g. primitive values which will be converted to a ValueSpec
implementation according to its type and used as its default value.
`description` - a string to describe the argument. `metadata-objects` - an
optional list of objects of any type, which can be used to generate code
according to the schema. There are two notable rules in filling `args`:
1) When `args` is None or arguments from the function signature are
missing from it, a `schema.Field` for each of these fields will be
automatically generated and inserted into `args`. That being said, every
argument of the input function will have a `schema.Field` counterpart in
`Functor.__schema__.fields`, sorted by the declaration order of each
argument in the function signature (rather than the order in `args`).
2) Default argument values are specified along with the function
definition as in regular Python functions, instead of being set at the
`schema.Field` level. But validation rules can be set using `args` and
apply to argument values.
For example::
@pg.functor([('c', pg.typing.Int(min_value=0), 'Arg c')])
def foo(a, b, c=1, **kwargs):
return a + b + c + sum(kwargs.values())
assert foo.schema.fields() == [
pg.typing.Field('a', pg.typing.Any(), 'Argument a.'),
pg.typing.Field('b', pg.typing.Any(), 'Argument b.'),
pg.typing.Field('c', pg.typing.Int(min_value=0), 'Arg c.'),
pg.typing.Field(
pg.typing.StrKey(), pg.typing.Any(), 'Other arguments.')
]
# Prebind a=1, b=2, with default value c=1.
assert foo(1, 2)() == 4
returns: Optional schema specification for the return value.
base_class: Optional base class (derived from `symbolic.Functor`). If None,
returned type will inherit from `Functor` directly.
auto_doc: If True, the descriptions of argument fields will be inherited
from the function docstring if they are not explicitly specified through
``args``.
auto_typing: If True, the value spec for constraining each argument will be
inferred from its annotation. Otherwise the value specs for all arguments
will be ``pg.typing.Any()``.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios when we want to relocate a class:
before the downstream can recognize the new location, we need the class to
serialize using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
add_to_registry: If True, the newly created functor class will be added to
the registry for deserialization.
Returns:
`symbolic.Functor` subclass that wraps input function.
Raises:
KeyError: names of symbolic arguments are not compatible with
function signature.
TypeError: types of symbolic arguments are not compatible with
function signature.
ValueError: default values of symbolic arguments are not compatible
with function signature.
|
def functor_class(
func: types.FunctionType,
args: Optional[List[Union[
Tuple[Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str],
Tuple[Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str, Any]]]
] = None, # pylint: disable=bad-continuation
returns: Optional[pg_typing.ValueSpec] = None,
base_class: Optional[Type[Functor]] = None,
*,
auto_doc: bool = False,
auto_typing: bool = False,
serialization_key: Optional[str] = None,
additional_keys: Optional[List[str]] = None,
add_to_registry: bool = False,
) -> Type[Functor]:
"""Returns a functor class from a function.
Args:
func: Function to be wrapped into a functor.
args: Symbolic args specification. `args` is a list of tuples, each
describing an argument from the input function. Each tuple is in the format
of (<argument-name>, <value-spec>, [description], [metadata-objects]).
`argument-name` - a `str` or `pg_typing.StrKey` object. When
`pg_typing.StrKey` is used, it describes the wildcard keyword argument.
`value-spec` - a `pg_typing.ValueSpec` object or equivalent, e.g.
primitive values which will be converted to a ValueSpec implementation
according to their type and used as default values. `description` - a
string to describe the argument. `metadata-objects` - an optional list of
any type, which can be used to generate code according to the schema.
There are notable rules in filling `args`: 1) When `args` is None or
arguments from the function signature are missing from it, a `schema.Field`
will be automatically generated and inserted into `args` for each of them.
That being said, every argument in the input function will have a
`schema.Field` counterpart in `Functor.__schema__.fields`, sorted by the
declaration order of each argument in the function signature (rather than
the order in `args`). 2) Default argument values are specified along with
the function definition as in regular Python functions, instead of being
set at the `schema.Field` level. But validation rules can be set using
`args` and apply to argument values.
For example::
@pg.functor([('c', pg.typing.Int(min_value=0), 'Arg c')])
def foo(a, b, c=1, **kwargs):
return a + b + c + sum(kwargs.values())
assert foo.schema.fields() == [
pg.typing.Field('a', pg.Any(), 'Argument a.'),
pg.typing.Field('b', pg.Any(), 'Argument b.'),
pg.typing.Field('c', pg.typing.Int(), 'Arg c.'),
pg.typing.Field(
pg.typing.StrKey(), pg.Any(), 'Other arguments.')
]
# Prebind a=1, b=2, with default value c=1.
assert foo(1, 2)() == 4
returns: Optional schema specification for the return value.
base_class: Optional base class (derived from `symbolic.Functor`). If None,
the returned type will inherit from `Functor` directly.
auto_doc: If True, the descriptions of argument fields will be inherited
from the function docstring if they are not explicitly specified through
``args``.
auto_typing: If True, the value spec for constraining each argument will be
inferred from its annotation. Otherwise the value specs for all arguments
will be ``pg.typing.Any()``.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios where we want to relocate a class:
before downstream code can recognize the new location, we need the class
to serialize using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
add_to_registry: If True, the newly created functor class will be added to
the registry for deserialization.
Returns:
`symbolic.Functor` subclass that wraps input function.
Raises:
KeyError: names of symbolic arguments are not compatible with
function signature.
TypeError: types of symbolic arguments are not compatible with
function signature.
ValueError: default values of symbolic arguments are not compatible
with function signature.
"""
if not inspect.isfunction(func):
raise TypeError(f'{func!r} is not a function.')
class _Functor(base_class or Functor):
"""Functor wrapper for input function."""
# The schema for function-based Functor will be inferred from the function
# signature. Therefore we do not infer the schema automatically during class
# creation.
auto_schema = False
# Do not infer symbolic fields from annotations, since this functor is
# created from function definition which does not have class-level
# attributes.
infer_symbolic_fields_from_annotations = True
def _call(self, *args, **kwargs):
return func(*args, **kwargs)
cls = typing.cast(Type[Functor], _Functor)
cls.__name__ = func.__name__
cls.__qualname__ = func.__qualname__
cls.__module__ = getattr(func, '__module__', 'wrapper')
cls.__doc__ = func.__doc__
# Enable automatic registration for subclass.
cls.auto_register = True
# Apply function schema.
schema = schema_utils.function_schema(
func, args, returns, auto_doc=auto_doc, auto_typing=auto_typing)
cls.apply_schema(schema)
# Register functor class for deserialization if needed.
if add_to_registry:
cls.register_for_deserialization(serialization_key, additional_keys)
return cls
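# Usage sketch (hedged): `functor_class` can also be called directly, without
# the `pg.functor` decorator. `scale` below is a hypothetical function used
# only for illustration.
#
# def scale(x, factor=2):
#   return x * factor
#
# ScaleFunctor = functor_class(scale)
# f = ScaleFunctor(2)   # Binds x=2; `factor` keeps its default of 2.
# assert f() == 4       # Invokes scale(2, factor=2).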
|
(func: function, args: Optional[List[Union[Tuple[Tuple[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str], Tuple[Tuple[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str, Any]]]] = None, returns: Optional[pyglove.core.typing.class_schema.ValueSpec] = None, base_class: Optional[Type[pyglove.core.symbolic.functor.Functor]] = None, *, auto_doc: bool = False, auto_typing: bool = False, serialization_key: Optional[str] = None, additional_keys: Optional[List[str]] = None, add_to_registry: bool = False) -> Type[pyglove.core.symbolic.functor.Functor]
|
41,554 |
pyglove.core.typing.type_conversion
|
get_converter
|
Get converter from source type to destination type.
|
def get_converter(
src: Type[Any], dest: Union[Type[Any], Tuple[Type[Any], ...]]
) -> Optional[Callable[[Any], Any]]:
"""Get converter from source type to destination type."""
dest_types = dest if isinstance(dest, tuple) else (dest,)
for dest in dest_types:
converter = _TYPE_CONVERTER_REGISTRY.get_converter(src, dest)
if converter is not None:
return converter
return None
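# Usage sketch (hedged): `register_converter` in this module registers a
# conversion function, which `get_converter` can then retrieve. The int-to-str
# conversion below is illustrative only.
#
# register_converter(int, str, str)
# converter = get_converter(int, str)
# assert converter is not None
# assert converter(1) == '1'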
|
(src: Type[Any], dest: Union[Type[Any], Tuple[Type[Any], ...]]) -> Optional[Callable[[Any], Any]]
|
41,555 |
pyglove.core.symbolic.flags
|
get_load_handler
|
Returns global load handler.
|
def get_load_handler() -> Optional[Callable[..., Any]]:
"""Returns global load handler."""
return _LOAD_HANDLER
|
() -> Optional[Callable[..., Any]]
|
41,556 |
pyglove.core.symbolic.flags
|
get_save_handler
|
Returns global save handler.
|
def get_save_handler() -> Optional[Callable[..., Any]]:
"""Returns global save handler."""
return _SAVE_HANDLER
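# Usage sketch (hedged): the global handler can be swapped via
# `set_save_handler` from this module, e.g. to route `pg.save` through an
# in-memory store (the handler below is illustrative only):
#
# store = {}
# def save_to_store(value, path, *args, **kwargs):
#   store[path] = value.to_json_str()
# set_save_handler(save_to_store)
# assert get_save_handler() is save_to_store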
|
() -> Optional[Callable[..., Any]]
|
41,557 |
pyglove.core.typing.callable_signature
|
get_signature
|
Gets signature from a python callable.
|
def get_signature(func: Callable, auto_typing: bool = False) -> Signature: # pylint:disable=g-bare-generic
"""Gets signature from a python callable."""
return Signature.from_callable(func, auto_typing)
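# Usage sketch (hedged): inspecting a simple function. The `args` attribute
# (a list of argument descriptors, each with a `name` field) is assumed here
# from `Signature`'s interface.
#
# sig = get_signature(lambda x, y=1: x + y)
# assert [arg.name for arg in sig.args] == ['x', 'y']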
|
(func: Callable, auto_typing: bool = False) -> pyglove.core.typing.callable_signature.Signature
|
41,558 |
pyglove.core.symbolic.base
|
gt
|
Returns True if a value is symbolically greater than the other value.
Refer to :func:`pyglove.lt` for the definition of symbolic comparison.
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if the left value is symbolically greater than the right value.
|
def gt(left: Any, right: Any) -> bool:
"""Returns True if a value is symbolically greater than the other value.
Refer to :func:`pyglove.lt` for the definition of symbolic comparison.
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if the left value is symbolically greater than the right value.
"""
return lt(right, left) # pylint: disable=arguments-out-of-order
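# Usage sketch: since `gt` is `lt` with operands swapped, it follows the same
# cross-type ordering rules documented in `pg.lt`.
#
# assert gt(2, 1)
# assert gt('b', 'a')
# assert gt(['a', 'b'], ['a'])  # Equal prefix; the longer list is greater.
# assert gt(1, None)            # NoneType orders before int.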
|
(left: Any, right: Any) -> bool
|
41,559 |
pyglove.core.symbolic.base
|
sym_hash
|
Returns hash of value. Use symbolic hashing function if possible.
Example::
@pg.symbolize
class A:
def __init__(self, x):
self.x = x
assert hash(A(1)) != hash(A(1))
assert pg.hash(A(1)) == pg.hash(A(1))
assert pg.hash(pg.Dict(x=[A(1)])) == pg.hash(pg.Dict(x=[A(1)]))
Args:
x: Value for computing hash.
Returns:
The hash value for `x`.
|
def sym_hash(x: Any) -> int:
"""Returns hash of value. Use symbolic hashing function if possible.
Example::
@pg.symbolize
class A:
def __init__(self, x):
self.x = x
assert hash(A(1)) != hash(A(1))
assert pg.hash(A(1)) == pg.hash(A(1))
assert pg.hash(pg.Dict(x=[A(1)])) == pg.hash(pg.Dict(x=[A(1)]))
Args:
x: Value for computing hash.
Returns:
The hash value for `x`.
"""
if isinstance(x, Symbolic):
return x.sym_hash()
if inspect.isfunction(x):
return hash(x.__code__.co_code)
if inspect.ismethod(x):
return hash((sym_hash(x.__self__), x.__code__.co_code)) # pytype: disable=attribute-error
return hash(x)
|
(x: Any) -> int
|
41,562 |
pyglove.core.symbolic.base
|
is_abstract
|
Returns if the input value is abstract.
Example::
@pg.symbolize
class Foo:
def __init__(self, x):
pass
class Bar(pg.PureSymbolic):
pass
assert not pg.is_abstract(1)
assert not pg.is_abstract(Foo(1))
assert pg.is_abstract(Foo.partial())
assert pg.is_abstract(Bar())
assert pg.is_abstract(Foo(Bar()))
assert pg.is_abstract(Foo(pg.oneof([1, 2])))
Args:
x: Value to query against.
Returns:
True if value itself is partial/PureSymbolic or its child and nested
child fields contain partial/PureSymbolic values.
|
def is_abstract(x: Any) -> bool:
"""Returns if the input value is abstract.
Example::
@pg.symbolize
class Foo:
def __init__(self, x):
pass
class Bar(pg.PureSymbolic):
pass
assert not pg.is_abstract(1)
assert not pg.is_abstract(Foo(1))
assert pg.is_abstract(Foo.partial())
assert pg.is_abstract(Bar())
assert pg.is_abstract(Foo(Bar()))
assert pg.is_abstract(Foo(pg.oneof([1, 2])))
Args:
x: Value to query against.
Returns:
True if value itself is partial/PureSymbolic or its child and nested
child fields contain partial/PureSymbolic values.
"""
return object_utils.is_partial(x) or is_pure_symbolic(x)
|
(x: Any) -> bool
|
41,563 |
pyglove.core.symbolic.base
|
is_deterministic
|
Returns if the input value is deterministic.
Example::
@pg.symbolize
def foo(x, y):
pass
assert pg.is_deterministic(1)
assert pg.is_deterministic(foo(1, 2))
assert not pg.is_deterministic(pg.oneof([1, 2]))
assert not pg.is_deterministic(foo(pg.oneof([1, 2]), 3))
Args:
x: Value to query against.
Returns:
True if value itself is not NonDeterministic and its child and nested
child fields do not contain NonDeterministic values.
|
def is_deterministic(x: Any) -> bool:
"""Returns if the input value is deterministic.
Example::
@pg.symbolize
def foo(x, y):
pass
assert pg.is_deterministic(1)
assert pg.is_deterministic(foo(1, 2))
assert not pg.is_deterministic(pg.oneof([1, 2]))
assert not pg.is_deterministic(foo(pg.oneof([1, 2]), 3))
Args:
x: Value to query against.
Returns:
True if value itself is not NonDeterministic and its child and nested
child fields do not contain NonDeterministic values.
"""
return not contains(x, type=NonDeterministic)
|
(x: Any) -> bool
|
41,564 |
pyglove.core.object_utils.hierarchical
|
is_partial
|
Returns True if a value is partially bound.
|
def is_partial(value: Any) -> bool:
"""Returns True if a value is partially bound."""
def _check_full_bound(path: KeyPath, value: Any) -> bool:
del path
if MISSING_VALUE == value:
return False
elif (isinstance(value, common_traits.MaybePartial)
and not isinstance(value, (dict, list))):
return not value.is_partial
return True
return not traverse(value, _check_full_bound)
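# Usage sketch (hedged; assumes `import pyglove as pg` and a symbolic class
# with a required field):
#
# @pg.members([('x', pg.typing.Int())])
# class A(pg.Object):
#   pass
#
# assert is_partial(A.partial())  # `x` is missing.
# assert not is_partial(A(x=1))   # Fully bound.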
|
(value: Any) -> bool
|
41,565 |
pyglove.core.symbolic.base
|
is_pure_symbolic
|
Returns if the input value is pure symbolic.
Example::
class Bar(pg.PureSymbolic):
pass
@pg.symbolize
def foo(x, y):
pass
assert not pg.is_pure_symbolic(1)
assert not pg.is_pure_symbolic(foo(1, 2))
assert pg.is_pure_symbolic(Bar())
assert pg.is_pure_symbolic(foo(Bar(), 1))
assert pg.is_pure_symbolic(foo(pg.oneof([1, 2]), 1))
Args:
x: Value to query against.
Returns:
True if value itself is PureSymbolic or its child and nested
child fields contain PureSymbolic values.
|
def is_pure_symbolic(x: Any) -> bool:
"""Returns if the input value is pure symbolic.
Example::
class Bar(pg.PureSymbolic):
pass
@pg.symbolize
def foo(x, y):
pass
assert not pg.is_pure_symbolic(1)
assert not pg.is_pure_symbolic(foo(1, 2))
assert pg.is_pure_symbolic(Bar())
assert pg.is_pure_symbolic(foo(Bar(), 1))
assert pg.is_pure_symbolic(foo(pg.oneof([1, 2]), 1))
Args:
x: Value to query against.
Returns:
True if value itself is PureSymbolic or its child and nested
child fields contain PureSymbolic values.
"""
def _check_pure_symbolic(k, v, p):
del k, p
if (isinstance(v, PureSymbolic)
or (isinstance(v, Symbolic) and v.sym_puresymbolic)):
return TraverseAction.STOP
else:
return TraverseAction.ENTER
return not traverse(x, _check_pure_symbolic)
|
(x: Any) -> bool
|
41,566 |
pyglove.core.hyper.iter
|
iterate
|
Iterate a hyper value based on an algorithm.
Example::
hyper_dict = pg.Dict(x=pg.oneof([1, 2, 3]), y=pg.oneof(['a', 'b']))
# Get all examples from the hyper_dict.
assert list(pg.iter(hyper_dict)) == [
pg.Dict(x=1, y='a'),
pg.Dict(x=1, y='b'),
pg.Dict(x=2, y='a'),
pg.Dict(x=2, y='b'),
pg.Dict(x=3, y='a'),
pg.Dict(x=3, y='b'),
]
# Get the first two examples.
assert list(pg.iter(hyper_dict, 2)) == [
pg.Dict(x=1, y='a'),
pg.Dict(x=1, y='b'),
]
# Random sample examples, which is equivalent to `pg.random_sample`.
list(pg.iter(hyper_dict, 2, pg.geno.Random()))
# Iterate examples with feedback loop.
for d, feedback in pg.iter(
hyper_dict, 10,
pg.evolution.regularized_evolution(pg.evolution.mutators.Uniform())):
feedback(d.x)
# Only materialize selected parts.
assert list(
pg.iter(hyper_dict, where=lambda x: len(x.candidates) == 2)) == [
pg.Dict(x=pg.oneof([1, 2, 3]), y='a'),
pg.Dict(x=pg.oneof([1, 2, 3]), y='b'),
]
``pg.iter`` differs from `pg.sample` in that it's designed
for simple in-process iteration, which is handy for quickly generating
examples from algorithms without maintaining trial states. In contrast,
`pg.sample` is designed for distributed sampling, with parallel workers and
failover handling.
Args:
hyper_value: A hyper value that represents a space of instances.
num_examples: An optional integer as the max number of examples to
propose. If None, propose will return an iterator of infinite examples.
algorithm: An optional DNA generator. If None, Sweeping will be used, which
iterates examples in order.
where: Function to filter hyper primitives. If None, all hyper primitives
from `value` will be included in the encoding/decoding process. Otherwise
only the hyper primitives on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
force_feedback: If True, always return the Feedback object together
with the example. This is useful when the user wants to pass different
DNAGenerators to `pg.iter` and handle them uniformly.
Yields:
A tuple of (example, feedback_fn) if the algorithm needs a feedback or
`force_feedback` is True, otherwise the example.
Raises:
ValueError: when `hyper_value` is a constant value.
|
def iterate(hyper_value: Any,
num_examples: Optional[int] = None,
algorithm: Optional[geno.DNAGenerator] = None,
where: Optional[Callable[[base.HyperPrimitive], bool]] = None,
force_feedback: bool = False):
"""Iterate a hyper value based on an algorithm.
Example::
hyper_dict = pg.Dict(x=pg.oneof([1, 2, 3]), y=pg.oneof(['a', 'b']))
# Get all examples from the hyper_dict.
assert list(pg.iter(hyper_dict)) == [
pg.Dict(x=1, y='a'),
pg.Dict(x=1, y='b'),
pg.Dict(x=2, y='a'),
pg.Dict(x=2, y='b'),
pg.Dict(x=3, y='a'),
pg.Dict(x=3, y='b'),
]
# Get the first two examples.
assert list(pg.iter(hyper_dict, 2)) == [
pg.Dict(x=1, y='a'),
pg.Dict(x=1, y='b'),
]
# Random sample examples, which is equivalent to `pg.random_sample`.
list(pg.iter(hyper_dict, 2, pg.geno.Random()))
# Iterate examples with feedback loop.
for d, feedback in pg.iter(
hyper_dict, 10,
pg.evolution.regularized_evolution(pg.evolution.mutators.Uniform())):
feedback(d.x)
# Only materialize selected parts.
assert list(
pg.iter(hyper_dict, where=lambda x: len(x.candidates) == 2)) == [
pg.Dict(x=pg.oneof([1, 2, 3]), y='a'),
pg.Dict(x=pg.oneof([1, 2, 3]), y='b'),
]
``pg.iter`` differs from `pg.sample` in that it's designed
for simple in-process iteration, which is handy for quickly generating
examples from algorithms without maintaining trial states. In contrast,
`pg.sample` is designed for distributed sampling, with parallel workers and
failover handling.
Args:
hyper_value: A hyper value that represents a space of instances.
num_examples: An optional integer as the max number of examples to
propose. If None, propose will return an iterator of infinite examples.
algorithm: An optional DNA generator. If None, Sweeping will be used, which
iterates examples in order.
where: Function to filter hyper primitives. If None, all hyper primitives
from `value` will be included in the encoding/decoding process. Otherwise
only the hyper primitives on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
force_feedback: If True, always return the Feedback object together
with the example. This is useful when the user wants to pass different
DNAGenerators to `pg.iter` and handle them uniformly.
Yields:
A tuple of (example, feedback_fn) if the algorithm needs a feedback or
`force_feedback` is True, otherwise the example.
Raises:
ValueError: when `hyper_value` is a constant value.
"""
if isinstance(hyper_value, dynamic_evaluation.DynamicEvaluationContext):
dynamic_evaluation_context = hyper_value
spec = hyper_value.dna_spec
t = None
else:
t = object_template.template(hyper_value, where)
if t.is_constant:
raise ValueError(
f'\'hyper_value\' is a constant value: {hyper_value!r}.')
dynamic_evaluation_context = None
spec = t.dna_spec()
if algorithm is None:
algorithm = geno.Sweeping()
# NOTE(daiyip): algorithm can continue if it's already set up with the same
# DNASpec, or we will setup the algorithm with the DNASpec from the template.
if algorithm.dna_spec is None:
algorithm.setup(spec)
elif symbolic.ne(spec, algorithm.dna_spec):
raise ValueError(
f'{algorithm!r} has been set up with a different DNASpec. '
f'Existing: {algorithm.dna_spec!r}, New: {spec!r}.')
count = 0
while num_examples is None or count < num_examples:
try:
count += 1
dna = algorithm.propose()
if t is not None:
example = t.decode(dna)
else:
assert dynamic_evaluation_context is not None
example = lambda: dynamic_evaluation_context.apply(dna)
if force_feedback or algorithm.needs_feedback:
yield example, Feedback(algorithm, dna)
else:
yield example
except StopIteration:
return
|
(hyper_value: Any, num_examples: Optional[int] = None, algorithm: Optional[pyglove.core.geno.dna_generator.DNAGenerator] = None, where: Optional[Callable[[pyglove.core.hyper.base.HyperPrimitive], bool]] = None, force_feedback: bool = False)
|
41,651 |
pyglove.core.symbolic.base
|
load
|
Load a symbolic value using the global load handler.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
file = 'my_file.json'
a1.save(file)
a2 = pg.load(file)
assert pg.eq(a1, a2)
Args:
path: A path string for loading an object.
*args: Positional arguments that will be passed through to the global
load handler.
**kwargs: Keyword arguments that will be passed through to the global
load handler.
Returns:
Return value from the global load handler.
|
def load(path: str, *args, **kwargs) -> Any:
"""Load a symbolic value using the global load handler.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
file = 'my_file.json'
a1.save(file)
a2 = pg.load(file)
assert pg.eq(a1, a2)
Args:
path: A path string for loading an object.
*args: Positional arguments that will be passed through to the global
load handler.
**kwargs: Keyword arguments that will be passed through to the global
load handler.
Returns:
Return value from the global load handler.
"""
load_handler = flags.get_load_handler() or default_load_handler
value = load_handler(path, *args, **kwargs)
if flags.is_tracking_origin() and isinstance(value, Symbolic):
value.sym_setorigin(path, 'load')
return value
|
(path: str, *args, **kwargs) -> Any
|
41,653 |
pyglove.core.symbolic.base
|
lt
|
Returns True if a value is symbolically less than the other value.
Symbolic values are comparable by their symbolic representations. For common
types such as numbers and string, symbolic comparison returns the same value
as value comparisons. For example::
assert pg.lt(False, True) == (False < True)
assert pg.lt(0.1, 1) == (0.1 < 1)
assert pg.lt('a', 'ab') == ('a' < 'ab')
However, symbolic comparison can be applied on hierarchical values, for
example::
assert pg.lt(['a'], ['a', 'b'])
assert pg.lt(['a', 'b', 'c'], ['b'])
assert pg.lt({'x': 1}, {'x': 2})
assert pg.lt({'x': 1}, {'y': 1})
assert pg.lt(A(x=1), A(x=2))
Also, symbolic values of different types can be compared, for example::
assert pg.lt(pg.MISSING_VALUE, None)
assert pg.lt(None, 1)
assert pg.lt(1, 'abc')
assert pg.lt('abc', [])
assert pg.lt([], {})
assert pg.lt([], A(x=1))
The high-level idea is that a value with lower information entropy is less
than a value with higher information entropy. As a result, we know that
`pg.MISSING_VALUE` is the smallest among all values.
The order of symbolic representations is defined by the following rules:
1) If x and y are comparable by their values, they will be compared using
operator <. (e.g. bool, int, float, str)
2) If x and y are not directly comparable and are different in their types,
they will be compared based on their types. The order of different types
are: pg.MISSING_VALUE, NoneType, bool, int, float, str, list, tuple, set,
dict, functions/classes. When different functions/classes compare, their
order is determined by their qualified name.
3) If x and y are of the same type, which are symbolic containers (e.g. list,
dict, pg.Symbolic objects), their order will be determined by the order of
their first sub-nodes which are different. Therefore ['b'] is greater than
['a', 'b'], though the latter has 2 elements.
4) Non-symbolic classes can define method `sym_lt` to enable symbolic
comparison.
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if the left value is symbolically less than the right value.
|
def lt(left: Any, right: Any) -> bool:
"""Returns True if a value is symbolically less than the other value.
Symbolic values are comparable by their symbolic representations. For common
types such as numbers and string, symbolic comparison returns the same value
as value comparisons. For example::
assert pg.lt(False, True) == (False < True)
assert pg.lt(0.1, 1) == (0.1 < 1)
assert pg.lt('a', 'ab') == ('a' < 'ab')
However, symbolic comparison can be applied on hierarchical values, for
example::
assert pg.lt(['a'], ['a', 'b'])
assert pg.lt(['a', 'b', 'c'], ['b'])
assert pg.lt({'x': 1}, {'x': 2})
assert pg.lt({'x': 1}, {'y': 1})
assert pg.lt(A(x=1), A(x=2))
Also, symbolic values of different types can be compared, for example::
assert pg.lt(pg.MISSING_VALUE, None)
assert pg.lt(None, 1)
assert pg.lt(1, 'abc')
assert pg.lt('abc', [])
assert pg.lt([], {})
assert pg.lt([], A(x=1))
The high-level idea is that a value with lower information entropy is less
than a value with higher information entropy. As a result, we know that
`pg.MISSING_VALUE` is the smallest among all values.
The order of symbolic representations is defined by the following rules:
1) If x and y are comparable by their values, they will be compared using
operator <. (e.g. bool, int, float, str)
2) If x and y are not directly comparable and are different in their types,
they will be compared based on their types. The order of different types
are: pg.MISSING_VALUE, NoneType, bool, int, float, str, list, tuple, set,
dict, functions/classes. When different functions/classes compare, their
order is determined by their qualified name.
3) If x and y are of the same type, which are symbolic containers (e.g. list,
dict, pg.Symbolic objects), their order will be determined by the order of
their first sub-nodes which are different. Therefore ['b'] is greater than
['a', 'b'], though the latter has 2 elements.
4) Non-symbolic classes can define method `sym_lt` to enable symbolic
comparison.
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if the left value is symbolically less than the right value.
"""
# A fast type check can eliminate most cases.
if type(left) is not type(right):
tol = _type_order(left)
tor = _type_order(right)
# When tol == tor, this means different types are treated as same symbols.
# E.g. list and pg.List.
if tol != tor:
return tol < tor
# Most symbolic nodes are leaf, which are primitive types, therefore
# we detect such types to make `lt` to run faster.
if isinstance(left, (int, float, bool, str)):
return left < right
elif isinstance(left, list):
min_len = min(len(left), len(right))
for i in range(min_len):
l, r = left[i], right[i]
if not eq(l, r):
return lt(l, r)
# `left` and `right` are equal so far, so `left` is less than `right`
# only when left has a smaller length.
return len(left) < len(right)
elif isinstance(left, dict):
lkeys = list(left.keys())
rkeys = list(right.keys())
min_len = min(len(lkeys), len(rkeys))
for i in range(min_len):
kl, kr = lkeys[i], rkeys[i]
if kl == kr:
if not eq(left[kl], right[kr]):
return lt(left[kl], right[kr])
else:
return kl < kr
# `left` and `right` are equal so far, so `left` is less than `right`
# only when left has fewer keys.
return len(lkeys) < len(rkeys)
elif hasattr(left, 'sym_lt'):
return left.sym_lt(right)
return left < right
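# Sketch of rule 4 (hedged): a non-symbolic class can opt into symbolic
# comparison by defining `sym_lt`. `Version` is a hypothetical class.
#
# class Version:
#   def __init__(self, major, minor):
#     self.major, self.minor = major, minor
#   def sym_lt(self, other):
#     return (self.major, self.minor) < (other.major, other.minor)
#
# assert lt(Version(1, 0), Version(1, 2))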
|
(left: Any, right: Any) -> bool
|
41,654 |
pyglove.core.hyper.categorical
|
manyof
|
N choose K.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
# Chooses 2 distinct candidates.
v = pg.manyof(2, [1, 2, 3])
# Chooses 2 non-distinct candidates.
v = pg.manyof(2, [1, 2, 3], distinct=False)
# Chooses 2 distinct candidates sorted by their indices.
v = pg.manyof(2, [1, 2, 3], sorted=True)
# A complex type as candidate.
v1 = pg.manyof(2, ['a', {'x': 1}, A(1)])
# A hierarchical categorical choice:
v2 = pg.manyof(2, [
'foo',
'bar',
A(pg.oneof([1, 2, 3]))
])
.. note::
Under symbolic mode (by default), `pg.manyof` returns a ``pg.hyper.ManyOf``
object. Under dynamic evaluation mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
To use conditional search space in dynamic evaluate mode, the candidate
should be wrapped with a `lambda` function, which is not necessary under
symbolic mode. For example::
pg.manyof(2, [
lambda: pg.oneof([0, 1], name='sub_a'),
lambda: pg.floatv(0.0, 1.0, name='sub_b'),
lambda: pg.manyof(2, ['a', 'b', 'c'], name='sub_c')
], name='root')
See also:
* :class:`pyglove.hyper.ManyOf`
* :func:`pyglove.manyof`
* :func:`pyglove.floatv`
* :func:`pyglove.permutate`
* :func:`pyglove.evolve`
Args:
k: Number of choices to make. Should be no larger than the length of
`candidates` unless `distinct` is set to False.
candidates: Candidates to select from. Items of `candidates` can be of any
type; therefore they can have nested hyper primitives, which form a
hierarchical search space.
distinct: If True, each choice needs to be unique.
sorted: If True, choices are sorted by their indices in the
candidates.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
**kwargs: Keyword arguments for backward compatibility.
`choices_distinct`: Old name for `distinct`.
`choices_sorted`: Old name for `sorted`.
Returns:
In symbolic mode, this function returns a `Choices`.
In dynamic evaluate mode, this function returns a list of items in
`candidates`.
If evaluated under a `pg.DynamicEvaluationContext.apply` scope,
this function will return a list of selected candidates.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return a list of the first valid combination from the
`candidates`. For example::
# Evaluates to [0, 1, 2].
manyof(3, range(5))
# Evaluates to [0, 0, 0].
manyof(3, range(5), distinct=False)
|
def manyof(k: int,
candidates: Iterable[Any],
distinct: bool = True,
sorted: bool = False, # pylint: disable=redefined-builtin
*,
name: Optional[str] = None,
hints: Optional[Any] = None,
**kwargs) -> Any:
"""N choose K.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
# Chooses 2 distinct candidates.
v = pg.manyof(2, [1, 2, 3])
# Chooses 2 non-distinct candidates.
v = pg.manyof(2, [1, 2, 3], distinct=False)
# Chooses 2 distinct candidates sorted by their indices.
v = pg.manyof(2, [1, 2, 3], sorted=True)
# A complex type as candidate.
v1 = pg.manyof(2, ['a', {'x': 1}, A(1)])
# A hierarchical categorical choice:
v2 = pg.manyof(2, [
'foo',
'bar',
A(pg.oneof([1, 2, 3]))
])
.. note::
Under symbolic mode (by default), `pg.manyof` returns a ``pg.hyper.ManyOf``
object. Under dynamic evaluation mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
To use conditional search space in dynamic evaluate mode, the candidate
should be wrapped with a `lambda` function, which is not necessary under
symbolic mode. For example::
pg.manyof(2, [
lambda: pg.oneof([0, 1], name='sub_a'),
lambda: pg.floatv(0.0, 1.0, name='sub_b'),
lambda: pg.manyof(2, ['a', 'b', 'c'], name='sub_c')
], name='root')
See also:
* :class:`pyglove.hyper.ManyOf`
* :func:`pyglove.manyof`
* :func:`pyglove.floatv`
* :func:`pyglove.permutate`
* :func:`pyglove.evolve`
Args:
k: Number of choices to make. Should be no larger than the length of
`candidates` unless `distinct` is set to False.
candidates: Candidates to select from. Items of `candidates` can be of any
type; therefore they can have nested hyper primitives, which form a
hierarchical search space.
distinct: If True, each choice needs to be unique.
sorted: If True, choices are sorted by their indices in the
candidates.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
**kwargs: Keyword arguments for backward compatibility.
`choices_distinct`: Old name for `distinct`.
`choices_sorted`: Old name for `sorted`.
Returns:
In symbolic mode, this function returns a `Choices`.
In dynamic evaluate mode, this function returns a list of items in
`candidates`.
If evaluated under a `pg.DynamicEvaluationContext.apply` scope,
this function will return a list of selected candidates.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return a list of the first valid combination from the
`candidates`. For example::
# Evaluates to [0, 1, 2].
manyof(3, range(5))
# Evaluates to [0, 0, 0].
manyof(3, range(5), distinct=False)
"""
choices_distinct = kwargs.pop('choices_distinct', distinct)
choices_sorted = kwargs.pop('choices_sorted', sorted)
return ManyOf(
num_choices=k,
candidates=list(candidates),
choices_distinct=choices_distinct,
choices_sorted=choices_sorted,
name=name,
hints=hints)
|
(k: int, candidates: Iterable[Any], distinct: bool = True, sorted: bool = False, *, name: Optional[str] = None, hints: Optional[Any] = None, **kwargs) -> Any
|
41,655 |
pyglove.core.hyper.object_template
|
materialize
|
Materialize a (maybe) hyper value using a DNA or parameter dict.
Example::
hyper_dict = pg.Dict(x=pg.oneof(['a', 'b']), y=pg.floatv(0.0, 1.0))
# Materialize using DNA.
assert pg.materialize(
hyper_dict, pg.DNA([0, 0.5])) == pg.Dict(x='a', y=0.5)
# Materialize using key-value pairs.
# See `pg.DNA.from_dict` for more details.
assert pg.materialize(
hyper_dict, {'x': 0, 'y': 0.5}) == pg.Dict(x='a', y=0.5)
# Partially materialize.
v = pg.materialize(
hyper_dict, pg.DNA(0), where=lambda x: isinstance(x, pg.hyper.OneOf))
assert v == pg.Dict(x='a', y=pg.floatv(0.0, 1.0))
Args:
value: A (maybe) hyper value.
parameters: A DNA object or a dict of string (key path) to a
string (in format of '<selected_index>/<num_choices>' for
`geno.Choices`, or '<float_value>' for `geno.Float`), or their literal
values when `use_literal_values` is set to True.
use_literal_values: Applicable when `parameters` is a dict. If True, the
values in the dict will be from `geno.Choices.literal_values` for
`geno.Choices`.
where: Function to filter hyper primitives. If None, all hyper primitives
from `value` will be included in the encoding/decoding process. Otherwise
only the hyper primitives on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
Returns:
A materialized value.
Raises:
TypeError: if parameters is not a DNA or dict.
ValueError: if parameters cannot be decoded.
|
def materialize(
value: Any,
parameters: Union[geno.DNA, Dict[str, Any]],
use_literal_values: bool = True,
where: Optional[Callable[[base.HyperPrimitive], bool]] = None) -> Any:
"""Materialize a (maybe) hyper value using a DNA or parameter dict.
Example::
hyper_dict = pg.Dict(x=pg.oneof(['a', 'b']), y=pg.floatv(0.0, 1.0))
# Materialize using DNA.
assert pg.materialize(
hyper_dict, pg.DNA([0, 0.5])) == pg.Dict(x='a', y=0.5)
# Materialize using key-value pairs.
# See `pg.DNA.from_dict` for more details.
assert pg.materialize(
hyper_dict, {'x': 0, 'y': 0.5}) == pg.Dict(x='a', y=0.5)
# Partially materialize.
v = pg.materialize(
hyper_dict, pg.DNA(0), where=lambda x: isinstance(x, pg.hyper.OneOf))
assert v == pg.Dict(x='a', y=pg.floatv(0.0, 1.0))
Args:
value: A (maybe) hyper value.
parameters: A DNA object or a dict of string (key path) to a
string (in format of '<selected_index>/<num_choices>' for
`geno.Choices`, or '<float_value>' for `geno.Float`), or their literal
values when `use_literal_values` is set to True.
use_literal_values: Applicable when `parameters` is a dict. If True, the
values in the dict will be from `geno.Choices.literal_values` for
`geno.Choices`.
where: Function to filter hyper primitives. If None, all hyper primitives
from `value` will be included in the encoding/decoding process. Otherwise
only the hyper primitives on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
Returns:
A materialized value.
Raises:
TypeError: if parameters is not a DNA or dict.
ValueError: if parameters cannot be decoded.
"""
t = template(value, where)
if isinstance(parameters, dict):
dna = geno.DNA.from_parameters(
parameters=parameters,
dna_spec=t.dna_spec(),
use_literal_values=use_literal_values)
else:
dna = parameters
if not isinstance(dna, geno.DNA):
raise TypeError(
f'\'parameters\' must be a DNA or a dict of string to DNA values. '
f'Encountered: {dna!r}.')
return t.decode(dna)
|
(value: Any, parameters: Union[pyglove.core.geno.base.DNA, Dict[str, Any]], use_literal_values: bool = True, where: Optional[Callable[[pyglove.core.hyper.base.HyperPrimitive], bool]] = None) -> Any
|
41,656 |
pyglove.core.symbolic.ref
|
maybe_ref
|
Returns a reference if a value is not symbolic or already has a parent.
|
def maybe_ref(value: Any) -> Optional[Ref]:
"""Returns a reference if a value is not symbolic or already has a parent."""
  if isinstance(value, base.Symbolic):
    if value.sym_parent is None:
      return value
    return Ref(value)
  return Ref(value)  # Also wrap non-symbolic values, per the docstring.
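# Usage sketch (hedged; `A` is a hypothetical symbolic class):
#
# a = A(x=1)
# assert maybe_ref(a) is a   # Unattached symbolic value: returned as-is.
# d = pg.Dict(child=a)       # `a` now has a parent.
# r = maybe_ref(d.child)     # Wrapped in `Ref` to avoid deep copying.
# assert isinstance(r, Ref)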
|
(value: Any) -> Optional[pyglove.core.symbolic.ref.Ref]
|
41,657 |
pyglove.core.symbolic.object
|
members
|
Function/Decorator for declaring symbolic fields for ``pg.Object``.
Example::
@pg.members([
# Declare symbolic fields. Each field produces a symbolic attribute
# for its object, which can be accessed by `self.<field_name>`.
# Description is optional.
('x', pg.typing.Int(min_value=0, default=0), 'Description for `x`.'),
('y', pg.typing.Str(), 'Description for `y`.')
])
class A(pg.Object):
def sum(self):
return self.x + self.y
@pg.members([
# Override field 'x' inherited from class A and make it more restrictive.
('x', pg.typing.Int(max_value=10, default=5)),
# Add field 'z'.
('z', pg.typing.Bool().noneable())
])
class B(A):
pass
@pg.members([
# Declare dynamic fields: any keyword can be acceptable during `__init__`
# and can be accessed using `self.<field_name>`.
(pg.typing.StrKey(), pg.typing.Int())
])
class D(B):
pass
@pg.members([
# Declare dynamic fields: keywords starting with 'foo' are acceptable.
(pg.typing.StrKey('foo.*'), pg.typing.Int())
])
class E(pg.Object):
pass
See :class:`pyglove.typing.ValueSpec` for supported value specifications.
Args:
fields: A list of pg.typing.Field or equivalent tuple representation as
(<key>, <value-spec>, [description], [metadata-objects]). `key` should be
a string. `value-spec` should be pg_typing.ValueSpec classes or
equivalent, e.g. primitive values which will be converted to ValueSpec
implementation according to its type and used as its default value.
`description` is optional only when field overrides a field from its
parent class. `metadata-objects` is an optional list of any type, which
can be used to generate code according to the schema.
metadata: Optional dict of user objects as class-level metadata which will
be attached to class schema.
init_arg_list: An optional sequence of strings as the positional argument
list for `__init__`. This is helpful when symbolic attributes are
inherited from base classes or the user want to change its order. If not
provided, the `init_arg_list` will be automatically generated from
symbolic attributes defined from ``pg.members`` in their declaration
order, from the base classes to the subclass.
**kwargs: Keyword arguments for infrequently used options. Acceptable
keywords are: * `serialization_key`: An optional string to be used as the
serialization key for the class during `sym_jsonify`. If None,
`cls.__type_name__` will be used. This is introduced for scenarios where we
want to relocate a class: before downstream code can recognize the new
location, we need the class to serialize using the previous key. *
`additional_keys`: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
Returns:
A decorator function that registers the class or function with a schema
created from the fields.
Raises:
TypeError: Decorator cannot be applied on target class or keyword argument
provided is not supported.
KeyError: If type has already been registered in the registry.
ValueError: schema cannot be created from fields.
|
def members(
fields: List[Union[
pg_typing.Field,
Tuple[Union[str, pg_typing.KeySpec], pg_typing.ValueSpec, str],
Tuple[Union[str, pg_typing.KeySpec], pg_typing.ValueSpec, str, Any]]],
metadata: Optional[Dict[str, Any]] = None,
init_arg_list: Optional[Sequence[str]] = None,
**kwargs
) -> pg_typing.Decorator:
"""Function/Decorator for declaring symbolic fields for ``pg.Object``.
Example::
@pg.members([
# Declare symbolic fields. Each field produces a symbolic attribute
# for its object, which can be accessed by `self.<field_name>`.
# Description is optional.
('x', pg.typing.Int(min_value=0, default=0), 'Description for `x`.'),
('y', pg.typing.Str(), 'Description for `y`.')
])
class A(pg.Object):
def sum(self):
return self.x + self.y
@pg.members([
# Override field 'x' inherited from class A and make it more restrictive.
('x', pg.typing.Int(max_value=10, default=5)),
# Add field 'z'.
('z', pg.typing.Bool().noneable())
])
class B(A):
pass
@pg.members([
# Declare dynamic fields: any keyword can be acceptable during `__init__`
# and can be accessed using `self.<field_name>`.
(pg.typing.StrKey(), pg.typing.Int())
])
class D(B):
pass
@pg.members([
# Declare dynamic fields: keywords starting with 'foo' are acceptable.
(pg.typing.StrKey('foo.*'), pg.typing.Int())
])
class E(pg.Object):
pass
See :class:`pyglove.typing.ValueSpec` for supported value specifications.
Args:
fields: A list of pg.typing.Field or equivalent tuple representation as
(<key>, <value-spec>, [description], [metadata-objects]). `key` should be
a string. `value-spec` should be pg_typing.ValueSpec classes or
equivalent, e.g. primitive values which will be converted to ValueSpec
implementation according to its type and used as its default value.
`description` is optional only when field overrides a field from its
parent class. `metadata-objects` is an optional list of any type, which
can be used to generate code according to the schema.
metadata: Optional dict of user objects as class-level metadata which will
be attached to class schema.
init_arg_list: An optional sequence of strings as the positional argument
list for `__init__`. This is helpful when symbolic attributes are
inherited from base classes or the user want to change its order. If not
provided, the `init_arg_list` will be automatically generated from
symbolic attributes defined from ``pg.members`` in their declaration
order, from the base classes to the subclass.
**kwargs: Keyword arguments for infrequently used options. Acceptable
keywords are: * `serialization_key`: An optional string to be used as the
serialization key for the class during `sym_jsonify`. If None,
`cls.__type_name__` will be used. This is introduced for scenarios where we
want to relocate a class: before downstream code can recognize the new
location, we need the class to serialize using the previous key. *
`additional_keys`: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
Returns:
A decorator function that registers the class or function with a schema
created from the fields.
Raises:
TypeError: Decorator cannot be applied on target class or keyword argument
provided is not supported.
KeyError: If type has already been registered in the registry.
ValueError: schema cannot be created from fields.
"""
serialization_key = kwargs.pop('serialization_key', None)
additional_keys = kwargs.pop('additional_keys', None)
if kwargs:
raise TypeError(f'Unsupported keyword arguments: {list(kwargs.keys())!r}.')
def _decorator(cls):
"""Decorator function that registers schema with an Object class."""
schema_utils.update_schema(
cls,
fields,
extend=True,
init_arg_list=init_arg_list,
metadata=metadata,
serialization_key=serialization_key,
additional_keys=additional_keys,
)
return cls
return typing.cast(pg_typing.Decorator, _decorator)
|
(fields: List[Union[pyglove.core.typing.class_schema.Field, Tuple[Union[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str], Tuple[Union[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str, Any]]], metadata: Optional[Dict[str, Any]] = None, init_arg_list: Optional[Sequence[str]] = None, **kwargs) -> pg_typing.Decorator
|
41,659 |
pyglove.core.symbolic.base
|
ne
|
Compares if two values are not equal. Use symbolic equality if possible.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
def sym_eq(self, right):
if super().sym_eq(right):
return True
return pg.eq(self.x, right)
class B:
pass
assert pg.ne(1, 2)
assert pg.ne(A(1), A(2))
# A overrides `sym_eq`.
assert not pg.ne(A(1), 1)
# Objects of B are compared by references.
assert pg.ne(A(B()), A(B()))
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if left and right are not equal, using symbolic equality when
possible; otherwise False.
|
def ne(left: Any, right: Any) -> bool:
"""Compares if two values are not equal. Use symbolic equality if possible.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
def sym_eq(self, right):
if super().sym_eq(right):
return True
return pg.eq(self.x, right)
class B:
pass
assert pg.ne(1, 2)
assert pg.ne(A(1), A(2))
# A overrides `sym_eq`.
assert not pg.ne(A(1), 1)
# Objects of B are compared by references.
assert pg.ne(A(B()), A(B()))
Args:
left: The left-hand value to compare.
right: The right-hand value to compare.
Returns:
True if left and right are not equal, using symbolic equality when
possible; otherwise False.
"""
return not eq(left, right)
|
(left: Any, right: Any) -> bool
|
41,660 |
pyglove.core.symbolic.flags
|
notify_on_change
|
Returns a context manager to enable or disable notification upon change.
`notify_on_change` is thread-safe and can be nested. For example, in the
following code, `_on_change` (thus `_on_bound`) method of `a` will be
triggered due to the rebind in the inner `with` statement, and those of `b`
will not be triggered as the outer `with` statement disables the
notification::
with pg.notify_on_change(False):
with pg.notify_on_change(True):
a.rebind(b=1)
b.rebind(x=2)
Args:
enabled: If True, enable change notification in current scope.
Otherwise, disable notification.
Returns:
A context manager for allowing/disallowing change notification in scope.
|
def notify_on_change(enabled: bool = True) -> ContextManager[None]:
"""Returns a context manager to enable or disable notification upon change.
`notify_on_change` is thread-safe and can be nested. For example, in the
following code, `_on_change` (thus `_on_bound`) method of `a` will be
triggered due to the rebind in the inner `with` statement, and those of `b`
will not be triggered as the outer `with` statement disables the
notification::
with pg.notify_on_change(False):
with pg.notify_on_change(True):
a.rebind(b=1)
b.rebind(x=2)
Args:
enabled: If True, enable change notification in current scope.
Otherwise, disable notification.
Returns:
A context manager for allowing/disallowing change notification in scope.
"""
return thread_local.thread_local_value_scope(
_TLS_ENABLE_CHANGE_NOTIFICATION, enabled, True
)
|
(enabled: bool = True) -> ContextManager[NoneType]
|
41,662 |
pyglove.core.hyper.categorical
|
oneof
|
N choose 1.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
# A single categorical choice:
v = pg.oneof([1, 2, 3])
# A complex type as candidate.
v1 = pg.oneof(['a', {'x': 1}, A(1)])
# A hierarchical categorical choice:
v2 = pg.oneof([
'foo',
'bar',
A(pg.oneof([1, 2, 3]))
])
See also:
* :class:`pyglove.hyper.OneOf`
* :func:`pyglove.manyof`
* :func:`pyglove.floatv`
* :func:`pyglove.permutate`
* :func:`pyglove.evolve`
.. note::
Under symbolic mode (by default), `pg.oneof` returns a ``pg.hyper.OneOf``
object. Under dynamic evaluation mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
To use conditional search space in dynamic evaluation mode, the candidate
should be wrapped with a `lambda` function, which is not necessary under
symbolic mode. For example::
pg.oneof([lambda: pg.oneof([0, 1], name='sub'), 2], name='root')
Args:
candidates: Candidates to select from. Items of `candidates` can be of any
type; therefore they can have nested hyper primitives, which form a
hierarchical search space.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or under a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
Returns:
In symbolic mode, this function returns a `ChoiceValue`.
In dynamic evaluation mode, this function returns one of the items in
`candidates`.
If evaluated under a `pg.DynamicEvaluationContext.apply` scope,
this function will return the selected candidate.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return the first candidate.
|
def oneof(candidates: Iterable[Any],
*,
name: Optional[str] = None,
hints: Optional[Any] = None) -> Any:
"""N choose 1.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
# A single categorical choice:
v = pg.oneof([1, 2, 3])
# A complex type as candidate.
v1 = pg.oneof(['a', {'x': 1}, A(1)])
# A hierarchical categorical choice:
v2 = pg.oneof([
'foo',
'bar',
A(pg.oneof([1, 2, 3]))
])
See also:
* :class:`pyglove.hyper.OneOf`
* :func:`pyglove.manyof`
* :func:`pyglove.floatv`
* :func:`pyglove.permutate`
* :func:`pyglove.evolve`
.. note::
Under symbolic mode (by default), `pg.oneof` returns a ``pg.hyper.OneOf``
object. Under dynamic evaluation mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
To use conditional search space in dynamic evaluation mode, the candidate
should be wrapped with a `lambda` function, which is not necessary under
symbolic mode. For example::
pg.oneof([lambda: pg.oneof([0, 1], name='sub'), 2], name='root')
Args:
candidates: Candidates to select from. Items of `candidates` can be of any
type; therefore they can have nested hyper primitives, which form a
hierarchical search space.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or under a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
Returns:
In symbolic mode, this function returns a `ChoiceValue`.
In dynamic evaluation mode, this function returns one of the items in
`candidates`.
If evaluated under a `pg.DynamicEvaluationContext.apply` scope,
this function will return the selected candidate.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return the first candidate.
"""
return OneOf(candidates=list(candidates), name=name, hints=hints)
|
(candidates: Iterable[Any], *, name: Optional[str] = None, hints: Optional[Any] = None) -> Any
|
41,664 |
pyglove.core.patching.rule_based
|
patch
|
Apply patches to a symbolic value.
Args:
value: A symbolic value to patch.
rule: A patching rule is one of the following:
1) A dict of symbolic paths to the new values.
2) A rebind function defined by signature (k, v) or (k, v, p).
See :meth:`pyglove.Symbolic.rebind`.
3) A :class:`pyglove.patching.Patcher` object.
4) A URL-like string representing an instance of a registered Patcher.
The format is "<patcher_name>?<arg1>&<arg2>=<val2>".
5) A list of mixtures of the above.
Returns:
Value after applying the patchers. If any patcher returned a new value
(by returning a single-item dict containing '' as the key), the return
value will be a different object than `value`; otherwise `value`
will be returned after applying the patches.
Raises:
ValueError: Error if the patch name and arguments cannot
be parsed successfully.
|
def patch(value: symbolic.Symbolic, rule: PatchType) -> Any:
"""Apply patches to a symbolic value.
Args:
value: A symbolic value to patch.
rule: A patching rule is one of the following:
1) A dict of symbolic paths to the new values.
2) A rebind function defined by signature (k, v) or (k, v, p).
See :meth:`pyglove.Symbolic.rebind`.
3) A :class:`pyglove.patching.Patcher` object.
4) A URL-like string representing an instance of a registered Patcher.
The format is "<patcher_name>?<arg1>&<arg2>=<val2>".
5) A list of mixtures of the above.
Returns:
Value after applying the patchers. If any patcher returned a new value
(by returning a single-item dict containing '' as the key), the return
value will be a different object than `value`; otherwise `value`
will be returned after applying the patches.
Raises:
ValueError: Error if the patch name and arguments cannot
be parsed successfully.
"""
patches = []
rules = rule if isinstance(rule, list) else [rule]
for p in rules:
if isinstance(p, str):
p = from_uri(p)
if not isinstance(p, (Patcher, dict)) and not callable(p):
raise TypeError(
f'Patching rule {p!r} should be a dict of path to values, a rebind '
f'function, a patcher (object or string), or a list of their '
f'mixtures.')
patches.append(p)
# Apply patches in chain.
for p in patches:
if isinstance(p, Patcher):
value = p.patch(value)
elif isinstance(p, dict):
if len(p) == 1 and '' in p:
value = p['']
else:
value = value.rebind(p, raise_on_no_change=False)
else:
value = value.rebind(p, raise_on_no_change=False)
# Validate patched values.
for p in patches:
if isinstance(p, Patcher):
p.validate(value)
return value
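# Usage sketch (hedged; `Trainer` and its fields are hypothetical):
#
# trainer = Trainer(lr=0.1, steps=100)
# # Rule 1: a dict of symbolic paths to new values.
# trainer = patch(trainer, {'lr': 0.01})
# # Rule 2: a rebind function.
# trainer = patch(trainer, lambda k, v: v * 2 if k.key == 'steps' else v)
# # Rules can be chained as a list and are applied in order.
# trainer = patch(trainer, [{'lr': 0.02}, {'steps': 500}])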
|
(value: pyglove.core.symbolic.base.Symbolic, rule: Union[Dict[str, Any], Callable, pyglove.core.patching.rule_based.Patcher, str, List[Union[Dict[str, Any], Callable, pyglove.core.patching.rule_based.Patcher, str]]]) -> Any
|
41,665 |
pyglove.core.patching.pattern_based
|
patch_on_key
|
Recursively patch values on matched keys (leaf-node names).
Example::
d = pg.Dict(a=0, b=2)
print(pg.patching.patch_on_key(d, 'a', value=3))
# {a=3, b=2}
print(pg.patching.patch_on_key(d, '.', value=3))
# {a=3, b=3}
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
def _on_init(self):
super()._on_init()
self._num_changes = 0
def _on_change(self, updates):
super()._on_change(updates)
self._num_changes += 1
a = A()
pg.patching.patch_on_key(a, 'x', value=2)
# a._num_changes is 1.
pg.patching.patch_on_key(a, 'x', value=3)
# a._num_changes is 2.
pg.patching.patch_on_key(a, 'x', value=4, skip_notification=True)
# a._num_changes is still 2.
Args:
src: symbolic value to patch.
regex: Regex for key name.
value: New value for fields that satisfy the condition.
value_fn: Callable object that produces a new value based on the old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
|
def patch_on_key(
src: symbolic.Symbolic,
regex: str,
value: Any = None,
value_fn: Optional[Callable[[Any], Any]] = None,
skip_notification: Optional[bool] = None) -> Any:
"""Recursively patch values on matched keys (leaf-node names).
Example::
d = pg.Dict(a=0, b=2)
print(pg.patching.patch_on_key(d, 'a', value=3))
# {a=3, b=2}
print(pg.patching.patch_on_key(d, '.', value=3))
# {a=3, b=3}
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
def _on_init(self):
super()._on_init()
self._num_changes = 0
def _on_change(self, updates):
super()._on_change(updates)
self._num_changes += 1
a = A()
pg.patching.patch_on_key(a, 'x', value=2)
# a._num_changes is 1.
pg.patching.patch_on_key(a, 'x', value=3)
# a._num_changes is 2.
pg.patching.patch_on_key(a, 'x', value=4, skip_notification=True)
# a._num_changes is still 2.
Args:
src: symbolic value to patch.
regex: Regex for key name.
value: New value for fields that satisfy the condition.
value_fn: Callable object that produces a new value based on the old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
"""
regex = re.compile(regex)
return _conditional_patch(
src,
lambda k, v, p: k and regex.match(str(k.key)),
value,
value_fn,
skip_notification)
|
(src: pyglove.core.symbolic.base.Symbolic, regex: str, value: Optional[Any] = None, value_fn: Optional[Callable[[Any], Any]] = None, skip_notification: Optional[bool] = None) -> Any
|
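A small sketch of `value_fn`, which derives the new value from the old one (assuming `import pyglove as pg`)::

  import pyglove as pg

  d = pg.Dict(x=1, y=pg.Dict(x=2))
  # Keys named 'x' at any depth are matched; `value` must be None here.
  pg.patching.patch_on_key(d, 'x', value_fn=lambda v: v * 10)
  # d is now {x=10, y={x=20}}.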
41,666 |
pyglove.core.patching.pattern_based
|
patch_on_member
|
Recursively patch values that are the requested member of classes.
Example::
d = pg.Dict(a=A(x=1), b=2)
print(pg.patching.patch_on_member(d, A, 'x', 2))
# {a=A(x=2), b=2}
Args:
src: symbolic value to patch.
cls: The class (or tuple of classes) that the member belongs to.
name: Member name.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
|
def patch_on_member(
src: symbolic.Symbolic,
cls: Union[Type[Any], Tuple[Type[Any], ...]],
name: str,
value: Any = None,
value_fn: Optional[Callable[[Any], Any]] = None,
skip_notification: Optional[bool] = None) -> Any:
"""Recursively patch values that are the requested member of classes.
Example::
d = pg.Dict(a=A(x=1), b=2)
print(pg.patching.patch_on_member(d, A, 'x', 2))
# {a=A(x=2), b=2}
Args:
src: symbolic value to patch.
cls: The class (or tuple of classes) that the member belongs to.
name: Member name.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
"""
return _conditional_patch(
src, lambda k, v, p: isinstance(p, cls) and k.key == name,
value, value_fn, skip_notification)
|
(src: pyglove.core.symbolic.base.Symbolic, cls: Union[Type[Any], Tuple[Type[Any], ...]], name: str, value: Optional[Any] = None, value_fn: Optional[Callable[[Any], Any]] = None, skip_notification: Optional[bool] = None) -> Any
|
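A sketch showing that only members of the given class are touched (assuming `import pyglove as pg`)::

  import pyglove as pg

  @pg.members([('x', pg.typing.Int()), ('y', pg.typing.Int())])
  class A(pg.Object):
    pass

  d = pg.Dict(a=A(x=1, y=2), b=pg.Dict(x=1))
  pg.patching.patch_on_member(d, A, 'x', value=5)
  # d.a.x is now 5; d.b.x stays 1 because its parent is a Dict, not A.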
41,667 |
pyglove.core.patching.pattern_based
|
patch_on_path
|
Recursively patch values on matched paths.
Example::
d = pg.Dict(a={'x': 1}, b=2)
print(pg.patching.patch_on_path(d, '.*x', value=3))
# {a={x=3}, b=2}
Args:
src: symbolic value to patch.
regex: Regex for key path.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
|
def patch_on_path(
src: symbolic.Symbolic,
regex: str,
value: Any = None,
value_fn: Optional[Callable[[Any], Any]] = None,
skip_notification: Optional[bool] = None) -> Any:
"""Recursively patch values on matched paths.
Example::
d = pg.Dict(a={'x': 1}, b=2)
print(pg.patching.patch_on_path(d, '.*x', value=3))
# {a={x=3}, b=2}
Args:
src: symbolic value to patch.
regex: Regex for key path.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
"""
regex = re.compile(regex)
return _conditional_patch(
src, lambda k, v, p: regex.match(str(k)),
value, value_fn, skip_notification)
|
(src: pyglove.core.symbolic.base.Symbolic, regex: str, value: Optional[Any] = None, value_fn: Optional[Callable[[Any], Any]] = None, skip_notification: Optional[bool] = None) -> Any
|
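Unlike `patch_on_key`, the regex here is matched against the full key path such as 'a.x'. A small sketch (assuming `import pyglove as pg`)::

  import pyglove as pg

  d = pg.Dict(a=pg.Dict(x=1), b=pg.Dict(x=2))
  pg.patching.patch_on_path(d, r'a\.x', value=3)
  # d is now {a={x=3}, b={x=2}}; 'b.x' does not match the pattern.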
41,668 |
pyglove.core.patching.pattern_based
|
patch_on_type
|
Recursively patch values on matched types.
Example::
d = pg.Dict(a={'x': 1}, b=2)
print(pg.patching.patch_on_type(d, int, value_fn=lambda x: x * 2))
# {a={x=2}, b=4}
Args:
src: symbolic value to patch.
value_type: Value type to match.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
|
def patch_on_type(
src: symbolic.Symbolic,
value_type: Union[Type[Any], Tuple[Type[Any], ...]],
value: Any = None,
value_fn: Optional[Callable[[Any], Any]] = None,
skip_notification: Optional[bool] = None) -> Any:
"""Recursively patch values on matched types.
Example::
d = pg.Dict(a={'x': 1}, b=2)
print(pg.patching.patch_on_type(d, int, value_fn=lambda x: x * 2))
# {a={x=2}, b=4}
Args:
src: symbolic value to patch.
value_type: Value type to match.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
"""
return _conditional_patch(
src, lambda k, v, p: isinstance(v, value_type),
value, value_fn, skip_notification)
|
(src: pyglove.core.symbolic.base.Symbolic, value_type: Union[Type[Any], Tuple[Type[Any], ...]], value: Optional[Any] = None, value_fn: Optional[Callable[[Any], Any]] = None, skip_notification: Optional[bool] = None) -> Any
|
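A sketch that doubles every integer in a tree while leaving other types intact (assuming `import pyglove as pg`)::

  import pyglove as pg

  d = pg.Dict(name='hi', n=1, nested=pg.Dict(n=2))
  pg.patching.patch_on_type(d, int, value_fn=lambda v: v * 2)
  # d is now {name='hi', n=2, nested={n=4}}.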
41,669 |
pyglove.core.patching.pattern_based
|
patch_on_value
|
Recursively patch values on matched values.
Example::
d = pg.Dict(a={'x': 1}, b=1)
print(pg.patching.patch_on_value(d, 1, value=3))
# {a={x=3}, b=3}
Args:
src: symbolic value to patch.
old_value: Old value to match.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
|
def patch_on_value(
src: symbolic.Symbolic,
old_value: Any,
value: Any = None,
value_fn: Optional[Callable[[Any], Any]] = None,
skip_notification: Optional[bool] = None) -> Any:
"""Recursively patch values on matched values.
Example::
d = pg.Dict(a={'x': 1}, b=1)
print(pg.patching.patch_on_value(d, 1, value=3))
# {a={x=3}, b=3}
Args:
src: symbolic value to patch.
old_value: Old value to match.
value: New value for fields that satisfy the match condition.
value_fn: Callable object that produces new value based on old value.
If not None, `value` must be None.
skip_notification: If True, `on_change` event will not be triggered for this
operation. If None, the behavior is decided by `pg.notify_on_rebind`.
Please see `symbolic.Symbolic.rebind` for details.
Returns:
`src` after being patched.
"""
return _conditional_patch(
src, lambda k, v, p: v == old_value,
value, value_fn, skip_notification)
|
(src: pyglove.core.symbolic.base.Symbolic, old_value: Any, value: Optional[Any] = None, value_fn: Optional[Callable[[Any], Any]] = None, skip_notification: Optional[bool] = None) -> Any
|
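A sketch replacing a sentinel value wherever it appears in the tree (assuming `import pyglove as pg`)::

  import pyglove as pg

  d = pg.Dict(mode='train', nested=pg.Dict(mode='train', k=1))
  pg.patching.patch_on_value(d, 'train', value='eval')
  # d is now {mode='eval', nested={mode='eval', k=1}}.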
41,670 |
pyglove.core.patching.rule_based
|
patcher
|
Decorate a function into a Patcher and register it.
A patcher function is defined as:
`<patcher_fun> := <fun_name>(<target>, [parameters])`
The signature takes at least one argument as the patching target,
with additional arguments as patching parameters to control the details of
this patch.
Example::
@pg.patching.patcher([
('x', pg.typing.Int())
])
def increment(v, x=1):
return pg.symbolic.get_rebind_dict(
lambda k, v, p: v + x if isinstance(v, int) else v)
# This patcher can be called via:
# pg.patching.apply(v, [increment(x=2)])
# or pg.patching.apply(v, ['increment?x=2'])
Args:
args: A list of (arg_name, arg_value_spec) to schematize patcher arguments.
name: String to be used as patcher name in URI. If None, function name will
be used as patcher name.
Returns:
A decorator that converts a function into a Patcher subclass.
|
def patcher(
args: Optional[List[Tuple[str, pg_typing.ValueSpec]]] = None,
name: Optional[str] = None) -> Any:
"""Decorate a function into a Patcher and register it.
A patcher function is defined as:
`<patcher_fun> := <fun_name>(<target>, [parameters])`
The signature takes at least one argument as the patching target,
with additional arguments as patching parameters to control the details of
this patch.
Example::
@pg.patching.patcher([
('x', pg.typing.Int())
])
def increment(v, x=1):
return pg.symbolic.get_rebind_dict(
lambda k, v, p: v + x if isinstance(v, int) else v)
# This patcher can be called via:
# pg.patching.apply(v, [increment(x=2)])
# or pg.patching.apply(v, ['increment?x=2'])
Args:
args: A list of (arg_name, arg_value_spec) to schematize patcher arguments.
name: String to be used as patcher name in URI. If None, function name will
be used as patcher name.
Returns:
A decorator that converts a function into a Patcher subclass.
"""
functor_decorator = symbolic.functor(args, base_class=Patcher)
def _decorator(fn):
"""Returns decorated Patcher class."""
cls = functor_decorator(fn)
_PATCHER_REGISTRY.register(name or fn.__name__,
typing.cast(Type[Patcher], cls))
arg_specs = cls.__signature__.args
if len(arg_specs) < 1:
raise TypeError(
'Patcher function should have at least 1 argument '
f'as patching target. (Patcher={cls.__type_name__!r})'
)
if not _is_patcher_target_spec(arg_specs[0].value_spec):
raise TypeError(
f'{arg_specs[0].value_spec!r} cannot be used for constraining '
f'Patcher target. (Patcher={cls.__type_name__!r}, '
f'Argument={arg_specs[0].name!r})\n'
'Acceptable value spec types are: '
'Any, Callable, Dict, Functor, List, Object.'
)
for arg_spec in arg_specs[1:]:
if not _is_patcher_parameter_spec(arg_spec.value_spec):
raise TypeError(
f'{arg_spec.value_spec!r} cannot be used for constraining '
f'Patcher argument. (Patcher={cls.__type_name__!r}, '
f'Argument={arg_spec.name!r})\n'
'Consider treating it as a string and parsing it yourself.'
)
return cls
return _decorator
|
(args: Optional[List[Tuple[str, pyglove.core.typing.class_schema.ValueSpec]]] = None, name: Optional[str] = None) -> Any
|
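An end-to-end sketch mirroring the docstring above; the `increment` patcher name and its argument are illustrative::

  import pyglove as pg

  @pg.patching.patcher([('x', pg.typing.Int())])
  def increment(v, x=1):
    return pg.symbolic.get_rebind_dict(
        lambda k, v, p: v + x if isinstance(v, int) else v)

  d = pg.Dict(a=1, b=2)
  pg.patching.apply(d, [increment(x=2)])   # d becomes {a=3, b=4}.
  pg.patching.apply(d, ['increment?x=2'])  # URI form; increments again.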
41,672 |
pyglove.core.hyper.categorical
|
permutate
|
Permutation of candidates.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
# Permutates the candidates.
v = pg.permutate([1, 2, 3])
# A complex type as candidate.
v1 = pg.permutate(['a', {'x': 1}, A(1)])
# A hierarchical categorical choice:
v2 = pg.permutate([
'foo',
'bar',
A(pg.oneof([1, 2, 3]))
])
.. note::
Under symbolic mode (by default), `pg.permutate` returns a ``pg.hyper.ManyOf``
object. Under dynamic evaluate mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
To use conditional search space in dynamic evaluate mode, the candidate
should be wrapped with a `lambda` function, which is not necessary under
symbolic mode. For example::
pg.permutate([
lambda: pg.oneof([0, 1], name='sub_a'),
lambda: pg.floatv(0.0, 1.0, name='sub_b'),
lambda: pg.manyof(2, ['a', 'b', 'c'], name='sub_c')
], name='root')
See also:
* :class:`pyglove.hyper.ManyOf`
* :func:`pyglove.oneof`
* :func:`pyglove.manyof`
* :func:`pyglove.floatv`
* :func:`pyglove.evolve`
Args:
candidates: Candidates to select from. Items of `candidates` can be of any
type, and may contain nested hyper primitives, which forms a hierarchical
search space.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
Returns:
In symbolic mode, this function returns a `Choices`.
In dynamic evaluate mode, this function returns a permutation from
`candidates`.
If evaluated under a `pg.DynamicEvaluationContext.apply` scope,
this function will return a permutation of candidates based on controller
decisions.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return the first valid permutation.
For example::
# Evaluates to [0, 1, 2, 3, 4].
permutate(range(5), name='numbers')
|
def permutate(candidates: Iterable[Any],
name: Optional[str] = None,
hints: Optional[Any] = None) -> Any:
"""Permuatation of candidates.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
# Permutates the candidates.
v = pg.permutate([1, 2, 3])
# A complex type as candidate.
v1 = pg.permutate(['a', {'x': 1}, A(1)])
# A hierarchical categorical choice:
v2 = pg.permutate([
'foo',
'bar',
A(pg.oneof([1, 2, 3]))
])
.. note::
Under symbolic mode (by default), `pg.permutate` returns a ``pg.hyper.ManyOf``
object. Under dynamic evaluate mode, which is called under the context of
:meth:`pyglove.hyper.DynamicEvaluationContext.collect` or
:meth:`pyglove.hyper.DynamicEvaluationContext.apply`, it evaluates to
a concrete candidate value.
To use conditional search space in dynamic evaluate mode, the candidate
should be wrapped with a `lambda` function, which is not necessary under
symbolic mode. For example::
pg.permutate([
lambda: pg.oneof([0, 1], name='sub_a'),
lambda: pg.floatv(0.0, 1.0, name='sub_b'),
lambda: pg.manyof(2, ['a', 'b', 'c'], name='sub_c')
], name='root')
See also:
* :class:`pyglove.hyper.ManyOf`
* :func:`pyglove.oneof`
* :func:`pyglove.manyof`
* :func:`pyglove.floatv`
* :func:`pyglove.evolve`
Args:
candidates: Candidates to select from. Items of `candidates` can be of any
type, and may contain nested hyper primitives, which forms a hierarchical
search space.
name: A name that can be used to identify a decision point in the search
space. This is needed when the code to instantiate the same hyper
primitive may be called multiple times under a
`pg.DynamicEvaluationContext.collect` context or a
`pg.DynamicEvaluationContext.apply` context.
hints: An optional value which acts as a hint for the controller.
Returns:
In symbolic mode, this function returns a `Choices`.
In dynamic evaluate mode, this function returns a permutation from
`candidates`.
If evaluated under a `pg.DynamicEvaluationContext.apply` scope,
this function will return a permutation of candidates based on controller
decisions.
If evaluated under a `pg.DynamicEvaluationContext.collect`
scope, it will return the first valid permutation.
For example::
# Evaluates to [0, 1, 2, 3, 4].
permutate(range(5), name='numbers')
"""
candidates = list(candidates)
return manyof(
len(candidates), candidates,
choices_distinct=True, choices_sorted=False, name=name, hints=hints)
|
(candidates: Iterable[Any], name: Optional[str] = None, hints: Optional[Any] = None) -> Any
|
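A sketch drawing a random ordering via `pg.random_sample` (documented later in this listing); `seed` makes the draw reproducible::

  import pyglove as pg

  v = pg.Dict(order=pg.permutate([1, 2, 3]))
  d = next(pg.random_sample(v, seed=1))
  # d.order is one of the 6 permutations, e.g. [2, 1, 3].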
41,673 |
pyglove.core.tuning.backend
|
poll_result
|
Gets tuning result by name.
|
def poll_result(
name: str,
backend: Optional[str] = None,
**kwargs) -> Result:
"""Gets tuning result by name."""
return get_backend_cls(backend).poll_result(name, **kwargs)
|
(name: str, backend: Optional[str] = None, **kwargs) -> pyglove.core.tuning.protocols.Result
|
41,674 |
pyglove.core.object_utils.formatting
|
printv
|
Prints formatted value.
|
def printv(v: Any, **kwargs):
"""Prints formatted value."""
fs = kwargs.pop('file', sys.stdout)
print(format(v, **kwargs), file=fs)
|
(v: Any, **kwargs)
|
41,675 |
pyglove.core.symbolic.base
|
query
|
Queries a (maybe) symbolic value.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Int())
])
class A(pg.Object):
pass
value = {
'a1': A(x=0, y=1),
'a2': [A(x=1, y=1), A(x=1, y=2)],
'a3': {
'p': A(x=2, y=1),
'q': A(x=2, y=2)
}
}
# Query by path regex.
# Shall print:
# {'a3.p': A(x=2, y=1)}
print(pg.query(value, r'.*p'))
# Query by value.
# Shall print:
# {
# 'a2[1].y': 2,
# 'a3.p.x': 2,
# 'a3.q.x': 2,
# 'a3.q.y': 2,
# }
print(pg.query(value, where=lambda v: v==2))
# Query by path, value and parent.
# Shall print:
# {
# 'a2[1].y': 2,
# }
print(pg.query(
value, r'.*y',
where=lambda v, p: v > 1 and isinstance(p, A) and p.x == 1))
Args:
x: A nested structure that may contain symbolic values.
path_regex: Optional regex expression to constrain path.
where: Optional callable to constrain value and parent when path matches
`path_regex` or when `path_regex` is not provided. The signature is:
`(value) -> should_select` or `(value, parent) -> should_select`
enter_selected: If True, if a node is selected, enter the node and query
its sub-nodes.
custom_selector: Optional callable object as custom selector. When
`custom_selector` is provided, `path_regex` and `where` must be None.
The signature of `custom_selector` is:
`(key_path, value) -> should_select`
or `(key_path, value, parent) -> should_select`
Returns:
A dict of key path to value as results for selected values.
|
def query(
x: Any,
path_regex: Optional[str] = None,
where: Optional[Union[Callable[[Any], bool],
Callable[[Any, Any], bool]]] = None,
enter_selected: bool = False,
custom_selector: Optional[Union[
Callable[[object_utils.KeyPath, Any], bool],
Callable[[object_utils.KeyPath, Any, Any], bool]]] = None
) -> Dict[str, Any]:
"""Queries a (maybe) symbolic value.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Int())
])
class A(pg.Object):
pass
value = {
'a1': A(x=0, y=1),
'a2': [A(x=1, y=1), A(x=1, y=2)],
'a3': {
'p': A(x=2, y=1),
'q': A(x=2, y=2)
}
}
# Query by path regex.
# Shall print:
# {'a3.p': A(x=2, y=1)}
print(pg.query(value, r'.*p'))
# Query by value.
# Shall print:
# {
# 'a2[1].y': 2,
# 'a3.p.x': 2,
# 'a3.q.x': 2,
# 'a3.q.y': 2,
# }
print(pg.query(value, where=lambda v: v==2))
# Query by path, value and parent.
# Shall print:
# {
# 'a2[1].y': 2,
# }
print(pg.query(
value, r'.*y',
where=lambda v, p: v > 1 and isinstance(p, A) and p.x == 1))
Args:
x: A nested structure that may contain symbolic values.
path_regex: Optional regex expression to constrain path.
where: Optional callable to constrain value and parent when path matches
`path_regex` or when `path_regex` is not provided. The signature is:
`(value) -> should_select` or `(value, parent) -> should_select`
enter_selected: If True, if a node is selected, enter the node and query
its sub-nodes.
custom_selector: Optional callable object as custom selector. When
`custom_selector` is provided, `path_regex` and `where` must be None.
The signature of `custom_selector` is:
`(key_path, value) -> should_select`
or `(key_path, value, parent) -> should_select`
Returns:
A dict of key path to value as results for selected values.
"""
regex = re.compile(path_regex) if path_regex else None
if custom_selector is not None:
if path_regex is not None or where is not None:
raise ValueError('\'path_regex\' and \'where\' must be None when '
'\'custom_selector\' is provided.')
signature = pg_typing.get_signature(custom_selector)
if len(signature.args) == 2:
select_fn = lambda k, v, p: custom_selector(k, v) # pytype: disable=wrong-arg-count
elif len(signature.args) == 3:
select_fn = custom_selector
else:
raise TypeError(
f'Custom selector \'{signature.id}\' should accept 2 or 3 arguments. '
f'(key_path, value, [parent]). Encountered: {signature.args}')
else:
if where is not None:
signature = pg_typing.get_signature(where)
if len(signature.args) == 1:
where_fn = lambda v, p: where(v) # pytype: disable=wrong-arg-count
elif len(signature.args) == 2:
where_fn = where
else:
raise TypeError(
f'Where function \'{signature.id}\' should accept 1 or 2 '
f'arguments: (value, [parent]). Encountered: {signature.args}.')
else:
where_fn = lambda v, p: True
def select_fn(k, v, p):
if regex is not None and not regex.match(str(k)):
return False
return where_fn(v, p) # pytype: disable=wrong-arg-count
results = {}
def _preorder_visitor(path: object_utils.KeyPath, v: Any,
parent: Any) -> TraverseAction:
if select_fn(path, v, parent): # pytype: disable=wrong-arg-count
results[str(path)] = v
return TraverseAction.ENTER if enter_selected else TraverseAction.CONTINUE
return TraverseAction.ENTER
traverse(x, preorder_visitor_fn=_preorder_visitor)
return results
|
(x: Any, path_regex: Optional[str] = None, where: Union[Callable[[Any], bool], Callable[[Any, Any], bool], NoneType] = None, enter_selected: bool = False, custom_selector: Union[Callable[[pyglove.core.object_utils.value_location.KeyPath, Any], bool], Callable[[pyglove.core.object_utils.value_location.KeyPath, Any, Any], bool], NoneType] = None) -> Dict[str, Any]
|
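A short sketch combining a path regex with a `where` filter (assuming `import pyglove as pg`; outputs in comments are illustrative)::

  import pyglove as pg

  value = pg.Dict(a=pg.Dict(x=1, y=2), b=2)
  # Select by value only.
  print(pg.query(value, where=lambda v: v == 2))
  # {'a.y': 2, 'b': 2}
  # Select by path regex and value type together.
  print(pg.query(value, r'a\..*', where=lambda v: isinstance(v, int)))
  # {'a.x': 1, 'a.y': 2}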
41,676 |
pyglove.core.geno.random
|
random_dna
|
Generates a random DNA from a DNASpec.
Example::
spec = pg.geno.space([
pg.geno.oneof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant()
]),
pg.geno.floatv(0.1, 0.2)
])
print(pg.random_dna(spec))
# DNA([2, 0.1123])
Args:
dna_spec: a DNASpec object.
random_generator: a Python random generator.
attach_spec: If True, attach the DNASpec to generated DNA.
previous_dna: An optional DNA representing previous DNA. This field might
be useful for generating stateful random DNAs.
Returns:
A DNA object.
|
def random_dna(
dna_spec: DNASpec,
random_generator: Union[None, types.ModuleType, random.Random] = None,
attach_spec: bool = True,
previous_dna: Optional[DNA] = None
) -> DNA:
"""Generates a random DNA from a DNASpec.
Example::
spec = pg.geno.space([
pg.geno.oneof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant()
]),
pg.geno.floatv(0.1, 0.2)
])
print(pg.random_dna(spec))
# DNA([2, 0.1123])
Args:
dna_spec: a DNASpec object.
random_generator: a Python random generator.
attach_spec: If True, attach the DNASpec to generated DNA.
previous_dna: An optional DNA representing previous DNA. This field might
be useful for generating stateful random DNAs.
Returns:
A DNA object.
"""
return dna_spec.random_dna(
random_generator or random, attach_spec, previous_dna)
|
(dna_spec: pyglove.core.geno.base.DNASpec, random_generator: Union[module, random.Random, NoneType] = None, attach_spec: bool = True, previous_dna: Optional[pyglove.core.geno.base.DNA] = None) -> pyglove.core.geno.base.DNA
|
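A reproducible draw using a seeded `random.Random` generator (assuming `import pyglove as pg`)::

  import random
  import pyglove as pg

  spec = pg.geno.space([
      pg.geno.oneof(
          [pg.geno.constant(), pg.geno.constant(), pg.geno.constant()])
  ])
  dna = pg.random_dna(spec, random.Random(1))
  print(dna)  # e.g. DNA(1): the index of the chosen branch.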
41,677 |
pyglove.core.hyper.iter
|
random_sample
|
Returns an iterator of random sampled examples.
Example::
hyper_dict = pg.Dict(x=pg.oneof(range(3)), y=pg.floatv(0.0, 1.0))
# Generate one random example from the hyper_dict.
d = next(pg.random_sample(hyper_dict))
# Generate 5 random examples with random seed.
ds = list(pg.random_sample(hyper_dict, 5, seed=1))
# Generate 3 random examples of `x` with `y` intact.
ds = list(pg.random_sample(hyper_dict, 3,
where=lambda x: isinstance(x, pg.hyper.OneOf)))
Args:
value: A (maybe) hyper value.
num_examples: An optional integer as the number of examples to sample. If
None, an iterator that iterates forever will be returned.
where: Function to filter hyper primitives. If None, all hyper primitives in
`value` will be included in the encoding/decoding process. Otherwise only
the hyper primitives on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
seed: An optional integer as random seed.
Returns:
Iterator of random examples.
|
def random_sample(
value: Any,
num_examples: Optional[int] = None,
where: Optional[Callable[[base.HyperPrimitive], bool]] = None,
seed: Optional[int] = None):
"""Returns an iterator of random sampled examples.
Example::
hyper_dict = pg.Dict(x=pg.oneof(range(3)), y=pg.floatv(0.0, 1.0))
# Generate one random example from the hyper_dict.
d = next(pg.random_sample(hyper_dict))
# Generate 5 random examples with random seed.
ds = list(pg.random_sample(hyper_dict, 5, seed=1))
# Generate 3 random examples of `x` with `y` intact.
ds = list(pg.random_sample(hyper_dict, 3,
where=lambda x: isinstance(x, pg.hyper.OneOf)))
Args:
value: A (maybe) hyper value.
num_examples: An optional integer as the number of examples to sample. If
None, an iterator that iterates forever will be returned.
where: Function to filter hyper primitives. If None, all hyper primitives in
`value` will be included in the encoding/decoding process. Otherwise only
the hyper primitives on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see 'Template' docstr for details.
seed: An optional integer as random seed.
Returns:
Iterator of random examples.
"""
return iterate(
value, num_examples, geno.Random(seed), where=where)
|
(value: Any, num_examples: Optional[int] = None, where: Optional[Callable[[pyglove.core.hyper.base.HyperPrimitive], bool]] = None, seed: Optional[int] = None)
|
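A sketch taking a few reproducible samples (assuming `import pyglove as pg`)::

  import pyglove as pg

  hyper_dict = pg.Dict(x=pg.oneof(range(3)), y=pg.floatv(0.0, 1.0))
  for d in pg.random_sample(hyper_dict, 3, seed=1):
    print(d.x, d.y)  # Each iteration yields a concrete Dict.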
41,678 |
pyglove.core.typing.type_conversion
|
register_converter
|
Register converter from source type to destination type.
Examples::
# Add converter from int to float.
pg.typing.register_converter(int, float, float)
assert pg.typing.Float().apply(1) == 1.0
# Add converter from a dict to class A.
def from_dict(d):
return A(**d)
pg.typing.register_converter(dict, A, from_dict)
assert isinstance(pg.typing.Object(A).apply({'x': 1, 'y': 2}), A)
Args:
src_type: Source value type.
dest_type: Target value type.
convert_fn: Function that performs the conversion, in signature
(src_type) -> dest_type.
|
def register_converter(
src_type: Union[Type[Any], Tuple[Type[Any], ...]],
dest_type: Union[Type[Any], Tuple[Type[Any], ...]],
convert_fn: Callable[[Any], Any]) -> None:
"""Register converter from source type to destination type.
Examples::
# Add converter from int to float.
pg.typing.register_converter(int, float, float)
assert pg.typing.Float().apply(1) == 1.0
# Add converter from a dict to class A.
def from_dict(d):
return A(**d)
pg.typing.register_converter(dict, A, from_dict)
assert isinstance(pg.typing.Object(A).apply({'x': 1, 'y': 2}), A)
Args:
src_type: Source value type.
dest_type: Target value type.
convert_fn: Function that performs the conversion, in signature
(src_type) -> dest_type.
"""
_TYPE_CONVERTER_REGISTRY.register(src_type, dest_type, convert_fn)
|
(src_type: Union[Type[Any], Tuple[Type[Any], ...]], dest_type: Union[Type[Any], Tuple[Type[Any], ...]], convert_fn: Callable[[Any], Any]) -> NoneType
|
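A hedged sketch registering a string-to-date converter; using `datetime.date` as the target is illustrative, not a built-in conversion::

  import datetime
  import pyglove as pg

  # ISO date strings are converted whenever a datetime.date is expected.
  pg.typing.register_converter(
      str, datetime.date, datetime.date.fromisoformat)
  d = pg.typing.Object(datetime.date).apply('2020-01-01')
  assert d == datetime.date(2020, 1, 1)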
41,679 |
pyglove.core.object_utils.json_conversion
|
registered_types
|
Returns an iterator of registered (serialization key, class) tuples.
|
def registered_types() -> Iterable[Tuple[str, Type[JSONConvertible]]]:
"""Returns an iterator of registered (serialization key, class) tuples."""
return JSONConvertible.registered_types()
|
() -> Iterable[Tuple[str, Type[pyglove.core.object_utils.json_conversion.JSONConvertible]]]
|
41,680 |
pyglove.core.object_utils.common_traits
|
repr_format
|
Context manager for setting the default format kwargs for __repr__.
|
def repr_format(**kwargs) -> ContextManager[Dict[str, Any]]:
"""Context manager for setting the default format kwargs for __repr__."""
return thread_local.thread_local_arg_scope(_TLS_REPR_FORMAT_KWARGS, **kwargs)
|
(**kwargs) -> ContextManager[Dict[str, Any]]
|
41,681 |
pyglove.core.tuning.sample
|
sample
|
Yields an example and its feedback sampled from a hyper value.
Example 1: sample a search space defined by a symbolic hyper value::
for example, feedback in pg.sample(
pg.Dict(x=pg.floatv(-1, 1)),
pg.geno.Random(),
num_examples=10,
name='my_search'):
# We can access trial ID (starting from 1) and DNA from the feedback.
print(feedback.id, feedback.dna)
# We can report the reward computed on the example using
# `feedback.add_measurement`, which can be called
# multiple times to report the rewards incrementally.
# Once a trial is done, we call `feedback.done` to mark evaluation on
# current example as completed, or use `feedback.skip` to move to the
# next sample without passing any feedback to the algorithm.
# Without `feedback.done` or `feedback.skip`, the same trial will be
# iterated over and over again for failover handling purposes.
# Besides the reward and the step, metrics and checkpoint can be added
# to each measurement. Additional meta-data and related links (URLs) can
# be passed to `feedback.done` which can be retrieved via
# `pg.poll_result` later.
if example.x >= 0:
# If we only want to add one measurement for each example, a
# shortcut expression for the next two lines can be written as
# follows:
# `feedback(reward=math.sqrt(example.x), step=1)`
feedback.add_measurement(reward=math.sqrt(example.x), step=1)
feedback.done()
else:
feedback.skip()
# IMPORTANT: to stop the loop among all workers, we can call
# `feedback.end_loop`. As a result, each worker will quit their loop
# after current iteration, while using `break` in the for-loop is
# only effective for the local process rather than remotely.
if feedback.id > 1000:
feedback.end_loop()
# At any time, we can poll the search result via `pg.poll_result`, please
# see `pg.tuning.Result` for more details.
result = pg.poll_result('my_search')
print(result.best_trial)
Example 2: sample a search space defined by `pg.hyper.trace`::
def fun():
return pg.oneof([
lambda: pg.oneof([1, 2, 3]),
lambda: pg.floatv(0.1, 1.0),
3]) + sum(pg.manyof(2, [1, 2, 3]))
for example, feedback in pg.sample(
pg.hyper.trace(fun),
pg.geno.Random(),
num_examples=10,
name='my_search'):
# When space is a `pg.hyper.DynamicEvaluationContext` object,
# the `example` yielded at each iteration is a context manager under which
# the hyper primitives (e.g. pg.oneof) will be materialized into concrete
# values according to the controller decision.
with example():
reward = fun()
feedback(reward)
Example 3: sample DNAs from an abstract search space represented by
`pg.DNASpec`::
for dna, feedback in pg.sample(
pg.List([pg.oneof(range(3))] * 5).dna_spec(),
pg.geno.Random(),
num_examples=10,
name='my_search'):
reward = evaluate_dna(dna)
feedback(reward)
**Using `pg.sample` in a distributed environment**
`pg.sample` is designed with distributed sampling in mind, in which multiple
processes can work on the trials of the same sampling loop. While the default
'in-memory' backend works only within a single process without failover
handling, other backends may support distributed computing environment with
persistent state. Nevertheless, the `pg.sample` API is the same
among different backends; users can switch the backend easily by passing a
different value to the `backend` argument, or set the default value globally
via `pg.tuning.set_default_backend`.
**Identifying a sampling**
To identify a distributed loop, a unique `name` is introduced, which will
also be used to poll the latest sampling result via `pg.poll_result`.
**Failover handling**
In a distributed setup, worker processes may incidentally die and restart.
Unless a trial is explicitly marked as done (via `feedback(reward)` or
`feedback.done()`) or skipped (via feedback.skip()), a worker will try to
resume its work on the trial from where it left off.
**Workgroup**
In a distributed setup, worker processes may or may not work on the same
trials. Worker group is introduced to serve this purpose, which is identified
by an integer or a string named `group`. If `group` is not specified or
has different values among the workers, every worker will work on
different trials of the loop. On the contrary, workers having the same
`group` will co-work on the same trials. Group is useful when evaluation on
one example can be parallelized, for example, an example in the outer loop
of a nested search. However, feedback on the same example should be fed back
to the search algorithm only once. Therefore, workers in the same group need
to communicate with each other to avoid duplicated evaluation and feedbacks.
To facilitate such communication, per-trial metadata is supported, and can be
accessed via `feedback.set_metadata/get_metadata` methods. The consistency in
reading and writing the metadata is defined by the backend used. For the
'in-memory' backend, all the trials and their metadata are stored in memory,
thus will be lost if the process gets restarted. On the contrary, the backends
built on distributed computing environment may store both the trials and
metadata in a persistent storage with varying read/write QPS and read/write
consistency guarantees.
**Switch between backends**
The `backend` argument of `pg.sample` lets users choose a backend used in
current sampling loop. Users can use different backends in the same process
to achieve a best performance trade-off, e.g., using `in-memory` backend when
the communication cost outweighs the redundant evaluation cost upon worker
failure.
Helper function :func:`pyglove.tuning.set_default_backend` is introduced to
set the default tuning backend for the entire process.
Args:
space: One of (a hyper value, a `pg.hyper.DynamicEvaluationContext`,
a `pg.DNASpec`) to sample from.
A hyper value is an object with to-be-determined values specified by
`pg.oneof`, `pg.manyof`, `pg.floatv` and etc, representing a search space.
A `pg.hyper.DynamicEvaluationContext` object represents a search space
that is traced via dynamic evaluation.
A `pg.DNASpec` represents an abstract search space that emits DNAs.
algorithm: The search algorithm that samples the search space. For example:
`pg.geno.Random()`, `pg.evolution.regularized_evolution(...)`, and
etc.
num_examples: An optional integer as the max number of examples to
sample. If None, sample will return an iterator of infinite examples.
early_stopping_policy: An optional early stopping policy for user to tell
if incremental evaluation (which reports multiple measurements) on each
example can be early short circuited.
After each call to `feedback.add_measurement`, users can use method
`feedback.should_stop_early` to check whether the current example is worth
further evaluation.
where: Function to filter the hyper values. If None, all decision points
from the `space` will be included for the algorithm to make
decisions. Otherwise only the decision points on which 'where' returns
True will be included. The remaining decision points will be passed through
in the example, intact, which is a sub-space of the search space
represented by the `space`. `where` is usually used in nested search
flows. Please see 'hyper.Template' docstr for details.
name: A string as a unique identifier for current sampling. Two separate
calls to `pg.sample` with the same `name` (also the same algorithm) will
share the same sampling queue, whose examples are proposed by the same
search algorithm.
group: A string or integer as the group ID of the current process in
distributed sampling, which will be used to group different workers into
co-worker groups. Workers with the same group id will work on the same
trial. On the contrary, workers in different groups will always be working
with different trials. If not specified, each worker in current sampling
will be in different groups. `group` is usually used in the outer
loops of nested search, in order to allow workers to work on the same
higher-order item.
backend: An optional string to specify the backend name for sampling.
if None, the default backend set by `pg.tuning.set_default_backend`
will be used.
metrics_to_optimize: A sequence of string as the names of the metrics
to be optimized by the algorithm, which is ['reward'] by default.
When specified, it should have only 1 item for a single-objective algorithm
and can have multiple items for algorithms that support multi-objective
optimization.
**kwargs: Arguments passed to the `BackendFactory` subclass registered with
the requested backend.
Yields:
An iterator of tuples (example, feedback) as examples sampled from the
search space defined by the `space` through `algorithm`.
Raises:
ValueError: `space` is a fixed value, or requested `backend` is not
available.
|
def sample(space: Union[HyperValue,
hyper.DynamicEvaluationContext,
geno.DNASpec],
algorithm: geno.DNAGenerator,
num_examples: Optional[int] = None,
early_stopping_policy: Optional[EarlyStoppingPolicy] = None,
where: Optional[Callable[[hyper.HyperPrimitive], bool]] = None,
name: Optional[str] = None,
group: Union[None, int, str] = None,
backend: Optional[str] = None,
metrics_to_optimize: Optional[Sequence[str]] = None,
**kwargs):
"""Yields an example and its feedback sampled from a hyper value.
Example 1: sample a search space defined by a symbolic hyper value::
for example, feedback in pg.sample(
pg.Dict(x=pg.floatv(-1, 1)),
pg.geno.Random(),
num_examples=10,
name='my_search'):
# We can access trial ID (starting from 1) and DNA from the feedback.
print(feedback.id, feedback.dna)
# We can report the reward computed on the example using
# `feedback.add_measurement`, which can be called
# multiple times to report the rewards incrementally.
# Once a trial is done, we call `feedback.done` to mark evaluation on
# current example as completed, or use `feedback.skip` to move to the
# next sample without passing any feedback to the algorithm.
# Without `feedback.done` or `feedback.skip`, the same trial will be
# iterated over and over again for failover handling purposes.
# Besides the reward and the step, metrics and checkpoint can be added
# to each measurement. Additional meta-data and related links (URLs) can
# be passed to `feedback.done` which can be retrieved via
# `pg.poll_result` later.
if example.x >= 0:
# If we only want to add one measurement for each example, a
# shortcut expression for the next two lines can be written as
# follows:
# `feedback(reward=math.sqrt(example.x), step=1)`
feedback.add_measurement(reward=math.sqrt(example.x), step=1)
feedback.done()
else:
feedback.skip()
# IMPORTANT: to stop the loop among all workers, we can call
# `feedback.end_loop`. As a result, each worker will quit their loop
# after current iteration, while using `break` in the for-loop is
# only effective for the local process rather than remotely.
if feedback.id > 1000:
feedback.end_loop()
# At any time, we can poll the search result via `pg.poll_result`, please
# see `pg.tuning.Result` for more details.
result = pg.poll_result('my_search')
print(result.best_trial)
Example 2: sample a search space defined by `pg.hyper.trace`::
def fun():
return pg.oneof([
lambda: pg.oneof([1, 2, 3]),
lambda: pg.floatv(0.1, 1.0),
3]) + sum(pg.manyof(2, [1, 2, 3]))
for example, feedback in pg.sample(
pg.hyper.trace(fun),
pg.geno.Random(),
num_examples=10,
name='my_search'):
# When space is a `pg.hyper.DynamicEvaluationContext` object,
# the `example` yielded at each iteration is a context manager under which
# the hyper primitives (e.g. pg.oneof) will be materialized into concrete
# values according to the controller decision.
with example():
reward = fun()
feedback(reward)
Example 3: sample DNAs from an abstract search space represented by
`pg.DNASpec`::
for dna, feedback in pg.sample(
pg.List([pg.oneof(range(3))] * 5).dna_spec(),
pg.geno.Random(),
num_examples=10,
name='my_search'):
reward = evaluate_dna(dna)
feedback(reward)
**Using `pg.sample` in a distributed environment**
`pg.sample` is designed with distributed sampling in mind, in which multiple
processes can work on the trials of the same sampling loop. While the default
'in-memory' backend works only within a single process without failover
handling, other backends may support distributed computing environment with
persistent state. Nevertheless, the `pg.sample` API is the same
among different backends; users can switch the backend easily by passing a
different value to the `backend` argument, or set the default value globally
via `pg.tuning.set_default_backend`.
**Identifying a sampling**
To identify a distributed loop, a unique `name` is introduced, which will
also be used to poll the latest sampling result via `pg.poll_result`.
**Failover handling**
In a distributed setup, worker processes may incidentally die and restart.
Unless a trial is explicitly marked as done (via `feedback(reward)` or
`feedback.done()`) or skipped (via feedback.skip()), a worker will try to
resume its work on the trial from where it left off.
**Workgroup**
In a distributed setup, worker processes may or may not work on the same
trials. Worker group is introduced to serve this purpose, which is identified
by an integer or a string named `group`. If `group` is not specified or
has different values among the workers, every worker will work on
different trials of the loop. On the contrary, workers having the same
`group` will co-work on the same trials. Group is useful when evaluation on
one example can be parallelized, for example, an example in the outer loop
of a nested search. However, feedback on the same example should be fed back
to the search algorithm only once. Therefore, workers in the same group need
to communicate with each other to avoid duplicated evaluation and feedbacks.
To facilitate such communication, per-trial metadata is supported, and can be
accessed via `feedback.set_metadata/get_metadata` methods. The consistency in
reading and writing the metadata is defined by the backend used. For the
'in-memory' backend, all the trials and their metadata are stored in memory,
thus will be lost if the process gets restarted. On the contrary, the backends
built on distributed computing environment may store both the trials and
metadata in a persistent storage with varying read/write QPS and read/write
consistency guarantees.
**Switch between backends**
The `backend` argument of `pg.sample` lets users choose a backend used in
current sampling loop. Users can use different backends in the same process
to achieve a best performance trade-off, e.g., using `in-memory` backend when
the communication cost outweighs the redundant evaluation cost upon worker
failure.
Helper function :func:`pyglove.tuning.set_default_backend` is introduced to
set the default tuning backend for the entire process.
Args:
space: One of (a hyper value, a `pg.hyper.DynamicEvaluationContext`,
a `pg.DNASpec`) to sample from.
A hyper value is an object with to-be-determined values specified by
`pg.oneof`, `pg.manyof`, `pg.floatv` and etc, representing a search space.
A `pg.hyper.DynamicEvaluationContext` object represents a search space
that is traced via dynamic evaluation.
A `pg.DNASpec` represents an abstract search space that emits DNAs.
algorithm: The search algorithm that samples the search space. For example:
`pg.geno.Random()`, `pg.evolution.regularized_evolution(...)`, and
etc.
num_examples: An optional integer as the max number of examples to
sample. If None, sample will return an iterator of infinite examples.
early_stopping_policy: An optional early stopping policy for user to tell
if incremental evaluation (which reports multiple measurements) on each
example can be early short circuited.
After each call to `feedback.add_measurement`, users can use method
`feedback.should_stop_early` to check whether the current example is worth
further evaluation.
where: Function to filter the hyper values. If None, all decision points
from the `space` will be included for the algorithm to make
decisions. Otherwise only the decision points on which 'where' returns
True will be included. The remaining decision points will be passed through
in the example, intact, which is a sub-space of the search space
represented by the `space`. `where` is usually used in nested search
flows. Please see 'hyper.Template' docstr for details.
name: A string as a unique identifier for current sampling. Two separate
calls to `pg.sample` with the same `name` (also the same algorithm) will
share the same sampling queue, whose examples are proposed by the same
search algorithm.
group: A string or integer as the group ID of the current process in
distributed sampling, which will be used to group different workers into
co-worker groups. Workers with the same group id will work on the same
trial. On the contrary, workers in different groups will always be working
with different trials. If not specified, each worker in current sampling
will be in different groups. `group` is usually used in the outer
loops of nested search, in order to allow workers to work on the same
higher-order item.
backend: An optional string to specify the backend name for sampling.
if None, the default backend set by `pg.tuning.set_default_backend`
will be used.
metrics_to_optimize: A sequence of string as the names of the metrics
to be optimized by the algorithm, which is ['reward'] by default.
When specified, it should have only 1 item for a single-objective algorithm
and can have multiple items for algorithms that support multi-objective
optimization.
**kwargs: Arguments passed to the `BackendFactory` subclass registered with
the requested backend.
Yields:
An iterator of tuples (example, feedback) as examples sampled from the
search space defined by the `space` through `algorithm`.
Raises:
ValueError: `space` is a fixed value, or requested `backend` is not
available.
"""
# Placeholder for Google-internal usage instrumentation.
# Create template based on the hyper value.
if isinstance(space, hyper.DynamicEvaluationContext):
dynamic_evaluation_context = space
dna_spec = space.dna_spec
template = None
elif isinstance(space, geno.DNASpec):
dynamic_evaluation_context = None
dna_spec = space
template = None
else:
if symbolic.is_deterministic(space):
raise ValueError(f'\'space\' is a constant value: {space!r}.')
template = hyper.template(space, where)
dna_spec = template.dna_spec()
dynamic_evaluation_context = None
# Create and set up the backend.
metrics_to_optimize = metrics_to_optimize or ['reward']
backend = backend_lib.get_backend_cls(backend).create(
name, group, dna_spec, algorithm, metrics_to_optimize,
early_stopping_policy, num_examples, **kwargs)
while True:
try:
feedback = backend.next()
dna = feedback.dna
reward = dna.metadata.get('reward')
if reward is None:
# Decode and return current example to client code for evaluation.
if template is not None:
value = template.decode(dna)
elif dynamic_evaluation_context is not None:
value = lambda: dynamic_evaluation_context.apply(dna)
else:
value = dna
yield (value, feedback)
else:
# Reward may be computed at the controller side, we can
# short circuit the client-side evaluation for the current item.
# Also, there can be multiple co-workers working on the same trial,
# we ignore errors triggered by racing feedbacks, which will be
# considered as no-op.
with feedback.ignore_race_condition():
feedback(reward, metadata=dict(client_evaluation_skipped=True))
except StopIteration:
return
|
(space: Union[Any, pyglove.core.hyper.dynamic_evaluation.DynamicEvaluationContext, pyglove.core.geno.base.DNASpec], algorithm: pyglove.core.geno.dna_generator.DNAGenerator, num_examples: Optional[int] = None, early_stopping_policy: Optional[pyglove.core.tuning.early_stopping.EarlyStoppingPolicy] = None, where: Optional[Callable[[pyglove.core.hyper.base.HyperPrimitive], bool]] = None, name: Optional[str] = None, group: Union[NoneType, int, str] = None, backend: Optional[str] = None, metrics_to_optimize: Optional[Sequence[str]] = None, **kwargs)
|
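A compact end-to-end loop mirroring Example 1 above; the search name 'demo_search' is arbitrary::

  import pyglove as pg

  for example, feedback in pg.sample(
      pg.Dict(x=pg.oneof([1, 2, 3])),
      pg.geno.Random(),
      num_examples=5,
      name='demo_search'):
    # Report a single measurement and mark the trial done in one call.
    feedback(reward=float(example.x))

  result = pg.poll_result('demo_search')
  print(result.best_trial)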
41,682 |
pyglove.core.symbolic.base
|
save
|
Save a symbolic value using the global save handler.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
file = 'my_file.json'
a1.save(file)
a2 = pg.load(file)
assert pg.eq(a1, a2)
Args:
value: value to save.
path: A path string for saving `value`.
*args: Positional arguments that will be passed through to the global
save handler.
**kwargs: Keyword arguments that will be passed through to the global
save handler.
Returns:
Return value from the global save handler.
Raises:
RuntimeError: if global save handler is not set.
|
def save(value: Any, path: str, *args, **kwargs) -> Any:
"""Save a symbolic value using the global save handler.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
file = 'my_file.json'
a1.save(file)
a2 = pg.load(file)
assert pg.eq(a1, a2)
Args:
value: value to save.
path: A path string for saving `value`.
*args: Positional arguments that will be passed through to the global
save handler.
**kwargs: Keyword arguments that will be passed through to the global
save handler.
Returns:
Return value from the global save handler.
Raises:
RuntimeError: if global save handler is not set.
"""
save_handler = flags.get_save_handler() or default_save_handler
return save_handler(value, path, *args, **kwargs)
|
(value: Any, path: str, *args, **kwargs) -> Any
|
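A round-trip sketch with the default (JSON file) save handler; the path is illustrative::

  import pyglove as pg

  d = pg.Dict(x=1, y='a')
  pg.save(d, '/tmp/value.json')
  d2 = pg.load('/tmp/value.json')
  assert pg.eq(d, d2)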
41,685 |
pyglove.core.symbolic.flags
|
set_load_handler
|
Sets global load handler.
Args:
load_handler: A callable object that takes arbitrary arguments and returns
a value. `symbolic.load` method will pass through all arguments to this
handler and return its return value.
Returns:
Previous global load handler.
|
def set_load_handler(
load_handler: Optional[Callable[..., Any]]) -> Optional[Callable[..., Any]]:
"""Sets global load handler.
Args:
load_handler: A callable object that takes arbitrary arguments and returns
a value. `symbolic.load` method will pass through all arguments to this
handler and return its return value.
Returns:
Previous global load handler.
"""
if load_handler and not callable(load_handler):
raise ValueError('`load_handler` must be callable.')
global _LOAD_HANDLER
old_handler = _LOAD_HANDLER
_LOAD_HANDLER = load_handler
return old_handler
|
(load_handler: Optional[Callable[..., Any]]) -> Optional[Callable[..., Any]]
|
41,686 |
pyglove.core.symbolic.flags
|
set_origin_stacktrace_limit
|
Set stack trace limit for origin tracking.
|
def set_origin_stacktrace_limit(limit: int) -> None:
"""Set stack trace limit for origin tracking."""
global _ORIGIN_STACKTRACE_LIMIT
_ORIGIN_STACKTRACE_LIMIT = limit
|
(limit: int) -> NoneType
|
41,687 |
pyglove.core.symbolic.flags
|
set_save_handler
|
Sets global save handler.
Args:
save_handler: A callable object that takes at least one argument as value to
save. `symbolic.save` method will pass through all arguments to this
handler and return its return value.
Returns:
Previous global save handler.
|
def set_save_handler(
save_handler: Optional[Callable[..., Any]]) -> Optional[Callable[..., Any]]:
"""Sets global save handler.
Args:
save_handler: A callable object that takes at least one argument as value to
save. `symbolic.save` method will pass through all arguments to this
handler and return its return value.
Returns:
Previous global save handler.
"""
if save_handler and not callable(save_handler):
raise ValueError('`save_handler` must be callable.')
global _SAVE_HANDLER
old_handler = _SAVE_HANDLER
_SAVE_HANDLER = save_handler
return old_handler
|
(save_handler: Optional[Callable[..., Any]]) -> Optional[Callable[..., Any]]
|
41,688 |
pyglove.core.object_utils.common_traits
|
str_format
|
Context manager for setting the default format kwargs for __str__.
|
def str_format(**kwargs) -> ContextManager[Dict[str, Any]]:
"""Context manager for setting the default format kwargs for __str__."""
return thread_local.thread_local_arg_scope(_TLS_STR_FORMAT_KWARGS, **kwargs)
|
(**kwargs) -> ContextManager[Dict[str, Any]]
|
41,691 |
pyglove.core.symbolic.symbolize
|
symbolize
|
Make a symbolic class/function out of a regular Python class/function.
``pg.symbolize`` is introduced for the purpose of making existing
classes/functions symbolically programmable. For use cases that build
symbolic classes from scratch (native PyGlove classes), extending `pg.Object`
with `@pg.members` that declares the symbolic properties is the recommended
way, which automatically generates the ``__init__`` method and allows
symbolic attributes to be accessed via `self.<member>`.
``pg.symbolize`` can be invoked as a class/function decorator, or as a
function. When it is used as a decorator, the decorated class or function
will be converted to a symbolic type (via :func:`pyglove.wrap` and
:func:`pyglove.functor_class`). This is preferred when user can modify the
files of existing classes/functions. For example::
@pg.symbolize
def foo(a, b):
return a + b
f = foo(1, 2)
f.rebind(a=2)
f() # Returns 4
@pg.symbolize([
# (Optional) add symbolic constraint for __init__ argument 'a'.
('a', pg.typing.Int(min_value=0), 'Description for `a`.')
])
class Foo:
def __init__(self, a, b):
self._a = a
self._b = b
def result(self):
return self._a + self._b
f = Foo(1, 2)
f.rebind(a=2, b=3)
f.result() # Returns 5
When it is used as a function, the input class or function will not be modified.
Instead, a new symbolic type will be created and returned. This is helpful
when users want to create new symbolic types from existing classes/functions
without modifying their original source code. For example::
def foo(a, b):
return a + b
# Create a new symbolic type with constraint on 'a'.
symbolic_foo = pg.symbolize(foo, [
('a', pg.typing.Int(min_value=0))
], returns=pg.typing.Int())
foo(1, 2) # Returns 3 (foo is kept intact).
f = symbolic_foo(1, 2)
f.rebind(a=2)
f() # Returns 4.
class Foo:
def __init__(self, a, b):
self._a = a
self._b = b
def result(self):
return self._a + self._b
SymbolicFoo = pg.symbolize(Foo)
f = SymbolicFoo(2, 2)
f.rebind(a=3)
f.result() # Returns 5.
Args:
*args: The positional arguments for `symbolize` are:
* `class_or_fn`: applicable when `symbolize` is called in function mode.
* `constraints`: an optional list of tuples that allows users to specify
the constraints for arguments from the `__init__` method (for class)
or the arguments from the function signature (for function).
Each tuple should be in format:
`(<arg_name>, <value_spec>, [description], [arg_metadata])`
Where `arg_name` is an argument name that is acceptable to the
`__init__` method of the class, or the function signature;
'value_spec' is a `pg.ValueSpec` object that validates the value of
the argument.
`description` and `arg_metadata` are optional, for documentation and
meta-programming purposes.
**kwargs: Keyword arguments will be passed through to :func:`pyglove.wrap`
(for symbolizing classes) and :func:`pyglove.functor_class` (for
symbolizing functions).
Returns:
A Symbolic subclass for the decorated/input type.
Raises:
TypeError: input type cannot be symbolized, or it's not a type.
|
def symbolize(*args, **kwargs):
"""Make a symbolic class/function out of a regular Python class/function.
``pg.symbolize`` is introduced for the purpose of making existing
classes/functions symbolically programmable. For use cases that build
symbolic classes from scratch (native PyGlove classes), extending `pg.Object`
with `@pg.members` that declares the symbolic properties is the recommended
way, which automatically generates the ``__init__`` method and allows
symbolic attributes to be accessed via `self.<member>`.
``pg.symbolize`` can be invoked as a class/function decorator, or as a
function. When it is used as a decorator, the decorated class or function
will be converted to a symbolic type (via :func:`pyglove.wrap` and
:func:`pyglove.functor_class`). This is preferred when user can modify the
files of existing classes/functions. For example::
@pg.symbolize
def foo(a, b):
return a + b
f = foo(1, 2)
f.rebind(a=2)
f() # Returns 4
@pg.symbolize([
# (Optional) add symbolic constraint for __init__ argument 'a'.
('a', pg.typing.Int(min_value=0), 'Description for `a`.')
])
class Foo:
def __init__(self, a, b):
self._a = a
self._b = b
def result(self):
return self._a + self._b
f = Foo(1, 2)
f.rebind(a=2, b=3)
f.result() # Returns 5
When it is used as a function, the input class or function will not be modified.
Instead, a new symbolic type will be created and returned. This is helpful
when users want to create new symbolic types from existing classes/functions
without modifying their original source code. For example::
def foo(a, b):
return a + b
# Create a new symbolic type with constraint on 'a'.
symbolic_foo = pg.symbolize(foo, [
('a', pg.typing.Int(min_value=0))
], returns=pg.typing.Int())
foo(1, 2) # Returns 3 (foo is kept intact).
f = symbolic_foo(1, 2)
f.rebind(a=2)
f() # Returns 4.
class Foo:
def __init__(self, a, b):
self._a = a
self._b = b
def result(self):
return self._a + self._b
SymbolicFoo = pg.symbolize(Foo)
f = SymbolicFoo(2, 2)
f.rebind(a=3)
f.result() # Returns 5.
Args:
*args: The positional arguments for `symbolize` are:
* `class_or_fn`: applicable when `symbolize` is called in function mode.
* `constraints`: an optional list of tuples that allows users to specify
the constraints for arguments from the `__init__` method (for class)
or the arguments from the function signature (for function).
Each tuple should be in format:
`(<arg_name>, <value_spec>, [description], [arg_metadata])`
Where `arg_name` is an argument name that is acceptable to the
`__init__` method of the class, or the function signature;
'value_spec' is a `pg.ValueSpec` object that validates the value of
the argument.
`description` and `arg_metadata` are optional, for documentation and
meta-programming purposes.
**kwargs: Keyword arguments will be passed through to :func:`pyglove.wrap`
(for symbolizing classes) and :func:`pyglove.functor_class` (for
symbolizing functions).
Returns:
A Symbolic subclass for the decorated/input type.
Raises:
TypeError: input type cannot be symbolized, or it's not a type.
"""
cls_or_fn = None
if args:
if inspect.isclass(args[0]) or inspect.isfunction(args[0]): # pytype: disable=not-supported-yet
cls_or_fn = args[0]
if cls_or_fn is dict or cls_or_fn is list:
if len(args) != 1 or kwargs:
raise ValueError(
f'Constraints are not supported in symbolic {cls_or_fn!r}. '
f'Encountered: constraints={args[1]!r}.')
return pg_dict.Dict if cls_or_fn is dict else pg_list.List
args = args[1:]
if len(args) > 1:
raise ValueError(
f'Only `constraints` is supported as a positional argument. '
f'Encountered {args!r}.')
elif not isinstance(args[0], list):
raise TypeError(f'{args[0]!r} cannot be symbolized.')
def _symbolize(cls_or_fn):
if inspect.isclass(cls_or_fn):
if (issubclass(cls_or_fn, base.Symbolic)
and not issubclass(cls_or_fn, class_wrapper.ClassWrapper)):
raise ValueError(
f'Cannot symbolize {cls_or_fn!r}: {cls_or_fn.__name__} is already '
f'a dataclass-like symbolic class derived from `pg.Object`. '
f'Consider to use `pg.members` to add new symbolic attributes.')
return class_wrapper.wrap(cls_or_fn, *args, **kwargs)
assert inspect.isfunction(cls_or_fn), (
f'Unexpected: {cls_or_fn!r} should be a class or function.')
return functor_class(
cls_or_fn, add_to_registry=True, *args, **kwargs)
if cls_or_fn is not None:
# When `cls_or_fn` is provided, `symbolize` is called under function mode
# such as `SymbolicFoo = pg.symbolize(Foo)` or being used as a decorator
# with no arguments, e.g:
# ```
# @symbolize
# class Foo:
# pass
# ```
# In both cases, we return the symbolic type of `cls_or_fn`.
return _symbolize(cls_or_fn)
else:
# Otherwise a factory method is returned to create the symbolic type from
# a late-bound `cls_or_fn` input, which is the case when `symbolize` is used
# as a decorator with provided arguments.
return _symbolize
|
(*args, **kwargs)
|
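A minimal sketch showing the full constraint tuple format
`(<arg_name>, <value_spec>, [description], [arg_metadata])` with `symbolize`;
the `tunable` metadata key is hypothetical and only for illustration::

  @pg.symbolize([
      ('a', pg.typing.Int(min_value=0), 'Lower-bounded operand.',
       {'tunable': True}),  # `tunable` is a user-defined metadata key.
  ])
  def add(a, b):
    return a + b

  f = add(1, 2)
  f.rebind(a=5)
  assert f() == 7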
41,692 |
pyglove.core.hyper.object_template
|
template
|
Creates an object template from the input.
Example::
d = pg.Dict(x=pg.oneof(['a', 'b', 'c']), y=pg.manyof(2, range(4)))
t = pg.template(d)
assert t.dna_spec() == pg.geno.space([
pg.geno.oneof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
], location='x'),
pg.geno.manyof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
], location='y')
])
assert t.encode(pg.Dict(x='a', y=0)) == pg.DNA([0, 0])
assert t.decode(pg.DNA([0, 0])) == pg.Dict(x='a', y=0)
t = pg.template(d, where=lambda x: isinstance(x, pg.hyper.ManyOf))
assert t.dna_spec() == pg.geno.space([
pg.geno.manyof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
], location='y')
])
assert t.encode(pg.Dict(x=pg.oneof(['a', 'b', 'c']), y=0)) == pg.DNA(0)
assert t.decode(pg.DNA(0)) == pg.Dict(x=pg.oneof(['a', 'b', 'c']), y=0)
Args:
value: A value based on which the template is created.
where: Function to filter hyper values. If None, all hyper primitives from
`value` will be included in the encoding/decoding process. Otherwise
only the hyper values on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see `ObjectTemplate`'s docstring for details.
Returns:
A template object.
|
def template(
value: Any,
where: Optional[Callable[[base.HyperPrimitive], bool]] = None
) -> ObjectTemplate:
"""Creates an object template from the input.
Example::
d = pg.Dict(x=pg.oneof(['a', 'b', 'c']), y=pg.manyof(2, range(4)))
t = pg.template(d)
assert t.dna_spec() == pg.geno.space([
pg.geno.oneof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
], location='x'),
pg.geno.manyof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
], location='y')
])
assert t.encode(pg.Dict(x='a', y=0)) == pg.DNA([0, 0])
assert t.decode(pg.DNA([0, 0])) == pg.Dict(x='a', y=0)
t = pg.template(d, where=lambda x: isinstance(x, pg.hyper.ManyOf))
assert t.dna_spec() == pg.geno.space([
pg.geno.manyof([
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
pg.geno.constant(),
], location='y')
])
assert t.encode(pg.Dict(x=pg.oneof(['a', 'b', 'c']), y=0)) == pg.DNA(0)
assert t.decode(pg.DNA(0)) == pg.Dict(x=pg.oneof(['a', 'b', 'c']), y=0)
Args:
value: A value based on which the template is created.
where: Function to filter hyper values. If None, all hyper primitives from
`value` will be included in the encoding/decoding process. Otherwise
only the hyper values on which 'where' returns True will be included.
`where` can be useful to partition a search space into separate
optimization processes. Please see `ObjectTemplate`'s docstring for details.
Returns:
A template object.
"""
return ObjectTemplate(value, compute_derived=True, where=where)
|
(value: Any, where: Optional[Callable[[pyglove.core.hyper.base.HyperPrimitive], bool]] = None) -> pyglove.core.hyper.object_template.ObjectTemplate
|
41,693 |
pyglove.core.symbolic.base
|
to_json
|
Serializes a (maybe) symbolic value into a plain Python object.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json = a1.to_json()
a2 = pg.from_json(json)
assert pg.eq(a1, a2)
Args:
value: Value to serialize. Applicable value types are:
* Built-in Python types: None, bool, int, float, string;
* JSONConvertible types;
* List types;
* Tuple types;
* Dict types.
**kwargs: Keyword arguments to pass to value.to_json if value is
JSONConvertible.
Returns:
JSON value.
|
def to_json(value: Any, **kwargs) -> Any:
"""Serializes a (maybe) symbolic value into a plain Python object.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json = a1.to_json()
a2 = pg.from_json(json)
assert pg.eq(a1, a2)
Args:
value: Value to serialize. Applicable value types are:
* Built-in Python types: None, bool, int, float, string;
* JSONConvertible types;
* List types;
* Tuple types;
* Dict types.
**kwargs: Keyword arguments to pass to value.to_json if value is
JSONConvertible.
Returns:
JSON value.
"""
# NOTE(daiyip): special handling `sym_jsonify` since symbolized
# classes may have conflicting `to_json` method in their existing classes.
if isinstance(value, Symbolic):
return value.sym_jsonify(**kwargs)
return object_utils.to_json(value, **kwargs)
|
(value: Any, **kwargs) -> Any
|
41,694 |
pyglove.core.symbolic.base
|
to_json_str
|
Serializes a (maybe) symbolic value into a JSON string.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json_str = a1.to_json_str()
a2 = pg.from_json_str(json_str)
assert pg.eq(a1, a2)
Args:
value: Value to serialize.
json_indent: The number of spaces used for indentation in the JSON output.
**kwargs: Additional keyword arguments that are passed to ``pg.to_json``.
Returns:
A JSON string.
|
def to_json_str(value: Any,
*,
json_indent=None,
**kwargs) -> str:
"""Serializes a (maybe) symbolic value into a JSON string.
Example::
@pg.members([
('x', pg.typing.Any())
])
class A(pg.Object):
pass
a1 = A(1)
json_str = a1.to_json_str()
a2 = pg.from_json_str(json_str)
assert pg.eq(a1, a2)
Args:
value: Value to serialize.
json_indent: The number of spaces used for indentation in the JSON output.
**kwargs: Additional keyword arguments that are passed to ``pg.to_json``.
Returns:
A JSON string.
"""
return json.dumps(to_json(value, **kwargs), indent=json_indent)
|
(value: Any, *, json_indent=None, **kwargs) -> str
|
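Reusing `a1` from the example above, `json_indent` is simply forwarded to
`json.dumps(..., indent=...)` for pretty-printing::

  print(a1.to_json_str(json_indent=2))  # 2-space indented JSON output.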
41,695 |
pyglove.core.symbolic.flags
|
track_origin
|
Returns a context manager to enable or disable origin tracking.
`track_origin` is thread-safe and can be nested. For example::
a = pg.Dict(x=1)
with pg.track_origin(False):
with pg.track_origin(True):
# b's origin will be tracked, which can be accessed by `b.sym_origin`.
b = a.clone()
# c's origin will not be tracked, `c.sym_origin` returns None.
c = a.clone()
Args:
enabled: If True, the origin of symbolic values will be tracked during
object cloning and when returned from functors under the current scope.
Returns:
A context manager for enabling or disabling origin tracking.
|
def track_origin(enabled: bool = True) -> ContextManager[None]:
"""Returns a context manager to enable or disable origin tracking.
`track_origin` is thread-safe and can be nested. For example::
a = pg.Dict(x=1)
with pg.track_origin(False):
with pg.track_origin(True):
# b's origin will be tracked, which can be accessed by `b.sym_origin`.
b = a.clone()
# c's origin will not be tracked, `c.sym_origin` returns None.
c = a.clone()
Args:
enabled: If True, the origin of symbolic values will be tracked during
object cloning and when returned from functors under the current scope.
Returns:
A context manager for enabling or disabling origin tracking.
"""
return thread_local.thread_local_value_scope(
_TLS_ENABLE_ORIGIN_TRACKING, enabled, False
)
|
(enabled: bool = True) -> ContextManager[NoneType]
|
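A short sketch of inspecting the tracked origin; the `source` and `tag`
attributes of the `pg.Origin` object are assumed here and may differ across
versions::

  a = pg.Dict(x=1)
  with pg.track_origin():
    b = a.clone()
  assert b.sym_origin.source is a     # Assumed: the value it was cloned from.
  assert b.sym_origin.tag == 'clone'  # Assumed: how the value was created.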
41,696 |
pyglove.core.symbolic.base
|
traverse
|
Traverse a (maybe) symbolic value using visitor functions.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
v = [{'a': A(1)}, A(2)]
integers = []
def track_integers(k, v, p):
if isinstance(v, int):
integers.append((k, v))
return pg.TraverseAction.ENTER
pg.traverse(v, track_integers)
assert integers == [('[0].a.x', 1), ('[1].x', 2)]
Args:
x: Maybe symbolic value.
preorder_visitor_fn: preorder visitor function. Function signature is
`(path, value, parent) -> should_continue`.
postorder_visitor_fn: postorder visitor function. Function signature is
`(path, value, parent) -> should_continue`.
root_path: KeyPath of root value.
parent: Optional parent of the root node.
Returns:
True if both `preorder_visitor_fn` and `postorder_visitor_fn` return
either `TraverseAction.ENTER` or `TraverseAction.CONTINUE` for all nodes.
Otherwise False.
|
def traverse(x: Any,
preorder_visitor_fn: Optional[
Callable[[object_utils.KeyPath, Any, Any],
Optional[TraverseAction]]] = None,
postorder_visitor_fn: Optional[
Callable[[object_utils.KeyPath, Any, Any],
Optional[TraverseAction]]] = None,
root_path: Optional[object_utils.KeyPath] = None,
parent: Optional[Any] = None) -> bool:
"""Traverse a (maybe) symbolic value using visitor functions.
Example::
@pg.members([
('x', pg.typing.Int())
])
class A(pg.Object):
pass
v = [{'a': A(1)}, A(2)]
integers = []
def track_integers(k, v, p):
if isinstance(v, int):
integers.append((k, v))
return pg.TraverseAction.ENTER
pg.traverse(v, track_integers)
assert integers == [('[0].a.x', 1), ('[1].x', 2)]
Args:
x: Maybe symbolic value.
preorder_visitor_fn: preorder visitor function. Function signature is
`(path, value, parent) -> should_continue`.
postorder_visitor_fn: postorder visitor function. Function signature is
`(path, value, parent) -> should_continue`.
root_path: KeyPath of root value.
parent: Optional parent of the root node.
Returns:
True if both `preorder_visitor_fn` and `postorder_visitor_fn` return
either `TraverseAction.ENTER` or `TraverseAction.CONTINUE` for all nodes.
Otherwise False.
"""
root_path = root_path or object_utils.KeyPath()
def no_op_visitor(path, value, parent):
del path, value, parent
return TraverseAction.ENTER
if preorder_visitor_fn is None:
preorder_visitor_fn = no_op_visitor
if postorder_visitor_fn is None:
postorder_visitor_fn = no_op_visitor
preorder_action = preorder_visitor_fn(root_path, x, parent)
if preorder_action is None or preorder_action == TraverseAction.ENTER:
if isinstance(x, dict):
for k, v in x.items():
if not traverse(v, preorder_visitor_fn, postorder_visitor_fn,
object_utils.KeyPath(k, root_path), x):
preorder_action = TraverseAction.STOP
break
elif isinstance(x, list):
for i, v in enumerate(x):
if not traverse(v, preorder_visitor_fn, postorder_visitor_fn,
object_utils.KeyPath(i, root_path), x):
preorder_action = TraverseAction.STOP
break
elif isinstance(x, Symbolic.ObjectType): # pytype: disable=wrong-arg-types
for k, v in x.sym_items():
if not traverse(v, preorder_visitor_fn, postorder_visitor_fn,
object_utils.KeyPath(k, root_path), x):
preorder_action = TraverseAction.STOP
break
postorder_action = postorder_visitor_fn(root_path, x, parent)
if (preorder_action == TraverseAction.STOP or
postorder_action == TraverseAction.STOP):
return False
return True
|
(x: Any, preorder_visitor_fn: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, Any, Any], Optional[pyglove.core.symbolic.base.TraverseAction]]] = None, postorder_visitor_fn: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, Any, Any], Optional[pyglove.core.symbolic.base.TraverseAction]]] = None, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None, parent: Optional[Any] = None) -> bool
|
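Reusing `v` from the example above, a sketch of early termination via
`TraverseAction.STOP`; `pg.traverse` returns False when traversal is
stopped::

  first_int = []
  def find_first_int(k, v, p):
    if isinstance(v, int):
      first_int.append((k, v))
      return pg.TraverseAction.STOP  # Abort the whole traversal here.
    return pg.TraverseAction.ENTER

  completed = pg.traverse(v, find_first_int)
  assert not completed
  assert first_int == [('[0].a.x', 1)]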
41,699 |
pyglove.core.symbolic.object
|
use_init_args
|
Decorator for updating the `__init__` signature of a `pg.Object` subclass.
Examples::
@pg.use_init_args(['x', 'y', '*z'])
class Foo(pg.Object):
y: int
x: str
z: list[int]
f = Foo('abc', 1, 2, 3)
assert f.x == 'abc'
assert f.y == 1
assert f.z == [2, 3]
Args:
init_arg_list: A sequence of attribute names that will be used as the
positional arguments of `__init__`. The last element could be the name of
a list-type attribute, indicating it's used as `*args`. Keyword-only
arguments do not need to be present in this list; they will be figured
out automatically based on the class' schema.
Returns:
a decorator function that updates the `__init__` signature.
|
def use_init_args(init_arg_list: Sequence[str]) -> pg_typing.Decorator:
"""Decorator for updating the `__init__` signature of a `pg.Object` subclass.
Examples::
@pg.use_init_args(['x', 'y', '*z'])
class Foo(pg.Object):
y: int
x: str
z: list[int]
f = Foo('abc', 1, 2, 3)
assert f.x == 'abc'
assert f.y == 1
assert f.z == [2, 3]
Args:
init_arg_list: A sequence of attribute names that will be used as the
positional arguments of `__init__`. The last element could be the name of
a list-type attribute, indicating it's used as `*args`. Keyword-only
arguments do not need to be present in this list; they will be figured
out automatically based on the class' schema.
Returns:
a decorator function that updates the `__init__` signature.
"""
def _decorator(cls):
schema_utils.update_schema(
cls, [], extend=True, init_arg_list=init_arg_list
)
return cls
return typing.cast(pg_typing.Decorator, _decorator)
|
(init_arg_list: Sequence[str]) -> pg_typing.Decorator
|
41,700 |
pyglove.core.symbolic.class_wrapper
|
wrap
|
Makes a symbolic class wrapper from a regular Python class.
``pg.wrap`` is called by :func:`pyglove.symbolize` for symbolizing existing
Python classes. For example::
class A:
def __init__(self, x):
self.x = x
# The following two lines are equivalent.
A1 = pg.symbolize(A)
A2 = pg.wrap(A)
Besides passing the source class, ``pg.wrap`` allows the user to pass symbolic
field definitions for the init arguments. For example::
A3 = pg.wrap(A, [
('x', pg.typing.Int())
])
Moreover, multiple flags are provided to determine whether or not to use the
symbolic operations as the default behaviors. For example::
A4 = pg.wrap(
A,
[],
# Instead of clearing out all internal states (the default),
# do not reset internal state.
reset_state_fn=lambda self: None,
# Use symbolic representation for __repr__ and __str__.
repr=True,
# use symbolic equality for __eq__, __ne__ and __hash__.
eq=True,
# Customize the class name (the default behavior
# is to use the source class name).
class_name='A4',
# Customize the module name for created class (the default
# behavior is to use the source module name).
module_name='my_module')
Args:
cls: Class to wrap.
init_args: An optional list of field definitions for the arguments of
__init__. It can be a sparse list of value specifications for arguments in
the __init__ method of `cls`.
reset_state_fn: An optional callable object to reset the internal state of
the user class when rebind happens.
repr: Options for generating `__repr__` and `__str__`. If True (default),
use symbolic representation if the user class does not define its own.
Otherwise use the user class' definition. If False, always use
non-symbolic representations, which falls back to `object.__repr__` and
`object.__str__` if the user class does not define them.
eq: Options for generating `__eq__`, `__ne__` and `__hash__`. If True and
the `user_cls` defines `__eq__`, `__ne__` and `__hash__`, use the
definitions from the `user_cls`. If True and the `user_cls` does not
define `__eq__`, `__ne__` and `__hash__`, use symbolic eq/hash. If False
(default), use `user_cls`'s definition if present, or the definitions from
the `object` class.
class_name: An optional string used as class name for the wrapper class. If
None, the wrapper class will use the class name of the wrapped class.
module_name: An optional string used as module name for the wrapper class.
If None, the wrapper class will use the module name of the wrapped class.
auto_doc: If True, the descriptions for init argument fields will be
extracted from docstring if present.
auto_typing: If True, PyGlove typing (runtime-typing) will be enabled based
on type annotations inspected from the `__init__` method.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios where we want to relocate a class:
before the downstream can recognize the new location, we need the class to
be serialized using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
override: Additional class attributes to override.
Returns:
A subclass of `cls` and `ClassWrapper`.
Raises:
TypeError: input `cls` is not a class.
|
def wrap(
cls,
init_args: Optional[List[Union[
Tuple[Union[Text, pg_typing.KeySpec], pg_typing.ValueSpec, Text],
Tuple[Union[Text, pg_typing.KeySpec], pg_typing.ValueSpec, Text, Any]
]]] = None,
*,
reset_state_fn: Optional[Callable[[Any], None]] = None,
repr: bool = True, # pylint: disable=redefined-builtin
eq: bool = False,
class_name: Optional[str] = None,
module_name: Optional[str] = None,
auto_doc: bool = False,
auto_typing: bool = False,
serialization_key: Optional[str] = None,
additional_keys: Optional[List[str]] = None,
override: Optional[Dict[str, Any]] = None
) -> Type['ClassWrapper']:
"""Makes a symbolic class wrapper from a regular Python class.
``pg.wrap`` is called by :func:`pyglove.symbolize` for symbolizing existing
Python classes. For example::
class A:
def __init__(self, x):
self.x = x
# The following two lines are equivalent.
A1 = pg.symbolize(A)
A2 = pg.wrap(A)
Besides passing the source class, ``pg.wrap`` allows the user to pass symbolic
field definitions for the init arguments. For example::
A3 = pg.wrap(A, [
('x', pg.typing.Int())
])
Moreover, multiple flags are provided to determine whether or not to use the
symbolic operations as the default behaviors. For example::
A4 = pg.wrap(
A,
[],
# Instead of clearing out all internal states (the default),
# do not reset internal state.
reset_state_fn=lambda self: None,
# Use symbolic representation for __repr__ and __str__.
repr=True,
# use symbolic equality for __eq__, __ne__ and __hash__.
eq=True,
# Customize the class name (the default behavior
# is to use the source class name).
class_name='A4',
# Customize the module name for created class (the default
# behavior is to use the source module name).
module_name='my_module')
Args:
cls: Class to wrap.
init_args: An optional list of field definitions for the arguments of
__init__. It can be a sparse list of value specifications for arguments in
the __init__ method of `cls`.
reset_state_fn: An optional callable object to reset the internal state of
the user class when rebind happens.
repr: Options for generating `__repr__` and `__str__`. If True (default),
use symbolic representation if the user class does not define its own.
Otherwise use the user class' definition. If False, always use
non-symbolic representations, which falls back to `object.__repr__` and
`object.__str__` if the user class does not define them.
eq: Options for generating `__eq__`, `__ne__` and `__hash__`. If True and
the `user_cls` defines `__eq__`, `__ne__` and `__hash__`, use the
definitions from the `user_cls`. If True and the `user_cls` does not
define `__eq__`, `__ne__` and `__hash__`, use symbolic eq/hash. If False
(default), use `user_cls`'s definition if present, or the definitions from
the `object` class.
class_name: An optional string used as class name for the wrapper class. If
None, the wrapper class will use the class name of the wrapped class.
module_name: An optional string used as module name for the wrapper class.
If None, the wrapper class will use the module name of the wrapped class.
auto_doc: If True, the descriptions for init argument fields will be
extracted from docstring if present.
auto_typing: If True, PyGlove typing (runtime-typing) will be enabled based
on type annotations inspected from the `__init__` method.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios where we want to relocate a class:
before the downstream can recognize the new location, we need the class to
be serialized using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
override: Additional class attributes to override.
Returns:
A subclass of `cls` and `ClassWrapper`.
Raises:
TypeError: input `cls` is not a class.
"""
if not inspect.isclass(cls):
raise TypeError(f'Class wrapper can only be created from classes. '
f'Encountered: {cls!r}.')
if not issubclass(cls, ClassWrapper):
cls = _subclassed_wrapper(
cls,
use_symbolic_repr=repr,
use_symbolic_comp=eq,
reset_state_fn=reset_state_fn,
class_name=class_name,
module_name=module_name,
use_auto_doc=auto_doc,
use_auto_typing=auto_typing)
if issubclass(cls, ClassWrapper):
# Update init argument specifications according to user specified specs.
# Replace schema instead of extending it.
description, init_arg_list, arg_fields = _extract_init_signature(
cls, init_args, auto_doc=auto_doc, auto_typing=auto_typing)
schema_utils.update_schema(
cls,
arg_fields,
init_arg_list=init_arg_list,
extend=False,
description=description,
serialization_key=serialization_key,
additional_keys=additional_keys)
if override:
for k, v in override.items():
setattr(cls, k, v)
return cls
|
(cls, init_args: Optional[List[Union[Tuple[Union[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str], Tuple[Union[str, pyglove.core.typing.class_schema.KeySpec], pyglove.core.typing.class_schema.ValueSpec, str, Any]]]] = None, *, reset_state_fn: Optional[Callable[[Any], NoneType]] = None, repr: bool = True, eq: bool = False, class_name: Optional[str] = None, module_name: Optional[str] = None, auto_doc: bool = False, auto_typing: bool = False, serialization_key: Optional[str] = None, additional_keys: Optional[List[str]] = None, override: Optional[Dict[str, Any]] = None) -> Type[pyglove.core.symbolic.class_wrapper.ClassWrapper]
|
41,701 |
pyglove.core.symbolic.class_wrapper
|
wrap_module
|
Wrap classes from a module.
For example, users can wrap all subclasses of `xxx.Base` under module `xxx`::
import xxx
pg.wrap_module(
xxx, where=lambda c: issubclass(c, xxx.Base))
Args:
module: A container that contains classes to wrap.
names: An optional list of class names. If not provided, all classes under
`module` will be considered candidates.
where: An optional filter function with signature `(user_class) -> bool`.
Only the classes under `module` with a True return value will be wrapped.
export_to: An optional module to export the wrapper classes to.
**kwargs: Keyword arguments passed to `wrap`.
Returns:
Wrapper classes.
|
def wrap_module(
module,
names: Optional[Sequence[Text]] = None,
where: Optional[Callable[[Type['ClassWrapper']], bool]] = None,
export_to: Optional[types.ModuleType] = None,
**kwargs):
"""Wrap classes from a module.
For example, users can wrap all subclasses of `xxx.Base` under module `xxx`::
import xxx
pg.wrap_module(
xxx, where=lambda c: issubclass(c, xxx.Base))
Args:
module: A container that contains classes to wrap.
names: An optional list of class names. If not provided, all classes under
`module` will be considered candidates.
where: An optional filter function with signature `(user_class) -> bool`.
Only the classes under `module` with a True return value will be wrapped.
export_to: An optional module to export the wrapper classes to.
**kwargs: Keyword arguments passed to `wrap`.
Returns:
Wrapper classes.
"""
wrapper_classes = []
module_name = export_to.__name__ if export_to else None
origin_cls_to_wrap_cls = {}
for symbol_name in (names or dir(module)):
s = getattr(module, symbol_name)
if inspect.isclass(s) and (not where or where(s)):
# NOTE(daiyip): It's possible that a name under a module is an alias for
# another class. In such cases, we do not create duplicated wrappers but
# share the same wrapper class under different names.
if s in origin_cls_to_wrap_cls:
wrapper_class = origin_cls_to_wrap_cls[s]
else:
wrapper_class = wrap(s, module_name=module_name, **kwargs)
origin_cls_to_wrap_cls[s] = wrapper_class
wrapper_classes.append(wrapper_class)
if export_to:
setattr(export_to, symbol_name, wrapper_class)
return wrapper_classes
|
(module, names: Optional[Sequence[str]] = None, where: Optional[Callable[[Type[pyglove.core.symbolic.class_wrapper.ClassWrapper]], bool]] = None, export_to: Optional[module] = None, **kwargs)
|
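A hypothetical sketch, assuming a module `my_lib` whose classes `Foo` and
`Bar` derive from `my_lib.Base`; `export_to` re-binds the wrapped names in
the current module::

  import sys
  import my_lib

  wrappers = pg.wrap_module(
      my_lib,
      names=['Foo', 'Bar'],
      where=lambda c: issubclass(c, my_lib.Base),
      export_to=sys.modules[__name__])

  # `Foo` now refers to the symbolic wrapper class, so instances support
  # symbolic operations such as `rebind`.
  f = Foo(x=1)  # Hypothetical constructor argument.
  f.rebind(x=2)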
41,703 |
builtins
|
type
|
type(object) -> the object's type
type(name, bases, dict, **kwds) -> a new type
|
from builtins import type
| null |
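The two forms in action; the 3-argument form below is equivalent to
statically defining `class Point: dims = 2`::

  assert type(42) is int                  # 1-arg form: query an object's type.
  Point = type('Point', (), {'dims': 2})  # 3-arg form: create a new class.
  assert Point.dims == 2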
41,707 |
comm.base_comm
|
BaseComm
|
Class for communicating between a Frontend and a Kernel.
Must be subclassed with a `publish_msg` method implementation which
sends comm messages through the iopub channel.
|
class BaseComm:
"""Class for communicating between a Frontend and a Kernel
Must be subclassed with a publish_msg method implementation which
sends comm messages through the iopub channel.
"""
def __init__(
self,
target_name: str = "comm",
data: MaybeDict = None,
metadata: MaybeDict = None,
buffers: BuffersType = None,
comm_id: str | None = None,
primary: bool = True,
target_module: str | None = None,
topic: bytes | None = None,
_open_data: MaybeDict = None,
_close_data: MaybeDict = None,
**kwargs: t.Any,
) -> None:
super().__init__(**kwargs)
self.comm_id = comm_id if comm_id else uuid.uuid4().hex
self.primary = primary
self.target_name = target_name
self.target_module = target_module
self.topic = topic if topic else ("comm-%s" % self.comm_id).encode("ascii")
self._open_data = _open_data if _open_data else {}
self._close_data = _close_data if _close_data else {}
self._msg_callback: CommCallback | None = None
self._close_callback: CommCallback | None = None
self._closed = True
if self.primary:
# I am primary, open my peer.
self.open(data=data, metadata=metadata, buffers=buffers)
else:
self._closed = False
def publish_msg(
self,
msg_type: str, # noqa: ARG002
data: MaybeDict = None, # noqa: ARG002
metadata: MaybeDict = None, # noqa: ARG002
buffers: BuffersType = None, # noqa: ARG002
**keys: t.Any, # noqa: ARG002
) -> None:
msg = "publish_msg Comm method is not implemented"
raise NotImplementedError(msg)
def __del__(self) -> None:
"""trigger close on gc"""
with contextlib.suppress(Exception):
# any number of things can have gone horribly wrong
# when called during interpreter teardown
self.close(deleting=True)
# publishing messages
def open(
self, data: MaybeDict = None, metadata: MaybeDict = None, buffers: BuffersType = None
) -> None:
"""Open the frontend-side version of this comm"""
if data is None:
data = self._open_data
comm_manager = comm.get_comm_manager()
if comm_manager is None:
msg = "Comms cannot be opened without a comm_manager." # type:ignore[unreachable]
raise RuntimeError(msg)
comm_manager.register_comm(self)
try:
self.publish_msg(
"comm_open",
data=data,
metadata=metadata,
buffers=buffers,
target_name=self.target_name,
target_module=self.target_module,
)
self._closed = False
except Exception:
comm_manager.unregister_comm(self)
raise
def close(
self,
data: MaybeDict = None,
metadata: MaybeDict = None,
buffers: BuffersType = None,
deleting: bool = False,
) -> None:
"""Close the frontend-side version of this comm"""
if self._closed:
# only close once
return
self._closed = True
if data is None:
data = self._close_data
self.publish_msg(
"comm_close",
data=data,
metadata=metadata,
buffers=buffers,
)
if not deleting:
# If deleting, the comm can't be registered
comm.get_comm_manager().unregister_comm(self)
def send(
self, data: MaybeDict = None, metadata: MaybeDict = None, buffers: BuffersType = None
) -> None:
"""Send a message to the frontend-side version of this comm"""
self.publish_msg(
"comm_msg",
data=data,
metadata=metadata,
buffers=buffers,
)
# registering callbacks
def on_close(self, callback: CommCallback | None) -> None:
"""Register a callback for comm_close
Will be called with the `data` of the close message.
Call `on_close(None)` to disable an existing callback.
"""
self._close_callback = callback
def on_msg(self, callback: CommCallback | None) -> None:
"""Register a callback for comm_msg
Will be called with the `data` of any comm_msg messages.
Call `on_msg(None)` to disable an existing callback.
"""
self._msg_callback = callback
# handling of incoming messages
def handle_close(self, msg: MessageType) -> None:
"""Handle a comm_close message"""
logger.debug("handle_close[%s](%s)", self.comm_id, msg)
if self._close_callback:
self._close_callback(msg)
def handle_msg(self, msg: MessageType) -> None:
"""Handle a comm_msg message"""
logger.debug("handle_msg[%s](%s)", self.comm_id, msg)
if self._msg_callback:
from IPython import get_ipython
shell = get_ipython()
if shell:
shell.events.trigger("pre_execute")
self._msg_callback(msg)
if shell:
shell.events.trigger("post_execute")
|
(target_name: 'str' = 'comm', data: 'MaybeDict' = None, metadata: 'MaybeDict' = None, buffers: 'BuffersType' = None, comm_id: 'str | None' = None, primary: 'bool' = True, target_module: 'str | None' = None, topic: 'bytes | None' = None, _open_data: 'MaybeDict' = None, _close_data: 'MaybeDict' = None, **kwargs: 't.Any') -> 'None'
|
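A minimal sketch of a concrete subclass; instead of publishing on a real
iopub channel it records messages in a list, so the comm lifecycle can be
observed in tests (the names `EchoComm` and `sent` are illustrative only)::

  import typing as t
  from comm.base_comm import BaseComm

  class EchoComm(BaseComm):
      def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
          # Create the log before super().__init__, which already publishes
          # a `comm_open` message when `primary=True`.
          self.sent: list[dict[str, t.Any]] = []
          super().__init__(*args, **kwargs)

      def publish_msg(
          self,
          msg_type: str,
          data=None,
          metadata=None,
          buffers=None,
          **keys: t.Any,
      ) -> None:
          # A real kernel would send this over the iopub channel;
          # here we just record it.
          self.sent.append({'msg_type': msg_type, 'data': data, **keys})

  c = EchoComm(target_name='echo', data={'hello': 1})
  c.send(data={'x': 2})
  c.close()
  assert [m['msg_type'] for m in c.sent] == [
      'comm_open', 'comm_msg', 'comm_close']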
41,708 |
comm.base_comm
|
__del__
|
trigger close on gc
|
def __del__(self) -> None:
"""trigger close on gc"""
with contextlib.suppress(Exception):
# any number of things can have gone horribly wrong
# when called during interpreter teardown
self.close(deleting=True)
|
(self) -> NoneType
|
41,709 |
comm.base_comm
|
__init__
| null |
def __init__(
self,
target_name: str = "comm",
data: MaybeDict = None,
metadata: MaybeDict = None,
buffers: BuffersType = None,
comm_id: str | None = None,
primary: bool = True,
target_module: str | None = None,
topic: bytes | None = None,
_open_data: MaybeDict = None,
_close_data: MaybeDict = None,
**kwargs: t.Any,
) -> None:
super().__init__(**kwargs)
self.comm_id = comm_id if comm_id else uuid.uuid4().hex
self.primary = primary
self.target_name = target_name
self.target_module = target_module
self.topic = topic if topic else ("comm-%s" % self.comm_id).encode("ascii")
self._open_data = _open_data if _open_data else {}
self._close_data = _close_data if _close_data else {}
self._msg_callback: CommCallback | None = None
self._close_callback: CommCallback | None = None
self._closed = True
if self.primary:
# I am primary, open my peer.
self.open(data=data, metadata=metadata, buffers=buffers)
else:
self._closed = False
|
(self, target_name: str = 'comm', data: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, buffers: Optional[List[bytes]] = None, comm_id: Optional[str] = None, primary: bool = True, target_module: Optional[str] = None, topic: Optional[bytes] = None, _open_data: Optional[Dict[str, Any]] = None, _close_data: Optional[Dict[str, Any]] = None, **kwargs: Any) -> NoneType
|
41,710 |
comm.base_comm
|
close
|
Close the frontend-side version of this comm
|
def close(
self,
data: MaybeDict = None,
metadata: MaybeDict = None,
buffers: BuffersType = None,
deleting: bool = False,
) -> None:
"""Close the frontend-side version of this comm"""
if self._closed:
# only close once
return
self._closed = True
if data is None:
data = self._close_data
self.publish_msg(
"comm_close",
data=data,
metadata=metadata,
buffers=buffers,
)
if not deleting:
# If deleting, the comm can't be registered
comm.get_comm_manager().unregister_comm(self)
|
(self, data: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, buffers: Optional[List[bytes]] = None, deleting: bool = False) -> NoneType
|
41,711 |
comm.base_comm
|
handle_close
|
Handle a comm_close message
|
def handle_close(self, msg: MessageType) -> None:
"""Handle a comm_close message"""
logger.debug("handle_close[%s](%s)", self.comm_id, msg)
if self._close_callback:
self._close_callback(msg)
|
(self, msg: Dict[str, Any]) -> NoneType
|