index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
40,642 |
pyglove.core.symbolic.dict
|
_sym_getattr
|
Gets symbolic attribute by key.
|
def _sym_getattr(  # pytype: disable=signature-mismatch # overriding-parameter-type-checks
    self, key: str) -> Any:
  """Gets symbolic attribute by key.

  Delegates straight to the underlying dict storage via
  ``super().__getitem__``, bypassing symbolic access hooks on this class.
  """
  return super().__getitem__(key)
|
(self, key: str) -> Any
|
40,644 |
pyglove.core.symbolic.dict
|
_sym_missing
|
Returns missing values.
Returns:
A dict of key to MISSING_VALUE.
|
def _sym_missing(self) -> typing.Dict[str, Any]:
  """Returns missing values.

  Returns:
    A dict of key to MISSING_VALUE.
  """
  missing = dict()
  if self._value_spec and self._value_spec.schema:
    # Schema-bound dict: resolve present keys against the schema and report
    # either the field default (for missing leaves) or the child's own
    # missing map (for symbolic containers).
    matched_keys, _ = self._value_spec.schema.resolve(self.keys())
    for key_spec, keys in matched_keys.items():
      field = self._value_spec.schema[key_spec]
      # A const key spec must match at least one key; non-const key specs
      # may legitimately match none.
      assert keys or isinstance(key_spec, pg_typing.NonConstKey), key_spec
      if keys:
        for key in keys:
          v = self.sym_getattr(key)
          if object_utils.MISSING_VALUE == v:
            missing[key] = field.value.default
          else:
            if isinstance(v, base.Symbolic):
              missing_child = v.sym_missing(flatten=False)
              if missing_child:
                missing[key] = missing_child
  else:
    # Schema-less dict: only recurse into symbolic children.
    for k, v in self.sym_items():
      if isinstance(v, base.Symbolic):
        missing_child = v.sym_missing(flatten=False)
        if missing_child:
          missing[k] = missing_child
  return missing
|
(self) -> Dict[str, Any]
|
40,645 |
pyglove.core.symbolic.dict
|
_sym_nondefault
|
Returns non-default values as key/value pairs in a dict.
|
def _sym_nondefault(self) -> typing.Dict[str, Any]:
  """Returns non-default values as key/value pairs in a dict."""
  non_defaults = dict()
  if self._value_spec is not None and self._value_spec.schema:
    # Schema-bound dict: compare each value against its field's declared
    # default via `_diff_base`; MISSING_VALUE means "no difference".
    dict_schema = self._value_spec.schema
    matched_keys, _ = dict_schema.resolve(self.keys())
    for key_spec, keys in matched_keys.items():
      value_spec = dict_schema[key_spec].value
      for key in keys:
        diff = self._diff_base(self.sym_getattr(key), value_spec.default)
        if pg_typing.MISSING_VALUE != diff:
          non_defaults[key] = diff
  else:
    # Schema-less dict: there is no notion of a default, so every value is
    # non-default; recurse into symbolic children.
    for k, v in self.sym_items():
      if isinstance(v, base.Symbolic):
        non_defaults_child = v.sym_nondefault(flatten=False)
        if non_defaults_child:
          non_defaults[k] = non_defaults_child
      else:
        non_defaults[k] = v
  return non_defaults
|
(self) -> Dict[str, Any]
|
40,646 |
pyglove.core.symbolic.dict
|
_sym_parent_for_children
| null |
def _sym_parent_for_children(self) -> Optional[base.Symbolic]:
  """Returns the parent to assign to child symbolic values.

  When this dict acts as an object's attribute container, children are
  parented to the owning object (this dict's own parent) instead of the
  dict itself.
  """
  return self.sym_parent if self._as_object_attributes_container else self
|
(self) -> Optional[pyglove.core.symbolic.base.Symbolic]
|
40,647 |
pyglove.core.symbolic.dict
|
_sym_rebind
|
Subclass specific rebind implementation.
|
def _sym_rebind(
    self, path_value_pairs: typing.Dict[object_utils.KeyPath, Any]
) -> List[base.FieldUpdate]:
  """Subclass specific rebind implementation.

  Applies each (path, value) pair onto the current tree and collects the
  resulting field updates, skipping assignments that produced no update.
  """
  field_updates = []
  for path, value in path_value_pairs.items():
    field_update = self._set_item_of_current_tree(path, value)
    if field_update is not None:
      field_updates.append(field_update)
  return field_updates
|
(self, path_value_pairs: Dict[pyglove.core.object_utils.value_location.KeyPath, Any]) -> List[pyglove.core.symbolic.base.FieldUpdate]
|
40,648 |
pyglove.core.symbolic.dict
|
_update_children_paths
|
Update children paths according to root_path of current node.
|
def _update_children_paths(
    self,
    old_path: object_utils.KeyPath,
    new_path: object_utils.KeyPath) -> None:
  """Update children paths according to root_path of current node."""
  # The old path is irrelevant: each child's path is recomputed purely from
  # the new root path plus its own key.
  del old_path
  for k, v in self.sym_items():
    if isinstance(v, base.TopologyAware):
      v.sym_setpath(object_utils.KeyPath(k, new_path))
|
(self, old_path: pyglove.core.object_utils.value_location.KeyPath, new_path: pyglove.core.object_utils.value_location.KeyPath) -> NoneType
|
40,649 |
pyglove.core.symbolic.dict
|
clear
|
Removes all the keys in current dict.
|
def clear(self) -> None:
  """Removes all the keys in current dict.

  Raises:
    WritePermissionError: if this Dict is sealed.
  """
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot clear a sealed Dict.')
  # Temporarily detach the value spec so schema-constrained keys may be
  # removed, then re-apply it afterwards to restore schema state.
  value_spec = self._value_spec
  self._value_spec = None
  super().clear()
  if value_spec:
    self.use_value_spec(value_spec, self._allow_partial)
|
(self) -> NoneType
|
40,651 |
pyglove.core.symbolic.dict
|
copy
|
Overridden copy using symbolic copy.
|
def copy(self) -> 'Dict':
  """Overridden copy using symbolic copy.

  Returns a shallow symbolic clone rather than a plain ``dict`` copy.
  """
  return self.sym_clone(deep=False)
|
(self) -> pyglove.core.symbolic.dict.Dict
|
40,652 |
pyglove.core.symbolic.dict
|
custom_apply
|
Implement pg.typing.CustomTyping interface.
Args:
path: KeyPath of current object.
value_spec: Origin value spec of the field.
allow_partial: Whether allow partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
Returns:
A tuple (proceed_with_standard_apply, transformed value)
|
def custom_apply(
    self,
    path: object_utils.KeyPath,
    value_spec: pg_typing.ValueSpec,
    allow_partial: bool,
    child_transform: Optional[
        Callable[[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, 'Dict']:
  """Implement pg.typing.CustomTyping interface.

  Args:
    path: KeyPath of current object.
    value_spec: Origin value spec of the field.
    allow_partial: Whether allow partial object to be created.
    child_transform: Function to transform child node values in dict_obj into
      their final values. Transform function is called on leaf nodes first,
      then on their containers, recursively.

  Returns:
    A tuple (proceed_with_standard_apply, transformed value)

  Raises:
    ValueError: if this Dict is already bound to an incompatible value spec.
  """
  proceed_with_standard_apply = True
  if self._value_spec:
    # Already bound: the field's spec must be compatible with ours.
    if value_spec and not value_spec.is_compatible(self._value_spec):
      raise ValueError(
          object_utils.message_on_path(
              f'Dict (spec={self._value_spec!r}) cannot be assigned to an '
              f'incompatible field (spec={value_spec!r}).', path))
    if self._allow_partial == allow_partial:
      # Same partiality as before: values are already validated, so the
      # standard apply can be skipped.
      proceed_with_standard_apply = False
    else:
      self._allow_partial = allow_partial
  elif isinstance(value_spec, pg_typing.Dict):
    # Unbound dict: adopt the field's spec as its own.
    self._value_spec = value_spec
  return (proceed_with_standard_apply, self)
|
(self, path: pyglove.core.object_utils.value_location.KeyPath, value_spec: pyglove.core.typing.class_schema.ValueSpec, allow_partial: bool, child_transform: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None) -> Tuple[bool, pyglove.core.symbolic.dict.Dict]
|
40,653 |
pyglove.core.symbolic.dict
|
format
|
Formats this Dict.
|
def format(
    self,
    compact: bool = False,
    verbose: bool = True,
    root_indent: int = 0,
    *,
    python_format: bool = False,
    hide_default_values: bool = False,
    hide_missing_values: bool = False,
    include_keys: Optional[Set[str]] = None,
    exclude_keys: Optional[Set[str]] = None,
    use_inferred: bool = False,
    cls_name: Optional[str] = None,
    bracket_type: object_utils.BracketType = object_utils.BracketType.CURLY,
    key_as_attribute: bool = False,
    extra_blankline_for_field_docstr: bool = False,
    **kwargs) -> str:
  """Formats this Dict.

  Args:
    compact: If True, renders on a single line; otherwise multi-line.
    verbose: If True, field descriptions are emitted as comments in
      non-compact mode.
    root_indent: Indent level (2 spaces per level) of this node.
    python_format: If True, renders in Python-literal style.
    hide_default_values: Skip fields whose value equals the field default.
    hide_missing_values: Skip fields whose value is MISSING_VALUE.
    include_keys: If non-empty, only these keys are rendered.
    exclude_keys: Keys to omit (ignored when `include_keys` is given).
    use_inferred: If True, inferential values are resolved before rendering.
    cls_name: Optional class name prefix before the opening bracket.
    bracket_type: Bracket style surrounding the fields.
    key_as_attribute: If True (with `python_format`), renders `k=v` instead
      of `'k': v`.
    extra_blankline_for_field_docstr: Insert a blank line before each
      field's description comment (non-compact, verbose mode).
    **kwargs: Passed through to child `format` calls.

  Returns:
    The formatted string.
  """
  cls_name = cls_name or ''
  exclude_keys = exclude_keys or set()

  def _indent(text, indent):
    return ' ' * 2 * indent + text

  def _should_include_key(key):
    # `include_keys` (when non-empty) takes precedence over `exclude_keys`.
    if include_keys:
      return key in include_keys
    return key not in exclude_keys

  # Collect (field, key, value) triples to render, applying the
  # include/exclude and hide filters.
  field_list = []
  if self._value_spec and self._value_spec.schema:
    matched_keys, unmatched = self._value_spec.schema.resolve(self.keys())  # pytype: disable=attribute-error
    assert not unmatched
    for key_spec, keys in matched_keys.items():
      for key in keys:
        if _should_include_key(key):
          field = self._value_spec.schema[key_spec]
          v = self.sym_getattr(key)
          if use_inferred and isinstance(v, base.Inferential):
            v = self.sym_inferred(key, default=v)
          if pg_typing.MISSING_VALUE == v:
            if hide_missing_values:
              continue
          elif hide_default_values and base.eq(v, field.default_value):
            continue
          field_list.append((field, key, v))
  else:
    for k, v in self.sym_items():
      if _should_include_key(k):
        if use_inferred and isinstance(v, base.Inferential):
          v = self.sym_inferred(k, default=v)
        field_list.append((None, k, v))
  open_bracket, close_bracket = object_utils.bracket_chars(bracket_type)
  if not field_list:
    return f'{cls_name}{open_bracket}{close_bracket}'
  if compact:
    # Single-line rendering: 'name{k=v, ...}'.
    s = [f'{cls_name}{open_bracket}']
    kv_strs = []
    for _, k, v in field_list:
      v_str = object_utils.format(
          v,
          compact,
          verbose,
          root_indent + 1,
          hide_default_values=hide_default_values,
          hide_missing_values=hide_missing_values,
          python_format=python_format,
          use_inferred=use_inferred,
          extra_blankline_for_field_docstr=extra_blankline_for_field_docstr,
          **kwargs)
      if not python_format or key_as_attribute:
        kv_strs.append(f'{k}={v_str}')
      else:
        kv_strs.append(f'\'{k}\': {v_str}')
    s.append(', '.join(kv_strs))
    s.append(close_bracket)
  else:
    # Multi-line rendering, one field per line, with optional field
    # description comments.
    s = [f'{cls_name}{open_bracket}\n']
    for i, (f, k, v) in enumerate(field_list):
      if i != 0:
        s.append(',\n')
      if verbose and f and typing.cast(pg_typing.Field, f).description:
        if i != 0 and extra_blankline_for_field_docstr:
          s.append('\n')
        description = typing.cast(pg_typing.Field, f).description
        for line in description.split('\n'):
          s.append(_indent(f'# {line}\n', root_indent + 1))
      v_str = object_utils.format(
          v,
          compact,
          verbose,
          root_indent + 1,
          hide_default_values=hide_default_values,
          hide_missing_values=hide_missing_values,
          python_format=python_format,
          use_inferred=use_inferred,
          extra_blankline_for_field_docstr=extra_blankline_for_field_docstr,
          **kwargs)
      if not python_format:
        # Format in PyGlove's format (default).
        s.append(_indent(f'{k} = {v_str}', root_indent + 1))
      elif key_as_attribute:
        # Format `pg.Objects` under Python format.
        s.append(_indent(f'{k}={v_str}', root_indent + 1))
      else:
        # Format regular `pg.Dict` under Python format.
        s.append(_indent(f'\'{k}\': {v_str}', root_indent + 1))
    s.append('\n')
    s.append(_indent(close_bracket, root_indent))
  return ''.join(s)
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, *, python_format: bool = False, hide_default_values: bool = False, hide_missing_values: bool = False, include_keys: Optional[Set[str]] = None, exclude_keys: Optional[Set[str]] = None, use_inferred: bool = False, cls_name: Optional[str] = None, bracket_type: pyglove.core.object_utils.formatting.BracketType = <BracketType.CURLY: 2>, key_as_attribute: bool = False, extra_blankline_for_field_docstr: bool = False, **kwargs) -> str
|
40,655 |
pyglove.core.symbolic.dict
|
items
|
Returns an iterator of (key, value) items in current dict.
|
def items(self) -> Iterator[Tuple[str, Any]]:  # pytype: disable=signature-mismatch
  """Returns an iterator of (key, value) items in current dict.

  NOTE: unlike ``dict.items`` this returns an iterator over symbolic
  items, not a view object.
  """
  return self.sym_items()
|
(self) -> Iterator[Tuple[str, Any]]
|
40,656 |
pyglove.core.symbolic.dict
|
keys
|
Returns an iterator of keys in current dict.
|
def keys(self) -> Iterator[str]:  # pytype: disable=signature-mismatch
  """Returns an iterator of keys in current dict.

  NOTE: unlike ``dict.keys`` this returns an iterator over symbolic keys,
  not a view object.
  """
  return self.sym_keys()
|
(self) -> Iterator[str]
|
40,659 |
pyglove.core.symbolic.dict
|
pop
|
Pops a key from current dict.
|
def pop(
    self, key: Any, default: Any = base.RAISE_IF_NOT_FOUND  # pylint: disable=protected-access
) -> Any:
  """Pops a key from current dict.

  Args:
    key: Key to remove.
    default: Value to return when the key is absent; when left at the
      sentinel `base.RAISE_IF_NOT_FOUND`, a KeyError is raised instead.

  Returns:
    The removed value, or `default` when the key is absent or its stored
    value is MISSING_VALUE.

  Raises:
    KeyError: if `key` does not exist and no default is provided.
  """
  if key in self:
    value = self[key]
    # Deletion requires writable accessors even on accessor-guarded dicts.
    with flags.allow_writable_accessors(True):
      del self[key]
    # A present-but-missing entry falls back to `default`.
    return value if value != pg_typing.MISSING_VALUE else default
  if default is base.RAISE_IF_NOT_FOUND:
    raise KeyError(key)
  return default
|
(self, key: Any, default: Any = (MISSING_VALUE,)) -> Any
|
40,660 |
pyglove.core.symbolic.dict
|
popitem
| null |
def popitem(self) -> Tuple[str, Any]:
  """Removes and returns a (key, value) pair with ``dict.popitem`` semantics.

  Returns:
    The removed (key, value) pair.

  Raises:
    ValueError: if this Dict is bound to a value spec.
    WritePermissionError: if this Dict is sealed.
  """
  if self._value_spec is not None:
    # Removing an arbitrary item could violate the schema, so this is only
    # allowed on schema-less dicts.
    raise ValueError(
        '\'popitem\' cannot be performed on a Dict with value spec.')
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot pop item from a sealed Dict.')
  return super().popitem()
|
(self) -> Tuple[str, Any]
|
40,663 |
pyglove.core.symbolic.dict
|
seal
|
Seals or unseals current object from further modification.
|
def seal(self, sealed: bool = True) -> 'Dict':
  """Seals or unseals current object from further modification.

  Recursively applies the same seal state to all symbolic child values.

  Returns:
    Self.
  """
  if self.is_sealed == sealed:
    # Already in the requested state; nothing to do.
    return self
  for v in self.sym_values():
    if isinstance(v, base.Symbolic):
      v.seal(sealed)
  super().seal(sealed)
  return self
|
(self, sealed: bool = True) -> pyglove.core.symbolic.dict.Dict
|
40,665 |
pyglove.core.symbolic.dict
|
setdefault
|
Sets default as the value to key if not present.
|
def setdefault(self, key: str, default: Any = None) -> Any:
  """Sets default as the value to key if not present.

  Mirrors ``dict.setdefault``: an absent key, or one whose stored value is
  MISSING_VALUE, is assigned `default`; otherwise the existing value is
  returned unchanged.
  """
  existing = self.sym_getattr(key) if key in self else pg_typing.MISSING_VALUE
  if existing == pg_typing.MISSING_VALUE:
    self[key] = default
    return default
  return existing
|
(self, key: str, default: Optional[Any] = None) -> Any
|
40,667 |
pyglove.core.symbolic.dict
|
sym_attr_field
|
Returns the field definition for a symbolic attribute.
|
def sym_attr_field(
    self, key: Union[str, int]
) -> Optional[pg_typing.Field]:
  """Returns the field definition for a symbolic attribute.

  Returns None when this dict has no value spec or schema to look up.
  """
  if self._value_spec is None or self._value_spec.schema is None:
    return None
  return self._value_spec.schema.get_field(key)  # pytype: disable=attribute-error
|
(self, key: Union[str, int]) -> Optional[pyglove.core.typing.class_schema.Field]
|
40,671 |
pyglove.core.symbolic.base
|
sym_eq
|
Returns if this object equals to another object symbolically.
|
def sym_eq(self, other: Any) -> bool:
  """Returns if this object equals to another object symbolically.

  Delegates to the module-level `eq` helper.
  """
  return eq(self, other)
|
(self, other: Any) -> bool
|
40,676 |
pyglove.core.symbolic.dict
|
sym_hasattr
|
Tests if a symbolic attribute exists.
|
def sym_hasattr(self, key: Union[str, int]) -> bool:
  """Tests if a symbolic attribute exists.

  For a dict, attribute existence is simply key membership.
  """
  return key in self
|
(self, key: Union[str, int]) -> bool
|
40,677 |
pyglove.core.symbolic.dict
|
sym_hash
|
Symbolic hashing.
|
def sym_hash(self) -> int:
  """Symbolic hashing.

  Hashes the class together with (key, symbolic-hash-of-value) pairs,
  excluding entries whose value is MISSING_VALUE.
  """
  return base.sym_hash(
      (self.__class__,
       tuple([(k, base.sym_hash(v)) for k, v in self.sym_items()
              if v != pg_typing.MISSING_VALUE])))
|
(self) -> int
|
40,680 |
pyglove.core.symbolic.dict
|
sym_items
|
Iterates the (key, value) pairs of symbolic attributes.
|
def sym_items(self) -> Iterator[
    Tuple[str, Any]]:
  """Iterates the (key, value) pairs of symbolic attributes.

  Yields pairs in `sym_keys` order, reading each value through the raw
  symbolic getter.
  """
  return ((key, self._sym_getattr(key)) for key in self.sym_keys())
|
(self) -> Iterator[Tuple[str, Any]]
|
40,681 |
pyglove.core.symbolic.dict
|
sym_jsonify
|
Converts current object to a dict with plain Python objects.
|
def sym_jsonify(
    self,
    hide_default_values: bool = False,
    exclude_keys: Optional[Sequence[str]] = None,
    use_inferred: bool = False,
    **kwargs) -> object_utils.JSONValueType:
  """Converts current object to a dict with plain Python objects.

  Args:
    hide_default_values: Skip entries whose value equals the field default
      (schema-bound dicts only).
    exclude_keys: Keys to omit from the output.
    use_inferred: If True, inferential values are resolved before
      serialization.
    **kwargs: Passed through to child `to_json` calls.

  Returns:
    A JSON-compatible dict representation.
  """
  exclude_keys = set(exclude_keys or [])
  if self._value_spec and self._value_spec.schema:
    json_repr = dict()
    matched_keys, _ = self._value_spec.schema.resolve(self.keys())  # pytype: disable=attribute-error
    for key_spec, keys in matched_keys.items():
      # NOTE(daiyip): The key values of frozen field can safely be excluded
      # since they will be the same for a class.
      field = self._value_spec.schema[key_spec]
      if not field.frozen:
        for key in keys:
          if key not in exclude_keys:
            value = self.sym_getattr(key)
            if use_inferred and isinstance(value, base.Inferential):
              value = self.sym_inferred(key, default=value)
            if pg_typing.MISSING_VALUE == value:
              # Missing values are simply omitted from the JSON form.
              continue
            if hide_default_values and base.eq(value, field.default_value):
              continue
            json_repr[key] = base.to_json(
                value, hide_default_values=hide_default_values,
                use_inferred=use_inferred,
                **kwargs)
    return json_repr
  else:
    # Schema-less dict: serialize every non-excluded entry.
    return {
        k: base.to_json(
            self.sym_inferred(k, default=v) if (
                use_inferred and isinstance(v, base.Inferential)) else v,
            hide_default_values=hide_default_values,
            use_inferred=use_inferred,
            **kwargs)
        for k, v in self.sym_items()
        if k not in exclude_keys
    }
|
(self, hide_default_values: bool = False, exclude_keys: Optional[Sequence[str]] = None, use_inferred: bool = False, **kwargs) -> Union[int, float, bool, str, List[Any], Dict[str, Any]]
|
40,682 |
pyglove.core.symbolic.dict
|
sym_keys
|
Iterates the keys of symbolic attributes.
|
def sym_keys(self) -> Iterator[str]:
  """Iterates the keys of symbolic attributes.

  For schema-bound dicts, keys declared as const string keys in the schema
  are yielded first (in schema declaration order), followed by any
  remaining keys in underlying dict order.
  """
  if self._value_spec is None or self._value_spec.schema is None:
    # Schema-less: plain insertion order.
    for key in super().__iter__():
      yield key
  else:
    traversed = set()
    for key_spec in self._value_spec.schema.keys():  # pytype: disable=attribute-error
      if isinstance(key_spec, pg_typing.ConstStrKey) and key_spec in self:
        yield key_spec.text
        traversed.add(key_spec.text)
    # Yield keys not covered by const key specs (e.g. pattern-matched keys).
    if len(traversed) < len(self):
      for key in super().__iter__():
        if key not in traversed:
          yield key
|
(self) -> Iterator[str]
|
40,683 |
pyglove.core.symbolic.base
|
sym_lt
|
Returns True if this object is symbolically less than other object.
|
def sym_lt(self, other: Any) -> bool:
  """Returns True if this object is symbolically less than other object.

  Delegates to the module-level `lt` helper.
  """
  return lt(self, other)
|
(self, other: Any) -> bool
|
40,690 |
pyglove.core.symbolic.dict
|
sym_setparent
|
Override set parent of Dict to handle the passing through scenario.
|
def sym_setparent(self, parent: base.Symbolic):
  """Override set parent of Dict to handle the passing through scenario."""
  super().sym_setparent(parent)
  # NOTE(daiyip): when flag `as_object_attributes_container` is on, it sets
  # the parent of child symbolic values using its parent.
  if self._as_object_attributes_container:
    for v in self.sym_values():
      if isinstance(v, base.TopologyAware):
        v.sym_setparent(parent)
|
(self, parent: pyglove.core.symbolic.base.Symbolic)
|
40,692 |
pyglove.core.symbolic.dict
|
sym_values
|
Iterates the values of symbolic attributes.
|
def sym_values(self) -> Iterator[Any]:
  """Iterates the values of symbolic attributes.

  Yields values in `sym_keys` order, reading each through the raw symbolic
  getter.
  """
  return (self._sym_getattr(key) for key in self.sym_keys())
|
(self) -> Iterator[Any]
|
40,695 |
pyglove.core.symbolic.dict
|
update
|
Update Dict with the same semantic as update on standard dict.
|
def update(self,
           other: Union[
               None,
               typing.Dict[str, Any],
               Iterable[Tuple[str, Any]]] = None,
           **kwargs) -> None:  # pytype: disable=signature-mismatch
  """Update Dict with the same semantic as update on standard dict."""
  # Merge the positional mapping/pair-iterable first, then keyword
  # overrides, and apply everything in a single rebind with change
  # notifications suppressed.
  merged = {}
  if other:
    merged.update(dict(other))
  merged.update(kwargs)
  self.rebind(merged, raise_on_no_change=False, skip_notification=True)
|
(self, other: Union[NoneType, Iterable[Tuple[str, Any]], Dict[str, Any]] = None, **kwargs) -> NoneType
|
40,696 |
pyglove.core.symbolic.dict
|
use_value_spec
|
Applies a ``pg.typing.Dict`` as the value spec for current dict.
Args:
value_spec: A Dict ValueSpec to apply to this Dict.
If current Dict is schema-less (whose immediate members are not
validated against schema), and `value_spec` is not None, the value spec
will be applied to the Dict.
Or else if current Dict is already symbolic (whose immediate members
are under the constraint of a Dict value spec), and `value_spec` is
None, current Dict will become schema-less. However, the schema
constraints for non-immediate members will remain.
allow_partial: Whether allow partial dict based on the schema. This flag
will override allow_partial flag in __init__ for spec-less Dict.
Returns:
Self.
Raises:
ValueError: validation failed due to value error.
RuntimeError: Dict is already bound with another spec.
TypeError: type errors during validation.
KeyError: key errors during validation.
|
def use_value_spec(self,
                   value_spec: Optional[pg_typing.Dict],
                   allow_partial: bool = False) -> 'Dict':
  """Applies a ``pg.typing.Dict`` as the value spec for current dict.

  Args:
    value_spec: A Dict ValueSpec to apply to this Dict.
      If current Dict is schema-less (whose immediate members are not
      validated against schema), and `value_spec` is not None, the value spec
      will be applied to the Dict.
      Or else if current Dict is already symbolic (whose immediate members
      are under the constraint of a Dict value spec), and `value_spec` is
      None, current Dict will become schema-less. However, the schema
      constraints for non-immediate members will remain.
    allow_partial: Whether allow partial dict based on the schema. This flag
      will override allow_partial flag in __init__ for spec-less Dict.

  Returns:
    Self.

  Raises:
    ValueError: validation failed due to value error.
    RuntimeError: Dict is already bound with another spec.
    TypeError: type errors during validation.
    KeyError: key errors during validation.
  """
  if value_spec is None:
    # Detach the spec: the dict becomes schema-less and writable again.
    self._value_spec = None
    self._accessor_writable = True
    return self
  if not isinstance(value_spec, pg_typing.Dict):
    # Fixed: the message previously said 'list' although this method
    # belongs to Dict and requires a `pg.typing.Dict` spec.
    raise ValueError(
        self._error_message(
            f'Value spec for dict must be a `pg.typing.Dict` object. '
            f'Encountered: {value_spec!r}'))
  if self._value_spec and self._value_spec != value_spec:
    raise RuntimeError(
        self._error_message(
            f'Dict is already bound with a different value spec: '
            f'{self._value_spec}. New value spec: {value_spec}.'))
  self._allow_partial = allow_partial
  if flags.is_type_check_enabled():
    # NOTE(daiyip): self._value_spec will be set in Dict.custom_apply method
    # called by value_spec.apply, thus we don't need to set self._value_spec
    # explicitly.
    value_spec.apply(
        self,
        allow_partial=base.accepts_partial(self),
        child_transform=base.symbolic_transform_fn(self._allow_partial),
        root_path=self.sym_path)
  else:
    self._value_spec = value_spec
  return self
|
(self, value_spec: Optional[pyglove.core.typing.value_specs.Dict], allow_partial: bool = False) -> pyglove.core.symbolic.dict.Dict
|
40,697 |
pyglove.core.symbolic.dict
|
values
|
Returns an iterator of values in current dict.
|
def values(self) -> Iterator[Any]:  # pytype: disable=signature-mismatch
  """Returns an iterator of values in current dict.

  NOTE: unlike ``dict.values`` this returns an iterator over symbolic
  values, not a view object.
  """
  return self.sym_values()
|
(self) -> Iterator[Any]
|
40,698 |
pyglove.core.symbolic.diff
|
Diff
|
A value diff between two objects: a 'left' object and a 'right' object.
If one of them is missing, it may be represented by pg.Diff.MISSING
For example::
>>> pg.Diff(3.14, 1.618)
Diff(left=3.14, right=1.618)
>>> pg.Diff('hello world', pg.Diff.MISSING)
Diff(left='hello world', right=MISSING)
|
class Diff(PureSymbolic, pg_object.Object):
  """A value diff between two objects: a 'left' object and a 'right' object.

  If one of them is missing, it may be represented by pg.Diff.MISSING

  For example::

    >>> pg.Diff(3.14, 1.618)
    Diff(left=3.14, right=1.618)
    >>> pg.Diff('hello world', pg.Diff.MISSING)
    Diff(left='hello world', right=MISSING)
  """

  class _Missing:
    """Represents an absent party in a Diff."""

    def __repr__(self):
      return self.__str__()

    def __str__(self):
      return 'MISSING'

    def __eq__(self, other):
      # All _Missing instances compare equal to each other.
      return isinstance(other, Diff._Missing)

    def __ne__(self, other):
      return not self.__eq__(other)

  # Shared sentinel for an absent left or right value.
  MISSING = _Missing()

  def _on_bound(self):
    """Validates fields and resets the cached has-diff state."""
    super()._on_bound()
    if self.children:
      # When child diffs are present, `left` and `right` must hold the
      # container types being compared rather than leaf values.
      if not isinstance(self.left, type):
        raise ValueError(
            f'\'left\' must be a type when \'children\' is specified. '
            f'Encountered: {self.left!r}.')
      if not isinstance(self.right, type):
        raise ValueError(
            f'\'right\' must be a type when \'children\' is specified. '
            f'Encountered: {self.right!r}.')
    self._has_diff = None  # Lazily computed by __bool__.

  @property
  def is_leaf(self) -> bool:
    """Returns True if current Diff does not contain inner Diff object."""
    return not self.children

  def __bool__(self):
    """Returns True if there is a diff."""
    if self._has_diff is None:
      # Compute once and cache; the cache is reset by `_on_bound`.
      if base.ne(self.left, self.right):
        has_diff = True
      elif self.children:
        # A container diff exists if any child diff is non-empty.
        has_diff = any(bool(cd) for cd in self.children.values())
      else:
        has_diff = False
      self._has_diff = has_diff
    return self._has_diff

  def sym_eq(self, other: Any):
    """Override symbolic equality."""
    if super().sym_eq(other):
      return True
    if not bool(self):
      # A no-diff Diff compares equal to the shared value itself.
      return base.eq(self.left, other)
    # NOTE(review): implicitly returns None (falsy) when there is a diff
    # and symbolic equality fails — confirm this is intended over an
    # explicit `return False`.

  @property
  def value(self):
    """Returns the value if left and right are the same."""
    if bool(self):
      raise ValueError(
          f'\'value\' cannot be accessed when \'left\' and \'right\' '
          f'are not the same. Left={self.left!r}, Right={self.right!r}.')
    return self.left

  def format(
      self,
      compact: bool = False,
      verbose: bool = True,
      root_indent: int = 0,
      **kwargs):
    """Override format to conditionally print the shared value or the diff."""
    if not bool(self):
      if self.value == Diff.MISSING:
        return 'No diff'
      # When there is no diff, but the same value needs to be displayed
      # we simply return the value.
      return object_utils.format(
          self.value, compact, verbose, root_indent, **kwargs)
    if self.is_leaf:
      # Leaf diff: show left/right but hide the (empty) children field.
      exclude_keys = kwargs.pop('exclude_keys', None)
      exclude_keys = exclude_keys or set()
      exclude_keys.add('children')
      return super().format(
          compact, verbose, root_indent, exclude_keys=exclude_keys, **kwargs)
    else:
      assert isinstance(self.left, type)
      assert isinstance(self.right, type)
      if self.left is self.right and issubclass(self.left, list):
        # Same list type on both sides: render children in list brackets.
        return self.children.format(
            compact=compact,
            verbose=verbose,
            root_indent=root_indent,
            cls_name='',
            bracket_type=object_utils.BracketType.SQUARE)
      if self.left is self.right:
        cls_name = self.left.__name__
      else:
        # Differing container types render as 'Left|Right(...)'.
        cls_name = f'{self.left.__name__}|{self.right.__name__}'
      return self.children.format(
          compact=compact,
          verbose=verbose,
          root_indent=root_indent,
          cls_name=cls_name,
          bracket_type=object_utils.BracketType.ROUND)
|
(left=MISSING, right=MISSING, children: Dict[str, Any] = {})
|
40,700 |
pyglove.core.symbolic.diff
|
__bool__
|
Returns True if there is a diff.
|
def __bool__(self):
  """Returns True if there is a diff."""
  if self._has_diff is None:
    # Compute once and cache in `_has_diff`.
    if base.ne(self.left, self.right):
      has_diff = True
    elif self.children:
      # A container diff exists if any child diff is non-empty.
      has_diff = any(bool(cd) for cd in self.children.values())
    else:
      has_diff = False
    self._has_diff = has_diff
  return self._has_diff
|
(self)
|
40,717 |
pyglove.core.symbolic.diff
|
_on_bound
| null |
def _on_bound(self):
  """Validates fields and resets the cached has-diff state."""
  super()._on_bound()
  if self.children:
    # When child diffs are present, `left` and `right` must hold the
    # container types being compared rather than leaf values.
    if not isinstance(self.left, type):
      raise ValueError(
          f'\'left\' must be a type when \'children\' is specified. '
          f'Encountered: {self.left!r}.')
    if not isinstance(self.right, type):
      raise ValueError(
          f'\'right\' must be a type when \'children\' is specified. '
          f'Encountered: {self.right!r}.')
  self._has_diff = None  # Lazily computed by __bool__.
|
(self)
|
40,735 |
pyglove.core.symbolic.pure_symbolic
|
custom_apply
|
Custom apply on a value based on its original value spec.
This implements ``pg.pg_typing.CustomTyping``, allowing a pure symbolic
value to be assigned to any field. To customize this behavior, override
this method in subclasses.
Args:
path: KeyPath of current object under its object tree.
value_spec: Original value spec for this field.
allow_partial: Whether allow partial object to be created.
child_transform: Function to transform child node values into their final
values. Transform function is called on leaf nodes first, then on their
parents, recursively.
Returns:
A tuple (proceed_with_standard_apply, value_to_proceed).
If proceed_with_standard_apply is set to False, value_to_proceed
will be used as final value.
Raises:
Error when the value is not compatible with the value spec.
|
def custom_apply(
    self,
    path: object_utils.KeyPath,
    value_spec: pg_typing.ValueSpec,
    allow_partial: bool,
    child_transform: Optional[
        Callable[[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, Any]:
  """Custom apply on a value based on its original value spec.

  This implements ``pg.pg_typing.CustomTyping``, allowing a pure symbolic
  value to be assigned to any field. To customize this behavior, override
  this method in subclasses.

  Args:
    path: KeyPath of current object under its object tree.
    value_spec: Original value spec for this field.
    allow_partial: Whether allow partial object to be created.
    child_transform: Function to transform child node values into their final
      values. Transform function is called on leaf nodes first, then on their
      parents, recursively.

  Returns:
    A tuple (proceed_with_standard_apply, value_to_proceed).
    If proceed_with_standard_apply is set to False, value_to_proceed
    will be used as final value.

  Raises:
    Error when the value is not compatible with the value spec.
  """
  # Pure symbolic values bypass standard validation entirely: accept as-is.
  del path, value_spec, allow_partial, child_transform
  return (False, self)
|
(self, path: pyglove.core.object_utils.value_location.KeyPath, value_spec: pyglove.core.typing.class_schema.ValueSpec, allow_partial: bool, child_transform: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None) -> Tuple[bool, Any]
|
40,736 |
pyglove.core.symbolic.diff
|
format
|
Override format to conditionally print the shared value or the diff.
|
def format(
    self,
    compact: bool = False,
    verbose: bool = True,
    root_indent: int = 0,
    **kwargs):
  """Override format to conditionally print the shared value or the diff."""
  if not bool(self):
    if self.value == Diff.MISSING:
      return 'No diff'
    # When there is no diff, but the same value needs to be displayed
    # we simply return the value.
    return object_utils.format(
        self.value, compact, verbose, root_indent, **kwargs)
  if self.is_leaf:
    # Leaf diff: show left/right but hide the (empty) children field.
    exclude_keys = kwargs.pop('exclude_keys', None)
    exclude_keys = exclude_keys or set()
    exclude_keys.add('children')
    return super().format(
        compact, verbose, root_indent, exclude_keys=exclude_keys, **kwargs)
  else:
    assert isinstance(self.left, type)
    assert isinstance(self.right, type)
    if self.left is self.right and issubclass(self.left, list):
      # Same list type on both sides: render children in list brackets.
      return self.children.format(
          compact=compact,
          verbose=verbose,
          root_indent=root_indent,
          cls_name='',
          bracket_type=object_utils.BracketType.SQUARE)
    if self.left is self.right:
      cls_name = self.left.__name__
    else:
      # Differing container types render as 'Left|Right(...)'.
      cls_name = f'{self.left.__name__}|{self.right.__name__}'
    return self.children.format(
        compact=compact,
        verbose=verbose,
        root_indent=root_indent,
        cls_name=cls_name,
        bracket_type=object_utils.BracketType.ROUND)
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, **kwargs)
|
40,749 |
pyglove.core.symbolic.diff
|
sym_eq
|
Override symbolic equality.
|
def sym_eq(self, other: Any):
  """Override symbolic equality."""
  if super().sym_eq(other):
    return True
  if not bool(self):
    # A no-diff Diff compares equal to the shared value itself.
    return base.eq(self.left, other)
  # NOTE(review): implicitly returns None (falsy) when there is a diff and
  # symbolic equality fails — confirm this is intended over an explicit
  # `return False`.
|
(self, other: Any)
|
40,773 |
pyglove.core.object_utils.docstr_utils
|
DocStr
|
Docstring.
|
class DocStr:
  """Docstring.

  Structured representation of a parsed docstring.
  """
  style: DocStrStyle                  # Convention the docstring follows.
  short_description: Optional[str]    # Summary line, if present.
  long_description: Optional[str]     # Extended description, if present.
  examples: List[DocStrExample]       # Parsed example sections.
  args: Dict[str, DocStrArgument]     # Arg name -> parsed argument doc.
  returns: Optional[DocStrReturns]    # Returns/Yields section, if present.
  raises: List[DocStrRaises]          # Parsed raises entries.
  blank_after_short_description: bool = True

  @classmethod
  def parse(cls, text: str, style: Optional[DocStrStyle] = None) -> 'DocStr':
    """Parses a docstring.

    Args:
      text: Raw docstring text.
      style: Optional style hint, mapped through `_to_parser_style` before
        being handed to `docstring_parser`.

    Returns:
      A `DocStr` populated from the parse result.
    """
    result = docstring_parser.parse(text, _to_parser_style(style))
    return cls(
        style=_from_parser_style(result.style),
        short_description=result.short_description,
        long_description=result.long_description,
        examples=[
            DocStrExample(description=e.description)
            for e in result.examples
        ],
        args={  # pylint: disable=g-complex-comprehension
            p.arg_name: DocStrArgument(
                name=p.arg_name, description=p.description,
                type_name=p.type_name, default=p.default,
                is_optional=p.is_optional)
            for p in result.params
        },
        returns=DocStrReturns(  # pylint: disable=g-long-ternary
            name=result.returns.return_name,
            description=result.returns.description,
            is_yield=result.returns.is_generator) if result.returns else None,
        raises=[
            DocStrRaises(type_name=r.type_name, description=r.description)
            for r in result.raises
        ],
        blank_after_short_description=result.blank_after_short_description)
|
(style: pyglove.core.object_utils.docstr_utils.DocStrStyle, short_description: Optional[str], long_description: Optional[str], examples: List[pyglove.core.object_utils.docstr_utils.DocStrExample], args: Dict[str, pyglove.core.object_utils.docstr_utils.DocStrArgument], returns: Optional[pyglove.core.object_utils.docstr_utils.DocStrReturns], raises: List[pyglove.core.object_utils.docstr_utils.DocStrRaises], blank_after_short_description: bool = True) -> None
|
40,774 |
pyglove.core.object_utils.docstr_utils
|
__eq__
| null |
# Copyright 2023 The PyGlove Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for working with docstrs."""
import dataclasses
import enum
from typing import Any, Dict, List, Optional
import docstring_parser
class DocStrStyle(enum.Enum):
  """Docstring style.

  Identifies which docstring convention a docstring follows; used to pick
  the matching `docstring_parser` parser.
  """
  # reStructuredText (Sphinx) style.
  REST = 1
  # Google style.
  GOOGLE = 2
  # NumPy style.
  NUMPYDOC = 3
  # Epytext style.
  EPYDOC = 4
|
(self, other)
|
40,777 |
pyglove.core.typing.class_schema
|
Field
|
Class that represents the definition of one or a group of attributes.
``Field`` is held by a :class:`pyglove.Schema` object for defining the
name(s), the validation and transformation rules on its/their value(s) for a
single symbolic attribute or a set of symbolic attributes.
A ``Field`` is defined by a tuple of 4 items::
(key specification, value specification, doc string, field metadata)
For example::
(pg.typing.StrKey('foo.*'),
pg.typing.Int(),
'Attributes with foo',
{'user_data': 'bar'})
The key specification (or ``KeySpec``, class :class:`pyglove.KeySpec`) and
value specification (or ``ValueSpec``, class :class:`pyglove.ValueSpec`) are
required, while the doc string and the field metadata are optional. The
``KeySpec`` defines acceptable identifiers for this field, and the
``ValueSpec`` defines the attribute's value type, its default value,
validation rules and etc. The doc string serves as the description for the
field, and the field metadata can be used for attribute-based code generation.
``Field`` supports extension, which allows the subclass to add more
restrictions to a field inherited from the base class, or override its default
value. A field can be frozen if subclasses can no longer extend it.
See :class:`pyglove.KeySpec` and :class:`pyglove.ValueSpec` for details.
|
class Field(object_utils.Formattable, object_utils.JSONConvertible):
  """Class that represents the definition of one or a group of attributes.

  ``Field`` is held by a :class:`pyglove.Schema` object for defining the
  name(s), the validation and transformation rules on its/their value(s) for a
  single symbolic attribute or a set of symbolic attributes.

  A ``Field`` is defined by a tuple of 4 items::

    (key specification, value specification, doc string, field metadata)

  For example::

    (pg.typing.StrKey('foo.*'),
     pg.typing.Int(),
     'Attributes with foo',
     {'user_data': 'bar'})

  The key specification (or ``KeySpec``, class :class:`pyglove.KeySpec`) and
  value specification (or ``ValueSpec``, class :class:`pyglove.ValueSpec`) are
  required, while the doc string and the field metadata are optional. The
  ``KeySpec`` defines acceptable identifiers for this field, and the
  ``ValueSpec`` defines the attribute's value type, its default value,
  validation rules and etc. The doc string serves as the description for the
  field, and the field metadata can be used for attribute-based code generation.

  ``Field`` supports extension, which allows the subclass to add more
  restrictions to a field inherited from the base class, or override its default
  value. A field can be frozen if subclasses can no longer extend it.

  See :class:`pyglove.KeySpec` and :class:`pyglove.ValueSpec` for details.
  """

  # Key used to identify this class during JSON (de)serialization.
  __serialization_key__ = 'pyglove.typing.Field'

  def __init__(
      self,
      key_spec: Union[KeySpec, str],
      value_spec: ValueSpec,
      description: Optional[str] = None,
      metadata: Optional[Dict[str, Any]] = None):
    """Constructor.

    Args:
      key_spec: Key specification of the field. Can be a string or a KeySpec
        instance.
      value_spec: Value specification of the field.
      description: Description of the field.
      metadata: A dict of objects as metadata for the field.

    Raises:
      ValueError: metadata is not a dict.
    """
    if isinstance(key_spec, str):
      # Strings are accepted as shorthand and converted to a concrete KeySpec.
      key_spec = KeySpec.from_str(key_spec)
    assert isinstance(key_spec, KeySpec), key_spec
    self._key = key_spec
    self._value = value_spec
    self._description = description
    if metadata and not isinstance(metadata, dict):
      raise ValueError('metadata must be a dict.')
    # `None` normalizes to {} so `self._metadata` is always a dict.
    self._metadata = metadata or {}

  @classmethod
  def from_annotation(
      cls,
      key: Union[str, KeySpec],
      annotation: Any,
      description: Optional[str] = None,
      metadata: Optional[Dict[str, Any]] = None,
      auto_typing=True) -> 'Field':
    """Gets a Field from annotation."""
    # Placeholder body: replaced at import time by `annotation_conversion.py`,
    # presumably to avoid a circular import — confirm in that module.
    del key, annotation, description, metadata, auto_typing
    assert False, 'Overridden in `annotation_conversion.py`.'

  @property
  def description(self) -> Optional[str]:
    """Description of this field."""
    return self._description

  def set_description(self, description: str) -> None:
    """Sets the description for this field."""
    self._description = description

  @property
  def key(self) -> KeySpec:
    """Key specification of this field."""
    return self._key

  @property
  def value(self) -> ValueSpec:
    """Value specification of this field."""
    return self._value

  @property
  def annotation(self) -> Any:
    """Type annotation for this field (delegated to the value spec)."""
    return self._value.annotation

  @property
  def metadata(self) -> Dict[str, Any]:
    """Metadata of this field.

    Metadata is defined as a dict type, so we can add multiple annotations
    to a field::

      userdata = field.metadata.get('userdata', None)

    Returns:
      Metadata of this field as a dict.
    """
    return self._metadata

  def extend(self, base_field: 'Field') -> 'Field':
    """Extend current field based on a base field."""
    # Key and value specs are extended in place (mutates `self`).
    self.key.extend(base_field.key)
    self.value.extend(base_field.value)
    if not self._description:
      self._description = base_field.description
    if base_field.metadata:
      # Merge metadata; entries from this field take precedence.
      metadata = copy.copy(base_field.metadata)
      metadata.update(self.metadata)
      self._metadata = metadata
    return self

  def apply(
      self,
      value: Any,
      allow_partial: bool = False,
      transform_fn: Optional[Callable[
          [object_utils.KeyPath, 'Field', Any], Any]] = None,
      root_path: Optional[object_utils.KeyPath] = None) -> Any:
    """Apply current field to a value, which validate and complete the value.

    Args:
      value: Value to validate against this spec.
      allow_partial: Whether partial value is allowed. This is for dict or
        nested dict values.
      transform_fn: Function to transform applied value into final value.
      root_path: Key path for root.

    Returns:
      final value.

      When allow_partial is set to False (default), only fully qualified value
      is acceptable. When allow_partial is set to True, missing fields will
      be placeheld using MISSING_VALUE.

    Raises:
      KeyError: if additional key is found in value, or required key is missing
        and allow_partial is set to False.
      TypeError: if type of value is not the same as spec required.
      ValueError: if value is not acceptable, or value is MISSING_VALUE while
        allow_partial is set to False.
    """
    value = self._value.apply(
        value,
        allow_partial=allow_partial,
        child_transform=transform_fn,
        root_path=root_path)
    # Give the caller-provided hook a chance to post-process the value.
    if transform_fn:
      value = transform_fn(root_path, self, value)
    return value

  @property
  def default_value(self) -> Any:
    """Returns the default value."""
    return self._value.default

  @property
  def frozen(self) -> bool:
    """Returns True if current field's value is frozen."""
    return self._value.frozen

  def format(self,
             compact: bool = False,
             verbose: bool = True,
             root_indent: int = 0,
             **kwargs) -> str:
    """Format this field into a string."""
    description = self._description
    # Truncate long descriptions in non-verbose mode for readability.
    if not verbose and self._description and len(self._description) > 20:
      description = self._description[:20] + '...'
    metadata = object_utils.format(
        self._metadata,
        compact=compact,
        verbose=verbose,
        root_indent=root_indent + 1,
        **kwargs)
    # Abbreviate large metadata in non-verbose mode.
    if not verbose and len(metadata) > 24:
      metadata = '{...}'
    attr_str = object_utils.kvlist_str([
        ('key', self._key, None),
        ('value', self._value.format(
            compact=compact,
            verbose=verbose,
            root_indent=root_indent + 1,
            **kwargs), None),
        ('description', object_utils.quote_if_str(description), None),
        ('metadata', metadata, '{}')
    ])
    return f'Field({attr_str})'

  def to_json(self, **kwargs: Any) -> Dict[str, Any]:
    # Serialize as name -> (value, default) pairs; values equal to their
    # default are omitted from the output (`exclude_default=True`).
    return self.to_json_dict(
        fields=dict(
            key_spec=(self._key, None),
            value_spec=(self._value, None),
            description=(self._description, None),
            metadata=(self._metadata, {}),
        ),
        exclude_default=True,
        **kwargs,
    )

  def __eq__(self, other: Any) -> bool:
    """Operator==."""
    if self is other:
      return True
    return (isinstance(other, self.__class__) and self.key == other.key and
            self.value == other.value and
            self.description == other.description and
            self.metadata == other.metadata)

  def __ne__(self, other: Any) -> bool:
    """Operator!=."""
    return not self.__eq__(other)
|
(key_spec: Union[pyglove.core.typing.class_schema.KeySpec, str], value_spec: pyglove.core.typing.class_schema.ValueSpec, description: Optional[str] = None, metadata: Optional[Dict[str, Any]] = None)
|
40,779 |
pyglove.core.typing.class_schema
|
__eq__
|
Operator==.
|
def __eq__(self, other: Any) -> bool:
"""Operator==."""
if self is other:
return True
return (isinstance(other, self.__class__) and self.key == other.key and
self.value == other.value and
self.description == other.description and
self.metadata == other.metadata)
|
(self, other: Any) -> bool
|
40,780 |
pyglove.core.typing.class_schema
|
__init__
|
Constructor.
Args:
key_spec: Key specification of the field. Can be a string or a KeySpec
instance.
value_spec: Value specification of the field.
description: Description of the field.
metadata: A dict of objects as metadata for the field.
Raises:
ValueError: metadata is not a dict.
|
def __init__(
    self,
    key_spec: Union[KeySpec, str],
    value_spec: ValueSpec,
    description: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None):
  """Constructor.

  Args:
    key_spec: Key specification of the field. Can be a string or a KeySpec
      instance.
    value_spec: Value specification of the field.
    description: Description of the field.
    metadata: A dict of objects as metadata for the field.

  Raises:
    ValueError: metadata is not a dict.
  """
  # Plain strings are shorthand for a concrete KeySpec.
  key = KeySpec.from_str(key_spec) if isinstance(key_spec, str) else key_spec
  assert isinstance(key, KeySpec), key
  self._key = key
  self._value = value_spec
  self._description = description
  if metadata and not isinstance(metadata, dict):
    raise ValueError('metadata must be a dict.')
  # `None` and `{}` both normalize to an empty dict.
  self._metadata = metadata or {}
|
(self, key_spec: Union[pyglove.core.typing.class_schema.KeySpec, str], value_spec: pyglove.core.typing.class_schema.ValueSpec, description: Optional[str] = None, metadata: Optional[Dict[str, Any]] = None)
|
40,781 |
pyglove.core.typing.class_schema
|
__ne__
|
Operator!=.
|
def __ne__(self, other: Any) -> bool:
"""Operator!=."""
return not self.__eq__(other)
|
(self, other: Any) -> bool
|
40,785 |
pyglove.core.typing.class_schema
|
apply
|
Apply current field to a value, which validate and complete the value.
Args:
value: Value to validate against this spec.
allow_partial: Whether partial value is allowed. This is for dict or
nested dict values.
transform_fn: Function to transform applied value into final value.
root_path: Key path for root.
Returns:
final value.
When allow_partial is set to False (default), only fully qualified value
is acceptable. When allow_partial is set to True, missing fields will
be placeheld using MISSING_VALUE.
Raises:
KeyError: if additional key is found in value, or required key is missing
and allow_partial is set to False.
TypeError: if type of value is not the same as spec required.
ValueError: if value is not acceptable, or value is MISSING_VALUE while
allow_partial is set to False.
|
def apply(
    self,
    value: Any,
    allow_partial: bool = False,
    transform_fn: Optional[Callable[
        [object_utils.KeyPath, 'Field', Any], Any]] = None,
    root_path: Optional[object_utils.KeyPath] = None) -> Any:
  """Apply current field to a value, which validate and complete the value.

  Args:
    value: Value to validate against this spec.
    allow_partial: Whether partial value is allowed. This is for dict or
      nested dict values.
    transform_fn: Function to transform applied value into final value.
    root_path: Key path for root.

  Returns:
    final value.

    When allow_partial is set to False (default), only fully qualified value
    is acceptable. When allow_partial is set to True, missing fields will
    be placeheld using MISSING_VALUE.

  Raises:
    KeyError: if additional key is found in value, or required key is missing
      and allow_partial is set to False.
    TypeError: if type of value is not the same as spec required.
    ValueError: if value is not acceptable, or value is MISSING_VALUE while
      allow_partial is set to False.
  """
  # Validation/completion is delegated to the value spec first.
  applied = self._value.apply(
      value,
      allow_partial=allow_partial,
      child_transform=transform_fn,
      root_path=root_path)
  # Then the caller-provided hook may post-process the validated value.
  if transform_fn is None:
    return applied
  return transform_fn(root_path, self, applied)
|
(self, value: Any, allow_partial: bool = False, transform_fn: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None) -> Any
|
40,786 |
pyglove.core.typing.class_schema
|
extend
|
Extend current field based on a base field.
|
def extend(self, base_field: 'Field') -> 'Field':
  """Extend current field based on a base field."""
  # Key and value specs are extended in place.
  self.key.extend(base_field.key)
  self.value.extend(base_field.value)
  # Inherit the base description when this field has none.
  self._description = self._description or base_field.description
  # Merge metadata; entries from this field take precedence.
  if base_field.metadata:
    merged = copy.copy(base_field.metadata)
    merged.update(self.metadata)
    self._metadata = merged
  return self
|
(self, base_field: pyglove.core.typing.class_schema.Field) -> pyglove.core.typing.class_schema.Field
|
40,787 |
pyglove.core.typing.class_schema
|
format
|
Format this field into a string.
|
def format(self,
           compact: bool = False,
           verbose: bool = True,
           root_indent: int = 0,
           **kwargs) -> str:
  """Format this field into a string.

  Args:
    compact: If True, format into a single line.
    verbose: If True, include the full description and metadata; otherwise
      long values are truncated/abbreviated.
    root_indent: Start indent level when the output is multi-line.
    **kwargs: Forwarded to nested `format` calls.

  Returns:
    A string of form ``Field(key=..., value=..., ...)``.
  """
  description = self._description
  # Truncate long descriptions in non-verbose mode.
  if not verbose and self._description and len(self._description) > 20:
    description = self._description[:20] + '...'
  metadata = object_utils.format(
      self._metadata,
      compact=compact,
      verbose=verbose,
      root_indent=root_indent + 1,
      **kwargs)
  # Abbreviate large metadata in non-verbose mode.
  if not verbose and len(metadata) > 24:
    metadata = '{...}'
  attr_str = object_utils.kvlist_str([
      ('key', self._key, None),
      ('value', self._value.format(
          compact=compact,
          verbose=verbose,
          root_indent=root_indent + 1,
          **kwargs), None),
      ('description', object_utils.quote_if_str(description), None),
      ('metadata', metadata, '{}')
  ])
  return f'Field({attr_str})'
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, **kwargs) -> str
|
40,788 |
pyglove.core.typing.annotation_conversion
|
_field_from_annotation
|
Creates a field from Python annotation.
|
def _field_from_annotation(
    key: typing.Union[str, class_schema.KeySpec],
    annotation: typing.Any,
    description: typing.Optional[str] = None,
    metadata: typing.Optional[typing.Dict[str, typing.Any]] = None,
    auto_typing=True,
) -> class_schema.Field:
  """Creates a field from Python annotation."""
  if isinstance(annotation, annotated.Annotated):
    # PyGlove's own `Annotated` carries spec, docstring and metadata directly.
    field_spec = (
        key, annotation.value_spec, annotation.docstring, annotation.metadata)
  elif _Annotated and typing.get_origin(annotation) is _Annotated:
    # `typing.Annotated[...]`: the first arg is the type, the rest is metadata.
    type_args = typing.get_args(annotation)
    assert len(type_args) > 1, (annotation, type_args)
    field_spec = (key, *type_args)
  else:
    field_spec = (key, annotation, description, metadata or {})
  return class_schema.create_field(
      field_spec,
      auto_typing=auto_typing,
      accept_value_as_annotation=False)
|
(key: Union[str, pyglove.core.typing.class_schema.KeySpec], annotation: Any, description: Optional[str] = None, metadata: Optional[Dict[str, Any]] = None, auto_typing=True) -> pyglove.core.typing.class_schema.Field
|
40,789 |
pyglove.core.typing.class_schema
|
set_description
|
Sets the description for this field.
|
def set_description(self, description: str) -> None:
  """Sets the description for this field.

  Args:
    description: New description text; replaces any existing description.
  """
  # Backing attribute for the read-only `description` property.
  self._description = description
|
(self, description: str) -> NoneType
|
40,790 |
pyglove.core.typing.class_schema
|
to_json
| null |
def to_json(self, **kwargs: Any) -> Dict[str, Any]:
  """Returns a JSON-compatible dict representation of this field."""
  # Each entry maps a serialized name to a (value, default) pair; values
  # equal to their default are dropped because `exclude_default=True`.
  json_fields = dict(
      key_spec=(self._key, None),
      value_spec=(self._value, None),
      description=(self._description, None),
      metadata=(self._metadata, {}),
  )
  return self.to_json_dict(fields=json_fields, exclude_default=True, **kwargs)
|
(self, **kwargs: Any) -> Dict[str, Any]
|
40,791 |
pyglove.core.symbolic.base
|
FieldUpdate
|
Class that describes an update to a field in an object tree.
|
class FieldUpdate(object_utils.Formattable):
  """Class that describes an update to a field in an object tree.

  A ``FieldUpdate`` is a plain record of where a change happened (``path``
  relative to ``target``), which field spec governs the change (``field``),
  and the values before and after (``old_value``/``new_value``).
  """

  def __init__(self,
               path: object_utils.KeyPath,
               target: 'Symbolic',
               field: Optional[pg_typing.Field],
               old_value: Any,
               new_value: Any):
    """Constructor.

    Args:
      path: KeyPath of the field that is updated.
      target: Parent of updated field.
      field: Specification of the updated field.
      old_value: Old value of the field.
      new_value: New value of the field.
    """
    self.path = path
    self.target = target
    self.field = field
    self.old_value = old_value
    self.new_value = new_value

  def format(self,
             compact: bool = False,
             verbose: bool = True,
             root_indent: int = 0,
             *,
             python_format: bool = False,
             hide_default_values: bool = False,
             hide_missing_values: bool = False,
             **kwargs) -> str:
    """Formats this object."""
    # Forward the formatting flags to nested `object_utils.format` calls.
    kwargs.update({
        'python_format': python_format,
        'hide_default_values': hide_default_values,
        'hide_missing_values': hide_missing_values,
    })
    details = object_utils.kvlist_str([
        ('parent_path', self.target.sym_path, None),
        ('path', self.path.path, None),
        ('old_value', object_utils.format(
            self.old_value, compact, verbose, root_indent + 1, **kwargs),
         object_utils.MISSING_VALUE),
        ('new_value',
         object_utils.format(
             self.new_value, compact, verbose, root_indent + 1, **kwargs),
         object_utils.MISSING_VALUE),
    ])
    return f'{self.__class__.__name__}({details})'

  def __eq__(self, other: Any) -> bool:
    """Operator ==."""
    if not isinstance(other, self.__class__):
      return False
    # NOTE: `target` and `field` are compared by identity, not equality.
    return (self.path == other.path and self.target is other.target and
            self.field is other.field and self.old_value == other.old_value and
            self.new_value == other.new_value)

  def __ne__(self, other: Any) -> bool:
    """Operator !=."""
    return not self.__eq__(other)
|
(path: pyglove.core.object_utils.value_location.KeyPath, target: 'Symbolic', field: Optional[pyglove.core.typing.class_schema.Field], old_value: Any, new_value: Any)
|
40,792 |
pyglove.core.symbolic.base
|
__eq__
|
Operator ==.
|
def __eq__(self, other: Any) -> bool:
"""Operator ==."""
if not isinstance(other, self.__class__):
return False
return (self.path == other.path and self.target is other.target and
self.field is other.field and self.old_value == other.old_value and
self.new_value == other.new_value)
|
(self, other: Any) -> bool
|
40,793 |
pyglove.core.symbolic.base
|
__init__
|
Constructor.
Args:
path: KeyPath of the field that is updated.
target: Parent of updated field.
field: Specification of the updated field.
old_value: Old value of the field.
new_value: New value of the field.
|
def __init__(self,
             path: object_utils.KeyPath,
             target: 'Symbolic',
             field: Optional[pg_typing.Field],
             old_value: Any,
             new_value: Any):
  """Constructor.

  Args:
    path: KeyPath of the field that is updated.
    target: Parent of updated field.
    field: Specification of the updated field.
    old_value: Old value of the field.
    new_value: New value of the field.
  """
  # FieldUpdate is a plain record: expose everything as public attributes.
  for name, attr_value in (
      ('path', path),
      ('target', target),
      ('field', field),
      ('old_value', old_value),
      ('new_value', new_value),
  ):
    setattr(self, name, attr_value)
|
(self, path: pyglove.core.object_utils.value_location.KeyPath, target: pyglove.core.symbolic.base.Symbolic, field: Optional[pyglove.core.typing.class_schema.Field], old_value: Any, new_value: Any)
|
40,798 |
pyglove.core.symbolic.base
|
format
|
Formats this object.
|
def format(self,
           compact: bool = False,
           verbose: bool = True,
           root_indent: int = 0,
           *,
           python_format: bool = False,
           hide_default_values: bool = False,
           hide_missing_values: bool = False,
           **kwargs) -> str:
  """Formats this object.

  Args:
    compact: If True, format into a single line.
    verbose: If True, format with verbosity.
    root_indent: Start indent level when the output is multi-line.
    python_format: Forwarded to nested `object_utils.format` calls.
    hide_default_values: Forwarded to nested `object_utils.format` calls.
    hide_missing_values: Forwarded to nested `object_utils.format` calls.
    **kwargs: Additional keyword args forwarded to nested `format` calls.

  Returns:
    A string of form ``<ClassName>(parent_path=..., path=..., ...)``.
  """
  # Fold the explicit flags back into kwargs so nested calls receive them.
  kwargs.update({
      'python_format': python_format,
      'hide_default_values': hide_default_values,
      'hide_missing_values': hide_missing_values,
  })
  details = object_utils.kvlist_str([
      ('parent_path', self.target.sym_path, None),
      ('path', self.path.path, None),
      ('old_value', object_utils.format(
          self.old_value, compact, verbose, root_indent + 1, **kwargs),
       object_utils.MISSING_VALUE),
      ('new_value',
       object_utils.format(
           self.new_value, compact, verbose, root_indent + 1, **kwargs),
       object_utils.MISSING_VALUE),
  ])
  return f'{self.__class__.__name__}({details})'
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, *, python_format: bool = False, hide_default_values: bool = False, hide_missing_values: bool = False, **kwargs) -> str
|
40,799 |
pyglove.core.object_utils.common_traits
|
Formattable
|
Interface for classes whose instances can be pretty-formatted.
This interface overrides the default ``__repr__`` and ``__str__`` method, thus
all ``Formattable`` objects can be printed nicely.
All symbolic types implement this interface.
|
class Formattable(metaclass=abc.ABCMeta):
  """Interface for classes whose instances can be pretty-formatted.

  This interface overrides the default ``__repr__`` and ``__str__`` method, thus
  all ``Formattable`` objects can be printed nicely.

  All symbolic types implement this interface.
  """

  # Additional format keyword arguments for `__str__`.
  __str_format_kwargs__ = dict(compact=False, verbose=True)

  # Additional format keyword arguments for `__repr__`.
  __repr_format_kwargs__ = dict(compact=True)

  @abc.abstractmethod
  def format(self,
             compact: bool = False,
             verbose: bool = True,
             root_indent: int = 0,
             **kwargs) -> str:
    """Formats this object into a string representation.

    Args:
      compact: If True, this object will be formatted into a single line.
      verbose: If True, this object will be formatted with verbosity.
        Subclasses should define `verbosity` on their own.
      root_indent: The start indent level for this object if the output is a
        multi-line string.
      **kwargs: Subclass specific keyword arguments.

    Returns:
      A string of formatted object.
    """

  def __str__(self) -> str:
    """Returns the full (maybe multi-line) representation of this object."""
    kwargs = dict(self.__str_format_kwargs__)
    # Thread-local kwargs override the class defaults — presumably installed
    # elsewhere via a context manager; confirm in the `thread_local` module.
    kwargs.update(thread_local.thread_local_kwargs(_TLS_STR_FORMAT_KWARGS))
    return self._maybe_quote(self.format(**kwargs), **kwargs)

  def __repr__(self) -> str:
    """Returns a single-line representation of this object."""
    kwargs = dict(self.__repr_format_kwargs__)
    kwargs.update(thread_local.thread_local_kwargs(_TLS_REPR_FORMAT_KWARGS))
    return self._maybe_quote(self.format(**kwargs), **kwargs)

  def _maybe_quote(
      self,
      s: str,
      *,
      compact: bool = False,
      root_indent: int = 0,
      markdown: bool = False,
      **kwargs
  ) -> str:
    """Maybe quote the formatted string with markdown."""
    del kwargs
    # Only quote top-level output; nested output is embedded by its parent.
    if not markdown or root_indent > 0:
      return s
    if compact:
      return f'`{s}`'
    else:
      return f'\n```\n{s}\n```\n'
|
()
|
40,803 |
pyglove.core.object_utils.common_traits
|
format
|
Formats this object into a string representation.
Args:
compact: If True, this object will be formatted into a single line.
verbose: If True, this object will be formatted with verbosity.
Subclasses should define `verbosity` on their own.
root_indent: The start indent level for this object if the output is a
multi-line string.
**kwargs: Subclass specific keyword arguments.
Returns:
A string of formatted object.
|
@abc.abstractmethod
def format(self,
           compact: bool = False,
           verbose: bool = True,
           root_indent: int = 0,
           **kwargs) -> str:
  """Formats this object into a string representation.

  Args:
    compact: If True, this object will be formatted into a single line.
    verbose: If True, this object will be formatted with verbosity.
      Subclasses should define `verbosity` on their own.
    root_indent: The start indent level for this object if the output is a
      multi-line string.
    **kwargs: Subclass specific keyword arguments.

  Returns:
    A string of formatted object.
  """
  # Abstract: the body is intentionally empty; concrete subclasses must
  # override and return the formatted string.
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, **kwargs) -> str
|
40,804 |
pyglove.core.symbolic.functor
|
Functor
|
Symbolic functions (Functors).
A symbolic function is a symbolic class with a ``__call__`` method, whose
arguments can be bound partially, incrementally bound by attribute
assignment, or provided at call time.
Another useful trait is that a symbolic function is serializable, when
its definition is imported by the target program and its arguments are also
serializable. Therefore, it is very handy to move a symbolic function
around in distributed scenarios.
Symbolic functions can be created from regular function via
:func:`pyglove.functor`::
# Create a functor class using @pg.functor decorator.
@pg.functor([
('a', pg.typing.Int(), 'Argument a'),
# No field specification for 'b', which will be treated as any type.
])
def sum(a, b=1, *args, **kwargs):
return a + b + sum(args + kwargs.values())
sum(1)() # returns 2: prebind a=1, invoke with b=1 (default)
sum(a=1)() # returns 2: same as above.
sum()(1) # returns 2: bind a=1 at call time, b=1(default)
sum(b=2)(1) # returns 3: prebind b=2, invoke with a=1.
sum(b=2)() # wrong: `a` is not provided.
sum(1)(2) # wrong: 'a' is provided multiple times.
sum(1)(2, override_args=True) # ok: override `a` value with 2.
sum()(1, 2, 3, 4) # returns 10: a=1, b=2, *args=[3, 4]
sum(c=4)(1, 2, 3) # returns 10: a=1, b=2, *args=[3], **kwargs={'c': 4}
Or created by subclassing ``pg.Functor``::
class Sum(pg.Functor):
a: int
b: int = 1
def _call(self) -> int:
return self.a + self.b
Usage on subclassed functors is the same as functors created from functions.
|
class Functor(pg_object.Object, object_utils.Functor):
"""Symbolic functions (Functors).
A symbolic function is a symbolic class with a ``__call__`` method, whose
arguments can be bound partially, incrementally bound by attribute
assignment, or provided at call time.
Another useful trait is that a symbolic function is serializable, when
its definition is imported by the target program and its arguments are also
serializable. Therefore, it is very handy to move a symbolic function
around in distributed scenarios.
Symbolic functions can be created from regular function via
:func:`pyglove.functor`::
# Create a functor class using @pg.functor decorator.
@pg.functor([
('a', pg.typing.Int(), 'Argument a'),
# No field specification for 'b', which will be treated as any type.
])
def sum(a, b=1, *args, **kwargs):
return a + b + sum(args + kwargs.values())
sum(1)() # returns 2: prebind a=1, invoke with b=1 (default)
sum(a=1)() # returns 2: same as above.
sum()(1) # returns 2: bind a=1 at call time, b=1(default)
sum(b=2)(1) # returns 3: prebind b=2, invoke with a=1.
sum(b=2)() # wrong: `a` is not provided.
sum(1)(2) # wrong: 'a' is provided multiple times.
sum(1)(2, override_args=True) # ok: override `a` value with 2.
sum()(1, 2, 3, 4) # returns 10: a=1, b=2, *args=[3, 4]
sum(c=4)(1, 2, 3) # returns 10: a=1, b=2, *args=[3], **kwargs={'c': 4}
Or created by subclassing ``pg.Functor``::
class Sum(pg.Functor):
a: int
b: int = 1
def _call(self) -> int:
return self.a + self.b
Usage on subclassed functors is the same as functors created from functions.
"""
# Allow assignment on symbolic attributes.
allow_symbolic_assignment = True
# Key for storing override members during call.
_TLS_OVERRIDE_MEMBERS_KEY = '__override_members__'
#
# Customizable class traits.
#
@classmethod
@property
def is_subclassed_functor(cls) -> bool:
"""Returns True if this class is a subclassed Functor."""
return cls.auto_schema
@classmethod
def _update_signatures_based_on_schema(cls):
# Update the return value of subclassed functors.
if cls.is_subclassed_functor: # pylint: disable=using-constant-test
private_call_signature = pg_typing.Signature.from_callable(
cls._call, auto_typing=True
)
if (
len(private_call_signature.args) > 1
or private_call_signature.kwonlyargs
):
raise TypeError(
'`_call` of a subclassed Functor should take no argument. '
f'Encountered: {cls._call}.'
)
cls.__schema__.metadata['returns'] = private_call_signature.return_value
# Update __init_ signature.
init_signature = pg_typing.Signature.from_schema(
cls.__schema__,
name='__init__',
module_name=cls.__module__,
qualname=cls.__qualname__,
)
pseudo_init = init_signature.make_function(['pass'])
# Save the original `Functor.__init__` before overriding it.
if not hasattr(cls, '__orig_init__'):
setattr(cls, '__orig_init__', cls.__init__)
@object_utils.explicit_method_override
@functools.wraps(pseudo_init)
def _init(self, *args, **kwargs):
self.__class__.__orig_init__(self, *args, **kwargs)
setattr(cls, '__init__', _init)
# Update __call__ signature.
call_signature = pg_typing.Signature.from_schema(
cls.__schema__,
name='__call__',
module_name=cls.__module__,
qualname=cls.__qualname__,
is_method=False,
)
setattr(cls, '__signature__', call_signature)
def __new__(cls, *args, **kwargs):
instance = object.__new__(cls)
if flags.should_call_functors_during_init():
instance.__init__(*args, **kwargs)
return instance()
return instance
@object_utils.explicit_method_override
def __init__(
self,
*args,
root_path: Optional[object_utils.KeyPath] = None,
override_args: bool = False,
ignore_extra_args: bool = False,
**kwargs):
"""Constructor.
Args:
*args: prebound positional arguments.
root_path: The symbolic path for current object.
override_args: If True, allows arguments provided during `__call__` to
override existing bound arguments.
ignore_extra_args: If True, unsupported arguments can be passed in
during `__call__` without using them. Otherwise, calling with
unsupported arguments will raise error.
**kwargs: prebound keyword arguments.
Raises:
KeyError: constructor got unexpected arguments.
"""
# NOTE(daiyip): Since Functor is usually late bound (until call time),
# we pass `allow_partial=True` during functor construction.
_ = kwargs.pop('allow_partial', None)
varargs = None
signature = self.__signature__
if len(args) > len(signature.args):
if signature.varargs:
varargs = list(args[len(signature.args) :])
args = args[: len(signature.args)]
else:
arg_phrase = object_utils.auto_plural(len(signature.args), 'argument')
was_phrase = object_utils.auto_plural(len(args), 'was', 'were')
raise TypeError(
f'{signature.id}() takes {len(signature.args)} '
f'positional {arg_phrase} but {len(args)} {was_phrase} given.'
)
bound_kwargs = dict()
for i, v in enumerate(args):
if pg_typing.MISSING_VALUE != v:
bound_kwargs[signature.args[i].name] = v
if varargs is not None:
bound_kwargs[signature.varargs.name] = varargs
for k, v in kwargs.items():
if pg_typing.MISSING_VALUE != v:
if k in bound_kwargs:
raise TypeError(
f'{signature.id}() got multiple values for keyword '
f'argument {k!r}.'
)
bound_kwargs[k] = v
default_args = set()
non_default_args = set(bound_kwargs)
for arg_spec in signature.named_args:
if not arg_spec.value_spec.has_default:
continue
arg_name = arg_spec.name
if arg_name not in non_default_args:
default_args.add(arg_name)
elif bound_kwargs[arg_name] == arg_spec.value_spec.default:
default_args.add(arg_name)
non_default_args.discard(arg_name)
if signature.varargs and not varargs:
default_args.add(signature.varargs.name)
super().__init__(allow_partial=True,
root_path=root_path,
**bound_kwargs)
self._non_default_args = non_default_args
self._default_args = default_args
self._specified_args = set(bound_kwargs)
self._override_args = override_args
self._ignore_extra_args = ignore_extra_args
# For subclassed Functor, we use thread-local storage for storing temporary
# member overrides from the arguments during functor call.
self._tls = threading.local() if self.is_subclassed_functor else None
def _sym_inferred(self, key: str, **kwargs: Any) -> Any:
"""Overrides method to allow member overrides during call."""
if self._tls is not None:
overrides = getattr(self._tls, Functor._TLS_OVERRIDE_MEMBERS_KEY, {})
v = overrides.get(key, pg_typing.MISSING_VALUE)
if pg_typing.MISSING_VALUE != v:
return overrides[key]
return super()._sym_inferred(key, **kwargs)
def _sym_clone(self, deep: bool, memo: Any = None) -> 'Functor':
"""Override to copy bound args."""
other = super()._sym_clone(deep, memo)
# pylint: disable=protected-access
other._non_default_args = set(self._non_default_args)
other._default_args = self._default_args
other._specified_args = self._specified_args
other._override_args = self._override_args
other._ignore_extra_args = self._ignore_extra_args
# pylint: enable=protected-access
return typing.cast(Functor, other)
def _on_change(
    self, field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
  """Custom handling field change to update bound args."""
  for relative_path, update in field_updates.items():
    assert relative_path
    # Only immediate arguments (path depth of 1) affect the bound-arg
    # bookkeeping; deeper updates mutate within an argument's value.
    if len(relative_path) != 1:
      continue
    arg_name = str(relative_path)
    if update.field.default_value == update.new_value:
      # Count as a default arg only when the field actually declares one.
      if update.field.value.has_default:
        self._default_args.add(arg_name)
      self._non_default_args.discard(arg_name)
    else:
      self._default_args.discard(arg_name)
      self._non_default_args.add(arg_name)
    # MISSING_VALUE indicates the argument was unbound by the update.
    if update.new_value == pg_typing.MISSING_VALUE:
      self._specified_args.discard(arg_name)
    else:
      self._specified_args.add(arg_name)
def __delattr__(self, name: str) -> None:
  """Discard a previously bound argument and reset to its default value."""
  del self._sym_attributes[name]
  # The argument is no longer user-specified nor non-default; it falls back
  # to its declared default when one exists.
  has_default = self.__signature__.get_value_spec(name).has_default
  if has_default:
    self._default_args.add(name)
  self._specified_args.discard(name)
  self._non_default_args.discard(name)
def _sym_missing(self) -> Dict[str, Any]:
  """Returns missing values for Functor.

  Semantically unbound arguments are not missing, thus we only return partial
  bound arguments in `sym_missing`. As a result, a functor is partial only
  when any of its bound arguments is partial.

  Returns:
    A dict of missing key (or path) to missing value.
  """
  result = {}
  for name, value in self._sym_attributes.items():
    if pg_typing.MISSING_VALUE != value and isinstance(value, base.Symbolic):
      child_missing = value.sym_missing(flatten=False)
      if child_missing:
        result[name] = child_missing
  return result
@property
def specified_args(self) -> Set[str]:
  """Names of the arguments explicitly specified by the user."""
  return self._specified_args
@property
def non_default_args(self) -> Set[str]:
  """Names of the bound arguments whose values differ from their defaults."""
  return self._non_default_args
@property
def default_args(self) -> Set[str]:
  """Names of the bound arguments whose values equal their defaults."""
  return self._default_args
@property
def bound_args(self) -> Set[str]:
  """Names of all bound arguments (both default and non-default)."""
  return self._non_default_args.union(self._default_args)
@property
def unbound_args(self) -> Set[str]:
  """Returns unbound argument names.

  Returns:
    The set of argument names that are present as symbolic attributes but
    are not bound (neither default- nor non-default-bound).
  """
  # Hoist `self.bound_args` out of the comprehension: the property builds a
  # new union set on every access, so evaluating it per element was O(n*m).
  bound = self.bound_args
  return {name for name in self._sym_attributes if name not in bound}
@property
def is_fully_bound(self) -> bool:
  """True if every argument of the functor is bound."""
  bound_count = len(self._non_default_args) + len(self._default_args)
  return bound_count == len(self._sym_attributes)
def _call(self, *args, **kwargs) -> Callable:  # pylint: disable=g-bare-generic
  """Actual function logic. Subclasses should implement this method."""
  raise NotImplementedError()

# TODO(b/183649930): We pretend that _call is not abstract to avoid
# [not-instantiable] errors from pytype. At runtime (TYPE_CHECKING is False)
# the method is still marked abstract.
if not typing.TYPE_CHECKING:
  _call = abc.abstractmethod(_call)
def __call__(self, *args, **kwargs) -> Any:
  """Call with late bound arguments.

  Args:
    *args: list arguments.
    **kwargs: keyword arguments.

  Returns:
    Any.

  Raises:
    TypeError: got multiple values for arguments or extra argument name.
  """
  args, kwargs = self._parse_call_time_overrides(*args, **kwargs)
  signature = self.__signature__

  if self.is_subclassed_functor:
    # Subclassed functors take no arguments in `_call`; map positional args
    # to keyword args and expose everything as temporary member overrides.
    for arg_spec, arg_value in zip(signature.args, args):
      kwargs[arg_spec.name] = arg_value

    # Temporarily override members with argument values from the call.
    with self._apply_call_time_overrides_to_members(**kwargs):
      return_value = self._call()
  else:
    return_value = self._call(*args, **kwargs)

  # Return value check.
  if (
      signature.return_value
      and flags.is_type_check_enabled()
      and return_value != pg_typing.MISSING_VALUE
  ):
    return_value = signature.return_value.apply(
        return_value, root_path=self.sym_path + 'returns'
    )
  # Track the functor as the origin of the returned symbolic value.
  if flags.is_tracking_origin() and isinstance(return_value, base.Symbolic):
    return_value.sym_setorigin(self, 'return')
  return return_value
@contextlib.contextmanager
def _apply_call_time_overrides_to_members(self, **kwargs):
  """Temporarily exposes `kwargs` as member overrides within the scope."""
  assert self._tls is not None
  tls_key = Functor._TLS_OVERRIDE_MEMBERS_KEY
  setattr(self._tls, tls_key, kwargs)
  try:
    yield
  finally:
    # Always drop the overrides, even when the wrapped call raises.
    delattr(self._tls, tls_key)
def _parse_call_time_overrides(
    self, *args, **kwargs
) -> Tuple[List[Any], Dict[str, Any]]:
  """Parses positional and keyword arguments from call-time overrides.

  Merges the prebound (user-specified) arguments with the arguments supplied
  at call time, honoring `override_args` and `ignore_extra_args` (which may
  themselves be overridden per-call via keyword arguments of the same names).

  Args:
    *args: Positional arguments provided at call time.
    **kwargs: Keyword arguments provided at call time; may include
      'override_args' and 'ignore_extra_args' to override instance settings.

  Returns:
    A tuple of (positional argument list, remaining keyword argument dict)
    ready to be passed to `_call`.

  Raises:
    TypeError: Extra positional/keyword arguments were provided while
      `ignore_extra_args` is False, an already-specified argument received a
      new value while `override_args` is False, or required arguments remain
      missing.
  """
  # Per-call settings take precedence over the instance-level ones.
  override_args = kwargs.pop('override_args', self._override_args)
  ignore_extra_args = kwargs.pop('ignore_extra_args', self._ignore_extra_args)

  signature = self.__signature__
  if len(args) > len(signature.args) and not signature.has_varargs:
    if ignore_extra_args:
      args = args[: len(signature.args)]
    else:
      arg_phrase = object_utils.auto_plural(len(signature.args), 'argument')
      was_phrase = object_utils.auto_plural(len(args), 'was', 'were')
      raise TypeError(
          f'{signature.id}() takes {len(signature.args)} '
          f'positional {arg_phrase} but {len(args)} {was_phrase} given.'
      )

  # Seed with the prebound, user-specified arguments.
  keyword_args = {
      k: v for k, v in self._sym_attributes.items()
      if k in self._specified_args
  }
  assert len(keyword_args) == len(self._specified_args)

  # Work out varargs when positional arguments are provided.
  varargs = None
  if signature.has_varargs:
    varargs = list(args[len(signature.args):])
    if flags.is_type_check_enabled():
      varargs = [
          signature.varargs.value_spec.apply(
              v, root_path=self.sym_path + signature.varargs.name
          )
          for v in varargs
      ]
    args = args[: len(signature.args)]

  # Convert positional arguments to keyword arguments so we can map them back
  # later.
  for i in range(len(args)):
    arg_spec = signature.args[i]
    arg_name = arg_spec.name
    if arg_name in self._specified_args and not override_args:
      raise TypeError(
          f'{signature.id}() got new value for argument {arg_name!r} '
          f"from position {i}, but 'override_args' is set to False. "
          f'Old value: {keyword_args[arg_name]!r}, new value: {args[i]!r}.'
      )
    arg_value = args[i]
    if flags.is_type_check_enabled():
      arg_value = arg_spec.value_spec.apply(
          arg_value, root_path=self.sym_path + arg_name)
    keyword_args[arg_name] = arg_value

  # Merge call-time keyword arguments.
  for arg_name, arg_value in kwargs.items():
    if arg_name in self._specified_args and not override_args:
      raise TypeError(
          f'{signature.id}() got new value for argument {arg_name!r} '
          "from keyword argument, while 'override_args' is set to "
          f'False. Old value: {keyword_args[arg_name]!r}, '
          f'new value: {arg_value!r}.'
      )
    arg_spec = signature.get_value_spec(arg_name)
    if arg_spec:
      # A known argument: always bind it, type-checking only when enabled.
      # (Fix: binding used to be guarded together with the type-check flag,
      # so valid keyword arguments could be skipped - and then rejected as
      # unexpected - whenever type checking was disabled.)
      if flags.is_type_check_enabled():
        arg_value = arg_spec.apply(
            arg_value, root_path=self.sym_path + arg_name)
      keyword_args[arg_name] = arg_value
    elif not ignore_extra_args:
      raise TypeError(
          f'{signature.id}() got an unexpected keyword argument {arg_name!r}.'
      )

  # Use positional arguments if possible. This allows us to handle varargs
  # with simplicity.
  list_args = []
  missing_required_arg_names = []
  for arg in signature.args:
    if arg.name in keyword_args:
      list_args.append(keyword_args[arg.name])
      del keyword_args[arg.name]
    elif arg.value_spec.default != pg_typing.MISSING_VALUE:
      list_args.append(arg.value_spec.default)
    else:
      missing_required_arg_names.append(arg.name)

  if missing_required_arg_names:
    arg_phrase = object_utils.auto_plural(
        len(missing_required_arg_names), 'argument')
    args_str = object_utils.comma_delimited_str(missing_required_arg_names)
    raise TypeError(
        f'{signature.id}() missing {len(missing_required_arg_names)} '
        f'required positional {arg_phrase}: {args_str}.'
    )

  if signature.has_varargs:
    prebound_varargs = keyword_args.pop(signature.varargs.name, None)
    varargs = varargs or prebound_varargs
    if varargs:
      list_args.extend(varargs)
  return list_args, keyword_args
| null |
40,806 |
pyglove.core.symbolic.functor
|
__call__
|
Call with late bound arguments.
Args:
*args: list arguments.
**kwargs: keyword arguments.
Returns:
Any.
Raises:
TypeError: got multiple values for arguments or extra argument name.
|
def __call__(self, *args, **kwargs) -> Any:
"""Call with late bound arguments.
Args:
*args: list arguments.
**kwargs: keyword arguments.
Returns:
Any.
Raises:
TypeError: got multiple values for arguments or extra argument name.
"""
args, kwargs = self._parse_call_time_overrides(*args, **kwargs)
signature = self.__signature__
if self.is_subclassed_functor:
for arg_spec, arg_value in zip(signature.args, args):
kwargs[arg_spec.name] = arg_value
# Temporarily override members with argument values from the call.
with self._apply_call_time_overrides_to_members(**kwargs):
return_value = self._call()
else:
return_value = self._call(*args, **kwargs)
# Return value check.
if (
signature.return_value
and flags.is_type_check_enabled()
and return_value != pg_typing.MISSING_VALUE
):
return_value = signature.return_value.apply(
return_value, root_path=self.sym_path + 'returns'
)
if flags.is_tracking_origin() and isinstance(return_value, base.Symbolic):
return_value.sym_setorigin(self, 'return')
return return_value
|
(self, *args, **kwargs) -> Any
|
40,809 |
pyglove.core.symbolic.functor
|
__delattr__
|
Discard a previously bound argument and reset to its default value.
|
def __delattr__(self, name: str) -> None:
"""Discard a previously bound argument and reset to its default value."""
del self._sym_attributes[name]
if self.__signature__.get_value_spec(name).has_default:
self._default_args.add(name)
self._specified_args.discard(name)
self._non_default_args.discard(name)
|
(self, name: str) -> NoneType
|
40,814 |
pyglove.core.symbolic.functor
|
__init__
| null |
@classmethod
def _update_signatures_based_on_schema(cls):
# Update the return value of subclassed functors.
if cls.is_subclassed_functor: # pylint: disable=using-constant-test
private_call_signature = pg_typing.Signature.from_callable(
cls._call, auto_typing=True
)
if (
len(private_call_signature.args) > 1
or private_call_signature.kwonlyargs
):
raise TypeError(
'`_call` of a subclassed Functor should take no argument. '
f'Encountered: {cls._call}.'
)
cls.__schema__.metadata['returns'] = private_call_signature.return_value
# Update __init_ signature.
init_signature = pg_typing.Signature.from_schema(
cls.__schema__,
name='__init__',
module_name=cls.__module__,
qualname=cls.__qualname__,
)
pseudo_init = init_signature.make_function(['pass'])
# Save the original `Functor.__init__` before overriding it.
if not hasattr(cls, '__orig_init__'):
setattr(cls, '__orig_init__', cls.__init__)
@object_utils.explicit_method_override
@functools.wraps(pseudo_init)
def _init(self, *args, **kwargs):
self.__class__.__orig_init__(self, *args, **kwargs)
setattr(cls, '__init__', _init)
# Update __call__ signature.
call_signature = pg_typing.Signature.from_schema(
cls.__schema__,
name='__call__',
module_name=cls.__module__,
qualname=cls.__qualname__,
is_method=False,
)
setattr(cls, '__signature__', call_signature)
|
(self) -> Callable[[], NoneType]
|
40,816 |
pyglove.core.symbolic.functor
|
__new__
| null |
def __new__(cls, *args, **kwargs):
instance = object.__new__(cls)
if flags.should_call_functors_during_init():
instance.__init__(*args, **kwargs)
return instance()
return instance
|
(cls, *args, **kwargs)
|
40,817 |
pyglove.core.symbolic.functor
|
__init__
|
Constructor.
Args:
*args: prebound positional arguments.
root_path: The symbolic path for current object.
override_args: If True, allows arguments provided during `__call__` to
override existing bound arguments.
ignore_extra_args: If True, unsupported arguments can be passed in
during `__call__` without using them. Otherwise, calling with
unsupported arguments will raise error.
**kwargs: prebound keyword arguments.
Raises:
KeyError: constructor got unexpected arguments.
|
@object_utils.explicit_method_override
def __init__(
self,
*args,
root_path: Optional[object_utils.KeyPath] = None,
override_args: bool = False,
ignore_extra_args: bool = False,
**kwargs):
"""Constructor.
Args:
*args: prebound positional arguments.
root_path: The symbolic path for current object.
override_args: If True, allows arguments provided during `__call__` to
override existing bound arguments.
ignore_extra_args: If True, unsupported arguments can be passed in
during `__call__` without using them. Otherwise, calling with
unsupported arguments will raise error.
**kwargs: prebound keyword arguments.
Raises:
KeyError: constructor got unexpected arguments.
"""
# NOTE(daiyip): Since Functor is usually late bound (until call time),
# we pass `allow_partial=True` during functor construction.
_ = kwargs.pop('allow_partial', None)
varargs = None
signature = self.__signature__
if len(args) > len(signature.args):
if signature.varargs:
varargs = list(args[len(signature.args) :])
args = args[: len(signature.args)]
else:
arg_phrase = object_utils.auto_plural(len(signature.args), 'argument')
was_phrase = object_utils.auto_plural(len(args), 'was', 'were')
raise TypeError(
f'{signature.id}() takes {len(signature.args)} '
f'positional {arg_phrase} but {len(args)} {was_phrase} given.'
)
bound_kwargs = dict()
for i, v in enumerate(args):
if pg_typing.MISSING_VALUE != v:
bound_kwargs[signature.args[i].name] = v
if varargs is not None:
bound_kwargs[signature.varargs.name] = varargs
for k, v in kwargs.items():
if pg_typing.MISSING_VALUE != v:
if k in bound_kwargs:
raise TypeError(
f'{signature.id}() got multiple values for keyword '
f'argument {k!r}.'
)
bound_kwargs[k] = v
default_args = set()
non_default_args = set(bound_kwargs)
for arg_spec in signature.named_args:
if not arg_spec.value_spec.has_default:
continue
arg_name = arg_spec.name
if arg_name not in non_default_args:
default_args.add(arg_name)
elif bound_kwargs[arg_name] == arg_spec.value_spec.default:
default_args.add(arg_name)
non_default_args.discard(arg_name)
if signature.varargs and not varargs:
default_args.add(signature.varargs.name)
super().__init__(allow_partial=True,
root_path=root_path,
**bound_kwargs)
self._non_default_args = non_default_args
self._default_args = default_args
self._specified_args = set(bound_kwargs)
self._override_args = override_args
self._ignore_extra_args = ignore_extra_args
# For subclassed Functor, we use thread-local storage for storing temporary
# member overrides from the arguments during functor call.
self._tls = threading.local() if self.is_subclassed_functor else None
|
(self, *args, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None, override_args: bool = False, ignore_extra_args: bool = False, **kwargs)
|
40,822 |
pyglove.core.symbolic.functor
|
_apply_call_time_overrides_to_members
|
Overrides member values within the scope.
|
def _on_change(
self, field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
"""Custom handling field change to update bound args."""
for relative_path, update in field_updates.items():
assert relative_path
if len(relative_path) != 1:
continue
arg_name = str(relative_path)
if update.field.default_value == update.new_value:
if update.field.value.has_default:
self._default_args.add(arg_name)
self._non_default_args.discard(arg_name)
else:
self._default_args.discard(arg_name)
self._non_default_args.add(arg_name)
if update.new_value == pg_typing.MISSING_VALUE:
self._specified_args.discard(arg_name)
else:
self._specified_args.add(arg_name)
|
(self, **kwargs)
|
40,823 |
pyglove.core.symbolic.functor
|
_call
|
Actual function logic. Subclasses should implement this method.
|
def _call(self, *args, **kwargs) -> Callable: # pylint: disable=g-bare-generic
"""Actual function logic. Subclasses should implement this method."""
raise NotImplementedError()
|
(self, *args, **kwargs) -> Callable
|
40,829 |
pyglove.core.symbolic.functor
|
_on_change
|
Custom handling field change to update bound args.
|
def _on_change(
self, field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
"""Custom handling field change to update bound args."""
for relative_path, update in field_updates.items():
assert relative_path
if len(relative_path) != 1:
continue
arg_name = str(relative_path)
if update.field.default_value == update.new_value:
if update.field.value.has_default:
self._default_args.add(arg_name)
self._non_default_args.discard(arg_name)
else:
self._default_args.discard(arg_name)
self._non_default_args.add(arg_name)
if update.new_value == pg_typing.MISSING_VALUE:
self._specified_args.discard(arg_name)
else:
self._specified_args.add(arg_name)
|
(self, field_updates: Dict[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.symbolic.base.FieldUpdate])
|
40,833 |
pyglove.core.symbolic.functor
|
_parse_call_time_overrides
|
Parses positional and keyword arguments from call-time overrides.
|
def _parse_call_time_overrides(
self, *args, **kwargs
) -> Tuple[List[Any], Dict[str, Any]]:
"""Parses positional and keyword arguments from call-time overrides."""
override_args = kwargs.pop('override_args', self._override_args)
ignore_extra_args = kwargs.pop('ignore_extra_args', self._ignore_extra_args)
signature = self.__signature__
if len(args) > len(signature.args) and not signature.has_varargs:
if ignore_extra_args:
args = args[: len(signature.args)]
else:
arg_phrase = object_utils.auto_plural(len(signature.args), 'argument')
was_phrase = object_utils.auto_plural(len(args), 'was', 'were')
raise TypeError(
f'{signature.id}() takes {len(signature.args)} '
f'positional {arg_phrase} but {len(args)} {was_phrase} given.'
)
keyword_args = {
k: v for k, v in self._sym_attributes.items()
if k in self._specified_args
}
assert len(keyword_args) == len(self._specified_args)
# Work out varargs when positional arguments are provided.
varargs = None
if signature.has_varargs:
varargs = list(args[len(signature.args) :])
if flags.is_type_check_enabled():
varargs = [
signature.varargs.value_spec.apply(
v, root_path=self.sym_path + signature.varargs.name
)
for v in varargs
]
args = args[: len(signature.args)]
# Convert positional arguments to keyword arguments so we can map them back
# later.
for i in range(len(args)):
arg_spec = signature.args[i]
arg_name = arg_spec.name
if arg_name in self._specified_args:
if not override_args:
raise TypeError(
f'{signature.id}() got new value for argument {arg_name!r} '
f"from position {i}, but 'override_args' is set to False. "
f'Old value: {keyword_args[arg_name]!r}, new value: {args[i]!r}.'
)
arg_value = args[i]
if flags.is_type_check_enabled():
arg_value = arg_spec.value_spec.apply(
arg_value, root_path=self.sym_path + arg_name)
keyword_args[arg_name] = arg_value
for arg_name, arg_value in kwargs.items():
if arg_name in self._specified_args:
if not override_args:
raise TypeError(
f'{signature.id}() got new value for argument {arg_name!r} '
"from keyword argument, while 'override_args' is set to "
f'False. Old value: {keyword_args[arg_name]!r}, '
f'new value: {arg_value!r}.'
)
arg_spec = signature.get_value_spec(arg_name)
if arg_spec and flags.is_type_check_enabled():
arg_value = arg_spec.apply(
arg_value, root_path=self.sym_path + arg_name)
keyword_args[arg_name] = arg_value
elif not ignore_extra_args:
raise TypeError(
f'{signature.id}() got an unexpected keyword argument {arg_name!r}.'
)
# Use positional arguments if possible. This allows us to handle varargs
# with simplicity.
list_args = []
missing_required_arg_names = []
for arg in signature.args:
if arg.name in keyword_args:
list_args.append(keyword_args[arg.name])
del keyword_args[arg.name]
elif arg.value_spec.default != pg_typing.MISSING_VALUE:
list_args.append(arg.value_spec.default)
else:
missing_required_arg_names.append(arg.name)
if missing_required_arg_names:
arg_phrase = object_utils.auto_plural(
len(missing_required_arg_names), 'argument')
args_str = object_utils.comma_delimited_str(missing_required_arg_names)
raise TypeError(
f'{signature.id}() missing {len(missing_required_arg_names)} '
f'required positional {arg_phrase}: {args_str}.'
)
if signature.has_varargs:
prebound_varargs = keyword_args.pop(signature.varargs.name, None)
varargs = varargs or prebound_varargs
if varargs:
list_args.extend(varargs)
return list_args, keyword_args
|
(self, *args, **kwargs) -> Tuple[List[Any], Dict[str, Any]]
|
40,838 |
pyglove.core.symbolic.functor
|
_sym_clone
|
Override to copy bound args.
|
def _sym_clone(self, deep: bool, memo: Any = None) -> 'Functor':
"""Override to copy bound args."""
other = super()._sym_clone(deep, memo)
# pylint: disable=protected-access
other._non_default_args = set(self._non_default_args)
other._default_args = self._default_args
other._specified_args = self._specified_args
other._override_args = self._override_args
other._ignore_extra_args = self._ignore_extra_args
# pylint: enable=protected-access
return typing.cast(Functor, other)
|
(self, deep: bool, memo: Optional[Any] = None) -> pyglove.core.symbolic.functor.Functor
|
40,840 |
pyglove.core.symbolic.functor
|
_sym_inferred
|
Overrides method to allow member overrides during call.
|
def _sym_inferred(self, key: str, **kwargs: Any) -> Any:
"""Overrides method to allow member overrides during call."""
if self._tls is not None:
overrides = getattr(self._tls, Functor._TLS_OVERRIDE_MEMBERS_KEY, {})
v = overrides.get(key, pg_typing.MISSING_VALUE)
if pg_typing.MISSING_VALUE != v:
return overrides[key]
return super()._sym_inferred(key, **kwargs)
|
(self, key: str, **kwargs: Any) -> Any
|
40,841 |
pyglove.core.symbolic.functor
|
_sym_missing
|
Returns missing values for Functor.
Semantically unbound arguments are not missing, thus we only return partial
bound arguments in `sym_missing`. As a result, a functor is partial only
when any of its bound arguments is partial.
Returns:
A dict of missing key (or path) to missing value.
|
def _sym_missing(self) -> Dict[str, Any]:
"""Returns missing values for Functor.
Semantically unbound arguments are not missing, thus we only return partial
bound arguments in `sym_missing`. As a result, a functor is partial only
when any of its bound arguments is partial.
Returns:
A dict of missing key (or path) to missing value.
"""
missing = dict()
for k, v in self._sym_attributes.items():
if pg_typing.MISSING_VALUE != v and isinstance(v, base.Symbolic):
missing_child = v.sym_missing(flatten=False)
if missing_child:
missing[k] = missing_child
return missing
|
(self) -> Dict[str, Any]
|
40,884 |
pyglove.core.symbolic.base
|
Inferential
|
Interface for values that could be dynamically inferred upon read.
Inferential values are objects whose values are not determined directly but
are instead derived from other sources, such as references (:class:`pg.Ref`)
to other objects or computed based on their context
(:class:`pg.symbolic.ValueFromParentChain`) such as the symbolic tree they
reside in.
When inferential values are utilized as symbolic attributes, we can obtain
their original definition by invoking :meth:`pg.Symbolic.sym_getattr`, and
their inferred values can be retrieved by calling
:meth:`pg.Symbolic.sym_inferred`. The values retrieved from :class:`pg.Dict`,
:class:`pg.List` and :class:`pg.Object` through `__getitem__` or
`__getattribute__` are all inferred values.
|
class Inferential(TopologyAware, pg_typing.CustomTyping):
"""Interface for values that could be dynamically inferred upon read.
Inferential values are objects whose values are not determined directly but
are instead derived from other sources, such as references (:class:`pg.Ref`)
to other objects or computed based on their context
(:class:`pg.symbolic.ValueFromParentChain`) such as the symbolic tree they
reside in.
When inferential values are utilized as symbolic attributes, we can obtain
their original definition by invoking :meth:`pg.Symbolic.sym_getattr`, and
their inferred values can be retrieved by calling
:meth:`pg.Symbolic.sym_inferred`. The values retrieved from :class:`pg.Dict`,
:class:`pg.List` and :class:`pg.Object` through `__getitem__` or
`__getattribute__` are all inferred values.
"""
@abc.abstractmethod
def infer(self, **kwargs) -> Any:
"""Returns the inferred value.
Args:
**kwargs: Optional keyword arguments for inference, which are usually
inferential subclass specific.
Returns:
Inferred value.
Raises:
AttributeError: If the value cannot be inferred.
"""
|
()
|
40,886 |
pyglove.core.symbolic.base
|
infer
|
Returns the inferred value.
Args:
**kwargs: Optional keyword arguments for inference, which are usually
inferential subclass specific.
Returns:
Inferred value.
Raises:
AttributeError: If the value cannot be inferred.
|
@abc.abstractmethod
def infer(self, **kwargs) -> Any:
"""Returns the inferred value.
Args:
**kwargs: Optional keyword arguments for inference, which are usually
inferential subclass specific.
Returns:
Inferred value.
Raises:
AttributeError: If the value cannot be inferred.
"""
|
(self, **kwargs) -> Any
|
40,887 |
pyglove.core.symbolic.base
|
sym_setparent
|
Sets the parent of this object.
|
@abc.abstractmethod
def sym_setparent(self, parent: Optional['TopologyAware']) -> None:
"""Sets the parent of this object."""
|
(self, parent: Optional[pyglove.core.symbolic.base.TopologyAware]) -> NoneType
|
40,888 |
pyglove.core.symbolic.base
|
sym_setpath
|
Sets the path of this object under its topology.
|
@abc.abstractmethod
def sym_setpath(self, path: object_utils.KeyPath) -> None:
"""Sets the path of this object under its topology."""
|
(self, path: pyglove.core.object_utils.value_location.KeyPath) -> NoneType
|
40,889 |
pyglove.core.symbolic.inferred
|
InferredValue
|
Base class for inferred values.
|
class InferredValue(Object, base.Inferential):
"""Base class for inferred values."""
def custom_apply(self, *args, **kwargs: Any) -> Tuple[bool, Any]:
# This is to make a ``InferredValue`` object assignable
# to any symbolic attribute.
return (False, self)
|
()
|
40,925 |
pyglove.core.symbolic.inferred
|
custom_apply
| null |
def custom_apply(self, *args, **kwargs: Any) -> Tuple[bool, Any]:
# This is to make a ``InferredValue`` object assignable
# to any symbolic attribute.
return (False, self)
|
(self, *args, **kwargs: Any) -> Tuple[bool, Any]
|
40,964 |
pyglove.core.symbolic.list
|
Insertion
|
Class that marks a value to insert into a list.
Example::
l = pg.List([0, 1])
l.rebind({
0: pg.Insertion(2)
})
assert l == [2, 0, 1]
|
class Insertion:
"""Class that marks a value to insert into a list.
Example::
l = pg.List([0, 1])
l.rebind({
0: pg.Insertion(2)
})
assert l == [2, 0, 1]
"""
value: Any
|
(value: Any) -> None
|
40,965 |
pyglove.core.symbolic.list
|
__eq__
| null |
# Copyright 2022 The PyGlove Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Symbolic list."""
import dataclasses
import math
import numbers
import typing
from typing import Any, Callable, Dict, Iterable, Iterator, Optional, Tuple, Union
from pyglove.core import object_utils
from pyglove.core import typing as pg_typing
from pyglove.core.symbolic import base
from pyglove.core.symbolic import flags
class List(list, base.Symbolic, pg_typing.CustomTyping):
"""Symbolic list.
``pg.List`` implements a list type whose instances are symbolically
programmable, which is a subclass of the built-in Python ``list``,
and the subclass of ``pg.Symbolic``.
``pg.List`` can be used as a regular list::
# Construct a symbolic list from an iterable object.
l = pg.List(range(10))
It also supports symbolic validation through the ``value_spec`` argument::
l = pg.List([1, 2, 3], value_spec=pg.typing.List(
pg.typing.Int(min_value=1),
max_size=10
))
# Raises: 0 is not in acceptable range.
l.append(0)
And can be symbolically manipulated::
l = pg.List([{'foo': 1}])
l.rebind({
'[0].foo': 2
})
pg.query(l, where=lambda x: isinstance(x, int))
The user can also subscribe to changes to its sub-nodes::
def on_change(updates):
print(updates)
l = pg.List([{'foo': 1}], onchange_callback=on_change)
# `on_change` will be triggered on item insertion.
l.append({'bar': 2})
# `on_change` will be triggered on item removal.
l.pop(0)
# `on_change` will also be triggered on subtree change.
l.rebind({'[0].bar': 3})
"""
@classmethod
def partial(cls,
            items: Optional[Iterable[Any]] = None,
            *,
            value_spec: Optional[pg_typing.List] = None,
            onchange_callback: Optional[Callable[
                [Dict[object_utils.KeyPath, base.FieldUpdate]], None]] = None,
            **kwargs) -> 'List':
  """Creates a `List` that permits unbound or partially-bound items."""
  # Identical to the regular constructor except `allow_partial` is forced on.
  return cls(
      items,
      value_spec=value_spec,
      onchange_callback=onchange_callback,
      allow_partial=True,
      **kwargs)
@classmethod
def from_json(cls,
              json_value: Any,
              *,
              value_spec: Optional[pg_typing.List] = None,
              allow_partial: bool = False,
              root_path: Optional[object_utils.KeyPath] = None,
              **kwargs) -> 'List':
  """Class method that load an symbolic List from a JSON value.

  Example::

    l = List.from_json([{
        '_type': '__main__.Foo',
        'f1': 1,
        'f2': {
          'f21': True
        }
      },
      1
    ])

    assert l.value_spec is None
    # Okay:
    l.append('abc')

    # [0].f2 is bound by class Foo's field 'f2' definition
    # (assuming it defines a schema for the Dict field).
    assert l[0].f2.value_spec is not None

    # Not okay:
    l[0].f2.abc = 1

  Args:
    json_value: Input JSON value, only JSON list is acceptable.
    value_spec: An optional `pg.typing.List` object as the schema for the
      list.
    allow_partial: Whether to allow elements of the list to be partial.
    root_path: KeyPath of loaded object in its object tree.
    **kwargs: Allow passing through keyword arguments that are not applicable.

  Returns:
    A schema-less symbolic list, but its items maybe symbolic.
  """
  del kwargs  # Inapplicable pass-through keyword arguments are discarded.
  return cls(
      json_value,
      value_spec=value_spec,
      allow_partial=allow_partial,
      root_path=root_path)
def __init__(
    self,
    items: Optional[Iterable[Any]] = None,
    *,
    value_spec: Optional[pg_typing.List] = None,
    onchange_callback: Optional[Callable[
        [Dict[object_utils.KeyPath, base.FieldUpdate]], None]] = None,
    allow_partial: bool = False,
    accessor_writable: bool = True,
    sealed: bool = False,
    root_path: Optional[object_utils.KeyPath] = None):
  """Constructor.

  Args:
    items: An optional iterable object as initial value for this list.
    value_spec: Value spec that applies to this List.
    onchange_callback: Callback when sub-tree has been modified.
    allow_partial: Whether to allow unbound or partial fields. This takes
      effect only when value_spec is not None.
    accessor_writable: Whether to allow modification of this List using
      accessors (operator[]).
    sealed: Whether to seal this List after creation.
    root_path: KeyPath of this List in its object tree.
  """
  if value_spec and not isinstance(value_spec, pg_typing.List):
    raise TypeError(
        f'Argument \'value_spec\' must be a `pg.typing.List` object. '
        f'Encountered {value_spec}.')

  # We delay seal operation until items are filled.
  base.Symbolic.__init__(
      self,
      allow_partial=allow_partial,
      accessor_writable=accessor_writable,
      sealed=False,
      root_path=root_path)

  # Both are set to None first so item insertion below is unconstrained and
  # does not trigger change callbacks.
  self._value_spec = None
  self._onchange_callback = None

  list.__init__(self)
  if items:
    # Copy the symbolic form instead of evaluated form.
    if isinstance(items, List):
      items = items.sym_values()
    for item in items:
      self._set_item_without_permission_check(len(self), item)

  if value_spec:
    self.use_value_spec(value_spec, allow_partial)

  # NOTE(daiyip): We set onchange callback at the end of init to avoid
  # triggering during initialization.
  self._onchange_callback = onchange_callback
  self.seal(sealed)
@property
def max_size(self) -> Optional[int]:
  """Returns max size of this list."""
  # Without a value spec there is no size constraint.
  if not self._value_spec:
    return None
  list_key = typing.cast(pg_typing.ListKey, self._value_spec.element.key)
  return list_key.max_value
def use_value_spec(self,
                   value_spec: Optional[pg_typing.List],
                   allow_partial: bool = False) -> 'List':
  """Applies a ``pg.List`` as the value spec for current list.
  Args:
    value_spec: A List ValueSpec to apply to this List.
      If current List is schema-less (whose immediate members are not
      validated against schema), and `value_spec` is not None, the value spec
      will be applied to the List.
      Or else if current List is already symbolic (whose immediate members
      are under the constraint of a List value spec), and `value_spec` is
      None, current List will become schema-less. However, the schema
      constraints for non-immediate members will remain.
    allow_partial: Whether to allow a partial list based on the schema. This
      flag will override allow_partial flag in __init__ for spec-less List.
  Returns:
    Self.
  Raises:
    ValueError: schema validation failed due to value error.
    RuntimeError: List is already bound with another value_spec.
    TypeError: type errors during validation.
    KeyError: key errors during validation.
  """
  if value_spec is None:
    # Dropping the spec makes the list schema-less and accessor-writable.
    self._value_spec = None
    self._accessor_writable = True
    return self
  if not isinstance(value_spec, pg_typing.List):
    raise ValueError(
        self._error_message(
            f'Value spec for list must be a `pg.typing.List` object. '
            f'Encountered: {value_spec!r}'))
  if self._value_spec and self._value_spec != value_spec:
    raise RuntimeError(
        self._error_message(
            f'List is already bound with a different value '
            f'spec: {self._value_spec}. New value spec: {value_spec}.'))
  self._allow_partial = allow_partial
  if flags.is_type_check_enabled():
    # NOTE(daiyip): self._value_spec will be set in List.custom_apply method
    # called by spec.apply, thus we don't need to set the _value_spec
    # explicitly.
    value_spec.apply(
        self,
        allow_partial=base.accepts_partial(self),
        child_transform=base.symbolic_transform_fn(self._allow_partial),
        root_path=self.sym_path)
  else:
    self._value_spec = value_spec
  return self
@property
def value_spec(self) -> Optional[pg_typing.List]:
  """Returns the value spec constraining this List, or None if schema-less."""
  return self._value_spec
def sym_attr_field(self, key: Union[str, int]) -> Optional[pg_typing.Field]:
  """Returns the field definition for a symbolic attribute."""
  del key  # Every element of a list shares the same element field.
  spec = self._value_spec
  return None if spec is None else spec.element
def sym_hasattr(self, key: Union[str, int]) -> bool:
  """Tests if a symbolic attribute exists (an in-range integer index)."""
  if not isinstance(key, numbers.Integral):
    return False
  return -len(self) <= key < len(self)
def sym_keys(self) -> Iterator[int]:
  """Symbolically iterates indices."""
  yield from range(len(self))
def sym_values(self) -> Iterator[Any]:
  """Iterates the values of symbolic attributes."""
  # Bypasses `__getitem__` so inferential values are yielded un-evaluated.
  for idx in range(len(self)):
    yield super().__getitem__(idx)
def sym_items(self) -> Iterator[Tuple[int, Any]]:
  """Iterates the (key, value) pairs of symbolic attributes."""
  for idx in range(len(self)):
    yield idx, super().__getitem__(idx)
def sym_hash(self) -> int:
  """Symbolically hashing: class identity plus element-wise symbolic hashes."""
  element_hashes = tuple(base.sym_hash(v) for v in self.sym_values())
  return base.sym_hash((self.__class__, element_hashes))
def _sym_getattr(self, key: int) -> Any:  # pytype: disable=signature-mismatch # overriding-parameter-type-checks
  """Gets symbolic attribute by index."""
  # Reads the stored (symbolic) value directly, without `sym_inferred`
  # resolution performed by `__getitem__`.
  return super().__getitem__(key)
def _sym_clone(self, deep: bool, memo=None) -> 'List':
  """Override Symbolic._clone."""
  source = []
  for v in self.sym_values():
    # Symbolic children are always cloned (even for shallow copy), so the
    # clone never shares symbolic sub-trees with the original.
    if deep or isinstance(v, base.Symbolic):
      v = base.clone(v, deep, memo)
    source.append(v)
  # NOTE: the value spec object is shared (not copied) with the clone.
  return List(
      source,
      value_spec=self._value_spec,
      allow_partial=self._allow_partial,
      accessor_writable=self._accessor_writable,
      # NOTE(daiyip): parent and root_path are reset to empty
      # for copy object.
      root_path=None)
def _sym_missing(self) -> Dict[Any, Any]:
  """Returns missing fields of symbolic children, keyed by element index."""
  missing = {}
  for idx, child in self.sym_items():
    if not isinstance(child, base.Symbolic):
      continue
    child_missing = child.sym_missing(flatten=False)
    if child_missing:
      missing[idx] = child_missing
  return missing
def _sym_rebind(
    self, path_value_pairs: typing.Dict[object_utils.KeyPath, Any]
    ) -> typing.List[base.FieldUpdate]:
  """Subclass specific rebind implementation."""
  updates = []
  # Apply the updates in reverse order, so the operated path will not alter
  # from insertions and deletions.
  path_value_pairs = sorted(
      path_value_pairs.items(), key=lambda x: x[0], reverse=True)
  for k, v in path_value_pairs:
    update = self._set_item_of_current_tree(k, v)
    if update is not None:
      updates.append(update)
  # Reverse the updates so the update is from the smallest number to
  # the largest.
  updates.reverse()
  return updates
def _sym_nondefault(self) -> Dict[int, Any]:
  """Returns non-default values keyed by element index."""
  result = {}
  for idx, child in self.sym_items():
    if isinstance(child, base.Symbolic):
      # Only record symbolic children that actually deviate from defaults.
      child_nondefault = child.non_default_values(flatten=False)
      if child_nondefault:
        result[idx] = child_nondefault
    else:
      # Non-symbolic elements are always considered non-default.
      result[idx] = child
  return result
def seal(self, sealed: bool = True) -> 'List':
  """Seal or unseal current object from further modification."""
  if self.is_sealed == sealed:
    return self
  # Propagate the seal state to symbolic children before sealing self.
  for child in self.sym_values():
    if isinstance(child, base.Symbolic):
      child.seal(sealed)
  super().seal(sealed)
  return self
def _update_children_paths(
    self,
    old_path: object_utils.KeyPath,
    new_path: object_utils.KeyPath) -> None:
  """Update children paths according to root_path of current node."""
  del old_path  # Children paths are derived solely from the new root path.
  for idx, child in self.sym_items():
    if isinstance(child, base.TopologyAware):
      child.sym_setpath(object_utils.KeyPath(idx, new_path))
def _set_item_without_permission_check(  # pytype: disable=signature-mismatch # overriding-parameter-type-checks
    self, key: int, value: Any) -> Optional[base.FieldUpdate]:
  """Set or add an item without permission check.

  Returns a `FieldUpdate` describing the change, or None for no-ops
  (appending MISSING_VALUE, or re-assigning the identical object).
  """
  assert isinstance(key, numbers.Integral), key
  index = key
  if index >= len(self):
    # Appending MISSING_VALUE is considered no-op.
    if value == pg_typing.MISSING_VALUE:
      return None
    # Any out-of-range index is treated as an append.
    index = len(self)
  should_insert = False
  # An `Insertion` wrapper marks the value for insertion (shifting later
  # elements) instead of replacing the element at `index`.
  if isinstance(value, Insertion):
    should_insert = True
    value = value.value
  old_value = pg_typing.MISSING_VALUE
  # Replace an existing value.
  if index < len(self) and not should_insert:
    old_value = list.__getitem__(self, index)
    # Generates no update as old value is the same as the new value.
    if old_value is value:
      return None
  new_value = self._formalized_value(index, value)
  if index < len(self):
    if should_insert:
      list.insert(self, index, new_value)
    else:
      list.__setitem__(self, index, new_value)
      # Detach old value from object tree.
      if isinstance(old_value, base.TopologyAware):
        old_value.sym_setparent(None)
  else:
    super().append(new_value)
  return base.FieldUpdate(
      self.sym_path + index, self,
      self._value_spec.element if self._value_spec else None,
      old_value, new_value)
def _formalized_value(self, idx: int, value: Any):
  """Get transformed (formal) value from user input."""
  allow_partial = base.accepts_partial(self)
  # Deserialize JSON-convertible forms (e.g. dicts with `_type`) first.
  value = base.from_json(
      value,
      allow_partial=allow_partial,
      root_path=object_utils.KeyPath(idx, self.sym_path))
  if self._value_spec and flags.is_type_check_enabled():
    # Validate/transform the value against the element spec.
    value = self._value_spec.element.apply(
        value,
        allow_partial=allow_partial,
        transform_fn=base.symbolic_transform_fn(self._allow_partial),
        root_path=object_utils.KeyPath(idx, self.sym_path))
  # Re-parent symbolic values under this list at position `idx`.
  return self._relocate_if_symbolic(idx, value)
@property
def _subscribes_field_updates(self) -> bool:
  """Returns True if current list subscribes field updates.

  This is the case when an `onchange_callback` was provided at construction.
  """
  return self._onchange_callback is not None
def _on_change(self,
               field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
  """On change event of List: compacts items and refreshes child paths."""
  # NOTE(daiyip): Remove items that are MISSING_VALUES.
  keys_to_remove = []
  for i, item in self.sym_items():
    if pg_typing.MISSING_VALUE == item:
      keys_to_remove.append(i)
  if keys_to_remove:
    # Delete from the tail first so earlier indices stay valid.
    for i in reversed(keys_to_remove):
      list.__delitem__(self, i)
  # Update paths for children.
  for idx, item in self.sym_items():
    if isinstance(item, base.TopologyAware) and item.sym_path.key != idx:
      item.sym_setpath(object_utils.KeyPath(idx, self.sym_path))
  if self._onchange_callback is not None:
    self._onchange_callback(field_updates)
def _parse_slice(self, index: slice) -> Tuple[int, int, int]:
start = index.start if index.start is not None else 0
start = max(-len(self), start)
start = min(len(self), start)
if start < 0:
start += len(self)
stop = index.stop if index.stop is not None else len(self)
stop = max(-len(self), stop)
stop = min(len(self), stop)
if stop < 0:
stop += len(self)
step = index.step if index.step is not None else 1
return start, stop, step
def _init_kwargs(self) -> typing.Dict[str, Any]:
  """Returns kwargs for reconstructing this List (e.g. during pickling)."""
  kwargs = super()._init_kwargs()
  # Only non-default flags are recorded.
  if not self._accessor_writable:
    kwargs['accessor_writable'] = False
  if self._onchange_callback is not None:
    kwargs['onchange_callback'] = self._onchange_callback
  # NOTE(daiyip): We do not serialize ValueSpec for now as in most use
  # cases they come from the subclasses of `pg.Object`.
  return kwargs
def __getstate__(self) -> Any:
  """Customizes pickle.dump."""
  return {'value': list(self), 'kwargs': self._init_kwargs()}
def __setstate__(self, state) -> None:
  """Customizes pickle.load."""
  value, kwargs = state['value'], state['kwargs']
  self.__init__(value, **kwargs)
def __getitem__(self, index) -> Any:
  """Gets the item (or items, for a slice) at a given position."""
  if isinstance(index, slice):
    start, stop, step = self._parse_slice(index)
    return [self[i] for i in range(start, stop, step)]
  if not isinstance(index, numbers.Integral):
    raise TypeError(
        f'list index must be an integer. Encountered {index!r}.')
  if not (-len(self) <= index < len(self)):
    raise IndexError('list index out of range')
  return self.sym_inferred(index)
def __iter__(self):
  """Iterates the list, resolving inferential values via `sym_inferred`."""
  for idx in range(len(self)):
    yield self.sym_inferred(idx)
def __setitem__(self, index, value: Any) -> None:
  """Set item in this List.

  Supports both integer indices and slices (including extended slices).
  Raises WritePermissionError on sealed or accessor-read-only lists.
  """
  if base.treats_as_sealed(self):
    raise base.WritePermissionError(
        self._error_message('Cannot set item for a sealed List.'))
  if not base.writtable_via_accessors(self):
    raise base.WritePermissionError(
        self._error_message('Cannot modify List item by __setitem__ while '
                            'accessor_writable is set to False. '
                            'Use \'rebind\' method instead.'))
  if isinstance(index, slice):
    start, stop, step = self._parse_slice(index)
    replacements = [self._formalized_value(i, v) for i, v in enumerate(value)]
    if step < 0:
      # Normalize negative steps by reversing the replacements.
      replacements.reverse()
      step = -step
    slice_size = math.ceil((stop - start) * 1.0 / step)
    if step == 1:
      # Contiguous slice: extra values are inserted; shortfall slots are
      # filled with MISSING_VALUE (removed later by `_on_change`).
      if slice_size < len(replacements):
        for i in range(slice_size, len(replacements)):
          replacements[i] = Insertion(replacements[i])
      else:
        replacements.extend(
            [pg_typing.MISSING_VALUE
             for _ in range(slice_size - len(replacements))])
    elif slice_size != len(replacements):
      # Extended slices require an exact size match, mirroring built-in list.
      raise ValueError(
          f'attempt to assign sequence of size {len(replacements)} to '
          f'extended slice of size {slice_size}')
    updates = []
    for i, r in enumerate(replacements):
      update = self._set_item_without_permission_check(start + i * step, r)
      if update is not None:
        updates.append(update)
    if flags.is_change_notification_enabled() and updates:
      self._notify_field_updates(updates)
  elif isinstance(index, numbers.Integral):
    if index < -len(self) or index >= len(self):
      raise IndexError(
          f'list assignment index out of range. '
          f'Length={len(self)}, index={index}')
    update = self._set_item_without_permission_check(index, value)
    if flags.is_change_notification_enabled() and update:
      self._notify_field_updates([update])
  else:
    raise TypeError(
        f'list assignment index must be an integer. Encountered {index!r}.')
def __delitem__(self, index: int) -> None:
  """Delete an item from the List."""
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot delete item from a sealed List.')
  if not base.writtable_via_accessors(self):
    raise base.WritePermissionError(
        self._error_message('Cannot delete List item while accessor_writable '
                            'is set to False. '
                            'Use \'rebind\' method instead.'))
  # NOTE: unlike __setitem__, slices are not supported here.
  if not isinstance(index, numbers.Integral):
    raise TypeError(
        f'list index must be an integer. Encountered {index!r}.')
  if index < -len(self) or index >= len(self):
    raise IndexError(
        f'list index out of range. '
        f'Length={len(self)}, index={index}')
  old_value = self.sym_getattr(index)
  super().__delitem__(index)
  # Deletion is reported as an update from the old value to MISSING_VALUE.
  if flags.is_change_notification_enabled():
    self._notify_field_updates([
        base.FieldUpdate(
            self.sym_path + index, self,
            self._value_spec.element if self._value_spec else None,
            old_value, pg_typing.MISSING_VALUE)
    ])
def __add__(self, other: Iterable[Any]) -> 'List':
  """Returns a concatenated List of self and other."""
  result = self.copy()
  result.extend(other)
  return result
def __mul__(self, n: int) -> 'List':
  """Returns a repeated List of self."""
  # For n <= 0 the loop body never runs, yielding an empty List.
  result = List()
  for _ in range(n):
    result.extend(self)
  if self._value_spec is not None:
    result.use_value_spec(self._value_spec)
  return result
def __rmul__(self, n: int) -> 'List':
  """Returns a repeated List of self."""
  return self.__mul__(n)
def copy(self) -> 'List':
  """Returns a shallow copy of current list."""
  # NOTE(review): only the value spec is carried over here; flags such as
  # accessor_writable/allow_partial fall back to constructor defaults —
  # confirm this is intended (contrast with `_sym_clone`).
  return List(super().copy(), value_spec=self._value_spec)
def append(self, value: Any) -> None:
  """Appends an item."""
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot append element on a sealed List.')
  limit = self.max_size
  if limit is not None and len(self) >= limit:
    raise ValueError(f'List reached its max size {limit}.')
  # Appending is a set at index len(self).
  update = self._set_item_without_permission_check(len(self), value)
  if flags.is_change_notification_enabled() and update:
    self._notify_field_updates([update])
def insert(self, index: int, value: Any) -> None:
  """Inserts an item at a given position."""
  if base.treats_as_sealed(self):
    raise base.WritePermissionError(
        'Cannot insert element into a sealed List.')
  if self.max_size is not None and len(self) >= self.max_size:
    raise ValueError(f'List reached its max size {self.max_size}.')
  # `mark_as_insertion` wraps the value so the setter inserts at `index`
  # instead of replacing the element there.
  update = self._set_item_without_permission_check(
      index, mark_as_insertion(value))
  if flags.is_change_notification_enabled() and update:
    self._notify_field_updates([update])
def pop(self, index: int = -1) -> Any:
  """Pop an item and return its value."""
  if not (-len(self) <= index < len(self)):
    raise IndexError('pop index out of range')
  index %= len(self)  # Translate a negative index to its absolute position.
  value = self[index]
  # Popping must work even when accessors are read-only.
  with flags.allow_writable_accessors(True):
    del self[index]
  return value
def remove(self, value: Any) -> None:
  """Removes the first occurrence of the value."""
  for i, item in self.sym_items():
    if item == value:
      # Enforce the schema's lower bound before shrinking the list.
      if (self._value_spec and self._value_spec.min_size == len(self)):
        raise ValueError(
            f'Cannot remove item: min size ({self._value_spec.min_size}) '
            f'is reached.')
      del self[i]
      return
  raise ValueError(f'{value!r} not in list.')
def extend(self, other: Iterable[Any]) -> None:
  """Extends this list with items from `other`, preserving symbolic form."""
  # NOTE(daiyip): THIS IS A WORKAROUND FOR WORKING WITH PICKLE.
  # `pg.List` is a subclass of `list`, therefore, when pickle loads a list,
  # it tries to set the list values directly by calling `extend` without
  # calling `pg.List.__init__` at the first place. As a result, an error will
  # raise, which complains about that an attribute set up during `__init__` is
  # not available. A mitigation to this issue is to detect such calls in
  # `extend`, and simply do nothing as follows, which will give a chance to
  # `pg.List.__getstate__` to deal with the restoration logic as an object
  # (instead of a list).
  if not hasattr(self, '_sym_parent'):
    return
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot extend a sealed List.')
  # Extend on the symbolic form instead of the evaluated form.
  iter_other = other.sym_values() if isinstance(other, List) else other
  other = list(iter_other)
  # Validate the size bound up-front so a failed extend leaves self unchanged.
  if self.max_size is not None and len(self) + len(other) > self.max_size:
    raise ValueError(
        f'Cannot extend List: the number of elements '
        f'({len(self) + len(other)}) exceeds max size ({self.max_size}).')
  updates = []
  for v in other:
    update = self._set_item_without_permission_check(len(self), v)
    if update is not None:
      updates.append(update)
  if flags.is_change_notification_enabled() and updates:
    self._notify_field_updates(updates)
def clear(self) -> None:
  """Clears the list."""
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot clear a sealed List.')
  spec = self._value_spec
  if spec and spec.min_size > 0:
    # A non-zero lower bound forbids emptying the list.
    raise ValueError(
        f'List cannot be cleared: min size is {spec.min_size}.')
  super().clear()
def sort(self, *, key=None, reverse=False) -> None:
  """Sorts the items of the list in place."""
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot sort a sealed List.')
  super().sort(key=key, reverse=reverse)
def reverse(self) -> None:
  """Reverse the elements of the list in place."""
  # In-place reversal is a mutation, hence disallowed on sealed lists.
  if base.treats_as_sealed(self):
    raise base.WritePermissionError('Cannot reverse a sealed List.')
  super().reverse()
def custom_apply(
    self,
    path: object_utils.KeyPath,
    value_spec: pg_typing.ValueSpec,
    allow_partial: bool,
    child_transform: Optional[
        Callable[[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
    ) -> Tuple[bool, 'List']:
  """Implement pg.typing.CustomTyping interface.
  Args:
    path: KeyPath of current object.
    value_spec: Origin value spec of the field.
    allow_partial: Whether allow partial object to be created.
    child_transform: Function to transform child node values in dict_obj into
      their final values. Transform function is called on leaf nodes first,
      then on their containers, recursively.
  Returns:
    A tuple (proceed_with_standard_apply, transformed value)
  """
  proceed_with_standard_apply = True
  if self._value_spec:
    if value_spec and not value_spec.is_compatible(self._value_spec):
      raise ValueError(
          object_utils.message_on_path(
              f'List (spec={self._value_spec!r}) cannot be assigned to an '
              f'incompatible field (spec={value_spec!r}).', path))
    # Skip re-validation when the partiality mode is unchanged.
    if self._allow_partial == allow_partial:
      proceed_with_standard_apply = False
    else:
      self._allow_partial = allow_partial
  elif isinstance(value_spec, pg_typing.List):
    # A schema-less list adopts the applied spec directly.
    self._value_spec = value_spec
  return (proceed_with_standard_apply, self)
def sym_jsonify(
    self,
    use_inferred: bool = False,
    **kwargs) -> object_utils.JSONValueType:
  """Converts current list to a list of plain Python objects."""
  def json_item(idx):
    # Serialize the symbolic form; optionally resolve inferential values.
    v = self.sym_getattr(idx)
    if use_inferred and isinstance(v, base.Inferential):
      v = self.sym_inferred(idx, default=v)
    return base.to_json(v, use_inferred=use_inferred, **kwargs)
  return [json_item(i) for i in range(len(self))]
def format(
    self,
    compact: bool = False,
    verbose: bool = True,
    root_indent: int = 0,
    *,
    python_format: bool = False,
    use_inferred: bool = False,
    cls_name: Optional[str] = None,
    bracket_type: object_utils.BracketType = object_utils.BracketType.SQUARE,
    **kwargs) -> str:
  """Formats this List."""
  def _indent(text, indent):
    return ' ' * 2 * indent + text
  cls_name = cls_name or ''
  open_bracket, close_bracket = object_utils.bracket_chars(bracket_type)
  s = [f'{cls_name}{open_bracket}']
  if compact:
    # Compact mode: single line, comma-separated.
    kv_strs = []
    for idx, elem in self.sym_items():
      if use_inferred and isinstance(elem, base.Inferential):
        elem = self.sym_inferred(idx, default=elem)
      v_str = object_utils.format(
          elem, compact, verbose, root_indent + 1,
          python_format=python_format, use_inferred=use_inferred, **kwargs)
      if python_format:
        kv_strs.append(v_str)
      else:
        # Non-Python format labels each value with its index.
        kv_strs.append(f'{idx}: {v_str}')
    s.append(', '.join(kv_strs))
    s.append(close_bracket)
  else:
    # Multi-line mode: one element per line, indented.
    if self:
      for idx, elem in self.sym_items():
        if use_inferred and isinstance(elem, base.Inferential):
          elem = self.sym_inferred(idx, default=elem)
        if idx == 0:
          s.append('\n')
        else:
          s.append(',\n')
        v_str = object_utils.format(
            elem, compact, verbose, root_indent + 1,
            python_format=python_format, use_inferred=use_inferred, **kwargs)
        if python_format:
          s.append(_indent(v_str, root_indent + 1))
        else:
          s.append(_indent(f'{idx} : {v_str}', root_indent + 1))
      s.append('\n')
      s.append(_indent(close_bracket, root_indent))
    else:
      s.append(close_bracket)
  return ''.join(s)
def __copy__(self) -> 'List':
  """Overrides copy.copy to produce a shallow symbolic clone."""
  return self.sym_clone(deep=False)
def __deepcopy__(self, memo) -> 'List':
  """Overrides copy.deepcopy to produce a deep symbolic clone."""
  return self.sym_clone(deep=True, memo=memo)
def __hash__(self) -> int:
  """Overridden hashing function, delegating to symbolic hash."""
  return self.sym_hash()
|
(self, other)
|
40,967 |
pyglove.core.symbolic.list
|
__repr__
| null |
def use_value_spec(self,
                   value_spec: Optional[pg_typing.List],
                   allow_partial: bool = False) -> 'List':
  """Applies a ``pg.List`` as the value spec for current list.
  Args:
    value_spec: A List ValueSpec to apply to this List.
      If current List is schema-less (whose immediate members are not
      validated against schema), and `value_spec` is not None, the value spec
      will be applied to the List.
      Or else if current List is already symbolic (whose immediate members
      are under the constraint of a List value spec), and `value_spec` is
      None, current List will become schema-less. However, the schema
      constraints for non-immediate members will remain.
    allow_partial: Whether to allow a partial list based on the schema. This
      flag will override allow_partial flag in __init__ for spec-less List.
  Returns:
    Self.
  Raises:
    ValueError: schema validation failed due to value error.
    RuntimeError: List is already bound with another value_spec.
    TypeError: type errors during validation.
    KeyError: key errors during validation.
  """
  if value_spec is None:
    # Dropping the spec makes the list schema-less and accessor-writable.
    self._value_spec = None
    self._accessor_writable = True
    return self
  if not isinstance(value_spec, pg_typing.List):
    raise ValueError(
        self._error_message(
            f'Value spec for list must be a `pg.typing.List` object. '
            f'Encountered: {value_spec!r}'))
  if self._value_spec and self._value_spec != value_spec:
    raise RuntimeError(
        self._error_message(
            f'List is already bound with a different value '
            f'spec: {self._value_spec}. New value spec: {value_spec}.'))
  self._allow_partial = allow_partial
  if flags.is_type_check_enabled():
    # NOTE(daiyip): self._value_spec will be set in List.custom_apply method
    # called by spec.apply, thus we don't need to set the _value_spec
    # explicitly.
    value_spec.apply(
        self,
        allow_partial=base.accepts_partial(self),
        child_transform=base.symbolic_transform_fn(self._allow_partial),
        root_path=self.sym_path)
  else:
    self._value_spec = value_spec
  return self
40,968 |
pyglove.core.object_utils.json_conversion
|
JSONConvertible
|
Interface for classes whose instances are convertible from/to JSON.
A JSON convertible object is an object that can be converted into plain Python
objects, hence can be serialized into or deserialized from JSON.
Subclasses of ``JSONConvertible`` should implement:
* ``to_json``: A method that returns a plain Python dict with a `_type`
property whose value should identify the class.
* ``from_json``: A class method that takes a plain Python dict and returns
an instance of the class.
Example::
class MyObject(pg.JSONConvertible):
def __init__(self, x: int):
self.x = x
def to_json(self, **kwargs):
return {
'_type': 'MyObject',
'x': self.x
}
@classmethod
def from_json(cls, json_value, **kwargs):
return cls(json_value['x'])
All symbolic types (see :class:`pyglove.Symbolic`) are JSON convertible.
|
class JSONConvertible(metaclass=abc.ABCMeta):
  """Interface for classes whose instances are convertible from/to JSON.
  A JSON convertible object is an object that can be converted into plain
  Python objects, hence can be serialized into or deserialized from JSON.
  Subclasses of ``JSONConvertible`` should implement:
  * ``to_json``: A method that returns a plain Python dict with a `_type`
    property whose value should identify the class.
  * ``from_json``: A class method that takes a plain Python dict and returns
    an instance of the class.
  Example::
    class MyObject(pg.JSONConvertible):
      def __init__(self, x: int):
        self.x = x
      def to_json(self, **kwargs):
        return {
          '_type': 'MyObject',
          'x': self.x
        }
      @classmethod
      def from_json(cls, json_value, **kwargs):
        return cls(json_value['x'])
  All symbolic types (see :class:`pyglove.Symbolic`) are JSON convertible.
  """
  # Registry for looking up the type definition for a string identifier during
  # deserialization. One key can be used for only one type, while the same type
  # can be registered with many different string identifiers, which can be
  # useful to allow backward compatibility of existing serialized strings.
  _TYPE_REGISTRY = _TypeRegistry()
  # Key in serialized dict that represents the class to restore.
  TYPE_NAME_KEY = '_type'
  # Marker (as the first element of a list) for serializing tuples.
  TUPLE_MARKER = '__tuple__'
  # Type converter that converts a complex type to basic JSON value type.
  # When this field is set by users, the converter will be invoked when a
  # complex value cannot be serialized by existing methods.
  TYPE_CONVERTER: Optional[
      Callable[[Type[Any]], Callable[[Any], JSONValueType]]] = None
  # Class property that indicates whether to automatically register class
  # for deserialization. Subclass can override.
  auto_register = True
  @classmethod
  def from_json(cls, json_value: JSONValueType, **kwargs) -> 'JSONConvertible':
    """Creates an instance of this class from a plain Python value.
    NOTE(daiyip): ``pg.Symbolic`` overrides ``from_json`` class method.
    Args:
      json_value: JSON value type.
      **kwargs: Keyword arguments as flags to control object creation.
    Returns:
      An instance of cls.
    """
    del kwargs
    assert isinstance(json_value, dict)
    # The `_type` key identifies the class and is not an init argument.
    init_args = {k: from_json(v) for k, v in json_value.items()
                 if k != JSONConvertible.TYPE_NAME_KEY}
    return cls(**init_args)
  @abc.abstractmethod
  def to_json(self, **kwargs) -> JSONValueType:
    """Returns a plain Python value as a representation for this object.
    Plain Python values are basic Python types that can be serialized into
    JSON, e.g: ``bool``, ``int``, ``float``, ``str``, ``dict`` (with string
    keys), ``list``, ``tuple`` where the container types should have plain
    Python values as their values.
    Args:
      **kwargs: Keyword arguments as flags to control JSON conversion.
    Returns:
      A plain Python value.
    """
  @classmethod
  def register(
      cls,
      type_name: str,
      subclass: Type['JSONConvertible'],
      override_existing: bool = False
      ) -> None:
    """Registers a class with a type name.
    The type name will be used as the key for class lookup during
    deserialization. A class can be registered with multiple type names, but
    a type name should be used only for one class.
    Args:
      type_name: A global unique string identifier for subclass.
      subclass: A subclass of JSONConvertible.
      override_existing: If True, override the class if the type name is
        already present in the registry. Otherwise an error will be raised.
    """
    cls._TYPE_REGISTRY.register(type_name, subclass, override_existing)
  @classmethod
  def add_module_alias(cls, source_name: str, target_name: str) -> None:
    """Adds a module alias so previous serialized objects could be loaded."""
    cls._TYPE_REGISTRY.add_module_alias(source_name, target_name)
  @classmethod
  def is_registered(cls, type_name: str) -> bool:
    """Returns True if a type name is registered. Otherwise False."""
    return cls._TYPE_REGISTRY.is_registered(type_name)
  @classmethod
  def class_from_typename(
      cls, type_name: str) -> Optional[Type['JSONConvertible']]:
    """Gets the class for a registered type name.
    Args:
      type_name: A string as the global unique type identifier for requested
        class.
    Returns:
      A type object if registered, otherwise None.
    """
    return cls._TYPE_REGISTRY.class_from_typename(type_name)
  @classmethod
  def registered_types(cls) -> Iterable[Tuple[str, Type['JSONConvertible']]]:
    """Returns an iterator of registered (serialization key, class) tuples."""
    return cls._TYPE_REGISTRY.iteritems()
  @classmethod
  def to_json_dict(
      cls,
      fields: Dict[str, Union[Tuple[Any, Any], Any]],
      *,
      exclude_default=False,
      exclude_keys: Optional[Set[str]] = None,
      **kwargs) -> Dict[str, JSONValueType]:
    """Helper method to create JSON dict from class and field."""
    json_dict = {JSONConvertible.TYPE_NAME_KEY: _serialization_key(cls)}
    exclude_keys = exclude_keys or set()
    if exclude_default:
      # In this mode, each field value is a (value, default) tuple and only
      # values differing from their defaults are serialized.
      for k, (v, default) in fields.items():
        if k not in exclude_keys and v != default:
          json_dict[k] = to_json(v, **kwargs)
    else:
      json_dict.update(
          {k: to_json(v, **kwargs) for k, v in fields.items()
           if k not in exclude_keys})
    return json_dict
  def __init_subclass__(cls):
    super().__init_subclass__()
    # Concrete subclasses are auto-registered under their serialization key.
    if not inspect.isabstract(cls) and cls.auto_register:
      type_name = _serialization_key(cls)
      JSONConvertible.register(type_name, cls, override_existing=True)
|
()
|
40,970 |
pyglove.core.object_utils.json_conversion
|
to_json
|
Returns a plain Python value as a representation for this object.
A plain Python value are basic python types that can be serialized into
JSON, e.g: ``bool``, ``int``, ``float``, ``str``, ``dict`` (with string
keys), ``list``, ``tuple`` where the container types should have plain
Python values as their values.
Args:
**kwargs: Keyword arguments as flags to control JSON conversion.
Returns:
A plain Python value.
|
@abc.abstractmethod
def to_json(self, **kwargs) -> JSONValueType:
  """Returns a plain Python value as a representation for this object.
  Plain Python values are basic Python types that can be serialized into
  JSON, e.g: ``bool``, ``int``, ``float``, ``str``, ``dict`` (with string
  keys), ``list``, ``tuple`` where the container types should have plain
  Python values as their values.
  Args:
    **kwargs: Keyword arguments as flags to control JSON conversion.
  Returns:
    A plain Python value.
  """
(self, **kwargs) -> Union[int, float, bool, str, List[Any], Dict[str, Any]]
|
40,971 |
pyglove.core.object_utils.value_location
|
KeyPath
|
Represents a path of keys from the root to a node in a tree.
``KeyPath`` is an important concept in PyGlove, which is used for representing
a symbolic object's location (see :meth:`pyglove.Symbolic.sym_path`) within
its symbolic tree. For example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Str())
])
class A(pg.Object):
pass
@pg.members([
('z', pg.typing.Object(A))
])
class B(pg.Object):
pass
a = A(x=1, y='foo')
b = B(z=a)
assert a.sym_path == 'z' # The path to object `a` is 'z'.
assert b.sym_path == '' # The root object's KeyPath is empty.
Since each node in a tree has a unique location, given the root we shall be
able to use a ``KeyPath`` object to locate the node. With the example
above, we can query the member ``x`` of object ``a`` via::
pg.KeyPath.parse('z.x').query(b) # Should return 1.
Similarly, we can modify a symbolic object's sub-node based on a ``KeyPath``
object. See :meth:`pyglove.Symbolic.rebind` for modifying sub-nodes in a
symbolic tree.
|
class KeyPath(common_traits.Formattable):
  """Represents a path of keys from the root to a node in a tree.

  ``KeyPath`` is an important concept in PyGlove, which is used for representing
  a symbolic object's location (see :meth:`pyglove.Symbolic.sym_path`) within
  its symbolic tree. For example::

    @pg.members([
        ('x', pg.typing.Int()),
        ('y', pg.typing.Str())
    ])
    class A(pg.Object):
      pass

    @pg.members([
        ('z', pg.typing.Object(A))
    ])
    class B(pg.Object):
      pass

    a = A(x=1, y='foo')
    b = B(z=a)
    assert a.sym_path == 'z'  # The path to object `a` is 'z'.
    assert b.sym_path == ''   # The root object's KeyPath is empty.

  Since each node in a tree has a unique location, given the root we shall be
  able to use a ``KeyPath`` object to locate the node. With the example
  above, we can query the member ``x`` of object ``a`` via::

    pg.KeyPath.parse('z.x').query(b)  # Should return 1.

  Similarly, we can modify a symbolic object's sub-node based on a ``KeyPath``
  object. See :meth:`pyglove.Symbolic.rebind` for modifying sub-nodes in a
  symbolic tree.
  """

  def __init__(self,
               key_or_key_list: Optional[Union[Any, List[Any]]] = None,
               parent: Optional['KeyPath'] = None):
    """Constructor.

    Args:
      key_or_key_list: A single object as key, or a list/tuple of objects
        as keys in the path.
        When string types or StrKey objects are used as key, dot ('.') is used
        as the delimiter, otherwise square brackets ('[]') is used as the
        delimiter when formatting a KeyPath.
        For object type key, str(object) will be used to represent the key in
        string form.
      parent: Parent KeyPath.
    """
    if key_or_key_list is None:
      key_or_key_list = []
    elif not isinstance(key_or_key_list, (tuple, list)):
      # A single (non-sequence) key is treated as a depth-1 path.
      key_or_key_list = [key_or_key_list]
    keys = []
    if parent:
      # `parent.keys` already returns a copy, so this path owns its key list.
      keys.extend(parent.keys)
    keys.extend(key_or_key_list)
    self._keys = keys
    # NOTE(daiyip): Lazy to build path string cache for fast access.
    self._path_str = None

  @classmethod
  def from_value(cls, value: Union['KeyPath', str, int]) -> 'KeyPath':
    """Returns a KeyPath object from a KeyPath equivalence."""
    if isinstance(value, str):
      value = cls.parse(value)
    elif isinstance(value, int):
      value = cls(value)
    elif not isinstance(value, KeyPath):
      raise ValueError(f'{value!r} is not a valid KeyPath equivalence.')
    return value

  @classmethod
  def parse(cls,
            path_str: str,
            parent: Optional['KeyPath'] = None) -> 'KeyPath':
    """Creates a ``KeyPath`` object from parsing a JSONPath-like string.

    The JSONPath (https://restfulapi.net/json-jsonpath/) like string is defined
    as following::

      <path> := <empty> | {<dict-key>[.<dict-key>]*}
      <dict-key> := <identifier>['[('<list-key>|<special-dict-key>)']']*
      <list-key> := <number>
      <special-dict-key> := <string-with-delimiter-chars>
      <delimiter_chars> := '[' | ']' | '.'

    For example, following keys are valid path strings::

      ''         : An empty path representing the root of a path.
      'a'        : A path that contains a dict key 'a'.
      'a.b'      : A path that contains two dict keys 'a' and 'b'.
      'a[0]'     : A path that contains a dict key 'a' and a list key 0.
      'a.0.'     : A path that contains two dict keys 'a' and '0'.
      'a[0][1]'  : A path that contains a dict key 'a' and two list keys
                   0 and 1 for a multi-dimension list.
      'a[x.y].b' : A path that contains three dict keys: 'a', 'x.y', 'b'.
                   Since 'x.y' has delimiter characters, it needs to be
                   enclosed in brackets.

    TODO(daiyip): Support parsing ``KeyPath`` from keys of complex types.
    Now this method only supports parsing KeyPath of string and int keys.
    That being said, ``format``/``parse`` are not symmetric, while ``format``
    can convert a ``KeyPath`` that includes complex keys into a string,
    ``parse`` is not able to convert them back.

    Args:
      path_str: A JSON-path-like string.
      parent: Parent KeyPath object.

    Returns:
      A KeyPath object.

    Raises:
      ValueError: Path string is in bad format.
    """
    if not isinstance(path_str, str):
      raise ValueError(
          f'\'path_str\' must be a string type. Encountered: {path_str!r}')
    keys = []
    def _append_key(key, preserve_empty=False, maybe_numeric=False):
      """Helper method to append key."""
      if not (preserve_empty or key):
        return
      # Bracketed keys that look like (possibly negative) integers become
      # list indices; dot-separated keys always stay strings.
      if maybe_numeric and key.lstrip('-').isdigit():
        key = int(key)
      keys.append(key)
    # Single pass over `path_str`: split on '.' only at bracket depth 0, and
    # treat each balanced top-level '[...]' group as one (maybe numeric) key.
    pos, key_start, unmatched_brackets = 0, 0, 0
    while pos != len(path_str):
      ch = path_str[pos]
      if ch == ']':
        unmatched_brackets -= 1
        if unmatched_brackets == 0:
          key = path_str[key_start:pos]
          _append_key(key, True, True)
          key_start = pos + 1
        elif unmatched_brackets < 0:
          raise ValueError(
              f'KeyPath parse failed: unmatched close bracket at position '
              f'{pos}:{path_str!r}')
      elif ch == '[':
        if unmatched_brackets == 0:
          key = path_str[key_start:pos]
          _append_key(key)
          key_start = pos + 1
        unmatched_brackets += 1
      elif ch == '.' and unmatched_brackets == 0:
        key = path_str[key_start:pos]
        _append_key(key)
        key_start = pos + 1
      pos += 1
    # Flush the trailing key after the last delimiter, if any.
    if key_start != len(path_str):
      _append_key(path_str[key_start:])
    if unmatched_brackets != 0:
      raise ValueError(
          f'KeyPath parse failed: unmatched open bracket at position '
          f'{key_start - 1}: {path_str!r}')
    return KeyPath(keys, parent)

  @property
  def keys(self) -> List[Any]:
    """A list of keys in this path."""
    # Returns a shallow copy so callers cannot mutate this path in place.
    return copy.copy(self._keys)

  @property
  def key(self) -> Any:
    """The rightmost key of this path."""
    if self.depth == 0:
      raise KeyError('Key of root KeyPath does not exist.')
    return self._keys[-1]

  @property
  def is_root(self) -> bool:
    """Returns True if this path is the root of a tree."""
    return not self._keys

  @property
  def depth(self) -> int:
    """The depth of this path."""
    return len(self._keys)

  @property
  def parent(self) -> 'KeyPath':
    """The ``KeyPath`` object for current node's parent.

    Example::

      path = pg.KeyPath.parse('a.b.c.')
      assert path.parent == 'a.b'

    Returns:
      A ``KeyPath`` object for the parent of current node.

    Raises:
      KeyError: If current path is the root.
    """
    if self.is_root:
      raise KeyError('Parent of a root KeyPath does not exist.')
    return KeyPath(self._keys[:-1])

  def __sub__(self, other: Union[None, int, str, 'KeyPath']) -> 'KeyPath':
    """Finds the relative path of this path to the other.

    Example::

      path1 = pg.KeyPath.parse('a.b.c.d')
      path2 = pg.KeyPath.parse('a.b')
      assert path1 - path2 == 'c.d'

    Args:
      other: Object to subtract, which can be None, int (as a depth-1 KeyPath),
        string (parsed as a KeyPath) or a KeyPath object.

    Returns:
      Relative path of this path to the other.

    Raises:
      ValueError: This path is an ancestor node of the other path,
        or these two paths are in different branch.
    """
    if other is None:
      return self
    if isinstance(other, str):
      other = KeyPath.parse(other)
    elif isinstance(other, int):
      other = KeyPath(other)
    if not isinstance(other, KeyPath):
      raise TypeError(
          f'Cannot subtract KeyPath({self}) by {other!r}.')
    max_len = max(len(self), len(other))
    for pos in range(max_len):
      # `other` must be a (proper or equal) prefix of `self`.
      if pos >= len(self):
        raise ValueError(
            f'KeyPath subtraction failed: left path {self!r} '
            f'is an ancestor of right path {other!r}.')
      if pos >= len(other):
        return KeyPath(self.keys[pos:])
      if self.keys[pos] != other.keys[pos]:
        raise ValueError(
            f'KeyPath subtraction failed: left path {self!r} '
            f'and right path {other!r} are in different subtree.')
    # Paths are identical: the relative path is the root path.
    return KeyPath()

  def __add__(self, other: Any) -> 'KeyPath':
    """Concatenates a KeyPath equivalent object.

    Args:
      other: Object to add, which can be None, int (as a 1-level KeyPath),
        string (parsed as a KeyPath), a KeyPath object, or any other object as
        a single key.

    Returns:
      Newly concatenated KeyPath.

    Raises:
      ValueError: If other is a string that cannot be parsed into a KeyPath.
    """
    if other is None:
      return self
    if isinstance(other, str):
      other = KeyPath.parse(other)
    elif not isinstance(other, KeyPath):
      other = KeyPath(other)
    assert isinstance(other, KeyPath)
    return KeyPath(other.keys, self)

  def query(self, src: Any) -> Any:
    """Query the value from the source object based on current path.

    Example::

      @pg.members([
          ('x', pg.typing.Int()),
          ('y', pg.typing.Str())
      ])
      class A(pg.Object):
        pass

      @pg.members([
          ('z', pg.typing.Object(A))
      ])
      class B(pg.Object):
        pass

      b = B(z=A(x=1, y='foo'))
      assert pg.KeyPath.parse('z.x').query(b) == 1

    Args:
      src: Source value to query.

    Returns:
      Value from src if path exists.

    Raises:
      KeyError: Path doesn't exist in src.
      RuntimeError: Called on a KeyPath that is considered as removed.
    """
    return self._query(0, src)

  def _query(self, key_pos: int, src: Any) -> Any:
    """Query the value of current path up to key_pos from an object.

    Args:
      key_pos: Start position in self._keys.
      src: Source value to query.

    Returns:
      Value from src if path exists.

    Raises:
      KeyError: Path doesn't exist in src.
    """
    # Base case: all keys are consumed; `src` is the addressed node.
    if key_pos == len(self._keys):
      return src
    key = self.keys[key_pos]
    # NOTE(daiyip): For contextual value (e.g. ``pg.ContextualValue``),
    # `query` returns its symbolic form instead of its evaluated value.
    if hasattr(src, 'sym_getattr'):
      assert hasattr(src, 'sym_hasattr')
      if src.sym_hasattr(key):
        return self._query(key_pos + 1, src.sym_getattr(key))
    elif hasattr(src, '__getitem__'):
      if isinstance(key, int):
        # Sequence-like access requires `__len__` for bounds checking.
        if not hasattr(src, '__len__'):
          raise KeyError(
              f'Cannot query index ({key}) on object ({src!r}): '
              f'\'__len__\' does not exist.')
        if key < len(src):
          return self._query(key_pos + 1, src[key])
      else:
        # Mapping-like access requires `__contains__` for existence checking.
        if not hasattr(src, '__contains__'):
          raise KeyError(
              f'Cannot query key ({key!r}) on object ({src!r}): '
              f'\'__contains__\' does not exist.')
        if key in src:
          return self._query(key_pos + 1, src[key])
    else:
      raise KeyError(
          f'Cannot query sub-key {key!r} of object ({src!r}): '
          f'\'__getitem__\' does not exist. '
          f'(path={KeyPath(self.keys[:key_pos])})')
    # Reached when `src` is navigable but does not contain `key`.
    raise KeyError(
        f'Path {KeyPath(self._keys[:key_pos + 1])!r} does not exist: '
        f'key {key!r} is absent from innermost value {src!r}.')

  def _has_special_chars(self, key):
    """Returns True if key has special characters."""
    return any([c in key for c in ['[', ']', '.']])

  def get(self, src: Any, default_value: Optional[Any] = None) -> Any:
    """Gets the value of current path from an object with a default value."""
    try:
      return self.query(src)
    except KeyError:
      return default_value

  def exists(self, src: Any) -> bool:
    """Returns whether current path exists in source object."""
    try:
      self.query(src)
      return True
    except KeyError:
      return False

  @property
  def path(self) -> str:
    """JSONPath representation of current path."""
    # Computed once and cached; `_keys` is never mutated after construction.
    if self._path_str is None:
      self._path_str = self.path_str()
    return self._path_str

  def path_str(self, preserve_complex_keys: bool = True) -> str:
    """Returns JSONPath representation of current path.

    Args:
      preserve_complex_keys: if True, complex keys such as 'x.y' will be
        preserved by quoted in brackets.
        For example: KeyPath(['a', 'x.y', 'b']) will return 'a[x.y].b' when
        `preserve_complex_keys` is True, and `a.x.y.b` when
        `preserve_complex_keys` is False.

    Returns:
      Path string.
    """
    s = []
    for i, key in enumerate(self._keys):
      # Plain string keys (and StrKey objects) are dot-delimited; everything
      # else — ints, complex keys with delimiter chars — is bracket-quoted.
      if ((isinstance(key, str)
           and not (preserve_complex_keys and self._has_special_chars(key)))
          or isinstance(key, StrKey)):
        if i != 0:
          s.append('.')
        s.append(str(key))
      else:
        s.append(f'[{key}]')
    return ''.join(s)

  def __len__(self) -> int:
    """Use depth as length of current path."""
    return self.depth

  def format(self, *args, **kwargs):
    """Format current path."""
    return self.path

  def __hash__(self):
    """Hash function.

    Returns:
      return the hash value of its path.

    NOTE(daiyip): KeyPath shares the same hash of its JSONPath representation
    (relative form), thus we can lookup a dict with KeyPath key by string,
    and vice versa.
    """
    return hash(self.path)

  def __eq__(self, other: Any) -> bool:
    """Equality check.

    Args:
      other: A string or a KeyPath.

    Returns:
      Whether JSON-path representation (either absolute or relative form)
      of current path equals to other.
    """
    if isinstance(other, str):
      return self.path == other
    return isinstance(other, KeyPath) and self.keys == other.keys

  def __ne__(self, other: Any) -> bool:
    return not self.__eq__(other)

  def __lt__(self, other: Any) -> bool:
    return self._compare(other, operator.lt)

  def __le__(self, other: Any) -> bool:
    return self._compare(other, operator.le)

  def __gt__(self, other: Any) -> bool:
    return self._compare(other, operator.gt)

  def __ge__(self, other: Any) -> bool:
    return self._compare(other, operator.ge)

  def _compare(
      self,
      other: Any,
      comparison: Callable[[Any, Any], bool]
  ) -> bool:
    """Compare to another KeyPath or a string.

    Args:
      other: A Keypath or a string.
      comparison: A comparison operator.

    Returns:
      Whether or not the comparison holds true.

    Raises:
      TypeError: The other object is neither a Keypath nor a string.
    """
    if isinstance(other, str):
      return comparison(self.path, other)
    if isinstance(other, KeyPath):
      # Wrap each key so mixed int/str/custom keys compare without TypeError.
      return comparison(
          tuple(map(KeyPath._KeyComparisonWrapper, self.keys)),
          tuple(map(KeyPath._KeyComparisonWrapper, other.keys))
      )
    raise TypeError(
        f'Comparison is not supported between instances of \'KeyPath\' and '
        f'{type(other).__name__!r}.')

  class _KeyComparisonWrapper:
    """A wrapper around KeyPath keys enabling dynamic comparison."""

    def __init__(self, key: Any):
      self.key = key

    def __eq__(self, other: 'KeyPath._KeyComparisonWrapper') -> bool:
      return self._compare(other, operator.eq)

    def __ne__(self, other: 'KeyPath._KeyComparisonWrapper') -> bool:
      return self._compare(other, operator.ne)

    def __lt__(self, other: 'KeyPath._KeyComparisonWrapper') -> bool:
      return self._compare(other, operator.lt)

    def __le__(self, other: 'KeyPath._KeyComparisonWrapper') -> bool:
      return self._compare(other, operator.le)

    def __gt__(self, other: 'KeyPath._KeyComparisonWrapper') -> bool:
      return self._compare(other, operator.gt)

    def __ge__(self, other: 'KeyPath._KeyComparisonWrapper') -> bool:
      return self._compare(other, operator.ge)

    def _compare(
        self,
        other: 'KeyPath._KeyComparisonWrapper',
        comparison: Callable[[Any, Any], bool]
    ) -> bool:
      """Compare the key against another key from a different KeyPath."""
      is_int = lambda key: isinstance(key, int)
      is_str = lambda key: isinstance(key, str)
      is_int_or_str = lambda key: is_int(key) or is_str(key)
      if is_int(self.key) and is_int(other.key):
        # Both are ints. Compare numerically so that KeyPath(2) < KeyPath(10).
        return comparison(self.key, other.key)
      if is_int_or_str(self.key) and is_int_or_str(other.key):
        # One is a str; the other is an int or str. Compare lexicographically.
        return comparison(str(self.key), str(other.key))
      # One or both is a custom key. Delegate comparison to its magic methods.
      return comparison(self.key, other.key)
|
(key_or_key_list: Union[Any, List[Any], NoneType] = None, parent: Optional[ForwardRef('KeyPath')] = None)
|
40,972 |
pyglove.core.object_utils.value_location
|
__add__
|
Concatenates a KeyPath equivalent object.
Args:
other: Object to add, which can be None, int (as a 1-level KeyPath),
string (parsed as a KeyPath), a KeyPath object, or any other object as
a single key.
Returns:
Newly concatenated KeyPath.
Raises:
ValueError: If other is a string that cannot be parsed into a KeyPath.
|
def __add__(self, other: Any) -> 'KeyPath':
  """Returns a new KeyPath with `other` appended after this path.

  Args:
    other: Object to add, which can be None (no-op), an int (as a 1-level
      KeyPath), a string (parsed as a KeyPath), a KeyPath object, or any
      other object treated as a single key.

  Returns:
    Newly concatenated KeyPath.

  Raises:
    ValueError: If other is a string that cannot be parsed into a KeyPath.
  """
  if other is None:
    return self
  if not isinstance(other, KeyPath):
    # Strings are parsed; any other value becomes a single-key path.
    other = KeyPath.parse(other) if isinstance(other, str) else KeyPath(other)
  return KeyPath(other.keys, self)
|
(self, other: Any) -> pyglove.core.object_utils.value_location.KeyPath
|
40,973 |
pyglove.core.object_utils.value_location
|
__eq__
|
Equality check.
Args:
other: A string or a KeyPath.
Returns:
Whether JSON-path representation (either absolute or relative form)
of current path equals to other.
|
def __eq__(self, other: Any) -> bool:
  """Compares this path with a string or another KeyPath.

  A string is equal when it matches the JSONPath representation of this
  path; a KeyPath is equal when its key sequence is identical.
  """
  if isinstance(other, str):
    return self.path == other
  if not isinstance(other, KeyPath):
    return False
  return self.keys == other.keys
|
(self, other: Any) -> bool
|
40,974 |
pyglove.core.object_utils.value_location
|
__ge__
| null |
def __ge__(self, other: Any) -> bool:
  """Returns True if this path orders at or after `other`."""
  return self._compare(other, operator.ge)
|
(self, other: Any) -> bool
|
40,975 |
pyglove.core.object_utils.value_location
|
__gt__
| null |
def __gt__(self, other: Any) -> bool:
  """Returns True if this path orders strictly after `other`."""
  return self._compare(other, operator.gt)
|
(self, other: Any) -> bool
|
40,976 |
pyglove.core.object_utils.value_location
|
__hash__
|
Hash function.
Returns:
return the hash value of its path.
NOTE(daiyip): KeyPath shares the same hash of its JSONPath representation
(relative form), thus we can lookup a dict with KeyPath key by string,
and vice versa.
|
def __hash__(self):
  """Returns the hash of this path's JSONPath string form.

  NOTE(daiyip): a KeyPath hashes identically to its JSONPath representation
  (relative form), so a dict keyed by KeyPath objects can be looked up by
  string, and vice versa.
  """
  return hash(self.path)
|
(self)
|
40,977 |
pyglove.core.object_utils.value_location
|
__init__
|
Constructor.
Args:
key_or_key_list: A single object as key, or a list/tuple of objects
as keys in the path.
When string types or StrKey objects are used as key, dot ('.') is used
as the delimiter, otherwise square brackets ('[]') is used as the
delimiter when formatting a KeyPath.
For object type key, str(object) will be used to represent the key in
string form.
parent: Parent KeyPath.
|
def __init__(self,
             key_or_key_list: Optional[Union[Any, List[Any]]] = None,
             parent: Optional['KeyPath'] = None):
  """Constructor.

  Args:
    key_or_key_list: A single object as key, or a list/tuple of objects as
      keys in the path. String-typed keys and StrKey objects are rendered
      with a dot ('.') delimiter; other keys are rendered in square
      brackets ('[]'). Object-typed keys are rendered via str(object).
    parent: Parent KeyPath whose keys are prepended to this path.
  """
  if key_or_key_list is None:
    key_list = []
  elif isinstance(key_or_key_list, (tuple, list)):
    key_list = list(key_or_key_list)
  else:
    # A single non-sequence value denotes a depth-1 path.
    key_list = [key_or_key_list]
  # `parent.keys` already returns a copy, so this path owns its key list.
  keys = list(parent.keys) if parent else []
  keys.extend(key_list)
  self._keys = keys
  # The JSONPath string is rendered lazily and cached on first access.
  self._path_str = None
|
(self, key_or_key_list: Union[Any, List[Any], NoneType] = None, parent: Optional[pyglove.core.object_utils.value_location.KeyPath] = None)
|
40,978 |
pyglove.core.object_utils.value_location
|
__le__
| null |
def __le__(self, other: Any) -> bool:
  """Returns True if this path orders at or before `other`."""
  return self._compare(other, operator.le)
|
(self, other: Any) -> bool
|
40,979 |
pyglove.core.object_utils.value_location
|
__len__
|
Use depth as length of current path.
|
def __len__(self) -> int:
  """Returns the number of keys in this path (its depth)."""
  return self.depth
|
(self) -> int
|
40,980 |
pyglove.core.object_utils.value_location
|
__lt__
| null |
def __lt__(self, other: Any) -> bool:
  """Returns True if this path orders strictly before `other`."""
  return self._compare(other, operator.lt)
|
(self, other: Any) -> bool
|
40,981 |
pyglove.core.object_utils.value_location
|
__ne__
| null |
def __ne__(self, other: Any) -> bool:
  """Returns the negation of equality with `other`."""
  return not self.__eq__(other)
|
(self, other: Any) -> bool
|
40,984 |
pyglove.core.object_utils.value_location
|
__sub__
|
Finds the relative path of this path to the other.
Example::
path1 = pg.KeyPath.parse('a.b.c.d')
path2 = pg.KeyPath.parse('a.b')
assert path1 - path2 == 'c.d'
Args:
other: Object to subtract, which can be None, int (as a depth-1 KeyPath),
string (parsed as a KeyPath) or a KeyPath object.
Returns:
Relative path of this path to the other.
Raises:
ValueError: This path is an ancestor node of the other path,
or these two paths are in different branch.
|
def __sub__(self, other: Union[None, int, str, 'KeyPath']) -> 'KeyPath':
  """Finds the relative path of this path to the other.

  Example::

    path1 = pg.KeyPath.parse('a.b.c.d')
    path2 = pg.KeyPath.parse('a.b')
    assert path1 - path2 == 'c.d'

  Args:
    other: Object to subtract, which can be None, int (as a depth-1 KeyPath),
      string (parsed as a KeyPath) or a KeyPath object.

  Returns:
    Relative path of this path to the other.

  Raises:
    ValueError: This path is an ancestor node of the other path,
      or these two paths are in different branch.
  """
  if other is None:
    return self
  if isinstance(other, str):
    other = KeyPath.parse(other)
  elif isinstance(other, int):
    other = KeyPath(other)
  if not isinstance(other, KeyPath):
    raise TypeError(
        f'Cannot subtract KeyPath({self}) by {other!r}.')
  left_keys, right_keys = self.keys, other.keys
  # `other` must be a prefix of `self`; verify the shared prefix first.
  prefix_len = min(len(left_keys), len(right_keys))
  for pos in range(prefix_len):
    if left_keys[pos] != right_keys[pos]:
      raise ValueError(
          f'KeyPath subtraction failed: left path {self!r} '
          f'and right path {other!r} are in different subtree.')
  if len(left_keys) < len(right_keys):
    raise ValueError(
        f'KeyPath subtraction failed: left path {self!r} '
        f'is an ancestor of right path {other!r}.')
  # Equal paths yield the root path (empty suffix).
  return KeyPath(left_keys[prefix_len:])
|
(self, other: Union[NoneType, int, str, pyglove.core.object_utils.value_location.KeyPath]) -> pyglove.core.object_utils.value_location.KeyPath
|
40,985 |
pyglove.core.object_utils.value_location
|
_compare
|
Compare to another KeyPath or a string.
Args:
other: A Keypath or a string.
comparison: A comparison operator.
Returns:
Whether or not the comparison holds true.
Raises:
TypeError: The other object is neither a Keypath nor a string.
|
def _compare(
    self,
    other: Any,
    comparison: Callable[[Any, Any], bool]
) -> bool:
  """Compares this path to another KeyPath or a string.

  Args:
    other: A KeyPath or a string.
    comparison: A binary comparison operator (e.g. ``operator.lt``).

  Returns:
    Whether or not the comparison holds true.

  Raises:
    TypeError: The other object is neither a KeyPath nor a string.
  """
  if isinstance(other, str):
    return comparison(self.path, other)
  if not isinstance(other, KeyPath):
    raise TypeError(
        f'Comparison is not supported between instances of \'KeyPath\' and '
        f'{type(other).__name__!r}.')
  # Wrap each key so mixed int/str/custom keys compare without TypeError.
  wrap = KeyPath._KeyComparisonWrapper
  return comparison(
      tuple(wrap(k) for k in self.keys),
      tuple(wrap(k) for k in other.keys)
  )
|
(self, other: Any, comparison: Callable[[Any, Any], bool]) -> bool
|
40,986 |
pyglove.core.object_utils.value_location
|
_has_special_chars
|
Returns True if key has special characters.
|
def _has_special_chars(self, key):
  """Returns True if `key` contains a KeyPath delimiter character.

  Keys containing '[', ']' or '.' must be bracket-quoted when rendering a
  path string, since those characters otherwise act as path delimiters.
  """
  # Generator expression lets `any` short-circuit on the first hit instead
  # of materializing a throwaway list first (flake8-comprehensions C419).
  return any(c in key for c in ('[', ']', '.'))
|
(self, key)
|
40,988 |
pyglove.core.object_utils.value_location
|
_query
|
Query the value of current path up to key_pos from an object.
Args:
key_pos: Start position in self._keys.
src: Source value to query.
Returns:
Value from src if path exists.
Raises:
KeyError: Path doesn't exist in src.
|
def _query(self, key_pos: int, src: Any) -> Any:
  """Query the value of current path up to key_pos from an object.

  Args:
    key_pos: Start position in self._keys.
    src: Source value to query.

  Returns:
    Value from src if path exists.

  Raises:
    KeyError: Path doesn't exist in src.
  """
  # Base case: all keys consumed; `src` is the addressed node.
  if key_pos == len(self._keys):
    return src
  key = self.keys[key_pos]
  # NOTE(daiyip): For contextual value (e.g. ``pg.ContextualValue``),
  # `query` returns its symbolic form instead of its evaluated value.
  if hasattr(src, 'sym_getattr'):
    assert hasattr(src, 'sym_hasattr')
    if src.sym_hasattr(key):
      return self._query(key_pos + 1, src.sym_getattr(key))
  elif hasattr(src, '__getitem__'):
    if isinstance(key, int):
      # Sequence-like access requires `__len__` for bounds checking.
      if not hasattr(src, '__len__'):
        raise KeyError(
            f'Cannot query index ({key}) on object ({src!r}): '
            f'\'__len__\' does not exist.')
      if key < len(src):
        return self._query(key_pos + 1, src[key])
    else:
      # Mapping-like access requires `__contains__` for existence checking.
      if not hasattr(src, '__contains__'):
        raise KeyError(
            f'Cannot query key ({key!r}) on object ({src!r}): '
            f'\'__contains__\' does not exist.')
      if key in src:
        return self._query(key_pos + 1, src[key])
  else:
    raise KeyError(
        f'Cannot query sub-key {key!r} of object ({src!r}): '
        f'\'__getitem__\' does not exist. '
        f'(path={KeyPath(self.keys[:key_pos])})')
  # Reached when `src` is navigable but does not contain `key`.
  raise KeyError(
      f'Path {KeyPath(self._keys[:key_pos + 1])!r} does not exist: '
      f'key {key!r} is absent from innermost value {src!r}.')
|
(self, key_pos: int, src: Any) -> Any
|
40,989 |
pyglove.core.object_utils.value_location
|
exists
|
Returns whether current path exists in source object.
|
def exists(self, src: Any) -> bool:
  """Returns True if this path can be resolved within `src`."""
  try:
    self.query(src)
  except KeyError:
    # `query` raises KeyError exactly when some key is absent.
    return False
  return True
|
(self, src: Any) -> bool
|
40,990 |
pyglove.core.object_utils.value_location
|
format
|
Format current path.
|
def format(self, *args, **kwargs):
  """Returns the JSONPath string of this path; formatting args are ignored."""
  del args, kwargs  # A KeyPath renders the same regardless of options.
  return self.path
|
(self, *args, **kwargs)
|
40,991 |
pyglove.core.object_utils.value_location
|
get
|
Gets the value of current path from an object with a default value.
|
def get(self, src: Any, default_value: Optional[Any] = None) -> Any:
  """Returns the value at this path in `src`, or `default_value` if absent."""
  try:
    result = self.query(src)
  except KeyError:
    # An unresolvable path falls back to the caller-provided default.
    result = default_value
  return result
|
(self, src: Any, default_value: Optional[Any] = None) -> Any
|
40,992 |
pyglove.core.object_utils.value_location
|
path_str
|
Returns JSONPath representation of current path.
Args:
preserve_complex_keys: if True, complex keys such as 'x.y' will be
preserved by quoted in brackets.
For example: KeyPath(['a', 'x.y', 'b']) will return 'a[x.y].b' when
`preserve_complex_keys` is True, and `a.x.y.b` when
`preserve_complex_keys` is False.
Returns:
Path string.
|
def path_str(self, preserve_complex_keys: bool = True) -> str:
  """Returns JSONPath representation of current path.

  Args:
    preserve_complex_keys: if True, complex keys such as 'x.y' will be
      preserved by quoting in brackets. For example,
      KeyPath(['a', 'x.y', 'b']) renders as 'a[x.y].b' when True and
      'a.x.y.b' when False.

  Returns:
    Path string.
  """
  parts = []
  for i, key in enumerate(self._keys):
    # Plain string keys join with dots; everything else — ints, complex
    # keys with delimiter chars — is bracket-quoted.
    is_dot_key = isinstance(key, str) and not (
        preserve_complex_keys and self._has_special_chars(key))
    if is_dot_key or isinstance(key, StrKey):
      if i:
        parts.append('.')
      parts.append(str(key))
    else:
      parts.append(f'[{key}]')
  return ''.join(parts)
|
(self, preserve_complex_keys: bool = True) -> str
|
40,993 |
pyglove.core.object_utils.value_location
|
query
|
Query the value from the source object based on current path.
Example::
@pg.members([
('x', pg.typing.Int()),
('y', pg.typing.Str())
])
class A(pg.Object):
pass
@pg.members([
('z', pg.typing.Object(A))
])
class B(pg.Object):
pass
b = B(z=A(x=1, y='foo'))
assert pg.KeyPath.parse('z.x').query(b) == 1
Args:
src: Source value to query.
Returns:
Value from src if path exists.
Raises:
KeyError: Path doesn't exist in src.
RuntimeError: Called on a KeyPath that is considered as removed.
|
def query(self, src: Any) -> Any:
  """Queries the value at this path within a source object.

  Example::

    @pg.members([
        ('x', pg.typing.Int()),
        ('y', pg.typing.Str())
    ])
    class A(pg.Object):
      pass

    @pg.members([
        ('z', pg.typing.Object(A))
    ])
    class B(pg.Object):
      pass

    b = B(z=A(x=1, y='foo'))
    assert pg.KeyPath.parse('z.x').query(b) == 1

  Args:
    src: Source value to query.

  Returns:
    Value from src if path exists.

  Raises:
    KeyError: Path doesn't exist in src.
    RuntimeError: Called on a KeyPath that is considered as removed.
  """
  # Delegate to the recursive helper, starting at the first key.
  return self._query(0, src)
|
(self, src: Any) -> Any
|
40,994 |
pyglove.core.typing.class_schema
|
KeySpec
|
Interface for key specifications.
A key specification determines what keys are acceptable for a symbolic
field (see :class:`pyglove.Field`). Usually, symbolic attributes have an 1:1
relationship with symbolic fields. But in some cases (e.g. a dict with dynamic
keys), a field can be used to describe a group of symbolic attributes::
# A dictionary that accepts key 'x' with float value
# or keys started with 'foo' with int values.
d = pg.Dict(value_spec=pg.Dict([
('x', pg.typing.Float(min_value=0.0)),
(pg.typing.StrKey('foo.*'), pg.typing.Int())
]))
You may noticed that the code above pass a string 'x' for the key spec for a
field definition. The string is automatically converted to
:class:`pyglove.typing.ConstStrKey`.
PyGlove's Builtin key specifications are:
+---------------------------+----------------------------------------------+
| ``KeySpec`` type | Class |
+===========================+==============================================+
| Fixed string identifier | :class:`pyglove.typing.ConstStrKey` |
+---------------------------+----------------------------------------------+
| Dynamic string identifier | :class:`pyglove.typing.StrKey` |
+---------------------------+----------------------------------------------+
| Key of a list | :class:`pyglove.typing.ListKey` |
+---------------------------+----------------------------------------------+
| Key of a tuple | :class:`pyglove.typing.TupleKey` |
+---------------------------+----------------------------------------------+
In most scenarios, the user either use a string or a ``StrKey`` as the key
spec, while other ``KeySpec`` subclasses (e.g. ``ListKey`` and ``TupleKey``)
are used internally to constrain list size and tuple items.
|
class KeySpec(object_utils.Formattable, object_utils.JSONConvertible):
  """Interface for key specifications.

  A key specification determines what keys are acceptable for a symbolic
  field (see :class:`pyglove.Field`). Usually, symbolic attributes have an 1:1
  relationship with symbolic fields. But in some cases (e.g. a dict with dynamic
  keys), a field can be used to describe a group of symbolic attributes::

    # A dictionary that accepts key 'x' with float value
    # or keys started with 'foo' with int values.
    d = pg.Dict(value_spec=pg.Dict([
        ('x', pg.typing.Float(min_value=0.0)),
        (pg.typing.StrKey('foo.*'), pg.typing.Int())
    ]))

  You may noticed that the code above pass a string 'x' for the key spec for a
  field definition. The string is automatically converted to
  :class:`pyglove.typing.ConstStrKey`.

  PyGlove's Builtin key specifications are:

    +---------------------------+----------------------------------------------+
    | ``KeySpec`` type          | Class                                        |
    +===========================+==============================================+
    | Fixed string identifier   | :class:`pyglove.typing.ConstStrKey`          |
    +---------------------------+----------------------------------------------+
    | Dynamic string identifier | :class:`pyglove.typing.StrKey`               |
    +---------------------------+----------------------------------------------+
    | Key of a list             | :class:`pyglove.typing.ListKey`              |
    +---------------------------+----------------------------------------------+
    | Key of a tuple            | :class:`pyglove.typing.TupleKey`             |
    +---------------------------+----------------------------------------------+

  In most scenarios, the user either use a string or a ``StrKey`` as the key
  spec, while other ``KeySpec`` subclasses (e.g. ``ListKey`` and ``TupleKey``)
  are used internally to constrain list size and tuple items.
  """

  @property
  @abc.abstractmethod
  def is_const(self) -> bool:
    """Returns whether current key is const."""

  @abc.abstractmethod
  def match(self, key: Any) -> bool:
    """Returns whether current key specification can match a key."""

  @abc.abstractmethod
  def extend(self, base: 'KeySpec') -> 'KeySpec':
    """Extend base key specification and returns self.

    NOTE(daiyip): When a ``Field`` extends a base Field (from a base schema),
    it calls ``extend`` on both its ``KeySpec`` and ``ValueSpec``.
    ``KeySpec.extend`` is to determine whether the ``Field`` key is allowed to
    be extended, and ``ValueSpec.extend`` is to determine the final
    ``ValueSpec`` after extension.

    Args:
      base: A base ``KeySpec`` object.

    Returns:
      An ``KeySpec`` object derived from this key spec by extending the base.
    """

  @classmethod
  def from_str(cls, key: str) -> 'KeySpec':
    """Get a concrete ValueSpec from annotation."""
    # NOTE: This stub is replaced at import time by `key_specs.py`; it should
    # never be reached at runtime.
    del key
    assert False, 'Overridden in `key_specs.py`.'
|
()
|
40,999 |
pyglove.core.typing.class_schema
|
extend
|
Extend base key specification and returns self.
NOTE(daiyip): When a ``Field`` extends a base Field (from a base schema),
it calls ``extend`` on both its ``KeySpec`` and ``ValueSpec``.
``KeySpec.extend`` is to determine whether the ``Field`` key is allowed to
be extended, and ``ValueSpec.extend`` is to determine the final
``ValueSpec`` after extension.
Args:
base: A base ``KeySpec`` object.
Returns:
An ``KeySpec`` object derived from this key spec by extending the base.
|
@abc.abstractmethod
def extend(self, base: 'KeySpec') -> 'KeySpec':
  """Extends a base key specification and returns self.

  NOTE(daiyip): when a ``Field`` extends a base ``Field`` (from a base
  schema), it calls ``extend`` on both its ``KeySpec`` and ``ValueSpec``:
  ``KeySpec.extend`` decides whether the field's key may be extended, while
  ``ValueSpec.extend`` determines the final ``ValueSpec`` after extension.

  Args:
    base: A base ``KeySpec`` object.

  Returns:
    A ``KeySpec`` object derived from this key spec by extending the base.
  """
|
(self, base: pyglove.core.typing.class_schema.KeySpec) -> pyglove.core.typing.class_schema.KeySpec
|
41,001 |
pyglove.core.typing.class_schema
|
match
|
Returns whether current key specification can match a key.
|
@abc.abstractmethod
def match(self, key: Any) -> bool:
  """Returns True if this key specification can match `key`.

  Implemented by concrete ``KeySpec`` subclasses.
  """
|
(self, key: Any) -> bool
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.