index | package | name | docstring | code | signature |
---|---|---|---|---|---|
41,003 |
pyglove.core.symbolic.list
|
List
|
Symbolic list.
``pg.List`` implements a list type whose instances are symbolically
programmable. It is a subclass of the built-in Python ``list`` as well as
a subclass of ``pg.Symbolic``.
``pg.List`` can be used as a regular list::
# Construct a symbolic list from an iterable object.
l = pg.List(range(10))
It also supports symbolic validation through the ``value_spec`` argument::
l = pg.List([1, 2, 3], value_spec=pg.typing.List(
pg.typing.Int(min_value=1),
max_size=10
))
# Raises: 0 is not in acceptable range.
l.append(0)
And can be symbolically manipulated::
l = pg.List([{'foo': 1}])
l.rebind({
'[0].foo': 2
})
pg.query(l, where=lambda x: isinstance(x, int))
The user can also subscribe to changes of its sub-nodes::
def on_change(updates):
print(updates)
l = pg.List([{'foo': 1}], onchange_callback=on_change)
# `on_change` will be triggered on item insertion.
l.append({'bar': 2})
# `on_change` will be triggered on item removal.
l.pop(0)
# `on_change` will also be triggered on subtree change.
l.rebind({'[0].bar': 3})
|
class List(list, base.Symbolic, pg_typing.CustomTyping):
"""Symbolic list.
``pg.List`` implements a list type whose instances are symbolically
programmable. It is a subclass of the built-in Python ``list`` as well as
a subclass of ``pg.Symbolic``.
``pg.List`` can be used as a regular list::
# Construct a symbolic list from an iterable object.
l = pg.List(range(10))
It also supports symbolic validation through the ``value_spec`` argument::
l = pg.List([1, 2, 3], value_spec=pg.typing.List(
pg.typing.Int(min_value=1),
max_size=10
))
# Raises: 0 is not in acceptable range.
l.append(0)
And can be symbolically manipulated::
l = pg.List([{'foo': 1}])
l.rebind({
'[0].foo': 2
})
pg.query(l, where=lambda x: isinstance(x, int))
The user can also subscribe to changes of its sub-nodes::
def on_change(updates):
print(updates)
l = pg.List([{'foo': 1}], onchange_callback=on_change)
# `on_change` will be triggered on item insertion.
l.append({'bar': 2})
# `on_change` will be triggered on item removal.
l.pop(0)
# `on_change` will also be triggered on subtree change.
l.rebind({'[0].bar': 3})
"""
@classmethod
def partial(cls,
items: Optional[Iterable[Any]] = None,
*,
value_spec: Optional[pg_typing.List] = None,
onchange_callback: Optional[Callable[
[Dict[object_utils.KeyPath, base.FieldUpdate]], None]] = None,
**kwargs) -> 'List':
"""Class method that creates a partial List object."""
return cls(items,
value_spec=value_spec,
onchange_callback=onchange_callback,
allow_partial=True,
**kwargs)
@classmethod
def from_json(cls,
json_value: Any,
*,
value_spec: Optional[pg_typing.List] = None,
allow_partial: bool = False,
root_path: Optional[object_utils.KeyPath] = None,
**kwargs) -> 'List':
"""Class method that load an symbolic List from a JSON value.
Example::
l = List.from_json([{
'_type': '__main__.Foo',
'f1': 1,
'f2': {
'f21': True
}
},
1
])
assert l.value_spec is None
# Okay:
l.append('abc')
# [0].f2 is bound by class Foo's field 'f2' definition
# (assuming it defines a schema for the Dict field).
assert l[0].f2.value_spec is not None
# Not okay:
l[0].f2.abc = 1
Args:
json_value: Input JSON value, only JSON list is acceptable.
value_spec: An optional `pg.typing.List` object as the schema for the
list.
allow_partial: Whether to allow elements of the list to be partial.
root_path: KeyPath of loaded object in its object tree.
**kwargs: Allow passing through keyword arguments that are not applicable.
Returns:
A schema-less symbolic list, but its items may be symbolic.
"""
return cls(json_value,
value_spec=value_spec,
allow_partial=allow_partial,
root_path=root_path)
def __init__(
self,
items: Optional[Iterable[Any]] = None,
*,
value_spec: Optional[pg_typing.List] = None,
onchange_callback: Optional[Callable[
[Dict[object_utils.KeyPath, base.FieldUpdate]], None]] = None,
allow_partial: bool = False,
accessor_writable: bool = True,
sealed: bool = False,
root_path: Optional[object_utils.KeyPath] = None):
"""Constructor.
Args:
items: An optional iterable object as the initial value for this list.
value_spec: Value spec that applies to this List.
onchange_callback: Callback when sub-tree has been modified.
allow_partial: Whether to allow unbound or partial fields. This takes
effect only when value_spec is not None.
accessor_writable: Whether to allow modification of this List using
accessors (operator[]).
sealed: Whether to seal this List after creation.
root_path: KeyPath of this List in its object tree.
"""
if value_spec and not isinstance(value_spec, pg_typing.List):
raise TypeError(
f'Argument \'value_spec\' must be a `pg.typing.List` object. '
f'Encountered {value_spec}.')
# We delay seal operation until items are filled.
base.Symbolic.__init__(
self,
allow_partial=allow_partial,
accessor_writable=accessor_writable,
sealed=False,
root_path=root_path)
self._value_spec = None
self._onchange_callback = None
list.__init__(self)
if items:
# Copy the symbolic form instead of evaluated form.
if isinstance(items, List):
items = items.sym_values()
for item in items:
self._set_item_without_permission_check(len(self), item)
if value_spec:
self.use_value_spec(value_spec, allow_partial)
# NOTE(daiyip): We set onchange callback at the end of init to avoid
# triggering during initialization.
self._onchange_callback = onchange_callback
self.seal(sealed)
@property
def max_size(self) -> Optional[int]:
"""Returns max size of this list."""
if self._value_spec:
return typing.cast(pg_typing.ListKey,
self._value_spec.element.key).max_value
return None
def use_value_spec(self,
value_spec: Optional[pg_typing.List],
allow_partial: bool = False) -> 'List':
"""Applies a ``pg.List`` as the value spec for current list.
Args:
value_spec: A List ValueSpec to apply to this List.
If current List is schema-less (whose immediate members are not
validated against schema), and `value_spec` is not None, the value spec
will be applied to the List.
Otherwise, if the current List is already symbolic (whose immediate members
are under the constraint of a List value spec), and `value_spec` is
None, current List will become schema-less. However, the schema
constraints for non-immediate members will remain.
allow_partial: Whether to allow a partial List based on the schema. This
flag overrides the allow_partial flag passed to __init__ for a spec-less
List.
Returns:
Self.
Raises:
ValueError: schema validation failed due to value error.
RuntimeError: List is already bound with another value_spec.
TypeError: type errors during validation.
KeyError: key errors during validation.
"""
if value_spec is None:
self._value_spec = None
self._accessor_writable = True
return self
if not isinstance(value_spec, pg_typing.List):
raise ValueError(
self._error_message(
f'Value spec for list must be a `pg.typing.List` object. '
f'Encountered: {value_spec!r}'))
if self._value_spec and self._value_spec != value_spec:
raise RuntimeError(
self._error_message(
f'List is already bound with a different value '
f'spec: {self._value_spec}. New value spec: {value_spec}.'))
self._allow_partial = allow_partial
if flags.is_type_check_enabled():
# NOTE(daiyip): self._value_spec will be set in List.custom_apply method
# called by spec.apply, thus we don't need to set the _value_spec
# explicitly.
value_spec.apply(
self,
allow_partial=base.accepts_partial(self),
child_transform=base.symbolic_transform_fn(self._allow_partial),
root_path=self.sym_path)
else:
self._value_spec = value_spec
return self
@property
def value_spec(self) -> Optional[pg_typing.List]:
"""Returns value spec of this List."""
return self._value_spec
def sym_attr_field(self, key: Union[str, int]) -> Optional[pg_typing.Field]:
"""Returns the field definition for a symbolic attribute."""
del key
if self._value_spec is None:
return None
return self._value_spec.element
def sym_hasattr(self, key: Union[str, int]) -> bool:
"""Tests if a symbolic attribute exists."""
return (isinstance(key, numbers.Integral)
and key >= -len(self) and key < len(self))
def sym_keys(self) -> Iterator[int]:
"""Symbolically iterates indices."""
for i in range(len(self)):
yield i
def sym_values(self) -> Iterator[Any]:
"""Iterates the values of symbolic attributes."""
for i in range(len(self)):
yield super().__getitem__(i)
def sym_items(self) -> Iterator[Tuple[int, Any]]:
"""Iterates the (key, value) pairs of symbolic attributes."""
for i in range(len(self)):
yield (i, super().__getitem__(i))
def sym_hash(self) -> int:
"""Symbolically hashing."""
return base.sym_hash(
(self.__class__, tuple([base.sym_hash(e) for e in self.sym_values()]))
)
def _sym_getattr(self, key: int) -> Any: # pytype: disable=signature-mismatch # overriding-parameter-type-checks
"""Gets symbolic attribute by index."""
return super().__getitem__(key)
def _sym_clone(self, deep: bool, memo=None) -> 'List':
"""Override Symbolic._clone."""
source = []
for v in self.sym_values():
if deep or isinstance(v, base.Symbolic):
v = base.clone(v, deep, memo)
source.append(v)
return List(
source,
value_spec=self._value_spec,
allow_partial=self._allow_partial,
accessor_writable=self._accessor_writable,
# NOTE(daiyip): parent and root_path are reset to empty
# for the copied object.
root_path=None)
def _sym_missing(self) -> Dict[Any, Any]:
"""Returns missing fields."""
missing = dict()
for idx, elem in self.sym_items():
if isinstance(elem, base.Symbolic):
missing_child = elem.sym_missing(flatten=False)
if missing_child:
missing[idx] = missing_child
return missing
def _sym_rebind(
self, path_value_pairs: typing.Dict[object_utils.KeyPath, Any]
) -> typing.List[base.FieldUpdate]:
"""Subclass specific rebind implementation."""
updates = []
# Apply the updates in reverse order so that the paths being operated on
# are not shifted by insertions and deletions.
path_value_pairs = sorted(
path_value_pairs.items(), key=lambda x: x[0], reverse=True)
for k, v in path_value_pairs:
update = self._set_item_of_current_tree(k, v)
if update is not None:
updates.append(update)
# Reverse the updates so they are ordered from the smallest index to
# the largest.
updates.reverse()
return updates
def _sym_nondefault(self) -> Dict[int, Any]:
"""Returns non-default values."""
non_defaults = dict()
for idx, elem in self.sym_items():
if isinstance(elem, base.Symbolic):
non_defaults_child = elem.non_default_values(flatten=False)
if non_defaults_child:
non_defaults[idx] = non_defaults_child
else:
non_defaults[idx] = elem
return non_defaults
def seal(self, sealed: bool = True) -> 'List':
"""Seal or unseal current object from further modification."""
if self.is_sealed == sealed:
return self
for elem in self.sym_values():
if isinstance(elem, base.Symbolic):
elem.seal(sealed)
super().seal(sealed)
return self
def _update_children_paths(
self,
old_path: object_utils.KeyPath,
new_path: object_utils.KeyPath) -> None:
"""Update children paths according to root_path of current node."""
del old_path
for idx, item in self.sym_items():
if isinstance(item, base.TopologyAware):
item.sym_setpath(object_utils.KeyPath(idx, new_path))
def _set_item_without_permission_check( # pytype: disable=signature-mismatch # overriding-parameter-type-checks
self, key: int, value: Any) -> Optional[base.FieldUpdate]:
"""Set or add an item without permission check."""
assert isinstance(key, numbers.Integral), key
index = key
if index >= len(self):
# Appending MISSING_VALUE is considered no-op.
if value == pg_typing.MISSING_VALUE:
return None
index = len(self)
should_insert = False
if isinstance(value, Insertion):
should_insert = True
value = value.value
old_value = pg_typing.MISSING_VALUE
# Replace an existing value.
if index < len(self) and not should_insert:
old_value = list.__getitem__(self, index)
# Generates no update as old value is the same as the new value.
if old_value is value:
return None
new_value = self._formalized_value(index, value)
if index < len(self):
if should_insert:
list.insert(self, index, new_value)
else:
list.__setitem__(self, index, new_value)
# Detach old value from object tree.
if isinstance(old_value, base.TopologyAware):
old_value.sym_setparent(None)
else:
super().append(new_value)
return base.FieldUpdate(
self.sym_path + index, self,
self._value_spec.element if self._value_spec else None,
old_value, new_value)
def _formalized_value(self, idx: int, value: Any):
"""Get transformed (formal) value from user input."""
allow_partial = base.accepts_partial(self)
value = base.from_json(
value,
allow_partial=allow_partial,
root_path=object_utils.KeyPath(idx, self.sym_path))
if self._value_spec and flags.is_type_check_enabled():
value = self._value_spec.element.apply(
value,
allow_partial=allow_partial,
transform_fn=base.symbolic_transform_fn(self._allow_partial),
root_path=object_utils.KeyPath(idx, self.sym_path))
return self._relocate_if_symbolic(idx, value)
@property
def _subscribes_field_updates(self) -> bool:
"""Returns True if current list subscribes field updates."""
return self._onchange_callback is not None
def _on_change(self,
field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
"""On change event of List."""
# Do nothing for now to handle changes of List.
# NOTE(daiyip): Remove items that are MISSING_VALUES.
keys_to_remove = []
for i, item in self.sym_items():
if pg_typing.MISSING_VALUE == item:
keys_to_remove.append(i)
if keys_to_remove:
for i in reversed(keys_to_remove):
list.__delitem__(self, i)
# Update paths for children.
for idx, item in self.sym_items():
if isinstance(item, base.TopologyAware) and item.sym_path.key != idx:
item.sym_setpath(object_utils.KeyPath(idx, self.sym_path))
if self._onchange_callback is not None:
self._onchange_callback(field_updates)
def _parse_slice(self, index: slice) -> Tuple[int, int, int]:
start = index.start if index.start is not None else 0
start = max(-len(self), start)
start = min(len(self), start)
if start < 0:
start += len(self)
stop = index.stop if index.stop is not None else len(self)
stop = max(-len(self), stop)
stop = min(len(self), stop)
if stop < 0:
stop += len(self)
step = index.step if index.step is not None else 1
return start, stop, step
def _init_kwargs(self) -> typing.Dict[str, Any]:
kwargs = super()._init_kwargs()
if not self._accessor_writable:
kwargs['accessor_writable'] = False
if self._onchange_callback is not None:
kwargs['onchange_callback'] = self._onchange_callback
# NOTE(daiyip): We do not serialize ValueSpec for now as in most use
# cases they come from the subclasses of `pg.Object`.
return kwargs
def __getstate__(self) -> Any:
"""Customizes pickle.dump."""
return dict(value=list(self), kwargs=self._init_kwargs())
def __setstate__(self, state) -> None:
"""Customizes pickle.load."""
self.__init__(state['value'], **state['kwargs'])
def __getitem__(self, index) -> Any:
"""Gets the item at a given position."""
if isinstance(index, numbers.Integral):
if index < -len(self) or index >= len(self):
raise IndexError('list index out of range')
return self.sym_inferred(index)
elif isinstance(index, slice):
return [self[i] for i in range(*self._parse_slice(index))]
else:
raise TypeError(
f'list index must be an integer. Encountered {index!r}.')
def __iter__(self):
"""Iterates the list."""
for i in range(len(self)):
yield self.sym_inferred(i)
def __setitem__(self, index, value: Any) -> None:
"""Set item in this List."""
if base.treats_as_sealed(self):
raise base.WritePermissionError(
self._error_message('Cannot set item for a sealed List.'))
if not base.writtable_via_accessors(self):
raise base.WritePermissionError(
self._error_message('Cannot modify List item by __setitem__ while '
'accessor_writable is set to False. '
'Use \'rebind\' method instead.'))
if isinstance(index, slice):
start, stop, step = self._parse_slice(index)
replacements = [self._formalized_value(i, v) for i, v in enumerate(value)]
if step < 0:
replacements.reverse()
step = -step
slice_size = math.ceil((stop - start) * 1.0 / step)
if step == 1:
if slice_size < len(replacements):
for i in range(slice_size, len(replacements)):
replacements[i] = Insertion(replacements[i])
else:
replacements.extend(
[pg_typing.MISSING_VALUE
for _ in range(slice_size - len(replacements))])
elif slice_size != len(replacements):
raise ValueError(
f'attempt to assign sequence of size {len(replacements)} to '
f'extended slice of size {slice_size}')
updates = []
for i, r in enumerate(replacements):
update = self._set_item_without_permission_check(start + i * step, r)
if update is not None:
updates.append(update)
if flags.is_change_notification_enabled() and updates:
self._notify_field_updates(updates)
elif isinstance(index, numbers.Integral):
if index < -len(self) or index >= len(self):
raise IndexError(
f'list assignment index out of range. '
f'Length={len(self)}, index={index}')
update = self._set_item_without_permission_check(index, value)
if flags.is_change_notification_enabled() and update:
self._notify_field_updates([update])
else:
raise TypeError(
f'list assignment index must be an integer. Encountered {index!r}.')
def __delitem__(self, index: int) -> None:
"""Delete an item from the List."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot delete item from a sealed List.')
if not base.writtable_via_accessors(self):
raise base.WritePermissionError(
self._error_message('Cannot delete List item while accessor_writable '
'is set to False. '
'Use \'rebind\' method instead.'))
if not isinstance(index, numbers.Integral):
raise TypeError(
f'list index must be an integer. Encountered {index!r}.')
if index < -len(self) or index >= len(self):
raise IndexError(
f'list index out of range. '
f'Length={len(self)}, index={index}')
old_value = self.sym_getattr(index)
super().__delitem__(index)
if flags.is_change_notification_enabled():
self._notify_field_updates([
base.FieldUpdate(
self.sym_path + index, self,
self._value_spec.element if self._value_spec else None,
old_value, pg_typing.MISSING_VALUE)
])
def __add__(self, other: Iterable[Any]) -> 'List':
"""Returns a concatenated List of self and other."""
concatenated = self.copy()
concatenated.extend(other)
return concatenated
def __mul__(self, n: int) -> 'List':
"""Returns a repeated Lit of self."""
result = List()
for _ in range(n):
result.extend(self)
if self._value_spec is not None:
result.use_value_spec(self._value_spec)
return result
def __rmul__(self, n: int) -> 'List':
"""Returns a repeated Lit of self."""
return self.__mul__(n)
def copy(self) -> 'List':
"""Shallow current list."""
return List(super().copy(), value_spec=self._value_spec)
def append(self, value: Any) -> None:
"""Appends an item."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot append element on a sealed List.')
if self.max_size is not None and len(self) >= self.max_size:
raise ValueError(f'List reached its max size {self.max_size}.')
update = self._set_item_without_permission_check(len(self), value)
if flags.is_change_notification_enabled() and update:
self._notify_field_updates([update])
def insert(self, index: int, value: Any) -> None:
"""Inserts an item at a given position."""
if base.treats_as_sealed(self):
raise base.WritePermissionError(
'Cannot insert element into a sealed List.')
if self.max_size is not None and len(self) >= self.max_size:
raise ValueError(f'List reached its max size {self.max_size}.')
update = self._set_item_without_permission_check(
index, mark_as_insertion(value))
if flags.is_change_notification_enabled() and update:
self._notify_field_updates([update])
def pop(self, index: int = -1) -> Any:
"""Pop an item and return its value."""
if index < -len(self) or index >= len(self):
raise IndexError('pop index out of range')
index = (index + len(self)) % len(self)
value = self[index]
with flags.allow_writable_accessors(True):
del self[index]
return value
def remove(self, value: Any) -> None:
"""Removes the first occurrence of the value."""
for i, item in self.sym_items():
if item == value:
if (self._value_spec and self._value_spec.min_size == len(self)):
raise ValueError(
f'Cannot remove item: min size ({self._value_spec.min_size}) '
f'is reached.')
del self[i]
return
raise ValueError(f'{value!r} not in list.')
def extend(self, other: Iterable[Any]) -> None:
# NOTE(daiyip): THIS IS A WORKAROUND FOR WORKING WITH PICKLE.
# `pg.List` is a subclass of `list`, therefore, when pickle loads a list,
# it tries to set the list values directly by calling `extend` without
# calling `pg.List.__init__` in the first place. As a result, an error
# will be raised complaining that an attribute set up during `__init__`
# is not available. A mitigation to this issue is to detect such calls in
# `extend` and simply do nothing as follows, which gives a chance to
# `pg.List.__getstate__` to deal with the restoration logic as an object
# (instead of a list).
if not hasattr(self, '_sym_parent'):
return
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot extend a sealed List.')
# Extend on the symbolic form instead of the evaluated form.
iter_other = other.sym_values() if isinstance(other, List) else other
other = list(iter_other)
if self.max_size is not None and len(self) + len(other) > self.max_size:
raise ValueError(
f'Cannot extend List: the number of elements '
f'({len(self) + len(other)}) exceeds max size ({self.max_size}).')
updates = []
for v in other:
update = self._set_item_without_permission_check(len(self), v)
if update is not None:
updates.append(update)
if flags.is_change_notification_enabled() and updates:
self._notify_field_updates(updates)
def clear(self) -> None:
"""Clears the list."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot clear a sealed List.')
if self._value_spec and self._value_spec.min_size > 0:
raise ValueError(
f'List cannot be cleared: min size is {self._value_spec.min_size}.')
super().clear()
def sort(self, *, key=None, reverse=False) -> None:
"""Sorts the items of the list in place.."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot sort a sealed List.')
super().sort(key=key, reverse=reverse)
def reverse(self) -> None:
"""Reverse the elements of the list in place."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot reverse a sealed List.')
super().reverse()
def custom_apply(
self,
path: object_utils.KeyPath,
value_spec: pg_typing.ValueSpec,
allow_partial: bool,
child_transform: Optional[
Callable[[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, 'List']:
"""Implement pg.typing.CustomTyping interface.
Args:
path: KeyPath of current object.
value_spec: Origin value spec of the field.
allow_partial: Whether to allow a partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
Returns:
A tuple (proceed_with_standard_apply, transformed value)
"""
proceed_with_standard_apply = True
if self._value_spec:
if value_spec and not value_spec.is_compatible(self._value_spec):
raise ValueError(
object_utils.message_on_path(
f'List (spec={self._value_spec!r}) cannot be assigned to an '
f'incompatible field (spec={value_spec!r}).', path))
if self._allow_partial == allow_partial:
proceed_with_standard_apply = False
else:
self._allow_partial = allow_partial
elif isinstance(value_spec, pg_typing.List):
self._value_spec = value_spec
return (proceed_with_standard_apply, self)
def sym_jsonify(
self,
use_inferred: bool = False,
**kwargs) -> object_utils.JSONValueType:
"""Converts current list to a list of plain Python objects."""
def json_item(idx):
v = self.sym_getattr(idx)
if use_inferred and isinstance(v, base.Inferential):
v = self.sym_inferred(idx, default=v)
return base.to_json(v, use_inferred=use_inferred, **kwargs)
return [json_item(i) for i in range(len(self))]
def format(
self,
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
*,
python_format: bool = False,
use_inferred: bool = False,
cls_name: Optional[str] = None,
bracket_type: object_utils.BracketType = object_utils.BracketType.SQUARE,
**kwargs) -> str:
"""Formats this List."""
def _indent(text, indent):
return ' ' * 2 * indent + text
cls_name = cls_name or ''
open_bracket, close_bracket = object_utils.bracket_chars(bracket_type)
s = [f'{cls_name}{open_bracket}']
if compact:
kv_strs = []
for idx, elem in self.sym_items():
if use_inferred and isinstance(elem, base.Inferential):
elem = self.sym_inferred(idx, default=elem)
v_str = object_utils.format(
elem, compact, verbose, root_indent + 1,
python_format=python_format, use_inferred=use_inferred, **kwargs)
if python_format:
kv_strs.append(v_str)
else:
kv_strs.append(f'{idx}: {v_str}')
s.append(', '.join(kv_strs))
s.append(close_bracket)
else:
if self:
for idx, elem in self.sym_items():
if use_inferred and isinstance(elem, base.Inferential):
elem = self.sym_inferred(idx, default=elem)
if idx == 0:
s.append('\n')
else:
s.append(',\n')
v_str = object_utils.format(
elem, compact, verbose, root_indent + 1,
python_format=python_format, use_inferred=use_inferred, **kwargs)
if python_format:
s.append(_indent(v_str, root_indent + 1))
else:
s.append(_indent(f'{idx} : {v_str}', root_indent + 1))
s.append('\n')
s.append(_indent(close_bracket, root_indent))
else:
s.append(close_bracket)
return ''.join(s)
def __copy__(self) -> 'List':
"""List.copy."""
return self.sym_clone(deep=False)
def __deepcopy__(self, memo) -> 'List':
return self.sym_clone(deep=True, memo=memo)
def __hash__(self) -> int:
"""Overriden hashing function."""
return self.sym_hash()
|
(items: Optional[Iterable[Any]] = None, *, value_spec: Optional[pyglove.core.typing.value_specs.List] = None, onchange_callback: Optional[Callable[[Dict[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.symbolic.base.FieldUpdate]], NoneType]] = None, allow_partial: bool = False, accessor_writable: bool = True, sealed: bool = False, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None)
|
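A minimal end-to-end sketch of the constructor arguments above (not part of the dataset row; assumes ``pyglove`` is installed and imported as ``pg``)::
  import pyglove as pg
  def on_change(updates):
    # `updates` maps KeyPath -> FieldUpdate for each modified location.
    print(list(updates.keys()))
  l = pg.List(
      [1, 2, 3],
      value_spec=pg.typing.List(pg.typing.Int(min_value=1), max_size=10),
      onchange_callback=on_change)
  l.append(4)      # Triggers `on_change`.
  try:
    l.append(0)    # Rejected by the value spec: 0 < min_value 1.
  except ValueError as e:
    print(e)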
41,005 |
pyglove.core.symbolic.list
|
__add__
|
Returns a concatenated List of self and other.
|
def __add__(self, other: Iterable[Any]) -> 'List':
"""Returns a concatenated List of self and other."""
concatenated = self.copy()
concatenated.extend(other)
return concatenated
|
(self, other: Iterable[Any]) -> pyglove.core.symbolic.list.List
|
41,006 |
pyglove.core.symbolic.list
|
__copy__
|
List.copy.
|
def __copy__(self) -> 'List':
"""List.copy."""
return self.sym_clone(deep=False)
|
(self) -> pyglove.core.symbolic.list.List
|
41,007 |
pyglove.core.symbolic.list
|
__deepcopy__
| null |
def __deepcopy__(self, memo) -> 'List':
return self.sym_clone(deep=True, memo=memo)
|
(self, memo) -> pyglove.core.symbolic.list.List
|
41,008 |
pyglove.core.symbolic.list
|
__delitem__
|
Delete an item from the List.
|
def __delitem__(self, index: int) -> None:
"""Delete an item from the List."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot delete item from a sealed List.')
if not base.writtable_via_accessors(self):
raise base.WritePermissionError(
self._error_message('Cannot delete List item while accessor_writable '
'is set to False. '
'Use \'rebind\' method instead.'))
if not isinstance(index, numbers.Integral):
raise TypeError(
f'list index must be an integer. Encountered {index!r}.')
if index < -len(self) or index >= len(self):
raise IndexError(
f'list index out of range. '
f'Length={len(self)}, index={index}')
old_value = self.sym_getattr(index)
super().__delitem__(index)
if flags.is_change_notification_enabled():
self._notify_field_updates([
base.FieldUpdate(
self.sym_path + index, self,
self._value_spec.element if self._value_spec else None,
old_value, pg_typing.MISSING_VALUE)
])
|
(self, index: int) -> NoneType
|
41,009 |
pyglove.core.symbolic.list
|
__getitem__
|
Gets the item at a given position.
|
def __getitem__(self, index) -> Any:
"""Gets the item at a given position."""
if isinstance(index, numbers.Integral):
if index < -len(self) or index >= len(self):
raise IndexError('list index out of range')
return self.sym_inferred(index)
elif isinstance(index, slice):
return [self[i] for i in range(*self._parse_slice(index))]
else:
raise TypeError(
f'list index must be an integer. Encountered {index!r}.')
|
(self, index) -> Any
|
41,010 |
pyglove.core.symbolic.list
|
__getstate__
|
Customizes pickle.dump.
|
def __getstate__(self) -> Any:
"""Customizes pickle.dump."""
return dict(value=list(self), kwargs=self._init_kwargs())
|
(self) -> Any
|
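A short sketch of why ``__getstate__`` and ``__setstate__`` matter (assumes ``pyglove`` importable as ``pg``): without them, ``pickle`` would rebuild the instance as a plain ``list`` via ``extend`` and skip ``__init__``::
  import pickle
  import pyglove as pg
  l = pg.List([{'foo': 1}, 2])
  restored = pickle.loads(pickle.dumps(l))
  assert isinstance(restored, pg.List)   # Restored as pg.List, not list.
  assert list(restored) == list(l)       # Contents round-trip intact.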
41,011 |
pyglove.core.symbolic.list
|
__hash__
|
Overridden hashing function.
|
def __hash__(self) -> int:
"""Overriden hashing function."""
return self.sym_hash()
|
(self) -> int
|
41,012 |
pyglove.core.symbolic.list
|
__init__
|
Constructor.
Args:
items: An optional iterable object as the initial value for this list.
value_spec: Value spec that applies to this List.
onchange_callback: Callback when sub-tree has been modified.
allow_partial: Whether to allow unbound or partial fields. This takes
effect only when value_spec is not None.
accessor_writable: Whether to allow modification of this List using
accessors (operator[]).
sealed: Whether to seal this List after creation.
root_path: KeyPath of this List in its object tree.
|
def __init__(
self,
items: Optional[Iterable[Any]] = None,
*,
value_spec: Optional[pg_typing.List] = None,
onchange_callback: Optional[Callable[
[Dict[object_utils.KeyPath, base.FieldUpdate]], None]] = None,
allow_partial: bool = False,
accessor_writable: bool = True,
sealed: bool = False,
root_path: Optional[object_utils.KeyPath] = None):
"""Constructor.
Args:
items: An optional iterable object as the initial value for this list.
value_spec: Value spec that applies to this List.
onchange_callback: Callback when sub-tree has been modified.
allow_partial: Whether to allow unbound or partial fields. This takes
effect only when value_spec is not None.
accessor_writable: Whether to allow modification of this List using
accessors (operator[]).
sealed: Whether to seal this List after creation.
root_path: KeyPath of this List in its object tree.
"""
if value_spec and not isinstance(value_spec, pg_typing.List):
raise TypeError(
f'Argument \'value_spec\' must be a `pg.typing.List` object. '
f'Encountered {value_spec}.')
# We delay seal operation until items are filled.
base.Symbolic.__init__(
self,
allow_partial=allow_partial,
accessor_writable=accessor_writable,
sealed=False,
root_path=root_path)
self._value_spec = None
self._onchange_callback = None
list.__init__(self)
if items:
# Copy the symbolic form instead of evaluated form.
if isinstance(items, List):
items = items.sym_values()
for item in items:
self._set_item_without_permission_check(len(self), item)
if value_spec:
self.use_value_spec(value_spec, allow_partial)
# NOTE(daiyip): We set onchange callback at the end of init to avoid
# triggering during initialization.
self._onchange_callback = onchange_callback
self.seal(sealed)
|
(self, items: Optional[Iterable[Any]] = None, *, value_spec: Optional[pyglove.core.typing.value_specs.List] = None, onchange_callback: Optional[Callable[[Dict[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.symbolic.base.FieldUpdate]], NoneType]] = None, allow_partial: bool = False, accessor_writable: bool = True, sealed: bool = False, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None)
|
41,013 |
pyglove.core.symbolic.list
|
__iter__
|
Iterates the list.
|
def __iter__(self):
"""Iterates the list."""
for i in range(len(self)):
yield self.sym_inferred(i)
|
(self)
|
41,014 |
pyglove.core.symbolic.list
|
__mul__
|
Returns a repeated List of self.
|
def __mul__(self, n: int) -> 'List':
"""Returns a repeated Lit of self."""
result = List()
for _ in range(n):
result.extend(self)
if self._value_spec is not None:
result.use_value_spec(self._value_spec)
return result
|
(self, n: int) -> pyglove.core.symbolic.list.List
|
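A small sketch of repetition (assumes ``pyglove`` as ``pg``); the result is a new ``pg.List`` that re-applies the original value spec, if any::
  import pyglove as pg
  l = pg.List([1, 2])
  assert list(l * 3) == [1, 2, 1, 2, 1, 2]
  assert list(2 * l) == [1, 2, 1, 2]   # __rmul__ delegates to __mul__.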
41,015 |
pyglove.core.symbolic.list
|
__rmul__
|
Returns a repeated List of self.
|
def __rmul__(self, n: int) -> 'List':
"""Returns a repeated Lit of self."""
return self.__mul__(n)
|
(self, n: int) -> pyglove.core.symbolic.list.List
|
41,016 |
pyglove.core.symbolic.list
|
__setitem__
|
Set item in this List.
|
def __setitem__(self, index, value: Any) -> None:
"""Set item in this List."""
if base.treats_as_sealed(self):
raise base.WritePermissionError(
self._error_message('Cannot set item for a sealed List.'))
if not base.writtable_via_accessors(self):
raise base.WritePermissionError(
self._error_message('Cannot modify List item by __setitem__ while '
'accessor_writable is set to False. '
'Use \'rebind\' method instead.'))
if isinstance(index, slice):
start, stop, step = self._parse_slice(index)
replacements = [self._formalized_value(i, v) for i, v in enumerate(value)]
if step < 0:
replacements.reverse()
step = -step
slice_size = math.ceil((stop - start) * 1.0 / step)
if step == 1:
if slice_size < len(replacements):
for i in range(slice_size, len(replacements)):
replacements[i] = Insertion(replacements[i])
else:
replacements.extend(
[pg_typing.MISSING_VALUE
for _ in range(slice_size - len(replacements))])
elif slice_size != len(replacements):
raise ValueError(
f'attempt to assign sequence of size {len(replacements)} to '
f'extended slice of size {slice_size}')
updates = []
for i, r in enumerate(replacements):
update = self._set_item_without_permission_check(start + i * step, r)
if update is not None:
updates.append(update)
if flags.is_change_notification_enabled() and updates:
self._notify_field_updates(updates)
elif isinstance(index, numbers.Integral):
if index < -len(self) or index >= len(self):
raise IndexError(
f'list assignment index out of range. '
f'Length={len(self)}, index={index}')
update = self._set_item_without_permission_check(index, value)
if flags.is_change_notification_enabled() and update:
self._notify_field_updates([update])
else:
raise TypeError(
f'list assignment index must be an integer. Encountered {index!r}.')
|
(self, index, value: Any) -> NoneType
|
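A sketch of the slice-assignment semantics implied by the code above (assumes ``pyglove`` as ``pg``): for unit-step slices an oversized replacement inserts the surplus items, while extended slices require matching sizes::
  import pyglove as pg
  l = pg.List([0, 1, 2, 3])
  l[1:3] = [10, 20, 30]        # Replaces two items, inserts one more.
  assert list(l) == [0, 10, 20, 30, 3]
  l[::2] = [-1, -1, -1]        # Extended slice: sizes must match.
  assert list(l) == [-1, 10, -1, 30, -1]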
41,020 |
pyglove.core.symbolic.list
|
_formalized_value
|
Get transformed (formal) value from user input.
|
def _formalized_value(self, idx: int, value: Any):
"""Get transformed (formal) value from user input."""
allow_partial = base.accepts_partial(self)
value = base.from_json(
value,
allow_partial=allow_partial,
root_path=object_utils.KeyPath(idx, self.sym_path))
if self._value_spec and flags.is_type_check_enabled():
value = self._value_spec.element.apply(
value,
allow_partial=allow_partial,
transform_fn=base.symbolic_transform_fn(self._allow_partial),
root_path=object_utils.KeyPath(idx, self.sym_path))
return self._relocate_if_symbolic(idx, value)
|
(self, idx: int, value: Any)
|
41,024 |
pyglove.core.symbolic.list
|
_on_change
|
On change event of List.
|
def _on_change(self,
field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
"""On change event of List."""
# Do nothing for now to handle changes of List.
# NOTE(daiyip): Remove items that are MISSING_VALUES.
keys_to_remove = []
for i, item in self.sym_items():
if pg_typing.MISSING_VALUE == item:
keys_to_remove.append(i)
if keys_to_remove:
for i in reversed(keys_to_remove):
list.__delitem__(self, i)
# Update paths for children.
for idx, item in self.sym_items():
if isinstance(item, base.TopologyAware) and item.sym_path.key != idx:
item.sym_setpath(object_utils.KeyPath(idx, self.sym_path))
if self._onchange_callback is not None:
self._onchange_callback(field_updates)
|
(self, field_updates: Dict[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.symbolic.base.FieldUpdate])
|
41,025 |
pyglove.core.symbolic.list
|
_parse_slice
| null |
def _parse_slice(self, index: slice) -> Tuple[int, int, int]:
start = index.start if index.start is not None else 0
start = max(-len(self), start)
start = min(len(self), start)
if start < 0:
start += len(self)
stop = index.stop if index.stop is not None else len(self)
stop = max(-len(self), stop)
stop = min(len(self), stop)
if stop < 0:
stop += len(self)
step = index.step if index.step is not None else 1
return start, stop, step
|
(self, index: slice) -> Tuple[int, int, int]
|
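For positive steps this normalization matches Python's own ``slice.indices``, which can serve as a mental model (plain-Python sketch, no pyglove needed)::
  s = slice(-2, None)                # Same as l[-2:].
  start, stop, step = s.indices(5)   # For a sequence of length 5.
  assert (start, stop, step) == (3, 5, 1)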
41,028 |
pyglove.core.symbolic.list
|
_set_item_without_permission_check
|
Set or add an item without permission check.
|
def _set_item_without_permission_check( # pytype: disable=signature-mismatch # overriding-parameter-type-checks
self, key: int, value: Any) -> Optional[base.FieldUpdate]:
"""Set or add an item without permission check."""
assert isinstance(key, numbers.Integral), key
index = key
if index >= len(self):
# Appending MISSING_VALUE is considered no-op.
if value == pg_typing.MISSING_VALUE:
return None
index = len(self)
should_insert = False
if isinstance(value, Insertion):
should_insert = True
value = value.value
old_value = pg_typing.MISSING_VALUE
# Replace an existing value.
if index < len(self) and not should_insert:
old_value = list.__getitem__(self, index)
# Generates no update as old value is the same as the new value.
if old_value is value:
return None
new_value = self._formalized_value(index, value)
if index < len(self):
if should_insert:
list.insert(self, index, new_value)
else:
list.__setitem__(self, index, new_value)
# Detach old value from object tree.
if isinstance(old_value, base.TopologyAware):
old_value.sym_setparent(None)
else:
super().append(new_value)
return base.FieldUpdate(
self.sym_path + index, self,
self._value_spec.element if self._value_spec else None,
old_value, new_value)
|
(self, key: int, value: Any) -> Optional[pyglove.core.symbolic.base.FieldUpdate]
|
41,030 |
pyglove.core.symbolic.list
|
_sym_clone
|
Override Symbolic._clone.
|
def _sym_clone(self, deep: bool, memo=None) -> 'List':
"""Override Symbolic._clone."""
source = []
for v in self.sym_values():
if deep or isinstance(v, base.Symbolic):
v = base.clone(v, deep, memo)
source.append(v)
return List(
source,
value_spec=self._value_spec,
allow_partial=self._allow_partial,
accessor_writable=self._accessor_writable,
# NOTE(daiyip): parent and root_path are reset to empty
# for the copied object.
root_path=None)
|
(self, deep: bool, memo=None) -> pyglove.core.symbolic.list.List
|
41,031 |
pyglove.core.symbolic.list
|
_sym_getattr
|
Gets symbolic attribute by index.
|
def _sym_getattr(self, key: int) -> Any: # pytype: disable=signature-mismatch # overriding-parameter-type-checks
"""Gets symbolic attribute by index."""
return super().__getitem__(key)
|
(self, key: int) -> Any
|
41,033 |
pyglove.core.symbolic.list
|
_sym_missing
|
Returns missing fields.
|
def _sym_missing(self) -> Dict[Any, Any]:
"""Returns missing fields."""
missing = dict()
for idx, elem in self.sym_items():
if isinstance(elem, base.Symbolic):
missing_child = elem.sym_missing(flatten=False)
if missing_child:
missing[idx] = missing_child
return missing
|
(self) -> Dict[Any, Any]
|
41,034 |
pyglove.core.symbolic.list
|
_sym_nondefault
|
Returns non-default values.
|
def _sym_nondefault(self) -> Dict[int, Any]:
"""Returns non-default values."""
non_defaults = dict()
for idx, elem in self.sym_items():
if isinstance(elem, base.Symbolic):
non_defaults_child = elem.non_default_values(flatten=False)
if non_defaults_child:
non_defaults[idx] = non_defaults_child
else:
non_defaults[idx] = elem
return non_defaults
|
(self) -> Dict[int, Any]
|
41,036 |
pyglove.core.symbolic.list
|
_sym_rebind
|
Subclass specific rebind implementation.
|
def _sym_rebind(
self, path_value_pairs: typing.Dict[object_utils.KeyPath, Any]
) -> typing.List[base.FieldUpdate]:
"""Subclass specific rebind implementation."""
updates = []
# Apply the updates in reverse order so that the paths being operated on
# are not shifted by insertions and deletions.
path_value_pairs = sorted(
path_value_pairs.items(), key=lambda x: x[0], reverse=True)
for k, v in path_value_pairs:
update = self._set_item_of_current_tree(k, v)
if update is not None:
updates.append(update)
# Reverse the updates so they are ordered from the smallest index to
# the largest.
updates.reverse()
return updates
|
(self, path_value_pairs: Dict[pyglove.core.object_utils.value_location.KeyPath, Any]) -> List[pyglove.core.symbolic.base.FieldUpdate]
|
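A sketch of a multi-location rebind (assumes ``pyglove`` as ``pg``); applying updates in reverse path order keeps earlier indices stable under insertions and deletions::
  import pyglove as pg
  l = pg.List([{'x': 1}, {'x': 2}, {'x': 3}])
  l.rebind({'[0].x': 10, '[2].x': 30})   # Update two locations at once.
  assert l[0].x == 10 and l[2].x == 30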
41,037 |
pyglove.core.symbolic.list
|
_update_children_paths
|
Update children paths according to root_path of current node.
|
def _update_children_paths(
self,
old_path: object_utils.KeyPath,
new_path: object_utils.KeyPath) -> None:
"""Update children paths according to root_path of current node."""
del old_path
for idx, item in self.sym_items():
if isinstance(item, base.TopologyAware):
item.sym_setpath(object_utils.KeyPath(idx, new_path))
|
(self, old_path: pyglove.core.object_utils.value_location.KeyPath, new_path: pyglove.core.object_utils.value_location.KeyPath) -> NoneType
|
41,038 |
pyglove.core.symbolic.list
|
append
|
Appends an item.
|
def append(self, value: Any) -> None:
"""Appends an item."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot append element on a sealed List.')
if self.max_size is not None and len(self) >= self.max_size:
raise ValueError(f'List reached its max size {self.max_size}.')
update = self._set_item_without_permission_check(len(self), value)
if flags.is_change_notification_enabled() and update:
self._notify_field_updates([update])
|
(self, value: Any) -> NoneType
|
41,039 |
pyglove.core.symbolic.list
|
clear
|
Clears the list.
|
def clear(self) -> None:
"""Clears the list."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot clear a sealed List.')
if self._value_spec and self._value_spec.min_size > 0:
raise ValueError(
f'List cannot be cleared: min size is {self._value_spec.min_size}.')
super().clear()
|
(self) -> NoneType
|
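A sketch of the ``min_size`` guard in ``clear`` (assumes ``pyglove`` as ``pg``)::
  import pyglove as pg
  l = pg.List([1, 2], value_spec=pg.typing.List(pg.typing.Int(), min_size=1))
  try:
    l.clear()                    # Refused: would drop below min_size.
  except ValueError as e:
    print(e)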
41,041 |
pyglove.core.symbolic.list
|
copy
|
Returns a shallow copy of the current list.
|
def copy(self) -> 'List':
"""Shallow current list."""
return List(super().copy(), value_spec=self._value_spec)
|
(self) -> pyglove.core.symbolic.list.List
|
41,042 |
pyglove.core.symbolic.list
|
custom_apply
|
Implement pg.typing.CustomTyping interface.
Args:
path: KeyPath of current object.
value_spec: Origin value spec of the field.
allow_partial: Whether to allow a partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
Returns:
A tuple (proceed_with_standard_apply, transformed value)
|
def custom_apply(
self,
path: object_utils.KeyPath,
value_spec: pg_typing.ValueSpec,
allow_partial: bool,
child_transform: Optional[
Callable[[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, 'List']:
"""Implement pg.typing.CustomTyping interface.
Args:
path: KeyPath of current object.
value_spec: Origin value spec of the field.
allow_partial: Whether to allow a partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
Returns:
A tuple (proceed_with_standard_apply, transformed value)
"""
proceed_with_standard_apply = True
if self._value_spec:
if value_spec and not value_spec.is_compatible(self._value_spec):
raise ValueError(
object_utils.message_on_path(
f'List (spec={self._value_spec!r}) cannot be assigned to an '
f'incompatible field (spec={value_spec!r}).', path))
if self._allow_partial == allow_partial:
proceed_with_standard_apply = False
else:
self._allow_partial = allow_partial
elif isinstance(value_spec, pg_typing.List):
self._value_spec = value_spec
return (proceed_with_standard_apply, self)
|
(self, path: pyglove.core.object_utils.value_location.KeyPath, value_spec: pyglove.core.typing.class_schema.ValueSpec, allow_partial: bool, child_transform: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None) -> Tuple[bool, pyglove.core.symbolic.list.List]
|
41,043 |
pyglove.core.symbolic.list
|
extend
| null |
def extend(self, other: Iterable[Any]) -> None:
# NOTE(daiyip): THIS IS A WORKAROUND FOR WORKING WITH PICKLE.
# `pg.List` is a subclass of `list`, therefore, when pickle loads a list,
# it tries to set the list values directly by calling `extend` without
# calling `pg.List.__init__` in the first place. As a result, an error
# will be raised complaining that an attribute set up during `__init__`
# is not available. A mitigation to this issue is to detect such calls in
# `extend` and simply do nothing as follows, which gives a chance to
# `pg.List.__getstate__` to deal with the restoration logic as an object
# (instead of a list).
if not hasattr(self, '_sym_parent'):
return
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot extend a sealed List.')
# Extend on the symbolic form instead of the evaluated form.
iter_other = other.sym_values() if isinstance(other, List) else other
other = list(iter_other)
if self.max_size is not None and len(self) + len(other) > self.max_size:
raise ValueError(
f'Cannot extend List: the number of elements '
f'({len(self) + len(other)}) exceeds max size ({self.max_size}).')
updates = []
for v in other:
update = self._set_item_without_permission_check(len(self), v)
if update is not None:
updates.append(update)
if flags.is_change_notification_enabled() and updates:
self._notify_field_updates(updates)
|
(self, other: Iterable[Any]) -> NoneType
|
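A sketch of the ``max_size`` check in ``extend`` (assumes ``pyglove`` as ``pg``); the combined size is validated before any element is inserted, so a failed extend leaves the list unchanged::
  import pyglove as pg
  l = pg.List([1, 2], value_spec=pg.typing.List(pg.typing.Int(), max_size=3))
  try:
    l.extend([3, 4])             # 2 + 2 elements > max_size 3.
  except ValueError as e:
    print(e)
  assert list(l) == [1, 2]       # Unchanged.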
41,044 |
pyglove.core.symbolic.list
|
format
|
Formats this List.
|
def format(
self,
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
*,
python_format: bool = False,
use_inferred: bool = False,
cls_name: Optional[str] = None,
bracket_type: object_utils.BracketType = object_utils.BracketType.SQUARE,
**kwargs) -> str:
"""Formats this List."""
def _indent(text, indent):
return ' ' * 2 * indent + text
cls_name = cls_name or ''
open_bracket, close_bracket = object_utils.bracket_chars(bracket_type)
s = [f'{cls_name}{open_bracket}']
if compact:
kv_strs = []
for idx, elem in self.sym_items():
if use_inferred and isinstance(elem, base.Inferential):
elem = self.sym_inferred(idx, default=elem)
v_str = object_utils.format(
elem, compact, verbose, root_indent + 1,
python_format=python_format, use_inferred=use_inferred, **kwargs)
if python_format:
kv_strs.append(v_str)
else:
kv_strs.append(f'{idx}: {v_str}')
s.append(', '.join(kv_strs))
s.append(close_bracket)
else:
if self:
for idx, elem in self.sym_items():
if use_inferred and isinstance(elem, base.Inferential):
elem = self.sym_inferred(idx, default=elem)
if idx == 0:
s.append('\n')
else:
s.append(',\n')
v_str = object_utils.format(
elem, compact, verbose, root_indent + 1,
python_format=python_format, use_inferred=use_inferred, **kwargs)
if python_format:
s.append(_indent(v_str, root_indent + 1))
else:
s.append(_indent(f'{idx} : {v_str}', root_indent + 1))
s.append('\n')
s.append(_indent(close_bracket, root_indent))
else:
s.append(close_bracket)
return ''.join(s)
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, *, python_format: bool = False, use_inferred: bool = False, cls_name: Optional[str] = None, bracket_type: pyglove.core.object_utils.formatting.BracketType = <BracketType.SQUARE: 1>, **kwargs) -> str
|
41,045 |
pyglove.core.symbolic.list
|
insert
|
Inserts an item at a given position.
|
def insert(self, index: int, value: Any) -> None:
"""Inserts an item at a given position."""
if base.treats_as_sealed(self):
raise base.WritePermissionError(
'Cannot insert element into a sealed List.')
if self.max_size is not None and len(self) >= self.max_size:
raise ValueError(f'List reached its max size {self.max_size}.')
update = self._set_item_without_permission_check(
index, mark_as_insertion(value))
if flags.is_change_notification_enabled() and update:
self._notify_field_updates([update])
|
(self, index: int, value: Any) -> NoneType
|
41,049 |
pyglove.core.symbolic.list
|
pop
|
Pop an item and return its value.
|
def pop(self, index: int = -1) -> Any:
"""Pop an item and return its value."""
if index < -len(self) or index >= len(self):
raise IndexError('pop index out of range')
index = (index + len(self)) % len(self)
value = self[index]
with flags.allow_writable_accessors(True):
del self[index]
return value
|
(self, index: int = -1) -> Any
|
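A sketch of ``pop`` (assumes ``pyglove`` as ``pg``); it temporarily re-enables writable accessors around the deletion, so it works even when ``accessor_writable`` is False::
  import pyglove as pg
  l = pg.List([1, 2, 3], accessor_writable=False)
  assert l.pop() == 3            # Defaults to the last element.
  assert l.pop(0) == 1
  assert list(l) == [2]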
41,051 |
pyglove.core.symbolic.list
|
remove
|
Removes the first occurrence of the value.
|
def remove(self, value: Any) -> None:
"""Removes the first occurrence of the value."""
for i, item in self.sym_items():
if item == value:
if (self._value_spec and self._value_spec.min_size == len(self)):
raise ValueError(
f'Cannot remove item: min size ({self._value_spec.min_size}) '
f'is reached.')
del self[i]
return
raise ValueError(f'{value!r} not in list.')
|
(self, value: Any) -> NoneType
|
41,052 |
pyglove.core.symbolic.list
|
reverse
|
Reverse the elements of the list in place.
|
def reverse(self) -> None:
"""Reverse the elements of the list in place."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot reverse a sealed List.')
super().reverse()
|
(self) -> NoneType
|
41,054 |
pyglove.core.symbolic.list
|
seal
|
Seal or unseal current object from further modification.
|
def seal(self, sealed: bool = True) -> 'List':
"""Seal or unseal current object from further modification."""
if self.is_sealed == sealed:
return self
for elem in self.sym_values():
if isinstance(elem, base.Symbolic):
elem.seal(sealed)
super().seal(sealed)
return self
|
(self, sealed: bool = True) -> pyglove.core.symbolic.list.List
|
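A sketch of sealing and unsealing (assumes ``pyglove`` as ``pg``; the exception name ``pg.WritePermissionError`` being exported at the top level is an assumption here)::
  import pyglove as pg
  l = pg.List([{'x': 1}], sealed=True)
  try:
    l.append(2)                  # Blocked while sealed.
  except pg.WritePermissionError as e:
    print(e)
  l.seal(False)                  # Unseal; children are unsealed too.
  l.append(2)                    # Now accepted.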
41,056 |
pyglove.core.symbolic.list
|
sort
|
Sorts the items of the list in place.
|
def sort(self, *, key=None, reverse=False) -> None:
"""Sorts the items of the list in place.."""
if base.treats_as_sealed(self):
raise base.WritePermissionError('Cannot sort a sealed List.')
super().sort(key=key, reverse=reverse)
|
(self, *, key=None, reverse=False) -> NoneType
|
41,058 |
pyglove.core.symbolic.list
|
sym_attr_field
|
Returns the field definition for a symbolic attribute.
|
def sym_attr_field(self, key: Union[str, int]) -> Optional[pg_typing.Field]:
"""Returns the field definition for a symbolic attribute."""
del key
if self._value_spec is None:
return None
return self._value_spec.element
|
(self, key: Union[str, int]) -> Optional[pyglove.core.typing.class_schema.Field]
|
41,067 |
pyglove.core.symbolic.list
|
sym_hasattr
|
Tests if a symbolic attribute exists.
|
def sym_hasattr(self, key: Union[str, int]) -> bool:
"""Tests if a symbolic attribute exists."""
return (isinstance(key, numbers.Integral)
and key >= -len(self) and key < len(self))
|
(self, key: Union[str, int]) -> bool
|
41,068 |
pyglove.core.symbolic.list
|
sym_hash
|
Symbolic hashing.
|
def sym_hash(self) -> int:
"""Symbolically hashing."""
return base.sym_hash(
(self.__class__, tuple([base.sym_hash(e) for e in self.sym_values()]))
)
|
(self) -> int
|
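A sketch of symbolic hashing (assumes ``pyglove`` as ``pg``): two independently constructed but symbolically equal lists hash alike::
  import pyglove as pg
  a = pg.List([1, {'x': 2}])
  b = pg.List([1, {'x': 2}])
  assert hash(a) == hash(b)      # __hash__ delegates to sym_hash.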
41,071 |
pyglove.core.symbolic.list
|
sym_items
|
Iterates the (key, value) pairs of symbolic attributes.
|
def sym_items(self) -> Iterator[Tuple[int, Any]]:
"""Iterates the (key, value) pairs of symbolic attributes."""
for i in range(len(self)):
yield (i, super().__getitem__(i))
|
(self) -> Iterator[Tuple[int, Any]]
|
41,072 |
pyglove.core.symbolic.list
|
sym_jsonify
|
Converts current list to a list of plain Python objects.
|
def sym_jsonify(
self,
use_inferred: bool = False,
**kwargs) -> object_utils.JSONValueType:
"""Converts current list to a list of plain Python objects."""
def json_item(idx):
v = self.sym_getattr(idx)
if use_inferred and isinstance(v, base.Inferential):
v = self.sym_inferred(idx, default=v)
return base.to_json(v, use_inferred=use_inferred, **kwargs)
return [json_item(i) for i in range(len(self))]
|
(self, use_inferred: bool = False, **kwargs) -> Union[int, float, bool, str, List[Any], Dict[str, Any]]
|
41,073 |
pyglove.core.symbolic.list
|
sym_keys
|
Symbolically iterates indices.
|
def sym_keys(self) -> Iterator[int]:
"""Symbolically iterates indices."""
for i in range(len(self)):
yield i
|
(self) -> Iterator[int]
|
41,081 |
pyglove.core.symbolic.base
|
sym_setparent
|
Sets the parent of current node in the symbolic tree.
|
def sym_setparent(self, parent: 'Symbolic'):
"""Sets the parent of current node in the symbolic tree."""
self._set_raw_attr('_sym_parent', parent)
|
(self, parent: pyglove.core.symbolic.base.Symbolic)
|
41,083 |
pyglove.core.symbolic.list
|
sym_values
|
Iterates the values of symbolic attributes.
|
def sym_values(self) -> Iterator[Any]:
"""Iterates the values of symbolic attributes."""
for i in range(len(self)):
yield super().__getitem__(i)
|
(self) -> Iterator[Any]
|
41,086 |
pyglove.core.symbolic.list
|
use_value_spec
|
Applies a ``pg.typing.List`` as the value spec for the current list.
Args:
value_spec: A List ValueSpec to apply to this List.
If current List is schema-less (whose immediate members are not
validated against schema), and `value_spec` is not None, the value spec
will be applied to the List.
Otherwise, if the current List is already symbolic (whose immediate members
are under the constraint of a List value spec), and `value_spec` is
None, current List will become schema-less. However, the schema
constraints for non-immediate members will remain.
allow_partial: Whether to allow a partial List based on the schema. This
flag overrides the allow_partial flag passed to __init__ for a spec-less
List.
Returns:
Self.
Raises:
ValueError: schema validation failed due to value error.
RuntimeError: List is already bound with another value_spec.
TypeError: type errors during validation.
KeyError: key errors during validation.
|
def use_value_spec(self,
value_spec: Optional[pg_typing.List],
allow_partial: bool = False) -> 'List':
"""Applies a ``pg.List`` as the value spec for current list.
Args:
value_spec: A List ValueSpec to apply to this List.
If current List is schema-less (whose immediate members are not
validated against schema), and `value_spec` is not None, the value spec
will be applied to the List.
Otherwise, if the current List is already symbolic (whose immediate members
are under the constraint of a List value spec), and `value_spec` is
None, current List will become schema-less. However, the schema
constraints for non-immediate members will remain.
allow_partial: Whether to allow a partial List based on the schema. This
flag overrides the allow_partial flag passed to __init__ for a spec-less
List.
Returns:
Self.
Raises:
ValueError: schema validation failed due to value error.
RuntimeError: List is already bound with another value_spec.
TypeError: type errors during validation.
KeyError: key errors during validation.
"""
if value_spec is None:
self._value_spec = None
self._accessor_writable = True
return self
if not isinstance(value_spec, pg_typing.List):
raise ValueError(
self._error_message(
f'Value spec for list must be a `pg.typing.List` object. '
f'Encountered: {value_spec!r}'))
if self._value_spec and self._value_spec != value_spec:
raise RuntimeError(
self._error_message(
f'List is already bound with a different value '
f'spec: {self._value_spec}. New value spec: {value_spec}.'))
self._allow_partial = allow_partial
if flags.is_type_check_enabled():
# NOTE(daiyip): self._value_spec will be set in List.custom_apply method
# called by spec.apply, thus we don't need to set the _value_spec
# explicitly.
value_spec.apply(
self,
allow_partial=base.accepts_partial(self),
child_transform=base.symbolic_transform_fn(self._allow_partial),
root_path=self.sym_path)
else:
self._value_spec = value_spec
return self
|
(self, value_spec: Optional[pyglove.core.typing.value_specs.List], allow_partial: bool = False) -> pyglove.core.symbolic.list.List
|
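A sketch of attaching and detaching a value spec on an existing list (assumes ``pyglove`` as ``pg``)::
  import pyglove as pg
  l = pg.List([1, 2, 3])                  # Schema-less.
  l.use_value_spec(pg.typing.List(pg.typing.Int(min_value=0)))
  try:
    l.append(-1)                          # Now validated.
  except ValueError as e:
    print(e)
  l.use_value_spec(None)                  # Back to schema-less.
  l.append(-1)                            # Accepted again.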
41,087 |
pyglove.core.object_utils.common_traits
|
MaybePartial
|
Interface for classes whose instances can be partially constructed.
A ``MaybePartial`` object is an object whose ``__init__`` method can accept
``pg.MISSING_VALUE`` as its argument values. All symbolic types (see
:class:`pyglove.Symbolic`) implements this interface, as their symbolic
attributes can be partially filled.
Example::
d = pg.Dict(x=pg.MISSING_VALUE, y=1)
assert d.is_partial
assert 'x' in d.missing_values()
|
class MaybePartial(metaclass=abc.ABCMeta):
"""Interface for classes whose instances can be partially constructed.
A ``MaybePartial`` object is an object whose ``__init__`` method can accept
``pg.MISSING_VALUE`` as its argument values. All symbolic types (see
:class:`pyglove.Symbolic`) implements this interface, as their symbolic
attributes can be partially filled.
Example::
d = pg.Dict(x=pg.MISSING_VALUE, y=1)
assert d.is_partial
assert 'x' in d.missing_values()
"""
@property
def is_partial(self) -> bool:
"""Returns True if this object is partial. Otherwise False.
An object is considered partial when any of its required fields is missing,
or at least one member is partial. The subclass can override this method
to provide a more efficient solution.
"""
return len(self.missing_values()) > 0 # pylint: disable=g-explicit-length-test
@abc.abstractmethod
def missing_values(self, flatten: bool = True) -> Dict[str, Any]: # pylint: disable=redefined-outer-name
"""Returns missing values from this object.
Args:
flatten: If True, convert nested structures into a flattened dict using
key path (delimited by '.' and '[]') as key.
Returns:
A dict of key to MISSING_VALUE.
"""
|
()
|
41,088 |
pyglove.core.object_utils.common_traits
|
missing_values
|
Returns missing values from this object.
Args:
flatten: If True, convert nested structures into a flattened dict using
key path (delimited by '.' and '[]') as key.
Returns:
A dict of key to MISSING_VALUE.
|
@abc.abstractmethod
def missing_values(self, flatten: bool = True) -> Dict[str, Any]: # pylint: disable=redefined-outer-name
"""Returns missing values from this object.
Args:
flatten: If True, convert nested structures into a flattened dict using
key path (delimited by '.' and '[]') as key.
Returns:
A dict of key to MISSING_VALUE.
"""
|
(self, flatten: bool = True) -> Dict[str, Any]
|
41,089 |
pyglove.core.symbolic.object
|
Object
|
Base class for symbolic user classes.
PyGlove allows symbolic programming interfaces to be easily added to most
Python classes in two ways:
* Developing a dataclass-like symbolic class by subclassing ``pg.Object``.
* Developing a class as usual and decorate it using :func:`pyglove.symbolize`.
This also works with existing classes.
By directly subclassing ``pg.Object``, programmers can create new symbolic
classes with the least effort. For example::
@pg.members([
# Each tuple in the list defines a symbolic field for `__init__`.
('name', pg.typing.Str().noneable(), 'Name to greet'),
('time_of_day',
pg.typing.Enum('morning', ['morning', 'afternoon', 'evening']),
'Time of the day.')
])
class Greeting(pg.Object):
def __call__(self):
# Values for symbolic fields can be accessed
# as public data members of the symbolic object.
print(f'Good {self.time_of_day}, {self.name}')
# Create an object of Greeting and invoke it,
# which shall print 'Good morning, Bob'.
Greeting('Bob')()
Symbolic fields can be inherited from the base symbolic class: the fields
from the base class will be copied to the subclass in their declaration
order, while the subclass can override the inherited fields with more
restricted validation rules or different default values. For example::
@pg.members([
('x', pg.typing.Int(max_value=10)),
('y', pg.typing.Float(min_value=0))
])
class Foo(pg.Object):
pass
@pg.members([
('x', pg.typing.Int(min_value=1, default=1)),
('z', pg.typing.Str().noneable())
])
class Bar(Foo):
pass
# Printing Bar's schema will show that there are 3 parameters defined:
# x : pg.typing.Int(min_value=1, max_value=10, default=1)
# y : pg.typing.Float(min_value=0)
# z : pg.typing.Str().noneable()
print(Bar.__schema__)
|
class Object(base.Symbolic, metaclass=ObjectMeta):
"""Base class for symbolic user classes.
PyGlove allows symbolic programming interfaces to be easily added to most
Python classes in two ways:
* Developing a dataclass-like symbolic class by subclassing ``pg.Object``.
* Developing a class as usual and decorate it using :func:`pyglove.symbolize`.
This also works with existing classes.
By directly subclassing ``pg.Object``, programmers can create new symbolic
classes with the least effort. For example::
@pg.members([
# Each tuple in the list defines a symbolic field for `__init__`.
('name', pg.typing.Str().noneable(), 'Name to greet'),
('time_of_day',
pg.typing.Enum('morning', ['morning', 'afternoon', 'evening']),
'Time of the day.')
])
class Greeting(pg.Object):
def __call__(self):
# Values for symbolic fields can be accessed
# as public data members of the symbolic object.
print(f'Good {self.time_of_day}, {self.name}')
# Create an object of Greeting and invoke it,
# which shall print 'Good morning, Bob'.
Greeting('Bob')()
Symbolic fields can be inherited from the base symbolic class: the fields
from the base class will be copied to the subclass in their declaration
order, while the subclass can override the inherited fields with more
restricted validation rules or different default values. For example::
@pg.members([
('x', pg.typing.Int(max_value=10)),
('y', pg.typing.Float(min_value=0))
])
class Foo(pg.Object):
pass
@pg.members([
('x', pg.typing.Int(min_value=1, default=1)),
('z', pg.typing.Str().noneable())
])
class Bar(Foo):
pass
# Printing Bar's schema will show that there are 3 parameters defined:
# x : pg.typing.Int(min_value=1, max_value=10, default=1)
# y : pg.typing.Float(min_value=0)
# z : pg.typing.Str().noneable()
print(Bar.__schema__)
"""
# Disable pytype attribute checking.
_HAS_DYNAMIC_ATTRIBUTES = True
# Class property that indicates whether to allow attribute access on symbolic
# members.
allow_symbolic_attribute = True
# Class property that indicates whether to allow to set or rebind symbolic
# members by value assignment.
allow_symbolic_assignment = False
# Allow symbolic mutation using `rebind`.
allow_symbolic_mutation = True
# Class property that indicates whether to use `sym_eq` for `__eq__`,
# `sym_ne` for `__ne__`, and `sym_hash` for `__hash__`.
use_symbolic_comparison = True
# If True, symbolic fields will be inferred from class annotations.
# It's an alternative way of declaring symbolic fields other than
# `pg.members`.
#
# e.g.::
#
# class A(pg.Object):
# x: int
# y: str
#
# Please note that class attributes in UPPER_CASE or starting with '_' will
# not be considered as symbolic fields even if they have annotations.
infer_symbolic_fields_from_annotations = True
# Automatically infer schema during subclass creation time.
auto_schema = True
#
# Customizable class behaviors.
#
def __init_subclass__(cls, user_cls=None):
"""Initializes subclass.
`pg.Object` allows child classes to explicitly call
`pg.Object.__init_subclass__` in their `__init_subclass__`, to bypass other
classes' `__init_subclass__` in multi-inheritance use cases.
Example:
class Subclass(pg.Object, UserClass):
def __init_subclass__(cls):
# This bypasses UserClass.__init_subclass__
pg.Object.__init_subclass__(cls)
Args:
user_cls: The source class that calls this class method.
"""
object_utils.ensure_explicit_method_override(
cls.__init__,
(
'`pg.Object.__init__` is a PyGlove managed method. For setting up '
'the class initialization logic, please override `_on_bound()` or '
'`_on_init()`. If you do have a need to override `__init__` and '
'know the implications, please decorate your overridden method '
'with `@pg.explicit_method_override`.'
))
# Set `__serialization_key__` before JSONConvertible.__init_subclass__
# is called.
setattr(cls, '__serialization_key__', cls.__type_name__)
super().__init_subclass__()
user_cls = user_cls or cls
if user_cls.auto_schema:
# Inherit schema from base classes that have schema
# in the order of inheritance.
# TODO(daiyip): size of base_schema_list can be reduced
# by looking at their inheritance chains.
base_schema_list = []
for base_cls in user_cls.__bases__:
base_schema = getattr(base_cls, '__schema__', None)
if isinstance(base_schema, pg_typing.Schema):
base_schema_list.append(base_schema)
new_fields = user_cls._infer_fields_from_annotations()
cls_schema = schema_utils.formalize_schema(
pg_typing.create_schema(
new_fields,
name=user_cls.__type_name__,
base_schema_list=base_schema_list,
allow_nonconst_keys=True,
metadata={},
)
)
# NOTE(daiyip): When new fields are added through class attributes,
# we invalidate `init_arg_list` so PyGlove can recompute it based
# on its schema during `apply_schema`. Otherwise, we inherit the
# `init_arg_list` from the base class.
# TODO(daiyip): detect new fields based on the differences from the base
# schema.
if new_fields:
cls_schema.metadata['init_arg_list'] = None
user_cls.apply_schema(cls_schema)
@classmethod
def _on_schema_update(cls):
"""Customizable trait: handling schema change."""
# Update the default value for each field after schema is updated. This is
# because users may change a field's default value via a class attribute.
cls._update_default_values_from_class_attributes() # pylint: disable=no-value-for-parameter
# Update all schema-based signatures.
cls._update_signatures_based_on_schema()
# Expose symbolic attributes as object attributes when being asked.
if cls.allow_symbolic_attribute:
cls._generate_sym_attributes()
@classmethod
def _update_signatures_based_on_schema(cls):
"""Customizable trait: updates method signatures upon schema change."""
if cls.__init__ is not Object.__init__ and not hasattr(
cls.__init__, '__sym_generated_init__'
):
# We only generate `__init__` for `pg.Object` subclasses that do not
# override the `__init__` method.
# Functor and ClassWrapper override their `__init__` methods, therefore
# they need to synchronize the __init__ signature by themselves.
return
signature = pg_typing.Signature.from_schema(
cls.__schema__, cls.__module__, '__init__', f'{cls.__name__}.__init__'
)
pseudo_init = signature.make_function(['pass'])
# Create a new `__init__` that passes through all the arguments to
# `pg.Object.__init__`. This is needed for each class to use a different
# signature.
@object_utils.explicit_method_override
@functools.wraps(pseudo_init)
def _init(self, *args, **kwargs):
# We pass through the arguments to `Object.__init__` instead of
# `super()` since a parent class that uses a generated `__init__` will
# be delegated to `Object.__init__` eventually. Therefore, directly
# calling `Object.__init__` is equivalent to calling `super().__init__`.
Object.__init__(self, *args, **kwargs)
setattr(_init, '__sym_generated_init__', True)
setattr(cls, '__init__', _init)
@classmethod
def _generate_sym_attributes(cls):
"""Customizable trait: logics for generating symbolic attributes.."""
for key, field in cls.__schema__.fields.items():
if isinstance(key, pg_typing.ConstStrKey):
attr_name = str(key)
attr_value = cls.__dict__.get(attr_name, pg_typing.MISSING_VALUE)
if attr_value == pg_typing.MISSING_VALUE or (
not inspect.isfunction(attr_value)
and not isinstance(attr_value, property)
):
setattr(cls, attr_name, cls._create_sym_attribute(attr_name, field))
@classmethod
def _create_sym_attribute(cls, attr_name, field):
"""Customizable trait: template of single symbolic attribute."""
return property(
object_utils.make_function(
attr_name,
['self'],
[f"return self.sym_inferred('{attr_name}')"],
return_type=field.value.annotation,
)
)
@classmethod
def _begin_annotation_inference(cls) -> None:
"""Event that is triggered before annotation inference begins."""
@classmethod
def _end_annotation_inference(
cls, fields: List[pg_typing.Field]
) -> List[pg_typing.Field]:
"""Event that is triggered after annotation inference ends."""
return fields
#
# Class methods.
#
@classmethod
def partial(cls, *args, **kwargs) -> 'Object':
"""Class method that creates a partial object of current class."""
return cls(*args, allow_partial=True, **kwargs)
@classmethod
def from_json(
cls,
json_value: Any,
*,
allow_partial: bool = False,
root_path: Optional[object_utils.KeyPath] = None) -> 'Object':
"""Class method that load an symbolic Object from a JSON value.
Example::
@pg.members([
('f1', pg.typing.Int()),
('f2', pg.typing.Dict([
('f21', pg.typing.Bool())
]))
])
class Foo(pg.Object):
pass
foo = Foo.from_json({
'f1': 1,
'f2': {
'f21': True
}
})
# or
foo2 = symbolic.from_json({
'_type': '__main__.Foo',
'f1': 1,
'f2': {
'f21': True
}
})
assert foo == foo2
Args:
json_value: Input JSON value, only JSON dict is acceptable.
allow_partial: Whether to allow fields of the object to be partial.
root_path: KeyPath of loaded object in its object tree.
Returns:
A symbolic Object instance.
"""
return cls(allow_partial=allow_partial, root_path=root_path, **{
k: base.from_json(v, allow_partial=allow_partial)
for k, v in json_value.items()
})
@object_utils.explicit_method_override
def __init__(
self,
*args,
allow_partial: bool = False,
sealed: Optional[bool] = None,
root_path: Optional[object_utils.KeyPath] = None,
explicit_init: bool = False,
**kwargs):
"""Create an Object instance.
Args:
*args: positional arguments.
allow_partial: If True, the object can be partial.
sealed: If True, seal the object from future modification (unless under
a `pg.seal(False)` context manager). If False, treat the object as
unsealed. If None, it's determined by `cls.allow_symbolic_mutation`.
root_path: The symbolic path for current object. By default it's None,
which indicates that newly constructed object does not have a parent.
explicit_init: Should set to `True` when `__init__` is called via
`pg.Object.__init__` instead of `super().__init__`.
**kwargs: key/value arguments that align with the schema. All required
keys in the schema must be specified, and values should be acceptable
according to their value spec.
Raises:
KeyError: When required key(s) are missing.
ValueError: When value(s) are not acceptable by their value spec.
"""
# Placeholder for Google-internal usage instrumentation.
if sealed is None:
sealed = not self.__class__.allow_symbolic_mutation
if not isinstance(allow_partial, bool):
raise TypeError(
f'Expect bool type for argument \'allow_partial\' in '
f'symbolic.Object.__init__ but encountered {allow_partial}.')
# We delay the seal attempt until members are all set.
super().__init__(
allow_partial=allow_partial,
accessor_writable=self.__class__.allow_symbolic_assignment,
sealed=sealed,
root_path=root_path,
init_super=not explicit_init)
# Fill field_args and init_args from **kwargs.
_, unmatched_keys = self.__class__.__schema__.resolve(list(kwargs.keys()))
if unmatched_keys:
arg_phrase = object_utils.auto_plural(len(unmatched_keys), 'argument')
keys_str = object_utils.comma_delimited_str(unmatched_keys)
raise TypeError(
f'{self.__class__.__name__}.__init__() got unexpected '
f'keyword {arg_phrase}: {keys_str}')
field_args = {}
# Fill field_args and init_args from *args.
init_arg_names = self.__class__.init_arg_list
if args:
if not self.__class__.__schema__.fields:
raise TypeError(f'{self.__class__.__name__}() takes no arguments.')
elif init_arg_names and init_arg_names[-1].startswith('*'):
vararg_name = init_arg_names[-1][1:]
vararg_field = self.__class__.__schema__.get_field(vararg_name)
assert vararg_field is not None
num_named_args = len(init_arg_names) - 1
field_args[vararg_name] = list(args[num_named_args:])
args = args[:num_named_args]
elif len(args) > len(init_arg_names):
arg_phrase = object_utils.auto_plural(len(init_arg_names), 'argument')
was_phrase = object_utils.auto_plural(len(args), 'was', 'were')
raise TypeError(
f'{self.__class__.__name__}.__init__() takes '
f'{len(init_arg_names)} positional {arg_phrase} but {len(args)} '
f'{was_phrase} given.')
for i, arg_value in enumerate(args):
arg_name = init_arg_names[i]
field_args[arg_name] = arg_value
for k, v in kwargs.items():
if k in field_args:
values_str = object_utils.comma_delimited_str([field_args[k], v])
raise TypeError(
f'{self.__class__.__name__}.__init__() got multiple values for '
f'argument \'{k}\': {values_str}.')
field_args[k] = v
# Check missing arguments when partial binding is disallowed.
if not base.accepts_partial(self):
missing_args = []
for field in self.__class__.__schema__.fields.values():
if (not field.value.has_default
and isinstance(field.key, pg_typing.ConstStrKey)
and field.key not in field_args):
missing_args.append(str(field.key))
if missing_args:
arg_phrase = object_utils.auto_plural(len(missing_args), 'argument')
keys_str = object_utils.comma_delimited_str(missing_args)
raise TypeError(
f'{self.__class__.__name__}.__init__() missing {len(missing_args)} '
f'required {arg_phrase}: {keys_str}.')
self._set_raw_attr(
'_sym_attributes',
pg_dict.Dict(
field_args,
value_spec=self.__class__.sym_fields,
allow_partial=allow_partial,
sealed=sealed,
# NOTE(daiyip): Accessor writable is honored by
# `Object.__setattr__` thus we could always make `_sym_attributes`
# accessor writable. This prevents a child object's attribute access
# from being changed when it's attached to a parent whose symbolic
# attributes could not be directly written.
accessor_writable=True,
root_path=root_path,
as_object_attributes_container=True,
),
)
self._sym_attributes.sym_setparent(self)
self._on_init()
self.seal(sealed)
#
# Events that subclasses can override.
#
def _on_init(self):
"""Event that is triggered at then end of __init__."""
self._on_bound()
def _on_bound(self) -> None:
"""Event that is triggered when any value in the subtree are set/updated.
NOTE(daiyip): This is the best place to set derived members from members
registered by the schema. It's called when any value in the sub-tree is
modified, thus making sure derived members are up-to-date.
When derived members are expensive to create/update, you can implement
_init, _on_rebound, _on_subtree_rebound to update derived members only when
they are impacted.
_on_bound is not called on a per-field basis; it's called at most once
during a rebind call (though many fields may be updated)
and during __init__.
"""
def _on_change(self,
field_updates: Dict[object_utils.KeyPath, base.FieldUpdate]):
"""Event that is triggered when field values in the subtree are updated.
This event will be called
* On a per-field basis when the object is modified via attribute assignment.
* In batch when multiple fields are modified via the `rebind` method.
When a field in an object tree is updated, all ancestors' `_on_change` event
will be triggered in order, from the nearest one to furthest one.
Args:
field_updates: Updates made to the subtree. Key path is relative to
current object.
Returns:
It calls `_on_bound` and returns its return value.
"""
del field_updates
return self._on_bound()
def _on_path_change(
self, old_path: object_utils.KeyPath, new_path: object_utils.KeyPath):
"""Event that is triggered after the symbolic path changes."""
del old_path, new_path
def _on_parent_change(
self,
old_parent: Optional[base.Symbolic],
new_parent: Optional[base.Symbolic]):
"""Event that is triggered after the symbolic parent changes."""
del old_parent, new_parent
@property
def sym_init_args(self) -> pg_dict.Dict:
"""Returns the symbolic attributes which are also the `__init__` args.
Returns:
A symbolic Dict as evaluated symbolic attributes, meaning that all
``pg.ContextValue`` will be resolved.
"""
return self._sym_attributes
def sym_hasattr(self, key: Union[str, int]) -> bool:
"""Tests if a symbolic attribute exists."""
if key == '_sym_attributes':
raise ValueError(
f'{self.__class__.__name__}.__init__ should call `super().__init__`.')
return (
isinstance(key, str)
and not key.startswith('_')
and key in self._sym_attributes
)
def sym_attr_field(
self, key: Union[str, int]
) -> Optional[pg_typing.Field]:
"""Returns the field definition for a symbolic attribute."""
return self._sym_attributes.sym_attr_field(key)
def sym_keys(self) -> Iterator[str]:
"""Iterates the keys of symbolic attributes."""
return self._sym_attributes.sym_keys()
def sym_values(self):
"""Iterates the values of symbolic attributes."""
return self._sym_attributes.sym_values()
def sym_items(self):
"""Iterates the (key, value) pairs of symbolic attributes."""
return self._sym_attributes.sym_items()
def sym_eq(self, other: Any) -> bool:
"""Tests symbolic equality."""
return self is other or (
type(self) is type(other) and base.eq(
self._sym_attributes, other._sym_attributes)) # pylint: disable=protected-access
def sym_lt(self, other: Any) -> bool:
"""Tests symbolic less-than."""
if type(self) is not type(other):
return base.lt(self, other)
return base.lt(self._sym_attributes, other._sym_attributes) # pylint: disable=protected-access
def sym_hash(self) -> int:
"""Symbolically hashing."""
return base.sym_hash((self.__class__, base.sym_hash(self._sym_attributes)))
def sym_setparent(self, parent: base.Symbolic):
"""Sets the parent of current node in the symbolic tree."""
old_parent = self.sym_parent
super().sym_setparent(parent)
if old_parent is not parent:
self._on_parent_change(old_parent, parent)
def _sym_getattr( # pytype: disable=signature-mismatch # overriding-parameter-type-checks
self, key: str) -> Any:
"""Get symbolic field by key."""
return self._sym_attributes.sym_getattr(key)
def _sym_rebind(
self, path_value_pairs: Dict[object_utils.KeyPath, Any]
) -> List[base.FieldUpdate]:
"""Rebind current object using object-form members."""
if base.treats_as_sealed(self):
raise base.WritePermissionError(
f'Cannot rebind a sealed {self.__class__.__name__}.')
return self._sym_attributes._sym_rebind(path_value_pairs) # pylint: disable=protected-access
def _sym_clone(self, deep: bool, memo: Any = None) -> 'Object':
"""Copy flags."""
kwargs = dict()
for k, v in self._sym_attributes.sym_items():
if deep or isinstance(v, base.Symbolic):
v = base.clone(v, deep, memo)
kwargs[k] = v
return self.__class__(allow_partial=self._allow_partial,
sealed=self._sealed,
**kwargs) # pytype: disable=not-instantiable
def _sym_missing(self) -> Dict[str, Any]:
"""Returns missing values."""
# Invalidate the cache of child attributes' missing values before calling
# `Dict.sym_missing`.
setattr(self._sym_attributes, '_sym_missing_values', None)
return self._sym_attributes.sym_missing(flatten=False)
def _sym_nondefault(self) -> Dict[str, Any]:
"""Returns non-default values."""
# Invalidate the cache of child attributes' non-default values before
# calling `Dict.sym_nondefault`.
setattr(self._sym_attributes, '_sym_nondefault_values', None)
return self._sym_attributes.sym_nondefault(flatten=False)
def seal(self, sealed: bool = True) -> 'Object':
"""Seal or unseal current object from further modification."""
self._sym_attributes.seal(sealed)
super().seal(sealed)
return self
def _update_children_paths(
self,
old_path: object_utils.KeyPath,
new_path: object_utils.KeyPath) -> None:
"""Update children paths according to root_path of current node."""
self._sym_attributes.sym_setpath(new_path)
self._on_path_change(old_path, new_path)
def _set_item_without_permission_check( # pytype: disable=signature-mismatch # overriding-parameter-type-checks
self, key: str, value: Any) -> Optional[base.FieldUpdate]:
"""Set item without permission check."""
return self._sym_attributes._set_item_without_permission_check(key, value) # pylint: disable=protected-access
@property
def _subscribes_field_updates(self) -> bool:
"""Returns True if current object subscribes field updates.
For pg.Object, this returns True only when _on_change is overridden
by a subclass.
"""
return self._on_change.__code__ is not Object._on_change.__code__ # pytype: disable=attribute-error
def _init_kwargs(self) -> typing.Dict[str, Any]:
kwargs = super()._init_kwargs()
kwargs.update(self._sym_attributes)
return kwargs
def __getstate__(self) -> Dict[str, Any]:
"""Customizes pickle.dump."""
return dict(kwargs=self._init_kwargs())
def __setstate__(self, state) -> None:
"""Customizes pickle.load."""
self.__init__(**state['kwargs'])
def __setattr__(self, name: str, value: Any) -> None:
"""Set field value by attribute."""
# NOTE(daiyip): two types of members are treated as regular members:
# 1) All private members which prefixed with '_'.
# 2) Public members that are not declared as symbolic members.
if (
not self.allow_symbolic_attribute
or not self.__class__.__schema__.get_field(name)
or name.startswith('_')
):
super().__setattr__(name, value)
else:
if base.treats_as_sealed(self):
raise base.WritePermissionError(
self._error_message(
f'Cannot set attribute {name!r}: object is sealed.'))
if not base.writtable_via_accessors(self):
raise base.WritePermissionError(
self._error_message(
f'Cannot set attribute of <class {self.__class__.__name__}> '
f'while `{self.__class__.__name__}.allow_symbolic_assignment` '
f'is set to False or under `pg.as_sealed` context.'))
self._sym_attributes[name] = value
def __getattribute__(self, name: str) -> Any:
"""Override to accomondate symbolic attributes with variable keys."""
try:
return super().__getattribute__(name)
except AttributeError as error:
if not self.allow_symbolic_attribute or not self.sym_hasattr(name):
raise error
return self.sym_inferred(name)
def __eq__(self, other: Any) -> bool:
"""Operator==."""
if self.use_symbolic_comparison:
return self.sym_eq(other)
return super().__eq__(other)
def __ne__(self, other: Any) -> bool:
"""Operator!=."""
r = self.__eq__(other)
if r is NotImplemented:
return r
return not r
def __hash__(self) -> int:
"""Hashing function."""
if self.use_symbolic_comparison:
return self.sym_hash()
return super().__hash__()
def sym_jsonify(self, **kwargs) -> object_utils.JSONValueType:
"""Converts current object to a dict of plain Python objects."""
return object_utils.merge([
{
object_utils.JSONConvertible.TYPE_NAME_KEY: (
self.__class__.__serialization_key__
)
},
self._sym_attributes.to_json(**kwargs),
])
def format(self,
compact: bool = False,
verbose: bool = False,
root_indent: int = 0,
**kwargs) -> str:
"""Formats this object."""
return self._sym_attributes.format(
compact,
verbose,
root_indent,
cls_name=self.__class__.__name__,
key_as_attribute=True,
bracket_type=object_utils.BracketType.ROUND,
**kwargs)
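A compact sketch tying the behaviors above together (assumes ``import pyglove as pg``; the class and member names are illustrative)::
  class Point(pg.Object):
    # Symbolic fields inferred from class annotations (`auto_schema`).
    x: int = 0
    y: int = 0
    def _on_bound(self):
      super()._on_bound()
      # Derived (non-symbolic) member, refreshed on every (re)bind.
      self._norm = (self.x ** 2 + self.y ** 2) ** 0.5
  p = Point(x=3, y=4)
  assert p._norm == 5.0
  p.rebind(x=6, y=8)   # Triggers `_on_bound` again.
  assert p._norm == 10.0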
|
(*args, allow_partial: bool = False, sealed: Optional[bool] = None, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None, explicit_init: bool = False, **kwargs)
|
41,097 |
pyglove.core.symbolic.object
|
__init__
|
Create an Object instance.
Args:
*args: positional arguments.
allow_partial: If True, the object can be partial.
sealed: If True, seal the object from future modification (unless under
a `pg.seal(False)` context manager). If False, treat the object as
unsealed. If None, it's determined by `cls.allow_symbolic_mutation`.
root_path: The symbolic path for current object. By default it's None,
which indicates that newly constructed object does not have a parent.
explicit_init: Should set to `True` when `__init__` is called via
`pg.Object.__init__` instead of `super().__init__`.
**kwargs: key/value arguments that align with the schema. All required
keys in the schema must be specified, and values should be acceptable
according to their value spec.
Raises:
KeyError: When required key(s) are missing.
ValueError: When value(s) are not acceptable by their value spec.
|
@object_utils.explicit_method_override
def __init__(
self,
*args,
allow_partial: bool = False,
sealed: Optional[bool] = None,
root_path: Optional[object_utils.KeyPath] = None,
explicit_init: bool = False,
**kwargs):
"""Create an Object instance.
Args:
*args: positional arguments.
allow_partial: If True, the object can be partial.
sealed: If True, seal the object from future modification (unless under
a `pg.seal(False)` context manager). If False, treat the object as
unsealed. If None, it's determined by `cls.allow_symbolic_mutation`.
root_path: The symbolic path for current object. By default it's None,
which indicates that newly constructed object does not have a parent.
explicit_init: Should set to `True` when `__init__` is called via
`pg.Object.__init__` instead of `super().__init__`.
**kwargs: key/value arguments that align with the schema. All required
keys in the schema must be specified, and values should be acceptable
according to their value spec.
Raises:
KeyError: When required key(s) are missing.
ValueError: When value(s) are not acceptable by their value spec.
"""
# Placeholder for Google-internal usage instrumentation.
if sealed is None:
sealed = not self.__class__.allow_symbolic_mutation
if not isinstance(allow_partial, bool):
raise TypeError(
f'Expect bool type for argument \'allow_partial\' in '
f'symbolic.Object.__init__ but encountered {allow_partial}.')
# We delay the seal attempt until members are all set.
super().__init__(
allow_partial=allow_partial,
accessor_writable=self.__class__.allow_symbolic_assignment,
sealed=sealed,
root_path=root_path,
init_super=not explicit_init)
# Fill field_args and init_args from **kwargs.
_, unmatched_keys = self.__class__.__schema__.resolve(list(kwargs.keys()))
if unmatched_keys:
arg_phrase = object_utils.auto_plural(len(unmatched_keys), 'argument')
keys_str = object_utils.comma_delimited_str(unmatched_keys)
raise TypeError(
f'{self.__class__.__name__}.__init__() got unexpected '
f'keyword {arg_phrase}: {keys_str}')
field_args = {}
# Fill field_args and init_args from *args.
init_arg_names = self.__class__.init_arg_list
if args:
if not self.__class__.__schema__.fields:
raise TypeError(f'{self.__class__.__name__}() takes no arguments.')
elif init_arg_names and init_arg_names[-1].startswith('*'):
vararg_name = init_arg_names[-1][1:]
vararg_field = self.__class__.__schema__.get_field(vararg_name)
assert vararg_field is not None
num_named_args = len(init_arg_names) - 1
field_args[vararg_name] = list(args[num_named_args:])
args = args[:num_named_args]
elif len(args) > len(init_arg_names):
arg_phrase = object_utils.auto_plural(len(init_arg_names), 'argument')
was_phrase = object_utils.auto_plural(len(args), 'was', 'were')
raise TypeError(
f'{self.__class__.__name__}.__init__() takes '
f'{len(init_arg_names)} positional {arg_phrase} but {len(args)} '
f'{was_phrase} given.')
for i, arg_value in enumerate(args):
arg_name = init_arg_names[i]
field_args[arg_name] = arg_value
for k, v in kwargs.items():
if k in field_args:
values_str = object_utils.comma_delimited_str([field_args[k], v])
raise TypeError(
f'{self.__class__.__name__}.__init__() got multiple values for '
f'argument \'{k}\': {values_str}.')
field_args[k] = v
# Check missing arguments when partial binding is disallowed.
if not base.accepts_partial(self):
missing_args = []
for field in self.__class__.__schema__.fields.values():
if (not field.value.has_default
and isinstance(field.key, pg_typing.ConstStrKey)
and field.key not in field_args):
missing_args.append(str(field.key))
if missing_args:
arg_phrase = object_utils.auto_plural(len(missing_args), 'argument')
keys_str = object_utils.comma_delimited_str(missing_args)
raise TypeError(
f'{self.__class__.__name__}.__init__() missing {len(missing_args)} '
f'required {arg_phrase}: {keys_str}.')
self._set_raw_attr(
'_sym_attributes',
pg_dict.Dict(
field_args,
value_spec=self.__class__.sym_fields,
allow_partial=allow_partial,
sealed=sealed,
# NOTE(daiyip): Accessor writable is honored by
# `Object.__setattr__` thus we could always make `_sym_attributes`
# accessor writable. This prevents a child object's attribute access
# from being changed when it's attached to a parent whose symbolic
# attributes could not be directly written.
accessor_writable=True,
root_path=root_path,
as_object_attributes_container=True,
),
)
self._sym_attributes.sym_setparent(self)
self._on_init()
self.seal(sealed)
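A short sketch of the construction flags (illustrative; assumes ``import pyglove as pg``)::
  class A(pg.Object):
    x: int
  a = A.partial()       # Equivalent to A(allow_partial=True).
  assert a.is_partial   # 'x' is missing.
  b = A(1, sealed=True)
  # b.rebind(x=2) would now raise a write-permission error.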
|
(self, *args, allow_partial: bool = False, sealed: Optional[bool] = None, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None, explicit_init: bool = False, **kwargs)
|
41,162 |
pyglove.core.patching.object_factory
|
ObjectFactory
|
A factory to create symbolic object from a base value and patches.
Args:
value_type: Type of return value.
base_value: An instance of `value_type`,
or a callable object that produces an instance of `value_type`,
or a string as the path to the serialized value.
patches: Optional patching rules. See :func:`patch` for details.
params_override: A rebind dict (or a JSON string as serialized rebind dict)
as an additional patch to the value.
Returns:
Value after applying `patches` and `params_override` based on `base_value`.
|
from pyglove.core.patching.object_factory import ObjectFactory
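A hypothetical usage sketch based on the description above (the `Model` class and the exact call form are assumptions, not from the source)::
  class Model(pg.Object):
    lr: float = 0.1
  factory = ObjectFactory(Model, Model(lr=0.1), params_override={'lr': 0.2})
  model = factory()   # ObjectFactory is a functor; calling it yields the value.
  assert model.lr == 0.2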
| null |
41,181 |
pyglove.core.symbolic.functor
|
_call
| null |
def functor_class(
func: types.FunctionType,
args: Optional[List[Union[
Tuple[Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str],
Tuple[Tuple[str, pg_typing.KeySpec], pg_typing.ValueSpec, str, Any]]]
] = None, # pylint: disable=bad-continuation
returns: Optional[pg_typing.ValueSpec] = None,
base_class: Optional[Type[Functor]] = None,
*,
auto_doc: bool = False,
auto_typing: bool = False,
serialization_key: Optional[str] = None,
additional_keys: Optional[List[str]] = None,
add_to_registry: bool = False,
) -> Type[Functor]:
"""Returns a functor class from a function.
Args:
func: Function to be wrapped into a functor.
args: Symbolic args specification. `args` is a list of tuples, each
describes an argument from the input function. Each tuple is in the format of
(<argument-name>, <value-spec>, [description], [metadata-objects]).
`argument-name` - a `str` or `pg_typing.StrKey` object. When
`pg_typing.StrKey` is used, it describes the wildcard keyword argument.
`value-spec` - a `pg_typing.ValueSpec` object or equivalent, e.g.
primitive values which will be converted to ValueSpec implementation
according to its type and used as its default value. `description` - a
string to describe the agument. `metadata-objects` - an optional list of
any type, which can be used to generate code according to the schema.
There are notable rules in filling the `args`: 1) When `args` is None or
arguments from the function signature are missing from it, `schema.Field`
for these fields will be automatically generated and inserted into `args`.
That being said, every argument in the input function will have a
`schema.Field` counterpart in `Functor.__schema__.fields`, sorted by the
declaration order of each argument in the function signature (rather than
the order in `args`). 2) Default argument values are specified along with
the function definition, as in regular Python functions, instead of being
set at the `schema.Field` level. But validation rules can be set using
`args` and applied to argument values.
For example::
@pg.functor([('c', pg.typing.Int(min_value=0), 'Arg c')])
def foo(a, b, c=1, **kwargs):
return a + b + c + sum(kwargs.values())
assert foo.schema.fields() == [
pg.typing.Field('a', pg.Any(), 'Argument a.'),
pg.typing.Field('b', pg.Any(), 'Argument b.'),
pg.typing.Field('c', pg.typing.Int(), 'Arg c.'),
pg.typing.Field(
pg.typing.StrKey(), pg.Any(), 'Other arguments.')
]
# Prebind a=1, b=2, with default value c=1.
assert foo(1, 2)() == 4
returns: Optional schema specification for the return value.
base_class: Optional base class (derived from `symbolic.Functor`). If None,
returned type will inherit from `Functor` directly.
auto_doc: If True, the descriptions of argument fields will be inherited
from the function docstring if they are not explicitly specified through
``args``.
auto_typing: If True, the value spec for constraining each argument will be
inferred from its annotation. Otherwise the value specs for all arguments
will be ``pg.typing.Any()``.
serialization_key: An optional string to be used as the serialization key
for the class during `sym_jsonify`. If None, `cls.__type_name__` will be
used. This is introduced for scenarios where we want to relocate a class;
before the downstream can recognize the new location, we need the class to
be serialized using the previous key.
additional_keys: An optional list of strings as additional keys to
deserialize an object of the registered class. This can be useful when we
need to relocate or rename the registered class while being able to load
existing serialized JSON values.
add_to_registry: If True, the newly created functor class will be added to
the registry for deserialization.
Returns:
`symbolic.Functor` subclass that wraps input function.
Raises:
KeyError: names of symbolic arguments are not compatible with
function signature.
TypeError: types of symbolic arguments are not compatible with
function signature.
ValueError: default values of symbolic arguments are not compatible
with function signature.
"""
if not inspect.isfunction(func):
raise TypeError(f'{func!r} is not a function.')
class _Functor(base_class or Functor):
"""Functor wrapper for input function."""
# The schema for function-based Functor will be inferred from the function
# signature. Therefore we do not infer the schema automatically during class
# creation.
auto_schema = False
# Do not infer symbolic fields from annotations, since this functor is
# created from function definition which does not have class-level
# attributes.
infer_symbolic_fields_from_annotations = False
def _call(self, *args, **kwargs):
return func(*args, **kwargs)
cls = typing.cast(Type[Functor], _Functor)
cls.__name__ = func.__name__
cls.__qualname__ = func.__qualname__
cls.__module__ = getattr(func, '__module__', 'wrapper')
cls.__doc__ = func.__doc__
# Enable automatic registration for subclass.
cls.auto_register = True
# Apply function schema.
schema = schema_utils.function_schema(
func, args, returns, auto_doc=auto_doc, auto_typing=auto_typing)
cls.apply_schema(schema)
# Register functor class for deserialization if needed.
if add_to_registry:
cls.register_for_deserialization(serialization_key, additional_keys)
return cls
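A small illustrative sketch of wrapping a plain function (the `scale` function is made up)::
  def scale(x, factor=2):
    return x * factor
  Scale = functor_class(scale)
  f = Scale(factor=3)   # Prebind `factor`; `x` is supplied at call time.
  assert f(5) == 15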
|
(self, *args, **kwargs)
|
41,242 |
pyglove.core.symbolic.origin
|
Origin
|
Class that represents the origin of a symbolic value.
Origin is used for debugging the creation chain of a symbolic value, as
well as keeping track of the factory or builder in creational design patterns.
An `Origin` object records the source value, a string tag, and optional
stack information on where a symbolic value is created.
Built-in tags are '__init__', 'clone', 'deepclone' and 'return'.
Users can pass custom tags to the `sym_setorigin` method of a symbolic value
for tracking its source in their own scenarios.
When origin tracking is enabled by calling `pg.track_origin(True)`, the
`sym_setorigin` method of symbolic values will be automatically called during
object creation, cloning or being returned from a functor. The stack
information can be obtained by `origin.stack` or `origin.stacktrace`.
|
class Origin(object_utils.Formattable):
"""Class that represents the origin of a symbolic value.
Origin is used for debugging the creation chain of a symbolic value, as
well as keeping track of the factory or builder in creational design patterns.
An `Origin` object records the source value, a string tag, and optional
stack information on where a symbolic value is created.
Built-in tags are '__init__', 'clone', 'deepclone' and 'return'.
Users can pass custom tags to the `sym_setorigin` method of a symbolic value
for tracking its source in their own scenarios.
When origin tracking is enabled by calling `pg.track_origin(True)`, the
`sym_setorigin` method of symbolic values will be automatically called during
object creation, cloning or being returned from a functor. The stack
information can be obtained by `origin.stack` or `origin.stacktrace`.
"""
def __init__(self,
source: Any,
tag: str,
stacktrace: Optional[bool] = None,
stacklimit: Optional[int] = None,
stacktop: int = -1):
"""Constructor.
Args:
source: Source value for the origin.
tag: A descriptive tag of the origin. Built-in tags are:
'__init__', 'clone', 'deepclone', 'return'. Users can manually
call `sym_setorigin` with custom tag value.
stacktrace: If True, enable stack trace for the origin. If None, enable
stack trace if `pg.track_origin()` is called. Otherwise stack trace is
disabled.
stacklimit: An optional integer to limit the stack depth. If None, it's
determined by the value passed to `pg.set_origin_stacktrace_limit`,
which is 10 by default.
stacktop: A negative integer to indicate the stack top among the stack
frames that we want to present to the user. By default it's 2 levels up from
the stack within the current `sym_setorigin` call.
"""
if not isinstance(tag, str):
raise ValueError(f'`tag` must be a string. Encountered: {tag!r}.')
self._source = source
self._tag = tag
self._stack = None
self._stacktrace = None
if stacktrace is None:
stacktrace = flags.is_tracking_origin()
if stacklimit is None:
stacklimit = flags.get_origin_stacktrace_limit()
if stacktrace:
self._stack = traceback.extract_stack(limit=stacklimit - stacktop)
if stacktop < 0:
self._stack = self._stack[:stacktop]
@property
def source(self) -> Any:
"""Returns the source object."""
return self._source
@property
def root(self) -> 'Origin':
"""Returns the root source of the origin."""
current = self
while True:
parent = getattr(current.source, 'sym_origin', None)
if parent is None:
break
current = parent
return current
def history(
self,
condition: Optional[Callable[['Origin'], bool]] = None) -> List['Origin']:
"""Returns a history of origins with an optional filter.
Args:
condition: An optional callable object with signature
(origin) -> should_list. If None, all origins will be listed.
Returns:
A list of filtered origin from the earliest (root) to the most recent.
"""
condition = condition or (lambda o: True)
current = self
history = []
while current is not None:
if condition(current):
history.append(current)
current = getattr(current.source, 'sym_origin', None)
history.reverse()
return history
@property
def tag(self) -> str:
"""Returns tag."""
return self._tag
@property
def stack(self) -> Optional[List[traceback.FrameSummary]]:
"""Returns the frame summary of original stack."""
return self._stack
@property
def stacktrace(self) -> Optional[str]:
"""Returns stack trace string."""
if self._stack is None:
return None
if self._stacktrace is None:
self._stacktrace = ''.join(traceback.format_list(self._stack))
return self._stacktrace
def chain(self, tag: Optional[str] = None) -> List['Origin']:
"""Get the origin list from the neareast to the farthest filtered by tag."""
origins = []
o = self
while o is not None:
if tag is None or tag == o.tag:
origins.append(o)
o = getattr(o.source, 'sym_origin', None)
return origins
def format(self,
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
**kwargs) -> str:
"""Formats this object."""
if isinstance(self._source, (str, type(None))):
source_str = object_utils.quote_if_str(self._source)
else:
source_info = object_utils.format(
self._source, compact, verbose, root_indent + 1, **kwargs)
source_str = f'{source_info} at 0x{id(self._source):8x}'
details = object_utils.kvlist_str([
('tag', object_utils.quote_if_str(self._tag), None),
('source', source_str, None),
])
return f'{self.__class__.__name__}({details})'
def __eq__(self, other: Any) -> bool:
"""Operator ==."""
if not isinstance(other, self.__class__):
return False
return self._source is other.source and self._tag == other.tag
def __ne__(self, other: Any) -> bool:
"""Operator !=."""
return not self.__eq__(other)
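A sketch of origin tracking in action (illustrative; assumes ``import pyglove as pg``)::
  class A(pg.Object):
    x: int
  with pg.track_origin():
    a = A(1)
    b = a.clone()
  assert b.sym_origin.tag == 'clone'
  assert b.sym_origin.source is a
  assert b.sym_origin.root.tag == '__init__'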
|
(source: Any, tag: str, stacktrace: Optional[bool] = None, stacklimit: Optional[int] = None, stacktop: int = -1)
|
41,243 |
pyglove.core.symbolic.origin
|
__eq__
|
Operator ==.
|
def __eq__(self, other: Any) -> bool:
"""Operator ==."""
if not isinstance(other, self.__class__):
return False
return self._source is other.source and self._tag == other.tag
|
(self, other: Any) -> bool
|
41,244 |
pyglove.core.symbolic.origin
|
__init__
|
Constructor.
Args:
source: Source value for the origin.
tag: A descriptive tag of the origin. Built-in tags are:
'__init__', 'clone', 'deepclone', 'return'. Users can manually
call `sym_setorigin` with custom tag value.
stacktrace: If True, enable stack trace for the origin. If None, enable
stack trace if `pg.track_origin()` is called. Otherwise stack trace is
disabled.
stacklimit: An optional integer to limit the stack depth. If None, it's
determined by the value passed to `pg.set_origin_stacktrace_limit`,
which is 10 by default.
stacktop: A negative integer to indicate the stack top among the stack
frames that we want to present to the user. By default it's 2 levels up from
the stack within the current `sym_setorigin` call.
|
def __init__(self,
source: Any,
tag: str,
stacktrace: Optional[bool] = None,
stacklimit: Optional[int] = None,
stacktop: int = -1):
"""Constructor.
Args:
source: Source value for the origin.
tag: A descriptive tag of the origin. Built-in tags are:
'__init__', 'clone', 'deepclone', 'return'. Users can manually
call `sym_setorigin` with custom tag value.
stacktrace: If True, enable stack trace for the origin. If None, enable
stack trace if `pg.track_origin()` is called. Otherwise stack trace is
disabled.
stacklimit: An optional integer to limit the stack depth. If None, it's
determined by the value passed to `pg.set_origin_stacktrace_limit`,
which is 10 by default.
stacktop: A negative integer to indicate the stack top among the stack
frames that we want to present to the user. By default it's 2 levels up from
the stack within the current `sym_setorigin` call.
"""
if not isinstance(tag, str):
raise ValueError(f'`tag` must be a string. Encountered: {tag!r}.')
self._source = source
self._tag = tag
self._stack = None
self._stacktrace = None
if stacktrace is None:
stacktrace = flags.is_tracking_origin()
if stacklimit is None:
stacklimit = flags.get_origin_stacktrace_limit()
if stacktrace:
self._stack = traceback.extract_stack(limit=stacklimit - stacktop)
if stacktop < 0:
self._stack = self._stack[:stacktop]
|
(self, source: Any, tag: str, stacktrace: Optional[bool] = None, stacklimit: Optional[int] = None, stacktop: int = -1)
|
41,249 |
pyglove.core.symbolic.origin
|
chain
|
Get the origin list from the nearest to the farthest filtered by tag.
|
def chain(self, tag: Optional[str] = None) -> List['Origin']:
"""Get the origin list from the neareast to the farthest filtered by tag."""
origins = []
o = self
while o is not None:
if tag is None or tag == o.tag:
origins.append(o)
o = getattr(o.source, 'sym_origin', None)
return origins
|
(self, tag: Optional[str] = None) -> List[pyglove.core.symbolic.origin.Origin]
|
41,250 |
pyglove.core.symbolic.origin
|
format
|
Formats this object.
|
def format(self,
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
**kwargs) -> str:
"""Formats this object."""
if isinstance(self._source, (str, type(None))):
source_str = object_utils.quote_if_str(self._source)
else:
source_info = object_utils.format(
self._source, compact, verbose, root_indent + 1, **kwargs)
source_str = f'{source_info} at 0x{id(self._source):8x}'
details = object_utils.kvlist_str([
('tag', object_utils.quote_if_str(self._tag), None),
('source', source_str, None),
])
return f'{self.__class__.__name__}({details})'
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, **kwargs) -> str
|
41,251 |
pyglove.core.symbolic.origin
|
history
|
Returns a history of origins with an optional filter.
Args:
condition: An optional callable object with signature
(origin) -> should_list. If None, all origins will be listed.
Returns:
A list of filtered origin from the earliest (root) to the most recent.
|
def history(
self,
condition: Optional[Callable[['Origin'], bool]] = None) -> List['Origin']:
"""Returns a history of origins with an optional filter.
Args:
condition: An optional callable object with signature
(origin) -> should_list. If None, all origins will be listed.
Returns:
A list of filtered origin from the earliest (root) to the most recent.
"""
condition = condition or (lambda o: True)
current = self
history = []
while current is not None:
if condition(current):
history.append(current)
current = getattr(current.source, 'sym_origin', None)
history.reverse()
return history
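For example, to list only the clone events in a value's lineage (a sketch; `value` stands in for any symbolic value)::
  clone_history = value.sym_origin.history(lambda o: o.tag == 'clone')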
|
(self, condition: Optional[Callable[[pyglove.core.symbolic.origin.Origin], bool]] = None) -> List[pyglove.core.symbolic.origin.Origin]
|
41,252 |
pyglove.core.symbolic.pure_symbolic
|
PureSymbolic
|
Base class to classes whose objects are considered pure symbolic.
Pure symbolic objects can be used for representing abstract concepts - for
example, a search space of objects - which cannot be executed but are solely
representational.
Having pure symbolic objects is a key differentiator of symbolic OOP from
regular OOP: they can be used to hold the place of values in an object as a
high-level expression of ideas. Later, with symbolic manipulation, the
pure symbolic objects are replaced with material values so the object
can be evaluated. This effectively decouples the expression of ideas from
the implementation of ideas. For example: ``pg.oneof(['a', 'b', 'c'])`` will
be manipulated into 'a', 'b' or 'c' based on the decision of a search
algorithm, letting the program evolve itself.
|
class PureSymbolic(pg_typing.CustomTyping):
"""Base class to classes whose objects are considered pure symbolic.
Pure symbolic objects can be used for representing abstract concepts - for
example, a search space of objects - which cannot be executed but are solely
representational.
Having pure symbolic objects is a key differentiator of symbolic OOP from
regular OOP: they can be used to hold the place of values in an object as a
high-level expression of ideas. Later, with symbolic manipulation, the
pure symbolic objects are replaced with material values so the object
can be evaluated. This effectively decouples the expression of ideas from
the implementation of ideas. For example: ``pg.oneof(['a', 'b', 'c'])`` will
be manipulated into 'a', 'b' or 'c' based on the decision of a search
algorithm, letting the program evolve itself.
"""
def custom_apply(
self,
path: object_utils.KeyPath,
value_spec: pg_typing.ValueSpec,
allow_partial: bool,
child_transform: Optional[
Callable[[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, Any]:
"""Custom apply on a value based on its original value spec.
This implements ``pg.pg_typing.CustomTyping``, allowing a pure symbolic
value to be assigned to any field. To customize this behavior, override
this method in subclasses.
Args:
path: KeyPath of current object under its object tree.
value_spec: Original value spec for this field.
allow_partial: Whether to allow partial objects to be created.
child_transform: Function to transform child node values into their final
values. Transform function is called on leaf nodes first, then on their
parents, recursively.
Returns:
A tuple (proceed_with_standard_apply, value_to_proceed).
If proceed_with_standard_apply is set to False, value_to_proceed
will be used as final value.
Raises:
Error when the value is not compatible with the value spec.
"""
del path, value_spec, allow_partial, child_transform
return (False, self)
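A sketch of a pure symbolic placeholder being materialized later (illustrative; assumes ``import pyglove as pg``)::
  hyper = pg.Dict(x=pg.oneof([1, 2, 3]))   # `pg.oneof` yields a pure symbolic value.
  assert pg.is_pure_symbolic(hyper)
  hyper.rebind(x=2)                        # Materialize the placeholder.
  assert hyper.x == 2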
|
()
|
41,254 |
pyglove.core.symbolic.ref
|
Ref
|
Symbolic reference.
When adding a symbolic node to a symbolic tree, it undergoes a copy operation
if it already has a parent, ensuring that all symbolic objects have a single
parent. Additionally, list and dict objects are automatically converted to
``pg.List`` and ``pg.Dict``, respectively, to enable symbolic operability.
However, these two conventions come with certain costs. The act of making
copies incurs a runtime cost, and it also introduces challenges in sharing
states across different symbolic objects. To address this issue, symbolic
reference is introduced. This feature allows a symbolic node to refer to
value objects without the need for transformation or copying, even when the
symbolic node itself is copied. For example::
class A(pg.Object):
x: int
a = pg.Ref(A(1))
b = pg.Dict(x=a)
c = pg.Dict(y=a)
assert b.x is a
assert c.y is a
assert b.clone().x is a
assert c.clone(deep=True).y is a
In this example, ``pg.Ref`` is used to create a symbolic reference to the
object ``A(1)``, and the ``pg.Dict`` objects `b` and `c` can then reference
`a` without creating additional copies. This mechanism not only mitigates
the runtime cost but also facilitates seamless sharing of states among various
symbolic objects.
Another useful scenario arises when we wish to utilize regular Python list
and dict objects. In this case, ``pg.Ref`` enables us to access the list/dict
object as fields in the symbolic tree without requiring them to be transformed
into ``pg.List`` and ``pg.Dict``. This allows for seamless integration of
standard Python containers within the symbolic structure::
d = pg.Dict(x=pg.Ref({1: 2}))
assert isinstance(d.x, dict)
assert not isinstance(d.x, pg.Dict)
e = pg.Dict(x=pg.Ref([0, 1, 2]))
assert isinstance(e.x, list)
assert not isinstance(e.x, pg.List)
Please be aware that ``pg.Ref`` objects are treated as leaf nodes in the
symbolic tree, even when they reference other symbolic objects. As a result,
the ``rebind()`` method cannot modify the value they are pointing to.
For primitive types, ``pg.Ref()`` returns their values directly without
creating a reference. For example, ``pg.Ref(1)`` and ``pg.Ref('abc')`` will
simply return the values 1 and 'abc', respectively, without any additional
referencing.
|
class Ref(Object, base.Inferential):
"""Symbolic reference.
When adding a symbolic node to a symbolic tree, it undergoes a copy operation
if it already has a parent, ensuring that all symbolic objects have a single
parent. Additionally, list and dict objects are automatically converted to
``pg.List`` and ``pg.Dict``, respectively, to enable symbolic operability.
However, these two conventions come with certain costs. The act of making
copies incurs a runtime cost, and it also introduces challenges in sharing
states across different symbolic objects. To address this issue, symbolic
reference is introduced. This feature allows a symbolic node to refer to
value objects without the need for transformation or copying, even when the
symbolic node itself is copied. For example::
class A(pg.Object):
x: int
a = pg.Ref(A(1))
b = pg.Dict(x=a)
c = pg.Dict(y=a)
assert b.x is a
assert c.y is a
assert b.clone().x is a
assert c.clone(deep=True).y is a
In this example, ``pg.Ref`` is used to create a symbolic reference to the
object ``A(1)``, and the ``pg.Dict`` objects `b` and `c` can then reference
`a` without creating additional copies. This mechanism not only mitigates
the runtime cost but also facilitates seamless sharing of states among various
symbolic objects.
Another useful scenario arises when we wish to utilize regular Python list
and dict objects. In this case, ``pg.Ref`` enables us to access the list/dict
object as fields in the symbolic tree without requiring them to be transformed
into ``pg.List`` and ``pg.Dict``. This allows for seamless integration of
standard Python containers within the symbolic structure::
d = pg.Dict(x=pg.Ref({1: 2}))
assert isinstance(d.x, dict)
assert not isinstance(d.x, pg.Dict)
e = pg.Dict(x=pg.Ref([0, 1, 2]))
assert isinstance(e.x, list)
assert not isinstance(e.x, pg.List)
Please be aware that ``pg.Ref`` objects are treated as leaf nodes in the
symbolic tree, even when they reference other symbolic objects. As a result,
the ``rebind()`` method cannot modify the value they are pointing to.
For primitive types, ``pg.Ref()`` returns their values directly without
creating a reference. For example, ``pg.Ref(1)`` and ``pg.Ref('abc')`` will
simply return the values 1 and 'abc', respectively, without any additional
referencing.
"""
def __new__(cls, value: Any, **kwargs):
del kwargs
if isinstance(value, (bool, numbers.Number, str)):
return value
return object.__new__(cls)
@object_utils.explicit_method_override
def __init__(self, value: Any, **kwargs) -> None:
super().__init__(**kwargs)
if isinstance(value, Ref):
value = value.value
self._value = value
def _on_parent_change(
self,
old_parent: Optional[base.Symbolic],
new_parent: Optional[base.Symbolic]) -> None:
if (new_parent is not None
and isinstance(self._value, base.Symbolic)
and self._value.sym_root is new_parent.sym_root):
raise NotImplementedError('Self-referential object is not supported.')
@property
def value(self) -> Any:
"""Returns the referenced value."""
return self._value
def infer(self, **kwargs) -> Any:
"""Returns the referenced value."""
return self._value
def custom_apply(
self,
path: object_utils.KeyPath,
value_spec: pg_typing.ValueSpec,
allow_partial: bool = False,
child_transform: Optional[Callable[
[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, Any]:
"""Validate candidates during value_spec binding time."""
del child_transform
# Check if the field being assigned could accept the referenced value.
# We do not do any transformation, thus not passing the child transform.
value_spec.apply(
self._value,
allow_partial=allow_partial)
return (False, self)
def _sym_clone(self, deep: bool, memo: Any = None) -> 'Ref':
# Always create a new object.
# TODO(daiyip): support deep clone with the update of reference when
# the original value is updated.
return Ref(self._value, allow_partial=self.allow_partial)
def sym_eq(self, other: Any) -> bool:
return isinstance(other, Ref) and self.value is other.value
def sym_jsonify(self, **kwargs: Any) -> Any:
raise TypeError(f'{self!r} cannot be serialized at the moment.')
def __getstate__(self):
raise TypeError(f'{self!r} cannot be pickled at the moment.')
def format(
self,
compact: bool = False,
verbose: bool = False,
root_indent: int = 0,
**kwargs: Any) -> str:
value_str = object_utils.format(
self._value,
compact=compact, verbose=verbose, root_indent=root_indent + 1)
if compact:
return f'{self.__class__.__name__}({value_str})'
else:
return (f'{self.__class__.__name__}(\n'
+ ' ' * (root_indent + 1)
+ f'value = {value_str}\n'
+ ' ' * root_indent + ')')
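A sketch of the leaf-node semantics noted above (illustrative; assumes ``import pyglove as pg``)::
  assert pg.Ref(1) == 1 and pg.Ref('abc') == 'abc'   # Primitives pass through.
  d = pg.Dict(x=pg.Ref(pg.Dict(y=1)))
  d.rebind(x=pg.Ref(pg.Dict(y=2)))   # Replacing the whole reference works...
  # ...but `rebind` cannot reach into the referenced value, e.g. via 'x.y'.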
|
(value: Any, **kwargs)
|
41,260 |
pyglove.core.symbolic.ref
|
__getstate__
| null |
def __getstate__(self):
raise TypeError(f'{self!r} cannot be pickled at the moment.')
|
(self)
|
41,262 |
pyglove.core.symbolic.ref
|
__init__
| null |
@object_utils.explicit_method_override
def __init__(self, value: Any, **kwargs) -> None:
super().__init__(**kwargs)
if isinstance(value, Ref):
value = value.value
self._value = value
|
(self, value: Any, **kwargs) -> NoneType
|
41,264 |
pyglove.core.symbolic.ref
|
__new__
| null |
def __new__(cls, value: Any, **kwargs):
del kwargs
if isinstance(value, (bool, numbers.Number, str)):
return value
return object.__new__(cls)
|
(cls, value: Any, **kwargs)
|
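Because ``__new__`` above short-circuits on primitives, wrapping a bool, number, or string in ``Ref`` is a no-op; only non-primitive values get an actual ``Ref`` instance. A quick sketch::
  import pyglove as pg

  assert pg.Ref(1) == 1                      # Returned as-is, not wrapped.
  assert pg.Ref('abc') == 'abc'
  assert isinstance(pg.Ref({1: 2}), pg.Ref)  # Non-primitives are wrapped.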
41,276 |
pyglove.core.symbolic.ref
|
_on_parent_change
| null |
def _on_parent_change(
self,
old_parent: Optional[base.Symbolic],
new_parent: Optional[base.Symbolic]) -> None:
if (new_parent is not None
and isinstance(self._value, base.Symbolic)
and self._value.sym_root is new_parent.sym_root):
raise NotImplementedError('Self-referential object is not supported.')
|
(self, old_parent: Optional[pyglove.core.symbolic.base.Symbolic], new_parent: Optional[pyglove.core.symbolic.base.Symbolic]) -> NoneType
|
41,282 |
pyglove.core.symbolic.ref
|
_sym_clone
| null |
def _sym_clone(self, deep: bool, memo: Any = None) -> 'Ref':
# Always create a new object.
# TODO(daiyip): support deep clone with the update of reference when
# the original value is updated.
return Ref(self._value, allow_partial=self.allow_partial)
|
(self, deep: bool, memo: Optional[Any] = None) -> pyglove.core.symbolic.ref.Ref
|
41,291 |
pyglove.core.symbolic.ref
|
custom_apply
|
Validate candidates during value_spec binding time.
|
def custom_apply(
self,
path: object_utils.KeyPath,
value_spec: pg_typing.ValueSpec,
allow_partial: bool = False,
child_transform: Optional[Callable[
[object_utils.KeyPath, pg_typing.Field, Any], Any]] = None
) -> Tuple[bool, Any]:
"""Validate candidates during value_spec binding time."""
del child_transform
# Check if the field being assigned could accept the referenced value.
# We do not do any transformation, thus not passing the child transform.
value_spec.apply(
self._value,
allow_partial=allow_partial)
return (False, self)
|
(self, path: pyglove.core.object_utils.value_location.KeyPath, value_spec: pyglove.core.typing.class_schema.ValueSpec, allow_partial: bool = False, child_transform: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None) -> Tuple[bool, Any]
|
41,292 |
pyglove.core.symbolic.ref
|
format
| null |
def format(
self,
compact: bool = False,
verbose: bool = False,
root_indent: int = 0,
**kwargs: Any) -> str:
value_str = object_utils.format(
self._value,
compact=compact, verbose=verbose, root_indent=root_indent + 1)
if compact:
return f'{self.__class__.__name__}({value_str})'
else:
return (f'{self.__class__.__name__}(\n'
+ ' ' * (root_indent + 1)
+ f'value = {value_str}\n'
+ ' ' * root_indent + ')')
|
(self, compact: bool = False, verbose: bool = False, root_indent: int = 0, **kwargs: Any) -> str
|
41,293 |
pyglove.core.symbolic.ref
|
infer
|
Returns the referenced value.
|
def infer(self, **kwargs) -> Any:
"""Returns the referenced value."""
return self._value
|
(self, **kwargs) -> Any
|
41,306 |
pyglove.core.symbolic.ref
|
sym_eq
| null |
def sym_eq(self, other: Any) -> bool:
return isinstance(other, Ref) and self.value is other.value
|
(self, other: Any) -> bool
|
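Note that ``sym_eq`` above compares referenced values by identity (``is``) rather than by value, so two ``Ref`` objects are only symbolically equal when they point at the very same object. A small sketch of the implication::
  import pyglove as pg

  a = [1, 2]
  assert pg.Ref(a).sym_eq(pg.Ref(a))           # Same underlying object.
  assert not pg.Ref(a).sym_eq(pg.Ref([1, 2]))  # Equal value, different object.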
41,316 |
pyglove.core.symbolic.ref
|
sym_jsonify
| null |
def sym_jsonify(self, **kwargs: Any) -> Any:
raise TypeError(f'{self!r} cannot be serialized at the moment.')
|
(self, **kwargs: Any) -> Any
|
41,330 |
pyglove.core.typing.class_schema
|
Schema
|
Class that represents a schema.
PyGlove's runtime type system is based on the concept of ``Schema`` (
class :class:`pyglove.Schema`), which defines what symbolic attributes are
held by a symbolic type (e.g. a symbolic dict, a symbolic list or a symbolic
class) and what values each attribute accepts. A ``Schema`` object consists of
a list of ``Field`` (class :class:`pyglove.Field`), which define the
acceptable keys and their values for these attributes. A ``Schema`` object is
usually created automatically and associated with a symbolic type upon its
declaration, through decorators such as :func:`pyglove.members`,
:func:`pyglove.symbolize` or :func:`pyglove.functor`. For example::
@pg.members([
('x', pg.typing.Int(default=1)),
('y', pg.typing.Float().noneable())
])
class A(pg.Object):
pass
print(A.__schema__)
@pg.symbolize([
('a', pg.typing.Int()),
('b', pg.typing.Float())
])
def foo(a, b):
return a + b
print(foo.__schema__)
Implementation-wise it holds an ordered dictionary of a field key
(:class:`pyglove.KeySpec`) to its field definition (:class:`pyglove.Field`).
The key specification describes what keys/attributes are acceptable for the
field, and the value specification within the ``Field`` describes the value type
of the field, its validation rules, default values, etc.
Symbolic attributes can be inherited during subclassing. Accordingly, the
schema that defines a symbolic class' attributes can be inherited too by its
subclasses. The fields from the bases' schema will be carried over into the
subclasses' schema, while the subclass can override them by redefining fields
with the same keys. The subclass cannot override its base classes' fields with
arbitrary value specs; it may only override non-frozen fields with more
restrictive validation rules of the same type, or change their default values.
See :meth:`pyglove.ValueSpec.extend` for more details.
The code snippet below illustrates schema inheritance during subclassing::
@pg.members([
('x', pg.typing.Int(min_value=1)),
('y', pg.typing.Float()),
])
class A(pg.Object):
pass
@pg.members([
# Further restrict inherited 'x' by specifying the max value, as well
# as providing a default value.
('x', pg.typing.Int(max_value=5, default=2)),
('z', pg.typing.Str('foo').freeze())
])
class B(A):
pass
assert list(B.__schema__.fields.keys()) == ['x', 'y', 'z']
@pg.members([
# Raises: 'z' is frozen in class B and cannot be extended further.
('z', pg.typing.Str())
])
class C(B):
pass
With a schema, an input dict can be validated and completed by the schema via
:meth:`apply`. If a required field is missing from the input and the
object's `allow_partial` is set to False, a ``KeyError`` will be raised. Otherwise
a partially validated/transformed dict will be returned. Missing values in the
object will be placeheld by :const:`pyglove.MISSING_VALUE`.
|
class Schema(object_utils.Formattable, object_utils.JSONConvertible):
"""Class that represents a schema.
PyGlove's runtime type system is based on the concept of ``Schema`` (
class :class:`pyglove.Schema`), which defines what symbolic attributes are
held by a symbolic type (e.g. a symbolic dict, a symbolic list or a symbolic
class) and what values each attribute accepts. A ``Schema`` object consists of
a list of ``Field`` (class :class:`pyglove.Field`), which define the
acceptable keys and their values for these attributes. A ``Schema`` object is
usually created automatically and associated with a symbolic type upon its
declaration, through decorators such as :func:`pyglove.members`,
:func:`pyglove.symbolize` or :func:`pyglove.functor`. For example::
@pg.members([
('x', pg.typing.Int(default=1)),
('y', pg.typing.Float().noneable())
])
class A(pg.Object):
pass
print(A.__schema__)
@pg.symbolize([
('a', pg.typing.Int()),
('b', pg.typing.Float())
])
def foo(a, b):
return a + b
print(foo.__schema__)
Implementation-wise it holds an ordered dictionary of a field key
(:class:`pyglove.KeySpec`) to its field definition (:class:`pyglove.Field`).
The key specification describes what keys/attributes are acceptable for the
field, and the value specification within the ``Field`` describes the value type
of the field, its validation rules, default values, etc.
Symbolic attributes can be inherited during subclassing. Accordingly, the
schema that defines a symbolic class' attributes can be inherited too by its
subclasses. The fields from the bases' schema will be carried over into the
subclasses' schema, while the subclass can override them by redefining fields
with the same keys. The subclass cannot override its base classes' fields with
arbitrary value specs; it may only override non-frozen fields with more
restrictive validation rules of the same type, or change their default values.
See :meth:`pyglove.ValueSpec.extend` for more details.
The code snippet below illustrates schema inheritance during subclassing::
@pg.members([
('x', pg.typing.Int(min_value=1)),
('y', pg.typing.Float()),
])
class A(pg.Object):
pass
@pg.members([
# Further restrict inherited 'x' by specifying the max value, as well
# as providing a default value.
('x', pg.typing.Int(max_value=5, default=2)),
('z', pg.typing.Str('foo').freeze())
])
class B(A):
pass
assert list(B.__schema__.fields.keys()) == ['x', 'y', 'z']
@pg.members([
# Raises: 'z' is frozen in class B and cannot be extended further.
('z', pg.typing.Str())
])
class C(B):
pass
With a schema, an input dict can be validated and completed by the schema via
:meth:`apply`. If a required field is missing from the input and the
object's `allow_partial` is set to False, a ``KeyError`` will be raised. Otherwise
a partially validated/transformed dict will be returned. Missing values in the
object will be placeheld by :const:`pyglove.MISSING_VALUE`.
"""
__serialization_key__ = 'pyglove.typing.Schema'
def __init__(
self,
fields: List[Field],
name: Optional[str] = None,
base_schema_list: Optional[List['Schema']] = None,
description: Optional[str] = None,
*,
allow_nonconst_keys: bool = False,
metadata: Optional[Dict[str, Any]] = None):
"""Constructor.
Args:
fields: A list of Field as the definition of the schema. The order of the
fields will be preserved.
name: Optional name of this schema. Useful for debugging.
base_schema_list: List of schemas used as bases. When present, fields
from these schemas will be copied to this schema. Fields from the
latter schemas will override those from the former ones.
description: Optional str as the description for the schema.
allow_nonconst_keys: Whether immediate fields can use non-const keys.
metadata: Optional dict of user objects as schema-level metadata.
Raises:
TypeError: Argument `fields` is not a list.
KeyError: If a field name contains characters ('.') which are not
allowed, or a field name from `fields` already exists in the parent
schema.
ValueError: When a ValueSpec cannot be created from `fields`, e.g. due
to an unsupported value type, or a default value that doesn't conform
to the value specification.
"""
if not isinstance(fields, list):
raise TypeError(
f"Argument 'fields' must be a list. Encountered: {fields}."
)
self._name = name
self._allow_nonconst_keys = allow_nonconst_keys
self._fields = {f.key: f for f in fields}
self._description = description
self._metadata = metadata or {}
self._dynamic_field = None
for f in fields:
if not f.key.is_const:
self._dynamic_field = f
break
if base_schema_list:
# Extend base schema from the nearest ancestor to the farthest.
for base in reversed(base_schema_list):
self.extend(base)
if not allow_nonconst_keys and self._dynamic_field is not None:
raise ValueError(
f'NonConstKey is not allowed in schema. '
f'Encountered \'{self._dynamic_field.key}\'.')
def extend(self, base: 'Schema') -> 'Schema':
"""Extend current schema based on a base schema."""
def _merge_field(
path,
parent_field: Field,
child_field: Field) -> Field:
"""Merge function on field with the same key."""
if parent_field != object_utils.MISSING_VALUE:
if object_utils.MISSING_VALUE == child_field:
if (not self._allow_nonconst_keys and not parent_field.key.is_const):
hints = object_utils.kvlist_str([
('base', object_utils.quote_if_str(base.name), None),
('path', path, None)
])
raise ValueError(
f'Non-const key {parent_field.key} is not allowed to be '
f'added to the schema. ({hints})')
return copy.deepcopy(parent_field)
else:
try:
child_field.extend(parent_field)
except Exception as e: # pylint: disable=broad-except
hints = object_utils.kvlist_str([
('base', object_utils.quote_if_str(base.name), None),
('path', path, None)
])
raise e.__class__(f'{e} ({hints})').with_traceback(
sys.exc_info()[2])
return child_field
self._fields = object_utils.merge([base.fields, self.fields], _merge_field)
self._metadata = object_utils.merge([base.metadata, self.metadata])
# Inherit dynamic field from base if it's not present in the child.
if self._dynamic_field is None:
for k, f in self._fields.items():
if not k.is_const:
self._dynamic_field = f
break
return self
def is_compatible(self, other: 'Schema') -> bool:
"""Returns whether current schema is compatible with the other schema.
NOTE(daiyip): schema A is compatible with schema B when:
schema A and schema B have the same keys, with compatible value specs.
Args:
other: Other schema.
Returns:
True if values that are acceptable to the other schema are also
acceptable to the current schema.
Raises:
TypeError: If `other` is not a schema object.
"""
if not isinstance(other, Schema):
raise TypeError(f'Argument \'other\' should be a Schema object. '
f'Encountered {other}.')
for key_spec in other.keys():
if key_spec not in self:
return False
for key_spec, field in self.items():
if key_spec not in other:
return False
if not field.value.is_compatible(other[key_spec].value):
return False
return True
def get_field(self, key: str) -> Optional[Field]:
"""Get field definition (Field) for a key.
Args:
key: string as input key.
Returns:
Matched field. A field is considered a match when:
* Its key spec is a ConstStrKey that equals the input key.
* Or it's the first field whose key spec is a NonConstKey
which matches the input key.
"""
# For const string key, we can directly retrieve from fields dict.
if key in self._fields:
return self._fields[key]
if self._allow_nonconst_keys:
for key_spec, field in self._fields.items():
if key_spec.match(key):
return field
return None
@property
def description(self) -> Optional[str]:
"""Returns the description for the schema."""
return self._description
def set_description(self, description: str) -> None:
"""Sets the description for the schema."""
self._description = description
@property
def dynamic_field(self) -> Optional[Field]:
"""Returns the field that matches multiple keys if any."""
return self._dynamic_field
def resolve(
self, keys: Iterable[str]
) -> Tuple[Dict[KeySpec, List[str]], List[str]]:
"""Resolve keys by grouping them by their matched fields.
Args:
keys: A list of string keys.
Returns:
A tuple of matched key results and unmatched keys.
Matched key results are an ordered dict of KeySpec to matched keys,
in field declaration order.
Unmatched keys are strings from input.
"""
keys = list(keys)
input_keyset = set(keys)
nonconst_key_specs = [k for k in self._fields.keys() if not k.is_const]
nonconst_keys = {k: [] for k in nonconst_key_specs}
unmatched_keys = []
keys_by_key_spec = dict()
for key in keys:
if key not in self._fields:
matched_nonconst_keys = False
for key_spec in nonconst_key_specs:
if key_spec.match(key):
nonconst_keys[key_spec].append(key)
matched_nonconst_keys = True
break
if not matched_nonconst_keys:
unmatched_keys.append(key)
for key_spec in self._fields.keys():
keys = []
if not key_spec.is_const:
keys = nonconst_keys.get(key_spec, [])
elif key_spec in input_keyset:
keys.append(str(key_spec))
keys_by_key_spec[key_spec] = keys
return (keys_by_key_spec, unmatched_keys)
def apply(
self,
dict_obj: Dict[str, Any],
allow_partial: bool = False,
child_transform: Optional[Callable[
[object_utils.KeyPath, Field, Any], Any]] = None,
root_path: Optional[object_utils.KeyPath] = None,
) -> Dict[str, Any]: # pyformat: disable
# pyformat: disable
"""Apply this schema to a dict object, validate and transform it.
Args:
dict_obj: JSON dict type that (maybe) conforms to the schema.
allow_partial: Whether to allow a partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
The signature of transform_fn is: `(path, field, value) -> new_value`
Argument `path` is a KeyPath object to the field. Argument `field` is
on which Field the value should apply. Argument `value` is the value
from input that matches a Field from the schema, with child fields
already transformed by this function.
The possible values for these two arguments are::
-------------------------------------------------------
                        |   field    |   value
-------------------------------------------------------
The value with an       |            |
applicable Field is     |  Not None  | Not MISSING_VALUE
found in schema.        |            |
-------------------------------------------------------
The value is not        |            |
present for a key       |  Not None  | MISSING_VALUE
defined in schema.      |            |
-------------------------------------------------------
Return value will be inserted to the parent dict under path, unless
return value is MISSING_VALUE.
root_path: KeyPath of root element of dict_obj.
Returns:
A dict filled by the schema with transformed values.
Raises:
KeyError: Key is not allowed in schema.
TypeError: Type of dict values are not aligned with schema.
ValueError: Value of dict values are not aligned with schema.
""" # pyformat: enable
matched_keys, unmatched_keys = self.resolve(dict_obj.keys())
if unmatched_keys:
raise KeyError(
f'Keys {unmatched_keys} are not allowed in Schema. '
f'(parent=\'{root_path}\')')
for key_spec, keys in matched_keys.items():
field = self._fields[key_spec]
# For missing const keys, we add to keys collection to add missing value.
if key_spec.is_const and key_spec not in keys:
keys.append(str(key_spec))
for key in keys:
if dict_obj:
value = dict_obj.get(key, object_utils.MISSING_VALUE)
else:
value = object_utils.MISSING_VALUE
# NOTE(daiyip): field.default_value may also be MISSING_VALUE
# or partial.
if object_utils.MISSING_VALUE == value:
value = copy.deepcopy(field.default_value)
new_value = field.apply(
value,
allow_partial=allow_partial,
transform_fn=child_transform,
root_path=object_utils.KeyPath(key, root_path))
# NOTE(daiyip): `pg.Dict.__getitem__` has special logic for handling
# `pg.Contextual` values. Therefore, we use `dict.__getitem__()` to
# avoid triggering side effects.
if (key not in dict_obj
or dict.__getitem__(dict_obj, key) is not new_value):
# NOTE(daiyip): minimize calls to __setitem__ when possible.
# Custom dict types like symbolic dict may trigger additional logic
# when __setitem__ is called.
dict_obj[key] = new_value
return dict_obj
def validate(self,
dict_obj: Dict[str, Any],
allow_partial: bool = False,
root_path: Optional[object_utils.KeyPath] = None) -> None:
"""Validates whether dict object is conformed with the schema."""
self.apply(
copy.deepcopy(dict_obj),
allow_partial=allow_partial,
root_path=root_path)
@property
def name(self) -> Optional[str]:
"""Name of this schema."""
return self._name
def set_name(self, name: str) -> None:
"""Sets the name of this schema."""
self._name = name
@property
def allow_nonconst_keys(self) -> bool:
"""Returns whether to allow non-const keys."""
return self._allow_nonconst_keys
@property
def fields(self) -> Dict[KeySpec, Field]:
"""Returns fields of this schema."""
return self._fields
def __getitem__(self, key: Union[str, KeySpec]) -> Field:
"""Returns field by key."""
return self._fields[key]
def __contains__(self, key: Union[str, KeySpec]) -> bool:
"""Returns if a key or key spec exists in the schema."""
return key in self._fields
def get(self,
key: Union[str, KeySpec],
default: Optional[Field] = None
) -> Optional[Field]:
"""Returns field by key with default value if not found."""
return self._fields.get(key, default)
def keys(self) -> Iterable[KeySpec]:
"""Return an iteratable of KeySpecs in declaration order."""
return self._fields.keys()
def values(self) -> Iterable[Field]:
"""Returns an iterable of Field in declaration order."""
return self._fields.values()
def items(self) -> Iterable[Tuple[KeySpec, Field]]:
"""Returns an iterable of (KeySpec, Field) tuple in declaration order."""
return self._fields.items()
@property
def metadata(self) -> Dict[str, Any]:
"""Returns metadata of this schema."""
return self._metadata
def format(
self,
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
cls_name: Optional[str] = None,
bracket_type: object_utils.BracketType = object_utils.BracketType.ROUND,
**kwargs) -> str:
"""Format current Schema into nicely printed string."""
if cls_name is None:
cls_name = 'Schema'
def _indent(text, indent):
return ' ' * 2 * indent + text
def _format_child(child):
return child.format(
compact=compact,
verbose=verbose,
root_indent=root_indent + 1,
**kwargs)
open_bracket, close_bracket = object_utils.bracket_chars(bracket_type)
if compact:
s = [f'{cls_name}{open_bracket}']
s.append(', '.join([
f'{f.key}={_format_child(f.value)}'
for f in self.fields.values()
]))
s.append(close_bracket)
else:
s = [f'{cls_name}{open_bracket}\n']
last_field_show_description = False
for i, f in enumerate(self.fields.values()):
this_field_show_description = verbose and f.description
if i != 0:
s.append(',\n')
if last_field_show_description or this_field_show_description:
s.append('\n')
if this_field_show_description:
s.append(_indent(f'# {f.description}\n', root_indent + 1))
last_field_show_description = this_field_show_description
s.append(
_indent(f'{f.key} = {_format_child(f.value)}', root_indent + 1))
s.append('\n')
s.append(_indent(close_bracket, root_indent))
return ''.join(s)
def to_json(self, **kwargs) -> Dict[str, Any]:
return self.to_json_dict(
fields=dict(
fields=(list(self._fields.values()), []),
name=(self._name, None),
description=(self._description, None),
allow_nonconst_keys=(self._allow_nonconst_keys, False),
metadata=(self._metadata, {}),
),
exclude_default=True,
**kwargs,
)
def __eq__(self, other: Any) -> bool:
if self is other:
return True
return isinstance(other, Schema) and self._fields == other._fields
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
|
(fields: List[pyglove.core.typing.class_schema.Field], name: Optional[str] = None, base_schema_list: Optional[List[ForwardRef('Schema')]] = None, description: Optional[str] = None, *, allow_nonconst_keys: bool = False, metadata: Optional[Dict[str, Any]] = None)
|
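To make the ``Schema`` behavior above concrete, here is a minimal sketch (assuming the public ``pg.typing.create_schema`` helper) that builds a schema directly and applies it to a dict; in practice schemas are usually created implicitly via ``pg.members``::
  import pyglove as pg

  schema = pg.typing.create_schema([
      ('x', pg.typing.Int(default=1)),
      ('y', pg.typing.Str()),
  ])
  # `apply` validates the input and fills in defaults.
  value = schema.apply({'y': 'foo'})
  assert value == {'x': 1, 'y': 'foo'}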
41,332 |
pyglove.core.typing.class_schema
|
__contains__
|
Returns if a key or key spec exists in the schema.
|
def __contains__(self, key: Union[str, KeySpec]) -> bool:
"""Returns if a key or key spec exists in the schema."""
return key in self._fields
|
(self, key: Union[str, pyglove.core.typing.class_schema.KeySpec]) -> bool
|
41,333 |
pyglove.core.typing.class_schema
|
__eq__
| null |
def __eq__(self, other: Any) -> bool:
if self is other:
return True
return isinstance(other, Schema) and self._fields == other._fields
|
(self, other: Any) -> bool
|
41,334 |
pyglove.core.typing.class_schema
|
__getitem__
|
Returns field by key.
|
def __getitem__(self, key: Union[str, KeySpec]) -> Field:
"""Returns field by key."""
return self._fields[key]
|
(self, key: Union[str, pyglove.core.typing.class_schema.KeySpec]) -> pyglove.core.typing.class_schema.Field
|
41,335 |
pyglove.core.typing.class_schema
|
__init__
|
Constructor.
Args:
fields: A list of Field as the definition of the schema. The order of the
fields will be preserved.
name: Optional name of this schema. Useful for debugging.
base_schema_list: List of schemas used as bases. When present, fields
from these schemas will be copied to this schema. Fields from the
latter schemas will override those from the former ones.
description: Optional str as the description for the schema.
allow_nonconst_keys: Whether immediate fields can use non-const keys.
metadata: Optional dict of user objects as schema-level metadata.
Raises:
TypeError: Argument `fields` is not a list.
KeyError: If a field name contains characters ('.') which are not
allowed, or a field name from `fields` already exists in the parent
schema.
ValueError: When a ValueSpec cannot be created from `fields`, e.g. due
to an unsupported value type, or a default value that doesn't conform
to the value specification.
|
def __init__(
self,
fields: List[Field],
name: Optional[str] = None,
base_schema_list: Optional[List['Schema']] = None,
description: Optional[str] = None,
*,
allow_nonconst_keys: bool = False,
metadata: Optional[Dict[str, Any]] = None):
"""Constructor.
Args:
fields: A list of Field as the definition of the schema. The order of the
fields will be preserved.
name: Optional name of this schema. Useful for debugging.
base_schema_list: List of schemas used as bases. When present, fields
from these schemas will be copied to this schema. Fields from the
latter schemas will override those from the former ones.
description: Optional str as the description for the schema.
allow_nonconst_keys: Whether immediate fields can use non-const keys.
metadata: Optional dict of user objects as schema-level metadata.
Raises:
TypeError: Argument `fields` is not a list.
KeyError: If a field name contains characters ('.') which are not
allowed, or a field name from `fields` already exists in the parent
schema.
ValueError: When a ValueSpec cannot be created from `fields`, e.g. due
to an unsupported value type, or a default value that doesn't conform
to the value specification.
"""
if not isinstance(fields, list):
raise TypeError(
f"Argument 'fields' must be a list. Encountered: {fields}."
)
self._name = name
self._allow_nonconst_keys = allow_nonconst_keys
self._fields = {f.key: f for f in fields}
self._description = description
self._metadata = metadata or {}
self._dynamic_field = None
for f in fields:
if not f.key.is_const:
self._dynamic_field = f
break
if base_schema_list:
# Extend base schema from the nearest ancestor to the farthest.
for base in reversed(base_schema_list):
self.extend(base)
if not allow_nonconst_keys and self._dynamic_field is not None:
raise ValueError(
f'NonConstKey is not allowed in schema. '
f'Encountered \'{self._dynamic_field.key}\'.')
|
(self, fields: List[pyglove.core.typing.class_schema.Field], name: Optional[str] = None, base_schema_list: Optional[List[pyglove.core.typing.class_schema.Schema]] = None, description: Optional[str] = None, *, allow_nonconst_keys: bool = False, metadata: Optional[Dict[str, Any]] = None)
|
41,340 |
pyglove.core.typing.class_schema
|
apply
|
Apply this schema to a dict object, validate and transform it.
Args:
dict_obj: JSON dict type that (maybe) conforms to the schema.
allow_partial: Whether to allow a partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
The signature of transform_fn is: `(path, field, value) -> new_value`
Argument `path` is a KeyPath object to the field. Argument `field` is
on which Field the value should apply. Argument `value` is the value
from input that matches a Field from the schema, with child fields
already transformed by this function.
The possible values for these two arguments are::
-------------------------------------------------------
                        |   field    |   value
-------------------------------------------------------
The value with an       |            |
applicable Field is     |  Not None  | Not MISSING_VALUE
found in schema.        |            |
-------------------------------------------------------
The value is not        |            |
present for a key       |  Not None  | MISSING_VALUE
defined in schema.      |            |
-------------------------------------------------------
Return value will be inserted to the parent dict under path, unless
return value is MISSING_VALUE.
root_path: KeyPath of root element of dict_obj.
Returns:
A dict filled by the schema with transformed values.
Raises:
KeyError: Key is not allowed in schema.
TypeError: Type of dict values are not aligned with schema.
ValueError: Value of dict values are not aligned with schema.
|
def apply(
self,
dict_obj: Dict[str, Any],
allow_partial: bool = False,
child_transform: Optional[Callable[
[object_utils.KeyPath, Field, Any], Any]] = None,
root_path: Optional[object_utils.KeyPath] = None,
) -> Dict[str, Any]: # pyformat: disable
# pyformat: disable
"""Apply this schema to a dict object, validate and transform it.
Args:
dict_obj: JSON dict type that (maybe) conforms to the schema.
allow_partial: Whether to allow a partial object to be created.
child_transform: Function to transform child node values in dict_obj into
their final values. Transform function is called on leaf nodes first,
then on their containers, recursively.
The signature of transform_fn is: `(path, field, value) -> new_value`
Argument `path` is a KeyPath object to the field. Argument `field` is
on which Field the value should apply. Argument `value` is the value
from input that matches a Field from the schema, with child fields
already transformed by this function.
The possible values for these two arguments are::
-------------------------------------------------------
                        |   field    |   value
-------------------------------------------------------
The value with an       |            |
applicable Field is     |  Not None  | Not MISSING_VALUE
found in schema.        |            |
-------------------------------------------------------
The value is not        |            |
present for a key       |  Not None  | MISSING_VALUE
defined in schema.      |            |
-------------------------------------------------------
Return value will be inserted to the parent dict under path, unless
return value is MISSING_VALUE.
root_path: KeyPath of root element of dict_obj.
Returns:
A dict filled by the schema with transformed values.
Raises:
KeyError: Key is not allowed in schema.
TypeError: Type of dict values are not aligned with schema.
ValueError: Value of dict values are not aligned with schema.
""" # pyformat: enable
matched_keys, unmatched_keys = self.resolve(dict_obj.keys())
if unmatched_keys:
raise KeyError(
f'Keys {unmatched_keys} are not allowed in Schema. '
f'(parent=\'{root_path}\')')
for key_spec, keys in matched_keys.items():
field = self._fields[key_spec]
# For missing const keys, we add to keys collection to add missing value.
if key_spec.is_const and key_spec not in keys:
keys.append(str(key_spec))
for key in keys:
if dict_obj:
value = dict_obj.get(key, object_utils.MISSING_VALUE)
else:
value = object_utils.MISSING_VALUE
# NOTE(daiyip): field.default_value may also be MISSING_VALUE
# or partial.
if object_utils.MISSING_VALUE == value:
value = copy.deepcopy(field.default_value)
new_value = field.apply(
value,
allow_partial=allow_partial,
transform_fn=child_transform,
root_path=object_utils.KeyPath(key, root_path))
# NOTE(daiyip): `pg.Dict.__getitem__` has special logic for handling
# `pg.Contextual` values. Therefore, we use `dict.__getitem__()` to
# avoid triggering side effects.
if (key not in dict_obj
or dict.__getitem__(dict_obj, key) is not new_value):
# NOTE(daiyip): minimize calls to __setitem__ when possible.
# Custom dict types like symbolic dict may trigger additional logic
# when __setitem__ is called.
dict_obj[key] = new_value
return dict_obj
|
(self, dict_obj: Dict[str, Any], allow_partial: bool = False, child_transform: Optional[Callable[[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.typing.class_schema.Field, Any], Any]] = None, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None) -> Dict[str, Any]
|
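As an illustration of the ``child_transform`` hook documented above, this hedged sketch (again assuming ``pg.typing.create_schema``) doubles every integer leaf while leaving other values untouched::
  import pyglove as pg

  schema = pg.typing.create_schema([
      ('x', pg.typing.Int()),
      ('name', pg.typing.Str()),
  ])

  def double_ints(path, field, value):
    # Called on leaf nodes first; the return value is inserted under `path`.
    return value * 2 if isinstance(value, int) else value

  value = schema.apply({'x': 3, 'name': 'a'}, child_transform=double_ints)
  assert value == {'x': 6, 'name': 'a'}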
41,341 |
pyglove.core.typing.class_schema
|
extend
|
Extend current schema based on a base schema.
|
def extend(self, base: 'Schema') -> 'Schema':
"""Extend current schema based on a base schema."""
def _merge_field(
path,
parent_field: Field,
child_field: Field) -> Field:
"""Merge function on field with the same key."""
if parent_field != object_utils.MISSING_VALUE:
if object_utils.MISSING_VALUE == child_field:
if (not self._allow_nonconst_keys and not parent_field.key.is_const):
hints = object_utils.kvlist_str([
('base', object_utils.quote_if_str(base.name), None),
('path', path, None)
])
raise ValueError(
f'Non-const key {parent_field.key} is not allowed to be '
f'added to the schema. ({hints})')
return copy.deepcopy(parent_field)
else:
try:
child_field.extend(parent_field)
except Exception as e: # pylint: disable=broad-except
hints = object_utils.kvlist_str([
('base', object_utils.quote_if_str(base.name), None),
('path', path, None)
])
raise e.__class__(f'{e} ({hints})').with_traceback(
sys.exc_info()[2])
return child_field
self._fields = object_utils.merge([base.fields, self.fields], _merge_field)
self._metadata = object_utils.merge([base.metadata, self.metadata])
# Inherit dynamic field from base if it's not present in the child.
if self._dynamic_field is None:
for k, f in self._fields.items():
if not k.is_const:
self._dynamic_field = f
break
return self
|
(self, base: pyglove.core.typing.class_schema.Schema) -> pyglove.core.typing.class_schema.Schema
|
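The merge rules implemented by ``extend`` above can be seen in a short sketch: fields from the base schema are carried over, and a same-key field in the child must narrow (not broaden) the base value spec::
  import pyglove as pg

  base = pg.typing.create_schema([('x', pg.typing.Int())])
  child = pg.typing.create_schema([
      ('x', pg.typing.Int(min_value=0)),  # Narrows the inherited spec.
      ('y', pg.typing.Str()),
  ])
  child.extend(base)
  assert list(child.fields.keys()) == ['x', 'y']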
41,342 |
pyglove.core.typing.class_schema
|
format
|
Format current Schema into nicely printed string.
|
def format(
self,
compact: bool = False,
verbose: bool = True,
root_indent: int = 0,
cls_name: Optional[str] = None,
bracket_type: object_utils.BracketType = object_utils.BracketType.ROUND,
**kwargs) -> str:
"""Format current Schema into nicely printed string."""
if cls_name is None:
cls_name = 'Schema'
def _indent(text, indent):
return ' ' * 2 * indent + text
def _format_child(child):
return child.format(
compact=compact,
verbose=verbose,
root_indent=root_indent + 1,
**kwargs)
open_bracket, close_bracket = object_utils.bracket_chars(bracket_type)
if compact:
s = [f'{cls_name}{open_bracket}']
s.append(', '.join([
f'{f.key}={_format_child(f.value)}'
for f in self.fields.values()
]))
s.append(close_bracket)
else:
s = [f'{cls_name}{open_bracket}\n']
last_field_show_description = False
for i, f in enumerate(self.fields.values()):
this_field_show_description = verbose and f.description
if i != 0:
s.append(',\n')
if last_field_show_description or this_field_show_description:
s.append('\n')
if this_field_show_description:
s.append(_indent(f'# {f.description}\n', root_indent + 1))
last_field_show_description = this_field_show_description
s.append(
_indent(f'{f.key} = {_format_child(f.value)}', root_indent + 1))
s.append('\n')
s.append(_indent(close_bracket, root_indent))
return ''.join(s)
|
(self, compact: bool = False, verbose: bool = True, root_indent: int = 0, cls_name: Optional[str] = None, bracket_type: pyglove.core.object_utils.formatting.BracketType = <BracketType.ROUND: 0>, **kwargs) -> str
|
41,343 |
pyglove.core.typing.class_schema
|
get
|
Returns field by key with default value if not found.
|
def get(self,
key: Union[str, KeySpec],
default: Optional[Field] = None
) -> Optional[Field]:
"""Returns field by key with default value if not found."""
return self._fields.get(key, default)
|
(self, key: Union[str, pyglove.core.typing.class_schema.KeySpec], default: Optional[pyglove.core.typing.class_schema.Field] = None) -> Optional[pyglove.core.typing.class_schema.Field]
|
41,344 |
pyglove.core.typing.class_schema
|
get_field
|
Get field definition (Field) for a key.
Args:
key: string as input key.
Returns:
Matched field. A field is considered a match when:
* Its key spec is a ConstStrKey that equals the input key.
* Or it's the first field whose key spec is a NonConstKey
which matches the input key.
|
def get_field(self, key: str) -> Optional[Field]:
"""Get field definition (Field) for a key.
Args:
key: string as input key.
Returns:
Matched field. A field is considered a match when:
* Its key spec is a ConstStrKey that equals the input key.
* Or it's the first field whose key spec is a NonConstKey
which matches the input key.
"""
# For const string key, we can directly retrieve from fields dict.
if key in self._fields:
return self._fields[key]
if self._allow_nonconst_keys:
for key_spec, field in self._fields.items():
if key_spec.match(key):
return field
return None
|
(self, key: str) -> Optional[pyglove.core.typing.class_schema.Field]
|
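A short sketch of the lookup order described above, assuming a schema that allows non-const keys::
  import pyglove as pg

  schema = pg.typing.create_schema(
      [('x', pg.typing.Int()),
       (pg.typing.StrKey(), pg.typing.Str())],
      allow_nonconst_keys=True)
  assert schema.get_field('x') is not None         # Const-key match.
  assert schema.get_field('anything') is not None  # Matched by StrKey.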
41,345 |
pyglove.core.typing.class_schema
|
is_compatible
|
Returns whether current schema is compatible with the other schema.
NOTE(daiyip): schema A is compatible with schema B when:
schema A and schema B have the same keys, with compatible value specs.
Args:
other: Other schema.
Returns:
True if values that are acceptable to the other schema are also
acceptable to the current schema.
Raises:
TypeError: If `other` is not a schema object.
|
def is_compatible(self, other: 'Schema') -> bool:
"""Returns whether current schema is compatible with the other schema.
NOTE(daiyip): schema A is compatible with schema B when:
schema A and schema B have the same keys, with compatible value specs.
Args:
other: Other schema.
Returns:
True if values that are acceptable to the other schema are also
acceptable to the current schema.
Raises:
TypeError: If `other` is not a schema object.
"""
if not isinstance(other, Schema):
raise TypeError(f'Argument \'other\' should be a Schema object. '
f'Encountered {other}.')
for key_spec in other.keys():
if key_spec not in self:
return False
for key_spec, field in self.items():
if key_spec not in other:
return False
if not field.value.is_compatible(other[key_spec].value):
return False
return True
|
(self, other: pyglove.core.typing.class_schema.Schema) -> bool
|
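A hedged sketch of the direction of the compatibility check above: a broader schema is compatible with a narrower one, but not vice versa::
  import pyglove as pg

  broad = pg.typing.create_schema([('x', pg.typing.Int())])
  narrow = pg.typing.create_schema([('x', pg.typing.Int(min_value=0))])
  # Everything the narrow schema accepts is acceptable to the broad one.
  assert broad.is_compatible(narrow)
  assert not narrow.is_compatible(broad)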
41,346 |
pyglove.core.typing.class_schema
|
items
|
Returns an iterable of (KeySpec, Field) tuple in declaration order.
|
def items(self) -> Iterable[Tuple[KeySpec, Field]]:
"""Returns an iterable of (KeySpec, Field) tuple in declaration order."""
return self._fields.items()
|
(self) -> Iterable[Tuple[pyglove.core.typing.class_schema.KeySpec, pyglove.core.typing.class_schema.Field]]
|
41,347 |
pyglove.core.typing.class_schema
|
keys
|
Returns an iterable of KeySpecs in declaration order.
|
def keys(self) -> Iterable[KeySpec]:
"""Return an iteratable of KeySpecs in declaration order."""
return self._fields.keys()
|
(self) -> Iterable[pyglove.core.typing.class_schema.KeySpec]
|
41,348 |
pyglove.core.typing.class_schema
|
resolve
|
Resolve keys by grouping them by their matched fields.
Args:
keys: A list of string keys.
Returns:
A tuple of matched key results and unmatched keys.
Matched key results are an ordered dict of KeySpec to matched keys,
in field declaration order.
Unmatched keys are strings from input.
|
def resolve(
self, keys: Iterable[str]
) -> Tuple[Dict[KeySpec, List[str]], List[str]]:
"""Resolve keys by grouping them by their matched fields.
Args:
keys: A list of string keys.
Returns:
A tuple of matched key results and unmatched keys.
Matched key results are an ordered dict of KeySpec to matched keys,
in field declaration order.
Unmatched keys are strings from input.
"""
keys = list(keys)
input_keyset = set(keys)
nonconst_key_specs = [k for k in self._fields.keys() if not k.is_const]
nonconst_keys = {k: [] for k in nonconst_key_specs}
unmatched_keys = []
keys_by_key_spec = dict()
for key in keys:
if key not in self._fields:
matched_nonconst_keys = False
for key_spec in nonconst_key_specs:
if key_spec.match(key):
nonconst_keys[key_spec].append(key)
matched_nonconst_keys = True
break
if not matched_nonconst_keys:
unmatched_keys.append(key)
for key_spec in self._fields.keys():
keys = []
if not key_spec.is_const:
keys = nonconst_keys.get(key_spec, [])
elif key_spec in input_keyset:
keys.append(str(key_spec))
keys_by_key_spec[key_spec] = keys
return (keys_by_key_spec, unmatched_keys)
|
(self, keys: Iterable[str]) -> Tuple[Dict[pyglove.core.typing.class_schema.KeySpec, List[str]], List[str]]
|
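The grouping performed by ``resolve`` above can be sketched as follows (again assuming ``pg.typing.create_schema`` with ``allow_nonconst_keys=True``)::
  import pyglove as pg

  schema = pg.typing.create_schema(
      [('x', pg.typing.Int()),
       (pg.typing.StrKey(), pg.typing.Str())],
      allow_nonconst_keys=True)
  matched, unmatched = schema.resolve(['x', 'foo', 'bar'])
  # 'x' matches its const key; 'foo' and 'bar' fall under the StrKey spec.
  assert unmatched == []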
41,349 |
pyglove.core.typing.class_schema
|
set_description
|
Sets the description for the schema.
|
def set_description(self, description: str) -> None:
"""Sets the description for the schema."""
self._description = description
|
(self, description: str) -> NoneType
|
41,350 |
pyglove.core.typing.class_schema
|
set_name
|
Sets the name of this schema.
|
def set_name(self, name: str) -> None:
"""Sets the name of this schema."""
self._name = name
|
(self, name: str) -> NoneType
|
41,351 |
pyglove.core.typing.class_schema
|
to_json
| null |
def to_json(self, **kwargs) -> Dict[str, Any]:
return self.to_json_dict(
fields=dict(
fields=(list(self._fields.values()), []),
name=(self._name, None),
description=(self._description, None),
allow_nonconst_keys=(self._allow_nonconst_keys, False),
metadata=(self._metadata, {}),
),
exclude_default=True,
**kwargs,
)
|
(self, **kwargs) -> Dict[str, Any]
|
41,352 |
pyglove.core.typing.class_schema
|
validate
|
Validates whether the dict object conforms to the schema.
|
def validate(self,
dict_obj: Dict[str, Any],
allow_partial: bool = False,
root_path: Optional[object_utils.KeyPath] = None) -> None:
"""Validates whether dict object is conformed with the schema."""
self.apply(
copy.deepcopy(dict_obj),
allow_partial=allow_partial,
root_path=root_path)
|
(self, dict_obj: Dict[str, Any], allow_partial: bool = False, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath] = None) -> NoneType
|
41,353 |
pyglove.core.typing.class_schema
|
values
|
Returns an iterable of Field in declaration order.
|
def values(self) -> Iterable[Field]:
"""Returns an iterable of Field in declaration order."""
return self._fields.values()
|
(self) -> Iterable[pyglove.core.typing.class_schema.Field]
|
41,354 |
pyglove.core.symbolic.base
|
Symbolic
|
Base for all symbolic types.
Symbolic types are types that provide interfaces for symbolic programming,
based on which symbolic objects can be created. In PyGlove, there are three
categories of symbolic types:
* Symbolic classes: Defined by :class:`pyglove.Object` subclasses,
including symbolic classes created from :func:`pyglove.symbolize`, which
inherit :class:`pyglove.ClassWrapper`, a subclass of ``pg.Object``.
* Symbolic functions: Defined by :class:`pyglove.Functor`.
* Symbolic container types: Defined by :class:`pyglove.List` and
:class:`pyglove.Dict`.
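For instance, a minimal sketch touching all three categories (the class ``A`` and functor ``add`` are hypothetical examples)::
  import pyglove as pg

  @pg.members([('x', pg.typing.Int(default=0))])
  class A(pg.Object):        # A symbolic class.
    pass

  @pg.functor()
  def add(a, b):             # A symbolic function.
    return a + b

  c = pg.Dict(a=A(), b=pg.List([1, 2]))  # Symbolic containers.
  assert c.a.x == 0
  assert add(1, 2)() == 3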
|
class Symbolic(
TopologyAware,
object_utils.JSONConvertible,
object_utils.MaybePartial,
object_utils.Formattable,
):
"""Base for all symbolic types.
Symbolic types are types that provide interfaces for symbolic programming,
based on which symbolic objects can be created. In PyGlove, there are three
categories of symbolic types:
* Symbolic classes: Defined by :class:`pyglove.Object` subclasses,
including symbolic classes created from :func:`pyglove.symbolize`, which
inherit :class:`pyglove.ClassWrapper`, a subclass of ``pg.Object``.
* Symbolic functions: Defined by :class:`pyglove.Functor`.
* Symbolic container types: Defined by :class:`pyglove.List` and
:class:`pyglove.Dict`.
"""
# Do not include comments in str output.
__str_format_kwargs__ = dict(compact=False, verbose=False)
# Symbolic sub-types that will be set when they are defined.
# pylint: disable=invalid-name
DictType = None
ListType = None
ObjectType = None
# pylint: enable=invalid-name
def __init__(self,
*,
allow_partial: bool,
accessor_writable: bool,
sealed: bool,
root_path: Optional[object_utils.KeyPath],
init_super: bool = True):
"""Constructor.
Args:
allow_partial: Whether to allow required fields to be MISSING_VALUE or
partial.
accessor_writable: Whether to allow write access via attributes. This flag
is useful when we want to enforce updates of fields using the `rebind`
method, which leads to better trackability and batched field update
notifications.
sealed: Whether the object is sealed so that it cannot be changed. This
flag is useful when we don't want downstream code to modify the object.
root_path: KeyPath of current object in its context (object tree).
init_super: If True, call super.__init__, otherwise short-circuit. This
flag is useful when users want to explicitly implement `__init__` for
multiple inheritance, which is needed to pass different arguments to
different bases. Please see `symbolic_test.py#testMultiInheritance`
for more details.
"""
# NOTE(daiyip): we use `self._set_raw_attr` here to prevent overridden
# `__setattr__` from subclasses from changing the behavior unintentionally.
self._set_raw_attr('_allow_partial', allow_partial)
self._set_raw_attr('_accessor_writable', accessor_writable)
self._set_raw_attr('_sealed', sealed)
# NOTE(daiyip): parent is used for rebind call to notify their ancestors
# for updates, not for external usage.
self._set_raw_attr('_sym_parent', None)
self._set_raw_attr('_sym_path', root_path or object_utils.KeyPath())
self._set_raw_attr('_sym_puresymbolic', None)
self._set_raw_attr('_sym_missing_values', None)
self._set_raw_attr('_sym_nondefault_values', None)
origin = Origin(None, '__init__') if flags.is_tracking_origin() else None
self._set_raw_attr('_sym_origin', origin)
# super.__init__ may enter the next base class's __init__ when
# multiple inheritance is used. Since we have overridden `__setattr__` for
# symbolic.Object, which depends on `_accessor_writable` and so on,
# we want to make `__setattr__` ready to call before entering
# other bases' `__init__`.
if init_super:
super().__init__()
else:
object.__init__(self)
def _init_kwargs(self) -> Dict[str, Any]:
kwargs = {}
def add_if_nondefault(key, attrname, default):
v = getattr(self, attrname)
if v != default:
kwargs[key] = v
add_if_nondefault('allow_partial', '_allow_partial', False)
add_if_nondefault('sealed', '_sealed', False)
return kwargs
#
# Formal contract for symbolic operations.
#
# NOTE(daiyip): Since methods such as `__getattr__` and `keys` can be
# overridden by subclasses of `pg.Object`, we introduce a set of methods
# with the signature `sym_<xxx>` as the contract for symbolically operating
# on a symbolic value, which are less likely to clash with other names.
# These methods will be used inside the PyGlove library. Users can use
# either these methods or their convenient versions, per their preference.
#
@property
def sym_partial(self) -> bool:
"""Returns True if current value is partial."""
return bool(self.sym_missing(flatten=False))
@property
def sym_puresymbolic(self) -> bool:
"""Returns True if current value is or contains subnodes of PureSymbolic."""
pure_symbolic = getattr(self, '_sym_puresymbolic')
if pure_symbolic is None:
pure_symbolic = isinstance(self, PureSymbolic)
if not pure_symbolic:
for v in self.sym_values():
if is_pure_symbolic(v):
pure_symbolic = True
break
self._set_raw_attr('_sym_puresymbolic', pure_symbolic)
return pure_symbolic
@property
def sym_abstract(self) -> bool:
"""Returns True if current value is abstract (partial or pure symbolic)."""
return self.sym_partial or self.sym_puresymbolic
@property
def sym_sealed(self) -> bool:
"""Returns True if current object is sealed."""
return getattr(self, '_sealed')
def sym_seal(self, is_seal: bool = True) -> 'Symbolic':
"""Seals or unseals current object from further modification."""
return self._set_raw_attr('_sealed', is_seal)
def sym_missing(self, flatten: bool = True) -> Dict[str, Any]:
"""Returns missing values."""
missing = getattr(self, '_sym_missing_values')
if missing is None:
missing = self._sym_missing()
self._set_raw_attr('_sym_missing_values', missing)
if flatten:
missing = object_utils.flatten(missing)
return missing
def sym_nondefault(self, flatten: bool = True) -> Dict[Union[int, str], Any]:
"""Returns missing values."""
nondefault = getattr(self, '_sym_nondefault_values')
if nondefault is None:
nondefault = self._sym_nondefault()
self._set_raw_attr('_sym_nondefault_values', nondefault)
if flatten:
nondefault = object_utils.flatten(nondefault)
return nondefault
@property
def sym_field(self) -> Optional[pg_typing.Field]:
"""Returns the symbolic field for current object."""
if self.sym_parent is None:
return None
return self.sym_parent.sym_attr_field(self.sym_path.key)
@property
def sym_root(self) -> 'Symbolic':
"""Returns the root of the symbolic tree."""
root = self
while root.sym_parent is not None:
root = root.sym_parent
return root
def sym_ancestor(
self,
where: Optional[Callable[[Any], bool]] = None,
) -> Optional['Symbolic']:
"""Returns the nearest ancestor of specific classes."""
ancestor = self.sym_parent
where = where or (lambda x: True)
while ancestor is not None and not where(ancestor):
ancestor = ancestor.sym_parent
return ancestor
def sym_descendants(
self,
where: Optional[Callable[[Any], bool]] = None,
option: DescendantQueryOption = DescendantQueryOption.ALL,
include_self: bool = False) -> List[Any]:
"""Returns all descendants of specific classes.
Args:
where: Optional callable object as the filter of descendants to return.
option: Descendant query options, indicating whether all matched,
immediate matched or only the matched leaf nodes will be returned.
include_self: If True, `self` will be included in the query, otherwise
only strict descendants are included.
Returns:
A list of objects that match the descendant_cls.
"""
descendants = []
where = where or (lambda x: True)
def visit(k, v, p):
del k, p
if not where(v):
return TraverseAction.ENTER
if not include_self and self is v:
return TraverseAction.ENTER
if option == DescendantQueryOption.IMMEDIATE:
descendants.append(v)
return TraverseAction.CONTINUE
# Dealing with option = ALL or LEAF.
leaf_descendants = []
if isinstance(v, Symbolic):
leaf_descendants = v.sym_descendants(where, option)
if option is DescendantQueryOption.ALL or not leaf_descendants:
descendants.append(v)
descendants.extend(leaf_descendants)
return TraverseAction.CONTINUE
traverse(self, visit)
return descendants
@abc.abstractmethod
def sym_attr_field(self, key: Union[str, int]) -> Optional[pg_typing.Field]:
"""Returns the field definition for a symbolic attribute."""
def sym_has(self, path: Union[object_utils.KeyPath, str, int]) -> bool:
"""Returns True if a path exists in the sub-tree.
Args:
path: A KeyPath object or equivalence.
Returns:
True if the path exists in current sub-tree, otherwise False.
"""
return object_utils.KeyPath.from_value(path).exists(self)
def sym_get(
self,
path: Union[object_utils.KeyPath, str, int],
default: Any = RAISE_IF_NOT_FOUND) -> Any:
"""Returns a sub-node by path.
NOTE: there is no `sym_set`, use `sym_rebind`.
Args:
path: A KeyPath object or equivalence.
default: Default value if path does not exist. If absent, `KeyError` will
be raised.
Returns:
Value of symbolic attribute specified by path if found, otherwise the
default value if it's specified.
Raises:
KeyError if `path` does not exist and `default` is not specified.
"""
path = object_utils.KeyPath.from_value(path)
if default is RAISE_IF_NOT_FOUND:
return path.query(self)
else:
return path.get(self, default)
@abc.abstractmethod
def sym_hasattr(self, key: Union[str, int]) -> bool:
"""Returns if a symbolic attribute exists."""
def sym_getattr(
self, key: Union[str, int], default: Any = RAISE_IF_NOT_FOUND
) -> Any:
"""Gets a symbolic attribute.
Args:
key: Key of symbolic attribute.
default: Default value if attribute does not exist. If absent,
`AttributeError` will be raised.
Returns:
Value of symbolic attribute if found, otherwise the default value
if it's specified.
Raises:
AttributeError if `key` does not exist and `default` is not provided.
"""
if not self.sym_hasattr(key):
if default is RAISE_IF_NOT_FOUND:
raise AttributeError(
self._error_message(
f'{self.__class__!r} object has no symbolic attribute {key!r}.'
)
)
return default
return self._sym_getattr(key)
def sym_inferrable(self, key: Union[str, int], **kwargs) -> bool:
"""Returns True if the attribute under key can be inferred."""
return (
self.sym_inferred(key, pg_typing.MISSING_VALUE, **kwargs)
!= pg_typing.MISSING_VALUE
)
def sym_inferred(
self,
key: Union[str, int],
default: Any = RAISE_IF_NOT_FOUND,
**kwargs,
) -> Any:
"""Returns the inferred value of the attribute under key."""
if default is RAISE_IF_NOT_FOUND:
return self._sym_inferred(key, **kwargs)
else:
try:
return self._sym_inferred(key, **kwargs)
except Exception: # pylint: disable=broad-exception-caught
return default
def _sym_inferred(self, key: Union[str, int], **kwargs) -> Any:
v = self.sym_getattr(key)
if isinstance(v, Inferential):
v = v.infer(**kwargs)
return v
@abc.abstractmethod
def sym_keys(self) -> Iterator[Union[str, int]]:
"""Iterates the keys of symbolic attributes."""
@abc.abstractmethod
def sym_values(self) -> Iterator[Any]:
"""Iterates the values of symbolic attributes."""
@abc.abstractmethod
def sym_items(self) -> Iterator[Tuple[Union[str, int], Any]]:
"""Iterates the (key, value) pairs of symbolic attributes."""
@property
def sym_parent(self) -> 'Symbolic':
"""Returns the containing symbolic object."""
return getattr(self, '_sym_parent')
def sym_setparent(self, parent: 'Symbolic'):
"""Sets the parent of current node in the symbolic tree."""
self._set_raw_attr('_sym_parent', parent)
def sym_contains(
self,
value: Any = None,
type: Union[None, Type[Any], Tuple[Type[Any]]] = None # pylint: disable=redefined-builtin
) -> bool:
"""Returns True if the object contains sub-nodes of given value or type."""
return contains(self, value, type)
@property
def sym_path(self) -> object_utils.KeyPath:
"""Returns the path of current object from the root of its symbolic tree."""
return getattr(self, '_sym_path')
def sym_setpath(
self, path: Optional[Union[str, object_utils.KeyPath]]) -> None:
"""Sets the path of current node in its symbolic tree."""
if self.sym_path != path:
old_path = self.sym_path
self._set_raw_attr('_sym_path', path)
self._update_children_paths(old_path, path)
def sym_rebind(
self,
path_value_pairs: Optional[Union[
Dict[
Union[object_utils.KeyPath, str, int],
Any],
Callable]] = None, # pylint: disable=g-bare-generic
*,
raise_on_no_change: bool = True,
notify_parents: bool = True,
skip_notification: Optional[bool] = None,
**kwargs,
) -> 'Symbolic':
"""Mutates the sub-nodes of current object. Please see `rebind`."""
assert Symbolic.DictType is not None
if callable(path_value_pairs):
path_value_pairs = get_rebind_dict(path_value_pairs, self)
elif path_value_pairs is None:
path_value_pairs = {}
elif isinstance(path_value_pairs, Symbolic.DictType):
# Rebind works on the symbolic form, thus we get the symbols instead of
# the evaluated values when building the rebind dict.
sd = typing.cast(Symbolic.DictType, path_value_pairs)
path_value_pairs = {k: v for k, v in sd.sym_items()}
if not isinstance(path_value_pairs, dict):
raise ValueError(
self._error_message(
f'Argument \'path_value_pairs\' should be a dict. '
f'Encountered {path_value_pairs}'))
path_value_pairs.update(kwargs)
path_value_pairs = {object_utils.KeyPath.from_value(k): v
for k, v in path_value_pairs.items()}
if not path_value_pairs and raise_on_no_change:
raise ValueError(self._error_message('There are no values to rebind.'))
updates = self._sym_rebind(path_value_pairs)
if skip_notification is None:
skip_notification = not flags.is_change_notification_enabled()
if not skip_notification:
self._notify_field_updates(updates, notify_parents=notify_parents)
return self
def sym_clone(self,
deep: bool = False,
memo: Optional[Any] = None,
override: Optional[Dict[str, Any]] = None):
"""Clones current object symbolically."""
assert deep or not memo
new_value = self._sym_clone(deep, memo)
if override:
new_value.sym_rebind(override, raise_on_no_change=False)
if flags.is_tracking_origin():
new_value.sym_setorigin(self, 'deepclone' if deep else 'clone')
return new_value
@abc.abstractmethod
def sym_jsonify(self,
*,
hide_default_values: bool = False,
**kwargs) -> object_utils.JSONValueType:
"""Converts representation of current object to a plain Python object."""
def sym_ne(self, other: Any) -> bool:
"""Returns if this object does not equal to another object symbolically."""
return ne(self, other)
def sym_eq(self, other: Any) -> bool:
"""Returns if this object equals to another object symbolically."""
return eq(self, other)
def sym_gt(self, other: Any) -> bool:
"""Returns if this object is symbolically greater than another object."""
return gt(self, other)
def sym_lt(self, other: Any) -> bool:
"""Returns True if this object is symbolically less than other object."""
return lt(self, other)
@abc.abstractmethod
def sym_hash(self) -> int:
"""Computes the symbolic hash of current object."""
@property
def sym_origin(self) -> Optional[Origin]:
"""Returns the symbolic origin of current object."""
return getattr(self, '_sym_origin')
def sym_setorigin(
self,
source: Any,
tag: str,
stacktrace: Optional[bool] = None,
stacklimit: Optional[int] = None,
stacktop: int = -1):
"""Sets the symbolic origin of current object.
Args:
source: Source value for current object.
tag: A descriptive tag of the origin. Built-in tags are:
`__init__`, `clone`, `deepclone`, `return`. Users can manually
call `sym_setorigin` with custom tag value.
stacktrace: If True, enable stack trace for the origin. If None, enable
stack trace if `pg.track_origin()` is called. Otherwise stack trace is
disabled.
stacklimit: An optional integer to limit the stack depth. If None, it's
determined by the value passed to `pg.set_origin_stacktrace_limit`,
which is 10 by default.
stacktop: A negative or zero-value integer indicating the stack top among
the stack frames that we want to present to the user; by default it's
1 level up from the stack frame within the current `sym_setorigin` call.
Example::
def foo():
return bar()
def bar():
s = MyObject()
t = s.build()
t.sym_setorigin(s, 'builder',
stacktrace=True, stacklimit=5, stacktop=-1)
This example sets the origin of `t` using `s` as its source with tag
'builder'. We also record the callstack where the `sym_setorigin` is
called, so users can call `t.sym_origin.stacktrace` to get the call stack
later. The `stacktop` -1 indicates that we do not need the stack frame
within ``sym_setorigin``, so users will see the stack top within the
function `bar`. We also set the max number of stack frames to display to 5,
not including the stack frame inside ``sym_setorigin``.
"""
if self.sym_origin is not None:
current_source = self.sym_origin.source
if current_source is not None and current_source is not source:
raise ValueError(
f'Cannot set the origin with a different source value. '
f'Origin source: {current_source!r}, New source: {source!r}.')
# NOTE(daiyip): We decrement the stacktop by 1 as the physical stack top
# is within Origin.
self._set_raw_attr(
'_sym_origin',
Origin(source, tag, stacktrace, stacklimit, stacktop - 1))
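# --- Illustrative usage (a minimal sketch, not part of the class body) ---
# Manual origin tagging, mirroring the docstring above; `build_spec` is a
# hypothetical helper:
def build_spec(config):
  spec = config.clone(deep=True)
  spec.sym_setorigin(config, 'builder', stacktrace=True, stacklimit=5)
  return spec

config = pg.Dict(lr=0.1)
spec = build_spec(config)
assert spec.sym_origin.source is config
assert spec.sym_origin.tag == 'builder'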
#
# Methods for operating the control flags of symbolic behaviors.
#
@property
def allow_partial(self) -> bool:
"""Returns True if partial binding is allowed."""
return getattr(self, '_allow_partial')
@property
def accessor_writable(self) -> bool:
"""Returns True if mutation can be made by attribute assignment."""
return getattr(self, '_accessor_writable')
def set_accessor_writable(self, writable: bool = True) -> 'Symbolic':
"""Sets accessor writable."""
return self._set_raw_attr('_accessor_writable', writable)
#
# Easier-to-access aliases of formal symbolic operations.
#
@property
def is_partial(self) -> bool:
"""Alias for `sym_partial`."""
return self.sym_partial
@property
def is_pure_symbolic(self) -> bool:
"""Alias for `sym_puresymbolic`."""
return self.sym_puresymbolic
@property
def is_abstract(self) -> bool:
"""Alias for `sym_abstract`."""
return self.sym_abstract
@property
def is_deterministic(self) -> bool:
"""Returns if current object is deterministic."""
return is_deterministic(self)
def missing_values(self, flatten: bool = True) -> Dict[str, Any]:
"""Alias for `sym_missing`."""
return self.sym_missing(flatten)
def non_default_values(
self, flatten: bool = True) -> Dict[Union[int, str], Any]:
"""Alias for `sym_nondefault`."""
return self.sym_nondefault(flatten)
def seal(self, sealed: bool = True) -> 'Symbolic':
"""Alias for `sym_seal`."""
return self.sym_seal(sealed)
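# --- Illustrative usage (a minimal sketch, not part of the class body) ---
# Sealing blocks mutations until the object is unsealed; this assumes
# `pg.WritePermissionError` is exported, as referenced in the Raises
# section of `rebind` below:
d = pg.Dict(x=1).seal()
assert d.is_sealed
try:
  d.rebind(x=2)            # Mutating a sealed object is not allowed.
except pg.WritePermissionError:
  pass
d.seal(False).rebind(x=2)  # Unseal, then mutate.
assert d.x == 2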
@property
def is_sealed(self) -> bool:
"""Alias for `sym_sealed`."""
return self.sym_sealed
def rebind(
self,
path_value_pairs: Optional[Union[
Dict[
Union[object_utils.KeyPath, str, int],
Any],
Callable]] = None, # pylint: disable=g-bare-generic
*,
raise_on_no_change: bool = True,
notify_parents: bool = True,
skip_notification: Optional[bool] = None,
**kwargs) -> 'Symbolic':
"""Alias for `sym_rebind`.
`rebind` is the recommended way for mutating
symbolic objects in PyGlove:
* It allows mutations not only on immediate child nodes, but on the
entire sub-tree.
* It allows mutations by rules via passing a callable object as the
value for `path_value_pairs`.
* It batches the updates from multiple sub-nodes, which triggers the
`_on_change` or `_on_bound` event once for recomputing the parent
object's internal states.
* It respects the "sealed" flag of the object and the `pg.seal`
context manager, raising a permission error on sealed objects.
Example::
#
# Rebind on pg.Object subclasses.
#
@pg.members([
('x', pg.typing.Dict([
('y', pg.typing.Int(default=0))
])),
('z', pg.typing.Int(default=1))
])
class A(pg.Object):
pass
a = A()
# Rebind using path-value pairs.
a.rebind({
'x.y': 1,
'z': 0
})
# Rebind using **kwargs.
a.rebind(x={'y': 1}, z=0)
# Rebind using rebinders.
# Rebind based on path.
a.rebind(lambda k, v: 1 if k == 'x.y' else v)
# Rebind based on key.
a.rebind(lambda k, v: 1 if k and k.key == 'y' else v)
# Rebind based on value.
a.rebind(lambda k, v: 0 if v == 1 else v)
# Rebind based on value and parent.
a.rebind(lambda k, v, p: (0 if isinstance(p, A) and isinstance(v, int)
else v))
#
# Rebind on pg.Dict.
#
d = pg.Dict(value_spec=pg.typing.Dict([
('a', pg.typing.Dict([
('b', pg.typing.Int()),
])),
('c', pg.typing.Float())
]))
# Rebind using **kwargs.
d.rebind(a={'b': 1}, c=1.0)
# Rebind using key path to value dict.
d.rebind({
'a.b': 2,
'c': 2.0
})
# NOT OKAY: **kwargs and dict/rebinder cannot be used at the same time.
d.rebind({'a.b': 2}, c=2)
# Rebind with rebinder by path (on subtree).
d.rebind(lambda k, v: 1 if k.key == 'b' else v)
# Rebind with rebinder by value (on subtree).
d.rebind(lambda k, v: 0 if isinstance(v, int) else v)
#
# Rebind on pg.List.
#
l = pg.List([{
'a': 'foo',
'b': 0,
}
],
value_spec = pg.typing.List(pg.typing.Dict([
('a', pg.typing.Str()),
('b', pg.typing.Int())
]), max_size=10))
# Rebind using integer as list index: update semantics on list[0].
l.rebind({
0: {
'a': 'bar',
'b': 1
}
})
# Rebind: trigger append semantics when index is larger than list length.
l.rebind({
999: {
'a': 'fun',
'b': 2
}
})
# Rebind using key path.
l.rebind({
'[0].a': 'bar2',
'[1].b': 3
})
# Rebind using function (rebinder).
# Change all integers to 0 in sub-tree.
l.rebind(lambda k, v: v if not isinstance(v, int) else 0)
Args:
path_value_pairs: A dictionary of key or key path to new field value, or
a function that generates updates based on the key path, value and
parent of each node under current object. We use the term 'rebinder'
for this type of function. The signature of a rebinder is:
`(key_path: pg.KeyPath, value: Any)` or
`(key_path: pg.KeyPath, value: Any, parent: pg.Symbolic)`
raise_on_no_change: If True, raises ``ValueError`` when there are no
values to change. This is useful when rebinder is used, which may or
may not generate any updates.
notify_parents: If True (default), parents will be notified upon change.
Otherwise only the current object and the impacted children will
be notified. A common use case for setting this flag to False
is when users want to rebind a child within the parent's `_on_bound`
method.
skip_notification: If True, there will be no ``_on_change`` event
triggered from current `rebind`. If None, the default value will be
inferred from the :func:`pyglove.notify_on_change` context manager.
Use it only when you are certain that current rebind does not
invalidate internal states of its object tree.
**kwargs: For ``pg.Dict`` and ``pg.Object`` subclasses, user can use
keyword arguments (in format of `<field_name>=<field_value>`) to
directly modify immediate child nodes.
Returns:
Self.
Raises:
WritePermissionError: If object is sealed.
KeyError: If update location specified by key or key path is not aligned
with the schema of the object tree.
TypeError: If updated field value type does not conform to field spec.
ValueError: If updated field value is not acceptable according to field
spec, or nothing is updated and `raise_on_no_change` is set to
True.
"""
return self.sym_rebind(
path_value_pairs,
raise_on_no_change=raise_on_no_change,
notify_parents=notify_parents,
skip_notification=skip_notification,
**kwargs)
def clone(
self,
deep: bool = False,
memo: Optional[Any] = None,
override: Optional[Dict[str, Any]] = None
) -> 'Symbolic':
"""Clones current object symbolically.
Args:
deep: If True, perform deep copy (equivalent to copy.deepcopy). Otherwise
shallow copy (equivalent to copy.copy).
memo: Memo object for deep clone.
override: An optional dict of key path to new values to override cloned
value.
Returns:
A copy of self.
"""
return self.sym_clone(deep, memo, override)
def to_json(self, **kwargs) -> object_utils.JSONValueType:
"""Alias for `sym_jsonify`."""
return self.sym_jsonify(**kwargs)
def to_json_str(self, json_indent: Optional[int] = None, **kwargs) -> str:
"""Serializes current object into a JSON string."""
return json.dumps(self.sym_jsonify(**kwargs), indent=json_indent)
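# --- Illustrative usage (a minimal sketch, not part of the class body) ---
# JSON round-trip through the aliases above, assuming the public
# `pg.from_json_str` helper:
d = pg.Dict(x=1, y=[1, 2])
s = d.to_json_str(json_indent=2)
restored = pg.from_json_str(s)
assert pg.eq(d, restored)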
@classmethod
def load(cls, *args, **kwargs) -> Any:
"""Loads an instance of this type using the global load handler."""
value = load(*args, **kwargs)
if not isinstance(value, cls):
raise TypeError(f'Value is not of type {cls!r}: {value!r}.')
return value
def save(self, *args, **kwargs) -> Any:
"""Saves current object using the global save handler."""
return save(self, *args, **kwargs)
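# --- Illustrative usage (a minimal sketch, not part of the class body) ---
# Assuming the default load/save handlers read and write JSON files
# (the file path below is hypothetical):
d = pg.Dict(x=1)
d.save('/tmp/value.json')
restored = pg.Dict.load('/tmp/value.json')  # Raises TypeError on type mismatch.
assert pg.eq(d, restored)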
def inspect(
self,
path_regex: Optional[str] = None,
where: Optional[Union[Callable[[Any], bool],
Callable[[Any, Any], bool]]] = None,
custom_selector: Optional[Union[
Callable[[object_utils.KeyPath, Any], bool],
Callable[[object_utils.KeyPath, Any, Any], bool]]] = None,
file=sys.stdout, # pylint: disable=redefined-builtin
**kwargs) -> None:
"""Inspects current object by printing out selected values.
Example::
@pg.members([
('x', pg.typing.Int(0)),
('y', pg.typing.Int())
])
class A(pg.Object):
pass
value = {
'a1': A(x=0, y=0),
'a2': [A(x=1, y=1), A(x=1, y=2)],
'a3': {
'p': A(x=2, y=1),
'q': A(x=2, y=2)
}
}
# Inspect without constraint,
# which is equivalent to `print(value.format(hide_default_values=True))`.
# Shall print:
# {
# a1 = A(y=0)
# a2 = [
# 0: A(x=1, y=1)
# 1: A(x=1, y=2)
# ]
# a3 = {
# p = A(x=2, y=1)
# q = A(x=2, y=2)
# }
# }
value.inspect(hide_default_values=True)
# Inspect by path regex.
# Shall print:
# {'a3.p': A(x=2, y=1)}
value.inspect(r'.*p')
# Inspect by value.
# Shall print:
# {
# 'a3.p.x': 2,
# 'a3.q.x': 2,
# 'a3.q.y': 2,
# }
value.inspect(where=lambda v: v==2)
# Inspect by path, value and parent.
# Shall print:
# {
# 'a2[1].y': 2
# }
value.inspect(
r'.*y', where=lambda v, p: v > 1 and isinstance(p, A) and p.x == 1)
# Inspect by custom_selector.
# Shall print:
# {
# 'a2[0].x': 1,
# 'a2[0].y': 1,
# 'a3.q.x': 2,
# 'a3.q.y': 2
# }
value.inspect(
custom_selector=lambda k, v, p: (
len(k) == 3 and isinstance(p, A) and p.x == v))
Args:
path_regex: Optional regex expression to constrain path.
where: Optional callable to constrain value and parent when path matches
`path_regex` or `path_regex` is not provided. The signature is:
`(value) -> should_select`, or `(value, parent) -> should_select`.
custom_selector: Optional callable object as custom selector. When
`custom_selector` is provided, `path_regex` and `where` must be None.
The signature of `custom_selector` is:
`(key_path, value) -> should_select`
or `(key_path, value, parent) -> should_select`.
file: Output file stream. This can be any object with a `write(str)`
method.
**kwargs: Wildcard keyword arguments to pass to `format`.
"""
if path_regex is None and where is None and custom_selector is None:
v = self
else:
v = query(self, path_regex, where, False, custom_selector)
object_utils.print(v, file=file, **kwargs)
def __copy__(self) -> 'Symbolic':
"""Overridden shallow copy."""
return self.sym_clone(deep=False)
def __deepcopy__(self, memo) -> 'Symbolic':
"""Overridden deep copy."""
return self.sym_clone(deep=True, memo=memo)
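# --- Illustrative usage (a minimal sketch, not part of the class body) ---
# The overrides above route the standard copy protocol through symbolic
# cloning:
import copy

d = pg.Dict(x=pg.Dict(y=1))
shallow = copy.copy(d)    # Dispatches to d.sym_clone(deep=False).
deep = copy.deepcopy(d)   # Dispatches to d.sym_clone(deep=True).
deep.rebind({'x.y': 2})
assert d.x.y == 1 and deep.x.y == 2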
#
# Protected methods to implement in subclasses.
#
@abc.abstractmethod
def _sym_rebind(
self, path_value_pairs: Dict[object_utils.KeyPath, Any]
) -> List[FieldUpdate]:
"""Subclass specific rebind implementation.
Args:
path_value_pairs: A dictionary of key path to new field value.
Returns:
A list of FieldUpdate from this rebind.
Raises:
WritePermissionError: If object is sealed.
KeyError: If update location specified by key or key path is not aligned
with the schema of the object tree.
TypeError: If updated field value type does not conform to field spec.
ValueError: If updated field value is not acceptable according to field
spec.
"""
@abc.abstractmethod
def _sym_missing(self) -> Dict[str, Any]:
"""Returns missing values."""
@abc.abstractmethod
def _sym_nondefault(self) -> Dict[Union[int, str], Any]:
"""Returns non-default values."""
@abc.abstractmethod
def _sym_getattr(self, key: Union[str, int]) -> Any:
"""Get symbolic attribute by key."""
@abc.abstractmethod
def _sym_clone(self, deep: bool, memo=None) -> 'Symbolic':
"""Subclass specific clone implementation."""
@abc.abstractmethod
def _update_children_paths(
self,
old_path: object_utils.KeyPath,
new_path: object_utils.KeyPath) -> None:
"""Update children paths according to root_path of current node."""
@abc.abstractmethod
def _set_item_without_permission_check(
self, key: Union[str, int], value: Any) -> Optional[FieldUpdate]:
"""Child should implement: set an item without permission check."""
@abc.abstractmethod
def _on_change(self, field_updates: Dict[object_utils.KeyPath, FieldUpdate]):
"""Event that is triggered when field values in the subtree are updated.
This event will be called
* On per-field basis when object is modified via attribute.
* In batch when multiple fields are modified via `rebind` method.
When a field in an object tree is updated, all ancestors' `_on_change` event
will be triggered in order, from the nearest one to furthest one.
Args:
field_updates: Updates made to the subtree. Key path is relative to
current object.
"""
@property
@abc.abstractmethod
def _subscribes_field_updates(self) -> bool:
"""Returns True if current object subscribes field updates in `on_change`.
NOTE(daiyip): When it returns False, we don't need to compute field updates
for this object, but simply invoke onchange with empty fields.
"""
#
# Protected helper methods.
#
def _set_raw_attr(self, name: str, value: Any) -> 'Symbolic':
"""Set raw property without trigger __setattr__."""
# `object.__setattr__` adds a property to the instance without side effects.
object.__setattr__(self, name, value)
return self
def _relocate_if_symbolic(self, key: Union[str, int], value: Any) -> Any:
"""Relocate if a symbolic value is to be inserted as member.
NOTE(daiyip): when a symbolic value is inserted into the object tree,
if it already has a parent, we need to make a shallow copy of this object
to avoid multiple parents. Otherwise we need to set its parent and root_path
according to current object.
Args:
key: Key used to insert the value.
value: Value to be inserted.
Returns:
Formalized value that is ready for insertion as a member.
"""
if isinstance(value, Symbolic):
# NOTE(daiyip): make a copy of symbolic object if it belongs to another
# object tree, this prevents it from having multiple parents. See
# List._formalized_value for similar logic.
root_path = object_utils.KeyPath(key, self.sym_path)
if (value.sym_parent is not None and
(value.sym_parent is not self
or root_path != value.sym_path)):
value = value.clone()
if isinstance(value, TopologyAware):
value.sym_setpath(object_utils.KeyPath(key, self.sym_path))
value.sym_setparent(self._sym_parent_for_children())
return value
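# --- Illustrative usage (a minimal sketch, not part of the class body) ---
# A visible consequence of the relocation logic above: inserting a value
# that already has a parent stores a copy, keeping every node
# single-parented:
d1 = pg.Dict(child=pg.Dict(v=1))
d2 = pg.Dict(child=d1.child)     # `d1.child` already has a parent...
assert d2.child is not d1.child  # ...so a shallow copy is inserted.
d2.rebind({'child.v': 2})
assert d1.child.v == 1           # The original tree is unaffected.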
def _sym_parent_for_children(self) -> Optional['Symbolic']:
"""Returns the symbolic parent for children."""
return self
def _set_item_of_current_tree(
self, path: object_utils.KeyPath, value: Any) -> Optional[FieldUpdate]:
"""Set a field of current tree by key path and return its parent."""
assert isinstance(path, object_utils.KeyPath), path
if not path:
raise KeyError(
self._error_message(
f'Root key \'$\' cannot be used in '
f'{self.__class__.__name__}.rebind. '
f'Encountered {path!r}'))
parent_node = path.parent.query(self)
if not isinstance(parent_node, Symbolic):
raise KeyError(
f'Cannot rebind key {path.key!r}: {parent_node!r} is not a '
f'symbolic type. (path=\'{path.parent}\')')
if treats_as_sealed(parent_node):
raise WritePermissionError(
f'Cannot rebind key {path.key!r} of '
f'sealed {parent_node.__class__.__name__}: {parent_node!r}. '
f'(path=\'{path.parent}\')')
return parent_node._set_item_without_permission_check(path.key, value) # pylint: disable=protected-access
def _notify_field_updates(
self,
field_updates: List[FieldUpdate],
notify_parents: bool = True) -> None:
"""Notify field updates."""
per_target_updates = dict()
def _get_target_updates(
target: 'Symbolic'
) -> Dict[object_utils.KeyPath, FieldUpdate]:
target_id = id(target)
if target_id not in per_target_updates:
per_target_updates[target_id] = (target, dict())
return per_target_updates[target_id][1]
for update in field_updates:
target = update.target
while target is not None:
target_updates = _get_target_updates(target)
if target._subscribes_field_updates: # pylint: disable=protected-access
relative_path = update.path - target.sym_path
target_updates[relative_path] = update
target = target.sym_parent
# Trigger the notification bottom-up, thus the parent node will always
# be notified after the child nodes.
for target, updates in sorted(per_target_updates.values(),
key=lambda x: x[0].sym_path,
reverse=True):
# Reset content-based cache for the object being notified.
target._set_raw_attr('_sym_puresymbolic', None) # pylint: disable=protected-access
target._set_raw_attr('_sym_missing_values', None) # pylint: disable=protected-access
target._set_raw_attr('_sym_nondefault_values', None) # pylint: disable=protected-access
target._on_change(updates) # pylint: disable=protected-access
# If `notify_parents` is set to False, stop notifications once `self`
# is processed.
if target is self and not notify_parents:
break
def _error_message(self, message: str) -> str:
"""Create error message to include path information."""
return object_utils.message_on_path(message, self.sym_path)
|
(*, allow_partial: bool, accessor_writable: bool, sealed: bool, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath], init_super: bool = True)
|
41,358 |
pyglove.core.symbolic.base
|
__init__
|
Constructor.
Args:
allow_partial: Whether to allow required fields to be MISSING_VALUE or
partial.
accessor_writable: Whether to allow write access via attributes. This flag
is useful when we want to enforce update of fields using `rebind`
method, which leads to better trackability and batched field update
notification.
sealed: Whether object is sealed that cannot be changed. This flag is
useful when we don't want downstream to modify the object.
root_path: KeyPath of current object in its context (object tree).
init_super: If True, call super.__init__, otherwise short-circuit. This
flag is useful when users want to explicitly implement `__init__` for
multi-inheritance, which is needed to pass different arguments to
different bases. Please see `symbolic_test.py#testMultiInheritance`
for more details.
|
def __init__(self,
*,
allow_partial: bool,
accessor_writable: bool,
sealed: bool,
root_path: Optional[object_utils.KeyPath],
init_super: bool = True):
"""Constructor.
Args:
allow_partial: Whether to allow required fields to be MISSING_VALUE or
partial.
accessor_writable: Whether to allow write access via attributes. This flag
is useful when we want to enforce update of fields using `rebind`
method, which leads to better trackability and batched field update
notification.
sealed: Whether object is sealed that cannot be changed. This flag is
useful when we don't want downstream to modify the object.
root_path: KeyPath of current object in its context (object tree).
init_super: If True, call super.__init__, otherwise short-circuit. This
flag is useful when users want to explicitly implement `__init__` for
multi-inheritance, which is needed to pass different arguments to
different bases. Please see `symbolic_test.py#testMultiInheritance`
for more details.
"""
# NOTE(daiyip): we use `self._set_raw_attr` here to avoid overridden
# `__setattr__` from subclasses changing the behavior unintentionally.
self._set_raw_attr('_allow_partial', allow_partial)
self._set_raw_attr('_accessor_writable', accessor_writable)
self._set_raw_attr('_sealed', sealed)
# NOTE(daiyip): parent is used for rebind call to notify their ancestors
# for updates, not for external usage.
self._set_raw_attr('_sym_parent', None)
self._set_raw_attr('_sym_path', root_path or object_utils.KeyPath())
self._set_raw_attr('_sym_puresymbolic', None)
self._set_raw_attr('_sym_missing_values', None)
self._set_raw_attr('_sym_nondefault_values', None)
origin = Origin(None, '__init__') if flags.is_tracking_origin() else None
self._set_raw_attr('_sym_origin', origin)
# super().__init__ may enter the next base class's __init__ when
# multiple inheritance is used. Since we have overridden `__setattr__` for
# symbolic.Object, which depends on `_accessor_writable` and so on,
# we want to make `__setattr__` ready to call before entering
# other bases' `__init__`.
if init_super:
super().__init__()
else:
object.__init__(self)
|
(self, *, allow_partial: bool, accessor_writable: bool, sealed: bool, root_path: Optional[pyglove.core.object_utils.value_location.KeyPath], init_super: bool = True)
|
41,362 |
pyglove.core.symbolic.base
|
_init_kwargs
| null |
def _init_kwargs(self) -> Dict[str, Any]:
kwargs = {}
def add_if_nondefault(key, attrname, default):
v = getattr(self, attrname)
if v != default:
kwargs[key] = v
add_if_nondefault('allow_partial', '_allow_partial', False)
add_if_nondefault('sealed', '_sealed', False)
return kwargs
|
(self) -> Dict[str, Any]
|
41,365 |
pyglove.core.symbolic.base
|
_on_change
|
Event that is triggered when field values in the subtree are updated.
This event will be called
* On per-field basis when object is modified via attribute.
* In batch when multiple fields are modified via `rebind` method.
When a field in an object tree is updated, all ancestors' `_on_change` event
will be triggered in order, from the nearest one to furthest one.
Args:
field_updates: Updates made to the subtree. Key path is relative to
current object.
|
@abc.abstractmethod
def _on_change(self, field_updates: Dict[object_utils.KeyPath, FieldUpdate]):
"""Event that is triggered when field values in the subtree are updated.
This event will be called
* On per-field basis when object is modified via attribute.
* In batch when multiple fields are modified via `rebind` method.
When a field in an object tree is updated, all ancestors' `_on_change` event
will be triggered in order, from the nearest one to furthest one.
Args:
field_updates: Updates made to the subtree. Key path is relative to
current object.
"""
|
(self, field_updates: Dict[pyglove.core.object_utils.value_location.KeyPath, pyglove.core.symbolic.base.FieldUpdate])
|
41,368 |
pyglove.core.symbolic.base
|
_set_item_without_permission_check
|
Child should implement: set an item without permission check.
|
@abc.abstractmethod
def _set_item_without_permission_check(
self, key: Union[str, int], value: Any) -> Optional[FieldUpdate]:
"""Child should implement: set an item without permission check."""
|
(self, key: Union[str, int], value: Any) -> Optional[pyglove.core.symbolic.base.FieldUpdate]
|
41,370 |
pyglove.core.symbolic.base
|
_sym_clone
|
Subclass specific clone implementation.
|
@abc.abstractmethod
def _sym_clone(self, deep: bool, memo=None) -> 'Symbolic':
"""Subclass specific clone implementation."""
|
(self, deep: bool, memo=None) -> pyglove.core.symbolic.base.Symbolic
|
41,371 |
pyglove.core.symbolic.base
|
_sym_getattr
|
Get symbolic attribute by key.
|
@abc.abstractmethod
def _sym_getattr(self, key: Union[str, int]) -> Any:
"""Get symbolic attribute by key."""
|
(self, key: Union[str, int]) -> Any
|
41,373 |
pyglove.core.symbolic.base
|
_sym_missing
|
Returns missing values.
|
@abc.abstractmethod
def _sym_missing(self) -> Dict[str, Any]:
"""Returns missing values."""
|
(self) -> Dict[str, Any]
|
41,374 |
pyglove.core.symbolic.base
|
_sym_nondefault
|
Returns non-default values.
|
@abc.abstractmethod
def _sym_nondefault(self) -> Dict[Union[int, str], Any]:
"""Returns non-default values."""
|
(self) -> Dict[Union[int, str], Any]
|
41,376 |
pyglove.core.symbolic.base
|
_sym_rebind
|
Subclass specific rebind implementation.
Args:
path_value_pairs: A dictionary of key path to new field value.
Returns:
A list of FieldUpdate from this rebind.
Raises:
WritePermissionError: If object is sealed.
KeyError: If update location specified by key or key path is not aligned
with the schema of the object tree.
TypeError: If updated field value type does not conform to field spec.
ValueError: If updated field value is not acceptable according to field
spec.
|
@abc.abstractmethod
def _sym_rebind(
self, path_value_pairs: Dict[object_utils.KeyPath, Any]
) -> List[FieldUpdate]:
"""Subclass specific rebind implementation.
Args:
path_value_pairs: A dictionary of key path to new field value.
Returns:
A list of FieldUpdate from this rebind.
Raises:
WritePermissionError: If object is sealed.
KeyError: If update location specified by key or key path is not aligned
with the schema of the object tree.
TypeError: If updated field value type does not conform to field spec.
ValueError: If updated field value is not acceptable according to field
spec.
"""
|
(self, path_value_pairs: Dict[pyglove.core.object_utils.value_location.KeyPath, Any]) -> List[pyglove.core.symbolic.base.FieldUpdate]
|
41,377 |
pyglove.core.symbolic.base
|
_update_children_paths
|
Update children paths according to root_path of current node.
|
@abc.abstractmethod
def _update_children_paths(
self,
old_path: object_utils.KeyPath,
new_path: object_utils.KeyPath) -> None:
"""Update children paths according to root_path of current node."""
|
(self, old_path: pyglove.core.object_utils.value_location.KeyPath, new_path: pyglove.core.object_utils.value_location.KeyPath) -> NoneType
|
41,385 |
pyglove.core.symbolic.base
|
seal
|
Alias for `sym_seal`.
|
def seal(self, sealed: bool = True) -> 'Symbolic':
"""Alias for `sym_seal`."""
return self.sym_seal(sealed)
|
(self, sealed: bool = True) -> pyglove.core.symbolic.base.Symbolic
|
41,388 |
pyglove.core.symbolic.base
|
sym_attr_field
|
Returns the field definition for a symbolic attribute.
|
@abc.abstractmethod
def sym_attr_field(self, key: Union[str, int]) -> Optional[pg_typing.Field]:
"""Returns the field definition for a symbolic attribute."""
|
(self, key: Union[str, int]) -> Optional[pyglove.core.typing.class_schema.Field]
|