index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
44,921 |
contextlib
|
_push_exit_callback
| null |
def _push_exit_callback(self, callback, is_sync=True):
    """Record *callback* on the undo stack, tagged with its sync/async flag."""
    entry = (is_sync, callback)
    self._exit_callbacks.append(entry)
|
(self, callback, is_sync=True)
|
44,922 |
contextlib
|
callback
|
Registers an arbitrary callback and arguments.
Cannot suppress exceptions.
|
def callback(self, callback, /, *args, **kwds):
    """Registers an arbitrary callback and arguments.
    Cannot suppress exceptions.
    """
    wrapper = self._create_cb_wrapper(callback, *args, **kwds)
    # The wrapper's signature differs from the original callable, so @wraps
    # would be misleading; __wrapped__ alone still helps introspection.
    wrapper.__wrapped__ = callback
    self._push_exit_callback(wrapper)
    return callback  # allow use as a decorator
|
(self, callback, /, *args, **kwds)
|
44,923 |
contextlib
|
close
|
Immediately unwind the context stack.
|
def close(self):
    """Immediately unwind the context stack."""
    # Equivalent to leaving a `with` block normally: no exception info.
    no_exc = (None, None, None)
    self.__exit__(*no_exc)
|
(self)
|
44,924 |
contextlib
|
enter_context
|
Enters the supplied context manager.
If successful, also pushes its __exit__ method as a callback and
returns the result of the __enter__ method.
|
def enter_context(self, cm):
    """Enters the supplied context manager.
    If successful, also pushes its __exit__ method as a callback and
    returns the result of the __enter__ method.
    """
    # Special methods are looked up on the type, mirroring the semantics
    # of the `with` statement itself.
    cm_cls = type(cm)
    exit_method = cm_cls.__exit__
    entered = cm_cls.__enter__(cm)
    self._push_cm_exit(cm, exit_method)
    return entered
|
(self, cm)
|
44,925 |
contextlib
|
pop_all
|
Preserve the context stack by transferring it to a new instance.
|
def pop_all(self):
    """Preserve the context stack by transferring it to a new instance."""
    transferred = type(self)()
    # Hand the callbacks to the new stack and leave this one empty.
    transferred._exit_callbacks, self._exit_callbacks = (
        self._exit_callbacks,
        deque(),
    )
    return transferred
|
(self)
|
44,926 |
contextlib
|
push
|
Registers a callback with the standard __exit__ method signature.
Can suppress exceptions the same way __exit__ method can.
Also accepts any object with an __exit__ method (registering a call
to the method instead of the object itself).
|
def push(self, exit):
    """Registers a callback with the standard __exit__ method signature.
    Can suppress exceptions the same way __exit__ method can.
    Also accepts any object with an __exit__ method (registering a call
    to the method instead of the object itself).
    """
    # Look up __exit__ on the type, not the instance, to follow the
    # standard resolution of special methods.
    kind = type(exit)
    try:
        method = kind.__exit__
    except AttributeError:
        # No __exit__: treat it as a plain callable.
        self._push_exit_callback(exit)
    else:
        self._push_cm_exit(exit, method)
    return exit  # allow use as a decorator
|
(self, exit)
|
44,927 |
schema
|
Forbidden
| null |
class Forbidden(Hook):
    """Hook that rejects a key outright when both its key and value match."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The handler is always the raising default; callers cannot override it.
        kwargs["handler"] = self._default_function
        super(Forbidden, self).__init__(*args, **kwargs)

    @staticmethod
    def _default_function(nkey: Any, data: Any, error: Any) -> NoReturn:
        """Always raise for a matched forbidden key."""
        raise SchemaForbiddenKeyError(
            f"Forbidden key encountered: {nkey!r} in {data!r}", error
        )
|
(*args: Any, **kwargs: Any) -> None
|
44,928 |
schema
|
__init__
| null |
def __init__(self, *args: Any, **kwargs: Any) -> None:
    """Create a Forbidden marker; the handler is forced to the raising default."""
    kwargs.update(handler=self._default_function)
    super(Forbidden, self).__init__(*args, **kwargs)
|
(self, *args: Any, **kwargs: Any) -> NoneType
|
44,930 |
schema
|
_default_function
| null |
@staticmethod
def _default_function(nkey: Any, data: Any, error: Any) -> NoReturn:
    """Always raise: a Forbidden key matched both its key and value schema."""
    message = f"Forbidden key encountered: {nkey!r} in {data!r}"
    raise SchemaForbiddenKeyError(message, error)
|
(nkey: Any, data: Any, error: Any) -> NoReturn
|
44,936 |
schema
|
validate
| null |
def validate(self, data: Any, **kwargs: Dict[str, Any]) -> Any:
    """Validate *data* against this schema and return the validated value.

    Dispatches on the priority "flavor" of the wrapped schema: iterables,
    dicts, types, objects with a validate() method, callables, and finally
    plain equality. Raises a SchemaError subclass on any mismatch.
    """
    Schema = self.__class__
    s: Any = self._schema
    e: Union[str, None] = self._error
    i: bool = self._ignore_extra_keys
    # Literal wrappers validate as their wrapped value.
    if isinstance(s, Literal):
        s = s.schema
    flavor = _priority(s)
    if flavor == ITERABLE:
        # Validate the container type first, then every element against
        # an Or() of the declared element schemas.
        data = Schema(type(s), error=e).validate(data, **kwargs)
        o: Or = Or(*s, error=e, schema=Schema, ignore_extra_keys=i)
        return type(data)(o.validate(d, **kwargs) for d in data)
    if flavor == DICT:
        exitstack = ExitStack()
        data = Schema(dict, error=e).validate(data, **kwargs)
        new: Dict = type(data)()  # new - is a dict of the validated values
        coverage: Set = set()  # matched schema keys
        # for each key and value find a schema entry matching them, if any
        sorted_skeys = sorted(s, key=self._dict_key_priority)
        # Ensure resettable keys (e.g. xor-ish Or) are reset on the way out.
        for skey in sorted_skeys:
            if hasattr(skey, "reset"):
                exitstack.callback(skey.reset)
        with exitstack:
            # Evaluate dictionaries last
            data_items = sorted(
                data.items(), key=lambda value: isinstance(value[1], dict)
            )
            for key, value in data_items:
                for skey in sorted_skeys:
                    svalue = s[skey]
                    try:
                        nkey = Schema(skey, error=e).validate(key, **kwargs)
                    except SchemaError:
                        pass
                    else:
                        if isinstance(skey, Hook):
                            # As the content of the value makes little sense for
                            # keys with a hook, we reverse its meaning:
                            # we will only call the handler if the value does match
                            # In the case of the forbidden key hook,
                            # we will raise the SchemaForbiddenKeyError exception
                            # on match, allowing for excluding a key only if its
                            # value has a certain type, and allowing Forbidden to
                            # work well in combination with Optional.
                            try:
                                nvalue = Schema(svalue, error=e).validate(
                                    value, **kwargs
                                )
                            except SchemaError:
                                continue
                            skey.handler(nkey, data, e)
                        else:
                            try:
                                nvalue = Schema(
                                    svalue, error=e, ignore_extra_keys=i
                                ).validate(value, **kwargs)
                            except SchemaError as x:
                                k = "Key '%s' error:" % nkey
                                message = self._prepend_schema_name(k)
                                raise SchemaError(
                                    [message] + x.autos,
                                    [e.format(data) if e else None] + x.errors,
                                )
                            else:
                                new[nkey] = nvalue
                                coverage.add(skey)
                                break
            # All non-optional schema keys must have been matched.
            required = set(k for k in s if not self._is_optional_type(k))
            if not required.issubset(coverage):
                missing_keys = required - coverage
                s_missing_keys = ", ".join(
                    repr(k) for k in sorted(missing_keys, key=repr)
                )
                message = "Missing key%s: %s" % (
                    _plural_s(missing_keys),
                    s_missing_keys,
                )
                message = self._prepend_schema_name(message)
                raise SchemaMissingKeyError(message, e.format(data) if e else None)
            # Keys in the data that matched no schema entry are "wrong" keys.
            if not self._ignore_extra_keys and (len(new) != len(data)):
                wrong_keys = set(data.keys()) - set(new.keys())
                s_wrong_keys = ", ".join(repr(k) for k in sorted(wrong_keys, key=repr))
                message = "Wrong key%s %s in %r" % (
                    _plural_s(wrong_keys),
                    s_wrong_keys,
                    data,
                )
                message = self._prepend_schema_name(message)
                raise SchemaWrongKeyError(message, e.format(data) if e else None)
            # Apply default-having optionals that haven't been used:
            defaults = (
                set(k for k in s if isinstance(k, Optional) and hasattr(k, "default"))
                - coverage
            )
            for default in defaults:
                new[default.key] = (
                    _invoke_with_optional_kwargs(default.default, **kwargs)
                    if callable(default.default)
                    else default.default
                )
            return new
    if flavor == TYPE:
        # bool is a subclass of int, but an int schema should not accept bools.
        if isinstance(data, s) and not (isinstance(data, bool) and s == int):
            return data
        else:
            message = "%r should be instance of %r" % (data, s.__name__)
            message = self._prepend_schema_name(message)
            raise SchemaUnexpectedTypeError(message, e.format(data) if e else None)
    if flavor == VALIDATOR:
        # Delegate to the object's own validate(); wrap any failure.
        try:
            return s.validate(data, **kwargs)
        except SchemaError as x:
            raise SchemaError(
                [None] + x.autos, [e.format(data) if e else None] + x.errors
            )
        except BaseException as x:
            message = "%r.validate(%r) raised %r" % (s, data, x)
            message = self._prepend_schema_name(message)
            raise SchemaError(message, e.format(data) if e else None)
    if flavor == CALLABLE:
        # A plain callable acts as a predicate: truthy result accepts the data.
        f = _callable_str(s)
        try:
            if s(data):
                return data
        except SchemaError as x:
            raise SchemaError(
                [None] + x.autos, [e.format(data) if e else None] + x.errors
            )
        except BaseException as x:
            message = "%s(%r) raised %r" % (f, data, x)
            message = self._prepend_schema_name(message)
            raise SchemaError(message, e.format(data) if e else None)
        message = "%s(%r) should evaluate to True" % (f, data)
        message = self._prepend_schema_name(message)
        raise SchemaError(message, e.format(data) if e else None)
    # COMPARABLE fallback: plain equality with the schema value.
    if s == data:
        return data
    else:
        message = "%r does not match %r" % (s, data)
        message = self._prepend_schema_name(message)
        raise SchemaError(message, e.format(data) if e else None)
|
(self, data: Any, **kwargs: Dict[str, Any]) -> Any
|
44,938 |
schema
|
Hook
| null |
class Hook(Schema):
    """Schema key wrapper that invokes a handler callback when key and value match."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull the handler out of kwargs before delegating; default is a no-op.
        self.handler: Callable[..., Any] = kwargs.pop("handler", lambda *args: None)
        super(Hook, self).__init__(*args, **kwargs)
        self.key = self._schema
|
(*args: Any, **kwargs: Any) -> None
|
44,939 |
schema
|
__init__
| null |
def __init__(self, *args: Any, **kwargs: Any) -> None:
    """Initialize the Hook, extracting the optional ``handler`` callback."""
    handler = kwargs.pop("handler", lambda *args: None)
    self.handler: Callable[..., Any] = handler
    super(Hook, self).__init__(*args, **kwargs)
    self.key = self._schema
|
(self, *args: Any, **kwargs: Any) -> NoneType
|
44,947 |
schema
|
Literal
| null |
class Literal:
    """Wrap a plain value so it can carry a human-readable description."""

    def __init__(self, value: Any, description: Union[str, None] = None) -> None:
        self._description: Union[str, None] = description
        self._schema: Any = value

    @property
    def schema(self) -> Any:
        """The wrapped literal value."""
        return self._schema

    @property
    def description(self) -> Union[str, None]:
        """Optional human-readable description, or None."""
        return self._description

    def __str__(self) -> str:
        return str(self._schema)

    def __repr__(self) -> str:
        return f'Literal("{self._schema}", description="{self._description or ""}")'
|
(value: Any, description: Optional[str] = None) -> None
|
44,948 |
schema
|
__init__
| null |
def __init__(self, value: Any, description: Union[str, None] = None) -> None:
    """Store the literal value and its optional human-readable description."""
    self._description: Union[str, None] = description
    self._schema: Any = value
|
(self, value: Any, description: Optional[str] = None) -> NoneType
|
44,949 |
schema
|
__repr__
| null |
def __repr__(self) -> str:
    """Debug representation mirroring the constructor call."""
    desc = self._description or ""
    return f'Literal("{self._schema}", description="{desc}")'
|
(self) -> str
|
44,950 |
schema
|
__str__
| null |
def __str__(self) -> str:
    """The wrapped value rendered as a string."""
    return "%s" % (self._schema,)
|
(self) -> str
|
44,951 |
schema
|
Optional
|
Marker for an optional part of the validation Schema.
|
class Optional(Schema):
    """Marker for an optional part of the validation Schema."""

    # Sentinel distinguishing "no default supplied" from a default of None.
    _MARKER = object()

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        default: Any = kwargs.pop("default", self._MARKER)
        super(Optional, self).__init__(*args, **kwargs)
        if default is not self._MARKER:
            # Defaults are only supported for simple, comparable key schemas.
            if _priority(self._schema) != COMPARABLE:
                raise TypeError(
                    "Optional keys with defaults must have simple, "
                    "predictable values, like literal strings or ints. "
                    f'"{self._schema!r}" is too complex.'
                )
            self.default = default
            self.key = str(self._schema)

    def __hash__(self) -> int:
        return hash(self._schema)

    def __eq__(self, other: Any) -> bool:
        if self.__class__ is not other.__class__:
            return False
        own_default = getattr(self, "default", self._MARKER)
        other_default = getattr(other, "default", self._MARKER)
        if not own_default == other_default:
            return False
        return self._schema == other._schema

    def reset(self) -> None:
        # Forward a reset to the wrapped schema if it supports one.
        if hasattr(self._schema, "reset"):
            self._schema.reset()
|
(*args: Any, **kwargs: Any) -> None
|
44,952 |
schema
|
__eq__
| null |
def __eq__(self, other: Any) -> bool:
    """Equal iff same class, same (possibly missing) default, same schema."""
    if self.__class__ is not other.__class__:
        return False
    own_default = getattr(self, "default", self._MARKER)
    other_default = getattr(other, "default", self._MARKER)
    if not own_default == other_default:
        return False
    return self._schema == other._schema
|
(self, other: Any) -> bool
|
44,953 |
schema
|
__hash__
| null |
def __hash__(self) -> int:
    """Delegate hashing to the wrapped schema value."""
    schema_hash = hash(self._schema)
    return schema_hash
|
(self) -> int
|
44,954 |
schema
|
__init__
| null |
def __init__(self, *args: Any, **kwargs: Any) -> None:
    """Build the Optional marker, validating any supplied ``default``."""
    default: Any = kwargs.pop("default", self._MARKER)
    super(Optional, self).__init__(*args, **kwargs)
    if default is self._MARKER:
        return
    # A default is only allowed for simple, comparable key schemas.
    if _priority(self._schema) != COMPARABLE:
        raise TypeError(
            "Optional keys with defaults must have simple, "
            "predictable values, like literal strings or ints. "
            f'"{self._schema!r}" is too complex.'
        )
    self.default = default
    self.key = str(self._schema)
|
(self, *args: Any, **kwargs: Any) -> NoneType
|
44,961 |
schema
|
reset
| null |
def reset(self) -> None:
    """Propagate a reset to the wrapped schema if it supports one."""
    inner = self._schema
    if hasattr(inner, "reset"):
        inner.reset()
|
(self) -> NoneType
|
44,963 |
schema
|
Or
|
Utility function to combine validation directives in an OR Boolean
fashion.
If one wants to make an xor, one can provide only_one=True optional argument
to the constructor of this object. When a validation was performed for an
xor-ish Or instance and one wants to use it another time, one needs to call
reset() to put the match_count back to 0.
|
class Or(And[TSchema]):
    """Utility function to combine validation directives in a OR Boolean
    fashion.
    If one wants to make an xor, one can provide only_one=True optional argument
    to the constructor of this object. When a validation was performed for an
    xor-ish Or instance and one wants to use it another time, one needs to call
    reset() to put the match_count back to 0."""

    def __init__(
        self,
        *args: Union[TSchema, Callable[..., Any]],
        only_one: bool = False,
        **kwargs: Any,
    ) -> None:
        # match_count tracks how many sub-schemas validated successfully;
        # reset() inspects it to enforce the xor (only_one) constraint.
        self.only_one: bool = only_one
        self.match_count: int = 0
        super().__init__(*args, **kwargs)

    def reset(self) -> None:
        """Zero the match counter, raising first if the xor constraint failed."""
        # Capture the violation before zeroing so the counter is always reset,
        # even when we raise.
        failed: bool = self.match_count > 1 and self.only_one
        self.match_count = 0
        if failed:
            raise SchemaOnlyOneAllowedError(
                ["There are multiple keys present from the %r condition" % self]
            )

    def validate(self, data: Any, **kwargs: Any) -> Any:
        """
        Validate data using sub defined schema/expressions ensuring at least
        one value is valid.
        :param data: data to be validated by provided schema.
        :return: return validated data if validation succeeds.
        """
        autos: List[str] = []
        errors: List[Union[str, None]] = []
        for sub_schema in self._build_schemas():
            try:
                validation: Any = sub_schema.validate(data, **kwargs)
                self.match_count += 1
                # Under only_one, a second successful match is a failure:
                # break and fall through to the SchemaError below.
                if self.match_count > 1 and self.only_one:
                    break
                return validation
            except SchemaError as _x:
                autos += _x.autos
                errors += _x.errors
        raise SchemaError(
            ["%r did not validate %r" % (self, data)] + autos,
            [self._error.format(data) if self._error else None] + errors,
        )
|
(*args: Union[~TSchema, Callable[..., Any]], only_one: bool = False, **kwargs: Any) -> None
|
44,964 |
schema
|
__init__
| null |
def __init__(
    self,
    *args: Union[TSchema, Callable[..., Any]],
    only_one: bool = False,
    **kwargs: Any,
) -> None:
    """Initialize an Or; ``only_one=True`` turns it into an exclusive-or."""
    # match_count counts successful sub-validations; reset() enforces the xor.
    self.match_count: int = 0
    self.only_one: bool = only_one
    super().__init__(*args, **kwargs)
|
(self, *args: Union[~TSchema, Callable[..., Any]], only_one: bool = False, **kwargs: Any) -> NoneType
|
44,968 |
schema
|
reset
| null |
def reset(self) -> None:
    """Zero the match counter; raise if an xor (only_one) constraint was violated."""
    # Capture the violation before zeroing so the counter always ends at 0.
    overmatched: bool = self.only_one and self.match_count > 1
    self.match_count = 0
    if overmatched:
        raise SchemaOnlyOneAllowedError(
            ["There are multiple keys present from the %r condition" % self]
        )
|
(self) -> NoneType
|
44,969 |
schema
|
validate
|
Validate data using sub defined schema/expressions ensuring at least
one value is valid.
:param data: data to be validated by provided schema.
:return: return validated data if validation succeeds
|
def validate(self, data: Any, **kwargs: Any) -> Any:
    """
    Validate data using sub defined schema/expressions ensuring at least
    one value is valid.
    :param data: data to be validated by provided schema.
    :return: return validated data if validation succeeds.
    """
    autos: List[str] = []
    errors: List[Union[str, None]] = []
    for sub_schema in self._build_schemas():
        try:
            validation: Any = sub_schema.validate(data, **kwargs)
            self.match_count += 1
            # Under only_one, a second successful match is a failure:
            # break and fall through to the SchemaError below.
            if self.match_count > 1 and self.only_one:
                break
            return validation
        except SchemaError as _x:
            autos += _x.autos
            errors += _x.errors
    raise SchemaError(
        ["%r did not validate %r" % (self, data)] + autos,
        [self._error.format(data) if self._error else None] + errors,
    )
|
(self, data: Any, **kwargs: Any) -> Any
|
44,970 |
schema
|
Regex
|
Enables schema.py to validate string using regular expressions.
|
class Regex:
"""
Enables schema.py to validate string using regular expressions.
"""
# Map all flags bits to a more readable description
NAMES = [
"re.ASCII",
"re.DEBUG",
"re.VERBOSE",
"re.UNICODE",
"re.DOTALL",
"re.MULTILINE",
"re.LOCALE",
"re.IGNORECASE",
"re.TEMPLATE",
]
def __init__(
self, pattern_str: str, flags: int = 0, error: Union[str, None] = None
) -> None:
self._pattern_str: str = pattern_str
flags_list = [
Regex.NAMES[i] for i, f in enumerate(f"{flags:09b}") if f != "0"
] # Name for each bit
self._flags_names: str = ", flags=" + "|".join(flags_list) if flags_list else ""
self._pattern: re.Pattern = re.compile(pattern_str, flags=flags)
self._error: Union[str, None] = error
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._pattern_str!r}{self._flags_names})"
@property
def pattern_str(self) -> str:
"""The pattern string for the represented regular expression"""
return self._pattern_str
def validate(self, data: str, **kwargs: Any) -> str:
"""
Validates data using the defined regex.
:param data: Data to be validated.
:return: Returns validated data.
"""
e = self._error
try:
if self._pattern.search(data):
return data
else:
error_message = (
e.format(data)
if e
else f"{data!r} does not match {self._pattern_str!r}"
)
raise SchemaError(error_message)
except TypeError:
error_message = (
e.format(data) if e else f"{data!r} is not string nor buffer"
)
raise SchemaError(error_message)
|
(pattern_str: str, flags: int = 0, error: Optional[str] = None) -> None
|
44,971 |
schema
|
__init__
| null |
def __init__(
    self, pattern_str: str, flags: int = 0, error: Union[str, None] = None
) -> None:
    """Compile *pattern_str* with *flags*; *error* overrides the default message."""
    self._pattern_str: str = pattern_str
    # Translate each set bit of *flags* into its re.* name (MSB first).
    active_flags = [
        name for bit, name in zip(f"{flags:09b}", Regex.NAMES) if bit != "0"
    ]
    self._flags_names: str = (
        ", flags=" + "|".join(active_flags) if active_flags else ""
    )
    self._pattern: re.Pattern = re.compile(pattern_str, flags=flags)
    self._error: Union[str, None] = error
|
(self, pattern_str: str, flags: int = 0, error: Optional[str] = None) -> NoneType
|
44,972 |
schema
|
__repr__
| null |
def __repr__(self) -> str:
    """Render like the constructor: ClassName(pattern, flags=...)."""
    cls_name = self.__class__.__name__
    return f"{cls_name}({self._pattern_str!r}{self._flags_names})"
|
(self) -> str
|
44,973 |
schema
|
validate
|
Validates data using the defined regex.
:param data: Data to be validated.
:return: Returns validated data.
|
def validate(self, data: str, **kwargs: Any) -> str:
    """
    Validates data using the defined regex.
    :param data: Data to be validated.
    :return: Returns validated data.
    """
    e = self._error
    try:
        found = self._pattern.search(data)
    except TypeError:
        # search() only accepts str/bytes-like input.
        error_message = (
            e.format(data) if e else f"{data!r} is not string nor buffer"
        )
        raise SchemaError(error_message)
    if found:
        return data
    error_message = (
        e.format(data) if e else f"{data!r} does not match {self._pattern_str!r}"
    )
    raise SchemaError(error_message)
|
(self, data: str, **kwargs: Any) -> str
|
44,974 |
schema
|
Schema
|
Entry point of the library, use this class to instantiate validation
schema for the data that will be validated.
|
class Schema(object):
    """
    Entry point of the library, use this class to instantiate validation
    schema for the data that will be validated.
    """

    def __init__(
        self,
        schema: Any,
        error: Union[str, None] = None,
        ignore_extra_keys: bool = False,
        name: Union[str, None] = None,
        description: Union[str, None] = None,
        as_reference: bool = False,
    ) -> None:
        self._schema: Any = schema
        self._error: Union[str, None] = error
        self._ignore_extra_keys: bool = ignore_extra_keys
        self._name: Union[str, None] = name
        self._description: Union[str, None] = description
        # When True, json_schema() emits this schema as a shared $ref
        # definition; that requires a name to build the reference path.
        self.as_reference: bool = as_reference
        if as_reference and name is None:
            raise ValueError("Schema used as reference should have a name")

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._schema)

    @property
    def schema(self) -> Any:
        return self._schema

    @property
    def description(self) -> Union[str, None]:
        return self._description

    @property
    def name(self) -> Union[str, None]:
        return self._name

    @property
    def ignore_extra_keys(self) -> bool:
        return self._ignore_extra_keys

    @staticmethod
    def _dict_key_priority(s) -> float:
        """Return priority for a given key object."""
        # Hooks sort just before, and Optionals just after, plain keys of
        # the same base priority.
        if isinstance(s, Hook):
            return _priority(s._schema) - 0.5
        if isinstance(s, Optional):
            return _priority(s._schema) + 0.5
        return _priority(s)

    @staticmethod
    def _is_optional_type(s: Any) -> bool:
        """Return True if the given key is optional (does not have to be found)"""
        return any(isinstance(s, optional_type) for optional_type in [Optional, Hook])

    def is_valid(self, data: Any, **kwargs: Dict[str, Any]) -> bool:
        """Return whether the given data has passed all the validations
        that were specified in the given schema.
        """
        try:
            self.validate(data, **kwargs)
        except SchemaError:
            return False
        else:
            return True

    def _prepend_schema_name(self, message: str) -> str:
        """
        If a custom schema name has been defined, prepends it to the error
        message that gets raised when a schema error occurs.
        """
        if self._name:
            message = "{0!r} {1!s}".format(self._name, message)
        return message

    def validate(self, data: Any, **kwargs: Dict[str, Any]) -> Any:
        """Validate *data* against this schema and return the validated value."""
        Schema = self.__class__
        s: Any = self._schema
        e: Union[str, None] = self._error
        i: bool = self._ignore_extra_keys
        # Literal wrappers validate as their wrapped value.
        if isinstance(s, Literal):
            s = s.schema
        flavor = _priority(s)
        if flavor == ITERABLE:
            data = Schema(type(s), error=e).validate(data, **kwargs)
            o: Or = Or(*s, error=e, schema=Schema, ignore_extra_keys=i)
            return type(data)(o.validate(d, **kwargs) for d in data)
        if flavor == DICT:
            exitstack = ExitStack()
            data = Schema(dict, error=e).validate(data, **kwargs)
            new: Dict = type(data)()  # new - is a dict of the validated values
            coverage: Set = set()  # matched schema keys
            # for each key and value find a schema entry matching them, if any
            sorted_skeys = sorted(s, key=self._dict_key_priority)
            # Ensure resettable keys (e.g. xor-ish Or) are reset on the way out.
            for skey in sorted_skeys:
                if hasattr(skey, "reset"):
                    exitstack.callback(skey.reset)
            with exitstack:
                # Evaluate dictionaries last
                data_items = sorted(
                    data.items(), key=lambda value: isinstance(value[1], dict)
                )
                for key, value in data_items:
                    for skey in sorted_skeys:
                        svalue = s[skey]
                        try:
                            nkey = Schema(skey, error=e).validate(key, **kwargs)
                        except SchemaError:
                            pass
                        else:
                            if isinstance(skey, Hook):
                                # As the content of the value makes little sense for
                                # keys with a hook, we reverse its meaning:
                                # we will only call the handler if the value does match
                                # In the case of the forbidden key hook,
                                # we will raise the SchemaForbiddenKeyError exception
                                # on match, allowing for excluding a key only if its
                                # value has a certain type, and allowing Forbidden to
                                # work well in combination with Optional.
                                try:
                                    nvalue = Schema(svalue, error=e).validate(
                                        value, **kwargs
                                    )
                                except SchemaError:
                                    continue
                                skey.handler(nkey, data, e)
                            else:
                                try:
                                    nvalue = Schema(
                                        svalue, error=e, ignore_extra_keys=i
                                    ).validate(value, **kwargs)
                                except SchemaError as x:
                                    k = "Key '%s' error:" % nkey
                                    message = self._prepend_schema_name(k)
                                    raise SchemaError(
                                        [message] + x.autos,
                                        [e.format(data) if e else None] + x.errors,
                                    )
                                else:
                                    new[nkey] = nvalue
                                    coverage.add(skey)
                                    break
                # All non-optional schema keys must have been matched.
                required = set(k for k in s if not self._is_optional_type(k))
                if not required.issubset(coverage):
                    missing_keys = required - coverage
                    s_missing_keys = ", ".join(
                        repr(k) for k in sorted(missing_keys, key=repr)
                    )
                    message = "Missing key%s: %s" % (
                        _plural_s(missing_keys),
                        s_missing_keys,
                    )
                    message = self._prepend_schema_name(message)
                    raise SchemaMissingKeyError(message, e.format(data) if e else None)
                # Keys in the data that matched no schema entry are "wrong" keys.
                if not self._ignore_extra_keys and (len(new) != len(data)):
                    wrong_keys = set(data.keys()) - set(new.keys())
                    s_wrong_keys = ", ".join(repr(k) for k in sorted(wrong_keys, key=repr))
                    message = "Wrong key%s %s in %r" % (
                        _plural_s(wrong_keys),
                        s_wrong_keys,
                        data,
                    )
                    message = self._prepend_schema_name(message)
                    raise SchemaWrongKeyError(message, e.format(data) if e else None)
                # Apply default-having optionals that haven't been used:
                defaults = (
                    set(k for k in s if isinstance(k, Optional) and hasattr(k, "default"))
                    - coverage
                )
                for default in defaults:
                    new[default.key] = (
                        _invoke_with_optional_kwargs(default.default, **kwargs)
                        if callable(default.default)
                        else default.default
                    )
                return new
        if flavor == TYPE:
            # bool is a subclass of int, but an int schema should not accept bools.
            if isinstance(data, s) and not (isinstance(data, bool) and s == int):
                return data
            else:
                message = "%r should be instance of %r" % (data, s.__name__)
                message = self._prepend_schema_name(message)
                raise SchemaUnexpectedTypeError(message, e.format(data) if e else None)
        if flavor == VALIDATOR:
            # Delegate to the object's own validate(); wrap any failure.
            try:
                return s.validate(data, **kwargs)
            except SchemaError as x:
                raise SchemaError(
                    [None] + x.autos, [e.format(data) if e else None] + x.errors
                )
            except BaseException as x:
                message = "%r.validate(%r) raised %r" % (s, data, x)
                message = self._prepend_schema_name(message)
                raise SchemaError(message, e.format(data) if e else None)
        if flavor == CALLABLE:
            # A plain callable acts as a predicate: truthy result accepts the data.
            f = _callable_str(s)
            try:
                if s(data):
                    return data
            except SchemaError as x:
                raise SchemaError(
                    [None] + x.autos, [e.format(data) if e else None] + x.errors
                )
            except BaseException as x:
                message = "%s(%r) raised %r" % (f, data, x)
                message = self._prepend_schema_name(message)
                raise SchemaError(message, e.format(data) if e else None)
            message = "%s(%r) should evaluate to True" % (f, data)
            message = self._prepend_schema_name(message)
            raise SchemaError(message, e.format(data) if e else None)
        # COMPARABLE fallback: plain equality with the schema value.
        if s == data:
            return data
        else:
            message = "%r does not match %r" % (s, data)
            message = self._prepend_schema_name(message)
            raise SchemaError(message, e.format(data) if e else None)

    def json_schema(
        self, schema_id: str, use_refs: bool = False, **kwargs: Any
    ) -> Dict[str, Any]:
        """Generate a draft-07 JSON schema dict representing the Schema.
        This method must be called with a schema_id.
        :param schema_id: The value of the $id on the main schema
        :param use_refs: Enable reusing object references in the resulting JSON schema.
        Schemas with references are harder to read by humans, but are a lot smaller when there
        is a lot of reuse
        """
        # seen maps a content hash of an emitted sub-schema to that dict, so
        # duplicates can be replaced by $ref when use_refs is enabled.
        seen: Dict[int, Dict[str, Any]] = {}
        definitions_by_name: Dict[str, Dict[str, Any]] = {}

        def _json_schema(
            schema: "Schema",
            is_main_schema: bool = True,
            description: Union[str, None] = None,
            allow_reference: bool = True,
        ) -> Dict[str, Any]:
            def _create_or_use_ref(return_dict: Dict[str, Any]) -> Dict[str, Any]:
                """If not already seen, return the provided part of the schema unchanged.
                If already seen, give an id to the already seen dict and return a reference to the previous part
                of the schema instead.
                """
                if not use_refs or is_main_schema:
                    return return_schema
                hashed = hash(repr(sorted(return_dict.items())))
                if hashed not in seen:
                    seen[hashed] = return_dict
                    return return_dict
                else:
                    id_str = "#" + str(hashed)
                    seen[hashed]["$id"] = id_str
                    return {"$ref": id_str}

            def _get_type_name(python_type: Type) -> str:
                """Return the JSON schema name for a Python type"""
                if python_type == str:
                    return "string"
                elif python_type == int:
                    return "integer"
                elif python_type == float:
                    return "number"
                elif python_type == bool:
                    return "boolean"
                elif python_type == list:
                    return "array"
                elif python_type == dict:
                    return "object"
                return "string"

            def _to_json_type(value: Any) -> Any:
                """Attempt to convert a constant value (for "const" and "default") to a JSON serializable value"""
                if value is None or type(value) in (str, int, float, bool, list, dict):
                    return value
                if type(value) in (tuple, set, frozenset):
                    return list(value)
                if isinstance(value, Literal):
                    return value.schema
                return str(value)

            def _to_schema(s: Any, ignore_extra_keys: bool) -> Schema:
                # Wrap raw values in a Schema so they expose .schema etc.
                if not isinstance(s, Schema):
                    return Schema(s, ignore_extra_keys=ignore_extra_keys)
                return s

            s: Any = schema.schema
            i: bool = schema.ignore_extra_keys
            flavor = _priority(s)

            return_schema: Dict[str, Any] = {}

            return_description: Union[str, None] = description or schema.description
            if return_description:
                return_schema["description"] = return_description

            # Check if we have to create a common definition and use as reference
            if allow_reference and schema.as_reference:
                # Generate sub schema if not already done
                if schema.name not in definitions_by_name:
                    definitions_by_name[cast(str, schema.name)] = {}  # Avoid infinite loop
                    definitions_by_name[cast(str, schema.name)] = _json_schema(
                        schema, is_main_schema=False, allow_reference=False
                    )
                return_schema["$ref"] = "#/definitions/" + cast(str, schema.name)
            else:
                if flavor == TYPE:
                    # Handle type
                    return_schema["type"] = _get_type_name(s)
                elif flavor == ITERABLE:
                    # Handle arrays or dict schema
                    return_schema["type"] = "array"
                    if len(s) == 1:
                        return_schema["items"] = _json_schema(
                            _to_schema(s[0], i), is_main_schema=False
                        )
                    elif len(s) > 1:
                        return_schema["items"] = _json_schema(
                            Schema(Or(*s)), is_main_schema=False
                        )
                elif isinstance(s, Or):
                    # Handle Or values
                    # Check if we can use an enum
                    if all(
                        priority == COMPARABLE
                        for priority in [_priority(value) for value in s.args]
                    ):
                        or_values = [
                            str(s) if isinstance(s, Literal) else s for s in s.args
                        ]
                        # All values are simple, can use enum or const
                        if len(or_values) == 1:
                            return_schema["const"] = _to_json_type(or_values[0])
                            return return_schema
                        return_schema["enum"] = or_values
                    else:
                        # No enum, let's go with recursive calls
                        any_of_values = []
                        for or_key in s.args:
                            new_value = _json_schema(
                                _to_schema(or_key, i), is_main_schema=False
                            )
                            if new_value != {} and new_value not in any_of_values:
                                any_of_values.append(new_value)
                        if len(any_of_values) == 1:
                            # Only one representable condition remains, do not put under anyOf
                            return_schema.update(any_of_values[0])
                        else:
                            return_schema["anyOf"] = any_of_values
                elif isinstance(s, And):
                    # Handle And values
                    all_of_values = []
                    for and_key in s.args:
                        new_value = _json_schema(
                            _to_schema(and_key, i), is_main_schema=False
                        )
                        if new_value != {} and new_value not in all_of_values:
                            all_of_values.append(new_value)
                    if len(all_of_values) == 1:
                        # Only one representable condition remains, do not put under allOf
                        return_schema.update(all_of_values[0])
                    else:
                        return_schema["allOf"] = all_of_values
                elif flavor == COMPARABLE:
                    return_schema["const"] = _to_json_type(s)
                elif flavor == VALIDATOR and type(s) == Regex:
                    return_schema["type"] = "string"
                    return_schema["pattern"] = s.pattern_str
                else:
                    if flavor != DICT:
                        # If not handled, do not check
                        return return_schema
                    # Schema is a dict
                    required_keys = []
                    expanded_schema = {}
                    additional_properties = i
                    for key in s:
                        if isinstance(key, Hook):
                            continue

                        def _key_allows_additional_properties(key: Any) -> bool:
                            """Check if a key is broad enough to allow additional properties"""
                            if isinstance(key, Optional):
                                return _key_allows_additional_properties(key.schema)
                            return key == str or key == object

                        def _get_key_description(key: Any) -> Union[str, None]:
                            """Get the description associated to a key (as specified in a Literal object). Return None if not a Literal"""
                            if isinstance(key, Optional):
                                return _get_key_description(key.schema)
                            if isinstance(key, Literal):
                                return key.description
                            return None

                        def _get_key_name(key: Any) -> Any:
                            """Get the name of a key (as specified in a Literal object). Return the key unchanged if not a Literal"""
                            if isinstance(key, Optional):
                                return _get_key_name(key.schema)
                            if isinstance(key, Literal):
                                return key.schema
                            return key

                        additional_properties = (
                            additional_properties
                            or _key_allows_additional_properties(key)
                        )
                        sub_schema = _to_schema(s[key], ignore_extra_keys=i)
                        key_name = _get_key_name(key)
                        if isinstance(key_name, str):
                            if not isinstance(key, Optional):
                                required_keys.append(key_name)
                            expanded_schema[key_name] = _json_schema(
                                sub_schema,
                                is_main_schema=False,
                                description=_get_key_description(key),
                            )
                            if isinstance(key, Optional) and hasattr(key, "default"):
                                expanded_schema[key_name]["default"] = _to_json_type(
                                    _invoke_with_optional_kwargs(key.default, **kwargs)
                                    if callable(key.default)
                                    else key.default
                                )
                        elif isinstance(key_name, Or):
                            # JSON schema does not support having a key named one name or another, so we just add both options
                            # This is less strict because we cannot enforce that one or the other is required
                            for or_key in key_name.args:
                                expanded_schema[_get_key_name(or_key)] = _json_schema(
                                    sub_schema,
                                    is_main_schema=False,
                                    description=_get_key_description(or_key),
                                )
                    return_schema.update(
                        {
                            "type": "object",
                            "properties": expanded_schema,
                            "required": required_keys,
                            "additionalProperties": additional_properties,
                        }
                    )
            if is_main_schema:
                return_schema.update(
                    {
                        "$id": schema_id,
                        "$schema": "http://json-schema.org/draft-07/schema#",
                    }
                )
                if self._name:
                    return_schema["title"] = self._name
                if definitions_by_name:
                    return_schema["definitions"] = {}
                    for definition_name, definition in definitions_by_name.items():
                        return_schema["definitions"][definition_name] = definition
            return _create_or_use_ref(return_schema)

        return _json_schema(self, True)
|
(schema: Any, error: Optional[str] = None, ignore_extra_keys: bool = False, name: Optional[str] = None, description: Optional[str] = None, as_reference: bool = False) -> None
|
44,983 |
schema
|
SchemaError
|
Error during Schema validation.
|
class SchemaError(Exception):
"""Error during Schema validation."""
def __init__(
self,
autos: Union[Sequence[Union[str, None]], None],
errors: Union[List, str, None] = None,
):
self.autos = autos if isinstance(autos, List) else [autos]
self.errors = errors if isinstance(errors, List) else [errors]
Exception.__init__(self, self.code)
@property
def code(self) -> str:
"""Remove duplicates in autos and errors list and combine them into a single message."""
def uniq(seq: Iterable[Union[str, None]]) -> List[str]:
"""Utility function to remove duplicates while preserving the order."""
seen: Set[str] = set()
unique_list: List[str] = []
for x in seq:
if x is not None and x not in seen:
seen.add(x)
unique_list.append(x)
return unique_list
data_set = uniq(self.autos)
error_list = uniq(self.errors)
return "\n".join(error_list if error_list else data_set)
|
(autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
44,984 |
schema
|
__init__
| null |
def __init__(
self,
autos: Union[Sequence[Union[str, None]], None],
errors: Union[List, str, None] = None,
):
self.autos = autos if isinstance(autos, List) else [autos]
self.errors = errors if isinstance(errors, List) else [errors]
Exception.__init__(self, self.code)
|
(self, autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
44,985 |
schema
|
SchemaForbiddenKeyError
|
Error should be raised when a forbidden key is found within the
data set being validated, and its value matches the value that was specified
|
class SchemaForbiddenKeyError(SchemaError):
"""Error should be raised when a forbidden key is found within the
data set being validated, and its value matches the value that was specified"""
pass
|
(autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
44,987 |
schema
|
SchemaMissingKeyError
|
Error should be raised when a mandatory key is not found within the
data set being validated
|
class SchemaMissingKeyError(SchemaError):
"""Error should be raised when a mandatory key is not found within the
data set being validated"""
pass
|
(autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
44,989 |
schema
|
SchemaOnlyOneAllowedError
|
Error should be raised when an only_one Or key has multiple matching candidates
|
class SchemaOnlyOneAllowedError(SchemaError):
"""Error should be raised when an only_one Or key has multiple matching candidates"""
pass
|
(autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
44,991 |
schema
|
SchemaUnexpectedTypeError
|
Error should be raised when a type mismatch is detected within the
data set being validated.
|
class SchemaUnexpectedTypeError(SchemaError):
"""Error should be raised when a type mismatch is detected within the
data set being validated."""
pass
|
(autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
44,993 |
schema
|
SchemaWrongKeyError
|
Error Should be raised when an unexpected key is detected within the
data set being.
|
class SchemaWrongKeyError(SchemaError):
"""Error Should be raised when an unexpected key is detected within the
data set being."""
pass
|
(autos: Optional[Sequence[Optional[str]]], errors: Union[List, str, NoneType] = None)
|
45,003 |
schema
|
Use
|
For more general use cases, you can use the Use class to transform
the data while it is being validated.
|
class Use:
"""
For more general use cases, you can use the Use class to transform
the data while it is being validated.
"""
def __init__(
self, callable_: Callable[[Any], Any], error: Union[str, None] = None
) -> None:
if not callable(callable_):
raise TypeError(f"Expected a callable, not {callable_!r}")
self._callable: Callable[[Any], Any] = callable_
self._error: Union[str, None] = error
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._callable!r})"
def validate(self, data: Any, **kwargs: Any) -> Any:
try:
return self._callable(data)
except SchemaError as x:
raise SchemaError(
[None] + x.autos,
[self._error.format(data) if self._error else None] + x.errors,
)
except BaseException as x:
f = _callable_str(self._callable)
raise SchemaError(
"%s(%r) raised %r" % (f, data, x),
self._error.format(data) if self._error else None,
)
|
(callable_: Callable[[Any], Any], error: Optional[str] = None) -> None
|
45,004 |
schema
|
__init__
| null |
def __init__(
self, callable_: Callable[[Any], Any], error: Union[str, None] = None
) -> None:
if not callable(callable_):
raise TypeError(f"Expected a callable, not {callable_!r}")
self._callable: Callable[[Any], Any] = callable_
self._error: Union[str, None] = error
|
(self, callable_: Callable[[Any], Any], error: Optional[str] = None) -> NoneType
|
45,005 |
schema
|
__repr__
| null |
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._callable!r})"
|
(self) -> str
|
45,006 |
schema
|
validate
| null |
def validate(self, data: Any, **kwargs: Any) -> Any:
try:
return self._callable(data)
except SchemaError as x:
raise SchemaError(
[None] + x.autos,
[self._error.format(data) if self._error else None] + x.errors,
)
except BaseException as x:
f = _callable_str(self._callable)
raise SchemaError(
"%s(%r) raised %r" % (f, data, x),
self._error.format(data) if self._error else None,
)
|
(self, data: Any, **kwargs: Any) -> Any
|
45,007 |
schema
|
_callable_str
| null |
def _callable_str(callable_: Callable[..., Any]) -> str:
if hasattr(callable_, "__name__"):
return callable_.__name__
return str(callable_)
|
(callable_: Callable[..., Any]) -> str
|
45,008 |
schema
|
_invoke_with_optional_kwargs
| null |
def _invoke_with_optional_kwargs(f: Callable[..., Any], **kwargs: Any) -> Any:
s = inspect.signature(f)
if len(s.parameters) == 0:
return f()
return f(**kwargs)
|
(f: Callable[..., Any], **kwargs: Any) -> Any
|
45,009 |
schema
|
_plural_s
| null |
def _plural_s(sized: Sized) -> str:
return "s" if len(sized) > 1 else ""
|
(sized: Sized) -> str
|
45,010 |
schema
|
_priority
|
Return priority for a given object.
|
def _priority(s: Any) -> int:
"""Return priority for a given object."""
if type(s) in (list, tuple, set, frozenset):
return ITERABLE
if isinstance(s, dict):
return DICT
if issubclass(type(s), type):
return TYPE
if isinstance(s, Literal):
return COMPARABLE
if hasattr(s, "validate"):
return VALIDATOR
if callable(s):
return CALLABLE
else:
return COMPARABLE
|
(s: Any) -> int
|
45,014 |
gggdtparser.dtparser
|
check
|
检测解析结果
:param dst_dt: 解析结果datetime.datetime
:param check_dt: 验证结果datetime.datetime或两datetime.datetime元组
:return: bool
|
def check(dst_dt, check_dt):
"""
检测解析结果
:param dst_dt: 解析结果datetime.datetime
:param check_dt: 验证结果datetime.datetime或两datetime.datetime元组
:return: bool
"""
if isinstance(check_dt, datetime.datetime):
return dst_dt == check_dt
start_dt, end_dt = check_dt
return start_dt <= dst_dt <= end_dt
|
(dst_dt, check_dt)
|
45,019 |
gggdtparser.dtparser
|
parse
|
解析文本时间
:param string_datetime: 字符串时间文本
:param format_list: 时间解析模板列表,如%Y-%m-%d
:param regex_list: 正则解析规则列表,统一为有名分组格式,参考dtconfigs.py
:param langs: 语言列表,优先设置的语言进行翻译替换和解析
:param result_accurately: 解析結果是否为严格模式,format不支持非严格模式
:param extract_accurately: 是否只进行精确抽取
:param max_datetime: 最大时间
:param min_datetime: 最小时间
:param base_datetime: 基准时间
:param translate_func: 翻译函数
:return: datetime.datetime
|
def parse(string_datetime, format_list=None, regex_list=None,
langs=None, result_accurately=True, extract_accurately=False,
max_datetime=None, min_datetime=None, base_datetime=None,
translate_func=None):
"""
解析文本时间
:param string_datetime: 字符串时间文本
:param format_list: 时间解析模板列表,如%Y-%m-%d
:param regex_list: 正则解析规则列表,统一为有名分组格式,参考dtconfigs.py
:param langs: 语言列表,优先设置的语言进行翻译替换和解析
:param result_accurately: 解析結果是否为严格模式,format不支持非严格模式
:param extract_accurately: 是否只进行精确抽取
:param max_datetime: 最大时间
:param min_datetime: 最小时间
:param base_datetime: 基准时间
:param translate_func: 翻译函数
:return: datetime.datetime
"""
# format
# regex
# fanyi
if translate_func and callable(translate_func):
string_datetime = translate_func(string_datetime)
result = parse_by_format(string_datetime, format_list)
if result:
return result
result = parse_by_regex(
string_datetime, regex_list, langs, result_accurately,
max_datetime=max_datetime, min_datetime=min_datetime,
base_datetime=base_datetime,
extract_accurately=extract_accurately)
if result:
return result
|
(string_datetime, format_list=None, regex_list=None, langs=None, result_accurately=True, extract_accurately=False, max_datetime=None, min_datetime=None, base_datetime=None, translate_func=None)
|
45,020 |
gggdtparser.dtparser
|
parse_by_format
|
通过format进行时间解析
:param string_datetime:
:param format_list:
:return:
|
def parse_by_format(string_datetime, format_list=None):
"""
通过format进行时间解析
:param string_datetime:
:param format_list:
:return:
"""
if not format_list:
format_list = []
return s2dt(string_datetime, format_list)
|
(string_datetime, format_list=None)
|
45,021 |
gggdtparser.dtframe
|
parse
|
:param s:
:param seps: 范围标识符
:param format_list: 时间模板
[(start, start, ....), (end, end, ....)]
:param regex_list: 正则表达式
[(start, start, ....), (end, end, ....)]
:param base_datetime: 相对时间的基准,当起始时间没有解析到时,当做时间的范围的起始
:return:
|
def parse(s, seps=None, format_list=None, regex_list=None,
base_datetime=None, ):
"""
:param s:
:param seps: 范围标识符
:param format_list: 时间模板
[(start, start, ....), (end, end, ....)]
:param regex_list: 正则表达式
[(start, start, ....), (end, end, ....)]
:param base_datetime: 相对时间的基准,当起始时间没有解析到时,当做时间的范围的起始
:return:
"""
if not s:
return None, None
if not seps:
seps = []
start_format_list, end_format_list = format_list if format_list else (
None, None)
start_regex_list, end_regex_list = regex_list if regex_list else (
None, None)
for sep in seps:
start, end = _parse(
s, sep, start_format_list, end_format_list,
start_regex_list, end_regex_list, base_datetime)
if any([start, end]):
return start, end
for sep in SEPS:
start, end = _parse(
s, sep, start_format_list, end_format_list,
start_regex_list, end_regex_list, base_datetime)
if any([start, end]):
return start, end
return None, None
|
(s, seps=None, format_list=None, regex_list=None, base_datetime=None)
|
45,023 |
easul.action
|
Action
|
Base Action class. Actions enable things to occur at different points in steps (e.g. before the run, following invalid data).
|
class Action:
"""
Base Action class. Actions enable things to occur at different points in steps (e.g. before the run, following invalid data).
"""
def before_run(self, *args, **kwargs):
pass
def after_run(self, *args, **kwargs):
pass
def invalid_data(self, *args, **kwargs):
pass
def missing_data(self, *args, **kwargs):
pass
def after_data(self, *args, **kwargs):
pass
def describe(self):
return {
"type":self.__class__.__name__
}
|
() -> None
|
45,024 |
easul.action
|
__eq__
|
Method generated by attrs for class Action.
|
"""
"""
import logging
from easul.algorithm.result import DefaultResult
from easul.outcome import ResultOutcome
LOG = logging.getLogger(__name__)
from attrs import define, field
class StopFlow(Exception):
pass
|
(self, other)
|
45,027 |
easul.action
|
__ne__
|
Method generated by attrs for class Action.
| null |
(self, other)
|
45,030 |
easul.action
|
after_data
| null |
def after_data(self, *args, **kwargs):
pass
|
(self, *args, **kwargs)
|
45,031 |
easul.action
|
after_run
| null |
def after_run(self, *args, **kwargs):
pass
|
(self, *args, **kwargs)
|
45,032 |
easul.action
|
before_run
| null |
def before_run(self, *args, **kwargs):
pass
|
(self, *args, **kwargs)
|
45,033 |
easul.action
|
describe
| null |
def describe(self):
return {
"type":self.__class__.__name__
}
|
(self)
|
45,034 |
easul.action
|
invalid_data
| null |
def invalid_data(self, *args, **kwargs):
pass
|
(self, *args, **kwargs)
|
45,035 |
easul.action
|
missing_data
| null |
def missing_data(self, *args, **kwargs):
pass
|
(self, *args, **kwargs)
|
45,036 |
easul.step
|
ActionEvent
|
Event object fed to Action classes to enable access to steps, data, outcome and drivers.
|
class ActionEvent:
"""
Event object fed to Action classes to enable access to steps, data, outcome and drivers.
"""
step = field()
driver = field()
previous_outcome = field(default=None)
data = field(default=None)
outcome = field(default=None)
|
(*, step, driver, previous_outcome=None, data=None, outcome=None) -> None
|
45,037 |
easul.step
|
__eq__
|
Method generated by attrs for class ActionEvent.
|
from attrs import define, field
import logging
from easul.decision import BinaryDecision
LOG = logging.getLogger(__name__)
from easul.error import StepDataNotAvailable, ConversionError, InvalidStepData, VisualDataMissing, ValidationError
from enum import Enum, auto
from easul.outcome import Outcome, EndOutcome, PauseOutcome, InvalidDataOutcome, MissingDataOutcome
from abc import abstractmethod
NO_VISUAL_IN_STEP_MESSAGE = "Sorry no visual for this step"
@define(kw_only=True)
class Step:
"""
Base Step class. Steps are the driving force of plans, they bring the different components together.
The key things are 'visual' components which provide graphical views associated with steps and 'actions' which
define action classes that occur during the lifecycle of a step run.
"""
title = field()
visual = field(default=None)
exclude_from_chart = field(default=False)
name = field(default=None)
actions = field(factory=list)
def layout_kwargs(self, driver, steps, **kwargs):
return {"steps":steps, "driver":driver, "step":self, **kwargs}
def render_visual(self, driver: "easul.driver.Driver", steps, result=None, context=None, renderer=None, **kwargs):
"""
Render visual to HTML utilising the data in the broker (if not supplied) and the supplied renderer
Args:
driver:
steps:
result:
context:
renderer:
**kwargs:
Returns:
"""
if not self.visual:
return NO_VISUAL_IN_STEP_MESSAGE
if not result and not context:
b_data = driver.get_broker_data("outcome:" + self.name)
if b_data:
result = b_data.get("result")
context = b_data.get("context")
try:
return self.visual.render(driver=driver, steps=steps, step=self, result=result, context=context, renderer=renderer)
except VisualDataMissing as ex:
return str(ex)
@property
def possible_links(self):
return {}
@property
def data_sources(self):
return []
def _trigger_actions(self, trigger_type, event):
for action in self.actions:
getattr(action, trigger_type)(event)
def _generate_visual_context(self, data):
return self.visual.generate_context(data) if self.visual else None
def __repr__(self):
return self.name
def describe(self):
"""
Describe step as Python data structures (lists and dicts)
Returns:
"""
return {
"name": self.name,
"title": self.title,
"actions": [a.describe() for a in self.actions],
"visual":self.visual.describe() if self.visual else "N/A"
}
|
(self, other)
|
45,040 |
easul.step
|
__ne__
|
Method generated by attrs for class ActionEvent.
| null |
(self, other)
|
45,043 |
easul.step
|
ActionStep
|
Step which involves action based on data from a 'source'
|
class ActionStep(Step):
"""
Step which involves action based on data from a 'source'
"""
source = field(default=None)
def run_all(self, driver, previous_outcome=None):
event = ActionEvent(step=self, driver=driver, previous_outcome=previous_outcome)
self._trigger_actions("before_run", event)
driver.store_step(self.name, StepStatuses.INIT, timestamp=driver.clock.timestamp)
from easul.plan import run_step_logic
return run_step_logic(self, event)
@abstractmethod
def _determine_outcome(self, event):
pass
def _retrieve_data(self, event):
from easul.data import DataInput
data = self.source.retrieve(event.driver, self)
return DataInput(data, schema=None, convert=False, validate=False)
def _store_current(self, driver, reason):
driver.store_state(self.state.label, self.state_value, reason, self.name, timestamp=driver.clock.timestamp)
driver.store_step(self.name, StepStatuses.INIT, timestamp=driver.clock.timestamp)
def run_logic(self, event):
"""
Runs the logic part of the step lifecycle to determine the outcome.
Args:
event:
Returns:
"""
outcome = self._determine_outcome(event)
event.driver.store_step(self.name, StepStatuses.COMPLETE, outcome=outcome, timestamp=event.driver.clock.timestamp)
return outcome
def layout_kwargs(self, driver, steps, **kwargs):
return {}
def describe(self):
desc = super().describe()
desc.update({"source": self.source.describe() if self.source else "N/A"})
return desc
|
(*, title, visual=None, exclude_from_chart=False, name=None, actions=NOTHING, source=None) -> None
|
45,044 |
easul.step
|
__eq__
|
Method generated by attrs for class ActionStep.
|
from attrs import define, field
import logging
from easul.decision import BinaryDecision
LOG = logging.getLogger(__name__)
from easul.error import StepDataNotAvailable, ConversionError, InvalidStepData, VisualDataMissing, ValidationError
from enum import Enum, auto
from easul.outcome import Outcome, EndOutcome, PauseOutcome, InvalidDataOutcome, MissingDataOutcome
from abc import abstractmethod
NO_VISUAL_IN_STEP_MESSAGE = "Sorry no visual for this step"
@define(kw_only=True)
class Step:
"""
Base Step class. Steps are the driving force of plans, they bring the different components together.
The key things are 'visual' components which provide graphical views associated with steps and 'actions' which
define action classes that occur during the lifecycle of a step run.
"""
title = field()
visual = field(default=None)
exclude_from_chart = field(default=False)
name = field(default=None)
actions = field(factory=list)
def layout_kwargs(self, driver, steps, **kwargs):
return {"steps":steps, "driver":driver, "step":self, **kwargs}
def render_visual(self, driver: "easul.driver.Driver", steps, result=None, context=None, renderer=None, **kwargs):
"""
Render visual to HTML utilising the data in the broker (if not supplied) and the supplied renderer
Args:
driver:
steps:
result:
context:
renderer:
**kwargs:
Returns:
"""
if not self.visual:
return NO_VISUAL_IN_STEP_MESSAGE
if not result and not context:
b_data = driver.get_broker_data("outcome:" + self.name)
if b_data:
result = b_data.get("result")
context = b_data.get("context")
try:
return self.visual.render(driver=driver, steps=steps, step=self, result=result, context=context, renderer=renderer)
except VisualDataMissing as ex:
return str(ex)
@property
def possible_links(self):
return {}
@property
def data_sources(self):
return []
def _trigger_actions(self, trigger_type, event):
for action in self.actions:
getattr(action, trigger_type)(event)
def _generate_visual_context(self, data):
return self.visual.generate_context(data) if self.visual else None
def __repr__(self):
return self.name
def describe(self):
"""
Describe step as Python data structures (lists and dicts)
Returns:
"""
return {
"name": self.name,
"title": self.title,
"actions": [a.describe() for a in self.actions],
"visual":self.visual.describe() if self.visual else "N/A"
}
|
(self, other)
|
45,047 |
easul.step
|
__ne__
|
Method generated by attrs for class ActionStep.
| null |
(self, other)
|
45,050 |
easul.step
|
_determine_outcome
| null |
@abstractmethod
def _determine_outcome(self, event):
pass
|
(self, event)
|
45,051 |
easul.step
|
_generate_visual_context
| null |
def _generate_visual_context(self, data):
return self.visual.generate_context(data) if self.visual else None
|
(self, data)
|
45,052 |
easul.step
|
_retrieve_data
| null |
def _retrieve_data(self, event):
from easul.data import DataInput
data = self.source.retrieve(event.driver, self)
return DataInput(data, schema=None, convert=False, validate=False)
|
(self, event)
|
45,053 |
easul.step
|
_store_current
| null |
def _store_current(self, driver, reason):
driver.store_state(self.state.label, self.state_value, reason, self.name, timestamp=driver.clock.timestamp)
driver.store_step(self.name, StepStatuses.INIT, timestamp=driver.clock.timestamp)
|
(self, driver, reason)
|
45,054 |
easul.step
|
_trigger_actions
| null |
def _trigger_actions(self, trigger_type, event):
for action in self.actions:
getattr(action, trigger_type)(event)
|
(self, trigger_type, event)
|
45,055 |
easul.step
|
describe
| null |
def describe(self):
desc = super().describe()
desc.update({"source": self.source.describe() if self.source else "N/A"})
return desc
|
(self)
|
45,056 |
easul.step
|
layout_kwargs
| null |
def layout_kwargs(self, driver, steps, **kwargs):
return {}
|
(self, driver, steps, **kwargs)
|
45,057 |
easul.step
|
render_visual
|
Render visual to HTML utilising the data in the broker (if not supplied) and the supplied renderer
Args:
driver:
steps:
result:
context:
renderer:
**kwargs:
Returns:
|
def render_visual(self, driver: "easul.driver.Driver", steps, result=None, context=None, renderer=None, **kwargs):
"""
Render visual to HTML utilising the data in the broker (if not supplied) and the supplied renderer
Args:
driver:
steps:
result:
context:
renderer:
**kwargs:
Returns:
"""
if not self.visual:
return NO_VISUAL_IN_STEP_MESSAGE
if not result and not context:
b_data = driver.get_broker_data("outcome:" + self.name)
if b_data:
result = b_data.get("result")
context = b_data.get("context")
try:
return self.visual.render(driver=driver, steps=steps, step=self, result=result, context=context, renderer=renderer)
except VisualDataMissing as ex:
return str(ex)
|
(self, driver: 'easul.driver.Driver', steps, result=None, context=None, renderer=None, **kwargs)
|
45,058 |
easul.step
|
run_all
| null |
def run_all(self, driver, previous_outcome=None):
event = ActionEvent(step=self, driver=driver, previous_outcome=previous_outcome)
self._trigger_actions("before_run", event)
driver.store_step(self.name, StepStatuses.INIT, timestamp=driver.clock.timestamp)
from easul.plan import run_step_logic
return run_step_logic(self, event)
|
(self, driver, previous_outcome=None)
|
45,059 |
easul.step
|
run_logic
|
Runs the logic part of the step lifecycle to determine the outcome.
Args:
event:
Returns:
|
def run_logic(self, event):
"""
Runs the logic part of the step lifecycle to determine the outcome.
Args:
event:
Returns:
"""
outcome = self._determine_outcome(event)
event.driver.store_step(self.name, StepStatuses.COMPLETE, outcome=outcome, timestamp=event.driver.clock.timestamp)
return outcome
|
(self, event)
|
45,060 |
easul.process
|
Age
|
Calculates age in years based on date objects in 'from_field' and 'to_field'.
Resultant age is put into 'target_field'
|
class Age:
"""
Calculates age in years based on date objects in 'from_field' and 'to_field'.
Resultant age is put into 'target_field'
"""
from_field:str = field()
to_field:str = field()
target_field:str = field()
def __call__(self, record):
record[self.target_field] = calculate_age(record.get(self.from_field), record.get(self.to_field))
return record
|
(*, from_field: str, to_field: str, target_field: str) -> None
|
45,061 |
easul.process
|
__call__
| null |
def __call__(self, record):
record[self.target_field] = calculate_age(record.get(self.from_field), record.get(self.to_field))
return record
|
(self, record)
|
45,062 |
easul.process
|
__eq__
|
Method generated by attrs for class Age.
|
# processes are callables which receive a data structure and return a processed version of the data structure
# most of the processes defined here are classes but functions can work as well
from datetime import datetime, date, time
import logging
from typing import Callable, List, Dict, Optional
LOG = logging.getLogger(__name__)
from attrs import define, field
@define(kw_only=True)
class ExcludeFields:
"""
Exclude specified fields from the output data
"""
exclude_fields = field()
def __call__(self, record):
for exclude_field in self.exclude_fields:
if exclude_field in record.keys():
del record[exclude_field]
return record
|
(self, other)
|
45,065 |
easul.process
|
__ne__
|
Method generated by attrs for class Age.
| null |
(self, other)
|
45,068 |
easul.algorithm.algorithm
|
Algorithm
|
Base class for algorithms
|
class Algorithm:
"""
Base class for algorithms
"""
title:str = field()
schema:Any = field()
encoder = field(default=None)
def __post_init__(self):
self.id = str(util.new_id())
def to_serialized(self):
return util.to_serialized(self)
@abstractmethod
def serialize_with_dataset_id(self):
pass
@property
def unique_digest(self)->str:
"""
Generate unique digest for algorithm
Returns: hex digest
"""
algo_dump = dill.dumps(self)
return hashlib.sha256(algo_dump).hexdigest()
@abstractmethod
def single_result(self, data:Any)->Result:
"""
Execute algorithm based on input data
Args:
data: input data
Returns: an algorithm result object
"""
pass
def create_input_dataset(self, data:Any)->DataInput:
return create_input_dataset(data=data, schema=self.schema, encoder=self.encoder)
def save(self, filename:str):
"""
Save algorithm
Args:
filename:
"""
from easul.util import save_data
save_data(filename, self)
def describe(self):
"""
Describes the algorithm
Returns: dictionary containing key elements
"""
return {
"title":self.title,
"type":self.__class__.__name__
}
|
(*, title: str, schema: Any, encoder=None) -> None
|
45,069 |
easul.algorithm.algorithm
|
__eq__
|
Method generated by attrs for class Algorithm.
|
import os
from typing import Any
from easul import util
from easul.algorithm.result import Result
from abc import abstractmethod
import dill
import hashlib
from attrs import define, field
from easul.data import create_input_dataset, DataInput
import logging
logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)
@define(kw_only=True)
class Algorithm:
"""
Base class for algorithms
"""
title:str = field()
schema:Any = field()
encoder = field(default=None)
def __post_init__(self):
self.id = str(util.new_id())
def to_serialized(self):
return util.to_serialized(self)
@abstractmethod
def serialize_with_dataset_id(self):
pass
@property
def unique_digest(self)->str:
"""
Generate unique digest for algorithm
Returns: hex digest
"""
algo_dump = dill.dumps(self)
return hashlib.sha256(algo_dump).hexdigest()
@abstractmethod
def single_result(self, data:Any)->Result:
"""
Execute algorithm based on input data
Args:
data: input data
Returns: an algorithm result object
"""
pass
def create_input_dataset(self, data:Any)->DataInput:
return create_input_dataset(data=data, schema=self.schema, encoder=self.encoder)
def save(self, filename:str):
"""
Save algorithm
Args:
filename:
"""
from easul.util import save_data
save_data(filename, self)
def describe(self):
"""
Describes the algorithm
Returns: dictionary containing key elements
"""
return {
"title":self.title,
"type":self.__class__.__name__
}
|
(self, other)
|
45,072 |
easul.algorithm.algorithm
|
__ne__
|
Method generated by attrs for class Algorithm.
| null |
(self, other)
|
45,073 |
easul.algorithm.algorithm
|
__post_init__
| null |
def __post_init__(self):
self.id = str(util.new_id())
|
(self)
|
45,076 |
easul.algorithm.algorithm
|
create_input_dataset
| null |
def create_input_dataset(self, data:Any)->DataInput:
return create_input_dataset(data=data, schema=self.schema, encoder=self.encoder)
|
(self, data: Any) -> easul.data.DataInput
|
45,077 |
easul.algorithm.algorithm
|
describe
|
Describes the algorithm
Returns: dictionary containing key elements
|
def describe(self):
"""
Describes the algorithm
Returns: dictionary containing key elements
"""
return {
"title":self.title,
"type":self.__class__.__name__
}
|
(self)
|
45,078 |
easul.algorithm.algorithm
|
save
|
Save algorithm
Args:
filename:
|
def save(self, filename:str):
"""
Save algorithm
Args:
filename:
"""
from easul.util import save_data
save_data(filename, self)
|
(self, filename: str)
|
45,079 |
easul.algorithm.algorithm
|
serialize_with_dataset_id
| null |
@abstractmethod
def serialize_with_dataset_id(self):
pass
|
(self)
|
45,080 |
easul.algorithm.algorithm
|
single_result
|
Execute algorithm based on input data
Args:
data: input data
Returns: an algorithm result object
|
@abstractmethod
def single_result(self, data:Any)->Result:
"""
Execute algorithm based on input data
Args:
data: input data
Returns: an algorithm result object
"""
pass
|
(self, data: Any) -> easul.algorithm.result.Result
|
45,081 |
easul.algorithm.algorithm
|
to_serialized
| null |
def to_serialized(self):
return util.to_serialized(self)
|
(self)
|
45,082 |
easul.step
|
AlgorithmStep
|
Step which enables algorithms to be embedded into decisions. Uses the 'source' to retrieve and process data.
Feeds the data into the 'algorithm' to get a result which is fed into a 'decision' to determine an outcome.
|
class AlgorithmStep(ActionStep):
"""
Step which enables algorithms to be embedded into decisions. Uses the 'source' to retrieve and process data.
Feeds the data into the 'algorithm' to get a result which is fed into a 'decision' to determine an outcome.
"""
source = field()
algorithm = field()
decision = field()
def describe(self):
desc = super().describe()
desc.update({
"decision": self.decision.describe(),
"algorithm": self.algorithm.describe()
})
return desc
def _determine_outcome(self, event):
data = self._retrieve_data(event)
result, context = self._run_algorithm(data, event.driver)
return self.decision.decide_outcome(result=result, context=context, data=data, step=self)
def _run_algorithm(self, data, driver):
algorithm = self.algorithm
result = algorithm.single_result(data)
context = self._generate_visual_context(data)
return result, context
def _retrieve_data(self, event):
if not self.source:
LOG.warning(f"No source specified in step '{self.name}' so cannot retrieve data")
raise InvalidStepData(journey=event.driver.journey, step_name=self.name, exception=SystemError(f"No source specified in step '{self.name}' so cannot retrieve data"))
event.data = self.source.retrieve(event.driver, self)
self._trigger_actions("after_data", event)
if not event.data:
raise StepDataNotAvailable(journey=event.driver.journey, step_name=self.name)
try:
return self.algorithm.create_input_dataset(event.data)
except ConversionError as ex:
raise InvalidStepData(journey=event.driver.journey, step_name=self.name, exception=ex.orig_exception)
except ValidationError as ex:
raise InvalidStepData(journey=event.driver.journey, step_name=self.name, exception=ex)
@property
def possible_links(self):
return self.decision.possible_links
@property
def data_sources(self):
return self.source.source_titles
|
(*, title, visual=None, exclude_from_chart=False, name=None, actions=NOTHING, source, algorithm, decision) -> None
|
45,083 |
easul.step
|
__eq__
|
Method generated by attrs for class AlgorithmStep.
|
from attrs import define, field
import logging
from easul.decision import BinaryDecision
LOG = logging.getLogger(__name__)
from easul.error import StepDataNotAvailable, ConversionError, InvalidStepData, VisualDataMissing, ValidationError
from enum import Enum, auto
from easul.outcome import Outcome, EndOutcome, PauseOutcome, InvalidDataOutcome, MissingDataOutcome
from abc import abstractmethod
NO_VISUAL_IN_STEP_MESSAGE = "Sorry no visual for this step"
@define(kw_only=True)
class Step:
    """
    Base Step class. Steps are the driving force of plans, they bring the different components together.
    The key things are 'visual' components which provide graphical views associated with steps and 'actions' which
    define action classes that occur during the lifecycle of a step run.
    """
    # Human-readable title of the step (required keyword argument).
    title = field()
    # Optional visual component used by render_visual/_generate_visual_context.
    visual = field(default=None)
    # When True the step is meant to be left out of chart renderings
    # (flag only here; not read within this class).
    exclude_from_chart = field(default=False)
    # Machine name; also used as the broker key suffix ("outcome:<name>") and as repr.
    name = field(default=None)
    # Action objects whose trigger methods are invoked via _trigger_actions.
    actions = field(factory=list)
    def layout_kwargs(self, driver, steps, **kwargs):
        # Base keyword arguments handed to layout/rendering code; extra kwargs pass through.
        return {"steps":steps, "driver":driver, "step":self, **kwargs}
    def render_visual(self, driver: "easul.driver.Driver", steps, result=None, context=None, renderer=None, **kwargs):
        """
        Render visual to HTML utilising the data in the broker (if not supplied) and the supplied renderer.

        If neither ``result`` nor ``context`` is given, both are looked up from the
        driver's broker under the key ``"outcome:<step name>"``.

        Args:
            driver: driver providing ``get_broker_data`` access to stored outcomes.
            steps: collection of steps, passed through to the visual.
            result: pre-computed result; fetched from the broker when omitted.
            context: pre-computed visual context; fetched from the broker when omitted.
            renderer: renderer passed through to ``visual.render``.
            **kwargs: accepted for interface compatibility but unused here.

        Returns:
            Rendered HTML, a fixed message when the step has no visual, or the
            stringified ``VisualDataMissing`` error when visual data is missing.
        """
        if not self.visual:
            return NO_VISUAL_IN_STEP_MESSAGE
        if not result and not context:
            b_data = driver.get_broker_data("outcome:" + self.name)
            if b_data:
                result = b_data.get("result")
                context = b_data.get("context")
        try:
            return self.visual.render(driver=driver, steps=steps, step=self, result=result, context=context, renderer=renderer)
        except VisualDataMissing as ex:
            # Degrade gracefully: return the error text instead of raising.
            return str(ex)
    @property
    def possible_links(self):
        # Base steps link to nothing; subclasses override (e.g. from a decision).
        return {}
    @property
    def data_sources(self):
        # Base steps have no data sources; subclasses override.
        return []
    def _trigger_actions(self, trigger_type, event):
        # Invoke the method named ``trigger_type`` (e.g. "after_data") on every action.
        for action in self.actions:
            getattr(action, trigger_type)(event)
    def _generate_visual_context(self, data):
        # Delegate context creation to the visual, if one is configured.
        return self.visual.generate_context(data) if self.visual else None
    def __repr__(self):
        return self.name
    def describe(self):
        """
        Describe step as Python data structures (lists and dicts).

        Returns:
            dict with the step's name, title, described actions and visual
            (or "N/A" when no visual is configured).
        """
        return {
            "name": self.name,
            "title": self.title,
            "actions": [a.describe() for a in self.actions],
            "visual":self.visual.describe() if self.visual else "N/A"
        }
|
(self, other)
|
45,086 |
easul.step
|
__ne__
|
Method generated by attrs for class AlgorithmStep.
| null |
(self, other)
|
45,089 |
easul.step
|
_determine_outcome
| null |
def _determine_outcome(self, event):
    """Retrieve input data, run the algorithm and let the decision pick an outcome."""
    dataset = self._retrieve_data(event)
    result, context = self._run_algorithm(dataset, event.driver)
    outcome = self.decision.decide_outcome(result=result, context=context, data=dataset, step=self)
    return outcome
|
(self, event)
|
45,091 |
easul.step
|
_retrieve_data
| null |
def _retrieve_data(self, event):
    """
    Retrieve raw data for the step and convert it into the algorithm's input dataset.

    Pulls data from the configured source, stores it on ``event.data``, fires the
    "after_data" actions, then converts the data via the algorithm.

    Args:
        event: step event carrying the driver; ``event.data`` is set as a side effect.

    Returns:
        The algorithm's input dataset built from the retrieved data.

    Raises:
        InvalidStepData: if no source is configured or conversion/validation fails.
        StepDataNotAvailable: if the source yields no data.
    """
    if not self.source:
        # Build the message once instead of duplicating it in log and exception.
        message = f"No source specified in step '{self.name}' so cannot retrieve data"
        LOG.warning(message)
        raise InvalidStepData(journey=event.driver.journey, step_name=self.name, exception=SystemError(message))
    event.data = self.source.retrieve(event.driver, self)
    self._trigger_actions("after_data", event)
    if not event.data:
        raise StepDataNotAvailable(journey=event.driver.journey, step_name=self.name)
    try:
        return self.algorithm.create_input_dataset(event.data)
    except ConversionError as ex:
        # Chain the original exception so the underlying cause is preserved in tracebacks.
        raise InvalidStepData(journey=event.driver.journey, step_name=self.name, exception=ex.orig_exception) from ex
    except ValidationError as ex:
        raise InvalidStepData(journey=event.driver.journey, step_name=self.name, exception=ex) from ex
|
(self, event)
|
45,092 |
easul.step
|
_run_algorithm
| null |
def _run_algorithm(self, data, driver):
    """Run the step's algorithm on *data* and build the visual context.

    ``driver`` is part of the hook signature but unused in this implementation.
    """
    result = self.algorithm.single_result(data)
    visual_context = self._generate_visual_context(data)
    return result, visual_context
|
(self, data, driver)
|
45,095 |
easul.step
|
describe
| null |
def describe(self):
    """Extend the base step description with decision and algorithm details."""
    description = super().describe()
    description["decision"] = self.decision.describe()
    description["algorithm"] = self.algorithm.describe()
    return description
|
(self)
|
45,100 |
easul.expression
|
BetweenExpression
|
Expression which determines if a field value is between (e.g. greater or equal to OR less or equal to) defined
from and to values.
|
class BetweenExpression(FieldExpression):
    """
    Field expression that is true when the field value lies within a closed
    interval: greater than or equal to ``from_value`` AND less than or equal
    to ``to_value``.
    """
    from_value = field()
    to_value = field()
    def _test(self, item):
        # Inclusive on both ends.
        return bool(self.from_value <= item <= self.to_value)
    @property
    def label(self):
        return f"{self.input_field} between {self.from_value} and {self.to_value}"
|
(*, input_field: str, ignore_empty: bool = False, from_value, to_value) -> None
|
45,101 |
easul.expression
|
__eq__
|
Method generated by attrs for class BetweenExpression.
|
import operator
import re
from abc import abstractmethod
from typing import Callable
from attrs import define, field
import numpy as np
import pandas as pd
from easul.error import MissingValue
import logging
LOG = logging.getLogger(__name__)
@define(kw_only=True)
class Expression:
    """
    Abstract base for expressions that evaluate input data and yield True/False.
    """
    # Display label; subclasses typically override (often via a property).
    label = ""
    # Values regarded as "empty" by is_empty.
    empty_values = [None, np.nan]
    @abstractmethod
    def evaluate(self, data):
        pass
    @classmethod
    def is_empty(cls, item):
        """Return True when *item* is a recognised empty value or a NaN."""
        if item in cls.empty_values:
            return True
        # Catch NaN floats that are not the np.nan singleton; non-numeric
        # items raise TypeError in np.isnan and count as non-empty.
        try:
            return bool(np.isnan(item))
        except TypeError:
            return False
|
(self, other)
|
45,104 |
easul.expression
|
__ne__
|
Method generated by attrs for class BetweenExpression.
| null |
(self, other)
|
45,107 |
easul.expression
|
_test
| null |
def _test(self, item):
    """True when *item* falls inside the inclusive [from_value, to_value] range."""
    return bool(self.from_value <= item <= self.to_value)
|
(self, item)
|
45,108 |
easul.expression
|
evaluate
| null |
def evaluate(self, data):
    """
    Evaluate the expression against *data*.

    The value is taken from ``data[self.input_field]``; objects that do not
    support item access fall back to their ``value`` attribute. Empty values
    yield False when ``ignore_empty`` is True, otherwise raise MissingValue.
    """
    try:
        value = data[self.input_field]
    except TypeError:
        # Not subscriptable: the object exposes the value directly.
        value = data.value
    if not self.is_empty(value):
        return self._test(value)
    if self.ignore_empty is True:
        return False
    raise MissingValue(f"Data item '{self.input_field}' is empty and cannot be ignored")
|
(self, data)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.