Columns: index (int64, 0 to 731k); package (string, length 2 to 98); name (string, length 1 to 76); docstring (string, length 0 to 281k); code (string, length 4 to 1.07M); signature (string, length 2 to 42.8k).
56,469
distrax._src.utils.monte_carlo
mc_estimate_kl_with_reparameterized
Estimates KL(distribution_a, distribution_b).
def mc_estimate_kl_with_reparameterized( distribution_a: DistributionLike, distribution_b: DistributionLike, rng_key: PRNGKey, num_samples: int): """Estimates KL(distribution_a, distribution_b).""" if isinstance(distribution_a, tfd.Distribution): if distribution_a.reparameterization_type != tfd.FULLY_REPARAMETERIZED: raise ValueError( f'Distribution `{distribution_a.name}` cannot be reparameterized.') distribution_a = conversion.as_distribution(distribution_a) distribution_b = conversion.as_distribution(distribution_b) samples, logp_a = distribution_a.sample_and_log_prob( seed=rng_key, sample_shape=[num_samples]) logp_b = distribution_b.log_prob(samples) log_ratio = logp_b - logp_a kl_estimator = -log_ratio return jnp.mean(kl_estimator, axis=0)
(distribution_a: Union[distrax._src.distributions.distribution.Distribution, tensorflow_probability.substrates.jax.distributions.distribution.Distribution], distribution_b: Union[distrax._src.distributions.distribution.Distribution, tensorflow_probability.substrates.jax.distributions.distribution.Distribution], rng_key: jax.Array, num_samples: int)
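A minimal usage sketch for the entry above (not part of the dataset row; it assumes the private module path `distrax._src.utils.monte_carlo` is importable and uses standard distrax/JAX APIs)::

    import jax
    import distrax
    from distrax._src.utils import monte_carlo

    key = jax.random.PRNGKey(0)
    dist_a = distrax.Normal(loc=0., scale=1.)
    dist_b = distrax.Normal(loc=1., scale=2.)

    # Reparameterized Monte Carlo estimate of KL(dist_a || dist_b).
    kl_mc = monte_carlo.mc_estimate_kl_with_reparameterized(
        dist_a, dist_b, rng_key=key, num_samples=10_000)

    # Sanity check against the analytic value for two Gaussians.
    kl_exact = dist_a.kl_divergence(dist_b)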
56,470
distrax._src.utils.monte_carlo
mc_estimate_mode
Returns a Monte Carlo estimate of the mode of a distribution.
def mc_estimate_mode( distribution: DistributionLike, rng_key: PRNGKey, num_samples: int): """Returns a Monte Carlo estimate of the mode of a distribution.""" distribution = conversion.as_distribution(distribution) # Obtain samples from the distribution and their log probability. samples, log_probs = distribution.sample_and_log_prob( seed=rng_key, sample_shape=[num_samples]) # Do argmax over the sample_shape. index = jnp.expand_dims(jnp.argmax(log_probs, axis=0), axis=0) # Broadcast index to include event_shape of the sample. index = index.reshape(index.shape + (1,) * (samples.ndim - index.ndim)) mode = jnp.squeeze(jnp.take_along_axis(samples, index, axis=0), axis=0) return mode
(distribution: Union[distrax._src.distributions.distribution.Distribution, tensorflow_probability.substrates.jax.distributions.distribution.Distribution], rng_key: jax.Array, num_samples: int)
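Illustrative usage, under the same assumption that the private module is importable; the argmax-over-samples estimate concentrates near the true mode as `num_samples` grows::

    import jax
    import distrax
    from distrax._src.utils import monte_carlo

    dist = distrax.Normal(loc=3., scale=0.5)
    mode_estimate = monte_carlo.mc_estimate_mode(
        dist, rng_key=jax.random.PRNGKey(42), num_samples=1_000)
    # mode_estimate lands close to 3.0, the exact mode of this Normal.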
56,471
distrax._src.utils.transformations
register_inverse
Register a function that implements the inverse of a JAX primitive. Args: primitive: JAX primitive, often named `*_p` and located in `jax.lax.lax.py`. inverse_left: a function implementing the inverse if the primitive is a unary operator or if `inv(f(x,y)) == inv(f(y,x))`, else a function implementing the inverse of a binary operator when the variable in question comes before the operator, e.g. `x div_p 2`. inverse_right: a function implementing the inverse of a binary operator when the variable in question comes after the operator, e.g. `2 div_p x`.
def register_inverse(primitive, inverse_left, inverse_right=None): """Register a function that implements the inverse of a JAX primitive. Args: primitive: JAX primitive, often named `*_p` and located in `jax.lax.lax.py`. inverse_left: a function implementing the inverse if the primitive is a unary operator or if `inv(f(x,y)) == inv(f(y,x))`, else a function implementing the inverse of a binary operator when the variable in question comes before the operator, e.g. `x div_p 2`. inverse_right: a function implementing the inverse of a binary operator when the variable in question comes after the operator, e.g. `2 div_p x`. """ if inverse_right is None: _inverse_registry[primitive] = inverse_left else: _inverse_registry[primitive] = (inverse_left, inverse_right)
(primitive, inverse_left, inverse_right=None)
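A hedged sketch of how the registry might be populated. The argument order of the inverse callbacks (output first, constant operand second) is an assumption for illustration, not something the docstring above confirms::

    from jax import lax
    import jax.numpy as jnp
    from distrax._src.utils import transformations

    # Unary primitive: exp(x) is inverted by log(y).
    transformations.register_inverse(lax.exp_p, jnp.log)

    # Binary primitive: y = x / c is inverted by y * c (variable before the
    # operator), while y = c / x is inverted by c / y (variable after it).
    transformations.register_inverse(
        lax.div_p,
        inverse_left=lambda y, c: y * c,   # assumed callback signature
        inverse_right=lambda y, c: c / y,  # assumed callback signature
    )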
56,472
distrax._src.distributions.straight_through
straight_through_wrapper
Wrap a distribution to use straight-through gradient for samples.
def straight_through_wrapper( # pylint: disable=invalid-name Distribution, ) -> distribution.DistributionLike: """Wrap a distribution to use straight-through gradient for samples.""" def sample(self, seed, sample_shape=()): # pylint: disable=g-doc-args """Sampling with straight through biased gradient estimator. Sample a value from the distribution, but backpropagate through the underlying probability to compute the gradient. References: [1] Yoshua Bengio, Nicholas Léonard, Aaron Courville, Estimating or Propagating Gradients Through Stochastic Neurons for Conditional Computation, https://arxiv.org/abs/1308.3432 Args: seed: a random seed. sample_shape: the shape of the required sample. Returns: A sample with straight-through gradient. """ # pylint: disable=protected-access obj = Distribution(probs=self._probs, logits=self._logits) assert isinstance(obj, categorical.Categorical) sample = obj.sample(seed=seed, sample_shape=sample_shape) probs = obj.probs padded_probs = _pad(probs, sample.shape) # Keep sample unchanged, but add gradient through probs. sample += padded_probs - jax.lax.stop_gradient(padded_probs) return sample def _pad(probs, shape): """Grow probs to have the same number of dimensions as shape.""" while len(probs.shape) < len(shape): probs = probs[None] return probs parent_name = Distribution.__name__ # Return a new object, overriding sample. return type('StraighThrough' + parent_name, (Distribution,), {'sample': sample})
(Distribution) -> Union[distrax._src.distributions.distribution.Distribution, tensorflow_probability.substrates.jax.distributions.distribution.Distribution]
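A usage sketch, assuming distrax's `OneHotCategorical` (a `Categorical` subclass) as the wrapped distribution, which is what the probs-padding logic above suggests; the wrapper is imported from the private module path in this row::

    import jax
    import jax.numpy as jnp
    import distrax
    from distrax._src.distributions.straight_through import straight_through_wrapper

    STOneHotCategorical = straight_through_wrapper(distrax.OneHotCategorical)

    def expected_index(logits):
        dist = STOneHotCategorical(logits=logits)
        one_hot = dist.sample(seed=jax.random.PRNGKey(0))
        return jnp.sum(one_hot * jnp.arange(3.0))

    # Gradients flow to the logits through the straight-through estimator,
    # even though the sample itself is a hard one-hot vector.
    grads = jax.grad(expected_index)(jnp.array([0.1, 0.2, 0.3]))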
56,473
distrax._src.utils.conversion
to_tfp
Converts a distribution or bijector to a TFP-compatible equivalent object. The returned object is not necessarily of type `tfb.Bijector` or `tfd.Distribution`; rather, it is a Distrax object that implements TFP functionality so that it can be used in TFP. If the input is already of TFP type, it is returned unchanged. Args: obj: The distribution or bijector to be converted to TFP. name: The name of the resulting object. Returns: A TFP-compatible equivalent distribution or bijector.
def to_tfp(obj: Union[bijector.Bijector, tfb.Bijector, distribution.Distribution, tfd.Distribution], name: Optional[str] = None): """Converts a distribution or bijector to a TFP-compatible equivalent object. The returned object is not necessarily of type `tfb.Bijector` or `tfd.Distribution`; rather, it is a Distrax object that implements TFP functionality so that it can be used in TFP. If the input is already of TFP type, it is returned unchanged. Args: obj: The distribution or bijector to be converted to TFP. name: The name of the resulting object. Returns: A TFP-compatible equivalent distribution or bijector. """ if isinstance(obj, (tfb.Bijector, tfd.Distribution)): return obj elif isinstance(obj, bijector.Bijector): return tfp_compatible_bijector.tfp_compatible_bijector(obj, name) elif isinstance(obj, distribution.Distribution): return tfp_compatible_distribution.tfp_compatible_distribution(obj, name) else: raise TypeError( f"`to_tfp` can only convert objects of type: `distrax.Bijector`," f" `tfb.Bijector`, `distrax.Distribution`, `tfd.Distribution`. Got type" f" `{type(obj)}`.")
(obj: Union[distrax._src.bijectors.bijector.Bijector, tensorflow_probability.substrates.jax.bijectors.bijector.Bijector, distrax._src.distributions.distribution.Distribution, tensorflow_probability.substrates.jax.distributions.distribution.Distribution], name: Optional[str] = None)
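Example usage (a sketch, importing via the module path from this row); the wrapped object can then be handed to TFP constructs such as `tfd.TransformedDistribution`::

    import distrax
    from distrax._src.utils import conversion
    from tensorflow_probability.substrates import jax as tfp
    tfd = tfp.distributions
    tfb = tfp.bijectors

    # Wrap a Distrax distribution so TFP machinery can consume it.
    tfp_normal = conversion.to_tfp(distrax.Normal(loc=0., scale=1.), name='normal')
    log_normal = tfd.TransformedDistribution(tfp_normal, bijector=tfb.Exp())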
56,474
flask_marshmallow
Marshmallow
Wrapper class that integrates Marshmallow with a Flask application. To use it, instantiate with an application:: from flask import Flask app = Flask(__name__) ma = Marshmallow(app) The object provides access to the :class:`Schema` class, all fields in :mod:`marshmallow.fields`, as well as the Flask-specific fields in :mod:`flask_marshmallow.fields`. You can declare schema like so:: class BookSchema(ma.Schema): class Meta: fields = ("id", "title", "author", "links") author = ma.Nested(AuthorSchema) links = ma.Hyperlinks( { "self": ma.URLFor("book_detail", values=dict(id="<id>")), "collection": ma.URLFor("book_list"), } ) In order to integrate with Flask-SQLAlchemy, this extension must be initialized *after* `flask_sqlalchemy.SQLAlchemy`. :: db = SQLAlchemy(app) ma = Marshmallow(app) This gives you access to `ma.SQLAlchemySchema` and `ma.SQLAlchemyAutoSchema`, which generate marshmallow `~marshmallow.Schema` classes based on the passed in model or table. :: class AuthorSchema(ma.SQLAlchemyAutoSchema): class Meta: model = Author :param Flask app: The Flask application object.
class Marshmallow: """Wrapper class that integrates Marshmallow with a Flask application. To use it, instantiate with an application:: from flask import Flask app = Flask(__name__) ma = Marshmallow(app) The object provides access to the :class:`Schema` class, all fields in :mod:`marshmallow.fields`, as well as the Flask-specific fields in :mod:`flask_marshmallow.fields`. You can declare schema like so:: class BookSchema(ma.Schema): class Meta: fields = ("id", "title", "author", "links") author = ma.Nested(AuthorSchema) links = ma.Hyperlinks( { "self": ma.URLFor("book_detail", values=dict(id="<id>")), "collection": ma.URLFor("book_list"), } ) In order to integrate with Flask-SQLAlchemy, this extension must be initialized *after* `flask_sqlalchemy.SQLAlchemy`. :: db = SQLAlchemy(app) ma = Marshmallow(app) This gives you access to `ma.SQLAlchemySchema` and `ma.SQLAlchemyAutoSchema`, which generate marshmallow `~marshmallow.Schema` classes based on the passed in model or table. :: class AuthorSchema(ma.SQLAlchemyAutoSchema): class Meta: model = Author :param Flask app: The Flask application object. """ def __init__(self, app: typing.Optional["Flask"] = None): self.Schema = Schema if has_sqla: self.SQLAlchemySchema = sqla.SQLAlchemySchema self.SQLAlchemyAutoSchema = sqla.SQLAlchemyAutoSchema self.auto_field = sqla.auto_field self.HyperlinkRelated = sqla.HyperlinkRelated _attach_fields(self) if app is not None: self.init_app(app) def init_app(self, app: "Flask"): """Initializes the application with the extension. :param Flask app: The Flask application object. """ app.extensions = getattr(app, "extensions", {}) # If using Flask-SQLAlchemy, attach db.session to SQLAlchemySchema if has_sqla and "sqlalchemy" in app.extensions: db = app.extensions["sqlalchemy"] self.SQLAlchemySchema.OPTIONS_CLASS.session = db.session self.SQLAlchemyAutoSchema.OPTIONS_CLASS.session = db.session app.extensions[EXTENSION_NAME] = self
(app: Optional[ForwardRef('Flask')] = None)
56,475
flask_marshmallow
__init__
null
def __init__(self, app: typing.Optional["Flask"] = None): self.Schema = Schema if has_sqla: self.SQLAlchemySchema = sqla.SQLAlchemySchema self.SQLAlchemyAutoSchema = sqla.SQLAlchemyAutoSchema self.auto_field = sqla.auto_field self.HyperlinkRelated = sqla.HyperlinkRelated _attach_fields(self) if app is not None: self.init_app(app)
(self, app: Optional[ForwardRef('Flask')] = None)
56,476
flask_marshmallow
init_app
Initializes the application with the extension. :param Flask app: The Flask application object.
def init_app(self, app: "Flask"): """Initializes the application with the extension. :param Flask app: The Flask application object. """ app.extensions = getattr(app, "extensions", {}) # If using Flask-SQLAlchemy, attach db.session to SQLAlchemySchema if has_sqla and "sqlalchemy" in app.extensions: db = app.extensions["sqlalchemy"] self.SQLAlchemySchema.OPTIONS_CLASS.session = db.session self.SQLAlchemyAutoSchema.OPTIONS_CLASS.session = db.session app.extensions[EXTENSION_NAME] = self
(self, app: 'Flask')
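Beyond the eager form shown in the class docstring, `init_app` supports the Flask application-factory pattern (a sketch; when Flask-SQLAlchemy is used, initialize it first so the session gets attached)::

    from flask import Flask
    from flask_marshmallow import Marshmallow

    ma = Marshmallow()  # created without an app

    def create_app():
        app = Flask(__name__)
        # db.init_app(app) would go here first if Flask-SQLAlchemy is in use.
        ma.init_app(app)
        return app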
56,477
flask_marshmallow.schema
Schema
Base serializer with which to define custom serializers. See `marshmallow.Schema` for more details about the `Schema` API.
class Schema(ma.Schema): """Base serializer with which to define custom serializers. See `marshmallow.Schema` for more details about the `Schema` API. """ def jsonify( self, obj: typing.Any, many: typing.Optional[bool] = None, *args, **kwargs ) -> "Response": """Return a JSON response containing the serialized data. :param obj: Object to serialize. :param bool many: Whether `obj` should be serialized as an instance or as a collection. If None, defaults to the value of the `many` attribute on this Schema. :param kwargs: Additional keyword arguments passed to `flask.jsonify`. .. versionchanged:: 0.6.0 Takes the same arguments as `marshmallow.Schema.dump`. Additional keyword arguments are passed to `flask.jsonify`. .. versionchanged:: 0.6.3 The `many` argument for this method defaults to the value of the `many` attribute on the Schema. Previously, the `many` argument of this method defaulted to False, regardless of the value of `Schema.many`. """ if many is None: many = self.many data = self.dump(obj, many=many) return flask.jsonify(data, *args, **kwargs)
(*, only: 'types.StrSequenceOrSet | None' = None, exclude: 'types.StrSequenceOrSet' = (), many: 'bool' = False, context: 'dict | None' = None, load_only: 'types.StrSequenceOrSet' = (), dump_only: 'types.StrSequenceOrSet' = (), partial: 'bool | types.StrSequenceOrSet | None' = None, unknown: 'str | None' = None)
56,478
marshmallow.schema
__apply_nested_option
Apply nested options to nested fields
def __apply_nested_option(self, option_name, field_names, set_operation) -> None: """Apply nested options to nested fields""" # Split nested field names on the first dot. nested_fields = [name.split(".", 1) for name in field_names if "." in name] # Partition the nested field names by parent field. nested_options = defaultdict(list) # type: defaultdict for parent, nested_names in nested_fields: nested_options[parent].append(nested_names) # Apply the nested field options. for key, options in iter(nested_options.items()): new_options = self.set_class(options) original_options = getattr(self.declared_fields[key], option_name, ()) if original_options: if set_operation == "union": new_options |= self.set_class(original_options) if set_operation == "intersection": new_options &= self.set_class(original_options) setattr(self.declared_fields[key], option_name, new_options)
(self, option_name, field_names, set_operation) -> NoneType
56,479
marshmallow.schema
__init__
null
def __init__( self, *, only: types.StrSequenceOrSet | None = None, exclude: types.StrSequenceOrSet = (), many: bool = False, context: dict | None = None, load_only: types.StrSequenceOrSet = (), dump_only: types.StrSequenceOrSet = (), partial: bool | types.StrSequenceOrSet | None = None, unknown: str | None = None, ): # Raise error if only or exclude is passed as string, not list of strings if only is not None and not is_collection(only): raise StringNotCollectionError('"only" should be a list of strings') if not is_collection(exclude): raise StringNotCollectionError('"exclude" should be a list of strings') # copy declared fields from metaclass self.declared_fields = copy.deepcopy(self._declared_fields) self.many = many self.only = only self.exclude: set[typing.Any] | typing.MutableSet[typing.Any] = set( self.opts.exclude ) | set(exclude) self.ordered = self.opts.ordered self.load_only = set(load_only) or set(self.opts.load_only) self.dump_only = set(dump_only) or set(self.opts.dump_only) self.partial = partial self.unknown = ( self.opts.unknown if unknown is None else validate_unknown_parameter_value(unknown) ) self.context = context or {} self._normalize_nested_options() #: Dictionary mapping field_names -> :class:`Field` objects self.fields = {} # type: typing.Dict[str, ma_fields.Field] self.load_fields = {} # type: typing.Dict[str, ma_fields.Field] self.dump_fields = {} # type: typing.Dict[str, ma_fields.Field] self._init_fields() messages = {} messages.update(self._default_error_messages) for cls in reversed(self.__class__.__mro__): messages.update(getattr(cls, "error_messages", {})) messages.update(self.error_messages or {}) self.error_messages = messages
(self, *, only: Union[Sequence[str], AbstractSet[str], NoneType] = None, exclude: Union[Sequence[str], AbstractSet[str]] = (), many: bool = False, context: Optional[dict] = None, load_only: Union[Sequence[str], AbstractSet[str]] = (), dump_only: Union[Sequence[str], AbstractSet[str]] = (), partial: Union[bool, Sequence[str], AbstractSet[str], NoneType] = None, unknown: Optional[str] = None)
56,480
marshmallow.schema
__repr__
null
def __repr__(self) -> str: return f"<{self.__class__.__name__}(many={self.many})>"
(self) -> str
56,481
marshmallow.schema
_bind_field
Bind field to the schema, setting any necessary attributes on the field (e.g. parent and name). Also set field load_only and dump_only values if field_name was specified in ``class Meta``.
def _bind_field(self, field_name: str, field_obj: ma_fields.Field) -> None: """Bind field to the schema, setting any necessary attributes on the field (e.g. parent and name). Also set field load_only and dump_only values if field_name was specified in ``class Meta``. """ if field_name in self.load_only: field_obj.load_only = True if field_name in self.dump_only: field_obj.dump_only = True try: field_obj._bind_to_schema(field_name, self) except TypeError as error: # Field declared as a class, not an instance. Ignore type checking because # we handle unsupported arg types, i.e. this is dead code from # the type checker's perspective. if isinstance(field_obj, type) and issubclass(field_obj, base.FieldABC): msg = ( f'Field for "{field_name}" must be declared as a ' "Field instance, not a class. " f'Did you mean "fields.{field_obj.__name__}()"?' # type: ignore ) raise TypeError(msg) from error raise error self.on_bind_field(field_name, field_obj)
(self, field_name: str, field_obj: marshmallow.fields.Field) -> NoneType
56,482
marshmallow.schema
_call_and_store
Call ``getter_func`` with ``data`` as its argument, and store any `ValidationErrors`. :param callable getter_func: Function for getting the serialized/deserialized value from ``data``. :param data: The data passed to ``getter_func``. :param str field_name: Field name. :param int index: Index of the item being validated, if validating a collection, otherwise `None`.
@staticmethod def _call_and_store(getter_func, data, *, field_name, error_store, index=None): """Call ``getter_func`` with ``data`` as its argument, and store any `ValidationErrors`. :param callable getter_func: Function for getting the serialized/deserialized value from ``data``. :param data: The data passed to ``getter_func``. :param str field_name: Field name. :param int index: Index of the item being validated, if validating a collection, otherwise `None`. """ try: value = getter_func(data) except ValidationError as error: error_store.store_error(error.messages, field_name, index=index) # When a Nested field fails validation, the marshalled data is stored # on the ValidationError's valid_data attribute return error.valid_data or missing return value
(getter_func, data, *, field_name, error_store, index=None)
56,483
marshmallow.schema
_deserialize
Deserialize ``data``. :param dict data: The data to deserialize. :param ErrorStore error_store: Structure to store errors. :param bool many: `True` if ``data`` should be deserialized as a collection. :param bool|tuple partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. :param int index: Index of the item being serialized (for storing errors) if serializing a collection, otherwise `None`. :return: A dictionary of the deserialized data.
def _deserialize( self, data: ( typing.Mapping[str, typing.Any] | typing.Iterable[typing.Mapping[str, typing.Any]] ), *, error_store: ErrorStore, many: bool = False, partial=None, unknown=RAISE, index=None, ) -> _T | list[_T]: """Deserialize ``data``. :param dict data: The data to deserialize. :param ErrorStore error_store: Structure to store errors. :param bool many: `True` if ``data`` should be deserialized as a collection. :param bool|tuple partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. :param int index: Index of the item being serialized (for storing errors) if serializing a collection, otherwise `None`. :return: A dictionary of the deserialized data. """ index_errors = self.opts.index_errors index = index if index_errors else None if many: if not is_collection(data): error_store.store_error([self.error_messages["type"]], index=index) ret_l = [] # type: typing.List[_T] else: ret_l = [ typing.cast( _T, self._deserialize( typing.cast(typing.Mapping[str, typing.Any], d), error_store=error_store, many=False, partial=partial, unknown=unknown, index=idx, ), ) for idx, d in enumerate(data) ] return ret_l ret_d = self.dict_class() # Check data is a dict if not isinstance(data, Mapping): error_store.store_error([self.error_messages["type"]], index=index) else: partial_is_collection = is_collection(partial) for attr_name, field_obj in self.load_fields.items(): field_name = ( field_obj.data_key if field_obj.data_key is not None else attr_name ) raw_value = data.get(field_name, missing) if raw_value is missing: # Ignore missing field if we're allowed to. if partial is True or ( partial_is_collection and attr_name in partial ): continue d_kwargs = {} # Allow partial loading of nested schemas. if partial_is_collection: prefix = field_name + "." len_prefix = len(prefix) sub_partial = [ f[len_prefix:] for f in partial if f.startswith(prefix) ] d_kwargs["partial"] = sub_partial elif partial is not None: d_kwargs["partial"] = partial def getter( val, field_obj=field_obj, field_name=field_name, d_kwargs=d_kwargs ): return field_obj.deserialize( val, field_name, data, **d_kwargs, ) value = self._call_and_store( getter_func=getter, data=raw_value, field_name=field_name, error_store=error_store, index=index, ) if value is not missing: key = field_obj.attribute or attr_name set_value(ret_d, key, value) if unknown != EXCLUDE: fields = { field_obj.data_key if field_obj.data_key is not None else field_name for field_name, field_obj in self.load_fields.items() } for key in set(data) - fields: value = data[key] if unknown == INCLUDE: ret_d[key] = value elif unknown == RAISE: error_store.store_error( [self.error_messages["unknown"]], key, (index if index_errors else None), ) return ret_d
(self, data: Union[Mapping[str, Any], Iterable[Mapping[str, Any]]], *, error_store: marshmallow.error_store.ErrorStore, many: bool = False, partial=None, unknown='raise', index=None) -> Union[~_T, list[~_T]]
56,484
marshmallow.schema
_do_load
Deserialize `data`, returning the deserialized result. This method is private API. :param data: The data to deserialize. :param many: Whether to deserialize `data` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to validate required fields. If its value is an iterable, only fields listed in that iterable will be ignored will be allowed missing. If `True`, all fields will be allowed missing. If `None`, the value for `self.partial` is used. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. If `None`, the value for `self.unknown` is used. :param postprocess: Whether to run post_load methods.. :return: Deserialized data
def _do_load( self, data: ( typing.Mapping[str, typing.Any] | typing.Iterable[typing.Mapping[str, typing.Any]] ), *, many: bool | None = None, partial: bool | types.StrSequenceOrSet | None = None, unknown: str | None = None, postprocess: bool = True, ): """Deserialize `data`, returning the deserialized result. This method is private API. :param data: The data to deserialize. :param many: Whether to deserialize `data` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to validate required fields. If its value is an iterable, only fields listed in that iterable will be ignored will be allowed missing. If `True`, all fields will be allowed missing. If `None`, the value for `self.partial` is used. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. If `None`, the value for `self.unknown` is used. :param postprocess: Whether to run post_load methods.. :return: Deserialized data """ error_store = ErrorStore() errors = {} # type: dict[str, list[str]] many = self.many if many is None else bool(many) unknown = ( self.unknown if unknown is None else validate_unknown_parameter_value(unknown) ) if partial is None: partial = self.partial # Run preprocessors if self._has_processors(PRE_LOAD): try: processed_data = self._invoke_load_processors( PRE_LOAD, data, many=many, original_data=data, partial=partial ) except ValidationError as err: errors = err.normalized_messages() result = None # type: list | dict | None else: processed_data = data if not errors: # Deserialize data result = self._deserialize( processed_data, error_store=error_store, many=many, partial=partial, unknown=unknown, ) # Run field-level validation self._invoke_field_validators( error_store=error_store, data=result, many=many ) # Run schema-level validation if self._has_processors(VALIDATES_SCHEMA): field_errors = bool(error_store.errors) self._invoke_schema_validators( error_store=error_store, pass_many=True, data=result, original_data=data, many=many, partial=partial, field_errors=field_errors, ) self._invoke_schema_validators( error_store=error_store, pass_many=False, data=result, original_data=data, many=many, partial=partial, field_errors=field_errors, ) errors = error_store.errors # Run post processors if not errors and postprocess and self._has_processors(POST_LOAD): try: result = self._invoke_load_processors( POST_LOAD, result, many=many, original_data=data, partial=partial, ) except ValidationError as err: errors = err.normalized_messages() if errors: exc = ValidationError(errors, data=data, valid_data=result) self.handle_error(exc, data, many=many, partial=partial) raise exc return result
(self, data: Union[Mapping[str, Any], Iterable[Mapping[str, Any]]], *, many: Optional[bool] = None, partial: Union[bool, Sequence[str], AbstractSet[str], NoneType] = None, unknown: Optional[str] = None, postprocess: bool = True)
56,485
marshmallow.schema
_init_fields
Update self.fields, self.load_fields, and self.dump_fields based on schema options. This method is private API.
def _init_fields(self) -> None: """Update self.fields, self.load_fields, and self.dump_fields based on schema options. This method is private API. """ if self.opts.fields: available_field_names = self.set_class(self.opts.fields) else: available_field_names = self.set_class(self.declared_fields.keys()) if self.opts.additional: available_field_names |= self.set_class(self.opts.additional) invalid_fields = self.set_class() if self.only is not None: # Return only fields specified in only option field_names: typing.AbstractSet[typing.Any] = self.set_class(self.only) invalid_fields |= field_names - available_field_names else: field_names = available_field_names # If "exclude" option or param is specified, remove those fields. if self.exclude: # Note that this isn't available_field_names, since we want to # apply "only" for the actual calculation. field_names = field_names - self.exclude invalid_fields |= self.exclude - available_field_names if invalid_fields: message = f"Invalid fields for {self}: {invalid_fields}." raise ValueError(message) fields_dict = self.dict_class() for field_name in field_names: field_obj = self.declared_fields.get(field_name, ma_fields.Inferred()) self._bind_field(field_name, field_obj) fields_dict[field_name] = field_obj load_fields, dump_fields = self.dict_class(), self.dict_class() for field_name, field_obj in fields_dict.items(): if not field_obj.dump_only: load_fields[field_name] = field_obj if not field_obj.load_only: dump_fields[field_name] = field_obj dump_data_keys = [ field_obj.data_key if field_obj.data_key is not None else name for name, field_obj in dump_fields.items() ] if len(dump_data_keys) != len(set(dump_data_keys)): data_keys_duplicates = { x for x in dump_data_keys if dump_data_keys.count(x) > 1 } raise ValueError( "The data_key argument for one or more fields collides " "with another field's name or data_key argument. " "Check the following field names and " f"data_key arguments: {list(data_keys_duplicates)}" ) load_attributes = [obj.attribute or name for name, obj in load_fields.items()] if len(load_attributes) != len(set(load_attributes)): attributes_duplicates = { x for x in load_attributes if load_attributes.count(x) > 1 } raise ValueError( "The attribute argument for one or more fields collides " "with another field's name or attribute argument. " "Check the following field names and " f"attribute arguments: {list(attributes_duplicates)}" ) self.fields = fields_dict self.dump_fields = dump_fields self.load_fields = load_fields
(self) -> NoneType
56,486
marshmallow.schema
_invoke_dump_processors
null
def _invoke_dump_processors( self, tag: str, data, *, many: bool, original_data=None ): # The pass_many post-dump processors may do things like add an envelope, so # invoke those after invoking the non-pass_many processors which will expect # to get a list of items. data = self._invoke_processors( tag, pass_many=False, data=data, many=many, original_data=original_data ) data = self._invoke_processors( tag, pass_many=True, data=data, many=many, original_data=original_data ) return data
(self, tag: str, data, *, many: bool, original_data=None)
56,487
marshmallow.schema
_invoke_field_validators
null
def _invoke_field_validators(self, *, error_store: ErrorStore, data, many: bool): for attr_name in self._hooks[VALIDATES]: validator = getattr(self, attr_name) validator_kwargs = validator.__marshmallow_hook__[VALIDATES] field_name = validator_kwargs["field_name"] try: field_obj = self.fields[field_name] except KeyError as error: if field_name in self.declared_fields: continue raise ValueError(f'"{field_name}" field does not exist.') from error data_key = ( field_obj.data_key if field_obj.data_key is not None else field_name ) if many: for idx, item in enumerate(data): try: value = item[field_obj.attribute or field_name] except KeyError: pass else: validated_value = self._call_and_store( getter_func=validator, data=value, field_name=data_key, error_store=error_store, index=(idx if self.opts.index_errors else None), ) if validated_value is missing: data[idx].pop(field_name, None) else: try: value = data[field_obj.attribute or field_name] except KeyError: pass else: validated_value = self._call_and_store( getter_func=validator, data=value, field_name=data_key, error_store=error_store, ) if validated_value is missing: data.pop(field_name, None)
(self, *, error_store: marshmallow.error_store.ErrorStore, data, many: bool)
56,488
marshmallow.schema
_invoke_load_processors
null
def _invoke_load_processors( self, tag: str, data, *, many: bool, original_data, partial: bool | types.StrSequenceOrSet | None, ): # This has to invert the order of the dump processors, so run the pass_many # processors first. data = self._invoke_processors( tag, pass_many=True, data=data, many=many, original_data=original_data, partial=partial, ) data = self._invoke_processors( tag, pass_many=False, data=data, many=many, original_data=original_data, partial=partial, ) return data
(self, tag: str, data, *, many: bool, original_data, partial: Union[bool, Sequence[str], AbstractSet[str], NoneType])
56,489
marshmallow.schema
_invoke_processors
null
def _invoke_processors( self, tag: str, *, pass_many: bool, data, many: bool, original_data=None, **kwargs, ): key = (tag, pass_many) for attr_name in self._hooks[key]: # This will be a bound method. processor = getattr(self, attr_name) processor_kwargs = processor.__marshmallow_hook__[key] pass_original = processor_kwargs.get("pass_original", False) if many and not pass_many: if pass_original: data = [ processor(item, original, many=many, **kwargs) for item, original in zip(data, original_data) ] else: data = [processor(item, many=many, **kwargs) for item in data] else: if pass_original: data = processor(data, original_data, many=many, **kwargs) else: data = processor(data, many=many, **kwargs) return data
(self, tag: str, *, pass_many: bool, data, many: bool, original_data=None, **kwargs)
56,490
marshmallow.schema
_invoke_schema_validators
null
def _invoke_schema_validators( self, *, error_store: ErrorStore, pass_many: bool, data, original_data, many: bool, partial: bool | types.StrSequenceOrSet | None, field_errors: bool = False, ): for attr_name in self._hooks[(VALIDATES_SCHEMA, pass_many)]: validator = getattr(self, attr_name) validator_kwargs = validator.__marshmallow_hook__[ (VALIDATES_SCHEMA, pass_many) ] if field_errors and validator_kwargs["skip_on_field_errors"]: continue pass_original = validator_kwargs.get("pass_original", False) if many and not pass_many: for idx, (item, orig) in enumerate(zip(data, original_data)): self._run_validator( validator, item, original_data=orig, error_store=error_store, many=many, partial=partial, index=idx, pass_original=pass_original, ) else: self._run_validator( validator, data, original_data=original_data, error_store=error_store, many=many, pass_original=pass_original, partial=partial, )
(self, *, error_store: marshmallow.error_store.ErrorStore, pass_many: bool, data, original_data, many: bool, partial: Union[bool, Sequence[str], AbstractSet[str], NoneType], field_errors: bool = False)
56,491
marshmallow.schema
_normalize_nested_options
Apply then flatten nested schema options. This method is private API.
def _normalize_nested_options(self) -> None: """Apply then flatten nested schema options. This method is private API. """ if self.only is not None: # Apply the only option to nested fields. self.__apply_nested_option("only", self.only, "intersection") # Remove the child field names from the only option. self.only = self.set_class([field.split(".", 1)[0] for field in self.only]) if self.exclude: # Apply the exclude option to nested fields. self.__apply_nested_option("exclude", self.exclude, "union") # Remove the parent field names from the exclude option. self.exclude = self.set_class( [field for field in self.exclude if "." not in field] )
(self) -> NoneType
56,492
marshmallow.schema
_run_validator
null
def _run_validator( self, validator_func, output, *, original_data, error_store, many, partial, pass_original, index=None, ): try: if pass_original: # Pass original, raw data (before unmarshalling) validator_func(output, original_data, partial=partial, many=many) else: validator_func(output, partial=partial, many=many) except ValidationError as err: error_store.store_error(err.messages, err.field_name, index=index)
(self, validator_func, output, *, original_data, error_store, many, partial, pass_original, index=None)
56,493
marshmallow.schema
_serialize
Serialize ``obj``. :param obj: The object(s) to serialize. :param bool many: `True` if ``data`` should be serialized as a collection. :return: A dictionary of the serialized data .. versionchanged:: 1.0.0 Renamed from ``marshal``.
def _serialize(self, obj: _T | typing.Iterable[_T], *, many: bool = False): """Serialize ``obj``. :param obj: The object(s) to serialize. :param bool many: `True` if ``data`` should be serialized as a collection. :return: A dictionary of the serialized data .. versionchanged:: 1.0.0 Renamed from ``marshal``. """ if many and obj is not None: return [ self._serialize(d, many=False) for d in typing.cast(typing.Iterable[_T], obj) ] ret = self.dict_class() for attr_name, field_obj in self.dump_fields.items(): value = field_obj.serialize(attr_name, obj, accessor=self.get_attribute) if value is missing: continue key = field_obj.data_key if field_obj.data_key is not None else attr_name ret[key] = value return ret
(self, obj: Union[~_T, Iterable[~_T]], *, many: bool = False)
56,494
marshmallow.schema
dump
Serialize an object to native Python data types according to this Schema's fields. :param obj: The object to serialize. :param many: Whether to serialize `obj` as a collection. If `None`, the value for `self.many` is used. :return: Serialized data .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the serialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if ``obj`` is invalid. .. versionchanged:: 3.0.0rc9 Validation no longer occurs upon serialization.
def dump(self, obj: typing.Any, *, many: bool | None = None): """Serialize an object to native Python data types according to this Schema's fields. :param obj: The object to serialize. :param many: Whether to serialize `obj` as a collection. If `None`, the value for `self.many` is used. :return: Serialized data .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the serialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if ``obj`` is invalid. .. versionchanged:: 3.0.0rc9 Validation no longer occurs upon serialization. """ many = self.many if many is None else bool(many) if self._has_processors(PRE_DUMP): processed_obj = self._invoke_dump_processors( PRE_DUMP, obj, many=many, original_data=obj ) else: processed_obj = obj result = self._serialize(processed_obj, many=many) if self._has_processors(POST_DUMP): result = self._invoke_dump_processors( POST_DUMP, result, many=many, original_data=obj ) return result
(self, obj: Any, *, many: Optional[bool] = None)
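Typical usage of `dump` (an illustrative sketch with a throwaway `User` class)::

    from marshmallow import Schema, fields

    class UserSchema(Schema):
        name = fields.Str()
        email = fields.Email()

    class User:
        def __init__(self, name, email):
            self.name = name
            self.email = email

    schema = UserSchema()
    schema.dump(User("Monty", "monty@python.org"))
    # {'name': 'Monty', 'email': 'monty@python.org'}

    # many=True serializes a collection instead of a single object.
    schema.dump([User("Ada", "ada@example.org")], many=True)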
56,495
marshmallow.schema
dumps
Same as :meth:`dump`, except return a JSON-encoded string. :param obj: The object to serialize. :param many: Whether to serialize `obj` as a collection. If `None`, the value for `self.many` is used. :return: A ``json`` string .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the serialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if ``obj`` is invalid.
def dumps(self, obj: typing.Any, *args, many: bool | None = None, **kwargs): """Same as :meth:`dump`, except return a JSON-encoded string. :param obj: The object to serialize. :param many: Whether to serialize `obj` as a collection. If `None`, the value for `self.many` is used. :return: A ``json`` string .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the serialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if ``obj`` is invalid. """ serialized = self.dump(obj, many=many) return self.opts.render_module.dumps(serialized, *args, **kwargs)
(self, obj: Any, *args, many: Optional[bool] = None, **kwargs)
56,496
marshmallow.schema
get_attribute
Defines how to pull values from an object to serialize. .. versionadded:: 2.0.0 .. versionchanged:: 3.0.0a1 Changed position of ``obj`` and ``attr``.
def get_attribute(self, obj: typing.Any, attr: str, default: typing.Any): """Defines how to pull values from an object to serialize. .. versionadded:: 2.0.0 .. versionchanged:: 3.0.0a1 Changed position of ``obj`` and ``attr``. """ return get_value(obj, attr, default)
(self, obj: Any, attr: str, default: Any)
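Overriding `get_attribute` changes how values are pulled from objects during serialization; a common sketch reads keys from plain dicts::

    from marshmallow import Schema, fields

    class UserDictSchema(Schema):
        name = fields.Str()
        email = fields.Email()

        # Input objects are plain dicts, so look values up with dict.get.
        def get_attribute(self, obj, attr, default):
            return obj.get(attr, default)

    UserDictSchema().dump({"name": "Ada", "email": "ada@example.org"})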
56,497
marshmallow.schema
handle_error
Custom error handler function for the schema. :param error: The `ValidationError` raised during (de)serialization. :param data: The original input data. :param many: Value of ``many`` on dump or load. :param partial: Value of ``partial`` on load. .. versionadded:: 2.0.0 .. versionchanged:: 3.0.0rc9 Receives `many` and `partial` (on deserialization) as keyword arguments.
def handle_error( self, error: ValidationError, data: typing.Any, *, many: bool, **kwargs ): """Custom error handler function for the schema. :param error: The `ValidationError` raised during (de)serialization. :param data: The original input data. :param many: Value of ``many`` on dump or load. :param partial: Value of ``partial`` on load. .. versionadded:: 2.0.0 .. versionchanged:: 3.0.0rc9 Receives `many` and `partial` (on deserialization) as keyword arguments. """ pass
(self, error: marshmallow.exceptions.ValidationError, data: Any, *, many: bool, **kwargs)
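Subclasses override `handle_error` to react to validation failures before the exception propagates; a minimal sketch that logs them::

    import logging
    from marshmallow import Schema, fields

    class StrictUserSchema(Schema):
        name = fields.Str(required=True)

        def handle_error(self, error, data, *, many, **kwargs):
            # Log the failure; the ValidationError is still raised afterwards.
            logging.warning("Validation failed for %r: %s", data, error.messages)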
56,498
flask_marshmallow.schema
jsonify
Return a JSON response containing the serialized data. :param obj: Object to serialize. :param bool many: Whether `obj` should be serialized as an instance or as a collection. If None, defaults to the value of the `many` attribute on this Schema. :param kwargs: Additional keyword arguments passed to `flask.jsonify`. .. versionchanged:: 0.6.0 Takes the same arguments as `marshmallow.Schema.dump`. Additional keyword arguments are passed to `flask.jsonify`. .. versionchanged:: 0.6.3 The `many` argument for this method defaults to the value of the `many` attribute on the Schema. Previously, the `many` argument of this method defaulted to False, regardless of the value of `Schema.many`.
def jsonify( self, obj: typing.Any, many: typing.Optional[bool] = None, *args, **kwargs ) -> "Response": """Return a JSON response containing the serialized data. :param obj: Object to serialize. :param bool many: Whether `obj` should be serialized as an instance or as a collection. If None, defaults to the value of the `many` attribute on this Schema. :param kwargs: Additional keyword arguments passed to `flask.jsonify`. .. versionchanged:: 0.6.0 Takes the same arguments as `marshmallow.Schema.dump`. Additional keyword arguments are passed to `flask.jsonify`. .. versionchanged:: 0.6.3 The `many` argument for this method defaults to the value of the `many` attribute on the Schema. Previously, the `many` argument of this method defaulted to False, regardless of the value of `Schema.many`. """ if many is None: many = self.many data = self.dump(obj, many=many) return flask.jsonify(data, *args, **kwargs)
(self, obj: Any, many: Optional[bool] = None, *args, **kwargs) -> 'Response'
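A sketch of `jsonify` inside a Flask view; `get_book` is a hypothetical loader, not part of flask-marshmallow::

    import flask
    from flask_marshmallow import Marshmallow

    app = flask.Flask(__name__)
    ma = Marshmallow(app)

    class BookSchema(ma.Schema):
        id = ma.Integer()
        title = ma.String()

    book_schema = BookSchema()

    @app.route("/books/<int:book_id>")
    def book_detail(book_id):
        book = get_book(book_id)  # hypothetical data-access helper
        return book_schema.jsonify(book)  # flask.Response with the serialized data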
56,499
marshmallow.schema
load
Deserialize a data structure to an object defined by this Schema's fields. :param data: The data to deserialize. :param many: Whether to deserialize `data` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. If `None`, the value for `self.unknown` is used. :return: Deserialized data .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the deserialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if invalid data are passed.
def load( self, data: ( typing.Mapping[str, typing.Any] | typing.Iterable[typing.Mapping[str, typing.Any]] ), *, many: bool | None = None, partial: bool | types.StrSequenceOrSet | None = None, unknown: str | None = None, ): """Deserialize a data structure to an object defined by this Schema's fields. :param data: The data to deserialize. :param many: Whether to deserialize `data` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. If `None`, the value for `self.unknown` is used. :return: Deserialized data .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the deserialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if invalid data are passed. """ return self._do_load( data, many=many, partial=partial, unknown=unknown, postprocess=True )
(self, data: Union[Mapping[str, Any], Iterable[Mapping[str, Any]]], *, many: Optional[bool] = None, partial: Union[bool, Sequence[str], AbstractSet[str], NoneType] = None, unknown: Optional[str] = None)
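Representative `load` calls covering the `partial` and `unknown` options::

    from marshmallow import EXCLUDE, Schema, ValidationError, fields

    class UserSchema(Schema):
        name = fields.Str(required=True)
        age = fields.Int(required=True)

    schema = UserSchema()
    schema.load({"name": "Ada", "age": 36})                           # full payload
    schema.load({"name": "Ada"}, partial=("age",))                    # tolerate missing 'age'
    schema.load({"name": "Ada", "age": 36, "x": 1}, unknown=EXCLUDE)  # drop unknown keys

    try:
        schema.load({"age": "not a number"})
    except ValidationError as err:
        err.messages  # {'name': [...], 'age': [...]}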
56,500
marshmallow.schema
loads
Same as :meth:`load`, except it takes a JSON string as input. :param json_data: A JSON string of the data to deserialize. :param many: Whether to deserialize `obj` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. If `None`, the value for `self.unknown` is used. :return: Deserialized data .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the deserialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if invalid data are passed.
def loads( self, json_data: str, *, many: bool | None = None, partial: bool | types.StrSequenceOrSet | None = None, unknown: str | None = None, **kwargs, ): """Same as :meth:`load`, except it takes a JSON string as input. :param json_data: A JSON string of the data to deserialize. :param many: Whether to deserialize `obj` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :param unknown: Whether to exclude, include, or raise an error for unknown fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`. If `None`, the value for `self.unknown` is used. :return: Deserialized data .. versionadded:: 1.0.0 .. versionchanged:: 3.0.0b7 This method returns the deserialized data rather than a ``(data, errors)`` duple. A :exc:`ValidationError <marshmallow.exceptions.ValidationError>` is raised if invalid data are passed. """ data = self.opts.render_module.loads(json_data, **kwargs) return self.load(data, many=many, partial=partial, unknown=unknown)
(self, json_data: str, *, many: Optional[bool] = None, partial: Union[bool, Sequence[str], AbstractSet[str], NoneType] = None, unknown: Optional[str] = None, **kwargs)
56,501
marshmallow.schema
on_bind_field
Hook to modify a field when it is bound to the `Schema`. No-op by default.
def on_bind_field(self, field_name: str, field_obj: ma_fields.Field) -> None: """Hook to modify a field when it is bound to the `Schema`. No-op by default. """ return None
(self, field_name: str, field_obj: marshmallow.fields.Field) -> NoneType
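`on_bind_field` is the hook for per-field tweaks at binding time; the sketch below renames serialized keys to camelCase::

    from marshmallow import Schema, fields

    def camelcase(s):
        parts = iter(s.split("_"))
        return next(parts) + "".join(p.title() for p in parts)

    class CamelCaseSchema(Schema):
        def on_bind_field(self, field_name, field_obj):
            field_obj.data_key = camelcase(field_obj.data_key or field_name)

    class UserSchema(CamelCaseSchema):
        first_name = fields.Str()

    UserSchema().dump({"first_name": "Ada"})  # {'firstName': 'Ada'}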
56,502
marshmallow.schema
validate
Validate `data` against the schema, returning a dictionary of validation errors. :param data: The data to validate. :param many: Whether to validate `data` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :return: A dictionary of validation errors. .. versionadded:: 1.1.0
def validate( self, data: ( typing.Mapping[str, typing.Any] | typing.Iterable[typing.Mapping[str, typing.Any]] ), *, many: bool | None = None, partial: bool | types.StrSequenceOrSet | None = None, ) -> dict[str, list[str]]: """Validate `data` against the schema, returning a dictionary of validation errors. :param data: The data to validate. :param many: Whether to validate `data` as a collection. If `None`, the value for `self.many` is used. :param partial: Whether to ignore missing fields and not require any fields declared. Propagates down to ``Nested`` fields as well. If its value is an iterable, only missing fields listed in that iterable will be ignored. Use dot delimiters to specify nested fields. :return: A dictionary of validation errors. .. versionadded:: 1.1.0 """ try: self._do_load(data, many=many, partial=partial, postprocess=False) except ValidationError as exc: return typing.cast(typing.Dict[str, typing.List[str]], exc.messages) return {}
(self, data: Union[Mapping[str, Any], Iterable[Mapping[str, Any]]], *, many: Optional[bool] = None, partial: Union[bool, Sequence[str], AbstractSet[str], NoneType] = None) -> dict[str, list[str]]
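`validate` collects errors instead of raising, which suits pre-flight checks::

    from marshmallow import Schema, fields

    class UserSchema(Schema):
        email = fields.Email(required=True)

    errors = UserSchema().validate({"email": "not-an-email"})
    if errors:
        # e.g. {'email': ['Not a valid email address.']}
        ...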
56,503
flask_marshmallow
_attach_fields
Attach all the marshmallow fields classes to ``obj``, including Flask-Marshmallow's custom fields.
def _attach_fields(obj): """Attach all the marshmallow fields classes to ``obj``, including Flask-Marshmallow's custom fields. """ for attr in base_fields.__all__: if not hasattr(obj, attr): setattr(obj, attr, getattr(base_fields, attr)) for attr in fields.__all__: setattr(obj, attr, getattr(fields, attr))
(obj)
56,507
marshmallow.utils
pprint
Pretty-printing function that can pretty-print OrderedDicts like regular dictionaries. Useful for printing the output of :meth:`marshmallow.Schema.dump`. .. deprecated:: 3.7.0 marshmallow.pprint will be removed in marshmallow 4.
def pprint(obj, *args, **kwargs) -> None: """Pretty-printing function that can pretty-print OrderedDicts like regular dictionaries. Useful for printing the output of :meth:`marshmallow.Schema.dump`. .. deprecated:: 3.7.0 marshmallow.pprint will be removed in marshmallow 4. """ warnings.warn( "marshmallow's pprint function is deprecated and will be removed in marshmallow 4.", RemovedInMarshmallow4Warning, stacklevel=2, ) if isinstance(obj, collections.OrderedDict): print(json.dumps(obj, *args, **kwargs)) else: py_pprint(obj, *args, **kwargs)
(obj, *args, **kwargs) -> NoneType
56,511
sqlalchemy_continuum.exc
ClassNotVersioned
null
class ClassNotVersioned(VersioningError): pass
null
56,512
sqlalchemy_continuum.exc
ImproperlyConfigured
null
class ImproperlyConfigured(VersioningError): pass
null
56,513
sqlalchemy_continuum.operation
Operation
null
class Operation(object): INSERT = 0 UPDATE = 1 DELETE = 2 def __init__(self, target, type): self.target = target self.type = type self.processed = False def __eq__(self, other): return ( self.target == other.target and self.type == other.type ) def __ne__(self, other): return not (self == other)
(target, type)
56,514
sqlalchemy_continuum.operation
__eq__
null
def __eq__(self, other): return ( self.target == other.target and self.type == other.type )
(self, other)
56,515
sqlalchemy_continuum.operation
__init__
null
def __init__(self, target, type): self.target = target self.type = type self.processed = False
(self, target, type)
56,517
sqlalchemy_continuum.transaction
TransactionFactory
null
class TransactionFactory(ModelFactory): model_name = 'Transaction' def __init__(self, remote_addr=True): self.remote_addr = remote_addr def create_class(self, manager): """ Create Transaction class. """ class Transaction( manager.declarative_base, TransactionBase ): __tablename__ = 'transaction' __versioning_manager__ = manager id = sa.Column( sa.types.BigInteger, sa.schema.Sequence('transaction_id_seq'), primary_key=True, autoincrement=True ) if self.remote_addr: remote_addr = sa.Column(sa.String(50)) if manager.user_cls: user_cls = manager.user_cls Base = manager.declarative_base registry = Base.registry._class_registry if isinstance(user_cls, str): try: user_cls = registry[user_cls] except KeyError: raise ImproperlyConfigured( 'Could not build relationship between Transaction' ' and %s. %s was not found in declarative class ' 'registry. Either configure VersioningManager to ' 'use different user class or disable this ' 'relationship ' % (user_cls, user_cls) ) user_id = sa.Column( sa.inspect(user_cls).primary_key[0].type, sa.ForeignKey(sa.inspect(user_cls).primary_key[0]), index=True ) user = sa.orm.relationship(user_cls) def __repr__(self): fields = ['id', 'issued_at', 'user'] field_values = OrderedDict( (field, getattr(self, field)) for field in fields if hasattr(self, field) ) return '<Transaction %s>' % ', '.join( ( '%s=%r' % (field, value) if not isinstance(value, int) # We want the following line to ensure that longs get # shown without the ugly L suffix on python 2.x # versions else '%s=%d' % (field, value) for field, value in field_values.items() ) ) if manager.options['native_versioning']: create_triggers(Transaction) return Transaction
(remote_addr=True)
56,518
sqlalchemy_continuum.factory
__call__
Create model class but only if it doesn't already exist in declarative model registry.
def __call__(self, manager): """ Create model class but only if it doesn't already exist in declarative model registry. """ Base = manager.declarative_base registry = Base.registry._class_registry if self.model_name not in registry: return self.create_class(manager) return registry[self.model_name]
(self, manager)
56,519
sqlalchemy_continuum.transaction
__init__
null
def __init__(self, remote_addr=True): self.remote_addr = remote_addr
(self, remote_addr=True)
56,520
sqlalchemy_continuum.transaction
create_class
Create Transaction class.
def create_class(self, manager): """ Create Transaction class. """ class Transaction( manager.declarative_base, TransactionBase ): __tablename__ = 'transaction' __versioning_manager__ = manager id = sa.Column( sa.types.BigInteger, sa.schema.Sequence('transaction_id_seq'), primary_key=True, autoincrement=True ) if self.remote_addr: remote_addr = sa.Column(sa.String(50)) if manager.user_cls: user_cls = manager.user_cls Base = manager.declarative_base registry = Base.registry._class_registry if isinstance(user_cls, str): try: user_cls = registry[user_cls] except KeyError: raise ImproperlyConfigured( 'Could not build relationship between Transaction' ' and %s. %s was not found in declarative class ' 'registry. Either configure VersioningManager to ' 'use different user class or disable this ' 'relationship ' % (user_cls, user_cls) ) user_id = sa.Column( sa.inspect(user_cls).primary_key[0].type, sa.ForeignKey(sa.inspect(user_cls).primary_key[0]), index=True ) user = sa.orm.relationship(user_cls) def __repr__(self): fields = ['id', 'issued_at', 'user'] field_values = OrderedDict( (field, getattr(self, field)) for field in fields if hasattr(self, field) ) return '<Transaction %s>' % ', '.join( ( '%s=%r' % (field, value) if not isinstance(value, int) # We want the following line to ensure that longs get # shown without the ugly L suffix on python 2.x # versions else '%s=%d' % (field, value) for field, value in field_values.items() ) ) if manager.options['native_versioning']: create_triggers(Transaction) return Transaction
(self, manager)
56,521
sqlalchemy_continuum.unit_of_work
UnitOfWork
null
class UnitOfWork(object): def __init__(self, manager): self.manager = manager self.reset() def reset(self, session=None): """ Reset the internal state of this UnitOfWork object. Normally this is called after transaction has been committed or rolled back. """ self.version_session = None self.current_transaction = None self.operations = Operations() self.pending_statements = [] self.version_objs = {} def is_modified(self, session): """ Return whether or not given session has been modified. Session has been modified if any versioned property of any version object in given session has been modified or if any of the plugins returns that session has been modified. :param session: SQLAlchemy session object """ return ( is_session_modified(session) or any(self.manager.plugins.is_session_modified(session)) ) def process_before_flush(self, session): """ Before flush processor for given session. This method creates a version session which is later on used for the creation of version objects. It also creates Transaction object for the current transaction and invokes before_flush template method on all plugins. If the given session had no relevant modifications regarding versioned objects this method does nothing. :param session: SQLAlchemy session object """ if session == self.version_session: return if not self.is_modified(session): return if not self.version_session: self.version_session = sa.orm.session.Session( bind=session.connection() ) if not self.current_transaction: self.create_transaction(session) self.manager.plugins.before_flush(self, session) def process_after_flush(self, session): """ After flush processor for given session. Creates version objects for all modified versioned parent objects that were affected during the flush phase. :param session: SQLAlchemy session object """ if session == self.version_session: return if not self.current_transaction: return if not self.version_session: self.version_session = sa.orm.session.Session( bind=session.connection() ) self.make_versions(session) def transaction_args(self, session): args = {} for plugin in self.manager.plugins: args.update(plugin.transaction_args(self, session)) return args def create_transaction(self, session): """ Create transaction object for given SQLAlchemy session. :param session: SQLAlchemy session object """ args = self.transaction_args(session) Transaction = self.manager.transaction_cls self.current_transaction = Transaction() for key, value in args.items(): setattr(self.current_transaction, key, value) if not self.version_session: self.version_session = sa.orm.session.Session( bind=session.connection() ) self.version_session.add(self.current_transaction) self.version_session.flush() self.version_session.expunge(self.current_transaction) session.add(self.current_transaction) return self.current_transaction def get_or_create_version_object(self, target): """ Return version object for given parent object. If no version object exists for given parent object, create one. 
:param target: Parent object to create the version object for """ version_cls = version_class(target.__class__) version_id = identity(target) + (self.current_transaction.id, ) version_key = (version_cls, version_id) if version_key not in self.version_objs: version_obj = version_cls() self.version_objs[version_key] = version_obj self.version_session.add(version_obj) tx_column = self.manager.option( target, 'transaction_column_name' ) setattr( version_obj, tx_column, self.current_transaction.id ) return version_obj else: return self.version_objs[version_key] def process_operation(self, operation): """ Process given operation object. The operation processing has x stages: 1. Get or create a version object for given parent object 2. Assign the operation type for this object 3. Invoke listeners 4. Update version validity in case validity strategy is used 5. Mark operation as processed :param operation: Operation object """ target = operation.target version_obj = self.get_or_create_version_object(target) version_obj.operation_type = operation.type self.assign_attributes(target, version_obj) self.manager.plugins.after_create_version_object( self, target, version_obj ) if self.manager.option(target, 'strategy') == 'validity': self.update_version_validity( target, version_obj ) operation.processed = True def create_version_objects(self, session): """ Create version objects for given session based on operations collected by insert, update and deleted trackers. :param session: SQLAlchemy session object """ if ( not self.manager.options['versioning'] or self.manager.options['native_versioning'] ): return for key, operation in copy(self.operations).items(): if operation.processed: continue if not self.current_transaction: raise Exception( 'Current transaction not available.' ) self.process_operation(operation) self.version_session.flush() def version_validity_subquery(self, parent, version_obj, alias=None): """ Return the subquery needed by :func:`update_version_validity`. This method is only used when using 'validity' versioning strategy. :param parent: SQLAlchemy declarative parent object :parem version_obj: SQLAlchemy declarative version object .. seealso:: :func:`update_version_validity` """ fetcher = self.manager.fetcher(parent) session = sa.orm.object_session(version_obj) subquery = fetcher._transaction_id_subquery( version_obj, next_or_prev='prev', alias=alias ) return subquery def update_version_validity(self, parent, version_obj): """ Updates previous version object end_transaction_id based on given parent object and newly created version object. This method is only used when using 'validity' versioning strategy. :param parent: SQLAlchemy declarative parent object :parem version_obj: SQLAlchemy declarative version object .. 
seealso:: :func:`version_validity_subquery` """ session = sa.orm.object_session(version_obj) for class_ in version_obj.__class__.__mro__: if class_ in self.manager.parent_class_map: subquery = self.version_validity_subquery( parent, version_obj, alias=sa.orm.aliased(class_.__table__) ) subquery = subquery.scalar_subquery() vobj_tx_col = getattr(class_, tx_column_name(version_obj)) query = ( sa.select(class_) .where( vobj_tx_col == subquery, *[ getattr(version_obj, pk) == getattr(class_.__table__.c, pk) for pk in get_primary_keys(class_) if pk != tx_column_name(class_) ] ) .execution_options(synchronize_session=False) ) old_versions = session.scalars(query).all() for old_version in old_versions: setattr(old_version, end_tx_column_name(version_obj), self.current_transaction.id) def create_association_versions(self, session): """ Creates association table version records for given session. :param session: SQLAlchemy session object """ statements = copy(self.pending_statements) for stmt in statements: stmt = stmt.values( **{ self.manager.options['transaction_column_name']: self.current_transaction.id } ) session.execute(stmt) self.pending_statements = [] def make_versions(self, session): """ Create transaction, transaction changes records, version objects. :param session: SQLAlchemy session object """ if not self.manager.options['versioning']: return if self.pending_statements: self.create_association_versions(session) if self.operations: self.manager.plugins.before_create_version_objects(self, session) self.create_version_objects(session) self.manager.plugins.after_create_version_objects(self, session) @property def has_changes(self): """ Return whether or not this unit of work has changes. """ return self.operations or self.pending_statements def assign_attributes(self, parent_obj, version_obj): """ Assign attributes values from parent object to version object. :param parent_obj: Parent object to get the attribute values from :param version_obj: Version object to assign the attribute values to """ for prop in versioned_column_properties(parent_obj): try: value = getattr(parent_obj, prop.key) except sa.orm.exc.ObjectDeletedError: value = None setattr(version_obj, prop.key, value)
(manager)
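A minimal usage sketch for the UnitOfWork class above. This is hedged: `manager` and `session` are illustrative names for an already-configured VersioningManager and an ordinary SQLAlchemy session; in normal operation these calls are made by the session event listeners the manager attaches, not by hand.

    uow = manager.unit_of_work(session)   # one UnitOfWork per connection
    uow.process_before_flush(session)     # lazily creates the Transaction record
    session.flush()
    uow.process_after_flush(session)      # creates version objects for the flushed changes
    # after commit or rollback the manager calls uow.reset(session)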
56,522
sqlalchemy_continuum.unit_of_work
__init__
null
def __init__(self, manager): self.manager = manager self.reset()
(self, manager)
56,523
sqlalchemy_continuum.unit_of_work
assign_attributes
Assign attribute values from the parent object to the version object. :param parent_obj: Parent object to get the attribute values from :param version_obj: Version object to assign the attribute values to
def assign_attributes(self, parent_obj, version_obj): """ Assign attribute values from the parent object to the version object. :param parent_obj: Parent object to get the attribute values from :param version_obj: Version object to assign the attribute values to """ for prop in versioned_column_properties(parent_obj): try: value = getattr(parent_obj, prop.key) except sa.orm.exc.ObjectDeletedError: value = None setattr(version_obj, prop.key, value)
(self, parent_obj, version_obj)
56,524
sqlalchemy_continuum.unit_of_work
create_association_versions
Creates association table version records for given session. :param session: SQLAlchemy session object
def create_association_versions(self, session): """ Creates association table version records for given session. :param session: SQLAlchemy session object """ statements = copy(self.pending_statements) for stmt in statements: stmt = stmt.values( **{ self.manager.options['transaction_column_name']: self.current_transaction.id } ) session.execute(stmt) self.pending_statements = []
(self, session)
56,525
sqlalchemy_continuum.unit_of_work
create_transaction
Create transaction object for given SQLAlchemy session. :param session: SQLAlchemy session object
def create_transaction(self, session): """ Create transaction object for given SQLAlchemy session. :param session: SQLAlchemy session object """ args = self.transaction_args(session) Transaction = self.manager.transaction_cls self.current_transaction = Transaction() for key, value in args.items(): setattr(self.current_transaction, key, value) if not self.version_session: self.version_session = sa.orm.session.Session( bind=session.connection() ) self.version_session.add(self.current_transaction) self.version_session.flush() self.version_session.expunge(self.current_transaction) session.add(self.current_transaction) return self.current_transaction
(self, session)
56,526
sqlalchemy_continuum.unit_of_work
create_version_objects
Create version objects for given session based on operations collected by insert, update and deleted trackers. :param session: SQLAlchemy session object
def create_version_objects(self, session): """ Create version objects for given session based on operations collected by insert, update and deleted trackers. :param session: SQLAlchemy session object """ if ( not self.manager.options['versioning'] or self.manager.options['native_versioning'] ): return for key, operation in copy(self.operations).items(): if operation.processed: continue if not self.current_transaction: raise Exception( 'Current transaction not available.' ) self.process_operation(operation) self.version_session.flush()
(self, session)
56,527
sqlalchemy_continuum.unit_of_work
get_or_create_version_object
Return version object for given parent object. If no version object exists for given parent object, create one. :param target: Parent object to create the version object for
def get_or_create_version_object(self, target): """ Return version object for given parent object. If no version object exists for given parent object, create one. :param target: Parent object to create the version object for """ version_cls = version_class(target.__class__) version_id = identity(target) + (self.current_transaction.id, ) version_key = (version_cls, version_id) if version_key not in self.version_objs: version_obj = version_cls() self.version_objs[version_key] = version_obj self.version_session.add(version_obj) tx_column = self.manager.option( target, 'transaction_column_name' ) setattr( version_obj, tx_column, self.current_transaction.id ) return version_obj else: return self.version_objs[version_key]
(self, target)
56,528
sqlalchemy_continuum.unit_of_work
is_modified
Return whether or not the given session has been modified. The session is considered modified if any versioned property of any versioned object in the session has been modified, or if any of the plugins reports that the session has been modified. :param session: SQLAlchemy session object
def is_modified(self, session): """ Return whether or not given session has been modified. Session has been modified if any versioned property of any version object in given session has been modified or if any of the plugins returns that session has been modified. :param session: SQLAlchemy session object """ return ( is_session_modified(session) or any(self.manager.plugins.is_session_modified(session)) )
(self, session)
56,529
sqlalchemy_continuum.unit_of_work
make_versions
Create transaction, transaction changes records, version objects. :param session: SQLAlchemy session object
def make_versions(self, session): """ Create transaction, transaction changes records, version objects. :param session: SQLAlchemy session object """ if not self.manager.options['versioning']: return if self.pending_statements: self.create_association_versions(session) if self.operations: self.manager.plugins.before_create_version_objects(self, session) self.create_version_objects(session) self.manager.plugins.after_create_version_objects(self, session)
(self, session)
56,530
sqlalchemy_continuum.unit_of_work
process_after_flush
After flush processor for given session. Creates version objects for all modified versioned parent objects that were affected during the flush phase. :param session: SQLAlchemy session object
def process_after_flush(self, session): """ After flush processor for given session. Creates version objects for all modified versioned parent objects that were affected during the flush phase. :param session: SQLAlchemy session object """ if session == self.version_session: return if not self.current_transaction: return if not self.version_session: self.version_session = sa.orm.session.Session( bind=session.connection() ) self.make_versions(session)
(self, session)
56,531
sqlalchemy_continuum.unit_of_work
process_before_flush
Before-flush processor for the given session. This method creates a version session which is later used for creating version objects. It also creates a Transaction object for the current transaction and invokes the before_flush template method on all plugins. If the given session has no relevant modifications to versioned objects, this method does nothing. :param session: SQLAlchemy session object
def process_before_flush(self, session): """ Before flush processor for given session. This method creates a version session which is later on used for the creation of version objects. It also creates Transaction object for the current transaction and invokes before_flush template method on all plugins. If the given session had no relevant modifications regarding versioned objects this method does nothing. :param session: SQLAlchemy session object """ if session == self.version_session: return if not self.is_modified(session): return if not self.version_session: self.version_session = sa.orm.session.Session( bind=session.connection() ) if not self.current_transaction: self.create_transaction(session) self.manager.plugins.before_flush(self, session)
(self, session)
56,532
sqlalchemy_continuum.unit_of_work
process_operation
Process given operation object. The operation processing has five stages: 1. Get or create a version object for given parent object 2. Assign the operation type for this object 3. Invoke listeners 4. Update version validity in case validity strategy is used 5. Mark operation as processed :param operation: Operation object
def process_operation(self, operation): """ Process given operation object. The operation processing has five stages: 1. Get or create a version object for given parent object 2. Assign the operation type for this object 3. Invoke listeners 4. Update version validity in case validity strategy is used 5. Mark operation as processed :param operation: Operation object """ target = operation.target version_obj = self.get_or_create_version_object(target) version_obj.operation_type = operation.type self.assign_attributes(target, version_obj) self.manager.plugins.after_create_version_object( self, target, version_obj ) if self.manager.option(target, 'strategy') == 'validity': self.update_version_validity( target, version_obj ) operation.processed = True
(self, operation)
56,533
sqlalchemy_continuum.unit_of_work
reset
Reset the internal state of this UnitOfWork object. Normally this is called after transaction has been committed or rolled back.
def reset(self, session=None): """ Reset the internal state of this UnitOfWork object. Normally this is called after transaction has been committed or rolled back. """ self.version_session = None self.current_transaction = None self.operations = Operations() self.pending_statements = [] self.version_objs = {}
(self, session=None)
56,534
sqlalchemy_continuum.unit_of_work
transaction_args
null
def transaction_args(self, session): args = {} for plugin in self.manager.plugins: args.update(plugin.transaction_args(self, session)) return args
(self, session)
56,535
sqlalchemy_continuum.unit_of_work
update_version_validity
Updates the previous version object's end_transaction_id based on the given parent object and the newly created version object. This method is only used when using the 'validity' versioning strategy. :param parent: SQLAlchemy declarative parent object :param version_obj: SQLAlchemy declarative version object .. seealso:: :func:`version_validity_subquery`
def update_version_validity(self, parent, version_obj): """ Updates previous version object end_transaction_id based on given parent object and newly created version object. This method is only used when using 'validity' versioning strategy. :param parent: SQLAlchemy declarative parent object :parem version_obj: SQLAlchemy declarative version object .. seealso:: :func:`version_validity_subquery` """ session = sa.orm.object_session(version_obj) for class_ in version_obj.__class__.__mro__: if class_ in self.manager.parent_class_map: subquery = self.version_validity_subquery( parent, version_obj, alias=sa.orm.aliased(class_.__table__) ) subquery = subquery.scalar_subquery() vobj_tx_col = getattr(class_, tx_column_name(version_obj)) query = ( sa.select(class_) .where( vobj_tx_col == subquery, *[ getattr(version_obj, pk) == getattr(class_.__table__.c, pk) for pk in get_primary_keys(class_) if pk != tx_column_name(class_) ] ) .execution_options(synchronize_session=False) ) old_versions = session.scalars(query).all() for old_version in old_versions: setattr(old_version, end_tx_column_name(version_obj), self.current_transaction.id)
(self, parent, version_obj)
56,536
sqlalchemy_continuum.unit_of_work
version_validity_subquery
Return the subquery needed by :func:`update_version_validity`. This method is only used when using the 'validity' versioning strategy. :param parent: SQLAlchemy declarative parent object :param version_obj: SQLAlchemy declarative version object .. seealso:: :func:`update_version_validity`
def version_validity_subquery(self, parent, version_obj, alias=None): """ Return the subquery needed by :func:`update_version_validity`. This method is only used when using 'validity' versioning strategy. :param parent: SQLAlchemy declarative parent object :parem version_obj: SQLAlchemy declarative version object .. seealso:: :func:`update_version_validity` """ fetcher = self.manager.fetcher(parent) session = sa.orm.object_session(version_obj) subquery = fetcher._transaction_id_subquery( version_obj, next_or_prev='prev', alias=alias ) return subquery
(self, parent, version_obj, alias=None)
56,537
sqlalchemy_continuum.manager
VersioningManager
VersioningManager delegates versioning configuration operations to builder classes and the actual versioning to UnitOfWork class. Manager contains configuration options that act as defaults for all versioned classes. :param unit_of_work_cls: The UnitOfWork class to use for initializing UnitOfWork objects for versioning :param transaction_cls: Transaction class to use for versioning. If None, the default Transaction class generated by TransactionFactory will be used. :param user_cls: User class which Transaction class should have relationship to. This can either be a class or string name of a class for lazy evaluation. :param options: Versioning options :param plugins: Versioning plugins that listen the events invoked by the manager. :param builder: Builder object which handles the building of versioning tables and models.
class VersioningManager(object): """ VersioningManager delegates versioning configuration operations to builder classes and the actual versioning to UnitOfWork class. Manager contains configuration options that act as defaults for all versioned classes. :param unit_of_work_cls: The UnitOfWork class to use for initializing UnitOfWork objects for versioning :param transaction_cls: Transaction class to use for versioning. If None, the default Transaction class generated by TransactionFactory will be used. :param user_cls: User class which Transaction class should have relationship to. This can either be a class or string name of a class for lazy evaluation. :param options: Versioning options :param plugins: Versioning plugins that listen the events invoked by the manager. :param builder: Builder object which handles the building of versioning tables and models. """ def __init__( self, unit_of_work_cls=UnitOfWork, transaction_cls=None, user_cls=None, options={}, plugins=None, builder=None ): self.uow_class = unit_of_work_cls if builder is None: self.builder = Builder() else: self.builder = builder self.builder.manager = self self.reset() if transaction_cls is not None: self.transaction_cls = transaction_cls else: self.transaction_cls = TransactionFactory() if user_cls is not None: self.user_cls = user_cls self.options = { 'versioning': True, 'base_classes': None, 'table_name': '%s_version', 'exclude': [], 'include': [], 'native_versioning': False, 'create_models': True, 'create_tables': True, 'transaction_column_name': 'transaction_id', 'end_transaction_column_name': 'end_transaction_id', 'operation_type_column_name': 'operation_type', 'strategy': 'validity', 'use_module_name': False } if plugins is None: self.plugins = [] else: self.plugins = plugins self.options.update(options) @property def plugins(self): return self._plugins @plugins.setter def plugins(self, plugin_collection): self._plugins = PluginCollection(plugin_collection) def fetcher(self, obj): if self.option(obj, 'strategy') == 'subquery': return SubqueryFetcher(self) else: return ValidityFetcher(self) def reset(self): """ Resets this manager's internal state. This method should be used in test cases that create models on the fly. Otherwise history_class_map and some other variables would be polluted by no more used model classes. """ self.tables = {} self.pending_classes = [] self.association_tables = set() self.association_version_tables = set() self.declarative_base = None self.version_class_map = {} self.parent_class_map = {} self.session_listeners = { 'before_flush': self.before_flush, 'after_flush': self.after_flush, 'after_commit': self.clear, 'after_rollback': self.clear, } self.mapper_listeners = { 'after_delete': self.track_deletes, 'after_update': self.track_updates, 'after_insert': self.track_inserts, } self.class_config_listeners = { 'instrument_class': self.builder.instrument_versioned_classes, 'after_configured': self.builder.configure_versioned_classes, } # A dictionary of units of work. Keys as connection objects and values # as UnitOfWork objects. self.units_of_work = {} self.session_connection_map = {} self.metadata = None def create_transaction_model(self): """ Create Transaction class but only if it doesn't already exist in declarative model registry. 
""" if isinstance(self.transaction_cls, TransactionFactory): self.transaction_cls = self.transaction_cls(self) return self.transaction_cls def is_excluded_column(self, model, column): try: key = get_column_key(model, column) except sa.orm.exc.UnmappedColumnError: return False return self.is_excluded_property(model, key) def is_excluded_property(self, model, key): """ Returns whether or not given property of given model is excluded from the associated history model. :param model: SQLAlchemy declarative model object. :param key: Model property key """ if key in self.option(model, 'include'): return False return key in self.option(model, 'exclude') def option(self, model, name): """ Returns the option value for given model. If the option is not found from given model falls back to default values of this manager object. If the option is not found from this manager object either this method throws a KeyError. :param model: SQLAlchemy declarative object :param name: name of the versioning option """ if not hasattr(model, '__versioned__'): raise TypeError('Model %r is not versioned.' % model) try: return model.__versioned__[name] except KeyError: return self.options[name] def apply_class_configuration_listeners(self, mapper): """ Applies class configuration listeners for given mapper. The listener work in two phases: 1. Class instrumentation phase The first listeners listens to class instrumentation event and handles the collecting of versioned models and adds them to the pending_classes list. 2. After class configuration phase The second listener listens to after class configuration event and handles the actual history model generation based on list that was collected during class instrumenation phase. :param mapper: SQLAlchemy mapper to apply the class configuration listeners to """ for event_name, listener in self.class_config_listeners.items(): sa.event.listen(mapper, event_name, listener) def remove_class_configuration_listeners(self, mapper): """ Remove versioning class configuration listeners from specified mapper. :param mapper: mapper to remove class configuration listeners from """ for event_name, listener in self.class_config_listeners.items(): sa.event.remove(mapper, event_name, listener) def track_operations(self, mapper): """ Attach listeners for specified mapper that track SQL inserts, updates and deletes. :param mapper: mapper to track the SQL operations from """ for event_name, listener in self.mapper_listeners.items(): sa.event.listen(mapper, event_name, listener) def remove_operations_tracking(self, mapper): """ Remove listeners from specified mapper that track SQL inserts, updates and deletes. :param mapper: mapper to remove the SQL operations tracking listeners from """ for event_name, listener in self.mapper_listeners.items(): sa.event.remove(mapper, event_name, listener) def track_session(self, session): """ Attach listeners that track the operations (flushing, committing and rolling back) of given session. This method should be used in conjunction with `track_operations`. :param session: SQLAlchemy session to track the operations from """ for event_name, listener in self.session_listeners.items(): sa.event.listen(session, event_name, listener) def remove_session_tracking(self, session): """ Remove listeners that track the operations (flushing, committing and rolling back) of given session. This method should be used in conjunction with `remove_operations_tracking`. 
:param session: SQLAlchemy session to remove the operations tracking from """ for event_name, listener in self.session_listeners.items(): sa.event.remove(session, event_name, listener) @tracked_operation def track_inserts(self, uow, target): """ Track object insert operations. Whenever object is inserted it is added to this UnitOfWork's internal operations dictionary. """ uow.operations.add_insert(target) @tracked_operation def track_updates(self, uow, target): """ Track object update operations. Whenever object is updated it is added to this UnitOfWork's internal operations dictionary. """ if not is_modified(target): return uow.operations.add_update(target) @tracked_operation def track_deletes(self, uow, target): """ Track object deletion operations. Whenever object is deleted it is added to this UnitOfWork's internal operations dictionary. """ uow.operations.add_delete(target) def unit_of_work(self, session): """ Return the associated SQLAlchemy-Continuum UnitOfWork object for given SQLAlchemy session object. If no UnitOfWork object exists for given object then this method tries to create one. :param session: SQLAlchemy session object """ conn = session.connection() if conn not in self.session_connection_map.values(): self.session_connection_map[session] = conn if conn in self.units_of_work: return self.units_of_work[conn] else: uow = self.uow_class(self) self.units_of_work[conn] = uow return uow def _uow_from_conn(self, conn): try: uow = self.units_of_work[conn] except KeyError: try: uow = self.units_of_work[conn.engine] except KeyError: for connection in self.units_of_work.keys(): if not connection.closed and connection.connection is conn.connection: uow = self.unit_of_work(session) break # The ConnectionFairy is the same, this connection is a clone else: raise return uow def before_flush(self, session, flush_context, instances): """ Before flush listener for SQLAlchemy sessions. If this manager has versioning enabled this listener invokes the process before flush of associated UnitOfWork object. :param session: SQLAlchemy session """ if not self.options['versioning']: return uow = self.unit_of_work(session) uow.process_before_flush(session) def after_flush(self, session, flush_context): """ After flush listener for SQLAlchemy sessions. If this manager has versioning enabled this listener gets the UnitOfWork associated with session's connections and invokes the process_after_flush method of that object. :param session: SQLAlchemy session """ if not self.options['versioning']: return uow = self.unit_of_work(session) uow.process_after_flush(session) def clear(self, session): """ Simple SQLAlchemy listener that is being invoked after successful transaction commit or when transaction rollback occurs. The purpose of this listener is to reset this UnitOfWork back to its initialization state. 
:param session: SQLAlchemy session object """ if session.in_nested_transaction(): return conn = self.session_connection_map.pop(session, None) if conn is None: return if conn in self.units_of_work: uow = self.units_of_work[conn] uow.reset(session) del self.units_of_work[conn] for connection in dict(self.units_of_work).keys(): if connection.closed or conn.connection is connection.connection: uow = self.units_of_work[connection] uow.reset(session) del self.units_of_work[connection] def clear_connection(self, conn): if conn in self.units_of_work: uow = self.units_of_work[conn] uow.reset() del self.units_of_work[conn] for session, connection in dict(self.session_connection_map).items(): if connection is conn: del self.session_connection_map[session] for connection in dict(self.units_of_work).keys(): if connection.closed or conn.connection is connection.connection: uow = self.units_of_work[connection] uow.reset() del self.units_of_work[connection] def append_association_operation(self, conn, table_name, params, op): """ Append history association operation to pending_statements list. """ stmt = ( self.metadata.tables[self.options['table_name'] % table_name] .insert() .values({**params, 'operation_type': op}) ) uow = self.uow_from_conn(conn) uow.pending_statements.append(stmt) def track_cloned_connections(self, c, opt): """ Track cloned connections from association tables. """ if c not in self.units_of_work.keys(): for connection, uow in dict(self.units_of_work).items(): if not connection.closed and connection.connection is c.connection: # ConnectionFairy is the same - this is a clone self.units_of_work[c] = uow def track_association_operations( self, conn, clauseelement, multiparams, params, execution_options, ): if ( not self.options['versioning'] and not self.options['native_versioning'] ): return if isinstance(clauseelement, str): op = None elif clauseelement.is_insert: op = Operation.INSERT elif clauseelement.is_delete: op = Operation.DELETE else: op = None if op is not None and clauseelement.table in self.association_tables: if not multiparams: multiparams = [params] uow = self._uow_from_conn(conn) for params in multiparams: stmt = version_table(clauseelement.table).insert().values({ **params, 'operation_type': op, }) uow.pending_statements.append(stmt)
(unit_of_work_cls=<class 'sqlalchemy_continuum.unit_of_work.UnitOfWork'>, transaction_cls=None, user_cls=None, options={}, plugins=None, builder=None)
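A hedged construction example for the VersioningManager class above; the option values shown are the documented defaults with a couple of illustrative overrides, and 'User' stands for whatever user class the application defines.

    from sqlalchemy_continuum.manager import VersioningManager

    my_manager = VersioningManager(
        user_cls='User',                  # lazily resolved from the class registry
        options={
            'strategy': 'subquery',       # instead of the default 'validity'
            'table_name': '%s_history',   # instead of the default '%s_version'
            'exclude': ['last_seen_at'],  # illustrative column to leave unversioned
        },
    )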
56,538
sqlalchemy_continuum.manager
__init__
null
def __init__( self, unit_of_work_cls=UnitOfWork, transaction_cls=None, user_cls=None, options={}, plugins=None, builder=None ): self.uow_class = unit_of_work_cls if builder is None: self.builder = Builder() else: self.builder = builder self.builder.manager = self self.reset() if transaction_cls is not None: self.transaction_cls = transaction_cls else: self.transaction_cls = TransactionFactory() if user_cls is not None: self.user_cls = user_cls self.options = { 'versioning': True, 'base_classes': None, 'table_name': '%s_version', 'exclude': [], 'include': [], 'native_versioning': False, 'create_models': True, 'create_tables': True, 'transaction_column_name': 'transaction_id', 'end_transaction_column_name': 'end_transaction_id', 'operation_type_column_name': 'operation_type', 'strategy': 'validity', 'use_module_name': False } if plugins is None: self.plugins = [] else: self.plugins = plugins self.options.update(options)
(self, unit_of_work_cls=<class 'sqlalchemy_continuum.unit_of_work.UnitOfWork'>, transaction_cls=None, user_cls=None, options={}, plugins=None, builder=None)
56,539
sqlalchemy_continuum.manager
_uow_from_conn
null
def _uow_from_conn(self, conn): try: uow = self.units_of_work[conn] except KeyError: try: uow = self.units_of_work[conn.engine] except KeyError: for connection in self.units_of_work.keys(): if not connection.closed and connection.connection is conn.connection: uow = self.units_of_work[connection] break # The ConnectionFairy is the same, this connection is a clone else: raise return uow
(self, conn)
56,540
sqlalchemy_continuum.manager
after_flush
After flush listener for SQLAlchemy sessions. If this manager has versioning enabled this listener gets the UnitOfWork associated with session's connections and invokes the process_after_flush method of that object. :param session: SQLAlchemy session
def after_flush(self, session, flush_context): """ After flush listener for SQLAlchemy sessions. If this manager has versioning enabled this listener gets the UnitOfWork associated with session's connections and invokes the process_after_flush method of that object. :param session: SQLAlchemy session """ if not self.options['versioning']: return uow = self.unit_of_work(session) uow.process_after_flush(session)
(self, session, flush_context)
56,541
sqlalchemy_continuum.manager
append_association_operation
Append history association operation to pending_statements list.
def append_association_operation(self, conn, table_name, params, op): """ Append history association operation to pending_statements list. """ stmt = ( self.metadata.tables[self.options['table_name'] % table_name] .insert() .values({**params, 'operation_type': op}) ) uow = self._uow_from_conn(conn) uow.pending_statements.append(stmt)
(self, conn, table_name, params, op)
56,542
sqlalchemy_continuum.manager
apply_class_configuration_listeners
Applies class configuration listeners for the given mapper. The listeners work in two phases: 1. Class instrumentation phase: the first listener listens to the class instrumentation event, collects versioned models and adds them to the pending_classes list. 2. After class configuration phase: the second listener listens to the after-configuration event and handles the actual history model generation based on the list collected during the class instrumentation phase. :param mapper: SQLAlchemy mapper to apply the class configuration listeners to
def apply_class_configuration_listeners(self, mapper): """ Applies class configuration listeners for the given mapper. The listeners work in two phases: 1. Class instrumentation phase: the first listener listens to the class instrumentation event, collects versioned models and adds them to the pending_classes list. 2. After class configuration phase: the second listener listens to the after-configuration event and handles the actual history model generation based on the list collected during the class instrumentation phase. :param mapper: SQLAlchemy mapper to apply the class configuration listeners to """ for event_name, listener in self.class_config_listeners.items(): sa.event.listen(mapper, event_name, listener)
(self, mapper)
56,543
sqlalchemy_continuum.manager
before_flush
Before flush listener for SQLAlchemy sessions. If this manager has versioning enabled this listener invokes the process before flush of associated UnitOfWork object. :param session: SQLAlchemy session
def before_flush(self, session, flush_context, instances): """ Before flush listener for SQLAlchemy sessions. If this manager has versioning enabled this listener invokes the process before flush of associated UnitOfWork object. :param session: SQLAlchemy session """ if not self.options['versioning']: return uow = self.unit_of_work(session) uow.process_before_flush(session)
(self, session, flush_context, instances)
56,544
sqlalchemy_continuum.manager
clear
SQLAlchemy listener invoked after a successful transaction commit or a transaction rollback. The purpose of this listener is to reset the associated UnitOfWork back to its initial state. :param session: SQLAlchemy session object
def clear(self, session): """ Simple SQLAlchemy listener that is being invoked after successful transaction commit or when transaction rollback occurs. The purpose of this listener is to reset this UnitOfWork back to its initialization state. :param session: SQLAlchemy session object """ if session.in_nested_transaction(): return conn = self.session_connection_map.pop(session, None) if conn is None: return if conn in self.units_of_work: uow = self.units_of_work[conn] uow.reset(session) del self.units_of_work[conn] for connection in dict(self.units_of_work).keys(): if connection.closed or conn.connection is connection.connection: uow = self.units_of_work[connection] uow.reset(session) del self.units_of_work[connection]
(self, session)
56,545
sqlalchemy_continuum.manager
clear_connection
null
def clear_connection(self, conn): if conn in self.units_of_work: uow = self.units_of_work[conn] uow.reset() del self.units_of_work[conn] for session, connection in dict(self.session_connection_map).items(): if connection is conn: del self.session_connection_map[session] for connection in dict(self.units_of_work).keys(): if connection.closed or conn.connection is connection.connection: uow = self.units_of_work[connection] uow.reset() del self.units_of_work[connection]
(self, conn)
56,546
sqlalchemy_continuum.manager
create_transaction_model
Create Transaction class but only if it doesn't already exist in declarative model registry.
def create_transaction_model(self): """ Create Transaction class but only if it doesn't already exist in declarative model registry. """ if isinstance(self.transaction_cls, TransactionFactory): self.transaction_cls = self.transaction_cls(self) return self.transaction_cls
(self)
56,547
sqlalchemy_continuum.manager
fetcher
null
def fetcher(self, obj): if self.option(obj, 'strategy') == 'subquery': return SubqueryFetcher(self) else: return ValidityFetcher(self)
(self, obj)
56,548
sqlalchemy_continuum.manager
is_excluded_column
null
def is_excluded_column(self, model, column): try: key = get_column_key(model, column) except sa.orm.exc.UnmappedColumnError: return False return self.is_excluded_property(model, key)
(self, model, column)
56,549
sqlalchemy_continuum.manager
is_excluded_property
Returns whether or not given property of given model is excluded from the associated history model. :param model: SQLAlchemy declarative model object. :param key: Model property key
def is_excluded_property(self, model, key): """ Returns whether or not given property of given model is excluded from the associated history model. :param model: SQLAlchemy declarative model object. :param key: Model property key """ if key in self.option(model, 'include'): return False return key in self.option(model, 'exclude')
(self, model, key)
56,550
sqlalchemy_continuum.manager
option
Returns the option value for the given model. If the option is not found on the given model, this method falls back to the default values of this manager object. If the option is not found on this manager object either, this method throws a KeyError. :param model: SQLAlchemy declarative object :param name: name of the versioning option
def option(self, model, name): """ Returns the option value for given model. If the option is not found from given model falls back to default values of this manager object. If the option is not found from this manager object either this method throws a KeyError. :param model: SQLAlchemy declarative object :param name: name of the versioning option """ if not hasattr(model, '__versioned__'): raise TypeError('Model %r is not versioned.' % model) try: return model.__versioned__[name] except KeyError: return self.options[name]
(self, model, name)
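The lookup order described above (model-level `__versioned__` first, manager defaults second) can be illustrated with a hypothetical model; `Base`, the column names and `manager` are assumptions for this sketch only.

    import sqlalchemy as sa

    class Article(Base):                   # hypothetical versioned model
        __tablename__ = 'article'
        __versioned__ = {
            'exclude': ['view_count'],     # overrides the manager default
        }
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.Unicode(255))
        view_count = sa.Column(sa.Integer, default=0)

    # manager.option(Article, 'exclude')    -> ['view_count'] (from the model)
    # manager.option(Article, 'table_name') -> '%s_version'   (manager default)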
56,551
sqlalchemy_continuum.manager
remove_class_configuration_listeners
Remove versioning class configuration listeners from specified mapper. :param mapper: mapper to remove class configuration listeners from
def remove_class_configuration_listeners(self, mapper): """ Remove versioning class configuration listeners from specified mapper. :param mapper: mapper to remove class configuration listeners from """ for event_name, listener in self.class_config_listeners.items(): sa.event.remove(mapper, event_name, listener)
(self, mapper)
56,552
sqlalchemy_continuum.manager
remove_operations_tracking
Remove listeners from specified mapper that track SQL inserts, updates and deletes. :param mapper: mapper to remove the SQL operations tracking listeners from
def remove_operations_tracking(self, mapper): """ Remove listeners from specified mapper that track SQL inserts, updates and deletes. :param mapper: mapper to remove the SQL operations tracking listeners from """ for event_name, listener in self.mapper_listeners.items(): sa.event.remove(mapper, event_name, listener)
(self, mapper)
56,553
sqlalchemy_continuum.manager
remove_session_tracking
Remove listeners that track the operations (flushing, committing and rolling back) of given session. This method should be used in conjunction with `remove_operations_tracking`. :param session: SQLAlchemy session to remove the operations tracking from
def remove_session_tracking(self, session): """ Remove listeners that track the operations (flushing, committing and rolling back) of given session. This method should be used in conjunction with `remove_operations_tracking`. :param session: SQLAlchemy session to remove the operations tracking from """ for event_name, listener in self.session_listeners.items(): sa.event.remove(session, event_name, listener)
(self, session)
56,554
sqlalchemy_continuum.manager
reset
Resets this manager's internal state. This method should be used in test cases that create models on the fly. Otherwise history_class_map and some other variables would be polluted by model classes that are no longer used.
def reset(self): """ Resets this manager's internal state. This method should be used in test cases that create models on the fly. Otherwise history_class_map and some other variables would be polluted by no more used model classes. """ self.tables = {} self.pending_classes = [] self.association_tables = set() self.association_version_tables = set() self.declarative_base = None self.version_class_map = {} self.parent_class_map = {} self.session_listeners = { 'before_flush': self.before_flush, 'after_flush': self.after_flush, 'after_commit': self.clear, 'after_rollback': self.clear, } self.mapper_listeners = { 'after_delete': self.track_deletes, 'after_update': self.track_updates, 'after_insert': self.track_inserts, } self.class_config_listeners = { 'instrument_class': self.builder.instrument_versioned_classes, 'after_configured': self.builder.configure_versioned_classes, } # A dictionary of units of work. Keys as connection objects and values # as UnitOfWork objects. self.units_of_work = {} self.session_connection_map = {} self.metadata = None
(self)
56,555
sqlalchemy_continuum.manager
track_association_operations
null
def track_association_operations( self, conn, clauseelement, multiparams, params, execution_options, ): if ( not self.options['versioning'] and not self.options['native_versioning'] ): return if isinstance(clauseelement, str): op = None elif clauseelement.is_insert: op = Operation.INSERT elif clauseelement.is_delete: op = Operation.DELETE else: op = None if op is not None and clauseelement.table in self.association_tables: if not multiparams: multiparams = [params] uow = self._uow_from_conn(conn) for params in multiparams: stmt = version_table(clauseelement.table).insert().values({ **params, 'operation_type': op, }) uow.pending_statements.append(stmt)
(self, conn, clauseelement, multiparams, params, execution_options)
56,556
sqlalchemy_continuum.manager
track_cloned_connections
Track cloned connections from association tables.
def track_cloned_connections(self, c, opt): """ Track cloned connections from association tables. """ if c not in self.units_of_work.keys(): for connection, uow in dict(self.units_of_work).items(): if not connection.closed and connection.connection is c.connection: # ConnectionFairy is the same - this is a clone self.units_of_work[c] = uow
(self, c, opt)
56,557
sqlalchemy_continuum.manager
track_deletes
Track object deletion operations. Whenever object is deleted it is added to this UnitOfWork's internal operations dictionary.
@tracked_operation def track_deletes(self, uow, target): """ Track object deletion operations. Whenever object is deleted it is added to this UnitOfWork's internal operations dictionary. """ uow.operations.add_delete(target)
(self, uow, target)
56,558
sqlalchemy_continuum.manager
track_inserts
Track object insert operations. Whenever object is inserted it is added to this UnitOfWork's internal operations dictionary.
@tracked_operation def track_inserts(self, uow, target): """ Track object insert operations. Whenever object is inserted it is added to this UnitOfWork's internal operations dictionary. """ uow.operations.add_insert(target)
(self, uow, target)
56,559
sqlalchemy_continuum.manager
track_operations
Attach listeners for specified mapper that track SQL inserts, updates and deletes. :param mapper: mapper to track the SQL operations from
def track_operations(self, mapper): """ Attach listeners for specified mapper that track SQL inserts, updates and deletes. :param mapper: mapper to track the SQL operations from """ for event_name, listener in self.mapper_listeners.items(): sa.event.listen(mapper, event_name, listener)
(self, mapper)
56,560
sqlalchemy_continuum.manager
track_session
Attach listeners that track the operations (flushing, committing and rolling back) of given session. This method should be used in conjunction with `track_operations`. :param session: SQLAlchemy session to track the operations from
def track_session(self, session): """ Attach listeners that track the operations (flushing, committing and rolling back) of given session. This method should be used in conjunction with `track_operations`. :param session: SQLAlchemy session to track the operations from """ for event_name, listener in self.session_listeners.items(): sa.event.listen(session, event_name, listener)
(self, session)
56,561
sqlalchemy_continuum.manager
track_updates
Track object update operations. Whenever object is updated it is added to this UnitOfWork's internal operations dictionary.
@tracked_operation def track_updates(self, uow, target): """ Track object update operations. Whenever object is updated it is added to this UnitOfWork's internal operations dictionary. """ if not is_modified(target): return uow.operations.add_update(target)
(self, uow, target)
56,562
sqlalchemy_continuum.manager
unit_of_work
Return the associated SQLAlchemy-Continuum UnitOfWork object for the given SQLAlchemy session object. If no UnitOfWork object exists for the given session, this method creates one. :param session: SQLAlchemy session object
def unit_of_work(self, session): """ Return the associated SQLAlchemy-Continuum UnitOfWork object for given SQLAlchemy session object. If no UnitOfWork object exists for given object then this method tries to create one. :param session: SQLAlchemy session object """ conn = session.connection() if conn not in self.session_connection_map.values(): self.session_connection_map[session] = conn if conn in self.units_of_work: return self.units_of_work[conn] else: uow = self.uow_class(self) self.units_of_work[conn] = uow return uow
(self, session)
56,564
sqlalchemy_continuum.utils
changeset
Return a humanized changeset for given SQLAlchemy declarative object. With this function you can easily check the changeset of given object in current transaction. :: from sqlalchemy_continuum import changeset article = Article(name=u'Some article') changeset(article) # {'name': [u'Some article', None]} :param obj: SQLAlchemy declarative model object
def changeset(obj): """ Return a humanized changeset for given SQLAlchemy declarative object. With this function you can easily check the changeset of given object in current transaction. :: from sqlalchemy_continuum import changeset article = Article(name=u'Some article') changeset(article) # {'name': [u'Some article', None]} :param obj: SQLAlchemy declarative model object """ data = {} session = sa.orm.object_session(obj) if session and obj in session.deleted: columns = [c for c in sa.inspect(obj.__class__).columns.values() if is_table_column(c)] for column in columns: if not column.primary_key: value = getattr(obj, column.key) if value is not None: data[column.key] = [None, getattr(obj, column.key)] else: for prop in obj.__mapper__.iterate_properties: history = get_history(obj, prop.key) if history.has_changes(): old_value = history.deleted[0] if history.deleted else None new_value = history.added[0] if history.added else None if new_value: data[prop.key] = [new_value, old_value] return data
(obj)
56,565
sqlalchemy_continuum.utils
count_versions
Return the number of versions given object has. This function works even when obj has `create_models` and `create_tables` versioned settings disabled. :: article = Article(name=u'Some article') count_versions(article) # 0 session.add(article) session.commit() count_versions(article) # 1 :param obj: SQLAlchemy declarative model object
def count_versions(obj): """ Return the number of versions given object has. This function works even when obj has `create_models` and `create_tables` versioned settings disabled. :: article = Article(name=u'Some article') count_versions(article) # 0 session.add(article) session.commit() count_versions(article) # 1 :param obj: SQLAlchemy declarative model object """ session = sa.orm.object_session(obj) if session is None: # If object is transient, we assume it has no version history. return 0 manager = get_versioning_manager(obj) table_name = manager.option(obj, 'table_name') % obj.__table__.name criteria = [ '%s = %r' % (pk, getattr(obj, pk)) for pk in get_primary_keys(obj) ] query = sa.text('SELECT COUNT(1) FROM %s WHERE %s' % ( table_name, ' AND '.join(criteria) )) return session.execute(query).scalar()
(obj)
56,571
sqlalchemy_continuum.utils
get_versioning_manager
Return the associated SQLAlchemy-Continuum VersioningManager for given SQLAlchemy declarative model class or object. :param obj_or_class: SQLAlchemy declarative model object or class
def get_versioning_manager(obj_or_class): """ Return the associated SQLAlchemy-Continuum VersioningManager for given SQLAlchemy declarative model class or object. :param obj_or_class: SQLAlchemy declarative model object or class """ if isinstance(obj_or_class, AliasedClass): obj_or_class = sa.inspect(obj_or_class).mapper.class_ cls = obj_or_class if isclass(obj_or_class) else obj_or_class.__class__ try: return cls.__versioning_manager__ except AttributeError: raise ClassNotVersioned(cls.__name__)
(obj_or_class)
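A short hedged example for the function above; `Article` is an illustrative versioned model, and passing an unversioned class raises ClassNotVersioned as shown in the code.

    from sqlalchemy_continuum.utils import get_versioning_manager

    manager = get_versioning_manager(Article)       # works for the class or an instance
    strategy = manager.option(Article, 'strategy')  # e.g. 'validity'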
56,572
sqlalchemy_continuum.utils
is_modified
Return whether or not the versioned properties of given object have been modified. :: article = Article() is_modified(article) # False article.name = 'Something' is_modified(article) # True :param obj: SQLAlchemy declarative model object .. seealso:: :func:`is_modified_or_deleted` .. seealso:: :func:`is_session_modified`
def is_modified(obj): """ Return whether or not the versioned properties of given object have been modified. :: article = Article() is_modified(article) # False article.name = 'Something' is_modified(article) # True :param obj: SQLAlchemy declarative model object .. seealso:: :func:`is_modified_or_deleted` .. seealso:: :func:`is_session_modified` """ column_names = sa.inspect(obj.__class__).columns.keys() versioned_column_keys = [ prop.key for prop in versioned_column_properties(obj) ] versioned_relationship_keys = [ prop.key for prop in versioned_relationships(obj, versioned_column_keys) ] for key, attr in sa.inspect(obj).attrs.items(): if key in column_names: if key not in versioned_column_keys: continue if attr.history.has_changes(): return True if key in versioned_relationship_keys: if attr.history.has_changes(): return True return False
(obj)
56,573
sqlalchemy_continuum.utils
is_session_modified
Return whether or not any of the versioned objects in given session have been either modified or deleted. :param session: SQLAlchemy session object .. seealso:: :func:`is_versioned` .. seealso:: :func:`versioned_objects`
def is_session_modified(session): """ Return whether or not any of the versioned objects in given session have been either modified or deleted. :param session: SQLAlchemy session object .. seealso:: :func:`is_versioned` .. seealso:: :func:`versioned_objects` """ return any( is_modified_or_deleted(obj) for obj in versioned_objects(session) )
(session)
56,574
sqlalchemy_continuum
make_versioned
This is the public API function of SQLAlchemy-Continuum for making certain mappers and sessions versioned. By default this applies to all mappers and all sessions. :param mapper: SQLAlchemy mapper to apply the versioning to. :param session: SQLAlchemy session to apply the versioning to. By default this is sa.orm.session.Session meaning it applies to all Session subclasses. :param manager: SQLAlchemy-Continuum versioning manager. :param plugins: Plugins to pass for versioning manager. :param options: A dictionary of VersioningManager options. :param user_cls: User class which the Transaction class should have relationship to. This can either be a class or string name of a class for lazy evaluation.
def make_versioned( mapper=sa.orm.Mapper, session=sa.orm.session.Session, manager=versioning_manager, plugins=None, options=None, user_cls='User' ): """ This is the public API function of SQLAlchemy-Continuum for making certain mappers and sessions versioned. By default this applies to all mappers and all sessions. :param mapper: SQLAlchemy mapper to apply the versioning to. :param session: SQLAlchemy session to apply the versioning to. By default this is sa.orm.session.Session meaning it applies to all Session subclasses. :param manager: SQLAlchemy-Continuum versioning manager. :param plugins: Plugins to pass for versioning manager. :param options: A dictionary of VersioningManager options. :param user_cls: User class which the Transaction class should have relationship to. This can either be a class or string name of a class for lazy evaluation. """ if plugins is not None: manager.plugins = plugins if options is not None: manager.options.update(options) manager.user_cls = user_cls manager.apply_class_configuration_listeners(mapper) manager.track_operations(mapper) manager.track_session(session) sa.event.listen( sa.engine.Engine, 'before_execute', manager.track_association_operations ) sa.event.listen( sa.engine.Engine, 'rollback', manager.clear_connection ) sa.event.listen( sa.engine.Engine, 'set_connection_execution_options', manager.track_cloned_connections )
(mapper=<class 'sqlalchemy.orm.mapper.Mapper'>, session=<class 'sqlalchemy.orm.session.Session'>, manager=<sqlalchemy_continuum.manager.VersioningManager object at 0x7f32fe71f1f0>, plugins=None, options=None, user_cls='User')
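A minimal setup sketch for the function above, assuming SQLAlchemy 2.x style declarative models; `Article` is illustrative and `user_cls=None` simply disables the Transaction-to-user relationship.

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base
    from sqlalchemy_continuum import make_versioned

    make_versioned(user_cls=None)   # must run before the versioned models are defined

    Base = declarative_base()

    class Article(Base):            # illustrative model
        __versioned__ = {}          # opts this model in to versioning
        __tablename__ = 'article'
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.Unicode(255))

    sa.orm.configure_mappers()      # triggers generation of ArticleVersion and Transaction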
56,578
sqlalchemy_continuum.utils
parent_class
Return the parent class for given version model class. :: parent_class(ArticleVersion) # Article class :param version_cls: SQLAlchemy declarative version model class .. seealso:: :func:`version_class`
def parent_class(version_cls): """ Return the parent class for given version model class. :: parent_class(ArticleVersion) # Article class :param version_cls: SQLAlchemy declarative version model class .. seealso:: :func:`version_class` """ return get_versioning_manager(version_cls).parent_class_map[version_cls]
(version_cls)
56,581
sqlalchemy_continuum
remove_versioning
Remove the versioning from given mapper / session and manager. :param mapper: SQLAlchemy mapper to remove the versioning from. :param session: SQLAlchemy session to remove the versioning from. By default this is sa.orm.session.Session meaning it applies to all sessions. :param manager: SQLAlchemy-Continuum versioning manager.
def remove_versioning( mapper=sa.orm.Mapper, session=sa.orm.session.Session, manager=versioning_manager ): """ Remove the versioning from given mapper / session and manager. :param mapper: SQLAlchemy mapper to remove the versioning from. :param session: SQLAlchemy session to remove the versioning from. By default this is sa.orm.session.Session meaning it applies to all sessions. :param manager: SQLAlchemy-Continuum versioning manager. """ manager.reset() manager.remove_class_configuration_listeners(mapper) manager.remove_operations_tracking(mapper) manager.remove_session_tracking(session) sa.event.remove( sa.engine.Engine, 'before_execute', manager.track_association_operations ) sa.event.remove( sa.engine.Engine, 'rollback', manager.clear_connection ) sa.event.remove( sa.engine.Engine, 'set_connection_execution_options', manager.track_cloned_connections )
(mapper=<class 'sqlalchemy.orm.mapper.Mapper'>, session=<class 'sqlalchemy.orm.session.Session'>, manager=<sqlalchemy_continuum.manager.VersioningManager object at 0x7f32fe71f1f0>)
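A hedged teardown example for the function above, e.g. in a test fixture; `versioning_manager` is the module-level default manager referenced in the signature.

    from sqlalchemy_continuum import remove_versioning, versioning_manager

    remove_versioning(manager=versioning_manager)   # detaches all listeners attached by make_versioned()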
56,586
sqlalchemy_continuum.utils
transaction_class
Return the associated transaction class for given versioned SQLAlchemy declarative class or version class. :: from sqlalchemy_continuum import transaction_class transaction_class(Article) # Transaction class :param cls: SQLAlchemy versioned declarative class or version model class
def transaction_class(cls): """ Return the associated transaction class for given versioned SQLAlchemy declarative class or version class. :: from sqlalchemy_continuum import transaction_class transaction_class(Article) # Transaction class :param cls: SQLAlchemy versioned declarative class or version model class """ return get_versioning_manager(cls).transaction_cls
(cls)
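A hedged follow-up to the docstring's example showing that the returned class is an ordinary mapped class that can be queried; `Article` and `session` are illustrative names.

    from sqlalchemy_continuum import transaction_class

    Transaction = transaction_class(Article)   # Article is an illustrative versioned model
    recent = (
        session.query(Transaction)
        .order_by(Transaction.id.desc())
        .limit(10)
        .all()
    )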
56,587
sqlalchemy_continuum.utils
tx_column_name
null
def tx_column_name(obj): return option(obj, 'transaction_column_name')
(obj)
56,590
sqlalchemy_continuum.utils
vacuum
When making structural changes to version tables (for example dropping columns) there are sometimes situations where some old version records become futile. Vacuum deletes all futile version rows, i.e. rows that contain no changes compared to the previous version. :: from sqlalchemy_continuum import vacuum vacuum(session, User) # vacuums user version :param session: SQLAlchemy session object :param model: SQLAlchemy declarative model class :param yield_per: how many rows to process at a time
def vacuum(session, model, yield_per=1000):
    """
    When making structural changes to version tables (for example dropping
    columns) there are sometimes situations where some old version records
    become futile.

    Vacuum deletes all futile version rows which had no changes compared to
    previous version.

    ::

        from sqlalchemy_continuum import vacuum


        vacuum(session, User)  # vacuums user version


    :param session: SQLAlchemy session object
    :param model: SQLAlchemy declarative model class
    :param yield_per: how many rows to process at a time
    """
    version_cls = version_class(model)
    versions = defaultdict(list)

    query = (
        session.query(version_cls)
        .order_by(option(version_cls, 'transaction_column_name'))
    ).yield_per(yield_per)

    primary_key_col = sa.inspection.inspect(model).primary_key[0].name

    for version in query:
        version_id = getattr(version, primary_key_col)
        if versions[version_id]:
            prev_version = versions[version_id][-1]
            if naturally_equivalent(prev_version, version):
                session.delete(version)
        else:
            versions[version_id].append(version)
(session, model, yield_per=1000)
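A hedged usage sketch, assuming a hypothetical versioned Article model and an open session; vacuum() only issues session.delete() for redundant version rows, so the caller still has to commit:

from sqlalchemy_continuum import vacuum

vacuum(session, Article, yield_per=500)  # mark redundant ArticleVersion rows for deletion
session.commit()                         # actually remove them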
56,592
sqlalchemy_continuum.utils
version_class
Return the version class for given SQLAlchemy declarative model class. :: version_class(Article) # ArticleVersion class :param model: SQLAlchemy declarative model class .. seealso:: :func:`parent_class`
def version_class(model):
    """
    Return the version class for given SQLAlchemy declarative model class.

    ::

        version_class(Article)  # ArticleVersion class


    :param model: SQLAlchemy declarative model class

    .. seealso:: :func:`parent_class`
    """
    manager = get_versioning_manager(model)
    try:
        return manager.version_class_map[model]
    except KeyError:
        return model
(model)
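A brief sketch, assuming a hypothetical versioned Article model and a loaded article instance; the version class mirrors the parent's columns, so the parent's primary key can be used to fetch an object's history:

from sqlalchemy_continuum import version_class

ArticleVersion = version_class(Article)
history = session.query(ArticleVersion).filter(ArticleVersion.id == article.id).all()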
56,593
libsast.core_matcher.choice_matcher
ChoiceMatcher
null
class ChoiceMatcher:
    def __init__(self, options: dict) -> None:
        self.scan_rules = get_rules(options.get('choice_rules'))
        self.show_progress = options.get('show_progress')
        self.alternative_path = options.get('alternative_path')
        exts = options.get('choice_extensions')
        if exts:
            self.exts = [ext.lower() for ext in exts]
        else:
            self.exts = []
        self.findings = {}

    def scan(self, paths: list) -> dict:
        """Scan file(s) or directory per rule."""
        if not (self.scan_rules and paths):
            return
        self.validate_rules()
        choice_args = []
        if self.show_progress:
            pbar = common.ProgressBar('Choice Match', len(self.scan_rules))
            self.scan_rules = pbar.progrees_loop(self.scan_rules)
        for rule in self.scan_rules:
            scan_paths = paths
            if rule['type'] != 'code' and self.alternative_path:
                # Scan only alternative path
                scan_paths = [Path(self.alternative_path)]
            choice_args.append((scan_paths, rule))
        with ProcessPoolExecutor(max_workers=common.get_worker_count()) as exe:
            results = exe.map(
                self.choice_matcher,
                choice_args,
                chunksize=1)
            self.add_finding(results)
        return self.findings

    def validate_rules(self):
        """Validate Rules before scanning."""
        for rule in self.scan_rules:
            if not isinstance(rule, dict):
                raise exceptions.InvalidRuleFormatError(
                    'Choice Matcher Rule format is invalid.')
            if not rule.get('id'):
                raise exceptions.TypeKeyMissingError(
                    'The rule is missing the key \'id\'')
            if not rule.get('type'):
                raise exceptions.PatternKeyMissingError(
                    'The rule is missing the key \'type\'')
            if not rule.get('choice_type'):
                raise exceptions.PatternKeyMissingError(
                    'The rule is missing the key \'choice_type\'')
            if not rule.get('selection'):
                raise exceptions.PatternKeyMissingError(
                    'The rule is missing the key \'selection\'')
            if not rule.get('choice'):
                raise exceptions.PatternKeyMissingError(
                    'The rule is missing the key \'choice\'')

    def choice_matcher(self, args):
        """Run a Single Choice Matcher rule on all files."""
        results = []
        scan_paths, rule = args
        try:
            matches = set()
            all_matches = set()
            for sfile in scan_paths:
                ext = sfile.suffix.lower()
                if self.exts and ext not in self.exts:
                    continue
                if sfile.stat().st_size / 1000 / 1000 > 5:
                    # Skip scanning files greater than 5 MB
                    continue
                data = sfile.read_text('utf-8', 'ignore')
                if ext in ('.html', '.xml'):
                    data = strip_comments2(data)
                else:
                    data = strip_comments(data)
                match = choices.find_choices(data, rule)
                if match:
                    if isinstance(match, set):
                        # all
                        all_matches.update(match)
                    elif isinstance(match, list):
                        # or, and
                        matches.add(match[0])
            results.append({
                'rule': rule,
                'matches': matches,
                'all_matches': all_matches,
            })
        except Exception:
            raise exceptions.RuleProcessingError('Rule processing error.')
        return results

    def add_finding(self, results):
        """Add Choice Findings."""
        for res_list in results:
            if not res_list:
                continue
            for match_dict in res_list:
                all_matches = match_dict['all_matches']
                matches = match_dict['matches']
                rule = match_dict['rule']
                if all_matches:
                    selection = rule['selection'].format(list(all_matches))
                elif matches:
                    select = rule['choice'][min(matches)][1]
                    selection = rule['selection'].format(select)
                elif rule.get('else'):
                    selection = rule['selection'].format(rule['else'])
                else:
                    continue
                self.findings[rule['id']] = self.get_meta(rule, selection)

    def get_meta(self, rule, selection):
        """Get Finding Meta."""
        meta_dict = {}
        meta_dict['choice'] = selection
        meta_dict['description'] = rule['message']
        for key in rule:
            if key in ('choice', 'message', 'id', 'type',
                       'choice_type', 'selection', 'else'):
                continue
            meta_dict[key] = rule[key]
        return meta_dict
(options: dict) -> None
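A minimal usage sketch inferred from the constructor and scan() above; the rule file path, extension list, and scan target are hypothetical, and in practice this class is usually driven by libsast's scanner front end rather than instantiated directly:

from pathlib import Path

from libsast.core_matcher.choice_matcher import ChoiceMatcher

options = {
    'choice_rules': 'rules/choice_rules.yaml',  # hypothetical rule file path
    'choice_extensions': ['.java', '.xml'],     # only scan these extensions
    'alternative_path': None,
    'show_progress': False,
}
matcher = ChoiceMatcher(options)
findings = matcher.scan([Path('app/src/Main.java')])  # hypothetical source file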