code | package | path | filename
---|---|---|---|
from importlib import reload as reload_module
def reload_module_if_blok_is_reloading(module):
from anyblok.environment import EnvironmentManager
if EnvironmentManager.get("reload", default=False):
reload_module(module)
class ImportManagerException(AttributeError):
"""Exception for Import Manager"""
class ImportManager:
"""Used to import bloks or reload the blok imports
To add a blok and import its modules::
blok = ImportManager.add('my blok')
blok.imports()
To reload the modules of a blok::
if ImportManager.has('my blok'):
blok = ImportManager.get('my blok')
blok.reload()
"""
modules = {}
@classmethod
def add(cls, blok):
"""Store the blok so that we know which bloks to reload if needed
:param blok: name of the blok to add
:rtype: loader instance
:exception: ImportManagerException
"""
from anyblok.blok import BlokManager
if cls.has(blok):
return cls.get(blok)
if not BlokManager.has(blok):
raise ImportManagerException("Unexisting blok") # pragma: no cover
loader = Loader(blok)
cls.modules[blok] = loader
return loader
@classmethod
def get(cls, blok):
"""Return the module imported for this blok
:param blok: name of the blok to get
:rtype: loader instance
:exception: ImportManagerException
"""
if not cls.has(blok):
raise ImportManagerException("Unexisting blok %r" % blok)
return cls.modules[blok]
@classmethod
def has(cls, blok):
"""Return True if the blok was imported
:param blok: name of the blok to check
:rtype: boolean
"""
return blok in cls.modules
class Loader:
def __init__(self, blok):
self.blok = blok
def imports(self):
"""Imports modules and / or packages listed in the blok path"""
from anyblok.blok import BlokManager
from anyblok.registry import RegistryManager
RegistryManager.init_blok(self.blok)
b = BlokManager.get(self.blok)
b.import_declaration_module()
def reload(self):
"""Reload all the imports for this module
:exception: ImportManagerException
"""
from anyblok.blok import BlokManager
from anyblok.environment import EnvironmentManager
from anyblok.registry import RegistryManager
b = BlokManager.get(self.blok)
if not hasattr(b, "reload_declaration_module"):
return
try:
EnvironmentManager.set("reload", True)
RegistryManager.init_blok(self.blok)
b.reload_declaration_module(reload_module)
finally:
EnvironmentManager.set("reload", False)
| AnyBlok | /AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/imp.py | imp.py |
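A minimal usage sketch of the import machinery above, following the ImportManager docstring (the blok name 'my-blok' is a placeholder and must already be registered in BlokManager)::

    from anyblok.imp import ImportManager

    # first load: remember the blok and import its declaration modules
    loader = ImportManager.add('my-blok')
    loader.imports()

    # later, e.g. during a code reload, re-import the declarations of that blok
    if ImportManager.has('my-blok'):
        ImportManager.get('my-blok').reload()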
import sys
import warnings
from argparse import RawDescriptionHelpFormatter
from logging import getLogger
from os import walk
from os.path import join
from textwrap import dedent
from sqlalchemy_utils.functions import create_database
import anyblok
from anyblok import (
configuration_post_load,
load_init_function_from_entry_points,
)
from anyblok._graphviz import ModelSchema, SQLSchema
from anyblok.blok import BlokManager
from anyblok.common import return_list
from anyblok.config import Configuration, get_db_name, get_url
from anyblok.registry import RegistryManager
from anyblok.release import version
logger = getLogger(__name__)
Configuration.add_application_properties(
"createdb",
[
"unittest",
"logging",
"create_db",
"install-bloks",
"install-or-update-bloks",
],
prog="AnyBlok create database, version %r" % version,
description="Create a database and install bloks to populate it",
)
Configuration.add_application_properties(
"updatedb",
[
"unittest",
"logging",
"install-bloks",
"uninstall-bloks",
"update-bloks",
"install-or-update-bloks",
],
prog="AnyBlok update database, version %r" % version,
description="Update a database: install, upgrade or uninstall the bloks ",
)
Configuration.add_application_properties(
"interpreter",
["logging", "interpreter"],
prog="AnyBlok interpreter, version %r" % version,
description="Run an interpreter on the registry",
formatter_class=RawDescriptionHelpFormatter,
epilog=dedent(
"Example\n"
"-------\n"
" $ anyblok_interpreter [anyblok arguments] \n"
" $ => anyblok_registry \n"
" ... <registry> \n\n"
" The interpreter gives you a python console with the "
"registry of the selected database \n\n"
"Note\n"
"----\n"
" If 'ipython' is installed, then the interpreter will be "
"an interactive ipython one."
),
)
Configuration.add_application_properties(
"autodoc",
["logging", "doc", "schema"],
prog="AnyBlok auto documentation, version %r" % version,
)
Configuration.add_application_properties(
"nose",
["logging", "unittest"],
prog="AnyBlok nose, version %r" % version,
description="Run functional nosetest against installed bloks.",
)
def anyblok_createdb():
"""Create a database and install blok from config"""
load_init_function_from_entry_points()
Configuration.load("createdb")
configuration_post_load()
BlokManager.load()
db_name = get_db_name()
db_template_name = Configuration.get("db_template_name", None)
url = get_url(db_name=db_name)
create_database(url, template=db_template_name)
anyblok_registry = RegistryManager.get(db_name)
if anyblok_registry is None:
return
anyblok_registry.System.Parameter.set(
"with-demo", Configuration.get("with_demo", False)
)
if Configuration.get("install_all_bloks"):
bloks = anyblok_registry.System.Blok.list_by_state("uninstalled")
else:
install_bloks = Configuration.get("install_bloks") or []
iou_bloks = Configuration.get("install_or_update_bloks") or []
bloks = list(set(install_bloks + iou_bloks))
anyblok_registry.upgrade(install=bloks)
anyblok_registry.commit()
anyblok_registry.close()
def anyblok_updatedb():
"""Update an existing database"""
anyblok_registry = anyblok.start("updatedb", loadwithoutmigration=True)
installed_bloks = anyblok_registry.System.Blok.list_by_state("installed")
toupdate_bloks = anyblok_registry.System.Blok.list_by_state("toupdate")
required_install_bloks = []
required_update_bloks = []
for blok in Configuration.get("install_or_update_bloks") or []:
if blok in installed_bloks:
required_update_bloks.append(blok)
elif blok not in toupdate_bloks:
required_install_bloks.append(blok)
if Configuration.get("install_all_bloks"):
install_bloks = anyblok_registry.System.Blok.list_by_state(
"uninstalled"
)
else:
install_bloks = Configuration.get("install_bloks") or []
install_bloks = list(set(install_bloks + required_install_bloks))
if Configuration.get("update_all_bloks"):
update_bloks = anyblok_registry.System.Blok.list_by_state("installed")
else:
update_bloks = Configuration.get("update_bloks") or []
update_bloks = list(set(update_bloks + required_update_bloks))
uninstall_bloks = Configuration.get("uninstall_bloks")
if anyblok_registry:
anyblok_registry.update_blok_list()  # in case a new blok was added
anyblok_registry.upgrade(
install=install_bloks,
update=update_bloks,
uninstall=uninstall_bloks,
)
anyblok_registry.commit()
anyblok_registry.close()
class RegistryWrapper:
def __init__(self, anyblok_registry):
self.anyblok_registry = anyblok_registry
def __getattr__(self, key, **kwargs):
logger.warning("registry in local is déprécated, use anyblok_registry")
return getattr(self.anyblok_registry, key, **kwargs)
def anyblok_interpreter():
"""Execute a script or open an interpreter"""
anyblok_registry = anyblok.start("interpreter")
if anyblok_registry:
anyblok_registry.commit()
registry = RegistryWrapper(anyblok_registry)
python_script = Configuration.get("python_script")
if python_script:
with open(python_script, "r") as fh:
exec(fh.read(), None, locals())
else:
try:
from IPython import embed
embed()
except ImportError:
import code
code.interact(local=locals())
def anyblok2doc():
"""Return auto documentation for the registry"""
anyblok_registry = anyblok.start("autodoc")
if anyblok_registry:
anyblok_registry.commit()
doc = anyblok_registry.Documentation()
doc.auto_doc()
if Configuration.get("doc_format") == "RST":
with open(Configuration.get("doc_output"), "w") as fp:
doc.toRST(fp)
elif Configuration.get("doc_format") == "UML":
format_ = Configuration.get("schema_format")
name_ = Configuration.get("schema_output")
dot = ModelSchema(name_, format=format_)
doc.toUML(dot)
dot.save()
elif Configuration.get("doc_format") == "SQL":
format_ = Configuration.get("schema_format")
name_ = Configuration.get("schema_output")
dot = SQLSchema(name_, format=format_)
doc.toSQL(dot)
dot.save()
def anyblok_nose():
"""Run nose unit test after giving it the registry"""
warnings.simplefilter("default")
warnings.warn(
"This script is deprecated and will be removed soon. "
"The Nose test machinery has been removed from the framework in order "
"to be replaced with Pytest. "
"If you need to run your tests with nose, install the Nose package.",
DeprecationWarning,
stacklevel=2,
)
try:
from nose import main
except ImportError:
logger.error('"Nosetest" is not installed, try: pip install nose')
anyblok_registry = anyblok.start("nose", useseparator=True, unittest=True)
if anyblok_registry:
installed_bloks = anyblok_registry.System.Blok.list_by_state(
"installed"
)
selected_bloks = (
return_list(Configuration.get("selected_bloks")) or installed_bloks
)
unwanted_bloks = return_list(Configuration.get("unwanted_bloks")) or []
unwanted_bloks.extend(["anyblok-core", "anyblok-test", "model_authz"])
defaultTest = []
for blok in installed_bloks:
if blok not in selected_bloks or blok in unwanted_bloks:
continue
startpath = BlokManager.getPath(blok)
for root, dirs, _ in walk(startpath):
if "tests" in dirs:
defaultTest.append(join(root, "tests"))
anyblok_registry.close()  # free the registry to force it to be created again
sys.exit(main(defaultTest=defaultTest))
| AnyBlok | /AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/scripts.py | scripts.py |
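The functions above are intended to be exposed as console scripts. A hedged sketch of the packaging glue, reusing the entry-points convention shown in __init__.py below (the script names are illustrative, not copied from AnyBlok's own setup.py)::

    setup(
        ...,
        entry_points={
            'console_scripts': [
                'anyblok_createdb=anyblok.scripts:anyblok_createdb',
                'anyblok_updatedb=anyblok.scripts:anyblok_updatedb',
                'anyblok_interpreter=anyblok.scripts:anyblok_interpreter',
            ],
        },
    )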
from .common import add_autodocs
from .mapper import MapperAdapter
class DeclarationsException(AttributeError):
"""Simple Exception for Declarations"""
class Declarations:
"""Represents all the declarations done by the bloks
.. warning::
This is global information; during execution you must use the
registry. The registry is the real assembler of the Python classes
based on the installed bloks
::
from anyblok import Declarations
"""
declaration_types = {}
@classmethod
def register(cls, parent, cls_=None, **kwargs):
"""Method to add the blok in the registry under a type of declaration
:param parent: An existing blok class in the Declaration
:param ``cls_``: The ``class`` object to add in the Declaration
:rtype: ``cls_``
:exception: DeclarationsException
"""
def wrapper(self):
name = kwargs.get("name_", self.__name__)
if parent.__declaration_type__ not in cls.declaration_types:
raise DeclarationsException(
"No parents %r for %s" % (parent, name)
) # pragma: no cover
declaration = cls.declaration_types[parent.__declaration_type__]
declaration.register(parent, name, self, **kwargs)
node = getattr(parent, name)
setattr(node, "__declaration_type__", parent.__declaration_type__)
setattr(
node, "__registry_name__", parent.__registry_name__ + "." + name
)
# Only for auto doc with autoanyblok-declaration directive
setattr(self, "__declaration__", declaration)
setattr(
self, "__registry_name__", parent.__registry_name__ + "." + name
)
return self
if cls_:
return wrapper(cls_)
else:
return wrapper
@classmethod
def unregister(cls, entry, cls_):
"""Method to remove the blok from a type of declaration
:param entry: declaration entry of the model where the ``cls_``
must be removed
:param ``cls_``: The ``class`` object to remove from the
Declaration
:rtype: ``cls_``
"""
declaration = cls.declaration_types[entry.__declaration_type__]
declaration.unregister(entry, cls_)
return cls_
@classmethod
def add_declaration_type(
cls,
cls_=None,
isAnEntry=False,
pre_assemble=None,
assemble=None,
initialize=None,
unload=None,
):
"""Add a declaration type
:param cls_: The ``class`` object to add as a world of the MetaData
:param isAnEntry: if true the type will be assembled by the registry
:param pre_assemble: name of the method callback to call (classmethod)
:param assemble: name of the method callback to call (classmethod)
:param initialize: name of the method callback to call (classmethod)
:param unload: name of the method callback to call (classmethod)
:exception: DeclarationsException
"""
def wrapper(self):
from anyblok.registry import RegistryManager
name = self.__name__
if name in cls.declaration_types:
raise DeclarationsException(
"The declaration type %r is already defined" % name
)
cls.declaration_types[name] = self
setattr(self, "__registry_name__", name)
setattr(self, "__declaration_type__", name)
setattr(cls, name, self)
if isAnEntry:
pre_assemble_callback = assemble_callback = None
initialize_callback = None
if pre_assemble and hasattr(self, pre_assemble):
pre_assemble_callback = getattr(self, pre_assemble)
if assemble and hasattr(self, assemble):
assemble_callback = getattr(self, assemble)
if initialize and hasattr(self, initialize):
initialize_callback = getattr(self, initialize)
RegistryManager.declare_entry(
name,
pre_assemble_callback=pre_assemble_callback,
assemble_callback=assemble_callback,
initialize_callback=initialize_callback,
)
# Any declaration type may need to unload its declared values
if unload and hasattr(self, unload):
RegistryManager.declare_unload_callback(
name, getattr(self, unload)
) # pragma: no cover
return self
if cls_:
return wrapper(cls_)
else:
return wrapper
def cache(size=128):
autodoc = """
**Cached method** with size=%(size)s
""" % dict(
size=size
)
def wrapper(method):
add_autodocs(method, autodoc)
method.is_cache_method = True
method.is_cache_classmethod = False
method.size = size
return method
return wrapper
def classmethod_cache(size=128):
autodoc = """
**Cached classmethod** with size=%(size)s
""" % dict(
size=size
)
def wrapper(method):
add_autodocs(method, autodoc)
method.is_cache_method = True
method.is_cache_classmethod = True
method.size = size
return method
return wrapper
def hybrid_method(method=None):
autodoc = """
**Hybrid method**
"""
if method:
add_autodocs(method, autodoc)
method.is_an_hybrid_method = True
return method
else:
def wrapper(method):
add_autodocs(method, autodoc)
method.is_an_hybrid_method = True
return method
return wrapper
def listen(*args, **kwargs):
autodoc = """
**listen** event call with the positional arguments %(args)r and the
keyword arguments %(kwargs)r
""" % dict(
args=args, kwargs=kwargs
)
mapper = MapperAdapter(*args, **kwargs)
def wrapper(method):
add_autodocs(method, autodoc)
mapper.listen(method)
return classmethod(method)
return wrapper
| AnyBlok | /AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/declarations.py | declarations.py |
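A short sketch of how the decorators above are combined when declaring a model (a sketch only: the Model declaration type is provided by the framework's model module, the class and method names are illustrative, and in practice this code lives in a blok's declaration modules)::

    from anyblok import Declarations
    from anyblok.declarations import cache

    Model = Declarations.Model

    @Declarations.register(Model)
    class Example:

        @cache(size=64)
        def expensive_computation(self):
            # marked for caching; the registry applies a cache of the
            # declared size when the classes are assembled
            return 42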
from contextlib import contextmanager
from logging import getLogger
from alembic.autogenerate import compare_metadata
from alembic.migration import MigrationContext
from alembic.operations import Operations
from pkg_resources import iter_entry_points
from sqlalchemy import and_, func, inspect, select, text, update
from sqlalchemy.exc import IntegrityError, OperationalError
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import (
CheckConstraint,
DDLElement,
PrimaryKeyConstraint,
UniqueConstraint,
)
from sqlalchemy.sql.ddl import CreateSchema, DropSchema
from anyblok.config import Configuration
from .common import return_list, sgdb_in
logger = getLogger(__name__)
MIGRATION_TYPE_PLUGINS_NAMESPACE = "anyblok.migration_type.plugins"
class AlterSchema(DDLElement):
def __init__(self, oldname, newname):
self.oldname = oldname
self.newname = newname
@compiles(CreateSchema, "mysql")
def compile_create_schema(element, compiler, **kw):
schema = compiler.preparer.format_schema(element.element)
return "CREATE SCHEMA %s CHARSET UTF8" % schema
@compiles(AlterSchema)
def compile_alter_schema(element, compiler, **kw):
old_schema_name = compiler.preparer.format_schema(element.oldname)
new_schema_name = compiler.preparer.format_schema(element.newname)
return "ALTER SCHEMA %s RENAME TO %s" % (old_schema_name, new_schema_name)
@contextmanager
def cnx(migration):
"""Context manager used by migration to get the connection"""
try:
yield migration.conn
except MigrationException:
raise
except Exception: # pragma: no cover
migration.conn.execute(text("rollback"))
raise
class MigrationException(AttributeError):
"""Simple Exception class for Migration"""
class MigrationReport:
"""Change report
Get a new report::
report = MigrationReport(migrationinstance, change_detected)
"""
def ignore_migration_for(self, schema, table, default=None):
if schema in self.ignore_migration_for_schema_from_configuration:
return True
if table in self.ignore_migration_for_table_from_configuration:
return True
return self.migration.ignore_migration_for.get(table, default)
def raise_if_withoutautomigration(self):
if self.migration.withoutautomigration:
raise MigrationException(
"The metadata and the base structue are "
"different, or this difference is "
"forbidden in 'no auto migration' mode"
)
def table_is_added(self, table):
for action in self.actions:
if action[0] == "add_table" and action[1] is table:
return True # pragma: no cover
return False
def init_add_schema(self, diff):
self.raise_if_withoutautomigration()
_, schema = diff
self.log_names.append("Add schema %s" % schema)
def init_add_table(self, diff):
self.raise_if_withoutautomigration()
_, table = diff
table_name = (
"%s.%s" % (table.schema, table.name) if table.schema else table.name
)
self.log_names.append("Add table %s" % table_name)
def init_add_column(self, diff):
self.raise_if_withoutautomigration()
_, schema, table, column = diff
if self.ignore_migration_for(schema, table) is True:
return True
self.log_names.append("Add %s.%s" % (table, column.name))
def can_remove_constraints(self, name):
if name.startswith("anyblok_uq_"):
return True
if self.migration.reinit_constraints:
return True
if self.migration.reinit_all:
return True
return False
def can_remove_fk_constraints(self, name):
if name.startswith("anyblok_fk_"):
return True
if self.migration.reinit_constraints:
return True
if self.migration.reinit_all:
return True
return False
def can_remove_check_constraints(self, name):
if name.startswith("anyblok_ck_"):
return True
if self.migration.reinit_constraints:
return True
if self.migration.reinit_all:
return True
return False
def init_remove_constraint(self, diff):
_, constraint = diff
if (
self.ignore_migration_for(
constraint.table.schema, constraint.table.name
)
is True
):
return True
self.log_names.append(
"Drop constraint %s on %s" % (constraint.name, constraint.table)
)
if self.can_remove_constraints(constraint.name):
self.raise_if_withoutautomigration()
else:
return True
def can_remove_index(self, name):
if name.startswith("anyblok_ix_"):
return True
if self.migration.reinit_indexes:
return True
if self.migration.reinit_all:
return True
return False
def init_add_index(self, diff):
self.raise_if_withoutautomigration()
_, constraint = diff
if (
self.ignore_migration_for(
constraint.table.schema, constraint.table.name
)
is True
):
return True # pragma: no cover
columns = [x.name for x in constraint.columns]
if self.table_is_added(constraint.table):
return True # pragma: no cover
self.log_names.append(
"Add index constraint on %s (%s)"
% (constraint.table.name, ", ".join(columns))
)
def init_remove_index(self, diff):
_, index = diff
if sgdb_in(self.migration.conn.engine, ["MySQL", "MariaDB"]):
if index.table.schema in (
"mysql",
"performance_schema",
"percona",
):
return True
if (
self.ignore_migration_for(index.table.schema, index.table.name)
is True
):
return True
self.log_names.append("Drop index %s on %s" % (index.name, index.table))
if self.can_remove_index(index.name):
self.raise_if_withoutautomigration()
else:
return True
def init_add_fk(self, diff):
self.raise_if_withoutautomigration()
_, fk = diff
if self.ignore_migration_for(fk.table.schema, fk.table.name) is True:
return True
from_ = []
to_ = []
for column in fk.columns:
if column.name in self.ignore_migration_for(
fk.table.schema, fk.table.name, []
):
return True
for fk_ in column.foreign_keys:
from_.append("%s.%s" % (fk.table.name, column.name))
to_.append(fk_.target_fullname)
self.log_names.append(
"Add Foreign keys on (%s) => (%s)"
% (", ".join(from_), ", ".join(to_))
)
def init_remove_fk(self, diff):
_, fk = diff
if self.ignore_migration_for(fk.table.schema, fk.table.name) is True:
return True
for column in fk.columns:
if column.name in self.ignore_migration_for(
fk.table.schema, fk.table.name, []
):
return True
for fk_ in column.foreign_keys:
self.log_names.append(
"Drop Foreign keys on %s.%s => %s"
% (fk.table.name, column.name, fk_.target_fullname)
)
if not self.can_remove_fk_constraints(fk.name):
return True
self.raise_if_withoutautomigration()
def init_add_ck(self, diff):
self.raise_if_withoutautomigration()
_, table, ck = diff
if self.ignore_migration_for(ck.table.schema, table) is True:
return True
if ck.table.schema:
table = ck.table.schema + "." + table
self.log_names.append(
"Add check constraint %s on %s" % (ck.name, table)
)
def init_remove_ck(self, diff):
_, table, ck = diff
if self.ignore_migration_for(ck["schema"], table) is True:
return True
if ck["schema"]:
table = ck["schema"] + "." + table
self.log_names.append(
"Drop check constraint %s on %s" % (ck["name"], table)
)
if not self.can_remove_check_constraints(ck["name"]):
return True
self.raise_if_withoutautomigration()
def init_add_constraint(self, diff):
self.raise_if_withoutautomigration()
_, constraint = diff
columns = []
if (
self.ignore_migration_for(
constraint.table.schema, constraint.table.name
)
is True
):
return True
for column in constraint.columns:
columns.append(column.name)
if column.name in self.ignore_migration_for(
constraint.table.schema, constraint.table.name, []
):
return True
self.log_names.append(
"Add unique constraint on %s (%s)"
% (constraint.table.name, ", ".join(columns))
)
def can_remove_column(self):
if self.migration.reinit_columns:
return True
if self.migration.reinit_all:
return True
return False
def init_remove_column(self, diff):
column = diff[3]
if (
self.ignore_migration_for(column.table.schema, column.table.name)
is True
):
return True
msg = "Drop Column %s.%s" % (column.table.name, column.name)
if self.can_remove_column():
self.log_names.append(msg)
self.raise_if_withoutautomigration()
return False
fk_removed = []
for fk in column.foreign_keys:
if not self.can_remove_fk_constraints(fk.name):
# only if the fk is not removable. An FK can come from
# * the DBA / manager: the only reason to destroy it
# * alembic: some constraints change name during the removal
if fk.name not in fk_removed: # pragma: no cover
self.actions.append(("remove_fk", fk.constraint))
fk_removed.append(fk.name)
if column.nullable is False:
self.raise_if_withoutautomigration()
msg += " (not null)"
self.log_names.append(msg)
self.actions.append(
(
"modify_nullable",
column.table.schema,
column.table.name,
column.name,
{},
False,
True,
)
)
return True
self.log_names.append(msg)
return True
def can_remove_table(self, schema):
schemas = self.migration.metadata._schemas
if schema and schema not in schemas:
return False
if self.migration.reinit_tables:
return True
if self.migration.reinit_all:
return True
return False
def init_change_pk(self, diff):
name, table, constraint = diff
raise MigrationException(
(
"Change primary key constraint %s on %s: (%s). "
"AnyBlok can't determine the good action to do "
"for relation ship based on primary key who changed, "
"You must make the migration by your self before."
)
% (name, table, ", ".join([x.name for x in constraint.columns]))
)
def init_remove_table(self, diff):
table = diff[1]
if sgdb_in(self.migration.conn.engine, ["MySQL", "MariaDB"]):
if table.schema in (
"mysql",
"performance_schema",
"percona",
):
return True
table_name = (
"%s.%s" % (table.schema, table.name) if table.schema else table.name
)
self.log_names.append("Drop Table %s" % table_name)
if self.can_remove_table(diff[1].schema):
self.raise_if_withoutautomigration()
else:
return True
def init_modify_type(self, diff):
if self.ignore_migration_for(diff[1], diff[2]) is True:
return True
if diff[3] in self.ignore_migration_for(diff[1], diff[2], []):
return True
selected_plugin = self.get_plugin_for(diff[5], diff[6])
if selected_plugin is not None:
if not selected_plugin.need_to_modify_type():
return True
table = "%s.%s" % diff[1:3] if diff[1] else diff[2]
self.log_names.append(
"Modify column type %s.%s : %s => %s"
% (table, diff[3], diff[5], diff[6])
)
return False
def init_modify_nullable(self, diff):
if self.ignore_migration_for(diff[1], diff[2]) is True:
return True
if diff[3] in self.ignore_migration_for(diff[1], diff[2], []):
return True
table = "%s.%s" % diff[1:3] if diff[1] else diff[2]
self.log_names.append(
"Modify column nullable %s.%s : %s => %s"
% (table, diff[3], diff[5], diff[6])
)
return False
def init_modify_server_default(self, diff):
if self.ignore_migration_for(diff[1], diff[2]) is True:
return True
if diff[3] in self.ignore_migration_for(diff[1], diff[2], []):
return True
table = "%s.%s" % diff[1:3] if diff[1] else diff[2]
self.log_names.append(
"Modify column default %s.%s : %s => %s"
% (table, diff[3], diff[5], diff[6])
)
return False
def init_plugins(self):
"""Get migration plugins from entry points"""
def dialect_sort(plugin):
"""Sort plugins with dialect not None first"""
return (plugin.dialect is None, plugin.dialect)
plugins = sorted(
(
entry_point.load()
for entry_point in iter_entry_points(
MIGRATION_TYPE_PLUGINS_NAMESPACE
)
),
key=dialect_sort,
)
return plugins
def get_plugin_for(self, oldvalue, newvalue):
"""search plugin by column types"""
for plugin in self.plugins:
if isinstance(plugin.dialect, (tuple, list)):
dialects = plugin.dialect
else:
dialects = [plugin.dialect]
if (
issubclass(plugin, MigrationColumnTypePlugin)
and isinstance(oldvalue, plugin.from_type)
and isinstance(newvalue, plugin.to_type)
and (
plugin.dialect is None
or sgdb_in(self.migration.conn.engine, dialects)
)
):
return plugin()
return None
def __init__(self, migration, diffs):
"""Initializer
:param migration: migration instance
:param diffs: diff between the metadata and the database, coming from
Alembic's change detection
"""
self.migration = migration
self.logs = []
self.actions = []
self.diffs = diffs
self.log_names = []
self.plugins = self.init_plugins()
self.ignore_migration_for_table_from_configuration = [
self.migration.loaded_namespaces[x].__tablename__
for x in return_list(
Configuration.get("ignore_migration_for_models")
)
if (
x in self.migration.loaded_namespaces
and self.migration.loaded_namespaces[x].is_sql
)
]
self.ignore_migration_for_schema_from_configuration = return_list(
Configuration.get("ignore_migration_for_schemas")
)
mappers = {
"add_schema": self.init_add_schema,
"add_table": self.init_add_table,
"add_column": self.init_add_column,
"remove_constraint": self.init_remove_constraint,
"add_index": self.init_add_index,
"remove_index": self.init_remove_index,
"add_fk": self.init_add_fk,
"remove_fk": self.init_remove_fk,
"add_ck": self.init_add_ck,
"remove_ck": self.init_remove_ck,
"add_constraint": self.init_add_constraint,
"remove_column": self.init_remove_column,
"remove_table": self.init_remove_table,
"change_pk": self.init_change_pk,
"modify_type": self.init_modify_type,
"modify_nullable": self.init_modify_nullable,
"modify_default": self.init_modify_server_default,
}
for diff in diffs:
if isinstance(diff, list):
self.raise_if_withoutautomigration()
for change in diff:
_, _, table, column, _, _, _ = change
fnct = mappers.get(change[0])
if fnct:
if fnct(change):
continue
else:
logger.warning("Unknow diff: %r", change)
self.log_names.append("Alter %s.%s" % (table, column))
self.actions.append(change)
else:
fnct = mappers.get(diff[0])
if fnct:
if fnct(diff):
continue
else:
logger.warning("Unknow diff: %r", diff)
self.actions.append(diff)
for log_name in self.log_names:
if log_name and not self.log_has(log_name):
self.logs.append(log_name)
def log_has(self, log):
"""return True id the log is present
.. warning:: this method is only used for the unittest
:param log: log sentence expected
"""
return log in self.logs
def apply_change_add_schema(self, action):
_, schema = action
self.migration.schema().add(schema)
def apply_change_add_table(self, action):
_, table = action
if table.schema:
t = self.migration.schema(table.schema).table()
else:
t = self.migration.table()
t.add(table.name, table=table)
def get_migration_table(self, table):
if table.schema:
return self.migration.schema(table.schema).table(table.name)
else:
return self.migration.table(table.name)
def apply_change_add_column(self, action):
_, _, table, column = action
t = self.get_migration_table(column.table)
t.column().add(column)
def apply_change_modify_nullable(self, action):
_, schema, table, column, kwargs, oldvalue, newvalue = action
if schema:
t = self.migration.schema(schema).table(table)
else:
t = self.migration.table(table)
t.column(column).alter(
nullable=newvalue, existing_nullable=oldvalue, **kwargs
)
def apply_change_modify_type(self, action):
_, schema, table, column, kwargs, oldvalue, newvalue = action
if schema:
t = self.migration.schema(schema).table(table)
else:
t = self.migration.table(table)
selected_plugin = self.get_plugin_for(oldvalue, newvalue)
if selected_plugin is not None:
selected_plugin.apply(t.column(column), **kwargs)
else:
t.column(column).alter(
type_=newvalue, existing_type=oldvalue, **kwargs
)
def apply_change_modify_default(self, action):
_, schema, table, column, kwargs, oldvalue, newvalue = action
if schema:
t = self.migration.schema(schema).table(table) # pragma: no cover
else:
t = self.migration.table(table)
t.column(column).alter(
server_default=newvalue, existing_server_default=oldvalue, **kwargs
)
def apply_change_remove_constraint(self, action):
_, constraint = action
if constraint.__class__ is UniqueConstraint:
table = self.get_migration_table(constraint.table)
table.unique(name=constraint.name).drop()
def apply_change_remove_index(self, action):
_, index = action
if not index.unique:
table = self.get_migration_table(index.table)
table.index(name=index.name).drop()
def apply_change_add_fk(self, action):
_, fk = action
t = self.get_migration_table(fk.table)
from_ = []
to_ = []
for column in fk.columns:
for fk_ in column.foreign_keys:
from_.append(column.name)
to_.append(fk_.column)
t.foreign_key(fk.name).add(from_, to_)
def apply_change_add_ck(self, action):
_, table, ck = action
t = self.get_migration_table(ck.table)
t.check(ck.name).add(ck.sqltext)
def apply_change_remove_fk(self, action):
_, fk = action
t = self.get_migration_table(fk.table)
t.foreign_key(fk.name).drop()
def apply_change_remove_ck(self, action):
_, table, ck = action
if ck["schema"]:
t = self.migration.schema(ck["schema"]).table(table)
else:
t = self.migration.table(table)
t.foreign_key(ck["name"]).drop()
def apply_change_add_constraint(self, action):
_, constraint = action
table = self.get_migration_table(constraint.table)
table.unique(name=constraint.name).add(*constraint.columns)
def apply_change_add_index(self, action):
_, constraint = action
table = self.get_migration_table(constraint.table)
table.index().add(*constraint.columns, name=constraint.name)
def apply_remove_table(self, action):
table = self.get_migration_table(action[1])
table.drop()
def apply_remove_column(self, action):
table = self.get_migration_table(action[3].table)
table.column(action[3].name).drop()
def apply_change(self):
"""Apply the migration
this method parses the detected change and calls the Migration
system to apply the change with the api of Declarations
"""
for log in self.logs:
logger.debug(log)
mappers = {
"add_schema": self.apply_change_add_schema,
"add_table": self.apply_change_add_table,
"add_column": self.apply_change_add_column,
"modify_nullable": self.apply_change_modify_nullable,
"modify_type": self.apply_change_modify_type,
"modify_default": self.apply_change_modify_default,
"add_index": self.apply_change_add_index,
"add_fk": self.apply_change_add_fk,
"add_ck": self.apply_change_add_ck,
"add_constraint": self.apply_change_add_constraint,
"remove_constraint": self.apply_change_remove_constraint,
"remove_index": self.apply_change_remove_index,
"remove_fk": self.apply_change_remove_fk,
"remove_ck": self.apply_change_remove_ck,
"remove_table": self.apply_remove_table,
"remove_column": self.apply_remove_column,
}
for action in self.actions:
fnct = mappers.get(action[0])
if fnct:
fnct(action)
class MigrationConstraintForeignKey:
"""Used to apply a migration on a foreign key
You can add::
table.foreign_key('my fk name').add(['my column'], [Blok.name])
Or drop::
table.foreign_key('my fk name').drop()
"""
def __init__(self, table, name):
self.table = table
self.name = name
def add(self, local_columns, remote_columns, **kwargs):
"""Add a new foreign key
:param local_columns: list of the local column names
:param remote_columns: list of the remote model columns
:rtype: MigrationConstraintForeignKey instance
"""
remote_columns = [
x.property.columns[0] if hasattr(x, "property") else x
for x in remote_columns
]
remote_table = set(x.table.name for x in remote_columns)
if len(remote_table) != 1:
raise MigrationException( # pragma: no cover
"Remote column must have the same table "
"(%s)" % ", ".join(remote_table)
)
remote_table = remote_table.pop()
remote_columns_names = [x.name for x in remote_columns]
self.table.migration.operation.create_foreign_key(
self.name,
self.table.name,
remote_table,
local_columns,
remote_columns_names,
source_schema=self.table.schema,
referent_schema=remote_columns[0].table.schema,
**kwargs,
)
return self
def drop(self):
"""Drop the foreign key"""
self.table.migration.operation.drop_constraint(
self.name,
self.table.name,
type_="foreignkey",
schema=self.table.schema,
)
return self
class MigrationColumnTypePlugin:
"""Meta class for column migration type plugin
Must be exposed as entry point in namespace 'anyblok.migration_type.plugins'
:param to_type: Column type value (sqlalchemy.types) as used in Model
classes in source code
:param from_type: Column type value (sqlalchemy.types) as required to
communicate with the DBMS
:param dialect: DB dialect (list of strings or string)
Example::
class BooleanToTinyIntMySQL(MigrationColumnTypePlugin):
to_type = sqlalchemy.types.Boolean
from_type = sqlalchemy.types.TINYINT
dialect = ['MySQL', 'MariaDB']
def need_to_modify_type(self):
return False
def apply(self, column, **kwargs):
'''Boolean are TINYINT in MySQL DataBases'''
# do nothing
pass
"""
to_type = None
from_type = None
dialect = None
def apply(self, column, **kwargs):
"""Apply column migration, this method MUST be overriden in plugins
subclass
"""
raise NotImplementedError() # pragma: no cover
def need_to_modify_type(self, column, **kwargs):
"""If False the type won't be modified"""
return True # pragma: no cover
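# A hedged sketch of how such a plugin could be exposed through the
# MIGRATION_TYPE_PLUGINS_NAMESPACE entry point defined above (the project and
# entry-point names are illustrative, not part of AnyBlok itself):
#
#     setup(
#         ...,
#         entry_points={
#             'anyblok.migration_type.plugins': [
#                 'boolean_to_tinyint=my_project.plugins:BooleanToTinyIntMySQL',
#             ],
#         },
#     )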
class MigrationColumn:
"""get or add a column
Add a new column::
table.column().add(Sqlachemy column)
Get a column::
c = table.column('My column name')
Alter the column::
c.alter(new_column_name='Another column name')
Drop the column::
c.drop()
"""
def __init__(self, table, name):
self.table = table
self.name = name
self.info = {}
if name is not None:
op = self.table.migration.operation
with cnx(self.table.migration) as conn:
columns = op.impl.dialect.get_columns(
conn, self.table.name, schema=table.schema
)
for c in columns:
if c["name"] == name:
self.info.update(c)
if not self.info:
raise MigrationException(
"No column %r found on %r" % (name, self.table.name)
)
def apply_default_value(self, column):
if column.default:
execute = self.table.migration.conn.execute
val = column.default.arg
table = self.table.migration.metadata.tables[self.table.name]
table.append_column(column)
cname = getattr(table.c, column.name)
if column.default.is_callable:
columns = [col for col in table.columns if col.primary_key]
query_count = select(func.count()).select_from(table)
query_count = query_count.where(cname.is_(None))
nb_row = self.table.migration.conn.execute(
query_count
).fetchone()[0]
for offset in range(nb_row):
query = select(*columns)
query = query.where(cname.is_(None))
query = query.limit(1)
res = execute(query).fetchone()
where = []
for index, col in enumerate(columns):
where.append(col == res[index])
if len(where) == 1:
where = where[0]
else:
where = and_(*where)
query_update = update(table)
query_update = query_update.where(where)
query_update = query_update.values({cname: val(None)})
execute(query_update)
else:
query = (
update(table).where(cname.is_(None)).values({cname: val})
)
execute(query)
def add(self, column):
"""Add a new column
The column is added in two phases; the last phase only applies the
nullable flag. If nullable cannot be applied, a warning is logged
:param column: sqlalchemy column
:rtype: MigrationColumn instance
"""
migration = self.table.migration
nullable = column.nullable
if not nullable:
column.nullable = True
# check the table exist
table = (
"%s.%s" % (self.table.schema, self.table.name)
if self.table.schema
else self.table.name
)
table_ = migration.metadata.tables[table]
if sgdb_in(self.table.migration.conn.engine, ["MsSQL"]):
column.table = table_
migration.operation.impl.add_column(
self.table.name, column, schema=self.table.schema
)
self.apply_default_value(column)
if not nullable:
c = MigrationColumn(self.table, column.name)
c.alter(nullable=False)
return MigrationColumn(self.table, column.name)
def alter(self, **kwargs):
"""Alter an existing column
The column is altered in two phases so that a failed nullable change
does not lock the migration
.. warning::
See Alembic alter_column: the existing_* params are used by some
dialects such as MySQL; it is important to fill them in for these dialects
:param new_column_name: New name for the column
:param type_: New sqlalchemy type
:param server_default: The default value in database server
:param nullable: New nullable value
:param comment: New comment value
:rtype: MigrationColumn instance
"""
vals = {}
name = self.name
if "existing_server_default" in kwargs:
esd = kwargs["existing_server_default"]
if esd:
vals["existing_server_default"] = esd.arg
else:
vals["existing_server_default"] = esd
else:
vals["existing_server_default"] = (
self.server_default if "server_default" not in kwargs else None
)
vals.update(
{
"existing_type": kwargs.get(
"existing_type",
self.type if "type_" not in kwargs else None,
),
"existing_autoincrement": (
None
if not sgdb_in(
self.table.migration.conn.engine, ["MySQL", "MariaDB"]
)
else kwargs.get(
"existing_autoincrement",
self.autoincrement
if "autoincrement" not in kwargs
else None,
)
),
"existing_comment": kwargs.get(
"existing_comment",
self.comment if "comment" not in kwargs else None,
),
}
)
vals.update(
{
k: kwargs[k]
for k in ("autoincrement", "server_default", "type_")
if k in kwargs
}
)
if "name" in kwargs:
vals["new_column_name"] = kwargs["name"]
name = kwargs["name"]
if vals:
self.table.migration.operation.alter_column(
self.table.name, self.name, schema=self.table.schema, **vals
)
if "nullable" in kwargs:
nullable = kwargs["nullable"]
vals["existing_nullable"] = (
self.nullable if "nullable" in kwargs else None
)
savepoint = "%s_not_null" % name
try:
self.table.migration.savepoint(savepoint)
self.table.migration.operation.alter_column(
self.table.name,
self.name,
nullable=nullable,
schema=self.table.schema,
**vals,
)
self.table.migration.release_savepoint(savepoint)
except (IntegrityError, OperationalError) as e:
self.table.migration.rollback_savepoint(savepoint)
logger.warning(str(e))
return MigrationColumn(self.table, name)
def drop(self):
"""Drop the column"""
self.table.migration.operation.drop_column(
self.table.name, self.name, schema=self.table.schema
)
@property
def nullable(self):
"""Use for unittest return if the column is nullable"""
return self.info.get("nullable", None)
@property
def type(self):
"""Use for unittest: return the column type"""
return self.info.get("type", None)
@property
def server_default(self):
"""Use for unittest: return the default database value"""
sdefault = self.info.get("default", None)
if sgdb_in(self.table.migration.conn.engine, ["MySQL", "MariaDB"]):
if sdefault:
if not isinstance(sdefault, str):
return sdefault.arg # pragma: no cover
elif sdefault is None:
return None # pragma: no cover
else:
return text(sdefault)
return sdefault
@property
def comment(self):
"""Use for unittest: return the default database value"""
return self.info.get("comment", None)
@property
def autoincrement(self):
"""Use for unittest: return the default database value"""
table_name = (
"%s.%s" % (self.table.schema, self.table.name)
if self.table.schema
else self.table.name
)
table = self.table.migration.metadata.tables[table_name]
primary_keys = [x.name for x in table.primary_key.columns]
if self.name in primary_keys:
return False # pragma: no cover
return self.info.get("autoincrement", None)
class MigrationConstraintCheck:
"""Used for the Check constraint
Add a new constraint::
table('My table name').check('check_my_column').add('mycolumn > 5')
Get and drop the constraint::
table('My table name').check('check_my_column').drop()
"""
def __init__(self, table, name):
self.table = table
self.name = name
# TODO: the dialect has no method to check whether the constraint exists
def add(self, condition):
"""Add the constraint
:param condition: constraint to apply
:rtype: MigrationConstraintCheck instance
"""
self.table.migration.operation.create_check_constraint(
self.name, self.table.name, condition, schema=self.table.schema
)
return self
def drop(self):
"""Drop the constraint"""
self.table.migration.operation.drop_constraint(
self.name, self.table.name, type_="check", schema=self.table.schema
)
class MigrationConstraintUnique:
"""Used for the Unique constraint
Add a new constraint::
table('My table name').unique('constraint name').add('col1', 'col2')
Get and drop the constraint::
table('My table name').unique('constraint name').drop()
Let AnyBlok define the name of the constraint::
table('My table name').unique(None).add('col1', 'col2')
"""
def __init__(self, table, name):
self.table = table
self.name = name
def add(self, *columns):
"""Add the constraint
:param *columns: list of SQLalchemy column
:rtype: MigrationConstraintUnique instance
:exception: MigrationException
"""
if not columns:
raise MigrationException( # pragma: no cover
"""To add an unique constraint you """
"""must define one or more columns"""
)
columns_name = [x.name for x in columns]
savepoint = "uq_%s" % (self.name or "")
try:
self.table.migration.savepoint(savepoint)
self.table.migration.operation.create_unique_constraint(
self.name,
self.table.name,
columns_name,
schema=self.table.schema,
)
self.table.migration.release_savepoint(savepoint)
except (IntegrityError, OperationalError) as e:
self.table.migration.rollback_savepoint(savepoint)
logger.warning(
"Error during the add of new unique constraint %r "
"on table %r and columns %r : %r "
% (self.name, self.table.name, columns_name, str(e))
)
return self
def drop(self):
"""Drop the constraint"""
self.table.migration.operation.drop_constraint(
self.name, self.table.name, type_="unique", schema=self.table.schema
)
class MigrationConstraintPrimaryKey:
"""Used for the primary key constraint
Add a new constraint::
table('My table name').primarykey().add('col1', 'col2')
Get and drop the constraint::
table('My table name').primarykey().drop()
"""
def __init__(self, table):
self.table = table
self.name = self.format_name()
def format_name(self, *columns):
return "anyblok_pk_%s" % self.table.name
def add(self, *columns):
"""Add the constraint
:param *columns: list of SQLalchemy column
:rtype: MigrationConstraintPrimaryKey instance
:exception: MigrationException
"""
if not columns:
raise MigrationException( # pragma: no cover
"""To add a primary key constraint """
"""you must define one or more columns"""
)
if sgdb_in(self.table.migration.conn.engine, ["MsSQL"]):
for column in columns: # pragma: no cover
if column.nullable:
column.alter(nullable=False)
columns_name = [x.name for x in columns]
self.table.migration.operation.create_primary_key(
self.name, self.table.name, columns_name
)
return self
def drop(self):
"""Drop the constraint"""
self.table.migration.operation.drop_constraint(
self.name, self.table.name, type_="primary"
)
return self
class MigrationIndex:
"""Used for the index constraint
Add a new constraint::
table('My table name').index().add('col1', 'col2')
Get and drop the constraint::
table('My table name').index('col1', 'col2').drop()
"""
def __init__(self, table, *columns, **kwargs):
self.table = table
if "name" in kwargs:
self.name = kwargs["name"]
else:
self.name = self.format_name(*columns)
self.exist = False
if self.name is not None:
op = self.table.migration.operation
with cnx(self.table.migration) as conn:
indexes = op.impl.dialect.get_indexes(
conn, self.table.name, schema=self.table.schema
)
for i in indexes:
if i["name"] == self.name:
self.exist = True
if not self.exist:
raise MigrationException( # pragma: no cover
"No index %r found on %r" % (self.name, self.table.name)
)
def format_name(self, *columns):
if columns:
cols = [x.name for x in columns]
cols.sort()
cols = "_".join(cols)
return "idx_%s_on_%s" % (cols, self.table.name)
return None
def add(self, *columns, **kwargs):
"""Add the constraint
:param *columns: list of SQLalchemy column
:param **kwargs: other attributes for the __init__
:rtype: MigrationIndex instance
:exception: MigrationException
"""
if not columns:
raise MigrationException( # pragma: no cover
"To add an index you must define one or more columns"
)
index_name = kwargs.get("name", self.format_name(*columns))
columns_name = [x.name for x in columns]
self.table.migration.operation.create_index(
index_name, self.table.name, columns_name, schema=self.table.schema
)
return MigrationIndex(self.table, *columns, **kwargs)
def drop(self):
"""Drop the constraint"""
self.table.migration.operation.drop_index(
self.name, table_name=self.table.name, schema=self.table.schema
)
class MigrationTable:
"""Use to manipulate tables
Add a table::
table().add('New table')
Get an existing table::
t = table('My table name')
Alter the table::
t.alter(name='Another table name')
Drop the table::
t.drop()
"""
def __init__(self, migration, name, schema=None):
self.name = name
self.migration = migration
self.schema = schema
if name is not None:
with cnx(self.migration) as conn:
has_table = migration.operation.impl.dialect.has_table
if not has_table(conn, name, schema=schema):
raise MigrationException("No table %r found" % name)
def add(self, name, table=None):
"""Add a new table
:param name: name of the table
:param table: an existing instance of the table to create
:rtype: MigrationTable instance
"""
if table is not None:
if table.schema != self.schema:
raise MigrationException( # pragma: no cover
"The schema of the table (%r.%r) and the MigrationTable %r"
"instance are not the same"
% (table.schema, table.name, self.schema)
)
self.migration.metadata.create_all(
bind=self.migration.conn, tables=[table]
)
else:
self.migration.operation.create_table(name, schema=self.schema)
return MigrationTable(self.migration, name, self.schema)
def column(self, name=None):
"""Get Column
:param name: Column name
:rtype: MigrationColumn instance
"""
return MigrationColumn(self, name)
def drop(self):
"""Drop the table"""
self.migration.operation.drop_table(self.name, schema=self.schema)
def index(self, *columns, **kwargs):
"""Get index
:param *columns: List of the column's name
:rtype: MigrationIndex instance
"""
return MigrationIndex(self, *columns, **kwargs)
def unique(self, name):
"""Get unique
:param name: str name of the unique constraint
:rtype: MigrationConstraintUnique instance
"""
return MigrationConstraintUnique(self, name)
def check(self, name=None):
"""Get check
:param name: str name of the check constraint
:rtype: MigrationConstraintCheck instance
"""
return MigrationConstraintCheck(self, name)
def primarykey(self):
"""Get primary key
:rtype: MigrationConstraintPrimaryKey instance
"""
return MigrationConstraintPrimaryKey(self)
def alter(self, **kwargs):
"""Atler the current table
:param name: New table name
:rtype: MigrationTable instance
:exception: MigrationException
"""
if "name" not in kwargs:
raise MigrationException(
"Table can only alter name"
) # pragma: no cover
name = kwargs["name"]
self.migration.operation.rename_table(
self.name, name, schema=self.schema
)
return MigrationTable(self.migration, name, schema=self.schema)
def foreign_key(self, name):
"""Get a foreign key
:rtype: MigrationConstraintForeignKey instance
"""
return MigrationConstraintForeignKey(self, name)
class MigrationSchema:
"""Use to manipulate tables
Add a Schema::
schema().add('New schema')
Get an existing schema::
s = schema('My schema name')
Alter the schema::
s.alter(name='Another schema name')
Drop the schema::
s.drop()
"""
def __init__(self, migration, name):
self.name = name
self.migration = migration
if name is not None:
if not self.has_schema():
raise MigrationException("No schema %r found" % self.name)
def has_schema(self):
with cnx(self.migration) as conn:
if sgdb_in(conn.engine, ["MySQL", "MariaDB", "MsSQL"]):
query = """
SELECT count(*)
FROM INFORMATION_SCHEMA.SCHEMATA
WHERE SCHEMA_name=:schema_name
"""
return conn.execute(
text(query).bindparams(schema_name=self.name)
).fetchone()[0]
else:
return self.migration.operation.impl.dialect.has_schema(
conn, self.name
)
def add(self, name):
"""Add a new schema
:param name: name of the schema
:rtype: MigrationSchema instance
"""
with cnx(self.migration) as conn:
conn.execute(CreateSchema(name))
return MigrationSchema(self.migration, name)
def table(self, name=None):
"""Get a table
:rtype: MigrationTable instance
"""
return MigrationTable(self.migration, name, schema=self.name)
def alter(self, name=None):
"""Atler the current table
:param name: New schema name
:rtype: MigrationSchema instance
:exception: MigrationException
"""
with cnx(self.migration) as conn:
conn.execute(AlterSchema(self.name, name))
return MigrationSchema(self.migration, name)
def drop(self, cascade=False):
"""Drop the schema"""
with cnx(self.migration) as conn:
conn.execute(DropSchema(self.name, cascade=cascade))
class Migration:
"""Migration Main entry
This class gives access to all the migration helper classes::
migration = Migration(Session(), Base.Metadata)
t = migration.table('My table name')
c = t.column('My column name from t')
"""
def __init__(self, registry):
self.withoutautomigration = registry.withoutautomigration
self.conn = registry.connection()
self.loaded_namespaces = registry.loaded_namespaces
self.loaded_views = registry.loaded_views
self.metadata = registry.declarativebase.metadata
self.ddl_compiler = self.conn.dialect.ddl_compiler(
self.conn.dialect, None
)
self.ignore_migration_for = registry.ignore_migration_for
opts = {
"include_schemas": True,
"compare_server_default": True,
"render_item": self.render_item,
"compare_type": self.compare_type,
}
self.context = MigrationContext.configure(self.conn, opts=opts)
self.operation = Operations(self.context)
self.reinit_all = Configuration.get("reinit_all", False)
self.reinit_tables = Configuration.get("reinit_tables", False)
self.reinit_columns = Configuration.get("reinit_columns", False)
self.reinit_indexes = Configuration.get("reinit_indexes", False)
self.reinit_constraints = Configuration.get("reinit_constraints", False)
def table(self, name=None, schema=None):
"""Get a table
:param name: default None, name of the table
:param schema: default None, name of the schema
:rtype: MigrationTable instance
"""
return MigrationTable(self, name=name, schema=schema)
def schema(self, name=None):
"""Get a table
:rtype: MigrationSchema instance
"""
return MigrationSchema(self, name)
def auto_upgrade_database(self, schema_only=False):
"""Upgrade the database automaticly"""
report = self.detect_changed(schema_only=schema_only)
report.apply_change()
def detect_changed(self, schema_only=False):
"""Detect the difference between the metadata and the database
:rtype: MigrationReport instance
"""
inspector = inspect(self.conn)
if schema_only:
diff = self.detect_added_new_schema(inspector)
else:
diff = compare_metadata(self.context, self.metadata)
diff.extend(
self.detect_undetected_constraint_from_alembic(inspector)
)
return MigrationReport(self, diff)
def detect_added_new_schema(self, inspector):
diff = []
schemas = self.metadata._schemas
reflected_schemas = set(inspector.get_schema_names())
added_schemas = schemas - reflected_schemas
for schema in added_schemas:
diff.append(("add_schema", schema))
return diff
def detect_undetected_constraint_from_alembic(self, inspector):
diff = []
diff.extend(self.detect_check_constraint_changed(inspector))
diff.extend(self.detect_pk_constraint_changed(inspector))
return diff
def check_constraint_is_same(self, reflected_constraint, constraint):
"""the goal is to detect if contrainst changed when the name is long
SQLAlchemy trunkated the name if function of database type (
postgres 63 characters)
this method check if the truncated name is the same that no truncated
name and if the constraint text is the same: return True else False
"""
truncated_name = self.ddl_compiler.preparer.format_constraint(
constraint
)
if truncated_name == reflected_constraint["name"]:
return True
return False # pragma: no cover
def detect_check_constraint_changed(self, inspector):
if sgdb_in(self.conn.engine, ["MySQL", "MariaDB", "MsSQL"]):
# MySQL doesn't return the reflected constraints
return []
diff = []
schemas = list(self.metadata._schemas)
schemas.append(None)
for schema in schemas:
for table in inspector.get_table_names(schema=schema):
table_ = "%s.%s" % (schema, table) if schema else table
if table_ not in self.metadata.tables:
continue
reflected_constraints = {
ck["name"]: ck
for ck in inspector.get_check_constraints(
table, schema=schema
)
}
constraints = {
ck.name: ck
for ck in self.metadata.tables[table_].constraints
if isinstance(ck, CheckConstraint)
if ck.name != "_unnamed_"
}
todrop = set(reflected_constraints.keys()) - set(
constraints.keys()
)
toadd = set(constraints.keys()) - set(
reflected_constraints.keys()
)
# check that a constraint has not been truncated
todrop_ = todrop.copy()
for x in todrop_:
for y in toadd:
if self.check_constraint_is_same(
reflected_constraints[x], constraints[y]
):
toadd.remove(y)
todrop.remove(x)
break
for ck in todrop:
ck_ = reflected_constraints[ck]
ck_["schema"] = schema
diff.append(("remove_ck", table, ck_))
for ck in toadd:
diff.append(("add_ck", table, constraints[ck]))
return diff
def detect_pk_constraint_changed(self, inspector):
diff = []
schemas = list(self.metadata._schemas)
schemas.append(None)
for schema in schemas:
for table in inspector.get_table_names(schema=schema):
table_ = "%s.%s" % (schema, table) if schema else table
if table_ not in self.metadata.tables:
continue
reflected_constraint = inspector.get_pk_constraint(
table, schema=schema
)
constraint = [
pk
for pk in self.metadata.tables[table_].constraints
if isinstance(pk, PrimaryKeyConstraint)
][0]
reflected_columns = set(
reflected_constraint["constrained_columns"]
)
columns = set(x.name for x in constraint.columns)
if columns != reflected_columns:
diff.append(("change_pk", table, constraint))
return diff
def savepoint(self, name=None):
"""Add a savepoint
:param name: name of the save point
:rtype: return the name of the save point
"""
if sgdb_in(self.conn.engine, ["MySQL", "MariaDB"]):
logger.warning(
"Try to create a SAVEPOINT, but %r don't have this "
"functionality" % self.conn.engine.dialect
)
return
return self.conn._savepoint_impl(name=name)
def rollback_savepoint(self, name):
"""Rollback to the savepoint
:param name: name of the savepoint
"""
if sgdb_in(self.conn.engine, ["MySQL", "MariaDB"]):
logger.warning(
"Try to ROLLBACK TO SAVEPOINT, but %r don't have this "
"functionality" % self.conn.engine.dialect
)
return
self.conn._rollback_to_savepoint_impl(name)
def release_savepoint(self, name):
"""Release the save point
:param name: name of the savepoint
"""
if sgdb_in(self.conn.engine, ["MySQL", "MariaDB"]):
logger.warning(
"Try to RELEASE SAVEPOINT, but %r don't have this "
"functionality" % self.conn.engine.dialect
)
return
self.conn._release_savepoint_impl(name)
def render_item(self, type_, obj, autogen_context):
logger.debug("%r, %r, %r" % (type_, obj, autogen_context))
return False # pragma: no cover
def compare_type(
self,
context,
inspected_column,
metadata_column,
inspected_type,
metadata_type,
):
if hasattr(metadata_type, "compare_type"):
return metadata_type.compare_type(
inspected_type
) # pragma: no cover
return None
| AnyBlok | /AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/migration.py | migration.py |
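Putting the helpers of migration.py together, a usage sketch based on the docstrings above (registry is an assembled AnyBlok registry; the table and column names are placeholders)::

    migration = Migration(registry)

    # fetch an existing table and one of its columns
    t = migration.table('my_table')
    c = t.column('my_column')

    # alter them through the fluent helpers
    c.alter(nullable=True)
    t.alter(name='my_renamed_table')

    # or let the report machinery detect and apply every change at once
    migration.auto_upgrade_database()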
from pkg_resources import iter_entry_points
from logging import getLogger
from .schema import ( # noqa
ForeignKeyConstraint,
UniqueConstraint,
PrimaryKeyConstraint,
CheckConstraint,
Index,
)
logger = getLogger(__name__)
def load_init_function_from_entry_points(unittest=False):
"""Call all the entry points ``anyblok_pyramid.init`` to update
the argument setting
The callable needs a dict of entry points as parameter::
def init_function(unittest=False):
...
Entry points are defined in the setup.py file::
setup(
...,
entry_points={
'anyblok.init': [
init_function=path:init_function,
...
],
},
...,
)
"""
for i in iter_entry_points("anyblok.init"): # pragma: no cover
print("AnyBlok Load init: %r" % i)
i.load()(unittest=unittest)
def configuration_post_load(unittest=False):
"""Call all the entry points defined as ``anyblok_configuration.post_load``
to initialize some services depending on the configuration
Each callable receives the ``unittest`` keyword argument::
def post_load_function(unittest=False):
...
Entry points are defined in the setup.py file::
setup(
...,
entry_points={
'anyblok_configuration.post_load': [
post_load_function=path:post_load_function,
...
],
},
...,
)
"""
for i in iter_entry_points("anyblok_configuration.post_load"):
logger.info("AnyBlok configuration post load: %r" % i)
i.load()(unittest=unittest) # pragma: no cover
def start(
processName,
entry_points=None,
useseparator=False,
loadwithoutmigration=False,
config=None,
**kwargs,
):
"""Function used to initialize the application
::
registry = start('My application',
entry_points=['AnyBlok'])
:param processName: Name of the application
:param entry_points: entry points from which the bloks are loaded
:param useseparator: boolean, indicates whether the configuration options
are split between two applications
:param loadwithoutmigration: if True, no migration operation will be done
:param config: dict of configuration parameters
:rtype: registry if a database name is in the configuration, else None
"""
from .blok import BlokManager
from .config import Configuration
from .registry import RegistryManager
load_init_function_from_entry_points()
if config is None:
config = {}
Configuration.load(processName, useseparator=useseparator, **config)
configuration_post_load()
if entry_points:
BlokManager.load(entry_points=entry_points) # pragma: no cover
else:
BlokManager.load()
db_name = Configuration.get("db_name")
logger.debug("start(): db_name=%r", db_name)
if not db_name:
logger.warning(
"start(): no database name in configuration, " "bailing out"
)
return None # pragma: no cover
registry = RegistryManager.get(
db_name, loadwithoutmigration=loadwithoutmigration, **kwargs
)
registry.commit()
return registry
from .declarations import Declarations # noqa
from . import core # noqa
from . import model # noqa
from . import mixin # noqa
from .authorization import binding # noqa
from .imp import reload_module_if_blok_is_reloading # noqa
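# A hedged example of bootstrapping an application with the ``start`` function
# defined above. It assumes the database name and connection settings are
# provided through the usual AnyBlok configuration (command line arguments or
# configuration file); the application name is illustrative.
def _example_start():  # pragma: no cover
    registry = start("My application")
    if registry is None:
        # no db_name in the configuration, see the warning in start()
        return None
    # the registry gives access to the assembled models, e.g. registry.System.Blok
    return registry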
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/__init__.py
|
__init__.py
|
import threading
from inspect import ismethod
class EnvironmentException(AttributeError):
"""Exception for the Environment"""
class EnvironmentManager:
"""Manage the Environment for an application"""
environment = None
@classmethod
def define_environment_cls(cls, Environment):
"""Define the class used for the environment
:param Environment: class of environment
:exception: EnvironmentException
"""
def check_classmethod(method, acceptNone=False):
if not hasattr(Environment, method):
raise EnvironmentException("No %r found" % method)
m = getattr(Environment, method)
if m is None:
if acceptNone:
return
if ismethod(m):
return
raise EnvironmentException("%r must be a class method" % m)
check_classmethod("scoped_function_for_session", acceptNone=True)
check_classmethod("setter")
check_classmethod("getter")
cls.environment = Environment
@classmethod
def set(cls, key, value):
"""Save the value of the key in the environment
:param key: the key of the value to save
:param value: the value to save
:exception: EnvironmentException
"""
if cls.environment is None:
raise EnvironmentException("No environments defined")
cls.environment.setter(key, value)
@classmethod
def get(cls, key, default=None):
"""Load the value of the key in the environment
:param key: the key of the value to load
:param default: return this value if not value loaded for the key
:rtype: the value of the key
:exception: EnvironmentException
"""
if cls.environment is None:
raise EnvironmentException("No environments defined")
return cls.environment.getter(key, default)
@classmethod
def scoped_function_for_session(cls):
"""Save the value of the key in the environment"""
return cls.environment.scoped_function_for_session
class ThreadEnvironment:
"""Use the thread, to get the environment"""
scoped_function_for_session = None
""" No scoped function here because for none value sqlalchemy already uses
a thread to save the session """
values = {}
@classmethod
def setter(cls, key, value):
"""Save the value of the key in the environment
:param key: the key of the value to save
:param value: the value to save
"""
if str(threading.current_thread()) not in cls.values:
cls.values[str(threading.current_thread())] = {}
cls.values[str(threading.current_thread())][key] = value
@classmethod
def getter(cls, key, default):
"""Get the value of the key in the environment
:param key: the key of the value to retrieve
:param default: return this value if no value loaded for the key
:rtype: the value of the key
"""
if str(threading.current_thread()) not in cls.values:
return default
return cls.values[str(threading.current_thread())].get(key, default)
EnvironmentManager.define_environment_cls(ThreadEnvironment)
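if __name__ == "__main__":  # pragma: no cover
    # Small sketch of the API defined above: values are stored per thread by
    # the default ThreadEnvironment. The key names are purely illustrative.
    EnvironmentManager.set("current_user", "admin")
    assert EnvironmentManager.get("current_user") == "admin"
    # an unknown key falls back on the provided default
    assert EnvironmentManager.get("unknown_key", default="fallback") == "fallback"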
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/environment.py
|
environment.py
|
from graphviz import Digraph
class BaseSchema:
"""Common class extended by the type of schema"""
def __init__(self, name, format="png"):
self.name = name
self.format = format
self._nodes = {}
self._edges = {}
self.count = 0
def add_edge(self, cls_1, cls_2, attr=None):
"""Add a new edge between two nodes
::
dot.add_edge(node1, node2)
:param cls_1: node (string or object) - from
:param cls_2: node (string or object) - to
:param attr: attribute of the edge
"""
cls_1 = cls_1 if isinstance(cls_1, str) else cls_1.name
cls_2 = cls_2 if isinstance(cls_2, str) else cls_2.name
self.count += 1
self._edges["%s_%s_2_%d" % (cls_1, cls_2, self.count)] = {
"from": cls_1,
"to": cls_2,
"attr": {} if attr is None else attr,
}
def render(self):
"""Call graphviz to create the schema"""
self.dot = Digraph(
name=self.name,
format=self.format,
node_attr={
"shape": "record",
"style": "filled",
"fillcolor": "gray95",
},
)
for _, cls in self._nodes.items():
cls.render(self.dot)
for _, edge in self._edges.items():
self.dot.edge(edge["from"], edge["to"], _attributes=edge["attr"])
def save(self):
"""Render and create the output file"""
self.render()
self.dot.render(self.name)
class TableSchema:
"""Describe one table"""
def __init__(self, name, parent, islabel=False):
self.name = name
self.parent = parent
self.islabel = islabel
self.column = []
def render(self, dot):
"""Call graphviz to create the schema"""
if self.islabel:
label = "{%s}" % self.name
else:
column = "\\n".join(self.column)
label = "{%s|%s}" % (self.name, column)
dot.node(self.name, label=label)
def add_column(self, name, type_, primary_key=False):
"""Add a new column to the table
:param name: the name of the column
:param type_: the type of the column
:param primary_key: if True, 'PK' argument will be added
"""
self.column.append(
"%s%s (%s)" % ("PK " if primary_key else "", name, type_)
)
def add_foreign_key(self, node, label=None, nullable=True):
"""Add a new foreign key
:param node: node (string or object) of the table attached
:param label: name of the column to add the foreign key to
TODO: i did not understand the explanation of 'nullable' parameter
:param nullable: boolean to select the multiplicity of the association
"""
self.parent.add_foreign_key(self, node, label, nullable)
class SQLSchema(BaseSchema):
"""Create a schema to display the table model
::
dot = SQLSchema('the name of my schema')
t1 = dot.add_table('Table 1')
t1.add_column('c1', 'Integer')
t1.add_column('c2', 'Integer')
t2 = dot.add_table('Table 2')
t2.add_column('c1', 'Integer')
t2.add_foreign_key(t1, 'c2')
dot.save()
"""
def add_table(self, name):
"""Add a new node TableSchema with columns
:param name: the name of the table
:rtype: returns an instance of TableSchema
"""
tmp = TableSchema(name, self)
self._nodes[name] = tmp
return tmp
def add_label(self, name):
"""Add a new node TableSchema without column
:param name: the name of the table
:rtype: returns an instance of TableSchema
"""
tmp = TableSchema(name, self, islabel=True)
self._nodes[name] = tmp
return tmp
def get_table(self, name):
"""Return the instance of TableSchema linked to the table name given
:param name: the name of the table
:rtype: return an instance of TableSchema
"""
return self._nodes.get(name)
def add_foreign_key(self, cls_1, cls_2, label=None, nullable=False):
multiplicity = "0..1" if nullable else "1"
hlabel = "%s (%s)" % (label, multiplicity) if label else multiplicity
self.add_edge(
cls_1,
cls_2,
attr={
"arrowhead": "none",
"headlabel": hlabel,
},
)
class ClassSchema:
"""Used to display a class"""
def __init__(self, name, parent, islabel=False):
self.name = name
self.parent = parent
self.islabel = islabel
self.properties = []
self.column = []
self.method = []
def extend(self, node):
"""Add an edge with extended shape to the node
:param node: node (string or object)
"""
self.parent.add_extend(self, node)
def strong_aggregate(
self,
node,
label_from=None,
multiplicity_from=None,
label_to=None,
multiplicity_to=None,
):
"""Add an edge with strong aggregate shape to the node
:param node: node (string or object)
:param label_from: the name of the attribute
:param multiplicity_from: multiplicity of the attribute
:param label_to: the name of the attribute
:param multiplicity_to: multiplicity of the attribute
"""
self.parent.add_strong_aggregation(
self, node, label_from, multiplicity_from, label_to, multiplicity_to
)
def aggregate(
self,
node,
label_from=None,
multiplicity_from=None,
label_to=None,
multiplicity_to=None,
):
"""Add an edge with aggregate shape to the node
:param node: node (string or object)
:param label_from: the name of the attribute
:param multiplicity_from: multiplicity of the attribute
:param label_to: the name of the attribute
:param multiplicity_to: multiplicity of the attribute
"""
self.parent.add_aggregation(
self, node, label_from, multiplicity_from, label_to, multiplicity_to
)
def associate(
self,
node,
label_from=None,
multiplicity_from=None,
label_to=None,
multiplicity_to=None,
):
"""Add an edge with associate shape to the node
:param node: node (string or object)
:param label_from: the name of the attribute
:param multiplicity_from: multiplicity of the attribute
:param label_to: the name of the attribute
:param multiplicity_to: multiplicity of the attribute
"""
self.parent.add_association(
self, node, label_from, multiplicity_from, label_to, multiplicity_to
)
def add_property(self, name):
"""Add a property to the class
:param name: the name of the property
"""
self.properties.append(name)
def add_column(self, name):
"""Add a column to the class
:param name: the name of the column
"""
self.column.append(name)
def add_method(self, name):
"""Add a method to the class
:param name: the name of the method
"""
self.method.append(name)
def render(self, dot):
"""Call graphviz to create the schema"""
if self.islabel:
label = "{%s}" % self.name
else:
properties = "\\n".join(self.properties)
column = "\\n".join(self.column)
method = "\\n".join("%s()" % x for x in self.method)
label = "{%s|%s|%s|%s}" % (self.name, properties, column, method)
dot.node(self.name, label=label)
class ModelSchema(BaseSchema):
"""Create a schema to display the UML model
::
dot = ModelSchema('The name of my UML schema')
cls = dot.add_class('My class')
cls.add_method('insert')
cls.add_property('items')
cls.add_column('my column')
dot.save()
"""
def add_class(self, name):
"""Add a new node ClassSchema with column
:param name: the name of the class
:rtype: return an instance of ClassSchema
"""
tmp = ClassSchema(name, self)
self._nodes[name] = tmp
return tmp
def add_label(self, name):
"""Return an instance of ClassSchema linked to the class name given
:param name: the name of the class
:rtype: return an instance of ClassSchema
"""
tmp = ClassSchema(name, self, islabel=True)
self._nodes[name] = tmp
return tmp
def get_class(self, name):
"""Add a new node ClassSchema without column
:param name: the name of the class
:rtype: return an instance of ClassSchema
"""
return self._nodes.get(name)
def add_extend(self, cls_1, cls_2):
"""Add edge to extend
:param cls_1: the name of the class 1
:param cls_2: the name of the class 2
"""
self.add_edge(
cls_1,
cls_2,
attr={
"dir": "back",
"arrowtail": "empty",
},
)
def add_aggregation(
self,
cls_1,
cls_2,
label_from=None,
multiplicity_from=None,
label_to=None,
multiplicity_to=None,
):
"""Add edge for aggregation
:param cls_1: the name of the class 1
:param cls_2: the name of the class 2
:param label_from: attribute name
:param multiplicity_from: multiplicity of the attribute
:param label_to: attribute name
:param multiplicity_to: multiplicity of the attribute
:return:
"""
label_from, label_to = self.format_label(
label_from, multiplicity_from, label_to, multiplicity_to
)
if not cls_1 or not cls_2:
return # pragma: no cover
self.add_edge(
cls_1,
cls_2,
attr={
"dir": "back",
"arrowtail": "odiamond",
"headlabel": label_from,
"taillabel": label_to,
},
)
def add_strong_aggregation(
self,
cls_1,
cls_2,
label_from=None,
multiplicity_from=None,
label_to=None,
multiplicity_to=None,
):
"""Add edge for strong aggregation
:param cls_1: the name of the class 1
:param cls_2: the name of the class 2
:param label_from: attribute name
:param multiplicity_from: multiplicity of the attribute
:param label_to: attribute name
:param multiplicity_to: multiplicity of the attribute
:return:
"""
label_from, label_to = self.format_label(
label_from, multiplicity_from, label_to, multiplicity_to
)
self.add_edge(
cls_1,
cls_2,
attr={
"dir": "back",
"arrowtail": "diamond",
"headlabel": label_from,
"taillabel": label_to,
},
)
@staticmethod
def format_label(label_from, multiplicity_from, label_to, multiplicity_to):
def _format_label(label, multiplicity):
if label:
if multiplicity:
return "%s (%s)" % (label, multiplicity)
return label
else:
if multiplicity:
return multiplicity
return
return (
_format_label(label_from, multiplicity_from),
_format_label(label_to, multiplicity_to),
)
def add_association(
self,
cls_1,
cls_2,
label_from=None,
multiplicity_from=None,
label_to=None,
multiplicity_to=None,
):
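"""Add edge for association
:param cls_1: the name of the class 1
:param cls_2: the name of the class 2
:param label_from: attribute name
:param multiplicity_from: multiplicity of the attribute
:param label_to: attribute name
:param multiplicity_to: multiplicity of the attribute
"""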
label_from, label_to = self.format_label(
label_from, multiplicity_from, label_to, multiplicity_to
)
self.add_edge(
cls_1,
cls_2,
attr={
"arrowhead": "none",
"headlabel": label_from,
"taillabel": label_to,
},
)
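if __name__ == "__main__":  # pragma: no cover
    # Illustrative sketch combining the two schema builders defined above.
    # The class and table names are made up, and rendering the output files
    # requires the graphviz binaries to be installed.
    uml = ModelSchema("example_uml")
    person = uml.add_class("Person")
    person.add_column("name")
    person.add_method("insert")
    address = uml.add_class("Address")
    address.add_column("street")
    # one Person aggregates many Addresses
    person.aggregate(
        address,
        label_from="addresses",
        multiplicity_from="0..n",
        label_to="person",
        multiplicity_to="1",
    )
    uml.save()
    sql = SQLSchema("example_sql")
    t_person = sql.add_table("person")
    t_person.add_column("id", "Integer", primary_key=True)
    t_address = sql.add_table("address")
    t_address.add_column("id", "Integer", primary_key=True)
    t_address.add_column("person_id", "Integer")
    t_address.add_foreign_key(t_person, label="person_id", nullable=True)
    sql.save()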
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/_graphviz.py
|
_graphviz.py
|
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2020 Pierre Verkest <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
Test blok16
===========
Test Blok16 is used to validate the order of calls of the following methods:
* pre_migration
* post_migration
* update
* update_demo
* uninstall
* uninstall_demo
In different cases, playing with the following options:
* with-demo system parameter
* withoutautomigration
* loadwithoutmigration
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/test_bloks/test_blok16/README.rst
|
README.rst
|
from .base import AuthorizationRule, RuleNotForModelClasses
class AttributeAccessRule(AuthorizationRule):
"""Grant authorization to principals coinciding with a record attribute.
Whatever the permission is associated to this policy, it will be granted
to principals on records whose attribute is equal to the principal.
A common use-case is to associate it to a precise permission, in
conjunction with a flatter default policy, such as
:class:`..model_authz.ModelBasedAutorizationRule`
"""
def __init__(self, attr, model_rule=None):
""".
:param attr: The attribute that is being compared with principal.
:param model_rule: If set, checks done on model classes will be
relayed to this other rule. Otherwise, the
standard exception that the rule does not apply
to model classes is raised.
The ``model_rule`` makes it convenient to express that some
principal has a "general" Read right on some model,
while still allowing it to read only some of the
records, and to protect the querying by the same
'Read' permission. Similar and finer effects can
be obtained by creating a separate 'Search' permission, but that may
not be appropriate in a given context.
"""
self.attr = attr
self.model_rule = model_rule
self._registry = None
@property
def registry(self):
"""On this rule, we'll need a setter for registry"""
return self._registry # pragma: no cover
@registry.setter
def registry(self, registry):
"""Apply registry also to model_rule if needed"""
self._registry = registry
if self.model_rule is not None:
self.model_rule.registry = registry
def check(self, record, principals, permission):
if isinstance(record, type):
if self.model_rule is not None:
return self.model_rule.check(record, principals, permission)
raise RuleNotForModelClasses(self, record)
return getattr(record, self.attr) in principals
def filter(self, model, query, principals, permission):
return query.filter(getattr(model, self.attr).in_(principals))
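# A hedged illustration of combining this rule with the flat model rule.
# The attribute name "owner" is an assumption for the example; in a real
# application the rule is bound to a Model through the authorization
# declarations and its ``registry`` attribute is set during assembly.
def _example_owner_rule():  # pragma: no cover
    from anyblok.authorization.rule.modelaccess import ModelAccessRule
    # record-level: permission granted when record.owner is one of the
    # principals; model-level checks are delegated to ModelAccessRule
    return AttributeAccessRule("owner", model_rule=ModelAccessRule())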
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/authorization/rule/attraccess.py
|
attraccess.py
|
class RuleNotForModelClasses(Exception):
"""Raised by authorization policies that don't make sense on model classes.
For instance, if a permission check is done on a model class, and the
policy associations are made with a policy that needs to check attributes,
then the association must be corrected.
"""
def __init__(self, policy, model):
self.policy = policy
self.model = model
self.message = "Rule %r cannot be used on a model class (got %r)" % (
policy,
model,
)
class AuthorizationRule:
"""Base class to define the interface and provide some helpers"""
registry = None
"""Set during assembly phase."""
def is_declaration(self):
return self.registry is None # pragma: no cover
def check(self, target, principals, permission):
"""Check that one of the principals has permisson on given record.
:param target: model instance (record) or class. Checking a permission
on a model class with a policy that is designed to work
on records is considered a configuration error,
expressed by :exc:`RuleNotForModelClasses`.
:param principals: list, set or tuple of strings
:rtype: bool
Must be implemented by concrete subclasses.
"""
raise NotImplementedError # pragma: no cover
def filter(self, model, query, principals, permission):
"""Return a new query with added permission filtering.
Must be implemented by concrete subclasses.
:param query: the :class:`Query` instance to modify to express
the permission for these principals.
:param model: the model on which the policy is applied
:rtype: :class:`Query`
It's not necessary that the resulting query expresses fully
the permission check: this can be complemented if needed
by postfiltering, notably for conditions that can't be expressed
conveniently in SQL.
That being said, if the policy can be expressed entirely by query
alteration, it's usually the best choice, as it keeps database traffic
to a minimum.
The policy also has the possibility to return False, for flat denial
without even querying the server. That may prove useful in some cases.
"""
raise NotImplementedError # pragma: no cover
postfilter = None
"""Filter by permission records obtained by a filtered query.
By default, this is ``None``, to indicate that the policy does not perform
any post filtering, but concrete policies can implement
a method with the following signature::
def postfilter(self, record, principals, permission):
Such implementations can (and usually, for performance, should) assume
that the query that produced the records was a filtered one.
The purpose of using the explicit ``None`` marker is to permit some calls
that don't make sense on a postfiltered operation (such as ``count()``).
"""
class DenyAll(AuthorizationRule):
def check(self, *args):
return False # pragma: no cover
def filter(self, *args):
return False # pragma: no cover
deny_all = DenyAll
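# A hedged sketch of a concrete rule implementing the interface above. It is
# not part of AnyBlok; the group name is an assumption for the example.
class InGroupRule(AuthorizationRule):
    """Grant the permission when one of the principals is in a fixed group."""
    allowed = frozenset(("admin",))
    def check(self, target, principals, permission):
        # flat check: ignore the target, only look at the principals
        return bool(self.allowed.intersection(principals))
    def filter(self, model, query, principals, permission):
        # the whole query is either allowed or denied, nothing to add in SQL
        if self.check(model, principals, permission):
            return query
        return False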
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/authorization/rule/base.py
|
base.py
|
"""Per model flat access rule, based on a Model (table)"""
from anyblok.registry import RegistryManagerException
from .base import AuthorizationRule
class ModelAccessRule(AuthorizationRule):
"""Rule to grant authorization uniformly for all records of one model.
The grants are themselves stored using a model class, that's provided
in this blok. The users don't need to install the blok to use this class,
provided they pass the model class to be used in all cases.
"""
grant_model_name = "Model.Authorization.ModelPermissionGrant"
def __init__(self, grant_model=None):
""".
:param grant_model: a model declaration that has the needed
columns (model, principal, permission)
"""
if grant_model is not None: # pragma: no cover
self.grant_model_name = grant_model.__registry_name__
@property
def grant_model(self):
try:
return self.registry.get(self.grant_model_name)
except RegistryManagerException: # pragma: no cover
cls = self.__class__
if self.grant_model_name is not cls.grant_model_name:
raise
raise RuntimeError(
"To use %s with no explicit Grant "
"model, you must install the model_access blok, "
"that provides the default %r"
% (cls.__name__, cls.grant_model_name)
)
def check_on_model(self, model, principals, permission):
Grant = self.grant_model
return bool(
Grant.query()
.filter(
Grant.model == model,
Grant.principal.in_(principals),
Grant.permission == permission,
)
.limit(1)
.count()
)
def check(self, record, principals, permission):
return self.check_on_model(
record.__registry_name__, principals, permission
)
def filter(self, model, query, principals, permission):
if self.check_on_model(model.__registry_name__, principals, permission):
return query
return False
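# A hedged usage sketch: grant a flat 'Read' permission and check it through
# this rule. It assumes the blok providing the default grant model
# (Model.Authorization.ModelPermissionGrant) is installed; the model name,
# principal and permission below are made up for the example.
def _example_grant(registry):  # pragma: no cover
    Grant = registry.get("Model.Authorization.ModelPermissionGrant")
    Grant.insert(
        model="Model.Example",
        principal="group:managers",
        permission="Read",
    )
    rule = ModelAccessRule()
    rule.registry = registry
    return rule.check_on_model("Model.Example", ["group:managers"], "Read")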
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/authorization/rule/modelaccess.py
|
modelaccess.py
|
from logging import getLogger
from anyblok.blok import Blok
from anyblok.release import version
logger = getLogger(__name__)
class AnyBlokCore(Blok):
"""This Blok is required in all AnyBlok applications.
This Blok provides the main functionalities for Bloks management (install,
update, uninstall…).
It also brings the representation of AnyBlok objects (Models, Fields, etc.)
within the database itself, and some fundamental facilities.
* Core Models
These are pure code Models, used as base classes:
- Base: inherited by all Models
- SqlBase: inherited by all models backed by an SQL table
- SqlViewBase: inherited by all models backed by an SQL view
* System Models
These correspond to actual tables in the database. They provide reflection
or fundamental facilities.
- Blok: represent all *available* Bloks, with their state and more
- Model
- Field
- Column
- Relationship
- :class:`Sequence <.system.sequence.Sequence>`: database sequences,
for use in applications.
- :class:`Parameter <.system.parameter.Parameter>`: application
parameters
"""
version = version
autoinstall = True
priority = 0
author = "Suzanne Jean-Sébastien"
logo = "../anyblok-logo_alpha_256.png"
def pre_migration(self, latest_version): # pragma: no cover
if latest_version is None:
return
if latest_version < "0.4.1":
self.pre_migration_0_4_1_fields_become_polymorphic(latest_version)
def pre_migration_0_4_1_fields_become_polymorphic(
self, latest_version
): # pragma: no cover
logger.info(
"Pre Migration %s => %s: Field, Column, Relation Ship "
"become prolymophic models" % (latest_version, self.version)
)
system_field = self.anyblok.migration.table("system_field")
system_field.column().add(self.anyblok.System.Field.entity_type)
self.anyblok.execute(
"UPDATE system_field SET entity_type='Model.System.Field'"
)
query = """
INSERT INTO system_field (
name,
model,
code,
label,
ftype,
entity_type)
SELECT
name,
model,
code,
label,
ftype,
'%(entity_type)s' AS entity_type
FROM %(table)s
"""
self.anyblok.execute(
query
% {"entity_type": "Model.System.Column", "table": "system_column"}
)
self.anyblok.execute(
query
% {
"entity_type": "Model.System.RelationShip",
"table": "system_relationship",
}
)
system_column = self.anyblok.migration.table("system_column")
system_column.column("code").drop()
system_column.column("ftype").drop()
system_column.column("label").drop()
system_relationship = self.anyblok.migration.table(
"system_relationship"
)
system_relationship.column("code").drop()
system_relationship.column("ftype").drop()
system_relationship.column("label").drop()
@classmethod
def import_declaration_module(cls):
from . import authorization # noqa
from . import core # noqa
from . import documentation # noqa
from . import system # noqa
@classmethod
def reload_declaration_module(cls, reload):
from . import core
reload(core)
from . import system
reload(system)
from . import authorization
reload(authorization)
from . import documentation
reload(documentation)
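# A hedged sketch of a minimal Blok following the same pattern as AnyBlokCore
# above. The blok and module names are assumptions: a real blok lives in its
# own package, is registered through the ``bloks`` entry point and keeps its
# declarations in dedicated modules.
#
#     from anyblok.blok import Blok
#
#     class MyBlok(Blok):
#         """Example blok"""
#         version = "1.0.0"
#         required = ["anyblok-core"]
#
#         @classmethod
#         def import_declaration_module(cls):
#             from . import models  # noqa
#
#         @classmethod
#         def reload_declaration_module(cls, reload):
#             from . import models
#             reload(models)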
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/__init__.py
|
__init__.py
|
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2015 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
Authorization
~~~~~~~~~~~~~
.. automodule:: anyblok.bloks.anyblok_core.authorization
.. autoanyblok-declaration:: Authorization
:members:
:undoc-members:
.. autoclass:: DefaultModelDeclaration
:members:
:undoc-members:
:show-inheritance:
Core Models
~~~~~~~~~~~
.. automodule:: anyblok.bloks.anyblok_core.core.base
.. autoanyblok-declaration:: Base
:members:
.. automodule:: anyblok.bloks.anyblok_core.core.sqlbase
.. autoclass:: SqlMixin
:members:
.. autoanyblok-declaration:: SqlBase
:members:
.. automodule:: anyblok.bloks.anyblok_core.core.sqlviewbase
.. autoanyblok-declaration:: SqlViewBase
:members:
.. automodule:: anyblok.bloks.anyblok_core.core.instrumentedlist
.. autoanyblok-declaration:: InstrumentedList
:members:
.. automodule:: anyblok.bloks.anyblok_core.core.query
.. autoanyblok-declaration:: Query
:members:
.. automodule:: anyblok.bloks.anyblok_core.core.session
.. autoanyblok-declaration:: Session
:members:
System Models
~~~~~~~~~~~~~
.. automodule:: anyblok.bloks.anyblok_core.system
.. autoanyblok-declaration:: System
:members:
.. automodule:: anyblok.bloks.anyblok_core.system.blok
.. autoanyblok-declaration:: Blok
:members:
.. automodule:: anyblok.bloks.anyblok_core.system.cache
.. autoanyblok-declaration:: Cache
:members:
.. automodule:: anyblok.bloks.anyblok_core.system.field
.. autoanyblok-declaration:: Field
:members:
.. warning::
Deprecated
.. automodule:: anyblok.bloks.anyblok_core.system.column
.. autoanyblok-declaration:: Column
:members:
.. warning::
Deprecated
.. automodule:: anyblok.bloks.anyblok_core.system.relationship
.. autoanyblok-declaration:: RelationShip
:members:
.. warning::
Deprecated
.. automodule:: anyblok.bloks.anyblok_core.system.model
.. autoanyblok-declaration:: Model
:members:
.. warning::
Deprecated
.. automodule:: anyblok.bloks.anyblok_core.system.parameter
.. autoanyblok-declaration:: Parameter
:members:
.. automodule:: anyblok.bloks.anyblok_core.system.sequence
.. autoanyblok-declaration:: Sequence
:members:
.. _blok_anyblok_core_documentation:
Documentation Models
~~~~~~~~~~~~~~~~~~~~
.. automodule:: anyblok.bloks.anyblok_core.documentation
.. autoanyblok-declaration:: DocElement
:members:
.. autoanyblok-declaration:: Documentation
:members:
.. automodule:: anyblok.bloks.anyblok_core.documentation.blok
.. autoanyblok-declaration:: Blok
:members:
.. automodule:: anyblok.bloks.anyblok_core.documentation.model
.. autoanyblok-declaration:: Model
:members:
.. automodule:: anyblok.bloks.anyblok_core.documentation.model.attribute
.. autoanyblok-declaration:: Attribute
:members:
.. automodule:: anyblok.bloks.anyblok_core.documentation.model.field
.. autoanyblok-declaration:: Field
:members:
.. _blok_anyblok_core_exceptions:
Exceptions
~~~~~~~~~~
.. automodule:: anyblok.bloks.anyblok_core.exceptions
.. autoexception:: CoreBaseException
:members:
:show-inheritance:
.. autoexception:: SqlBaseException
:members:
:show-inheritance:
.. autoexception:: QueryException
:members:
:show-inheritance:
.. autoexception:: CacheException
:members:
:show-inheritance:
.. autoexception:: ParameterException
:members:
:show-inheritance:
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/CODE.rst
|
CODE.rst
|
from anyblok import Declarations
from anyblok.column import String
from anyblok.schema import ForeignKeyConstraint
register = Declarations.register
System = Declarations.Model.System
Mixin = Declarations.Mixin
@register(System) # noqa
class Field:
name = String(primary_key=True)
code = String(nullable=True)
model = String(primary_key=True)
# FIXME, foreign_key=System.Model.use('name'))
label = String()
ftype = String(label="Type", nullable=True)
entity_type = String(nullable=True)
@classmethod
def define_table_args(cls):
table_args = super(Field, cls).define_table_args()
if cls.__registry_name__ != System.Field.__registry_name__:
F = cls.anyblok.System.Field
return table_args + (
ForeignKeyConstraint(
[cls.name, cls.model], [F.name, F.model], ondelete="CASCADE"
),
)
return table_args
@classmethod
def define_mapper_args(cls):
mapper_args = super(Field, cls).define_mapper_args()
if cls.__registry_name__ == System.Field.__registry_name__:
mapper_args.update(
{
"polymorphic_identity": cls.__registry_name__,
"polymorphic_on": cls.entity_type,
}
)
else:
mapper_args.update(
{
"polymorphic_identity": cls.__registry_name__,
}
)
return mapper_args
@classmethod
def get_cname(self, field, cname):
return cname
def _description(self):
res = {
"id": self.name,
"label": self.label,
"type": self.ftype,
"nullable": True,
"primary_key": False,
"model": None,
}
c = self.anyblok.loaded_namespaces_first_step[self.model][self.name]
c.update_description(self.anyblok, self.model, res)
return res
@classmethod
def add_field(cls, rname, label, model, table, ftype):
"""Insert a field definition
:param rname: name of the field
:param label: label of the field
:param model: namespace of the model
:param table: name of the table of the model
:param ftype: type of the AnyBlok Field
"""
cls.insert(
code=table + "." + rname,
model=model,
name=rname,
label=label,
ftype=ftype,
)
@classmethod
def alter_field(cls, field, label, ftype):
"""Update an existing field
:param field: instance of the Field model to update
:param label: label of the field
:param ftype: type of the AnyBlok Field
"""
field.update(label=label, ftype=ftype)
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/field.py
|
field.py
|
from anyblok import Declarations
from anyblok.column import Boolean, String
from anyblok.common import anyblok_column_prefix
register = Declarations.register
System = Declarations.Model.System
Mixin = Declarations.Mixin
@register(System)
class Column(System.Field):
name = String(primary_key=True)
model = String(primary_key=True)
autoincrement = Boolean(label="Auto increment")
foreign_key = String()
primary_key = Boolean()
unique = Boolean()
nullable = Boolean()
remote_model = String()
def _description(self):
res = super(Column, self)._description()
res.update(
nullable=self.nullable,
primary_key=self.primary_key,
model=self.remote_model,
)
return res
@classmethod
def get_cname(self, field, cname):
"""Return the real name of the column
:param field: the instance of the column
:param cname: not used here
:rtype: string of the real column name
"""
return cname
@classmethod
def add_field(cls, cname, column, model, table, ftype):
"""Insert a column definition
:param cname: name of the column
:param column: instance of the column
:param model: namespace of the model
:param table: name of the table of the model
:param ftype: type of the AnyBlok Field
"""
Model = cls.anyblok.get(model)
if hasattr(Model, anyblok_column_prefix + cname):
c = getattr(Model, anyblok_column_prefix + cname)
else:
c = column.property.columns[0] # pragma: no cover
autoincrement = c.autoincrement
if autoincrement == "auto":
autoincrement = (
True if c.primary_key and ftype == "Integer" else False
)
vals = dict(
autoincrement=autoincrement,
code=table + "." + cname,
model=model,
name=cname,
foreign_key=c.info.get("foreign_key"),
label=c.info.get("label"),
nullable=c.nullable,
primary_key=c.primary_key,
ftype=ftype,
remote_model=c.info.get("remote_model"),
unique=c.unique,
)
cls.insert(**vals)
@classmethod
def alter_field(cls, column, meta_column, ftype):
"""Update an existing column
:param column: instance of the Column model to update
:param meta_column: instance of the SqlAlchemy column
:param ftype: type of the AnyBlok Field
"""
Model = cls.anyblok.get(column.model)
if hasattr(Model, anyblok_column_prefix + column.name):
c = getattr(Model, anyblok_column_prefix + column.name)
else:
c = meta_column.property.columns[0] # pragma: no cover
autoincrement = c.autoincrement
if autoincrement == "auto":
autoincrement = (
True if c.primary_key and ftype == "Integer" else False
)
if column.autoincrement != autoincrement:
column.autoincrement = autoincrement # pragma: no cover
for col in ("nullable", "primary_key", "unique"):
if getattr(column, col) != getattr(c, col):
setattr(column, col, getattr(c, col)) # pragma: no cover
for col in ("foreign_key", "label", "remote_model"):
if getattr(column, col) != c.info.get(col):
setattr(column, col, c.info.get(col)) # pragma: no cover
if column.ftype != ftype:
column.ftype = ftype # pragma: no cover
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/column.py
|
column.py
|
from anyblok import Declarations
from anyblok.column import Boolean, String
register = Declarations.register
System = Declarations.Model.System
Mixin = Declarations.Mixin
@register(System)
class RelationShip(System.Field):
name = String(primary_key=True)
model = String(primary_key=True)
local_columns = String()
remote_columns = String()
remote_name = String()
remote_model = String(nullable=False)
remote = Boolean(default=False)
nullable = Boolean()
def _description(self):
res = super(RelationShip, self)._description()
remote_name = self.remote_name or ""
local_columns = []
if self.local_columns:
local_columns = [x.strip() for x in self.local_columns.split(",")]
remote_columns = []
if self.remote_columns:
remote_columns = [x.strip() for x in self.remote_columns.split(",")]
res.update(
nullable=self.nullable,
model=self.remote_model,
remote_name=remote_name,
local_columns=local_columns,
remote_columns=remote_columns,
)
return res
@classmethod
def add_field(cls, rname, relation, model, table, ftype):
"""Insert a relationship definition
:param rname: name of the relationship
:param relation: instance of the relationship
:param model: namespace of the model
:param table: name of the table of the model
:param ftype: type of the AnyBlok Field
"""
local_columns = ",".join(relation.info.get("local_columns", []))
remote_columns = ",".join(relation.info.get("remote_columns", []))
remote_model = relation.info.get("remote_model")
remote_name = relation.info.get("remote_name")
label = relation.info.get("label")
nullable = relation.info.get("nullable", True)
vals = dict(
code=table + "." + rname,
model=model,
name=rname,
local_columns=local_columns,
remote_model=remote_model,
remote_name=remote_name,
remote_columns=remote_columns,
label=label,
nullable=nullable,
ftype=ftype,
)
cls.insert(**vals)
if remote_name:
remote_type = "Many2One"
if ftype == "Many2One":
remote_type = "One2Many"
elif ftype == "Many2Many":
remote_type = "Many2Many"
elif ftype == "One2One":
remote_type = "One2One"
m = cls.anyblok.get(remote_model)
vals = dict(
code=m.__tablename__ + "." + remote_name,
model=remote_model,
name=remote_name,
local_columns=remote_columns,
remote_model=model,
remote_name=rname,
remote_columns=local_columns,
label=remote_name.capitalize().replace("_", " "),
nullable=True,
ftype=remote_type,
remote=True,
)
cls.insert(**vals)
@classmethod
def alter_field(cls, field, field_, ftype):
field.label = field_.info["label"]
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/relationship.py
|
relationship.py
|
from sqlalchemy import func
from anyblok.column import Integer, ModelSelection, String
from anyblok.declarations import Declarations
from ..exceptions import CacheException
register = Declarations.register
System = Declarations.Model.System
@register(System)
class Cache:
last_cache_id = None
id = Integer(primary_key=True)
registry_name = ModelSelection(nullable=False)
method = String(nullable=False)
@classmethod
def get_last_id(cls):
"""Return the last primary key ``id`` value"""
res = cls.query("id").order_by(cls.id.desc()).limit(1).first()
if res:
return res[0]
return 0 # pragma: no cover
@classmethod
def initialize_model(cls):
"""Initialize the last_cache_id known"""
super(Cache, cls).initialize_model()
cls.last_cache_id = cls.get_last_id()
@classmethod
def invalidate_all(cls):
res = []
for registry_name, methods in cls.anyblok.caches.items():
for method, caches in methods.items():
res.append(dict(registry_name=registry_name, method=method))
for cache in caches:
cache.cache_clear()
if res:
instances = cls.multi_insert(*res)
cls.last_cache_id = max(i.id for i in instances)
@classmethod
def invalidate(cls, registry_name, method):
"""Call the invalidation for a specific method cached on a model
:param registry_name: namespace of the model
:param method: name of the method on the model
:exception: CacheException
"""
caches = cls.anyblok.caches
if hasattr(registry_name, "__registry_name__"):
registry_name = registry_name.__registry_name__
if registry_name in caches:
if method in caches[registry_name]:
cls.last_cache_id = cls.insert(
registry_name=registry_name, method=method
).id
for cache in caches[registry_name][method]:
cache.cache_clear()
else:
raise CacheException( # pragma: no cover
"Unknown cached method %r" % method
)
else:
raise CacheException("Unknown cached model %r" % registry_name)
@classmethod
def get_invalidation(cls):
"""Return the pointer of the method to invalidate"""
res = []
query = cls.select_sql_statement(
func.max(cls.id).label("id"),
cls.registry_name,
cls.method,
)
query = query.group_by(cls.registry_name, cls.method)
query = query.where(cls.id > cls.last_cache_id)
query_res = cls.execute_sql_statement(query)
caches = cls.anyblok.caches
for id_, registry_name, method in query_res:
res.extend(caches[registry_name][method])
cls.last_cache_id = max(cls.last_cache_id, id_)
return res
@classmethod
def clear_invalidate_cache(cls):
"""Invalidate the cache that needs to be invalidated"""
for cache in cls.get_invalidation():
cache.cache_clear()
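# A hedged usage sketch of the invalidation API above. The target is a real
# cached classmethod declared in this blok (Model.System.Blok.is_installed);
# ``registry`` is any assembled AnyBlok registry.
def _example_invalidate(registry):  # pragma: no cover
    registry.System.Cache.invalidate("Model.System.Blok", "is_installed")
    # the next call recomputes the value instead of using the cache
    return registry.System.Blok.is_installed("anyblok-core")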
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/cache.py
|
cache.py
|
from logging import getLogger
from os.path import isfile, join
from anyblok.blok import BlokManager, BlokManagerException, UndefinedBlok
from anyblok.column import Integer, Selection, String
from anyblok.declarations import Declarations, classmethod_cache, listen
from anyblok.field import Function
from anyblok.version import parse_version
logger = getLogger(__name__)
register = Declarations.register
System = Declarations.Model.System
@register(System)
class Blok:
STATES = {
"uninstalled": "Uninstalled",
"installed": "Installed",
"toinstall": "To install",
"toupdate": "To update",
"touninstall": "To uninstall",
"undefined": "Undefined",
}
name = String(primary_key=True, nullable=False)
state = Selection(
selections=STATES, default="uninstalled", nullable=False, index=True
)
author = String()
order = Integer(default=-1, nullable=False)
short_description = Function(fget="get_short_description")
long_description = Function(fget="get_long_description")
logo = Function(fget="get_logo")
version = String(nullable=False)
installed_version = String()
def get_short_description(self):
"""fget of the ``short_description`` Column.Selection
:rtype: the docstring of the blok
"""
blok = BlokManager.get(self.name)
if hasattr(blok, "__doc__"):
return blok.__doc__ or ""
return "" # pragma: no cover
def get_long_description(self):
"""fget of the ``long_description`` Column.Selection
:rtype: the readme file of the blok
"""
blok = BlokManager.get(self.name)
path = BlokManager.getPath(self.name)
readme = getattr(blok, "readme", "README.rst")
if readme == "__doc__":
return blok.__doc__ # pragma: no cover
file_path = join(path, readme)
description = ""
if isfile(file_path):
with open(file_path, "r") as fp:
description = fp.read()
return description
def get_logo(self): # pragma: no cover
"""fget of ``logo`` return the path in the blok of the logo
:rtype: absolute path or None if unexiste logo
"""
blok = BlokManager.get(self.name)
blok_path = BlokManager.getPath(blok.name)
file_path = join(blok_path, blok.logo)
if isfile(file_path):
return file_path
return None
def __repr__(self):
return "%s (%s)" % (self.name, self.state)
@classmethod
def list_by_state(cls, *states):
"""Return the blok name in function of the wanted states
:param states: list of the state
:rtype: list if state is a state, dict if the states is a list
"""
if not states:
return None
res = {state: [] for state in states}
bloks = cls.query().filter(cls.state.in_(states)).order_by(cls.order)
for blok in bloks.all():
res[blok.state].append(blok.name)
if len(states) == 1:
return res[states[0]]
return res # pragma: no cover
@classmethod
def update_list(cls):
"""Populate the bloks list and update the state of existing bloks"""
# Do not remove bloks, because two or more AnyBlok applications may
# use the same database
for order, blok in enumerate(BlokManager.ordered_bloks):
b = cls.query().filter(cls.name == blok).one_or_none()
Blok = BlokManager.bloks[blok]
version = Blok.version
author = Blok.author
is_undefined = issubclass(Blok, UndefinedBlok)
if b is None:
cls.insert(
name=blok,
order=order,
version=version,
author=author,
state="undefined" if is_undefined else "uninstalled",
)
else:
values = dict(order=order, version=version, author=author)
if b.state == "undefined" and not is_undefined:
values["state"] = "uninstalled" # pragma: no cover
elif is_undefined: # pragma: no cover
if b.state not in ("uninstalled", "undefined"):
raise BlokManagerException(
(
"Change state %r => 'undefined' for %s is "
"forbidden"
)
% (b.state, blok)
)
values["state"] = "undefined"
b.update(**values)
@classmethod
def apply_state(cls, *bloks):
"""Call the rigth method is the blok state change
.. warning::
for the uninstallation the method called is ``uninstall_all``
:param bloks: list of the blok names loaded by the registry
"""
for blok in bloks:
# Make the query in the loop to be sure to keep order
b = cls.query().filter(cls.name == blok).first()
if b.state in ("uninstalled", "toinstall"):
b.install()
elif b.state == "toupdate":
b.upgrade()
uninstalled_bloks = (
cls.query().filter(cls.state == "uninstalled").all().name
)
conditional_bloks_to_install = []
for blok in uninstalled_bloks:
if cls.check_if_the_conditional_are_installed(blok):
conditional_bloks_to_install.append(blok)
if conditional_bloks_to_install:
for b in conditional_bloks_to_install:
cls.execute_sql_statement(
cls.update_sql_statement()
.where(cls.name == b)
.values(state="toinstall")
)
return True
return False
@classmethod
def uninstall_all(cls, *bloksname):
"""Search and call the uninstall method for all the uninstalled bloks
.. warning::
Uninstall in descending ``order`` because a dependency cannot be
uninstalled before the bloks that depend on it
:param bloksname: list of the blok names to uninstall
"""
if not bloksname:
return
query = cls.query().filter(cls.name.in_(bloksname))
query = query.order_by(cls.order.desc())
bloks = query.all()
if bloks:
bloks.uninstall()
@classmethod
def check_if_the_conditional_are_installed(cls, blok):
"""Return True if all the conditions to install the blok are satisfied
:param blok: blok name
:rtype: boolean
"""
if blok in BlokManager.bloks:
conditional = BlokManager.bloks[blok].conditional
if conditional:
query = cls.query().filter(cls.name.in_(conditional))
query = query.filter(
cls.state.in_(["installed", "toinstall", "toupdate"])
)
if len(conditional) == query.count():
return True
return False
def install(self):
"""Method to install the blok"""
logger.info("Install the blok %r" % self.name)
self.fire("Update installed blok")
entry = self.anyblok.loaded_bloks[self.name]
entry.update(None)
if self.anyblok.System.Parameter.get("with-demo", False):
entry.update_demo(None)
self.state = "installed"
self.installed_version = self.version
def upgrade(self):
"""Method to update the blok"""
logger.info("Update the blok %r" % self.name)
self.fire("Update installed blok")
entry = self.anyblok.loaded_bloks[self.name]
parsed_version = (
parse_version(self.installed_version)
if self.installed_version is not None
else None
)
entry.update(parsed_version)
if self.anyblok.System.Parameter.get("with-demo", False):
entry.update_demo(parsed_version)
self.state = "installed"
self.installed_version = self.version
def uninstall(self):
"""Method to uninstall the blok"""
logger.info("Uninstall the blok %r" % self.name)
self.fire("Update installed blok")
entry = BlokManager.bloks[self.name](self.anyblok)
if self.anyblok.System.Parameter.get("with-demo", False):
entry.uninstall_demo()
entry.uninstall()
self.state = "uninstalled"
self.installed_version = None
def load(self):
"""Method to load the blok when the registry is completly loaded"""
name = self.name
blok_cls = BlokManager.get(name)
if blok_cls is None:
logger.warning(
"load(): class of Blok %r not found, " "Blok can't be loaded",
name,
)
return # pragma: no cover
logger.info("Loading Blok %r", name)
blok_cls(self.anyblok).load()
logger.debug("Succesfully loaded Blok %r", name)
@classmethod
def load_all(cls):
"""Load all the installed bloks"""
query = cls.select_sql_statement()
query = query.where(cls.state == "installed")
query = query.order_by(cls.state)
query_res = cls.execute_sql_statement(query).scalars()
for blok in query_res:
blok.load()
@classmethod_cache()
def is_installed(cls, blok_name):
return (
cls.query().filter_by(name=blok_name, state="installed").count()
!= 0
)
@listen("Model.System.Blok", "Update installed blok")
def listen_update_installed_blok(cls):
cls.anyblok.System.Cache.invalidate(
cls.__registry_name__, "is_installed"
)
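# A hedged usage sketch of the state helpers above; ``registry`` is any
# assembled AnyBlok registry and "anyblok-core" is the core blok shipped
# with AnyBlok.
def _example_blok_states(registry):  # pragma: no cover
    installed = registry.System.Blok.list_by_state("installed")
    assert "anyblok-core" in installed
    return registry.System.Blok.is_installed("anyblok-core")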
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/blok.py
|
blok.py
|
from anyblok import Declarations
from anyblok.column import Boolean, Json, String
from ..exceptions import ParameterException
register = Declarations.register
System = Declarations.Model.System
NOT_PROVIDED = object
@register(Declarations.Model.System)
class Parameter:
"""Applications parameters.
This Model is provided by ``anyblok-core`` to give applications a uniform
way of specifying in-database configuration.
It is a simple key/value representation, where values can be of any type
that can be encoded as JSON.
A simple access API is provided with the :meth:`get`, :meth:`set`,
:meth:`is_exist` and further methods.
"""
key = String(primary_key=True)
value = Json(nullable=False)
multi = Boolean(default=False)
@classmethod
def set(cls, key, value):
"""Insert or update parameter value for a key.
.. note:: if the key already exists, the value will be updated
:param str key: key to save
:param value: value to save
"""
multi = False
if not isinstance(value, dict):
value = {"value": value}
else:
multi = True
if cls.is_exist(key):
param = cls.from_primary_keys(key=key)
param.update(value=value, multi=multi)
else:
cls.insert(key=key, value=value, multi=multi)
@classmethod
def is_exist(cls, key):
"""Check if one parameter exist for the key
:param key: key to check
:rtype: bool
"""
query = cls.query().filter(cls.key == key)
return True if query.count() else False
@classmethod
def get_parameter(cls, key, default=NOT_PROVIDED, remove=False):
"""Return the value of the key
:param key: key whose value to retrieve
:param default: default value if the key does not exist
:param remove: bool if True the entry will be removed
:return: associated value
:rtype: anything JSON encodable
:raises ParameterException: if the key doesn't exist and default is not
set.
"""
if not cls.is_exist(key):
if default is NOT_PROVIDED:
raise ParameterException("unexisting key %r" % key)
return default
param = cls.from_primary_keys(key=key)
if param.multi:
res = param.value
else:
res = param.value["value"]
if remove:
param.delete()
return res
@classmethod
def get(cls, key, default=NOT_PROVIDED):
"""Return the value of the key
:param key: key whose value to retrieve
:param default: default value if the key does not exist
:return: associated value
:rtype: anything JSON encodable
:raises ParameterException: if the key doesn't exist and default is not
set.
"""
return cls.get_parameter(key, default=default)
@classmethod
def pop(cls, key, default=NOT_PROVIDED):
"""Remove the given key and return the associated value.
:param str key: the key to remove
:return: the value before removal
:param default: default value if the key does not exist
:rtype: any JSON encodable type
:raises ParameterException: if the key wasn't present
"""
return cls.get_parameter(key, default=default, remove=True)
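# A hedged usage sketch of the Parameter API above; ``registry`` is any
# assembled AnyBlok registry and the key names are made up for the example.
def _example_parameters(registry):  # pragma: no cover
    Parameter = registry.System.Parameter
    Parameter.set("invoice.prefix", "INV")  # single value
    Parameter.set("smtp", {"host": "localhost", "port": 25})  # multi value
    prefix = Parameter.get("invoice.prefix")
    smtp = Parameter.get("smtp", default={})
    missing = Parameter.get("unknown.key", default=None)
    Parameter.pop("invoice.prefix")  # read the value and remove the key
    return prefix, smtp, missing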
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/parameter.py
|
parameter.py
|
from logging import getLogger
from anyblok.column import Boolean, String
from anyblok.declarations import Declarations, listen
from anyblok.field import Function
logger = getLogger(__name__)
register = Declarations.register
System = Declarations.Model.System
@register(System)
class Model:
"""Models assembled"""
def __str__(self):
if self.description:
return self.description # pragma: no cover
return self.name
name = String(size=256, primary_key=True)
table = String(size=256)
schema = String()
is_sql_model = Boolean(label="Is a SQL model")
description = Function(fget="get_model_doc_string")
def get_model_doc_string(self):
"""Return the docstring of the model"""
m = self.anyblok.get(self.name)
if hasattr(m, "__doc__"):
return m.__doc__ or ""
return "" # pragma: no cover
@listen("Model.System.Model", "Update Model")
def listener_update_model(cls, model):
cls.anyblok.System.Cache.invalidate(model, "_fields_description")
cls.anyblok.System.Cache.invalidate(model, "getFieldType")
cls.anyblok.System.Cache.invalidate(model, "get_primary_keys")
cls.anyblok.System.Cache.invalidate(
model, "find_remote_attribute_to_expire"
)
cls.anyblok.System.Cache.invalidate(model, "find_relationship")
cls.anyblok.System.Cache.invalidate(
model, "get_hybrid_property_columns"
)
@classmethod
def get_field_model(cls, field):
ftype = field.property.__class__.__name__
if ftype == "ColumnProperty":
return cls.anyblok.System.Column
elif ftype in ("Relationship", "RelationshipProperty"):
return cls.anyblok.System.RelationShip
else:
raise Exception("Not implemented yet") # pragma: no cover
@classmethod
def get_field(cls, model, cname):
if cname in model.loaded_fields.keys():
field = model.loaded_fields[cname]
Field = cls.anyblok.System.Field
else:
field = getattr(model, cname)
Field = cls.get_field_model(field)
return field, Field
@classmethod
def update_fields(cls, model, table):
fsp = cls.anyblok.loaded_namespaces_first_step
m = cls.anyblok.get(model)
# remove useless column
Field = cls.anyblok.System.Field
query = Field.query()
query = query.filter(Field.model == model)
query = query.filter(Field.name.notin_(m.loaded_columns))
for model_ in query:
if model_.entity_type == "Model.System.RelationShip":
if model_.remote:
continue
else: # pragma: no cover
RelationShip = cls.anyblok.System.RelationShip
Q = RelationShip.query()
Q = Q.filter(RelationShip.name == model_.remote_name)
Q = Q.filter(RelationShip.model == model_.remote_model)
Q.delete()
model_.delete()
# add or update new column
for cname in m.loaded_columns:
ftype = fsp[model][cname].__class__.__name__
field, Field = cls.get_field(m, cname)
cname = Field.get_cname(field, cname)
query = Field.query()
query = query.filter(Field.model == model)
query = query.filter(Field.name == cname)
if query.count():
Field.alter_field(query.first(), field, ftype)
else:
Field.add_field(cname, field, model, table, ftype)
@classmethod
def add_fields(cls, model, table):
fsp = cls.anyblok.loaded_namespaces_first_step
m = cls.anyblok.get(model)
is_sql_model = len(m.loaded_columns) > 0
cls.insert(
name=model,
table=table,
schema=m.__db_schema__,
is_sql_model=is_sql_model,
)
for cname in m.loaded_columns:
field, Field = cls.get_field(m, cname)
cname = Field.get_cname(field, cname)
ftype = fsp[model][cname].__class__.__name__
Field.add_field(cname, field, model, table, ftype)
@classmethod
def update_list(cls):
"""Insert and update the table of models
:exception: Exception
"""
for model in cls.anyblok.loaded_namespaces.keys():
try:
# TODO: needs refactoring; keep the try/except until the
# refactoring is applied
m = cls.anyblok.get(model)
table = ""
if hasattr(m, "__tablename__"):
table = m.__tablename__
_m = cls.query("name").filter(cls.name == model)
if _m.count():
cls.update_fields(model, table)
else:
cls.add_fields(model, table)
if m.loaded_columns:
cls.fire("Update Model", model)
except Exception as e:
logger.exception(str(e))
# remove model and field which are not in loaded_namespaces
query = cls.query()
query = query.filter(
cls.name.notin_(cls.anyblok.loaded_namespaces.keys())
)
Field = cls.anyblok.System.Field
for model_ in query:
Q = Field.query().filter(Field.model == model_.name)
for field in Q:
field.delete()
model_.delete()
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/model.py
|
model.py
|
from sqlalchemy import Sequence as SQLASequence
from anyblok import Declarations
from anyblok.column import Boolean, Integer, String
register = Declarations.register
System = Declarations.Model.System
Model = Declarations.Model
@register(System)
class Sequence:
"""Database sequences.
This Model allows applications to define and use Database sequences easily.
It is a rewrapping of `SQLAlchemy sequences
<http://docs.sqlalchemy.org/en/latest/core/defaults.html
#sqlalchemy.schema.Sequence>`_, with additional formatting
capabilities to use them, e.g, in fields of applicative Models.
Sample usage::
sequence = registry.System.Sequence.insert(
code="string code",
formater="One prefix {seq} One suffix")
.. seealso:: The :attr:`formater` field.
To get the next formatted value of the sequence::
sequence.nextval()
Full example in a Python shell::
>>> seq = Sequence.insert(code='SO', formater="{code}-{seq:06d}")
>>> seq.nextval()
'SO-000001'
>>> seq.nextval()
'SO-000002'
You can create a Sequence with a no-gap guarantee by passing `no_gap` while
creating the sequence::
>>> seq = Sequence.insert(
code='SO', formater="{code}-{seq:06d}", no_gap=True)
>>> commit()
>>> # Transaction 1:
>>> Sequence.nextvalBy(code='SO')
'SO-000001'
>>> # Concurrent transaction 2:
>>> Sequence.nextvalBy(code='SO')
...
sqlalchemy.exc.OperationalError: (psycopg2.errors.LockNotAvailable)
...
"""
_cls_seq_name = "system_sequence_seq_name"
id = Integer(primary_key=True)
code = String(nullable=False, index=True)
number = Integer(nullable=False)
seq_name = String(nullable=False)
"""Name of the sequence in the database.
Most databases identify sequences by names which must be globally
unique.
If not passed at insertion, the value of this field is automatically
generated.
"""
formater = String(nullable=False, default="{seq}")
"""Python format string to render the sequence values.
This format string is used in :meth:`nextval`. Within it, you can use the
following variables:
* code: :attr:`code` field
* id: :attr:`id` field
"""
no_gap = Boolean(default=False, nullable=False)
"""If no_gap is False, it will use Database sequence. Otherwise, if `True`
it will ensure there is no gap while getting next value locking the
sequence row until transaction is released (rollback/commit). If a
concurrent transaction try to get a lock an
`sqlalchemy.exc.OperationalError: (psycopg2.errors.LockNotAvailable)`
exception is raised.
"""
@classmethod
def initialize_model(cls):
"""Create the sequence to determine name"""
super(Sequence, cls).initialize_model()
seq = SQLASequence(cls._cls_seq_name)
seq.create(cls.anyblok.bind)
to_create = getattr(
cls.anyblok, "_need_sequence_to_create_if_not_exist", ()
)
if to_create is None:
return
for vals in to_create:
if cls.query().filter(cls.code == vals["code"]).count():
continue # pragma: no cover
formatter = vals.get("formater")
if formatter is None:
del vals["formater"]
no_gap = vals.get("no_gap")
if no_gap is None:
del vals["no_gap"]
cls.insert(**vals)
@classmethod
def create_sequence(cls, values):
"""Create the database sequence for an instance of Sequence Model.
:return: suitable field values for insertion of the Model instance
:rtype: dict
"""
seq_name = values.get("seq_name")
number = values.setdefault("number", 0)
if values.get("no_gap"):
values.setdefault("seq_name", values.get("code", "no_gap_seq"))
else:
if seq_name is None:
seq_id = cls.anyblok.scalar(SQLASequence(cls._cls_seq_name))
seq_name = "%s_%d" % (cls.__tablename__, seq_id)
values["seq_name"] = seq_name
if number:
seq = SQLASequence(seq_name, number)
else:
seq = SQLASequence(seq_name)
seq.create(cls.anyblok.bind)
return values
@classmethod
def insert(cls, **kwargs):
"""Overwrite to call :meth:`create_sequence` on the fly."""
return super(Sequence, cls).insert(**cls.create_sequence(kwargs))
@classmethod
def multi_insert(cls, *args): # pragma: no cover
"""Overwrite to call :meth:`create_sequence` on the fly."""
res = [cls.create_sequence(x) for x in args]
return super(Sequence, cls).multi_insert(*res)
def nextval(self):
"""Format and return the next value of the sequence.
:rtype: str
"""
cls = self.__class__
if self.no_gap:
nextval = cls.execute_sql_statement(
cls.select_sql_statement(cls.number)
.with_for_update(nowait=True)
.where(cls.id == self.id)
).scalar()
nextval += 1
cls.execute_sql_statement(
cls.update_sql_statement()
.where(cls.id == self.id)
.values(number=nextval)
)
else:
nextval = self.anyblok.scalar(SQLASequence(self.seq_name))
return self.formater.format(code=self.code, seq=nextval, id=self.id)
@classmethod
def nextvalBy(cls, **crit):
"""Return next value of the first Sequence matching given criteria.
:param crit: criteria to match, e.g., ``code='SO'``
:return: :meth:`nextval` result for the first matching Sequence,
or ``None`` if there's no match.
"""
filters = [getattr(cls, k) == v for k, v in crit.items()]
seq = cls.query().filter(*filters).first()
if seq is None:
return None # pragma: no cover
return seq.nextval()
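# Usage sketch (editor's addition, not part of the original module): how the
# Sequence Model above is typically used from application code.  It assumes an
# already configured AnyBlok registry; the sequence code and formater below
# are illustrative.
def _example_sequence_usage(registry):
    Sequence = registry.System.Sequence
    seq = Sequence.insert(code="INV", formater="INV-{seq:04d}")
    first = seq.nextval()                     # e.g. 'INV-0001'
    second = Sequence.nextvalBy(code="INV")   # e.g. 'INV-0002'
    return first, second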
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/system/sequence.py
|
sequence.py
|
from anyblok import Declarations, reload_module_if_blok_is_reloading
@Declarations.register(Declarations.Mixin)
class DocElement:
def _auto_doc(self, Model, elements, *args, **kwargs):
for el in Model.getelements(*args, **kwargs):
elements.append(Model(el, self))
def _toRST(self, doc, Model, elements):
Model.header2RST(doc)
for el in elements:
el.toRST(doc)
Model.footer2RST(doc)
@Declarations.register(Declarations.Model)
class Documentation(Declarations.Mixin.DocElement):
def __init__(self):
self.bloks = []
self.models = []
def auto_doc_blok(self):
self._auto_doc(self.anyblok.Documentation.Blok, self.bloks)
def auto_doc_model(self):
self._auto_doc(self.anyblok.Documentation.Model, self.models)
def auto_doc(self):
self.auto_doc_blok()
self.auto_doc_model()
def header2RST(self, doc):
pass
def footer2RST(self, doc):
pass
def chapter2RST(self, doc):
self.toRST_blok(doc)
self.toRST_model(doc)
def toRST(self, doc):
title = "Documentation of the %s project" % self.Env.get("db_name")
quote = "=" * len(title)
doc.write("\n".join([quote, title, quote, "\n"]))
self.header2RST(doc)
self.chapter2RST(doc)
self.footer2RST(doc)
def toUML(self, dot):
for m in self.models:
m.toUML_add_model(dot)
for m in self.models:
m.toUML_add_attributes(dot)
def toSQL(self, dot):
for m in self.models:
m.toSQL_add_table(dot)
for m in self.models:
m.toSQL_add_fields(dot)
def toRST_blok(self, doc):
self._toRST(doc, self.anyblok.Documentation.Blok, self.bloks)
def toRST_model(self, doc):
self._toRST(doc, self.anyblok.Documentation.Model, self.models)
from . import blok # noqa
reload_module_if_blok_is_reloading(blok)
from . import model # noqa
reload_module_if_blok_is_reloading(model)
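# Usage sketch (editor's addition, not part of the original module): producing
# the project documentation with the Model above, assuming a configured
# AnyBlok registry; the StringIO buffer is only one possible ``doc`` target,
# any object with a ``write`` method works.
def _example_generate_documentation(registry):
    import io

    documentation = registry.Documentation()
    documentation.auto_doc()        # collect the installed bloks and models
    buffer = io.StringIO()
    documentation.toRST(buffer)     # render everything as reStructuredText
    return buffer.getvalue()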
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/documentation/__init__.py
|
__init__.py
|
from logging import getLogger
from anyblok import Declarations
logger = getLogger(__name__)
@Declarations.register(Declarations.Model.Documentation.Model)
class Field:
mappers = {
("Many2One", True): ("m2o", "o2m"),
("Many2One", False): ("m2o", None),
("Many2Many", True): ("m2m", "m2m"),
("Many2Many", False): ("m2m", None),
("One2Many", True): ("o2m", "m2o"),
("One2Many", False): ("o2m", None),
("One2One", True): ("o2o", "o2o"),
("One2One", False): ("o2o", "o2o"),
}
def __init__(self, field, parent):
self.field = field
self.model = parent.model
@classmethod
def getelements(cls, model):
Model = cls.anyblok.get(model.model)
if Model.is_sql:
return Model.fields_description().values()
return []
@classmethod
def header2RST(cls, doc):
doc.write("Fields\n~~~~~~\n\n")
@classmethod
def footer2RST(cls, doc):
pass
def toRST(self, doc):
doc.write("* " + self.field["id"] + "\n\n")
self.toRST_docstring(doc)
self.toRST_properties(doc)
def toRST_docstring(self, doc):
if hasattr(self.field, "__doc__") and self.field.__doc__:
doc.write(self.field.__doc__ + "\n\n") # pragma: no cover
def toRST_properties_get(self):
return {x: y for x, y in self.field.items() if x != "id"}
def toRST_properties(self, doc):
properties = self.toRST_properties_get()
msg = ", ".join(" **%s** (%s)" % (x, y) for x, y in properties.items())
doc.write(msg + "\n\n")
def toUML(self, dot):
if "remote_name" in self.field:
self.toUML_relationship(dot) # pragma: no cover
else:
self.toUML_column(dot)
def toUML_column(self, dot):
model = dot.get_class(self.model)
name = self.field["id"]
if self.field["primary_key"]:
name = "+PK+ " + name
if self.field["model"]: # pragma: no cover
remote_model = dot.get_class(self.field["model"])
multiplicity = "1"
if self.field["nullable"]:
multiplicity = "0..1"
model.aggregate(
remote_model, label_from=name, multiplicity_from=multiplicity
)
else:
name += " (%s)" % self.field["type"]
model.add_column(name)
def toUML_relationship(self, dot): # pragma: no cover
if self.field.remote:
return
model = dot.get_class(self.field.model)
multiplicity, multiplicity_to = self.mappers[
(self.field.ftype, True if self.field.remote_name else False)
]
model.associate(
self.field.remote_model,
label_from=self.field.name,
label_to=self.field.remote_name,
multiplicity_from=multiplicity,
multiplicity_to=multiplicity_to,
)
def toSQL(self, dot):
if "remote_name" in self.field:
self.toSQL_relationship(dot) # pragma: no cover
else:
self.toSQL_column(dot)
def toSQL_relationship(self, dot):
# TODO
pass # pragma: no cover
def toSQL_column(self, dot):
Model = self.anyblok.get(self.model)
if self.field["id"] in Model.loaded_fields:
return
table = dot.get_table(self.anyblok.get(self.model).__tablename__)
if self.field.get("foreign_key"): # pragma: no cover
remote_table = dot.get_table(self.field.foreign_key.split(".")[0])
if remote_table is None:
remote_table = dot.add_label(
self.field.foreign_key.split(".")[0]
)
table.add_foreign_key(
remote_table,
label=self.field.name,
nullable=self.field.nullable,
)
else:
table.add_column(
self.field["id"],
self.field["type"],
primary_key=self.field["primary_key"],
)
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/documentation/model/field.py
|
field.py
|
from inspect import getmembers, isclass, isfunction, ismethod, ismodule
from anyblok import Declarations
from anyblok.field import FieldException
@Declarations.register(Declarations.Model.Documentation.Model)
class Attribute:
def __init__(self, attribute, parent):
self.name, self.attribute = attribute
@classmethod
def filterAttribute(cls, model, name):
if name in (
"insert",
"update",
"to_primary_keys",
"to_dict",
"sqlalchemy_query_update",
"sqlalchemy_query_delete",
"query",
"precommit_hook",
"multi_insert",
"initialize_model",
"has_perm",
"has_model_perm",
"get_where_clause_from_primary_keys",
"get_primary_keys",
"get_model",
"from_primary_keys",
"from_multi_primary_keys",
"fire",
"fields_description",
"_fields_description",
"delete",
"aliased",
"__init__",
"loaded_columns",
"loaded_fields",
"registry",
"anyblok" "_sa_class_manager",
"_decl_class_registry",
):
return True
return False
@classmethod
def getelements(cls, model):
res = []
Model = cls.anyblok.get(model.model)
try:
for k, v in getmembers(Model):
if ismodule(v) or isclass(v):
continue
if k.startswith("__"):
continue
if cls.filterAttribute(model, k):
continue
res.append((k, v))
except FieldException: # pragma: no cover
pass
return res
@classmethod
def header2RST(cls, doc):
title = "Attributes, methods and class methods"
doc.write("%s\n%s\n\n" % (title, "~" * len(title)))
@classmethod
def footer2RST(cls, doc):
pass
def toRST(self, doc):
doc.write("* " + self.name + "\n\n")
self.toRST_docstring(doc)
def toRST_docstring(self, doc):
if hasattr(self.attribute, "__doc__") and self.attribute.__doc__:
doc.write(self.attribute.__doc__ + "\n\n")
def toUML(self, dot, modelname):
model = dot.get_class(modelname)
if ismethod(self.attribute) or isfunction(self.attribute):
model.add_method(self.name)
else:
model.add_property(self.name)
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/documentation/model/attribute.py
|
attribute.py
|
from anyblok import Declarations, reload_module_if_blok_is_reloading
from anyblok.config import Configuration
@Declarations.register(Declarations.Model.Documentation)
class Model(Declarations.Mixin.DocElement):
def __init__(self, model, parent):
self.model = model
self.fields = []
self.attributes = []
if self.exist():
self._auto_doc(
self.anyblok.Documentation.Model.Field, self.fields, self
)
self._auto_doc(
self.anyblok.Documentation.Model.Attribute,
self.attributes,
self,
)
def exist(self):
return self.anyblok.has(self.model)
@classmethod
def filterModel(cls, models): # noqa: C901
wanted_models = Configuration.get("doc_wanted_models") or []
if wanted_models: # pragma: no cover
new_models = []
for model in models:
for wanted_model in wanted_models:
if wanted_model[-1] == "*" and model.startswith(
wanted_model[:-1]
):
new_models.append(model)
elif wanted_model == model:
new_models.append(model)
models = new_models
unwanted_models = Configuration.get("doc_unwanted_models") or []
if unwanted_models: # pragma: no cover
unwanted_models = [
x for x in unwanted_models if x not in wanted_models
]
new_models = []
for model in models:
for unwanted_model in unwanted_models:
if unwanted_model[-1] == "*" and model.startswith(
unwanted_model[:-1]
):
continue
elif unwanted_model == model:
continue
new_models.append(model)
models = new_models
return models
@classmethod
def getelements(cls):
return cls.filterModel(
[x for x in cls.anyblok.loaded_namespaces.keys()]
)
@classmethod
def header2RST(cls, doc):
doc.write(
"Models\n======\n\n"
"This the differents models defined "
"on the project" + ("\n" * 2)
)
@classmethod
def footer2RST(cls, doc):
pass
def toRST(self, doc):
doc.write(self.model + "\n" + "-" * len(self.model) + "\n\n")
self.toRST_docstring(doc)
self.toRST_properties(doc)
self.toRST_field(doc)
self.toRST_method(doc)
def toRST_field(self, doc):
if self.fields:
self._toRST(
doc, self.anyblok.Documentation.Model.Field, self.fields
)
def toRST_method(self, doc):
if self.attributes:
self._toRST(
doc, self.anyblok.Documentation.Model.Attribute, self.attributes
)
def toRST_docstring(self, doc):
Model = self.anyblok.get(self.model)
if hasattr(Model, "__doc__") and Model.__doc__:
doc.write(Model.__doc__ + "\n\n")
def toRST_properties_get(self):
Model = self.anyblok.get(self.model)
tablename = getattr(Model, "__tablename__", "No table")
return {
"table name": tablename,
}
def toRST_properties(self, doc):
properties = self.toRST_properties_get()
msg = "Properties:\n\n* " + "\n* ".join(
"**%s** : %s" % (x, y) for x, y in properties.items()
)
doc.write(msg + "\n\n")
def toUML_add_model(self, dot):
dot.add_class(self.model)
def toUML_add_attributes(self, dot):
for f in self.fields:
f.toUML(dot)
for attr in self.attributes:
attr.toUML(dot, self.model)
def toSQL_add_table(self, dot):
Model = self.anyblok.get(self.model)
if hasattr(Model, "__tablename__"):
dot.add_table(Model.__tablename__)
def toSQL_add_fields(self, dot):
Model = self.anyblok.get(self.model)
if hasattr(Model, "__tablename__"):
for f in self.fields:
f.toSQL(dot)
from . import field # noqa
reload_module_if_blok_is_reloading(field)
from . import attribute # noqa
reload_module_if_blok_is_reloading(attribute)
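# Illustration sketch (editor's addition, not part of the original module):
# the intended behaviour of the wildcard filtering in ``filterModel`` above
# (a trailing '*' in ``doc_wanted_models`` matches by prefix); the model
# names below are illustrative.
def _example_wanted_models_filter():
    wanted = ["Model.System.*", "Model.Documentation"]
    models = ["Model.System.Blok", "Model.Documentation", "Model.Other"]
    return [
        model
        for model in models
        if any(
            (w.endswith("*") and model.startswith(w[:-1])) or w == model
            for w in wanted
        )
    ]  # -> ['Model.System.Blok', 'Model.Documentation']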
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/documentation/model/__init__.py
|
__init__.py
|
from anyblok import Declarations
from ..exceptions import CoreBaseException
register = Declarations.register
@register(Declarations.Core)
class Base:
"""Inherited by all the models"""
is_sql = False
@classmethod
def initialize_model(cls):
"""This method is called to initialize a model during the creation of
the registry
"""
pass
@classmethod
def clear_all_model_caches(cls):
"""Clear all caches in the case of bloks changes"""
pass
@classmethod
def fire(cls, event, *args, **kwargs):
"""Call a specific event on the model
:param event: Name of the event
"""
events = cls.anyblok.events
if cls.__registry_name__ in events:
if event in events[cls.__registry_name__]:
for model, method in events[cls.__registry_name__][event]:
m = cls.anyblok.get(model)
getattr(m, method)(*args, **kwargs)
@classmethod
def get_model(cls, model):
return cls.anyblok.get(model)
@classmethod
def get_primary_keys(cls, **pks):
"""No SQL Model has not primary key"""
raise CoreBaseException("No primary key for No SQL Model")
@classmethod
def from_primary_keys(cls, **pks):
"""No SQL Model has not primary key"""
raise CoreBaseException("No primary key for No SQL Model")
def to_primary_keys(self):
"""No SQL Model has not primary key"""
raise CoreBaseException("No primary key for No SQL Model")
def has_perm(self, principals, permission):
"""Check that one of principals has permission on given record.
Since this is an ordinary instance method, it can't be used on the
model class itself. For this use case, see :meth:`has_model_perm`
"""
return self.anyblok.check_permission(self, principals, permission)
@classmethod
def has_model_perm(cls, principals, permission):
"""Check that one of principals has permission on given model.
Since this is a classmethod, even if called on a record, only its
model class will be considered for the permission check.
"""
return cls.anyblok.check_permission(cls, principals, permission)
@classmethod
def precommit_hook(cls, method, *args, **kwargs):
"""Same in the registry a hook to call just before the commit
.. warning::
Only one instance with same parameters of the hook is called before
the commit
:param method: the method to call on this model
:param put_at_the_end_if_exist: If ``True`` the hook is move at the end
"""
cls.anyblok.precommit_hook(
cls.__registry_name__, method, *args, **kwargs
)
@classmethod
def postcommit_hook(cls, method, *args, **kwargs):
"""Same in the registry a hook to call just after the commit
you can choice if the hook is called in function of ``call_only_if``:
* ``commited``: Call if the commit is done without exception
* ``raised``: Call if one exception was raised
* ``always``: Always call
.. warning::
Only one instance with same paramters of the hook is called
after the commit
:param method: the method to call on this model
:param put_at_the_end_if_exist: If ``True`` the hook is move at the end
:param call_only_if: ['commited' (default), 'raised', 'always']
"""
cls.anyblok.postcommit_hook(
cls.__registry_name__, method, *args, **kwargs
)
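# Usage sketch (editor's addition, not part of the original module): calling
# the event and hook helpers defined above from application code.  It assumes
# a configured registry and a hypothetical ``Model.Example`` exposing
# ``reindex`` and ``notify`` classmethods and listeners for a hypothetical
# ``state_changed`` event.
def _example_events_and_hooks(registry):
    Example = registry.get("Model.Example")
    # run every method registered in registry.events for this event
    Example.fire("state_changed", "new-state")
    # ask the registry to call Example.reindex() just before the commit;
    # a hook registered twice with the same parameters is only called once
    Example.precommit_hook("reindex")
    # call Example.notify() after the commit, only if it succeeded
    Example.postcommit_hook("notify", call_only_if="commited")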
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/core/base.py
|
base.py
|
from sqlalchemy import and_, delete, inspect, or_, select
from sqlalchemy import update as sqla_update
from sqlalchemy.orm import ColumnProperty, aliased
from sqlalchemy.orm.base import LoaderCallableStatus
from sqlalchemy.orm.session import object_state
from sqlalchemy_utils.models import NOT_LOADED_REPR
from anyblok.column import Column
from anyblok.common import anyblok_column_prefix
from anyblok.declarations import Declarations, classmethod_cache
from anyblok.field import FieldException
from anyblok.mapper import FakeColumn, FakeRelationShip
from anyblok.relationship import Many2Many, RelationShip
from ..exceptions import SqlBaseException
class uniquedict(dict):
def add_in_res(self, key, attrs):
if key not in self:
self[key] = []
for attr in attrs:
if attr not in self[key]:
self[key].append(attr)
class SqlMixin:
__db_schema__ = None
def __repr__(self):
state = inspect(self)
field_reprs = []
fields_description = self.fields_description()
keys = list(fields_description.keys())
keys.sort()
for key in keys:
type_ = fields_description[key]["type"]
if key in state.attrs:
value = state.attrs.get(key).loaded_value
elif (anyblok_column_prefix + key) in state.attrs:
value = state.attrs.get(
anyblok_column_prefix + key
).loaded_value
else:
continue # pragma: no cover
if value == LoaderCallableStatus.NO_VALUE:
value = NOT_LOADED_REPR
elif value and type_ in ("One2Many", "Many2Many"):
value = "<%s len(%d)>" % (
fields_description[key]["model"],
len(value),
)
elif value and type_ in ("One2One", "Many2One"):
value = "<%s(%s)>" % (
fields_description[key]["model"],
", ".join(
[
"=".join([x, str(y)])
for x, y in value.to_primary_keys().items()
]
),
)
else:
value = repr(value)
field_reprs.append("=".join((key, value)))
return "<%s(%s)>" % (
self.__class__.__registry_name__,
", ".join(field_reprs),
)
@classmethod
def initialize_model(cls):
super().initialize_model()
cls.SQLAMapper = inspect(cls)
@classmethod
def clear_all_model_caches(cls):
super().clear_all_model_caches()
Cache = cls.anyblok.System.Cache
Cache.invalidate(cls, "_fields_description")
Cache.invalidate(cls, "fields_name")
Cache.invalidate(cls, "getFieldType")
Cache.invalidate(cls, "get_primary_keys")
Cache.invalidate(cls, "find_remote_attribute_to_expire")
Cache.invalidate(cls, "find_relationship")
Cache.invalidate(cls, "get_hybrid_property_columns")
@classmethod
def define_table_args(cls):
return ()
@classmethod
def define_table_kwargs(cls):
res = {}
if cls.__db_schema__ is not None:
res.update({"schema": cls.__db_schema__})
return res
@classmethod
def define_mapper_args(cls):
return {}
@classmethod
def get_all_registry_names(cls):
models = list(cls.__depends__)
models.insert(0, cls.__registry_name__)
return models
@classmethod
def query(cls, *elements):
"""Facility to do a SqlAlchemy query::
query = MyModel.query()
is equivalent to::
query = self.anyblok.Query(MyModel)
:param elements: passed to the SQLAlchemy query; if an element is a
string, it is treated as a field of the model
:rtype: AnyBlok Query
"""
return cls.anyblok.Query(cls, *elements)
@classmethod
def select_sql_statement(cls, *elements):
"""Facility to do a SqlAlchemy query::
stmt = MyModel.select_sql_statement()
is equivalent to::
from anyblok import select
stmt = select(MyModel)
but select_sql_statement can be overloaded per model, and it is
possible to apply a where clause or any further refinement
:param elements: passed to the SQLAlchemy query; if an element is a
string, it is treated as a field of the model
:rtype: SqlAlchemy select statement
"""
res = []
for f in elements:
if isinstance(f, str):
res.append(getattr(cls, f).label(f))
else:
res.append(f)
if res:
stmt = select(*res)
else:
stmt = select(cls)
return cls.default_filter_on_sql_statement(stmt)
@classmethod
def execute(cls, *args, **kwargs):
"""call SqlA execute method on the session"""
return cls.anyblok.session.execute(*args, **kwargs)
@classmethod
def default_filter_on_sql_statement(cls, statement):
return statement
@classmethod
def execute_sql_statement(cls, *args, **kwargs):
"""call SqlA execute method on the session"""
return cls.anyblok.execute(*args, **kwargs)
is_sql = True
@classmethod
def aliased(cls, *args, **kwargs):
"""Facility to Apply an aliased on the model::
MyModelAliased = MyModel.aliased()
is equal at::
from sqlalchemy.orm import aliased
MyModelAliased = aliased(MyModel)
:rtype: SqlAlchemy aliased of the model
"""
alias = aliased(cls, *args, **kwargs)
alias.anyblok = cls.anyblok
return alias
@classmethod
def get_where_clause_from_primary_keys(cls, **pks):
"""return the where clause to find object from pks
:param _*_*pks: dict {primary_key: value, ...}
:rtype: where clause
:exception: SqlBaseException
"""
_pks = cls.get_primary_keys()
for pk in _pks:
if pk not in pks: # pragma: no cover
raise SqlBaseException(
"No primary key %s filled for %r"
% (pk, cls.__registry_name__)
)
return [getattr(cls, k) == v for k, v in pks.items()]
@classmethod
def query_from_primary_keys(cls, **pks):
"""return a Query object in order to get object from primary keys.
.. code::
query = Model.query_from_primary_keys(**pks)
obj = query.one()
:param **pks: dict {primary_key: value, ...}
:rtype: Query object
"""
where_clause = cls.get_where_clause_from_primary_keys(**pks)
return cls.query().filter(*where_clause)
@classmethod
def from_primary_keys(cls, **pks):
"""return the instance of the model from the primary keys
:param **pks: dict {primary_key: value, ...}
:rtype: instance of the model
"""
query = cls.query_from_primary_keys(**pks)
return query.one_or_none()
@classmethod
def from_multi_primary_keys(cls, *pks):
"""return the instances of the model from the primary keys
:param *pks: list of dicts [{primary_key: value, ...}]
:rtype: instances of the model
"""
where_clause = []
for _pks in pks:
where_clause.append(cls.get_where_clause_from_primary_keys(**_pks))
if not where_clause:
return []
where_clause = or_(*[and_(*x) for x in where_clause])
query = cls.query().filter(where_clause)
return query.all()
def to_primary_keys(self):
"""return the primary keys and values for this instance
:rtype: dict {primary key: value, ...}
"""
pks = self.get_primary_keys()
return {x: getattr(self, x) for x in pks}
@classmethod_cache()
def get_primary_keys(cls):
"""return the name of the primary keys of the model
:type: list of the primary keys name
"""
return list(
{
column.key
for model in cls.get_all_registry_names()
for column in cls.anyblok.get(model).SQLAMapper.primary_key
}
)
@classmethod
def _fields_description_field(cls):
res = {}
fsp = cls.anyblok.loaded_namespaces_first_step[cls.__registry_name__]
for cname in cls.loaded_fields:
ftype = fsp[cname].__class__.__name__
res[cname] = dict(
id=cname,
label=cls.loaded_fields[cname],
type=ftype,
nullable=True,
primary_key=False,
model=None,
)
fsp[cname].update_description(
cls.anyblok, cls.__registry_name__, res[cname]
)
return res
@classmethod
def _fields_description_column(cls):
res = {}
fsp = cls.anyblok.loaded_namespaces_first_step[cls.__registry_name__]
for field in cls.SQLAMapper.columns:
if field.key not in fsp:
continue
ftype = fsp[field.key].__class__.__name__
res[field.key] = dict(
id=field.key,
label=field.info.get("label"),
type=ftype,
nullable=field.nullable,
primary_key=field.primary_key,
model=field.info.get("remote_model"),
)
fsp[field.key].update_description(
cls.anyblok, cls.__registry_name__, res[field.key]
)
return res
@classmethod
def _fields_description_relationship(cls):
res = {}
fsp = cls.anyblok.loaded_namespaces_first_step[cls.__registry_name__]
for field in cls.SQLAMapper.relationships:
key = (
field.key[len(anyblok_column_prefix) :]
if field.key.startswith(anyblok_column_prefix)
else field.key
)
ftype = fsp[key].__class__.__name__
if ftype == "FakeRelationShip":
Model = field.mapper.entity
model = Model.__registry_name__
nullable = True
remote_name = field.back_populates
if remote_name.startswith(anyblok_column_prefix):
remote_name = remote_name[len(anyblok_column_prefix) :]
remote = getattr(Model, remote_name)
remote_columns = remote.info.get("local_columns", [])
local_columns = remote.info.get("remote_columns", [])
rtype = remote.info["rtype"]
if rtype == "Many2One":
ftype = "One2Many"
elif rtype == "Many2Many":
ftype = "Many2Many"
elif rtype == "One2One":
ftype = "One2One"
else:
local_columns = field.info.get("local_columns", [])
remote_columns = field.info.get("remote_columns", [])
nullable = field.info.get("nullable", True)
model = field.info.get("remote_model")
remote_name = field.info.get("remote_name")
res[key] = dict(
id=key,
label=field.info.get("label"),
type=ftype,
nullable=nullable,
model=model,
local_columns=local_columns,
remote_columns=remote_columns,
remote_name=remote_name,
primary_key=False,
)
fsp[key].update_description(
cls.anyblok, cls.__registry_name__, res[key]
)
return res
@classmethod_cache()
def _fields_description(cls):
"""Return the information of the Field, Column, RelationShip"""
res = {}
for registry_name in cls.__depends__:
Depend = cls.anyblok.get(registry_name)
res.update(Depend._fields_description())
res.update(cls._fields_description_field())
res.update(cls._fields_description_column())
res.update(cls._fields_description_relationship())
return res
@classmethod
def _fields_name_field(cls):
return [cname for cname in cls.loaded_fields]
@classmethod
def _fields_name_column(cls):
return [field.key for field in cls.SQLAMapper.columns]
@classmethod
def _fields_name_relationship(cls):
return [
(
field.key[len(anyblok_column_prefix) :]
if field.key.startswith(anyblok_column_prefix)
else field.key
)
for field in cls.SQLAMapper.relationships
]
@classmethod_cache()
def fields_name(cls):
"""Return the name of the Field, Column, RelationShip"""
res = []
for registry_name in cls.__depends__:
Depend = cls.anyblok.get(registry_name)
res.extend(Depend.fields_name())
res.extend(cls._fields_name_field())
res.extend(cls._fields_name_column())
res.extend(cls._fields_name_relationship())
return list(set(res))
@classmethod
def fields_description(cls, fields=None):
res = cls._fields_description()
if fields:
return {x: y for x, y in res.items() if x in fields}
return res
@classmethod_cache()
def get_hybrid_property_columns(cls):
"""Return the hybrid properties columns name from the Model and the
inherited model if they come from polymorphisme
"""
hybrid_property_columns = cls.hybrid_property_columns
if "polymorphic_identity" in cls.__mapper_args__:
pks = cls.get_primary_keys()
fd = cls.fields_description(pks)
for pk in pks:
if fd[pk].get("model"): # pragma: no cover
Model = cls.anyblok.get(fd[pk]["model"])
hybrid_property_columns.extend(
Model.get_hybrid_property_columns()
)
return hybrid_property_columns
def _format_field(self, field):
related_fields = None
if isinstance(field, (tuple, list)):
if len(field) == 1:
related_fields = ()
elif len(field) == 2:
related_fields = field[1]
if related_fields is None:
related_fields = ()
elif not isinstance(related_fields, (tuple, list)):
raise SqlBaseException(
"%r the related fields wanted "
"must be a tuple or empty or "
"None value" % related_fields
)
else:
raise SqlBaseException(
"%r the number of argument is "
"wrong, waiting 1 or 2 arguments "
"(name of the relation[, (related "
"fields)])" % (field,)
)
field = field[0]
return field, related_fields
def to_dict(self, *fields):
"""Transform a record to the dict of value
:param fields: list of fields to put in dict; if not selected, fields
then take them all. A field is either one of these:
* a string (which is the name of the field)
* a 2-tuple if the field is a relationship (name of the field,
tuple of foreign model fields)
:rtype: dict
Here are some examples::
=>> instance.to_dict() # get all fields
{"id": 1,
"column1": "value 1",
"column2": "value 2",
"column3": "value 3",
"relation1": {"relation_pk_1": 42, "relation_pk_2": "also 42"}
# m2o or o2o : this is a dictionary
"relation2": [{"id": 28}, {"id": 1}, {"id": 34}]
# o2m or m2m : this is a list of dictionaries
}
=>> instance.to_dict("column1", "column2", "relation1")
# get selected fields only (without any constraints)
{"column1": "value 1",
"column2": "value 2",
"relation1": {"relation_pk_1": 42, "relation_pk_2": "also 42"}
}
=>> instance.to_dict("column1", "column2", (
# select fields to use in the relation related model
"relation1", ("relation_pk1", "name", "value")
# there are no constraints on the choice of fields
))
{"column1": "value",
"column2": "value",
"relation1": {"relation_pk_1": 42, "name": "H2G2", "value": "42"}
}
=>> instance.to_dict("column1", "column2", ("relation1", ))
# or
=>> instance.to_dict("column1", "column2", ("relation1", None))
# or
=>> instance.to_dict("column1", "column2", ("relation1", ()))
# select all the fields of the relationship
{"column1": "value",
"column2": "value",
"relation1": {"relation_pk_1": 42, "name": "H2G2", "value": "42"}
}
=>> instance.to_dict("column1", "column2", (
# select relation fields recursively
"relation1", ("name", "value", (
"relation", ("a", "b", "c")
))
))
{"column1": "value",
"column2": "value",
"relation1": {"name": "H2G2", "value": "42", "relation": [
{"a": 10, "b": 20, "c": 30},
{"a": 11, "b": 22, "c": 33},
]}
}
"""
result = {}
cls = self.__class__
fields = fields if fields else cls.fields_description().keys()
for field in fields:
# if field is ("relation_name", ("list", "of", "relation",
# "fields")), deal with it.
field, related_fields = self._format_field(field)
# Get the actual data
field_value, field_property = getattr(self, field), None
try:
field_property = getattr(getattr(cls, field), "property", None)
except FieldException: # pragma: no cover
pass
# Deal with this data
if field_property is None:
# it is the case of a field function (hybrid property)
result[field] = field_value
elif field_value is None or type(field_property) == ColumnProperty:
# If value is None, then do not go any further whatever
# the column property tells you.
result[field] = field_value
else:
# it should be a RelationshipProperty
if related_fields is None:
# If there is no field list to the relation,
# use only primary keys
related_fields = field_property.mapper.entity
related_fields = related_fields.get_primary_keys()
# One2One, One2Many, Many2One or Many2Many ?
if field_property.uselist:
result[field] = [
r.to_dict(*related_fields) for r in field_value
]
else:
result[field] = field_value.to_dict(*related_fields)
return result
@classmethod_cache()
def getFieldType(cls, name):
"""Return the type of the column
::
TheModel.getFieldType(nameOfTheColumn)
this method takes care of whether the model is polymorphic or not
:param name: name of the column
:rtype: String, the name of the Type of column used
"""
return cls.fields_description(name)[name]["type"]
@classmethod_cache()
def find_remote_attribute_to_expire(cls, *fields):
res = uniquedict()
_fields = []
_fields.extend(fields)
model = get_model_information(cls.anyblok, cls.__registry_name__)
while _fields:
field = _fields.pop()
field = field if isinstance(field, str) else field.name
_field = model[field]
if isinstance(_field, (Column, FakeColumn)):
_fields.extend(
x
for x, y in model.items()
if (
isinstance(y, RelationShip)
and not isinstance(y, Many2Many)
)
for mapper in y.column_names
if mapper.attribute_name == field
)
if (
isinstance(_field, Column) and _field.foreign_key
): # pragma: no cover
rmodel = cls.anyblok.loaded_namespaces_first_step[
_field.foreign_key.model_name
]
for rc in [
x
for x, y in rmodel.items()
if isinstance(y, RelationShip)
for mapper in y.remote_columns
if mapper.attribute_name == field
]:
rfield = rmodel[rc]
if isinstance(rfield, FakeRelationShip):
res.add_in_res(rfield.mapper.attribute_name, [rc])
elif (
isinstance(rfield, RelationShip)
and "backref" in rfield.kwargs
):
res.add_in_res(rfield.kwargs["backref"][0], [rc])
elif (
isinstance(_field, RelationShip)
and not isinstance(_field, Many2Many)
and "backref" in _field.kwargs
):
res.add_in_res(field, [_field.kwargs["backref"][0]])
elif isinstance(_field, FakeRelationShip): # pragma: no cover
res.add_in_res(field, [_field.mapper.attribute_name])
return res
@classmethod_cache()
def find_relationship(cls, *fields):
"""Find column and relation ship link with the column or relationship
passed in fields.
:param _*fields: lists of the attribute name
:rtype: list of the attribute name of the attribute and relation ship
"""
res = []
_fields = []
_fields.extend(fields)
model = get_model_information(cls.anyblok, cls.__registry_name__)
while _fields:
field = _fields.pop()
if not isinstance(field, str):
field = field.name # pragma: no cover
if field in res:
continue
_field = model[field]
res.append(field)
if isinstance(_field, (Column, FakeColumn)):
_fields.extend(
x
for x, y in model.items()
if (
isinstance(y, RelationShip)
and not isinstance(y, Many2Many)
)
for mapper in y.column_names
if mapper.attribute_name == field
)
elif isinstance(_field, RelationShip) and not isinstance(
_field, Many2Many
):
for mapper in _field.column_names:
_fields.append(mapper.attribute_name)
return res
def get_model_information(anyblok, registry_name):
model = anyblok.loaded_namespaces_first_step[registry_name]
for depend in model["__depends__"]:
if depend != registry_name:
for x, y in get_model_information(anyblok, depend).items():
if x not in model:
model[x] = y # pragma: no cover
return model
@Declarations.register(Declarations.Core)
class SqlBase(SqlMixin):
"""this class is inherited by all the SQL model"""
def get_modified_fields(self):
"""return the fields which have changed and their previous values"""
state = object_state(self)
modified_fields = {}
for attr in state.manager.attributes:
if not hasattr(attr.impl, "get_history"):
continue # pragma: no cover
added, unmodified, deleted = attr.impl.get_history(
state, state.dict
)
if added or deleted:
field = attr.key
if field.startswith(anyblok_column_prefix):
field = field[len(anyblok_column_prefix) :]
modified_fields[field] = deleted[0] if deleted else None
return modified_fields
def expire_relationship_mapped(self, mappers):
"""Expire the objects linked with this object, in function of
the mappers definition
"""
for field_name, rfields in mappers.items():
fields = getattr(self, field_name)
if not isinstance(fields, list):
fields = [fields]
for field in fields:
if field is not None:
field.expire(*rfields)
def refresh(self, *fields, with_for_update=None):
"""Expire and reload all the attribute of the instance
See: http://docs.sqlalchemy.org/en/latest/orm/session_api.html
#sqlalchemy.orm.session.Session.refresh
"""
self.anyblok.refresh(self, fields, with_for_update=with_for_update)
def expunge(self):
"""Expunge the instance in the session"""
self.anyblok.session.expunge(self)
def expire(self, *fields):
"""Expire the attribute of the instance, theses attributes will be
load at the next call of the instance
see: http://docs.sqlalchemy.org/en/latest/orm/session_api.html
#sqlalchemy.orm.session.Session.expire
"""
self.anyblok.expire(self, fields)
def flag_modified(self, *fields):
"""Flag the attributes as modified
see: http://docs.sqlalchemy.org/en/latest/orm/session_api.html
#sqlalchemy.orm.session.Session.expire
"""
self.anyblok.flag_modified(self, fields)
@classmethod
def delete_sql_statement(cls):
"""Return a statement to delete some element"""
return cls.default_filter_on_sql_statement(delete(cls))
def delete(self, byquery=False, flush=True):
"""Call the SqlAlchemy Query.delete method on the instance of the
model::
self.delete()
is equivalent to::
flush the session
remove the instance from the session
and expire the related objects in the session, to reload the relationships
"""
if byquery:
cls = self.__class__
self.execute_sql_statement(
delete(cls).where(
*cls.get_where_clause_from_primary_keys(
**self.to_primary_keys()
)
)
)
self.expunge()
else:
model = self.anyblok.loaded_namespaces_first_step[
self.__registry_name__
]
fields = model.keys()
mappers = self.__class__.find_remote_attribute_to_expire(*fields)
self.expire_relationship_mapped(mappers)
self.anyblok.session.delete(self)
if flush:
self.anyblok.flush()
@classmethod
def update_sql_statement(cls):
return cls.default_filter_on_sql_statement(sqla_update(cls))
def update(self, byquery=False, flush=False, **values):
"""Hight livel method to update the session for the instance
::
self.update(val1=.., val2= ...)
..warning::
the columns and values is passed as named arguments to show
a difference with Query.update meth
"""
if byquery:
cls = self.__class__
return self.execute_sql_statement(
sqla_update(cls)
.where(
*cls.get_where_clause_from_primary_keys(
**self.to_primary_keys()
)
)
.values(**values)
).rowcount
for x, v in values.items():
setattr(self, x, v)
if flush:
self.anyblok.flush() # pragma: no cover
return 1 if values else 0
@classmethod
def insert(cls, **kwargs):
"""Insert in the table of the model::
MyModel.insert(...)
is equivalent to::
mymodel = MyModel(...)
MyModel.anyblok.session.add(mymodel)
MyModel.anyblok.flush()
"""
instance = cls(**kwargs)
cls.anyblok.add(instance)
cls.anyblok.flush()
return instance
@classmethod
def multi_insert(cls, *args):
"""Insert in the table one or more entry of the model::
MyModel.multi_insert([{...}, ...])
the flush will be done only one time at the end of the insert
:exception: SqlBaseException
"""
instances = cls.anyblok.InstrumentedList()
session = cls.anyblok.session
for kwargs in args:
if not isinstance(kwargs, dict): # pragma: no cover
raise SqlBaseException("multi_insert method wait list of dict")
instance = cls(**kwargs)
session.add(instance)
instances.append(instance)
if instances:
session.flush()
return instances
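# Usage sketch (editor's addition, not part of the original module): the basic
# persistence helpers defined by SqlBase above, assuming a configured registry
# and a hypothetical ``Model.Example`` with a ``name`` column.
def _example_crud(registry):
    Example = registry.get("Model.Example")
    record = Example.insert(name="first")          # add to the session + flush
    record.update(name="renamed")                  # set attributes in place
    data = record.to_dict("name")                  # {'name': 'renamed'}
    again = Example.from_primary_keys(**record.to_primary_keys())
    record.delete()                                # delete + flush + expire
    return data, again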
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/core/sqlbase.py
|
sqlbase.py
|
from logging import getLogger
from sqlalchemy import func, select
from sqlalchemy.orm.exc import NoResultFound
from anyblok import Declarations
from anyblok.common import anyblok_column_prefix
logger = getLogger(__name__)
@Declarations.register(Declarations.Core)
class Query:
"""Overload the SqlAlchemy Query"""
def __init__(self, Model, *elements, sql_statement=None):
self.Model = Model
self.elements = elements
self.sql_statement = sql_statement
if sql_statement is None:
self.sql_statement = Model.select_sql_statement(*elements)
def __getattr__(self, key, default=None):
sqla_function = getattr(self.sql_statement, key)
def wrapper(*args, **kwargs):
statement = sqla_function(*args, **kwargs)
return self.anyblok.Query(
self.Model, *self.elements, sql_statement=statement
)
return wrapper
def __iter__(self):
for res in self._execute():
yield res
def __str__(self):
return str(self.sql_statement)
def __repr__(self):
return str(self.sql_statement)
def _execute(self):
res = self.Model.execute(self.sql_statement)
if self.elements:
return res
return res.scalars()
@property
def column_descriptions(self):
return self.sql_statement.column_descriptions
def count(self):
stmt = select(func.count())
stmt = stmt.select_from(self.sql_statement.subquery())
return self.Model.execute(stmt).scalars().first()
def delete(self, *args, **kwargs):
raise NotImplementedError( # pragma: no cover
"You have to use Model.delete_sql_statement()"
)
def update(self, *args, **kwargs):
raise NotImplementedError( # pragma: no cover
"You have to use Model.update_sql_statement()"
)
def first(self):
try:
return self._execute().first()
except NoResultFound as exc: # pragma: no cover
logger.debug(
"On Model %r: exc %s: query %s",
self.Model.__registry_name__,
str(exc),
str(self),
)
raise exc.__class__(
"On Model %r: %s" % (self.Model.__registry_name__, str(exc))
)
def one(self):
"""Overwrite sqlalchemy one() method to improve exception message
Add model name to query exception message
"""
try:
return self._execute().one()
except NoResultFound as exc:
logger.debug(
"On Model %r: exc %s: query %s",
self.Model.__registry_name__,
str(exc),
str(self),
)
raise exc.__class__( # pragma: no cover
"On Model %r: %s" % (self.Model.__registry_name__, str(exc))
)
def one_or_none(self):
return self._execute().one_or_none()
def all(self):
"""Return an instrumented list of the result of the query"""
res = self._execute().all()
return self.anyblok.InstrumentedList(res)
def with_perm(self, principals, permission):
"""Add authorization pre- and post-filtering to query.
This must be last in the construction chain of the query.
Queries too complicated for the authorization system to infer
safely will be refused.
:param principals: list, set or tuple of strings
:param str permission: the permission to filter for
:returns: a query-like object, with only the returning methods, such
as ``all()``, ``count()`` etc. available.
"""
return self.anyblok.wrap_query_permission(self, principals, permission)
def get_field_names_in_column_description(self):
field2get = [
x["name"]
for x in self.column_descriptions
if not hasattr(x["type"], "__table__")
]
field2get = [
(
x[len(anyblok_column_prefix) :]
if x.startswith(anyblok_column_prefix)
else x,
x,
)
for x in field2get
]
return field2get
def dictone(self):
try:
val = self.one()
except NoResultFound as exc:
msg = str(exc).replace("one()", "dictone()")
raise exc.__class__(msg)
field2get = self.get_field_names_in_column_description()
if field2get:
return {x: getattr(val, y) for x, y in field2get}
else:
return val.to_dict()
def dictfirst(self):
val = self.first()
field2get = self.get_field_names_in_column_description()
if field2get:
return {x: getattr(val, y) for x, y in field2get}
else:
return val.to_dict()
def dictall(self):
vals = self.all()
if not vals:
return [] # pragma: no cover
field2get = self.get_field_names_in_column_description()
if field2get:
return [{x: getattr(y, z) for x, z in field2get} for y in vals]
else:
return vals.to_dict()
def get(self, primary_keys=None, **kwargs):
"""Return instance of the Model
::
instance = Model.query().get(the primary key value)
or
::
instance Model.query().get(pk1 name=pk1 value, ...)
"""
if primary_keys is None:
primary_keys = kwargs
if isinstance(primary_keys, dict):
primary_keys = {
(
anyblok_column_prefix + k
if k in self.Model.hybrid_property_columns
else k
): v
for k, v in primary_keys.items()
}
return self.anyblok.session.get(self.Model, primary_keys)
def subquery(self, *args, **kwargs):
return self.sql_statement.subquery(*args, **kwargs)
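# Usage sketch (editor's addition, not part of the original module): using the
# Query wrapper above, which proxies unknown attributes (filter, order_by,
# limit, ...) to the underlying SQLAlchemy select statement.  Assumes a
# configured registry and a hypothetical ``Model.Example`` with a ``name``
# column.
def _example_query_usage(registry):
    Example = registry.get("Model.Example")
    query = Example.query().filter(Example.name.like("a%"))
    total = query.count()            # SELECT count(*) over the statement
    as_dicts = query.dictall()       # list of dicts, one per matching record
    by_pk = Example.query().get(1)   # fetch directly by primary key value
    return total, as_dicts, by_pk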
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/bloks/anyblok_core/core/query.py
|
query.py
|
from anyblok.blok import BlokManager
from anyblok.model import autodoc_fields
from anyblok.release import version
from sphinx.ext.autodoc import ClassDocumenter, MethodDocumenter
from sphinx.util.docstrings import prepare_docstring
def autodoc_registration(declaration, cls):
res = [
"**AnyBlok registration**:",
"",
"- Type: " + declaration.__declaration_type__,
"- Registry name: " + cls.__registry_name__,
]
if getattr(declaration, "autodoc_anyblok_kwargs", False):
res.extend(
"- %s: %s" % (x.replace("_", " ").strip().capitalize(), y)
for x, y in cls.__anyblok_kwargs__.items()
if x != "__registry_name__"
)
if getattr(declaration, "autodoc_anyblok_bases", False):
ab_bases = cls.__anyblok_bases__
if ab_bases:
res.extend(["- Inherited Models or Mixins:", ""])
res.extend(
" * :class:`%s.%s`" % (c.__module__, c.__name__)
for c in ab_bases
)
res.append("")
res.extend(("", ""))
return "\n".join(res)
def default_autodoc_method(declaration):
def wrapper(cls, meth_name, meth):
return None
return wrapper
class AnyBlokDeclarationDocumenter(ClassDocumenter):
objtype = "anyblok-declaration"
directivetype = "class"
def get_doc(self, *args, **kwargs):
ignore = kwargs.get("ignore", 1) or 1
lines = getattr(self, "_new_docstrings", None)
if lines is not None:
return lines
doc = super(AnyBlokDeclarationDocumenter, self).get_doc(
*args, ignore=ignore, **kwargs
)
registry_name = self.get_attr(self.object, "__registry_name__", None)
declaration = self.get_attr(self.object, "__declaration__", None)
if registry_name and declaration:
autodoc = self.get_attr(declaration, "autodoc_class", None)
if autodoc is not None:
docstrings = autodoc(self.object)
else:
docstrings = autodoc_registration(declaration, self.object)
if getattr(declaration, "autodoc_anyblok_fields", False):
docstrings += autodoc_fields(declaration, self.object)
if docstrings:
doc.append(prepare_docstring(docstrings, ignore))
return doc
class AnyBlokMethodDocumenter(MethodDocumenter):
def get_doc(self, *args, **kwargs):
ignore = kwargs.get("ignore", 1) or 1
lines = getattr(self, "_new_docstrings", None)
if lines is not None:
return lines
doc = super(AnyBlokMethodDocumenter, self).get_doc(
*args, ignore=ignore, **kwargs
)
autodocs = self.get_attr(self.object, "autodocs", [])
for autodoc in autodocs:
doc.append(prepare_docstring(autodoc, ignore))
return doc
def setup(app):
BlokManager.load()
app.add_autodocumenter(AnyBlokDeclarationDocumenter)
app.add_autodocumenter(AnyBlokMethodDocumenter)
return {"version": version, "parallel_read_safe": True}
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/sphinx/ext/load/blok.py
|
blok.py
|
from anyblok.mapper import ModelAttribute, ModelMapper
from .plugins import ModelPluginBase
class ORMEventException(Exception):
pass
class EventPlugin(ModelPluginBase):
def __init__(self, registry):
if not hasattr(registry, "events"):
registry.events = {}
super(EventPlugin, self).__init__(registry)
def transform_base_attribute(
self,
attr,
method,
namespace,
base,
transformation_properties,
new_type_properties,
):
"""Find the event listener methods in the base to save the
namespace and the method in the registry
:param attr: attribute name
:param method: method pointer of the attribute
:param namespace: the namespace of the model
:param base: One of the base of the model
:param transformation_properties: the properties of the model
:param new_type_properties: params to add in a new base if needed
"""
if not hasattr(method, "is_an_event_listener"):
return
elif method.is_an_event_listener is True:
model = method.model
event = method.event
events = self.registry.events
if model not in events:
events[model] = {event: []}
elif event not in events[model]:
events[model][event] = [] # pragma: no cover
val = (namespace, attr)
ev = events[model][event]
if val not in ev:
ev.append(val)
class SQLAlchemyEventPlugin(ModelPluginBase):
def transform_base_attribute(
self,
attr,
method,
namespace,
base,
transformation_properties,
new_type_properties,
):
"""declare in the registry the sqlalchemy event
:param attr: attribute name
:param method: method pointer of the attribute
:param namespace: the namespace of the model
:param base: One of the base of the model
:param transformation_properties: the properties of the model
:param new_type_properties: params to add in a new base if needed
"""
if not hasattr(method, "is_an_sqlalchemy_event_listener"):
return
elif method.is_an_sqlalchemy_event_listener is True:
self.registry._sqlalchemy_known_events.append(
(
method.sqlalchemy_listener,
namespace,
ModelAttribute(namespace, attr),
)
)
class AutoSQLAlchemyORMEventPlugin(ModelPluginBase):
def after_model_construction(
self, base, namespace, transformation_properties
):
for eventtype in (
"before_insert",
"after_insert",
"before_update",
"after_update",
"before_delete",
"after_delete",
):
attr = eventtype + "_orm_event"
if hasattr(base, attr):
if not hasattr(getattr(base, attr), "__self__"):
raise ORMEventException(
"On %s %s is not a classmethod" % (base, attr)
)
self.registry._sqlalchemy_known_events.append(
(
ModelMapper(base, eventtype),
namespace,
ModelAttribute(namespace, attr),
)
)
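# Illustration sketch (editor's addition, not part of the original module):
# the attributes that EventPlugin.transform_base_attribute looks for on a
# method.  In real code they are set by an AnyBlok decorator; they are set by
# hand here only to make the contract explicit.  Model and event names are
# illustrative.
def _example_listener_contract():
    def on_state_changed(cls, *args, **kwargs):
        """React to the 'state_changed' event fired on Model.Example."""

    on_state_changed.is_an_event_listener = True
    on_state_changed.model = "Model.Example"
    on_state_changed.event = "state_changed"
    return on_state_changed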
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/event.py
|
event.py
|
from anyblok.common import apply_cache
from .plugins import ModelPluginBase
class CachePlugin(ModelPluginBase):
def __init__(self, registry):
if not hasattr(registry, "caches"):
registry.caches = {}
super(CachePlugin, self).__init__(registry)
def insert_in_bases(
self, new_base, namespace, properties, transformation_properties
):
"""Create overload to define the cache from __depends__.
Because the cache is defined on the depend models and this namespace
does not exist in caches dict
:param new_base: the base to be put on front of all bases
:param namespace: the namespace of the model
:param properties: the properties declared in the model
:param transformation_properties: the properties of the model
"""
for dep in properties["__depends__"]:
if dep in self.registry.caches:
cache = self.registry.caches.setdefault(namespace, {})
for method_name, methods in self.registry.caches[dep].items():
entry = cache.setdefault(method_name, [])
entry.extend(methods)
return {}
def transform_base_attribute(
self,
attr,
method,
namespace,
base,
transformation_properties,
new_type_properties,
):
"""Find the sqlalchemy hybrid methods in the base to save the
namespace and the method in the registry
:param attr: attribute name
:param method: method pointer of the attribute
:param namespace: the namespace of the model
:param base: One of the base of the model
:param transformation_properties: the properties of the model
:param new_type_properties: params to add in a new base if needed
"""
new_type_properties.update(
apply_cache(
attr,
method,
self.registry,
namespace,
base,
transformation_properties,
)
)
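# Usage sketch (editor's addition, not part of the original module): the kind
# of declaration CachePlugin picks up, using the ``classmethod_cache``
# decorator already used elsewhere in this package.  The Model below is
# illustrative and would normally live inside a blok.
def _example_cached_declaration():
    from anyblok import Declarations
    from anyblok.declarations import classmethod_cache

    @Declarations.register(Declarations.Model)
    class ExampleCached:
        @classmethod_cache()
        def expensive_lookup(cls):
            # once the Model is assembled, CachePlugin registers this method
            # in registry.caches; the result is cached until invalidated via
            # registry.System.Cache
            return {"computed": True}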
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/cache.py
|
cache.py
|
from logging import getLogger
from pkg_resources import iter_entry_points
logger = getLogger(__name__)
def get_model_plugins(registry):
res = []
for i in iter_entry_points("anyblok.model.plugin"):
logger.info("AnyBlok Load model plugin: %r" % i)
res.append(i.load()(registry))
return res
class ModelPluginBase:
def __init__(self, registry):
self.registry = registry
# def initialisation_tranformation_properties(self, properties,
# transformation_properties):
# """ Initialise the transform properties
# :param properties: the properties declared in the model
# :param new_type_properties: params to add in a new base if needed
# """
# def declare_field(self, name, field, namespace, properties,
# transformation_properties):
# """Declare a field in the model
# :param name: field name
# :param field: field instance
# :param namespace: the namespace of the model
# :param properties: the properties of the model
# :param transformation_properties: the transformation properties
# """
# def transform_base_attribute(self, attr, method, namespace, base,
# transformation_properties,
# new_type_properties):
# """ transform the attribute for the final Model
# :param attr: attribute name
# :param method: method pointer of the attribute
# :param namespace: the namespace of the model
# :param base: One of the base of the model
# :param transformation_properties: the properties of the model
# :param new_type_properties: params to add in a new base if needed
# """
# def transform_base(self, namespace, base,
# transformation_properties,
# new_type_properties):
# """ transform the base for the final Model
# :param namespace: the namespace of the model
# :param base: One of the base of the model
# :param transformation_properties: the properties of the model
# :param new_type_properties: params to add in a new base if needed
# """
# def insert_in_bases(self, new_base, namespace, properties,
# transformation_properties):
# """Insert in a base the overload
# :param new_base: the base to be put on front of all bases
# :param namespace: the namespace of the model
# :param properties: the properties declared in the model
# :param transformation_properties: the properties of the model
# """
# def after_model_construction(self, base, namespace,
# transformation_properties):
# """Do some action with the constructed Model
# :param base: the Model class
# :param namespace: the namespace of the model
# :param transformation_properties: the properties of the model
# """
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/plugins.py
|
plugins.py
|
import warnings
from sqlalchemy import CheckConstraint, ForeignKeyConstraint
from sqlalchemy.exc import NoInspectionAvailable
from sqlalchemy.ext.declarative import declared_attr
from ..common import sgdb_in
from .exceptions import ModelException
from .plugins import ModelPluginBase
def check_deprecated_foreign_keys(res):
for entry in res:
if isinstance(entry, ForeignKeyConstraint):
table = entry.elements[0].target_fullname.split(".")[0]
if table in (
"system_model",
"system_field",
"system_column",
"system_relationship",
):
warnings.warn(
f"A foreign key to {table} is depecated becauses this "
"Model will be removed",
DeprecationWarning,
stacklevel=2,
)
return res
class TableMapperPlugin(ModelPluginBase):
def initialisation_tranformation_properties(
self, properties, transformation_properties
):
"""Initialise the transform properties: hybrid_method
:param new_type_properties: param to add in a new base if need
"""
properties["add_in_table_args"] = []
if "table_args" not in transformation_properties:
transformation_properties["table_args"] = False
transformation_properties["table_kwargs"] = False
if "mapper_args" not in transformation_properties:
transformation_properties["mapper_args"] = False
def transform_base(
self, namespace, base, transformation_properties, new_type_properties
):
"""Test if define_table/mapper_args are in the base, and call them
save the value in the properties
if __table/mapper_args__ are in the base then raise ModelException
:param namespace: the namespace of the model
:param base: One of the base of the model
:param transformation_properties: the properties of the model
:param new_type_properties: params to add in a new base if needed
"""
if hasattr(base, "__table_args__"):
raise ModelException(
"'__table_args__' attribute is forbidden, on Model : %r (%r)."
"Use the class method 'define_table_args' to define the value "
"allow anyblok to fill his own '__table_args__' attribute"
% (namespace, base.__table_args__)
)
if hasattr(base, "__mapper_args__"):
raise ModelException(
"'__mapper_args__' attribute is forbidden, on Model : %r (%r)."
"Use the class method 'define_mapper_args' to define the "
"value allow anyblok to fill his own '__mapper_args__' "
"attribute" % (namespace, base.__mapper_args__)
)
if hasattr(base, "define_table_args"):
transformation_properties["table_args"] = True
if hasattr(base, "define_table_kwargs"):
transformation_properties["table_kwargs"] = True
if hasattr(base, "define_mapper_args"):
transformation_properties["mapper_args"] = True
def insert_in_bases(
self, new_base, namespace, properties, transformation_properties
):
"""Create overwrite to define table and mapper args to define some
options for SQLAlchemy
:param new_base: the base to be put on front of all bases
:param namespace: the namespace of the model
:param properties: the properties declared in the model
:param transformation_properties: the properties of the model
"""
table_args = tuple(properties["add_in_table_args"])
if table_args:
new_base.define_table_args = self.define_table_args(
new_base, namespace, table_args
)
transformation_properties["table_args"] = True
if transformation_properties["table_kwargs"] is True:
if sgdb_in(self.registry.engine, ["MySQL", "MariaDB"]):
new_base.define_table_kwargs = self.define_table_kwargs(
new_base, namespace
)
self.insert_in_bases_table_args(new_base, transformation_properties)
self.insert_in_bases_mapper_args(new_base, transformation_properties)
def define_table_args(self, new_base, namespace, table_args):
"""
:param new_base: the base to be put on front of all bases
:param namespace: the namespace of the model
"""
def fnct(cls_):
if cls_.__registry_name__ == namespace:
res = super(new_base, cls_).define_table_args()
fks = [
x.name for x in res if isinstance(x, ForeignKeyConstraint)
]
t_args = []
for field in table_args:
for constraint in field.update_table_args(
self.registry, cls_
):
if (
not isinstance(constraint, ForeignKeyConstraint)
or constraint.name not in fks
):
t_args.append(constraint)
elif isinstance(constraint, CheckConstraint):
t_args.append(constraint) # pragma: no cover
return res + tuple(t_args)
return ()
return classmethod(fnct)
def define_table_kwargs(self, new_base, namespace):
"""
:param new_base: the base to be put on front of all bases
:param namespace: the namespace of the model
"""
def fnct(cls_):
res = {}
if cls_.__registry_name__ == namespace:
res = super(new_base, cls_).define_table_kwargs()
res.update(dict(mysql_engine="InnoDB", mysql_charset="utf8"))
return res
return classmethod(fnct)
def insert_in_bases_table_args(self, new_base, transformation_properties):
"""Add table __table_args__ in new_base
:param new_base: the base to be put on front of all bases
:param transformation_properties: the properties of the model
"""
if (
transformation_properties["table_args"]
and transformation_properties["table_kwargs"]
):
def __table_args__(cls_):
try:
res = cls_.define_table_args() + (
cls_.define_table_kwargs(),
)
except NoInspectionAvailable: # pragma: no cover
raise ModelException(
"A Index or constraint on the model "
f'"{cls_.__registry_name__}" if defined with SQLAlchemy'
"class use the anyblok Index or constraint"
)
return check_deprecated_foreign_keys(res)
new_base.__table_args__ = declared_attr(__table_args__)
elif transformation_properties["table_args"]:
def __table_args__(cls_):
return check_deprecated_foreign_keys(cls_.define_table_args())
new_base.__table_args__ = declared_attr(__table_args__)
elif transformation_properties["table_kwargs"]: # pragma: no cover
def __table_args__(cls_):
return check_deprecated_foreign_keys(cls_.define_table_kwargs())
new_base.__table_args__ = declared_attr(__table_args__)
def insert_in_bases_mapper_args(self, new_base, transformation_properties):
"""Add table __mapper_args__ in new_base
:param new_base: the base to be put on front of all bases
:param transformation_properties: the properties of the model
"""
if transformation_properties["mapper_args"]:
def __mapper_args__(cls_):
res = cls_.define_mapper_args()
if "polymorphic_on" in res and res["polymorphic_on"]:
column = res["polymorphic_on"]
res["polymorphic_on"] = column.descriptor.sqla_column
return res
new_base.__mapper_args__ = declared_attr(__mapper_args__)
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/table_and_mapper.py
|
table_and_mapper.py
|
import inspect
from copy import deepcopy
from sqlalchemy import inspection
from sqlalchemy.orm import declared_attr
from texttable import Texttable
from anyblok import Declarations
from anyblok.column import Column
from anyblok.common import TypeList, anyblok_column_prefix
from anyblok.field import Field, FieldException
from anyblok.mapper import ModelAttribute, format_schema
from anyblok.registry import RegistryManager
from anyblok.relationship import RelationShip
from .exceptions import ModelException
from .factory import ModelFactory, has_sql_fields
from .plugins import get_model_plugins
def has_sqlalchemy_fields(base):
for p in base.__dict__.keys():
attr = base.__dict__[p]
if inspection.inspect(attr, raiseerr=False) is not None:
return True
return False
def is_in_mro(cls, attr):
return cls in attr.__class__.__mro__
def get_fields(
base,
without_relationship=False,
only_relationship=False,
without_column=False,
):
"""Return the fields for a model
:param base: Model Class
:param without_relationship: Do not return the relationship field
:param only_relationship: return only the relationship field
:param without_column: Do not return the column field
:rtype: dict with name of the field in key and instance of Field in value
"""
fields = {}
for p in base.__dict__:
if p.startswith("__"):
continue
try:
attr = getattr(base, p)
if hasattr(attr, "__class__"):
if without_relationship and is_in_mro(RelationShip, attr):
continue
if without_column and is_in_mro(Column, attr):
continue
if only_relationship and not is_in_mro(RelationShip, attr):
continue
if is_in_mro(Field, attr):
fields[p] = attr
except FieldException: # pragma: no cover
pass
return fields
def autodoc_fields(declaration_cls, model_cls): # pragma: no cover
"""Produces autodocumentation table for the fields.
Exposed as a function in order to be reusable by a simple export,
e.g., from anyblok.mixin.
"""
if not has_sql_fields([model_cls]):
return ""
rows = [["Fields", ""]]
rows.extend([x, y.autodoc()] for x, y in get_fields(model_cls).items())
table = Texttable(max_width=0)
table.set_cols_valign(["m", "t"])
table.add_rows(rows)
return table.draw() + "\n\n"
def update_factory(kwargs):
if "factory" in kwargs:
kwargs["__model_factory__"] = kwargs.pop("factory")
@Declarations.add_declaration_type(
isAnEntry=True,
pre_assemble="pre_assemble_callback",
assemble="assemble_callback",
initialize="initialize_callback",
)
class Model:
"""The Model class is used to define or inherit an SQL table.
Add new model class::
@Declarations.register(Declarations.Model)
class MyModelclass:
pass
Remove a model class::
Declarations.unregister(Declarations.Model.MyModelclass,
MyModelclass)
There are three Model families:
    * Non-SQL Model: these models have no fields, and therefore no table
* SQL Model:
    * SQL View Model: it is a model mapped on a SQL view; the insert, update
      and delete methods are forbidden by the database
Each model has a:
    * registry name: composed of the parent + '.' + the model class name
    * table name: composed of the parent + '_' + the model class name
    The table name can be overridden with the ``tablename`` attribute; the
    expected value is either a string (the name of the table) or a model
    from the declaration.
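    For example, one possible way to force the table name, based on the
    ``tablename`` keyword handled by the ``register`` method below::
        @Declarations.register(Declarations.Model, tablename='my_table')
        class MyModelclass:
            pass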
    .. warning::
        Two models can have the same table name; both models are then mapped
        on the same table, but they must define the same columns.
"""
autodoc_anyblok_kwargs = True
autodoc_anyblok_bases = True
autodoc_anyblok_fields = True
@classmethod
def pre_assemble_callback(cls, registry):
plugins_by = {}
for plugin in get_model_plugins(registry):
for attr, func in inspect.getmembers(
plugin, predicate=inspect.ismethod
):
by = plugins_by.setdefault(attr, [])
by.append(func)
def call_plugins(method, *args, **kwargs):
"""call the method on each plugin"""
for func in plugins_by.get(method, []):
func(*args, **kwargs)
registry.call_plugins = call_plugins
@classmethod
def register(self, parent, name, cls_, **kwargs):
"""add new sub registry in the registry
:param parent: Existing global registry
:param name: Name of the new registry to add it
:param cls_: Class Interface to add in registry
"""
_registryname = parent.__registry_name__ + "." + name
if "tablename" in kwargs:
tablename = kwargs.pop("tablename")
if not isinstance(tablename, str):
tablename = tablename.__tablename__
elif hasattr(parent, name):
tablename = getattr(parent, name).__tablename__
else:
if parent is Declarations or parent is Declarations.Model:
tablename = name.lower()
elif hasattr(parent, "__tablename__"):
tablename = parent.__tablename__
tablename += "_" + name.lower()
if not hasattr(parent, name):
p = {
"__tablename__": tablename,
"__registry_name__": _registryname,
"use": lambda x: ModelAttribute(_registryname, x),
}
ns = type(name, tuple(), p)
setattr(parent, name, ns)
if parent is Declarations:
return # pragma: no cover
kwargs["__registry_name__"] = _registryname
kwargs["__tablename__"] = tablename
update_factory(kwargs)
RegistryManager.add_entry_in_register(
"Model", _registryname, cls_, **kwargs
)
setattr(cls_, "__anyblok_kwargs__", kwargs)
@classmethod
def unregister(self, entry, cls_):
"""Remove the Interface from the registry
:param entry: entry declaration of the model where the ``cls_``
must be removed
:param cls_: Class Interface to remove in registry
"""
RegistryManager.remove_in_register(cls_)
@classmethod
def declare_field(
cls,
registry,
name,
field,
namespace,
properties,
transformation_properties,
):
"""Declare the field/column/relationship to put in the properties
of the model
:param registry: the current registry
:param name: name of the field / column or relationship
:param field: the declaration field / column or relationship
:param namespace: the namespace of the model
:param properties: the properties of the model
"""
if name in properties["loaded_columns"]:
return
if field.must_be_copied_before_declaration():
field = deepcopy(field)
attr_name = name
if field.use_hybrid_property:
attr_name = anyblok_column_prefix + name
if field.must_be_declared_as_attr():
            # All the declarations are seen as mixins by SQLAlchemy;
            # some of them need to be deferred at initialisation
            # because of mixins such as relationships and columns with
            # foreign keys
def wrapper(cls):
return field.get_sqlalchemy_mapping(
registry, namespace, name, properties
)
properties[attr_name] = declared_attr(wrapper)
properties[attr_name].anyblok_field = field
else:
properties[attr_name] = field.get_sqlalchemy_mapping(
registry, namespace, name, properties
)
if field.use_hybrid_property:
properties[name] = field.get_property(
registry, namespace, name, properties
)
properties[name].sqla_column = properties[attr_name]
properties["hybrid_property_columns"].append(name)
def field_description():
return registry.get(namespace).fields_description(name)[name]
def from_model():
return registry.get(namespace)
properties[name].anyblok_field_name = name
properties[name].anyblok_registry_name = namespace
properties[name].field_description = field_description
properties[name].from_model = from_model
registry.call_plugins(
"declare_field",
name,
field,
namespace,
properties,
transformation_properties,
)
properties["loaded_columns"].append(name)
field.update_properties(registry, namespace, name, properties)
@classmethod
def transform_base(cls, registry, namespace, base, properties):
"""Detect specific declaration which must define by registry
:param registry: the current registry
:param namespace: the namespace of the model
:param base: One of the base of the model
:param properties: the properties of the model
:rtype: new base
"""
new_type_properties = {}
for attr, method in inspect.getmembers(base):
if attr in ("registry", "anyblok", "_sa_registry"):
continue
if attr.startswith("__"):
continue
registry.call_plugins(
"transform_base_attribute",
attr,
method,
namespace,
base,
properties,
new_type_properties,
)
registry.call_plugins(
"transform_base", namespace, base, properties, new_type_properties
)
if new_type_properties:
return [type(namespace, (), new_type_properties), base]
return [base]
@classmethod
def insert_in_bases(
cls, registry, namespace, bases, transformation_properties, properties
):
"""Add in the declared namespaces new base.
:param registry: the current registry
:param namespace: the namespace of the model
:param base: One of the base of the model
:param transformation_properties: the properties of the model
:param properties: assembled attributes of the namespace
"""
new_base = type(namespace, (), {})
bases.insert(0, new_base)
registry.call_plugins(
"insert_in_bases",
new_base,
namespace,
properties,
transformation_properties,
)
@classmethod
def raise_if_has_sqlalchemy(cls, base):
if has_sqlalchemy_fields(base):
raise ModelException(
"the base %r have an SQLAlchemy attribute" % base
)
@classmethod
def load_namespace_first_step(cls, registry, namespace):
"""Return the properties of the declared bases for a namespace.
        This is the first step because some actions need to know all the
        properties
:param registry: the current registry
:param namespace: the namespace of the model
:rtype: dict of the known properties
"""
if namespace in registry.loaded_namespaces_first_step:
return registry.loaded_namespaces_first_step[namespace]
properties = {
"__depends__": set(),
"__db_schema__": format_schema(None, namespace),
}
ns = registry.loaded_registries[namespace]
for b in ns["bases"]:
cls.raise_if_has_sqlalchemy(b)
for b_ns in b.__anyblok_bases__:
if b_ns.__registry_name__.startswith("Model."):
properties["__depends__"].add(b_ns.__registry_name__)
ps = cls.load_namespace_first_step(
registry, b_ns.__registry_name__
)
ps = ps.copy()
ps.update(properties)
properties.update(ps)
fields = get_fields(b)
for p, f in fields.items():
if p not in properties:
properties[p] = f
if hasattr(b, "__db_schema__"):
properties["__db_schema__"] = format_schema(
b.__db_schema__, namespace
)
if "__tablename__" in ns["properties"]:
properties["__tablename__"] = ns["properties"]["__tablename__"]
registry.loaded_namespaces_first_step[namespace] = properties
return properties
@classmethod
def apply_inheritance_base(
cls,
registry,
namespace,
ns,
bases,
realregistryname,
properties,
transformation_properties,
):
        # remove duplicates
for b in ns["bases"]:
if b in bases:
continue
kwargs = {"namespace": realregistryname} if realregistryname else {}
bases.append(b, **kwargs)
if b.__doc__ and "__doc__" not in properties:
properties["__doc__"] = b.__doc__
for b_ns in b.__anyblok_bases__:
brn = b_ns.__registry_name__
if brn in registry.loaded_registries["Mixin_names"]:
tp = transformation_properties
if realregistryname:
bs, ps = cls.load_namespace_second_step(
registry,
brn,
realregistryname=realregistryname,
transformation_properties=tp,
)
else:
bs, ps = cls.load_namespace_second_step(
registry,
brn,
realregistryname=namespace,
transformation_properties=tp,
)
elif brn in registry.loaded_registries["Model_names"]:
bs, ps = cls.load_namespace_second_step(registry, brn)
else:
raise ModelException( # pragma: no cover
"You have not to inherit the %r "
"Only the 'Mixin' and %r types are allowed"
% (brn, cls.__name__)
)
bases += bs
@classmethod
def init_core_properties_and_bases(cls, registry, bases, properties):
properties["loaded_columns"] = []
properties["hybrid_property_columns"] = []
properties["loaded_fields"] = {}
properties["__model_factory__"].insert_core_bases(bases, properties)
@classmethod
def declare_all_fields(
cls, registry, namespace, bases, properties, transformation_properties
):
        # declare the fields and columns first, because for a relationship
        # on the same model the primary keys must exist before the
        # relationship; load all the bases before doing the relationships
        # because a primary key can come from an inherited base
for b in bases:
for p, f in get_fields(b, without_relationship=True).items():
cls.declare_field(
registry,
p,
f,
namespace,
properties,
transformation_properties,
)
for b in bases:
for p, f in get_fields(b, only_relationship=True).items():
cls.declare_field(
registry,
p,
f,
namespace,
properties,
transformation_properties,
)
@classmethod
def apply_existing_table(
cls,
registry,
namespace,
tablename,
properties,
bases,
transformation_properties,
):
if "__tablename__" in properties:
del properties["__tablename__"]
for t in registry.loaded_namespaces.keys():
m = registry.loaded_namespaces[t]
if m.is_sql:
if getattr(m, "__tablename__"):
if m.__tablename__ == tablename:
properties["__table__"] = m.__table__
tablename = namespace.replace(".", "_").lower()
for b in bases:
for p, f in get_fields(
b, without_relationship=True, without_column=True
).items():
cls.declare_field(
registry,
p,
f,
namespace,
properties,
transformation_properties,
)
@classmethod
def load_namespace_second_step(
cls,
registry,
namespace,
realregistryname=None,
transformation_properties=None,
):
"""Return the bases and the properties of the namespace
:param registry: the current registry
:param namespace: the namespace of the model
:param realregistryname: the name of the model if the namespace is a
mixin
        :rtype: the list of the bases and the properties
:exception: ModelException
"""
if namespace in registry.loaded_namespaces:
return [registry.loaded_namespaces[namespace]], {}
if transformation_properties is None:
transformation_properties = {}
bases = TypeList(cls, registry, namespace, transformation_properties)
ns = registry.loaded_registries[namespace]
properties = ns["properties"].copy()
first_step = registry.loaded_namespaces_first_step[namespace]
properties["__depends__"] = first_step["__depends__"]
properties["__db_schema__"] = first_step.get("__db_schema__", None)
registry.call_plugins(
"initialisation_tranformation_properties",
properties,
transformation_properties,
)
properties["__model_factory__"] = properties.get(
"__model_factory__", ModelFactory
)(registry)
cls.apply_inheritance_base(
registry,
namespace,
ns,
bases,
realregistryname,
properties,
transformation_properties,
)
if namespace in registry.loaded_registries["Model_names"]:
tablename = properties["__tablename__"]
modelname = namespace.replace(".", "")
cls.init_core_properties_and_bases(registry, bases, properties)
if tablename in registry.declarativebase.metadata.tables:
cls.apply_existing_table(
registry,
namespace,
tablename,
properties,
bases,
transformation_properties,
)
else:
cls.declare_all_fields(
registry,
namespace,
bases,
properties,
transformation_properties,
)
bases.append(registry.registry_base)
cls.insert_in_bases(
registry,
namespace,
bases,
transformation_properties,
properties,
)
bases = [
properties["__model_factory__"].build_model(
modelname, bases, properties
)
]
properties = {}
registry.add_in_registry(namespace, bases[0])
registry.loaded_namespaces[namespace] = bases[0]
registry.call_plugins(
"after_model_construction",
bases[0],
namespace,
transformation_properties,
)
return bases, properties
@classmethod
def assemble_callback(cls, registry):
"""Assemble callback is called to assemble all the Model
from the installed bloks
:param registry: registry to update
"""
registry.loaded_namespaces_first_step = {}
registry.loaded_views = {}
# get all the information to create a namespace
for namespace in registry.loaded_registries["Model_names"]:
cls.load_namespace_first_step(registry, namespace)
        # create the namespace with all the information coming from the
        # first step
for namespace in registry.loaded_registries["Model_names"]:
cls.load_namespace_second_step(registry, namespace)
@classmethod
def initialize_callback(cls, registry):
"""initialize callback is called after assembling all entries
This callback updates the database information about
* Model
* Column
* RelationShip
:param registry: registry to update
"""
for Model in registry.loaded_namespaces.values():
Model.initialize_model()
if not registry.loadwithoutmigration:
Model.clear_all_model_caches()
if registry.loadwithoutmigration:
return False
Blok = registry.System.Blok
if not registry.withoutautomigration:
Model = registry.System.Model
Model.update_list()
registry.update_blok_list()
bloks = Blok.list_by_state("touninstall")
Blok.uninstall_all(*bloks)
return Blok.apply_state(*registry.ordered_loaded_bloks)
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/__init__.py
|
__init__.py
|
from sqlalchemy.ext.hybrid import hybrid_method
from .plugins import ModelPluginBase
class HybridMethodPlugin(ModelPluginBase):
def initialisation_tranformation_properties(
self, properties, transformation_properties
):
"""Initialise the transform properties: hybrid_method
:param properties: the properties declared in the model
        :param transformation_properties: the properties of the model
"""
if "hybrid_method" not in transformation_properties:
transformation_properties["hybrid_method"] = []
def transform_base_attribute(
self,
attr,
method,
namespace,
base,
transformation_properties,
new_type_properties,
):
"""Find the sqlalchemy hybrid methods in the base to save the
namespace and the method in the registry
:param attr: attribute name
:param method: method pointer of the attribute
:param namespace: the namespace of the model
:param base: One of the base of the model
:param transformation_properties: the properties of the model
:param new_type_properties: param to add in a new base if need
"""
if not hasattr(method, "is_an_hybrid_method"):
return
elif method.is_an_hybrid_method is True:
if attr not in transformation_properties["hybrid_method"]:
transformation_properties["hybrid_method"].append(attr)
def insert_in_bases(
self, new_base, namespace, properties, transformation_properties
):
"""Create overload to define the write declaration of sqlalchemy
hybrid method, add the overload in the declared bases of the
namespace
:param new_base: the base to be put on front of all bases
:param namespace: the namespace of the model
:param properties: the properties declared in the model
:param transformation_properties: the properties of the model
"""
type_properties = {}
def apply_wrapper(attr):
def wrapper(self, *args, **kwargs):
self_ = self.anyblok.loaded_namespaces[self.__registry_name__]
if self is self_:
return getattr(super(new_base, self), attr)(
self, *args, **kwargs
)
elif hasattr(self, "_aliased_insp"):
return getattr(
super(new_base, self._aliased_insp._target), attr
)(self, *args, **kwargs)
else:
return getattr(super(new_base, self), attr)(*args, **kwargs)
setattr(new_base, attr, hybrid_method(wrapper))
if transformation_properties["hybrid_method"]:
for attr in transformation_properties["hybrid_method"]:
apply_wrapper(attr)
return type_properties
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/hybrid_method.py
|
hybrid_method.py
|
from sqlalchemy import and_, event, table
from sqlalchemy.orm import Query, relationship
from sqlalchemy_views import CreateView, DropView
from anyblok.common import anyblok_column_prefix
from anyblok.field import Field, FieldException
from .exceptions import ModelFactoryException, ViewException
def has_sql_fields(bases):
"""Tells whether the model as field or not
:param bases: list of Model's Class
:rtype: boolean
"""
for base in bases:
for p in base.__dict__.keys():
try:
if hasattr(getattr(base, p), "__class__"):
if Field in getattr(base, p).__class__.__mro__:
return True
except FieldException: # pragma: no cover
# field function case already computed
return True
return False
class BaseFactory:
def __init__(self, registry):
self.registry = registry
def insert_core_bases(self, bases, properties):
raise ModelFactoryException("Must be overwritten") # pragma: no cover
def build_model(self, modelname, bases, properties):
raise ModelFactoryException("Must be overwritten") # pragma: no cover
class ModelFactory(BaseFactory):
def insert_core_bases(self, bases, properties):
if has_sql_fields(bases):
bases.extend([x for x in self.registry.loaded_cores["SqlBase"]])
bases.append(self.registry.declarativebase)
else:
# remove tablename to inherit from a sqlmodel
del properties["__tablename__"]
bases.extend([x for x in self.registry.loaded_cores["Base"]])
def build_model(self, modelname, bases, properties):
if properties.get("ignore_migration") is True:
self.registry.ignore_migration_for[ # pragma: no cover
properties["__tablename__"]
] = True
return type(modelname, tuple(bases), properties)
def get_columns(view, columns):
if not isinstance(columns, list): # pragma: no cover
if ", " in columns:
columns = columns.split(", ")
else:
columns = [columns]
return [getattr(view.c, x) for x in columns]
class ViewFactory(BaseFactory):
def insert_core_bases(self, bases, properties):
bases.extend([x for x in self.registry.loaded_cores["SqlViewBase"]])
bases.extend([x for x in self.registry.loaded_cores["Base"]])
def build_model(self, modelname, bases, properties):
Model = type(modelname, tuple(bases), properties)
self.apply_view(Model, properties)
return Model
def apply_view(self, base, properties):
"""Transform the sqlmodel to view model
:param base: Model cls
:param properties: properties of the model
:exception: MigrationException
:exception: ViewException
"""
tablename = base.__tablename__
if hasattr(base, "__view__"):
view = base.__view__
elif tablename in self.registry.loaded_views:
view = self.registry.loaded_views[tablename]
else:
if not hasattr(base, "sqlalchemy_view_declaration"):
raise ViewException(
"%r.'sqlalchemy_view_declaration' is required to "
"define the query to apply of the view" % base
)
view = table(tablename)
selectable = getattr(base, "sqlalchemy_view_declaration")()
if isinstance(selectable, Query):
selectable = selectable.subquery() # pragma: no cover
for c in selectable.subquery().columns:
col = c._make_proxy(view)[1]
view._columns.replace(col)
metadata = self.registry.declarativebase.metadata
event.listen(
metadata, "before_create", DropView(view, if_exists=True)
)
event.listen(metadata, "after_create", CreateView(view, selectable))
event.listen(
metadata, "before_drop", DropView(view, if_exists=True)
)
self.registry.loaded_views[tablename] = view
pks = [
col
for col in properties["loaded_columns"]
if getattr(
getattr(base, anyblok_column_prefix + col), "primary_key", False
)
]
if not pks:
            raise ViewException("%r has no primary key defined" % base)
pks = [getattr(view.c, x) for x in pks]
mapper_properties = self.get_mapper_properties(base, view, properties)
base.anyblok.declarativebase.registry.map_imperatively(
base, view, primary_key=pks, properties=mapper_properties
)
setattr(base, "__view__", view)
def get_mapper_properties(self, base, view, properties):
mapper_properties = base.define_mapper_args()
for field in properties["loaded_columns"]:
if not hasattr(
properties[anyblok_column_prefix + field], "anyblok_field"
):
mapper_properties[field] = getattr(view.c, field)
continue
anyblok_field = properties[
anyblok_column_prefix + field
].anyblok_field
kwargs = anyblok_field.kwargs.copy()
if "foreign_keys" in kwargs:
foreign_keys = kwargs["foreign_keys"][1:][:-1].split(", ")
foreign_keys = [
getattr(view.c, x.split(".")[1]) for x in foreign_keys
]
kwargs["foreign_keys"] = foreign_keys
if anyblok_field.model.model_name == base.__registry_name__:
remote_columns = get_columns(
view, kwargs["info"]["remote_columns"]
)
local_columns = get_columns(
view, kwargs["info"]["local_columns"]
)
assert len(remote_columns) == len(local_columns)
primaryjoin = []
for i in range(len(local_columns)):
primaryjoin.append(remote_columns[i] == local_columns[i])
if len(primaryjoin) == 1:
primaryjoin = primaryjoin[0]
else:
primaryjoin = and_(*primaryjoin)
kwargs["remote_side"] = remote_columns
kwargs["primaryjoin"] = primaryjoin
Model = base
else:
Model = self.registry.get(anyblok_field.model.model_name)
mapper_properties[field] = relationship(Model, **kwargs)
return mapper_properties
|
AnyBlok
|
/AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok/model/factory.py
|
factory.py
|
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2014 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2015 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2016 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2017 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2018 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2019 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2019 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2020 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2021 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2022 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2023 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
CHANGELOG
=========
2.1.0 (2023-07-26)
------------------
* Implement psycopg3 capability
* Improved TimeStamp field
* Fixed #227: str and repr of the query call str(query.sql_statement)
* Refactored the main methods to get the description of the models and fields
  without using the existing table
* Added new column type **ModelSelection**; its role is to replace a column
  with a foreign key to the deprecated model **Model.System.Model**
* Added new column type **ModelFieldSelection**; its role is to replace a
  column with a foreign key to the deprecated model **Model.System.Field**,
  **Model.System.Column** or **Model.System.Relationship**
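  A minimal usage sketch, assuming these new columns are importable from
  ``anyblok.column`` like the other column types::
      from anyblok import Declarations
      from anyblok.column import Integer, ModelSelection
      @Declarations.register(Declarations.Model)
      class MyModel:
          id = Integer(primary_key=True)
          target = ModelSelection()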
2.0.0 (2023-03-11)
------------------
* Upgrade version of sqlalchemy to **2.0.0**
* Upgrade version of sqlalchemy utils to **0.40.0**
* Upgrade version of sqlalchemy views to **0.3.2**
* The previously deprecated configurations are now marked as removed;
  these configurations will be removed from the code in the next version
* Added Sequence with *no gap* to provide functional sequence without gaps.
* Fixed sphinx plugins
1.4.0 (2023-02-17)
------------------
* Freeze version of SQLAlchemy < 2.0
* Freeze version of SQLAlchemy_Util < 0.40.0
* Forbid primary key on Float and Decimal
* Replace setup packaging by pyproject.toml
1.3.2 (2022-02-19)
------------------
* Fixed Graphviz dependencies #207
* Fixed Mapper on column with **primary_key=False** #208
1.3.1 (2021-08-25)
------------------
* Fixed sphinx pluggin
1.3.0 (2021-07-12)
------------------
* Added a deprecated name argument on the configuration to
  add a deprecation message on these. The goal is to inform
  the user of AnyBlok that a sys arg will be removed
* Added a removed name argument on the configuration to forbid
  the sys arg
* Added plugins to migrate DateTime to DateTime with MariaDB and MySQL
* Added cache_ok attribute on Column Type
* Fixed issue #195, the issue was due to a change in the API of
  the hybrid properties, thanks @petrus-v for your help
* Fixed issue #196, a setter on the deprecated attribute **registry**
  now sets the attribute **anyblok**
1.2.0 (2021-04-14)
------------------
* Adapted to **SQLAlchemy >= 1.4**
* Fixed Many2One with pycountry as primary key
* Fixed Many2One with **primary_key=True**, when primary key is True,
this nullable is forced to False
* Refactored hybrid_property to use the decorator mechanism
* Adapted SQLA.URL manipulation, which became immutable
* Used the inspect method, not the deprecated Inspector
* Refactored View, need the latest version of sqlalchemy-views
* Removed old sqlalchemy interfaces for MySQL, replaced it by entry points
* Added engine.event entry point to define action on engine
* Added engine.event.**dialect name** entry point to define action on the
engine for the dialect
* Added session.event.**dialect name** entry point to define action on the
session for the dialect
* Added entries in configuration to ignore migration for Schema or Model
* Fixed the configuration of the documentation build on readthedoc
1.1.0 (2021-03-22)
------------------
* Fixed version of **SQLAlchemy < 1.4.0**, The next version of AnyBlok
will be adapted to **SQLAlchemy >= 1.4.0 < 2.0.0**, and will prepare to
**SQLAlchemy >= 2.0**, See issue #168
* **SQLAlchemy 1.4.0** adds a new attribute **registry** on the model;
  this attribute conflicts with the registry of AnyBlok. A new attribute
  **anyblok** is created on the model to call the registry of AnyBlok.
  A modification of the attribute **registry** is done to use both attributes
  with the same name. A deprecation warning is added on the attribute
  **registry** of AnyBlok
1.0.0 (2020-12-03)
------------------
* Added Sequence with *no gap* to provide functional sequence without gaps.
* OrderList class_collection can be defined on x2Many
* Added plugins system for migration of column type
* Bug Fix on registry loading sequence. The **apply_model_schema_on_table**
  method called at registry initialisation has been split to make SQLAlchemy
  ORM event registration independent from migration.
* Added undefined blok behaviour. It is an important behaviour, needed and
  written in the roadmap, whose goal is to define a blok which needs another,
  possibly missing, blok. Such a blok is installable only if the other blok
  exists. A package can define a group of bloks that depend on other
  packages; these bloks are installable if the other package is installed
* It is now possible to place a "Model" in another schema on the SGDB
::
@registry(...)
class MyModelInOtherSchema:
__db_schema__ = 'my_schema'
* Now the Email, Selection, Country, Json, Decimal, Interval, LargeBinary
  columns can be encrypted
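  A sketch, assuming the ``encrypt_key`` column parameter keeps the behaviour
  it already had for the other column types::
      from anyblok import Declarations
      from anyblok.column import Integer, Email
      @Declarations.register(Declarations.Model)
      class MyModel:
          id = Integer(primary_key=True)
          email = Email(encrypt_key='my-encrypt-key')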
* Removed compatibility with **Python 3.4** and **python 3.5**, because
these versions are deprecated
* Added *TimeStamp* column
* Added *Enum* column
* Added **ignore_migration** to protect the existing table or column against
  auto migration when the schema and its definition are different
* Added capability with mssql+pyodbc
* Fixed View: allow to add Many2One to a View
* Added flag_modified method on SQLModel #72
* Add in extra dependencies drivers used in unit tests for dialects tested
with cPython
* postgres: psycopg2-binary
* mysql: mysqlclient
* mariadb: mysqlclient
* mssql: pymssql
* pyodbc
* System.Parameter.get does not raise if a default value is provided
* Add ``--with-demo`` parameter while creating a database (anyblok_createdb) in
order to load demo data while installing bloks on that database.
* Fixed issue #45: System.Sequence: inconsistency in 'number' field
* Fixed issue #134: it is now possible to validate that the types of each part of a foreign key
  are the same
0.22.5 (2019-06-24)
-------------------
* Reverted console script **anyblok_nose**: to keep compatibility
* Fixed minimum version of **SQLalchemy >= 1.3** in package dependencies
* Removed some deprecation warning
0.22.2 (2019-06-23)
-------------------
* Fixed configfile with pytest with blok tests
0.22.1 (2019-06-22)
-------------------
* Fixed configfile with pytest
0.22.0 (2019-06-21)
-------------------
* Replaced the **nose test** by **pytest**. All the test was converted.
* Moved and renamed ``anyblok.tests.testcase.py`` to ``anyblok.testing.py``.
The file ``anyblok.tests.testcase.py`` import only the TestCase class to
continue to maintain the compatibility with older tests. PR
`#67 <https://github.com/AnyBlok/AnyBlok/pull/67>`_
* the console script ``anyblok_nose`` raises an exception, because this runner
  can't execute pytest tests. The goal is to use the nose plugin.
* Removed the behaviour ``test-bloks-at-install``
* Added compatibility with `MySQL <https://www.mysql.com/>`_ and
`MariaDB <https://mariadb.org/>`_. PR `#85 <https://github.com/AnyBlok/AnyBlok/pull/85>`_
.. warning::
this version can break unittest if the console script **anyblok_nose** is used
0.21.3 (2019-03-19)
-------------------
* Fixed issue #83, the name of constraints created by anyblok is truncated to check if the name is the same as the reflected name
0.21.2 (2019-03-18)
-------------------
* Fixed truncated constraint, the sqltext is different in function of the DB type
0.21.1 (2019-03-18)
-------------------
* Fixed pypi upload
0.21.0 (2019-03-15)
-------------------
* Fixed alias. The ``Model.aliased`` method now binds the registry to the alias. The goal is
to use **hybrid_method** with alias in AnyBlok.
* Fixed Column.Country; the latest version of pycountry does not raise a lookup
  exception when the country does not exist. AnyBlok now takes this change into
  consideration to raise the exception and keep the main behaviour
* Fixed alias. Now the ``Model.aliased`` method links the registry instance into the aliased model.
The goal is to use `hybrid_method <https://docs.sqlalchemy.org/en/latest/orm/extensions/hybrid.html#sqlalchemy.ext.hybrid.hybrid_method>`_
decorator with alias in AnyBlok.
* Fixed #60: Now DateTime plugins verify also the DateTime columns of the dependencies of the Model
* Removed **Python 3.3** compatibility
* Improved the NoResultFound Exception for query.one and query.dictone. Now the registry name of the model
is added to the exception's message
* Fixed compatibility with **SQLAlchemy > 1.3**
0.20.0 (2018-09-10)
-------------------
* Refactor the plugins MODEL and VIEW to become factory, rename type attribute by factory
attribute.
.. warning::
This new version does not break the compatibility, but
deprecates older ways of registering SQL View Models.
The version 1.0.0 of AnyBlok will remove these deprecated ways entirely.
The new way to register SQL View Model is as follows::
from anyblok.model.factory import ViewFactory
@register(Model, factory=ViewFactory)
class MyModel:
...
whereas before version 0.19.2, it would have been::
@register(Model, is_sql_view=True)
class MyModel:
...
and in version 0.19.3, it was::
from anyblok.model.common import VIEW
@register(Model, type=VIEW)
class MyModel:
...
* issue #53: added primary join for mapping relationship of SQL views
to themselves
* issue #54: on Blok methods
``pre_migration()``, ``post_migration()`` and ``update()`` the
``latest_version`` parameter is now instance
of ``pkg_resources`` ``Version`` class, or ``None``
* new tests base class: SharedDataTestCase, allowing to share costly
fixtures among tests of the same class
* scripts: removed useless and too magic ``need_blok``
* fixed Travis configuration for python 3.7
* plugins system optimization: removed stub implementations for all
possible methods in base class (gives a substantial speedup in
Travis tests).
* issue #55: Now, the **auto migration engine** of the Blok is entirely executed between
``pre_migration()`` and ``post_migration()``.
0.19.3 (2018-09-03)
-------------------
* FIX #52, bad naming convention for type of model
0.19.2 (2018-09-01)
-------------------
* Fix phone number with an empty string value
* Fix migration when adding more than one new required column with default values
* Fix Sql View can define Many2One relationship
* Fix SQL View, The __mapper__ is put in the Model
* FIX SQL View, for a SQL view the mapping of the field must be found with the anyblok column prefix
* The build of the Model class comes from the model plugin; the objective is to implement types
  other than Model or View
0.19.1 (2018-06-07)
-------------------
* Remove useless method, because this was moved to the distribution **anyblok_io**
* In Many2Many the generated join table adds the field name, to allow two M2M with the same models
* Fix ``load_namespace_first_step``, the bug merged properties between Mixins
0.19.0 (2018-05-29)
-------------------
* Remove All mixins, put them in another distribution **anyblok_mixins**
* remove bloks **anyblok-io**, **anyblok-io-csv**, **anyblok-io-xml**, they
are put in another distribution **anyblok_io**
* improve unit test + coverage
0.18.0 (2018-05-22)
-------------------
* Refactor of the nose's plugin by Georges Racinet
* Add mixin **ConditionalForbidUpdate**, **ConditionalForbidDelete**,
**ConditionalReadOnly**, **BooleanForbidUpdate**, **BooleanForbidDelete**,
**BooleanReadOnly**, **StateReadOnly**
0.17.4 (2018-05-16)
-------------------
* [FIX] autodoc
0.17.3 (2018-05-16)
-------------------
* [FIX] fix remove field
* [FIX] ``io`` formater model with external id. The error forbade the
  none / empty value.
* [FIX] option ``install_or_update_bloks``, if one blok is marked as
  **toupdate**, then this blok will not be marked as **toinstall**
* [FIX] ``Blok.import_file`` no longer raises if **error_found** is not
  found in the result of the import
* [FIX] ``Model.get_primary_keys`` with polymorphism (single table)
* Remove ``column.uText``, ``column.uString``, ``column.SmallInteger``
* [ADD] ``column.PhoneNumber``
* [ADD] ``column.Email``
* [ADD] ``column.Country``
0.17.2 (2018-02-27)
-------------------
* [FIX] hasattr on field.Function
* [IMP] Improve rich **Many2Many** to allow a rich **Many2Many**
  with the same model on both sides. Add also a new attribute
  **join_model** to compute the real join table from the table name defined
  on the Model, in the case of a join_model with columns based on Many2One.
  Add an option to force recomputing secondaryjoin and primaryjoin in the
  case where the join model has more than one foreign key to one of the
  two models. The m2m_local_columns and m2m_remote_columns become required
  attributes
* [ADD] New field.JsonRelated. The goal is to manipulate a json entry as a
column
0.17.1 (2018-02-24)
-------------------
* [FIX] repr when no value for relationship
0.17.0 (2018-02-23)
-------------------
* [FIX] **SQLAlchemy_Utils** changed the import path of **EncryptedType** in
version **0.33.0**
* [REF] Add default_timezone on configuration and **DateTime** column.
  Before this change a naive datetime got the timezone of the server host; now
  it is possible to add the attribute **default_timezone** on each column or
  **default_timezone** in the configuration for all the columns.
  For each **DateTime** column the default timezone used is defined in this order:
  1) default_timezone on the column (if defined)
  2) default_timezone in the configuration (if defined)
  3) timezone of the server
  .. warning::
      These options are only used for naive datetimes, to save them with a timezone.
      A datetime that already has a timezone keeps its own timezone.
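  For instance, a sketch of the per-column attribute, assuming the column
  accepts a timezone name as described above::
      from anyblok import Declarations
      from anyblok.column import DateTime, Integer
      @Declarations.register(Declarations.Model)
      class MyModel:
          id = Integer(primary_key=True)
          created_at = DateTime(default_timezone='Europe/Paris')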
* [REF] add a function in config to get the name of the database from Configuration
**db_name** or **db_url**. The script **anyblok_createdb** and the plugin
use this function.
* [IMP] Add option **--anyblok-db-url** in plugin nose options. the options can have
default value from environment variable **ANYBLOK_DATABASE_URL**
* [IMP] add primary_key attribute on Many2One and One2One.
  Only when the column is created by the relationship Many2One or One2One.
  The created column gets the primary_key attribute given to the relationship
  and applies it.
* [IMP] add repr default method on all SqlModel and ViewSqlModel
* [FIX] Encrypt columns, the custom type from anyblok did not implement
  the ``process_result_value`` needed to decrypt the value
* [REF] Simplify Configuration, old and unused code are removed
0.16.2 (2018-02-12)
-------------------
* [FIX] Add column with an unique constraint.
  Alembic now declares two changes: **add column** and **add unique constraint**.
  Then the migration operation **add column** must not create the constraint
* [IMP] add index constraint operation.
  Alembic now declares **add unique constraint**; the migration operation
  creates the constraint on existing and not-yet-existing columns
* [IMP] add index attribute on Many2One and One2One.
  Only when the column is created by the relationship Many2One or One2One.
  The created column gets the index attribute given to the relationship
  and applies it.
* [FIX] raise a ConfigurationException on wrong path:
  if a non-existing configuration file is given then the exception is raised
* [REF] update **anyblok.start**
The configuration is now loaded when configuration_group is none
* [ADD] **isolation_level** configuration
The isolation level can be passed by **anyblok.start** or by the Configuration
  The default value is **READ_COMMITTED**. In the case where the console script and
  the Configuration both add an isolation level, the isolation used is always the one
  coming from **anyblok.start**
* [REF] The nose plugins take the **tests** directories in any location in the blok
not only at the root of the blok
* [REF] The options **test-blok-at-install** take the **tests** directory in any
location in the blok not only at the root of the blok
* [REF] The anyblok_nose console script to take the **tests** directories in any
location in the blok not only at the root of the blok
0.16.1 (2018-01-29)
-------------------
* [FIX] Many2Many on the same Model
* [FIX] Many2One with multi primary key
* [FIX] add specific exception when the number of column in join table
for many2many is not the same that primary key
0.16.0 (2018-01-25)
-------------------
* [REF] Json field, use the Json field from SQLAlchemy, because it
is implemented by the db
* [FIX] check constraint on selection to get a name without the number
  of entries
0.15.0 (2018-01-17)
-------------------
* [REF] column Selection add check constraint to forbid other
value than the wanted
0.14.0 (2018-01-15)
-------------------
* [REF] change log level, the installation becomes less verbose
* [REF] change naming convention, don't check as a function of
  table and column names to know if it is an AnyBlok constraint
* [FIX] check if a drop check constraint is not a truncated add check
  constraint
* [ADD] raise an exception if the primary key changed, this action is too
  complex to know how to transform the relationship and keep the real
  value
0.13.0 (2018-01-09)
-------------------
* [FIX] check constraint must not be created at the same time as the column,
  because the column of the constraint might not exist yet
* [REF] change naming convention
* [FIX] detect and apply drop check constraint in the migration
* [FIX] detect and apply add check constraint in the migration
0.12.2 (2018-01-04)
-------------------
* [FIX] name of the created class, before the fix anyblok used the tablename.
  In the case of polymorphism on a single table, sqlalchemy found two or more
  mappers for a single mapper name. Now the class name is the registry name
  without dots.
  This change has consequences only if the primary join is forced.
In the case::
Test = registry.Test
Test.id
you need to change::
primaryjoin = 'test.id == ...'
by::
primaryjoin = 'ModelTest.id == ...'
* [FIX] name of the fakecolumn when a Many2One is added without
  existing column names. This action allows creating two Many2One
  to the same remote Model.
  .. warning::
      This change has a big consequence on existing tables, because a new column
      is added and the origin column is marked as unknown. You have to rename the column
      by SQL migration, add the column in the Model, or force the name in the Many2One.
* [FIX] In the One2Many, when two foreign keys are found to the same primary key
  the primary join of the relationship is now an ``or_`` not an ``and_``
* [FIX] One2Many also detects the Many2One to get the FakeColumn to build the primary join
0.12.1 (2017-12-23)
-------------------
* [FIX] not invalidate cache on an uninstalled model
0.12.0 (2017-12-23)
-------------------
* [FIX] Declare Field Function in Polymophic subclass
* [FIX] Declare Field Function in Polymophic
* [ADD] auto register of sqlalchemy ORM event
* [ADD] Mixin to do readonly
* [REMOVE] cron functionality, it will be add in another package **anyblok_dramatiq**
* [FIX] Field.DateTime documentation, add ``is auto updated``
* [REF] add entry point ``anyblok.session.event`` and additional_setting
``anyblok.session.event`` to add some events on the session
* [FIX] clean foreign_key in some column type, now the foreign_key is made by Column class
* [FIX] remove for System.Field and System.Model the removed fields
0.11.1 (2017-11-28)
-------------------
* [ADD] in DBTestCase add init_registry_with_bloks, this method is similar to
  init_registry, it installs the bloks after adding the new model
* [FIX] create precommit_hooks in the EnvironnementManager if it does not exist
* [FIX] create postcommit_hooks in the EnvironnementManager if it does not exist
0.11.0 (2017-11-20)
-------------------
* [ADD] log debug for commit / rollback
* [REF] precommit_hook, can also be on no SQL Model
* [ADD] postcommit_hook
* [FIX] UUID inheritance
0.10.1 (2017-11-14)
-------------------
* [FIX] change log
0.10.0 (2017-11-14)
-------------------
* [ADD] ``anyblok_configuration.post_load`` to initialize some services in
function of configuration
* [REF] Update configuration groups to add ``dramatiq-broker`` by default.
This configuration groups is filled by **anyblok_dramatiq** package
* [FIX] when the applications configuration has not ``configuration_groups``
then the configuration use the ``configuration_groups`` of the default
application
* [ADD] Add configuration group ``preload`` definition, but not used
* [ADD] Entry point ``anyblok.model.plugin`` to add behaviour on the model
* [REF] **hybrid_method** become an ``anyblok.model.plugin``
* [REF] adapter of mapper_args and table_args become an ``anyblok.model.plugin``
* [REF] **event** become an ``anyblok.model.plugin``
* [REF] **sqlachemy event** become an ``anyblok.model.plugin``
* [REF] **cache** and **classmethod_cache** become an ``anyblok.model.plugin``
* [IMP] **Configuration.add_configuration_group** need to add a new group for
a console script
* [IMP] add new ``anyblok.model.plugin`` to update datetime columns when the
auto_update is True
0.9.10 (2017-09-23)
-------------------
* [FIX] type ``Paramater`` => ``Parameter``
* [IMP] add the author in autodoc
* [IMP] in the script blok the exclude and include model can use ``.*`` to take
children in the namespace
* [FIX] anyblok_doc with UML, don't make aggregation when the model doesn't
  exist
0.9.9 (2017-09-19)
------------------
* [FIX]: add logo in the MANIFEST.in
0.9.8 (2017-09-19)
------------------
* [IMP] fields_description add remote_name
* [Update] doc, add foreign_key_option and unique for Many2One
* [IMP] add ``expire_all`` and ``expunge`` registry methods, expire all the
instance in the session
* [IMP] add ``expunge`` method on the instance
* [FIX]: expire attribute must also use all the fields which come from
  the polymorphic model
* [FIX] if ondelete=cascade in foreign key options, then the many2one forces
  the delete directly in the session
* [FIX] delete method can also be the session.query; mapping.remove can
  use this session.query.delete to remove in case of recursion
* [IMP] IO.Mapping save the blok name when use the Blok.import_file method
* [IMP] IO blok overload ``Model.delete`` and ``Query.delete`` to delete mapping
with instances of the Models
* [FIX] creating a new session must commit and remove all old session instances
* [IMP] add ``Mapping.clean`` method to clean unlinked mapping
* [IMP] add ``Mapping.remove_for_blokname`` method to remove mapping and obj
* [IMP] add new field in ``Model.System.Blok`` ``author`` and ``logo``
0.9.7 (2017-07-03)
------------------
* [FIX] field_description also gets the polymorphic fields from the inherited model
0.9.6 (2017-07-03)
------------------
* [FIX] in One2Many and Many2Many field, the attribute model can be used on
record node. Used for Polymorphisme
0.9.5 (2016-12-05)
------------------
* [ADD] Python 3.6 support
* Flake8
0.9.4 (2016-10-27)
------------------
* [FIX] Nose test plugins load the configuration needed for unit tests
* [ADD] getFieldType on SQLBase, this method returns the type of the column
0.9.3 (2016-10-12)
------------------
* [FIX] SQLAlchemy 1.1.* adds autoincrement='auto', whereas AnyBlok expects a Boolean.
  If the field is an Integer and a primary_key with autoincrement='auto'
  then the value is True else False
* [FIX] SQLAlchemy 1.1.*, primary_key attribute doesn't define autoincrement.
  An Integer column with primary_key=True and without an autoincrement
  declaration uses autoincrement=True
* [FIX] SQLAlchemy 1.1.*, backref property check if the collection_class has
__emulates__ attributes. InstrumentedList haven't to have this attribute
* [FIX] SQLAlchemy 1.1.*, Session State changed, update the update method
of the registry to install / update / uninstall bloks
* [FIX] SQLAlchemy 1.1.*, Hybrid property doesn't propagate the relationship
  info attribute. The propagation is forced for Many2One and One2One, the only
  two relationships wrapped by hybrid_property
* [FIX] SQLAlchemy 1.1.*, Hybrid property wrap the fget result in the case of
the fget is called on the class (not the instance). Adapt the unit test,
don't check if the result id of column are the same, check if the expression
give by this results are the same.
* [FIX] SQLAlchemy 1.1.*, listen can not be used with a hybrid_property.
In the case of a listen, the mapper returned is not the hybrid_property
but the real wrapped field
0.9.2 (2016-10-12)
------------------
* [FIX] setup.py: error with pip
0.9.1 (2016-10-3)
-----------------
* [FIX] migration testcase
* [FIX] graphviz FORMATS
* [FIX] travis configuration
0.9.0 (2016-07-11)
------------------
* [REF] add Configuration.has method
* [FIX] test migration, force to load registry with unittest=True
* [FIX] test event
* [FIX] test blok
* [FIX] mapper with None parameter
* [FIX] add set_defaults in parser to update configuration dict
* [FIX] one2many remote columns
* [FIX] load anyblok.init in the unit test
* [IMP] Add plugins by configuration for:
* Registry
* Migration
* get_url
* [IMP] add LogCapture
* [IMP] TestCase.Configuration, use to update Configuration only in
a context manager
* [IMP] add Registry.db_exists class method, check with the configuration
and the db_name if the connection is possible
0.8.5 (2016-06-20)
------------------
* [FIX] utf-8 encoding
* [REF] move bitbucket (mergurial) to github (git)
0.8.4 (2016-06-14)
------------------
* [FIX] io/xml/importer one2many field
* [FIX] install bloks which are not in the blok list yet, but are loaded
0.8.3 (2016-04-18)
------------------
* [FIX] cache and classmethod_cache on SQL model
* [ADD] is_installed classmethod cache
0.8.2 (2016-04-06)
------------------
* [REF] IO.Mapping methods delete and multi_delete can remove entry
* [FIX] datetime with timezone use timezone.localize, better than
datetime.replace(tzinfo=...)
* [ADD] update sphinx extension
0.8.1 (2016-03-15)
------------------
* [FIX] `#21 <https://bitbucket.org/jssuzanne/anyblok/issues/21/update-setter-for-decimal>`_
Improve Decimal column setter
* [FIX] `#22 <https://bitbucket.org/jssuzanne/anyblok/issues/22/string-ustring-text-utext-columns-save>`_
String, uString, Text and uText write '' in database for False value
* [FIX] Change the external_id save in a two way
* [FIX] `#23 <https://bitbucket.org/jssuzanne/anyblok/issues/23/selection-field-when-nullable-true-doesnt>`_
Column.Selection with None value, don't return 'None' value by the getter
0.8.0 (2016-02-05)
------------------
.. warning::
Break the compatibility with the previous version of anyblok
* update method on the model
replace ::
obj.update({field1: val1, ...})
by::
obj.update(field1=val1, ...)
* [REF] session expire is now on the attribute, the update method is refactored
too.
* [FIX] blok: update version if the version change
* [REF] add required blok, this bloks is installed and updated by the scripts
anyblok_updatedb and anyblok_createdb
* [ADD] Add Color Column
* [REF] column can be encrypted
* [REF] DataTime column is not a naive datatime value
* [ADD] Add Password Column
* [ADD] Add UUID Column
* [ADD] Add URL Column
0.7.2 (2016-01-14)
------------------
* [FIX] delete flush after remove of the session
* [FIX] nose plugins
* [FIX] doesn't automatically destroy constraints (not created by anyblok),
  indexes (not created by anyblok), columns or tables during automigration; add
  options to force their deletion.
* [REF] standardize the constraint and index names
* [FIX] Multi declaration of the same foreign key in the case of M2O and O2O
* [REF] SqlBase.update becomes a high-level method
0.7.1 (2016-01-08)
------------------
* [FIX] didn't cast the config data from the config file
* [IMP] copy init entry point from anyblok_pyramid
0.7.0 (2016-01-07)
------------------
.. warning::
Python 3.2 is not supported
* [REF] Add options to give database url, No break compatibility
* [REF] the argument of ArgumentParser can be add in the configuration
- Improve the help of the application
- Improve the type of the configuration, Work also with config file.
- Adapt current configuration
* [REF] start to use sqlalchemy-utils, replace the database management
* [IMP] `#18 <https://bitbucket.org/jssuzanne/anyblok/issues/18/forbidden-the-declaration-of-sqlachemy>`_
Forbidden the declaration of SQLAchemy column or relationship
* [REF] `#15 <https://bitbucket.org/jssuzanne/anyblok/issues/15/speed-up-the-unittest>`_
Refactor unittest case to not create/drop database for each test
* [FIX] `#19 <https://bitbucket.org/jssuzanne/anyblok/issues/19/migration-contrainte>`_
During migration if an unique constraint must be apply without unique
value, then the constraint will be ignore and log a warning. No break the
instalation of the blok
* [FIX] `#20 <https://bitbucket.org/jssuzanne/anyblok/issues/20/update-meth-must-refresh-the-instance-when>`_
Update meth: expire the instance cause of relationship
* [IMP] refresh and expire meth on model
* [REF] delete obj, flush the session and delete the instance of obj of the
session, before expire all the session, the goal is to reload the
  relationship.
* [REF] `#13 <https://bitbucket.org/jssuzanne/anyblok/issues/13/refactor-inheritance-tree>`_
Remove association model, replace it by call at the Blok definition
* [IMP] `#14 <https://bitbucket.org/jssuzanne/anyblok/issues/14/add-conflicting-link-between-bloks>`_
  Add conflicting link between bloks; two bloks which are in conflict cannot be
  installed together
0.6.0 (2016-01-07)
------------------
* [REF] unittest isolation
* [IMP] possibility to apply an extension for sqlalchemy
* [ADD] pool configuration
0.5.2 (2015-09-28)
------------------
* [IMP] extension for Sphinx and autodoc
* [ADD] API doc in doc
* [ADD] add foreign key option in relation ship
* [CRITICAL FIX] the EnvironnementManager didn't return the good scoped method
for SQLAlchemy
* [CRITICAL FIX] the precommit_hook was not isolated by session
* [REF] add a named argument ``must_be_loaded_by_unittest``, by default False,
  in ``Configuration.add`` to indicate if the function must be called during the
  initialisation of the unittest, generally for the configuration initialized
  by environment variables
0.5.1 (2015-08-29)
------------------
* [IMP] unload declaration type callback
0.5.0 (2015-08-28)
------------------
.. warning::
Break the compatibility with the previous version of anyblok
* cache, classmethod_cache, hybrid_method and listen
replace::
from anyblok import Declarations
cache = Declarations.cache
classmethod_cache = Declarations.classmethod_cache
hybrid_method = Declarations.hybrid_method
addListener = Declarations.addListener
by::
from anyblok.declarations import (cache, classmethod_cache,
hybrid_method, listen)
.. note::
The listener can declare SQLAlchemy event
* declaration of the foreign key
replace::
@register(Model):
class MyClass:
myfield = Integer(foreign_key=(Model.System.Blok, 'name'))
myotherfield = Integer(foreign_key=('Model.System.Blok', 'name'))
by::
@register(Model):
class MyClass:
myfield = Integer(foreign_key=Model.System.Blok.use('name'))
myotherfield = Integer(foreign_key="Model.System.Blok=>name")
* [IMP] add ``pop`` behaviour on **Model.System.Parameter**
* [REF] Load configuration before loading bloks, to use Configuration during
the declaration
* [FIX] all must return InstrumentedList, also when the result is empty
* [FIX] to_dict must not cast column
* [REF] add third entry in foreign key declaration to add options
* [IMP] ModelAttribute used to declare the need of a specific attribute and
  get the attribute or the foreign key from this attribute
* [IMP] ModelAttributeAdapter, get a ModelAttribute from ModelAttribute or str
* [IMP] ModelRepr, Pseudo representation of a Model
* [IMP] ModelAdapter, get a ModelRepr from ModelRepr or str
* [IMP] ModelMapper and ModelAttributeMapper
* [REF] Event, the declaration of an event can be an anyblok or a sqlalchemy event
* [REF] the foreign key must be declared with ModelAttribute
* [REF] Use Adapter for Model and attribute in relation ship
* [REF] hybrid_method, cache and classmethod_cache are now only importable decorator functions
* [IMP] in column the default can be a classmethod name
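  For example, a sketch assuming the classmethod is looked up by its name on
  the model itself, with ``register`` and ``Model`` as in the examples above::
      from anyblok.column import String
      @register(Model)
      class MyModel:
          @classmethod
          def get_default_name(cls):
              return 'anyblok'
          name = String(default='get_default_name')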
* [REF] replace all the fields (prefix, suffix, ...) by a formater field.
  It is a python format string
* [IMP] Sequence column
* [IMP] add the default system or user configuration file
0.4.1 (2015-07-22)
------------------
.. warning::
Field Function change, fexp is required if you need filter
* [FIX] Field.Function, fexp is now a class method
* [REF] reduce flake8 complexity
* [REF] refactor field function
* [FIX] inherit relationship from another model, thanks Simon ANDRÉ for the
  bug report
* [REF] table/mapper args definition
* [REF] Refactor Field, Column, RelationShip, they now use polymorphic inheritance
* [FIX] Foreign key constraint, allow to add and drop constraint on more than
one foreign key
* [ADD] update-all-bloks option
* [ADD] pre / post migration
* [REF] UML Diagram is now with autodoc script
* [REF] SQL Diagram is now with autodoc script
* [REF] Add **extend** key word in configuration file to extend an existing
configuration
0.4.0 (2015-06-21)
------------------
.. warning::
Break the compatibility with the previous version of anyblok
* [REF] Add the possibility to add a logging file by argparse
* [ADD] No auto migration option
* [ADD] Plugin for nose to run unit test of the installed bloks
* [REF] The relationship can reference more than one foreign key
* [IMP] Add define_table/mapper_args methods to fill the __table/mapper\_args\_\_
class attributes needed to configure SQLAlchemy models
* [REF] Limit the commit in the registry to only when the SQLA Session factory
is recreated
* [REF] Commit and re-create the SQLA Session Factory, at installation, only
if the number of Session inheritances or the number of Query inheritances
changes, else keep the same session
* [REF] Exception is not a Declarations type
* [FIX] Reload functionality in Python 3.2
* [REF] Remove the Declarations types Field, Column, RelationShip, they are
replaced by Python imports
* [REF] rename **ArgsParseManager** by **Configuration**
* [REF] rename **reload_module_if_blok_is_reloaded** by
**reload_module_if_blok_is_reloading** method on blok
* [REF] rename **import_cfg_file** by **import_file** method on blok
* [REF] Make the argparse configuration consistent
* [REF] refactor part_to_load, the entry point loaded is bloks
* [IMP] Allow to define another column name in the table versus model
* [FIX] add importer for import configuration file
* [FIX] x2M importer without fields, just the external id
0.3.5 (2015-05-10)
------------------
* [IMP] When a new column is added, if the column has a default value, then
this value will be set on all the existing rows where the value is null for this
column
* [REF] import_cfg_file removes the importer when the import is done
0.3.4 (2015-05-10)
------------------
* [ADD] logger.info on migration script to indicate what is changed
* [IMP] Add sequence facility in the declaration of Column
* [ADD] XML Importer
0.3.3 (2015-05-04)
------------------
* [FIX] createdb script
0.3.2 (2015-05-04)
------------------
* [IMP] doc
* [REF] Use logging.config.configFile
0.3.1 (2015-05-04)
------------------
* [IMP] Update setup to add documentation files and blok's README
0.3.0 (2015-05-03)
------------------
* [IMP] Update Doc
* [FIX] Remove nullable column, the nullable constraint is removed not the column
* [ADD] Formater, convert value to str or str to value, with or without mapping
* [ADD] CSV Importer
* [REF] CSV Exporter to use Formater
0.2.12 (2015-04-29)
-------------------
* [IMP] CSV Exporter
* [IMP] Exporter Model give external ID behaviour
* [ADD] Sequence model (Model.System.Sequence)
* [ADD] fields_description cached_classmethod with invalidation
* [ADD] Parameter Model (Model.System.Parameter)
* [FIX] environment variables for unit tests
0.2.11 (2015-04-26)
-------------------
* [FIX] UNIT test createdb with prefix
0.2.10 (2015-04-26)
-------------------
* [IMP] add environment variables for database information
* [ADD] argsparse option install all bloks
* [FIX] Python 3.2 need that bloks directory are python modules, add empty __init__ file
0.2.9 (2015-04-18)
------------------
* [FIX] Add all rst at the main path of all the bloks
0.2.8 (2015-04-16)
------------------
* [IMP] unittest on SQLBase
* [IMP] add delete method on SQLBase to delete an entry from an instance of the model
* [REF] rename get_primary_keys to get_mapping_primary_keys, because get_primary_keys
already exists in SQLBase
0.2.7 (2015-04-15)
------------------
* [IMP] Add IPython support for interpreter
* [REF] Update and standardize the method to define the fields of the models (Field,
Column, RelationShip); now all the column types go in the ftype and come from the name of the class
0.2.6 (2015-04-11)
------------------
* [FIX] use the backref name to get the label of the remote relationship
* [FIX] add type information of the simple field
0.2.5 (2015-03-23)
------------------
* [FIX] In the parent / children relationship, where the pk is on a mixin or
comes from inheritance
* [FIX] How to Environment
* [FIX] Many2Many declared in Mixin
* [IMP] Many2One can now declare that the local column must be unique (
only if the local column is not declared in the model)
0.2.3 (2015-03-23)
------------------
.. warning::
    This version may not be compatible with version **0.2.2**, because when
    the foreign key model is a string you must replace the table name by
    the registry name
* [FIX] Allow to add a relationship on the same model; the main use is to add a
parent / children relationship on a model. There is no difference with
the declaration of a relationship on another model
* [REF] standardize foreign_key and relationship: the str which replaces
the Model Declarations is now the registry name
0.2.2 (2015-03-15)
------------------
* [REF] Unittest
* TestCase and DBTestCase are only used for framework
* BlokTestCase is used:
- by ``run_exit`` function to test all the installed bloks
- at the installation of a blok if wanted
0.2.0 (2015-02-13)
------------------
.. warning::
This version is not compatible with the version **0.1.3**
* [REF] Import and reload are more explicit
* [ADD] IO:
* Mapping: Link between Model instance and (Model, str key)
* [ADD] Env in registry_base to access the EnvironmentManager without having to import
it each time
* [IMP] doc: add a howto on the environment
0.1.3 (2015-02-03)
------------------
* [FIX] setup long description, good for pypi but not for easy_install
0.1.2 (2015-02-02)
------------------
* [REFACTOR] Allow to declare Core components
* [ADD] Howto declare Core / Type
* [FIX] Model can only inherit simple python class, Mixin or Model
* [FIX] Mixin inherit chained
* [FIX] Flake8
0.1.1 (2015-01-23)
------------------
* [FIX] version, documentation, setup
0.1.0 (2015-01-23)
------------------
Main version of AnyBlok. With this version you can:
* Create your own application
* Connect to a database
* Define bloks
* Install, Update, Uninstall the blok
* Define field types
* Define Column types
* Define Relationship types
* Define Core
* Define Mixin
* Define Model (SQL or not)
* Define SQL view
* Define more than one Model on a specific table
* Write unittest for your blok
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2015 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
===============
Advanced topics
===============
How to add a new ``Type`` /core
===============================
``Type`` and ``Core`` are both ``Declarations``.
Difference between ``Core`` and ``Type``
----------------------------------------
``Core`` is also an entry ``Type``, but a particular one. ``Core`` is used to
define the low level behaviour of an entry ``Type``. For example ``Core.Base``
is the low level of all the ``Model``: modifying the behaviour of ``Core.Base``
is equal to modifying the behaviour of all the ``Model``.
This is the inheritance model of the ``Model`` ``Type``:
+--------------------+------------------------------------+-------------------+
| ``Entry`` ``Type`` | inheritance ``Types`` | Core |
+====================+====================================+===================+
| Model | Model / Mixin | Base |
+--------------------+------------------------------------+-------------------+
Declare a new ``Type``
----------------------
The declaration of a new ``Type`` is the declaration of a new type of declaration.
The known ``Type`` declarations are:
* Model
* Mixin
* Core
* AuthorizationPolicyAssociation
This is an example to declare new entry ``Type``::
from anyblok import Declarations
@Declarations.add_declaration_type()
class MyType:
@classmethod
def register(cls, parent, name, cls_, **kwargs):
...
@classmethod
def unregister(cls, child, cls_):
...
The Type must implement:
+---------------------+-------------------------------------------------------+
| Method name | Description |
+=====================+=======================================================+
| register            | This ``classmethod`` describes what happens when a    |
|                     | declaration is done by the decorator                  |
| | ``Declarations.register`` |
+---------------------+-------------------------------------------------------+
| unregister          | This ``classmethod`` describes what happens when an   |
| | undeclaration is done. |
+---------------------+-------------------------------------------------------+
The ``add_declaration_type`` can define the arguments:
+---------------------+-------------------------------------------------------+
| Argument's name | Description |
+=====================+=======================================================+
| isAnEntry | **Boolean** |
|                     | Define if the new ``Type`` is an entry, depending on  |
|                     | the installation or not of the bloks                  |
+---------------------+-------------------------------------------------------+
| assemble | **Only for the entry ``Type``** |
| | Waiting the name of the classmethod which make the |
| | action to group and create a new class with the |
| | complete inheritance tree:: |
| | |
| | @add_declaration_type(isAnEntry=True, |
| | assemble='assemble') |
|                     | class MyType:                                         |
| | ... |
| | |
| | @classmethod |
| | def assemble(cls, registry): |
| | ... |
| | |
| | .. warning:: |
| | registry is the registry of the database |
| | |
+---------------------+-------------------------------------------------------+
| initialize | **Only for the entry ``Type``** |
| | Waiting the name of the classmethod which make the |
| | action to initialize the registry:: |
| | |
| | @add_declaration_type(isAnEntry=True, |
| | initialize='initialize') |
|                     | class MyType:                                         |
| | ... |
| | |
| | @classmethod |
| | def initialize(cls, registry): |
| | ... |
| | |
| | .. warning:: |
| | registry is the registry of the database |
| | |
+---------------------+-------------------------------------------------------+
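Putting these arguments together, a minimal sketch of a new entry ``Type`` could look
like this (the class name and the method bodies are illustrative only, not part of
AnyBlok)::

    from anyblok import Declarations

    @Declarations.add_declaration_type(isAnEntry=True,
                                       assemble='assemble',
                                       initialize='initialize')
    class MyType:
        """Hypothetical entry ``Type``, given only as an example"""

        @classmethod
        def register(cls, parent, name, cls_, **kwargs):
            # store the declared class so it can be assembled later
            ...

        @classmethod
        def unregister(cls, child, cls_):
            # forget a declaration which has been removed
            ...

        @classmethod
        def assemble(cls, registry):
            # build the final classes from the whole inheritance tree
            ...

        @classmethod
        def initialize(cls, registry):
            # called once the registry has been assembled
            ...
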
Declare a Mixin entry type
--------------------------
``Mixin`` is a ``Type`` to add behaviours; it is not a particular ``Type``,
but it is often very interesting to use it.
AnyBlok already has a ``Mixin`` ``Type`` for the ``Model`` ``Type``. The
``Mixin`` ``Type`` is not shared by all the entry ``Types``:
``Model`` only inherits other ``Model`` or ``Declarations.Mixin``. If you add
another ``Declarations.AnotherMixin`` then ``Model`` won't inherit this
``Mixin`` ``Type``.
The new ``Mixin`` ``Type`` is easy to add::
from anyblok import Declarations
from anyblok.mixin import MixinType
@Declarations.add_declaration_type(isAnEntry=True)
class MyMixin(MixinType):
pass
Declare a new ``Core``
----------------------
The declaration of a new ``Core`` and the register of its overloads are done in two different parts.
Declarations of a new ``Core``::
from anyblok.registry import RegistryManager
RegistryManager.declare_core('MyCore')
Definition or register of an overload of the ``Core`` declaration::
from anyblok import Declarations
@Declarations.register(Declarations.Core)
class MyCore:
...
The declaration of the new ``Core`` must be done in the application, not in a
blok. The register of the overload is only done in the bloks.
.. warning::
``Core`` can't inherit ``Model``, ``Mixin`` or other Type
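As an illustration, a blok can register an overload of the existing ``Core.Base`` to
add a helper available on every ``Model`` (a minimal sketch; the method name is
arbitrary and only given as an example)::

    from anyblok.declarations import Declarations

    @Declarations.register(Declarations.Core)
    class Base:

        def registry_name(self):
            # available on every Model of the registry once the blok
            # defining this overload is installed
            return self.__registry_name__
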
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2014 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
.. AnyBlok documentation master file, created by
sphinx-quickstart on Mon Feb 24 10:12:33 2014.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
AnyBlok documentation
=====================
AnyBlok is a Python framework allowing to create highly dynamic and modular
applications on top of the SQLAlchemy ORM. Applications are made of
"bloks" that can be installed, extended, replaced, upgraded or uninstalled.
Bloks can provide SQL Models, Column types, Fields, Mixins, SQL views,
or even plain Python code unrelated to the database, and all of these
can be dynamically customized, modified, or extended
without strong dependencies between them, just by adding new bloks.
Bloks are declared (made available) through dedicated setuptools entry
points, and are explicitly *installed* in the
database, which provides the needed dynamicity for multi-tenant
scenarios: a given AnyBlok process can connect to several databases,
and execute different sets of code on each of them, according
to their installed bloks. Installing bloks could, e.g., be done through
some HTTP interface (not provided by AnyBlok itself).
That being said, AnyBlok's scope of usage is by no means limited to
multi-tenant applications. The flexibility and extendability it
provides can be enjoyed even when working on a single database.
AnyBlok is released under the terms of the :doc:`Mozilla Public
License version 2 <LICENSE>`.
.. toctree::
:maxdepth: 2
FRONT.rst
basic_usage
advanced_topics
MEMENTO.rst
internals
builtin_bloks
CHANGES.rst
ROADMAP.rst
LICENSE.rst
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2014 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
.. contents::
ROADMAP
=======
To implement
------------
* Add slogan
* Update doc
* Need to improve Alembic and sqlalchemy-utils
* Refactor the engine declarations to have a master / slave(s) configuration
* Addons for sqlalchemy : http://sqlalchemy-utils.readthedocs.org/en/latest/installation.html
Library to include
------------------
* full text search: https://pypi.python.org/pypi/SQLAlchemy-FullText-Search/0.2
* internationalisation: https://pypi.python.org/pypi/SQLAlchemy-i18n/0.8.2
* sqltap http://sqltap.inconshreveable.com, profiling and introspection for SQLAlchemy applications
* Crypt https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DatabaseCrypt
* profiling https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/Profiling
Functionality which needs a sprint
----------------------------------
* Tasks Management
* Internationalization
* Ancestor left / right
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2014 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
AnyBlok framework internals
===========================
anyblok module
--------------
.. automodule:: anyblok
.. autofunction:: start
.. autofunction:: load_init_function_from_entry_points
.. autofunction:: configuration_post_load
anyblok.declarations module
---------------------------
.. automodule:: anyblok.declarations
.. autoexception:: DeclarationsException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: Declarations
:members:
anyblok.model module
--------------------
.. automodule:: anyblok.model
.. autoclass:: Model
:members:
anyblok.model.exceptions module
-------------------------------
.. automodule:: anyblok.model.exceptions
.. autoexception:: ModelException
:members:
:show-inheritance:
:inherited-members:
.. autoexception:: ViewException
:members:
:show-inheritance:
:inherited-members:
.. autoexception:: ModelFactoryException
:members:
:show-inheritance:
:inherited-members:
anyblok.model.plugins module
----------------------------
.. automodule:: anyblok.model.plugins
.. autoclass:: ModelPluginBase
:members:
Plugin: hybrid_method
~~~~~~~~~~~~~~~~~~~~~
.. automodule:: anyblok.model.hybrid_method
.. autoclass:: HybridMethodPlugin
:members:
:show-inheritance:
Plugin: table_mapper
~~~~~~~~~~~~~~~~~~~~
.. automodule:: anyblok.model.table_and_mapper
.. autoclass:: TableMapperPlugin
:members:
:show-inheritance:
Plugin: event / SQLAlchemy event
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: anyblok.model.event
.. autoclass:: EventPlugin
:members:
:show-inheritance:
.. autoclass:: SQLAlchemyEventPlugin
:members:
:show-inheritance:
Plugin: cache
~~~~~~~~~~~~~
.. automodule:: anyblok.model.cache
.. autoclass:: CachePlugin
:members:
:show-inheritance:
Plugin: field datetime
~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: anyblok.model.field_datetime
.. autoclass:: AutoUpdatePlugin
:members:
:show-inheritance:
anyblok.model.factory module
----------------------------
.. automodule:: anyblok.model.factory
.. autoclass:: BaseFactory
:members:
ModelFactory
~~~~~~~~~~~~
.. autoclass:: ModelFactory
:members:
:show-inheritance:
ViewFactory
~~~~~~~~~~~
.. autoclass:: ViewFactory
:members:
:show-inheritance:
anyblok.mapper module
---------------------
.. automodule:: anyblok.mapper
.. autoexception:: ModelAttributeException
:members:
:show-inheritance:
:inherited-members:
.. autoexception:: ModelReprException
:members:
:show-inheritance:
:inherited-members:
.. autoexception:: ModelAttributeAdapterException
:members:
:show-inheritance:
:inherited-members:
.. autoexception:: MapperException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: ModelRepr
:members:
.. autoclass:: ModelAttribute
:members:
.. autoclass:: ModelMapper
:members:
.. autoclass:: ModelAttributeMapper
:members:
.. autofunction:: ModelAttributeAdapter
.. autofunction:: ModelAdapter
.. autofunction:: MapperAdapter
anyblok.config module
---------------------
.. automodule:: anyblok.config
.. autoexception:: ConfigurationException
:members:
:show-inheritance:
:inherited-members:
.. autofunction:: get_db_name
.. autofunction:: get_url
.. autoclass:: Configuration
:members:
anyblok.logging module
----------------------
.. automodule:: anyblok.logging
.. autoclass:: consoleFormatter
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: anyblokFormatter
:members:
:show-inheritance:
:inherited-members:
.. autofunction:: log
anyblok.imp module
------------------
.. automodule:: anyblok.imp
.. autoexception:: ImportManagerException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: ImportManager
:members:
anyblok.environment module
--------------------------
.. automodule:: anyblok.environment
.. autoexception:: EnvironmentException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: EnvironmentManager
:members:
.. autoclass:: ThreadEnvironment
:members:
anyblok.blok module
-------------------
.. automodule:: anyblok.blok
.. autoexception:: BlokManagerException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: BlokManager
:members:
.. autoclass:: Blok
:members:
anyblok.registry module
-----------------------
.. automodule:: anyblok.registry
.. autoexception:: RegistryManagerException
:members:
:show-inheritance:
:inherited-members:
.. autoexception:: RegistryException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: RegistryManager
:members:
.. autoclass:: Registry
:members:
anyblok.migration module
------------------------
.. automodule:: anyblok.migration
.. warning::
    AnyBlok uses Alembic to do the dynamic migration, but Alembic doesn't detect
    all the changes (primary key, ...); we must wait for, or implement, the missing
    detection in the Alembic project before relying on it in AnyBlok
.. autoexception:: MigrationException
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: MigrationReport
:members:
.. autoclass:: MigrationConstraintForeignKey
:members:
.. autoclass:: MigrationColumn
:members:
.. autoclass:: MigrationConstraintCheck
:members:
.. autoclass:: MigrationConstraintUnique
:members:
.. autoclass:: MigrationConstraintPrimaryKey
:members:
.. autoclass:: MigrationIndex
:members:
.. autoclass:: MigrationTable
:members:
.. autoclass:: Migration
:members:
anyblok.field module
--------------------
.. automodule:: anyblok.field
.. autoclass:: Field
:members:
.. autoclass:: Function
:show-inheritance:
:inherited-members:
:members:
anyblok.column module
----------------------
.. automodule:: anyblok.column
.. autoclass:: Column
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Integer
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: BigInteger
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Boolean
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Float
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Decimal
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Date
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: DateTime
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Time
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Interval
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: String
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Text
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: StrSelection
:members:
.. autoclass:: SelectionType
:members:
.. autoclass:: Selection
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Json
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: LargeBinary
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Color
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Password
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: PhoneNumber
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Email
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Country
:show-inheritance:
:inherited-members:
:members:
anyblok.relationship module
---------------------------
.. automodule:: anyblok.relationship
.. autoclass:: RelationShip
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Many2One
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: One2One
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: Many2Many
:show-inheritance:
:inherited-members:
:members:
.. autoclass:: One2Many
:show-inheritance:
:inherited-members:
:members:
anyblok._graphviz module
------------------------
.. automodule:: anyblok._graphviz
.. autoclass:: BaseSchema
:members:
.. autoclass:: SQLSchema
:members:
.. autoclass:: TableSchema
:members:
.. autoclass:: ModelSchema
:members:
.. autoclass:: ClassSchema
:members:
anyblok.scripts module
----------------------
.. automodule:: anyblok.scripts
.. autofunction:: anyblok_createdb
.. autofunction:: anyblok_updatedb
.. autofunction:: anyblok_interpreter
.. autofunction:: anyblok_nose
.. autofunction:: anyblok2doc
anyblok.tests.testcase module
-----------------------------
.. automodule:: anyblok.tests.testcase
.. autoclass:: BlokTestCase
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: LogCapture
:members:
:undoc-members:
:show-inheritance:
:inherited-members:
.. autoclass:: DBTestCase
:members:
:undoc-members:
:show-inheritance:
.. autoclass:: TestCase
:members:
:undoc-members:
:show-inheritance:
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2015 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2016 Jean-Sebastien SUZANNE <[email protected]>
.. Copyright (C) 2019 Hugo QUEZADA <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
.. AnyBlok documentation master file, created by
sphinx-quickstart on Mon Feb 24 10:12:33 2014.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
.. contents::
Front Matter
============
Information about the AnyBlok project.
Project Homepage
----------------
AnyBlok is a Python framework allowing to create highly dynamic and modular
applications on top of SQLAlchemy.
AnyBlok is released under the terms of the `Mozilla Public License`.
AnyBlok is hosted on `github <https://github.com>`_ - the main project
page is at https://github.com/anyblok/anyblok or
http://code.anyblok.org. Source code is tracked here
using `git <https://git-scm.com>`_.
Releases and project status are available on Pypi at
https://pypi.python.org/pypi/anyblok.
The most recent published version of the documentation should be at
https://doc.anyblok.org.
There is a tutorial to teach you how to develop applications with AnyBlok at
https://anyblok.gitbooks.io/anyblok-book/content/en/
Project Status
--------------
AnyBlok is expected to be stable.
Some early partners are using it on production and are involved in
the project development.
We are aiming to make a stable release as soon as possible.
Users should take care to report bugs and missing features on an as-needed
basis.
It should be expected that the development version may be required
for proper implementation of recently repaired issues in between releases;
the latest master is always available at https://github.com/AnyBlok/AnyBlok/archive/master.zip.
Installation
------------
Install released versions of AnyBlok from the Python package index with
`pip <http://pypi.python.org/pypi/pip>`_ or a similar tool::
pip install anyblok
Installation from a source distribution is done via the ``setup.py`` script::
python setup.py install
Installation will add the ``anyblok`` commands to the environment.
.. note:: AnyBlok uses Python version >= 3.6
Running Tests
-------------
.. .. seealso:: the :ref:`section about testing of AnyBlok applications
.. <basedoc_tests>`.
To run framework tests with ``pytest``::
pip install pytest
ANYBLOK_DATABASE_DRIVER=postgresql ANYBLOK_DATABASE_NAME=test_anyblok py.test anyblok/tests
To run tests of all installed bloks::
anyblok_createdb --db-name test_anyblok --db-driver-name postgresql --install-all-bloks
ANYBLOK_DATABASE_DRIVER=postgresql ANYBLOK_DATABASE_NAME=test_anyblok py.test anyblok/bloks
AnyBlok is tested continuously using `Travis CI
<https://travis-ci.org/AnyBlok/AnyBlok>`_
Contributing (hackers needed!)
------------------------------
AnyBlok is ready for production usage even though it can be
improved and enriched.
Feel free to fork, talk with the core developers, and spread the word!
Author
------
Jean-Sébastien Suzanne
Contributors
------------
* Jean-Sébastien Suzanne
* Georges Racinet
* Pierre Verkest
* Franck Bret
* Denis Viviès
* Alexis Tourneux
* Hugo Quezada
* Simon André
* Florent Jouatte
* Christophe Combelles
* Sébastien Chazallet
Bugs
----
Bugs and feature enhancements to AnyBlok should be reported on the `Issue
tracker <http://issue.anyblok.org>`_.
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2014 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
Builtin Bloks
=============
AnyBlok ships with some builtin Bloks. Among them,
:ref:`anyblok-core <blok_anyblok_core>` is essential for the
framework itself, while the others provide optional functionalities
that have been found generic enough that uniformity across
applications would be a good thing.
.. contents:: Covered Bloks
:local:
:depth: 1
.. _blok_anyblok_core:
Blok anyblok-core
-----------------
.. automodule:: anyblok.bloks.anyblok_core
.. autoclass:: AnyBlokCore
:show-inheritance:
.. autoattribute:: name
.. autoattribute:: version
.. autoattribute:: author
.. autoattribute:: autoinstall
.. autoattribute:: priority
.. include:: ../anyblok/bloks/anyblok_core/CODE.rst
.. _blok_model_authz:
Blok Model Authz
----------------
.. automodule:: anyblok.bloks.model_authz
.. autoclass:: ModelBasedAuthorizationBlok
:members:
:undoc-members:
:show-inheritance:
.. include:: ../anyblok/bloks/model_authz/README.rst
.. include:: ../anyblok/bloks/model_authz/CODE.rst
.. _blok_anyblok_test:
Blok anyblok-test
-----------------
.. automodule:: anyblok.bloks.anyblok_test
.. autoclass:: AnyBlokTest
:show-inheritance:
.. autoattribute:: name
.. autoattribute:: version
.. autoattribute:: author
.. autoattribute:: autoinstall
.. autoattribute:: priority
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2016 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
.. contents::
MEMENTO
=======
Anyblok mainly depends on:
* Python 3.2+
* `SQLAlchemy <http://www.sqlalchemy.org>`_
* `Alembic <http://alembic.readthedocs.org/en/latest/>`_
Blok
----
A blok is a collection of source code files. These files are loaded in the registry
only if the blok state is ``installed``.
To declare a blok you have to:
1) Declare a Python package::
The name of the module is not really significant
--> Just create an ``__init__.py`` file
2) Declare a blok class in the ``__init__.py`` of the Python package::
from anyblok.blok import Blok
class MyBlok(Blok):
""" Short description of the blok """
...
version = '1.0.0'
Here are the available attributes for the blok:
+-----------------------+-----------------------------------------------------+
| Attribute | Description |
+=======================+=====================================================+
| ``__doc__`` | Short description of the blok (in the docstring) |
+-----------------------+-----------------------------------------------------+
| ``version`` | the version of the blok (required because no value |
| | by default) |
+-----------------------+-----------------------------------------------------+
| ``autoinstall`` | boolean, if ``True`` this blok is automatically |
| | installed |
+-----------------------+-----------------------------------------------------+
| ``priority``          | installation order of the blok                      |
+-----------------------+-----------------------------------------------------+
| ``readme`` | Path of the 'readme' file of the blok, by default |
| | ``README.rst`` |
+-----------------------+-----------------------------------------------------+
| ``required``          | List of the required dependencies for install       |
+-----------------------+-----------------------------------------------------+
| ``optional``          | List of the optional dependencies, they are         |
| | installed if they are found |
+-----------------------+-----------------------------------------------------+
| ``conflicting``       | List of bloks that must not be installed to install |
| | this blok |
+-----------------------+-----------------------------------------------------+
| ``conditionnal``      | If the bloks of this list are installed then this   |
|                       | blok will be automatically installed                |
+-----------------------+-----------------------------------------------------+
And the methods that define blok behaviours:
+-------------------------------+---------------------------------------------+
| Method | Description |
+===============================+=============================================+
| ``import_declaration_module`` | ``classmethod``, called to import all the   |
|                               | Python modules declaring the blok's objects.|
+-------------------------------+---------------------------------------------+
| ``reload_declaration_module`` | ``classmethod``, called to reload all the   |
|                               | Python modules declaring the blok's objects.|
+-------------------------------+---------------------------------------------+
| ``update`` | Action to do when the blok is being |
|                               | installed or updated. This method has one   |
| | argument ``latest_version`` (None for |
| | install) |
| | |
| | Since version **0.20.0** the |
|                               | ``latest_version`` is a                     |
| | **pkg_resources.parse_version** |
+-------------------------------+---------------------------------------------+
| ``update_demo`` | Action to do when the blok is being |
|                               | installed or updated. Called after          |
|                               | ``update`` if the database was created with |
|                               | the ``--with-demo`` parameter.              |
| | This method has one argument |
| | ``latest_version`` (None for install) |
| | |
| | Since version **0.20.0** the |
|                               | ``latest_version`` is a                     |
| | **pkg_resources.parse_version** |
+-------------------------------+---------------------------------------------+
| ``uninstall_demo`` | Action to do when the blok is being |
| | uninstalled. Called before ``uninstall`` |
| | if database was created with ``--with-demo``|
| | parameter. |
+-------------------------------+---------------------------------------------+
| ``uninstall`` | Action to do when the blok is being |
| | uninstalled |
+-------------------------------+---------------------------------------------+
| ``load`` | Action to do when the server starts |
+-------------------------------+---------------------------------------------+
| ``pre_migration`` | Action to do when the blok is being |
| | installed or updated to make some specific |
| | migration, before auto migration. |
| | This method has one argument |
| | ``latest_version`` (None for install) |
| | |
| | Since version **0.20.0** the |
|                               | ``latest_version`` is a                     |
| | **pkg_resources.parse_version** |
+-------------------------------+---------------------------------------------+
| ``post_migration`` | Action to do when the blok is being |
| | installed or updated to make some specific |
| | migration, after auto migration. |
| | This method has one argument |
| | ``latest_version`` (None for install) |
| | |
| | Since version **0.20.0** the |
|                               | ``latest_version`` is a                     |
| | **pkg_resources.parse_version** |
+-------------------------------+---------------------------------------------+
And some facility:
+-------------------------------+---------------------------------------------+
| Method | Description |
+===============================+=============================================+
| ``import_file`` | facility to import data |
+-------------------------------+---------------------------------------------+
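Putting the attributes and methods above together, a minimal blok could look like
the following sketch (the blok name, version, and imported module are illustrative
only)::

    from anyblok.blok import Blok

    class MyBlok(Blok):
        """ Manage my business objects """

        version = '1.0.0'
        required = ['anyblok-core']

        @classmethod
        def import_declaration_module(cls):
            # import the Python modules which declare Models, Mixins, ...
            from . import models  # noqa

        @classmethod
        def reload_declaration_module(cls, reload):
            from . import models
            reload(models)

        def update(self, latest_version):
            if latest_version is None:
                # first installation of the blok
                pass
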
.. note::
    The version 0.2.0 changed the import and reload of the Python modules
3) Declare the entry point in the ``setup.py``::
from setuptools import setup
setup(
...
entry_points={
'bloks': [
'web=anyblok_web_server.bloks.web:Web',
],
},
...
)
.. note::
    Since version 0.4.0, all bloks must be declared on the entry
    point **bloks**
Declaration
-----------
In AnyBlok, everything is a declaration (Model, Mixin, ...) and you have to
import the ``Declarations`` class::
from anyblok.declarations import Declarations
The ``Declarations`` class has two main methods:
+---------------------+-------------------------------------------------------+
| Method name | Description |
+=====================+=======================================================+
| ``register`` | Add the declaration in the registry |
| | This method can be used as: |
| | |
| | * A function:: |
| | |
| | class Foo: |
| | pass |
| | |
| | register(``Declarations.type``, cls_=Foo) |
| | |
| | * A decorator:: |
| | |
| | @register(``Declarations.type``) |
| | class Foo: |
| | pass |
| | |
+---------------------+-------------------------------------------------------+
| ``unregister`` | Remove an existing declaration from the registry. |
| | This method is only used as a function:: |
| | |
| | from ... import Foo |
| | |
| | unregister(``Declarations.type``, cls_=Foo) |
| | |
+---------------------+-------------------------------------------------------+
.. note::
``Declarations.type`` must be replaced by:
* Model
* ...
``Declarations.type`` defines the behaviour of the ``register`` and
``unregister`` methods
Model
-----
A Model is an AnyBlok class referenced in the registry. The registry of AnyBlok
is hierarchical. The model ``Foo`` is accessed by ``anyblok.Foo`` and the model
``Foo.Bar`` is accessed by ``anyblok.Foo.Bar``.
To declare a Model you must use ``register``::
from anyblok.declarations import Declarations
register = Declarations.register
Model = Declarations.Model
    @register(Model)
class Foo:
pass
The name of the model is defined by the name of the class (here ``Foo``).
The namespace of ``Foo`` is defined by the hierarchy under ``Model``. In this
example, ``Foo`` is in ``Model``, you can access at ``Foo`` by ``Model.Foo``.
.. warning::
``Model.Foo`` is not the ``Foo`` Model. It is an avatar of ``Foo`` only
used for the declaration.
If you define the ``Bar`` model, under the ``Foo`` model, you should write::
@register(Model.Foo)
class Bar:
""" Description of the model """
pass
.. note::
The description is used by the model System.Model to describe the model
The declaration name of ``Bar`` is ``Model.Foo.Bar``. The namespace of
``Bar`` in the AnyBlok registry is ``Foo.Bar``. The namespace of ``Foo`` in the
registry of AnyBlok is ``Foo``::
Foo = anyblok.Foo
Bar = anyblok.Foo.Bar
Some models have a table in the database. The name of the table is by default the
namespace in lowercase with ``.`` replaced with ``_``.
.. note::
    The registry of AnyBlok is accessible only in the methods of the models::
@register(Model)
class Foo:
def myMethod(self):
anyblok = self.anyblok
Foo = anyblok.Foo
The main goal of AnyBlok is not only to add models in the registry, but also
to easily overload these models. The declaration stores the Python class in
the registry. If one model already exists then the second declaration of this
model overloads the first one::
@register(Model)
class Foo:
x = 1
@register(Model)
class Foo:
x = 2
------------------------------------------
Foo = anyblok.Foo
assert Foo.x == 2
Here are the parameters of the ``register`` method for ``Model``:
+------------------+---------------------------------------------------------------+
| Param | Description |
+==================+===============================================================+
| cls\_ | Define the real class if ``register`` is used as a |
| | function not as a decorator |
+------------------+---------------------------------------------------------------+
| name\_ | Overload the name of the class:: |
| | |
| | @register(Model, name_='Bar') |
| | class Foo: |
| | pass |
| | |
| | Declarations.Bar |
| | |
+------------------+---------------------------------------------------------------+
| factory | Factory class to build the Model class. |
| | Default : ``anyblok.model.factory.ModelFactory`` |
+------------------+---------------------------------------------------------------+
| tablename | Define the real name of the table. By default the table name |
| | is the registry name without the declaration type, and with |
| | '.' replaced with '_'. This attribute is also used to map an |
| | existing table declared by a previous Model. Allowed values: |
| | |
| | * str :: |
| | |
| | @register(Model, tablename='foo') |
| | class Bar: |
| | pass |
| | |
| | * declaration :: |
| | |
| | @register(Model, tablename=Model.Foo) |
| | class Bar: |
| | pass |
| | |
+------------------+---------------------------------------------------------------+
| ignore_migration | If True then the table will not be altered if the definition |
|                  | of the model and the schema in the database are different ::  |
| | |
| | @register(Model, ignore_migration=True) |
| | class Foo: |
| | pass |
| | |
+------------------+---------------------------------------------------------------+
.. warning::
Model can only inherit simple python class, Mixin or Model.
Non SQL Model
~~~~~~~~~~~~~
This is the default model. This model has no table. It is used to
organize the registry or for specific processes::
@register(Model)
class Foo:
pass
SQL Model
~~~~~~~~~
A ``SQL Model`` is a simple ``Model`` with ``Column`` or ``RelationShip`` fields. For
each model, one table will be created::
@register(Model)
class Foo:
        # SQL Model mapped to the table ``foo``
id = Integer(primary_key=True)
# id is a column on the table ``foo``
.. warning:: Each SQL Model must have one or more primary keys
In case you need to add some configuration to the SQLAlchemy class
attributes:
* __table_args\_\_
* __table_kwargs\_\_
* __mapper_args\_\_
you can use the following class methods
+---------------------+--------------------------------------------------------+
| method | description |
+=====================+========================================================+
| __db_schema__ | Simple attribute to define the name of the schema in |
| | which the model-related table is located |
+---------------------+--------------------------------------------------------+
| define_table_args | Add options for SQLAlchemy table build: |
| | |
| | * Constraints on multiple columns |
| | * ... |
| | |
| | :: |
| | |
| | @classmethod |
| | def define_table_args(cls): |
| | res = super(MyModel, cls).define_table_args() |
| | return res + my_tuple_value |
| | |
+---------------------+--------------------------------------------------------+
| define_table_kwargs | Add named options for SQLAlchemy table build: |
| | |
| | :: |
| | |
| | @classmethod |
| | def define_table_kwargs(cls): |
| | res = super(MyModel, cls).define_table_kwargs()|
| | res.update(my_tuple_value) |
| | return res |
| | |
+---------------------+--------------------------------------------------------+
| define_mapper_args | Add options for SQLAlchemy mappers build: |
| | |
| | * polymorphism |
| | * ... |
| | |
| | :: |
| | |
| | @classmethod |
| | def define_mapper_args(cls): |
| | return my_dict_value |
| | |
+---------------------+--------------------------------------------------------+
.. note::
New in 0.4.0
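For example, a multi-column unique constraint could be added through
``define_table_args`` (a sketch only; the model and constraint are illustrative)::

    from sqlalchemy import UniqueConstraint
    from anyblok.declarations import Declarations
    from anyblok.column import Integer, String

    @Declarations.register(Declarations.Model)
    class Foo:
        id = Integer(primary_key=True)
        name = String()
        code = String()

        @classmethod
        def define_table_args(cls):
            res = super(Foo, cls).define_table_args()
            # add a constraint on the couple (name, code)
            return res + (UniqueConstraint('name', 'code'),)
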
View Model
~~~~~~~~~~
A ``View Model`` is like a ``SQL Model``. It needs the declaration of ``Column`` and / or
``RelationShip``. In the ``register`` the param ``factory`` must be
``anyblok.model.factory.ViewFactory`` and the ``View Model`` must define the
``sqlalchemy_view_declaration`` classmethod::
from anyblok.model.factory import ViewFactory
@register(Model, factory=ViewFactory)
class Foo:
id = Integer(primary_key=True)
name = String()
@classmethod
def sqlalchemy_view_declaration(cls):
from sqlalchemy.sql import select
Model = cls.anyblok.System.Model
return select([Model.id.label('id'), Model.name.label('name')])
``sqlalchemy_view_declaration`` must return a select query corresponding to the
request of the SQL view.
Column
------
To declare a ``Column`` in a model, add a column on the table of the model::
from anyblok.declarations import Declarations
from anyblok.column import Integer, String
    @Declarations.register(Declarations.Model)
class MyModel:
id = Integer(primary_key=True)
name = String()
.. note::
Since the version 0.4.0 the ``Columns`` are not ``Declarations``
List of the column type:
* ``DateTime``: use datetime.datetime, with pytz for the timezone
* ``TimeStamp``: use datetime.datetime, with pytz for the timezone
* ``Decimal``: use decimal.Decimal
* ``Float``
* ``Time``: use datetime.time
* ``BigInteger``
* ``Boolean``
* ``Date``: use datetime.date
* ``Integer``
* ``Interval``: use datetime.timedelta
* ``LargeBinary``
* ``String``
* ``Text``
* ``Selection``
* ``Enum``: use enum.Enum inherited class
* ``Json``
* ``Sequence``
* ``Color``: use colour.Color
* ``Password``: use sqlalchemy_utils.types.password.Password
* ``UUID``: use uuid
* ``URL``: use furl.furl
* ``PhoneNumber``: use sqlalchemy_utils.PhoneNumber
* ``Email``
* ``Country``: use pycountry
* ``ModelSelection``
* ``ModelFieldSelection``
All the columns have the following optional parameters:
+------------------+------------------------------------------------------------+
| Parameter | Description |
+==================+============================================================+
| label | Label of the column, If None the label is the name of |
| | column capitalized |
+------------------+------------------------------------------------------------+
| default | define a default value for this column. |
| | |
|                  | .. warning::                                               |
| | |
| | The default value depends of the column type |
| | |
|                  | .. note::                                                  |
| | |
| | Put the name of a classmethod to call it |
| | |
+------------------+------------------------------------------------------------+
| index | boolean flag to define whether the column is indexed |
+------------------+------------------------------------------------------------+
| nullable | Defines if the column must be filled or not |
+------------------+------------------------------------------------------------+
| primary_key | Boolean flag to define if the column is a primary key or |
| | not |
+------------------+------------------------------------------------------------+
| unique | Boolean flag to define if the column value must be unique |
| | or not |
+------------------+------------------------------------------------------------+
| foreign_key | Define a foreign key on this column to another column of |
| | another model:: |
| | |
| | @register(Model) |
| | class Foo: |
| | id = Integer(primary_key=True) |
| | |
| | @register(Model) |
| | class Bar: |
| | id = Integer(primary_key=True) |
| | foo = Integer(foreign_key=Model.Foo.use('id')) |
| | |
| | If the ``Model`` Declarations doesn't exist yet, you can |
|                  | use the registry name::                                    |
| | |
|                  |     foo = Integer(foreign_key='Model.Foo=>id')             |
| | |
+------------------+------------------------------------------------------------+
| db_column_name | String to define the real column name in the table, |
| | different from the model attribute name |
+------------------+------------------------------------------------------------+
| encrypt_key | Crypt the column in the database. can take the values: |
| | |
| | * a String ex: foo = String(encrypt_key='SecretKey') |
| | * a classmethod name on the model |
|                  | * True value, search the value in the Configuration        |
|                  | ``default_encrypt_key``; there is no default.              |
|                  | If no value exists, an exception is raised                 |
| | |
| | ..warning:: |
| | |
| | The python package cryptography must be installed |
| | |
+------------------+------------------------------------------------------------+
| ignore_migration | if True then the column in the table will not be modified |
| | when the definition of the column and the column of the |
|                  | table are different                                        |
+------------------+------------------------------------------------------------+
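A few of these options combined in one declaration (a minimal sketch; the model,
column names and values are illustrative only)::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer, String

    @Declarations.register(Declarations.Model)
    class Customer:
        id = Integer(primary_key=True)
        # not nullable, unique, stored in the column named ``customer_name``
        name = String(nullable=False, unique=True,
                      db_column_name='customer_name')
        # ``default`` can be the name of a classmethod to call
        code = String(default='default_code')

        @classmethod
        def default_code(cls):
            return 'CUST'
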
Other attribute for ``String``:
+-------------+---------------------------------------------------------------+
| Param | Description |
+=============+===============================================================+
| ``size`` | Column size in the table |
+-------------+---------------------------------------------------------------+
Other attribute for ``Selection``:
+----------------+------------------------------------------------------------+
| Param | Description |
+================+============================================================+
| ``size`` | column size in the table |
+----------------+------------------------------------------------------------+
| ``selections`` | ``dict`` or ``dict.items`` to give the available key with |
| | the associate label |
+----------------+------------------------------------------------------------+
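For instance, a ``Selection`` column declared with its available keys and labels
(a sketch; the model, keys and labels are illustrative only)::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer, Selection

    @Declarations.register(Declarations.Model)
    class Order:
        id = Integer(primary_key=True)
        # the stored value is the key, the label is used for display
        state = Selection(selections={'draft': 'Draft', 'done': 'Done'},
                          default='draft')
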
Other attribute for ``Sequence``:
+--------------+--------------------------------------------------------------+
| Param | Description |
+==============+==============================================================+
| ``size`` | column size in the table |
+--------------+--------------------------------------------------------------+
| ``code`` | code of the sequence |
+--------------+--------------------------------------------------------------+
| ``formater`` | formater of the sequence |
+--------------+--------------------------------------------------------------+
Other attribute for ``Color``:
+----------------+------------------------------------------------------------+
| Param | Description |
+================+============================================================+
| ``size`` | column max size in the table |
+----------------+------------------------------------------------------------+
Other attribute for ``Password``:
+-------------------+---------------------------------------------------------+
| Param | Description |
+===================+=========================================================+
| ``size`` | password max size in the table |
+-------------------+---------------------------------------------------------+
| ``crypt_context`` | see the option for the python lib `passlib |
| | <https://passlib.readthedocs.io/en/stable/lib/passlib.co|
| | ntext.html>`_ |
+-------------------+---------------------------------------------------------+
.. warning::
    The Password column can be found with the query method
Other attribute for ``UUID``:
+----------------+------------------------------------------------------------+
| Param | Description |
+================+============================================================+
| ``binary`` | Stores a UUID in the database natively when it can and |
| | falls back to a BINARY(16) or a CHAR(32) |
+----------------+------------------------------------------------------------+
Other attribute for ``DateTime`` and ``TimeStamp``:
+----------------------+------------------------------------------------------+
| Param | Description |
+======================+======================================================+
| ``auto_update`` | Boolean (default: **False**) if True the value will |
|                      | be updated when the session is flushed               |
+----------------------+------------------------------------------------------+
| ``default_timezone`` | timezone or timezone's name, define the timezone to |
|                      | use on naive datetimes.                              |
| | |
| | .. warning:: |
| | The datetime with another timezone don't change |
| | and keep their own timezone |
| | |
| | :: |
| | |
| | tokyo_tz = pytz.timezone('Asia/Tokyo') |
| | |
| | @register(Model) |
| | class Bar: |
| | foo = DateTime(default_timezone=tokyo_tz) |
| | // |
| | foo = DateTime(default_timezone='Asia/Tokyo')|
| | |
+----------------------+------------------------------------------------------+
Other attribute for ``PhoneNumber``:
+----------------------+------------------------------------------------------+
| Param | Description |
+======================+======================================================+
| ``region`` | Default region to save phone number (FR) |
+----------------------+------------------------------------------------------+
| ``max_length`` | max size of the column in the database (20) |
+----------------------+------------------------------------------------------+
Other attribute for ``Country``:
+-----------+-----------------------------------------------------------------+
| Param | Description |
+===========+=================================================================+
| ``mode`` | Define the mode by default to store in the DB (default alpha_2) |
+-----------+-----------------------------------------------------------------+
Other attribute for ``ModelSelection``:
+----------------+------------------------------------------------------------+
| Param | Description |
+================+============================================================+
| ``validator``  | function or name of a method on the Model. Its goal       |
|                | is to define which models can be used. Some functions     |
| | exist in anyblok.column: |
| | |
| | * model_validator_all: All models |
| | * model_validator_is_sql: Only SQL models |
| | * model_validator_is_not_sql: Not the SQL models |
| | * model_validator_is_view: Only models with factory view |
| | * model_validator_is_not_view: Not the model with factory |
| | view |
| | * model_validator_in_namespace: filter by namespace |
| | * merge_validators: Do a and between validators |
| | |
+----------------+------------------------------------------------------------+
Other attribute for ``ModelFieldSelection``:
+---------------------+-------------------------------------------------------+
| Param | Description |
+=====================+=======================================================+
| ``model_validator`` | function or name of a method on the Model. Its goal  |
|                     | is to define which models can be used. Some           |
|                     | functions exist in anyblok.column:                    |
| | |
| | * model_validator_all: All models |
| | * model_validator_is_sql: Only SQL models |
| | * model_validator_is_not_sql: Not the SQL models |
| | * model_validator_is_view: Only models with factory |
| | view |
| | * model_validator_is_not_view: Not the model with |
| | factory view |
| | * model_validator_in_namespace: filter by namespace |
| | * merge_validators: Do a and between validators |
| | |
+---------------------+-------------------------------------------------------+
| ``field_validator`` | function or name of a method on the Model. Its goal  |
|                     | is to define which fields can be used. Some           |
|                     | functions exist in anyblok.column:                    |
| | |
| | * field_validator_all: All fields on the model |
| | * field_validator_is_field: Only no SQL field |
| | * field_validator_is_not_field: Not the SQL fields |
| | * field_validator_is_column: Only the Column field |
| | * field_validator_is_not_column: Not the Column field |
| | * field_validator_is_relationship: Only the |
| | relationsship (Many2One, One2One, One2Many, |
| | Many2Many) |
| | * field_validator_is_not_relationship: Not the |
| | RelationShip |
| | * field_validator_is_named: filter by names of field |
| | * field_validator_is_from_types: filter by Field |
| | Types |
| | |
+---------------------+-------------------------------------------------------+
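As an illustration, a column restricted to SQL models using one of the validators
listed above (a sketch; the model is illustrative only)::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer, ModelSelection, model_validator_is_sql

    @Declarations.register(Declarations.Model)
    class Export:
        id = Integer(primary_key=True)
        # only the registry names of SQL models can be stored here
        model = ModelSelection(validator=model_validator_is_sql)
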
RelationShip
------------
To declare a ``RelationShip`` in a model, add a RelationShip on the table of
the model::
from anyblok.declarations import Declarations
from anyblok.column import Integer
from anyblok.relationship import Many2One
    @Declarations.register(Declarations.Model)
    class MyModel:
        id = Integer(primary_key=True)
    @Declarations.register(Declarations.Model)
    class MyModel2:
        id = Integer(primary_key=True)
        mymodel = Many2One(model=Declarations.Model.MyModel)
.. note::
    Since version 0.4.0 the ``RelationShip`` doesn't come from ``Declarations``
List of the RelationShip type:
* ``One2One``
* ``Many2One``
* ``One2Many``
* ``Many2Many``
Parameters of a ``RelationShip``:
+--------------------+--------------------------------------------------------+
| Param | Description |
+====================+========================================================+
| ``label`` | The label of the column |
+--------------------+--------------------------------------------------------+
| ``model`` | The remote model |
+--------------------+--------------------------------------------------------+
| ``remote_columns`` | The column name on the remote model, if no remote |
| | columns are defined the remote column will be the |
| | primary column of the remote model |
+--------------------+--------------------------------------------------------+
Parameters of the ``One2One`` field:
+-------------------+---------------------------------------------------------+
| Param | Description |
+===================+=========================================================+
| ``column_names`` | Name of the local column. |
| | If the column doesn't exist then this column will be |
| | created. |
| | If no column name then the name will be 'M2O name' + |
| | '_' + 'name of the remote column' |
+-------------------+---------------------------------------------------------+
| ``nullable`` | Indicates if the column name is nullable or not |
+-------------------+---------------------------------------------------------+
| ``backref`` | Remote One2One link with the column name |
+-------------------+---------------------------------------------------------+
| ``unique`` | Add unique constraint on the created column(s) |
+-------------------+---------------------------------------------------------+
| ``index`` | Add index constraint on the created column(s) |
+-------------------+---------------------------------------------------------+
| ``primary_key`` | The created column(s) are primary key |
+-------------------+---------------------------------------------------------+
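For example, a hedged sketch of a ``One2One`` between two purely illustrative
models; ``backref`` creates the opposite relation on ``Person``::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer
    from anyblok.relationship import One2One


    @Declarations.register(Declarations.Model)
    class Person:

        id = Integer(primary_key=True)


    @Declarations.register(Declarations.Model)
    class Desk:

        id = Integer(primary_key=True)
        # the local column is created automatically if it does not exist
        person = One2One(model=Declarations.Model.Person,
                         backref='desk', nullable=True)
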
Parameters of the ``Many2One`` field:
+-------------------------+---------------------------------------------------+
| Parameter | Description |
+=========================+===================================================+
| ``column_names`` | Name of the local column. |
| | If the column doesn't exist then this column will |
| | be created. |
| | If no column name then the name will be |
| | 'M2O name' + '_' + 'name of the remote column' |
+-------------------------+---------------------------------------------------+
| ``nullable`` | Indicate if the column name is nullable or not |
+-------------------------+---------------------------------------------------+
| ``unique`` | Add unique constraint on the created column(s) |
+-------------------------+---------------------------------------------------+
| ``index`` | Add index constraint on the created column(s) |
+-------------------------+---------------------------------------------------+
| ``primary_key`` | The created column(s) are primary key |
+-------------------------+---------------------------------------------------+
| ``one2many`` | Opposite One2Many link with this Many2one |
+-------------------------+---------------------------------------------------+
| ``foreign_key_options`` | takes a dict with the options used to create the |
| | foreign key |
+-------------------------+---------------------------------------------------+
::
    Many2One(model=The.Model, nullable=True,
             foreign_key_options={'ondelete': 'cascade'})
Parameters of the ``One2Many`` field:
+-------------------+---------------------------------------------------------+
| Parameter | Description |
+===================+=========================================================+
| ``primaryjoin`` | Join condition between the relationship and the remote |
| | column |
+-------------------+---------------------------------------------------------+
| ``many2one`` | Opposite Many2One link with this One2Many |
+-------------------+---------------------------------------------------------+
.. warning::
If two or more foreign keys are found to the same primary key, then the
primary join becomes an ``or`` between them. You must consider this field as
read only, because SQLAlchemy will update both foreign keys
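For example, a hedged sketch of a direct ``One2Many`` declaration (the
``Order`` / ``Line`` models are illustrative): the remote model carries the
foreign key, and ``many2one`` creates the opposite relation::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer
    from anyblok.relationship import One2Many

    register = Declarations.register
    Model = Declarations.Model


    @register(Model)
    class Order:

        id = Integer(primary_key=True)


    @register(Model)
    class Line:

        id = Integer(primary_key=True)
        # foreign key to the main model
        order_id = Integer(foreign_key=Model.Order.use('id'))


    @register(Model)  # second declaration of Order to add the One2Many
    class Order:

        lines = One2Many(model=Model.Line, many2one='order')
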
Parameters of the ``Many2Many`` field:
+------------------------+----------------------------------------------------+
| Parameter | Description |
+========================+====================================================+
| ``join_table`` | many2many intermediate table between both models |
+------------------------+----------------------------------------------------+
| ``join_model`` | many2many intermediate table computed from a Model.|
| | This attribute is used to build a rich Many2Many |
| | |
| | .. warning:: |
| | |
| | An exception is raised if the tables computed |
| | from join_table and join_model are different |
| | |
+------------------------+----------------------------------------------------+
| ``m2m_remote_columns`` | Column name in the join table which have got the |
| | foreign key to the remote model |
+------------------------+----------------------------------------------------+
| ``local_columns`` | Name of the local column which holds the foreign |
| | key to the join table. |
| | If the column does not exist then this column will |
| | be created. |
| | If no column name then the name will be 'tablename'|
| | + '_' + name of the relationship |
+------------------------+----------------------------------------------------+
| ``m2m_local_columns`` | Column name in the join table which holds the |
| | foreign key to the model |
+------------------------+----------------------------------------------------+
| ``many2many`` | Opposite Many2Many link with this relationship |
+------------------------+----------------------------------------------------+
| ``compute_join`` | Force the computation of secondaryjoin and |
| | primaryjoin. In most cases this is discouraged |
| | because it is dangerous. The only case where the |
| | computation is required is when the join model has |
| | more than one foreign key to the main model, for a |
| | rich Many2Many |
| | |
| | .. note:: |
| | |
| | In the case where both models are the same, |
| | this option is forced |
| | |
+------------------------+----------------------------------------------------+
.. note::
Since 0.4.0, when the relational table is created by AnyBlok, the
m2m_columns become foreign keys
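For example, a hedged minimal sketch with two illustrative models, letting
AnyBlok create the ``join_article_and_tag`` table itself::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer, String
    from anyblok.relationship import Many2Many

    register = Declarations.register
    Model = Declarations.Model


    @register(Model)
    class Tag:

        name = String(primary_key=True)


    @register(Model)
    class Article:

        id = Integer(primary_key=True)
        # the join table and its foreign keys are created by AnyBlok
        tags = Many2Many(model=Model.Tag,
                         join_table='join_article_and_tag',
                         many2many='articles')
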
Field
-----
To declare a ``Field`` in a Model, add a Field attribute on the Model; it is
not an SQL column::
from sqlalchemy import func
from anyblok.declarations import Declarations
from anyblok.field import Function
from anyblok.column import Integer, String
@Declarations.register(Declarations.Model)
class MyModel:
id = Integer(primary_key=True)
first_name = String()
last_name = String()
name = Function(fget='fget', fset='fset', fdel='fdel', fexpr='fexpr')
def fget(self):
return '{0} {1}'.format(self.first_name, self.last_name)
def fset(self, value):
self.first_name, self.last_name = value.split(' ', 1)
def fdel(self):
self.first_name = self.last_name = None
@classmethod
def fexpr(cls):
return func.concat(cls.first_name, ' ', cls.last_name)
List of the ``Field`` type:
* ``Function``
* ``JsonRelated``
Parameters for ``Field.Function``
+-------------------+---------------------------------------------------------+
| Parameter | Description |
+===================+=========================================================+
| ``fget`` | name of the method to call to get the value of field:: |
| | |
| | def fget(self): |
| | return '{0} {1}'.format(self.first_name, |
| | self.last_name) |
| | |
+-------------------+---------------------------------------------------------+
| ``fset`` | name of the method to call to set the value of field:: |
| | |
| | def fset(self, value): |
| | self.first_name, self.last_name = value.split(' ', 1) |
| | |
+-------------------+---------------------------------------------------------+
| ``fdel`` | name of the method to call to del the value of field:: |
| | |
| | def fdel(self): |
| | self.first_name = self.last_name = None |
| | |
+-------------------+---------------------------------------------------------+
| ``fexpr`` | name of the classmethod to call to filter on the |
| | field:: |
| | |
| | @classmethod |
| | def fexpr(cls): |
| | return func.concat(cls.first_name, ' ', |
| | cls.last_name) |
| | |
+-------------------+---------------------------------------------------------+
| ``fuexp`` | name of the class method to update the field from query |
| | :: |
| | |
| | @classmethod |
| | def fuexp(cls, value): |
| | fname, lname = value.split(" ", 1) |
| | return [ |
| | (cls.first_name, fname), |
| | (cls.last_name, lname), |
| | ] |
| | |
+-------------------+---------------------------------------------------------+
Parameters for ``Field.JsonRelated``
``JsonRelated`` defines a getter and a setter for a key in a **Column.Json**; it
is a helper to alias a specific entry of a **Column.Json**.
+-------------------+---------------------------------------------------------+
| Parameter | Description |
+===================+=========================================================+
| ``json_column`` | name of the json column in the Model |
+-------------------+---------------------------------------------------------+
| ``keys`` | list of string, represent the path in json to store and |
| | get the value |
+-------------------+---------------------------------------------------------+
| ``get_adapter`` | method to convert the data after getting it. This value |
| | can be the name of a method on the model |
+-------------------+---------------------------------------------------------+
| ``set_adapter`` | method to convert the data before storing it. This value |
| | can be the name of a method on the model |
+-------------------+---------------------------------------------------------+
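A minimal sketch (the model is illustrative): the ``city`` field below aliases
``properties['address']['city']``::

    from anyblok.declarations import Declarations
    from anyblok.column import Integer, Json
    from anyblok.field import JsonRelated


    @Declarations.register(Declarations.Model)
    class MyModel:

        id = Integer(primary_key=True)
        properties = Json()
        # read and write properties['address']['city'] through self.city
        city = JsonRelated(json_column='properties', keys=['address', 'city'])
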
Mixin
-----
A Mixin looks like a Model, but has no tables. A Mixin adds behaviour to
a Model with Python inheritance::
@register(Mixin)
class MyMixin:
def foo():
pass
@register(Model)
class MyModel(Mixin.MyMixin):
pass
----------------------------------
assert hasattr(registry.MyModel, 'foo')
If you inherit a mixin, all the models previously using the base mixin also benefit
from the overload::
@register(Mixin)
class MyMixin:
pass
@register(Model)
class MyModel(Mixin.MyMixin):
pass
@register(Mixin)
class MyMixin:
def foo():
pass
----------------------------------
assert hasattr(registry.MyModel, 'foo')
SQL View
--------
An SQL view is a Model declared with the argument
``factory=anyblok.model.factory.ViewFactory`` in the register, and with the
classmethod ``sqlalchemy_view_declaration``::
from sqlalchemy import select
from anyblok.model.factory import ViewFactory
@register(Model)
class T1:
id = Integer(primary_key=True)
code = String()
val = Integer()
@register(Model)
class T2:
id = Integer(primary_key=True)
code = String()
val = Integer()
@register(Model, factory=ViewFactory)
class TestView:
code = String(primary_key=True)
val1 = Integer()
val2 = Integer()
@classmethod
def sqlalchemy_view_declaration(cls):
""" This method must return the query of the view """
T1 = cls.anyblok.T1
T2 = cls.anyblok.T2
query = select([T1.code.label('code'),
T1.val.label('val1'),
T2.val.label('val2')])
return query.where(T1.code == T2.code)
Core
----
``Core`` is a low level set of declarations for all the Models of AnyBlok. ``Core`` adds
general behaviour to the application.
.. warning::
Core cannot inherit from Model, Mixin, Core, or any other declaration type.
Base
~~~~
Adds behaviour to all the Models; each Model inherits Base. For instance, the
``fire`` method of the event system comes from ``Core.Base``.
::
from anyblok import Declarations
@Declarations.register(Declarations.Core)
class Base:
pass
SqlBase
~~~~~~~
Only the Models with ``Field``, ``Column`` or ``RelationShip`` inherit
``Core.SqlBase``. For instance, the ``insert`` method only makes sense for
Models with a table.
::
from anyblok import Declarations
@Declarations.register(Declarations.Core)
class SqlBase:
pass
SqlViewBase
~~~~~~~~~~~
Like ``SqlBase``, only the ``SqlView`` inherits this ``Core`` class.
::
from anyblok import Declarations
@Declarations.register(Declarations.Core)
class SqlViewBase:
pass
Query
~~~~~
Overloads the SQLAlchemy ``Query`` class.
::
from anyblok import Declarations
@Declarations.register(Declarations.Core)
class Query:
pass
InstrumentedList
~~~~~~~~~~~~~~~~
::
from anyblok import Declarations
@Declarations.register(Declarations.Core)
class InstrumentedList:
pass
``InstrumentedList`` is the class returned by the Query for all list results,
such as:
* query.all()
* relationship list (Many2Many, One2Many)
Adds some features like getting a specific property or calling a method on all
the elements of the list::
MyModel.query().all().foo(bar)
Sharing a table between more than one model
-------------------------------------------
SQLAlchemy allows two methods to share a table between two or more mapping
classes:
* Inherit an SQL Model in a non-SQL Model::
@register(Model)
class Test:
id = Integer(primary_key=True)
name = String()
@register(Model)
class Test2(Model.Test):
pass
----------------------------------------
t1 = Test.insert(name='foo')
assert Test2.query().filter(Test2.id == t1.id,
Test2.name == t1.name).count() == 1
* Share the ``__table__``.
AnyBlok cannot pass the table at declaration time, because the table does not
exist yet. But during assembly, if the table exists and the Model declares
the name of this table, AnyBlok links the table directly. To
define the table you must use the named argument ``tablename`` in the
``register``
::
@register(Model)
class Test:
id = Integer(primary_key=True)
name = String()
@register(Model, tablename=Model.Test)
class Test2:
id = Integer(primary_key=True)
name = String()
----------------------------------------
t1 = Test.insert(name='foo')
assert Test2.query().filter(Test2.id == t1.id,
Test2.name == t1.name).count() == 1
.. warning::
There are no checks on the existing columns.
Sharing a view between more than one model
------------------------------------------
Sharing a view between two Models is the merge between:
* Creating a View Model
* Sharing the same table between more than one model.
.. warning::
For the view you must redefine the columns in the Model corresponding to the
view, with inheritance or by simply sharing the tablename
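An illustrative sketch only, under the assumption that the ``TestView`` view from
the previous section is reused through ``tablename`` and that its columns are
redefined on the second Model::

    from anyblok.model.factory import ViewFactory


    @register(Model, factory=ViewFactory, tablename=Model.TestView)
    class TestView2:

        code = String(primary_key=True)
        val1 = Integer()
        val2 = Integer()
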
Specific behaviour
------------------
AnyBlok implements some facilities to help developers
Column encryption
~~~~~~~~~~~~~~~~~
You can encrypt some columns to protect them. The python package cryptography
must be installed::
pip install cryptography
Use the encrypt_key attribute on the column to define the key of cryptography::
@register(Model)
class MyModel:
# define the specific encrypt_key
encrypt_column_1 = String(encrypt_key='SecretKey')
# Use the default encrypt_key
encrypt_column_2 = String(encrypt_key=Configuration.get('default_encrypt_key'))
encrypt_column_3 = String(encrypt_key=True)
# Use the class method to get encrypt_key
encrypt_column_4 = String(encrypt_key='get_encrypt_key')
@classmethod
def get_encrypt_key(cls):
return 'SecretKey'
The encryption works for any Column type.
Environment
~~~~~~~~~~~
The Environment contains non persistent contextual variables. By
default, it is stored in the current :class:`Thread` object, but that
is amendable (see :ref:`environment_types`).
Use the current environment
+++++++++++++++++++++++++++
The environment can be used from anywhere in the code.
Generic use
///////////
To get or set variable in environment, you must import the
``EnvironmentManager``::
from anyblok.environment import EnvironmentManager
Set a variable::
EnvironmentManager.set('my variable name', some_value)
Get a variable::
EnvironmentManager.get('my variable name', default=some_default)
Use from a ``Model``
////////////////////
A class-level attribute is present on all Model classes to access the
Environment variables conveniently.
To grab the EnvironmentManager from a ``Model`` method, just use
``self.Env``. For a classmethod, that would be as in::
@classmethod
def myclsmeth(cls):
env = cls.Env
(...)
Then, it's easy to get and set variables. Here's an example from a Model
instance method::
self.Env.set('my variable name', some_value)
self.Env.get('my variable name', default=some_default_value)
.. note:: the ``Env`` attribute is actually set in
``registry.registry_base``, which is a class dynamically
generated at registry creation, and of which all assembled
classes stored in the registry inherit.
.. _environment_types:
Define a new environment type
+++++++++++++++++++++++++++++
If you do not want to store the environment in the ``Thread``, you must
implement a new type of environment.
This type is a simple class which has these classmethods:
* scoped_function_for_session
* setter
* getter
::
class MyEnvironmentClass:
@classmethod
def scoped_function_for_session(cls):
...
@classmethod
def setter(cls, key, value):
...
@classmethod
def getter(cls, key, default):
...
return value
Declare your class as the Environment class::
EnvironmentManager.define_environment_cls(MyEnvironmentClass)
The classmethod ``scoped_function_for_session`` is passed to the SQLAlchemy
``scoped_session`` function, `see <http://docs.sqlalchemy.org/en/rel_0_9/orm/contextual.html#contextual-thread-local-sessions>`_
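For illustration only, here is a hypothetical minimal environment type that
stores the values in a module-level dict; it is only suitable for
single-threaded scripts and is shown as a sketch, not as the implementation
shipped with AnyBlok::

    from anyblok.environment import EnvironmentManager


    class GlobalDictEnvironment:
        """Hypothetical environment stored in a plain module-level dict."""

        values = {}

        @classmethod
        def scoped_function_for_session(cls):
            # scopefunc given to SQLAlchemy's scoped_session; a constant
            # value means a single shared session scope
            return 'global'

        @classmethod
        def setter(cls, key, value):
            cls.values[key] = value

        @classmethod
        def getter(cls, key, default):
            return cls.values.get(key, default)


    EnvironmentManager.define_environment_cls(GlobalDictEnvironment)
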
Get the registry
~~~~~~~~~~~~~~~~
You can get the registry in any method of Models with the attribute **anyblok**::
Model = self.anyblok.System.Model
assert Model.__registry_name__ == 'Model.System.Model'
.. warning::
Since version 1.1.0 of AnyBlok the attribute **registry** is renamed **anyblok**
Cache
~~~~~
The cache allows a method to be called more than once while its result is
computed only once. The cache must also depend on the registry database and the
model. The AnyBlok cache can be put on a Model, a Core or a Mixin method. If
the cache is on a Core or a Mixin, then the cache depends on the registry name
of the assembled model.
Use ``cache`` or ``classmethod_cache`` to apply a cache on a method::
from anyblok.declarations import cache, classmethod_cache
.. warning::
``cache`` depends on the instance; if you want a cache shared by
all instances you must use ``classmethod_cache``
Cache the method of a Model::
@register(Model)
class Foo:
@classmethod_cache()
def bar(cls):
import random
return random.random()
-----------------------------------------
assert Foo.bar() == Foo.bar()
Cache the method coming from a Mixin::
@register(Mixin)
class MFoo:
@classmethod_cache()
def bar(cls):
import random
return random.random()
@register(Model)
class Foo(Mixin.MFoo):
pass
@register(Model)
class Foo2(Mixin.MFoo):
pass
-----------------------------------------
assert Foo.bar() == Foo.bar()
assert Foo2.bar() == Foo2.bar()
assert Foo.bar() != Foo2.bar()
Cache the method coming from a Core::
@register(Core)
class Base:
@classmethod_cache()
def bar(cls):
import random
return random.random()
@register(Model)
class Foo:
pass
@register(Model)
class Foo2:
pass
-----------------------------------------
assert Foo.bar() == Foo.bar()
assert Foo2.bar() == Foo2.bar()
assert Foo.bar() != Foo2.bar()
Event
~~~~~
Simple implementation of a synchronous ``event`` for AnyBlok or SQLAlchemy::
@register(Model)
class Event:
pass
@register(Model)
class Test:
x = 0
@listen(Model.Event, 'fireevent')
def my_event(cls, a=1, b=1):
cls.x = a * b
---------------------------------------------
registry.Event.fire('fireevent', a=2)
assert registry.Test.x == 2
.. note::
The decorated method is seen as a classmethod
This API gives:
* a decorator ``listen`` which binds the decorated method to the event.
* ``fire`` method with the following parameters (Only for AnyBlok event):
- ``event``: string name of the event
- ``*args``: positional arguments passed to the decorated method
- ``**kwargs``: keyword arguments passed to the decorated method
It is possible to overload an existing event listener, just by overloading the
decorated method::
@register(Model)
class Test:
@classmethod
def my_event(cls, **kwargs):
res = super(Test, cls).my_event(**kwargs)
return res * 2
---------------------------------------------
registry.Event.fire('fireevent', a=2)
assert registry.Test.x == 4
.. warning::
The overload does not take the ``listen`` decorator but the
classmethod decorator, because the method name is already seen as an
event listener
Some of the Attribute and Mapper events are implemented. See the
SQLAlchemy ORM Events http://docs.sqlalchemy.org/en/latest/orm/events.html#orm-events
You may also add a classmethod named ``event type + '_orm_event'``. The listener
will be automatically created on the Model for that event type, without arguments::
@register(Model)
class Test:
x = 0
@classmethod
def after_insert_orm_event(cls, mapper, connection, target):
# call when a new instance of Test is added in the session
pass
@listen('Model.Test', 'after_insert')
def another_orm_event(cls, mapper, connection, target):
    # it has the same effect as ``after_insert_orm_event``,
    # it is called after the add of a new instance in the session
    pass
Hybrid method
~~~~~~~~~~~~~
Facility to create an SQLAlchemy hybrid method. See this page:
http://docs.sqlalchemy.org/en/latest/orm/extensions/hybrid.html#module-sqlalchemy.ext.hybrid
AnyBlok allows you to define a hybrid_method that can be overloaded, because the
real SQLAlchemy decorator is only applied after assembly, on the last overload
of the decorated method::
from anyblok.declarations import hybrid_method
@register(Model)
class Test:
@hybrid_method
def my_hybrid_method(self):
return ...
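For illustration, a hedged sketch with a hypothetical ``Test`` Model: the same
hybrid method works on an instance (plain Python) and inside a query filter
(rendered as SQL)::

    from anyblok.declarations import Declarations, hybrid_method
    from anyblok.column import Integer

    register = Declarations.register
    Model = Declarations.Model


    @register(Model)
    class Test:

        id = Integer(primary_key=True)
        val = Integer()

        @hybrid_method
        def is_big(self, threshold):
            return self.val > threshold

    ---------------------------------------------

    t = registry.Test.insert(val=10)
    assert t.is_big(5)  # evaluated in Python on the instance
    query = registry.Test.query().filter(registry.Test.is_big(5))
    assert t in query.all()  # evaluated as an SQL expression
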
Pre-commit hook
~~~~~~~~~~~~~~~
It is possible to call specific classmethods just before the commit of the
session::
@register(Model)
class Test:
id = Integer(primary_key=True)
val = Integer(default=0)
@classmethod
def method2call_just_before_the_commit(cls, *a, **kw):
pass
-----------------------------------------------------
registry.Test.precommit_hook('method2call_just_before_the_commit', *a, **kw)
Post-commit hook
~~~~~~~~~~~~~~~~
It is possible to call specific classmethods just after the commit of the
session::
@register(Model)
class Test:
id = Integer(primary_key=True)
val = Integer(default=0)
@classmethod
def method2call_just_after_the_commit(cls, *a, **kw):
pass
-----------------------------------------------------
registry.Test.postcommit_hook('method2call_just_after_the_commit', *a, **kw)
Aliased
~~~~~~~
Facility to create an SQL alias of a Model in the queries issued by the ORM::

    select * from my_table as the_table_alias

This facility is given by SQLAlchemy, and AnyBlok adds this functionality
directly on the Model::
BlokAliased = registry.System.Blok.aliased()
.. note:: See this page:
http://docs.sqlalchemy.org/en/latest/orm/query.html#sqlalchemy.orm.aliased
to know the parameters of the ``aliased`` method
.. warning:: The first arg is already passed by AnyBlok
.. warning:: Only this method injects the registry into the alias; do not import **sqlalchemy.orm.aliased** directly
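For example, an alias can be used in a self-join; an illustrative sketch with
the ``System.Blok`` model::

    Blok = registry.System.Blok
    BlokAliased = Blok.aliased(name='blok_alias')
    # join the model with its own alias on the primary key
    query = Blok.query().join(BlokAliased, BlokAliased.name == Blok.name)
    assert query.count() == Blok.query().count()
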
Get the current environment
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The current environment is saved, by default, in the current thread. You can
add a value to the current Environment::

    self.Env.set('My var', 'one value')

You can get a value from the current Environment::

    myvalue = self.Env.get('My var', default="My default value")

.. note::

    The environment behaves like a dict: the value can be an instance of any type
Initialize some data by entry point
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The entry point ``anyblok.init`` allows you to define a function,
``init_function`` in this example::
setup(
...
entry_points={
'anyblok.init': [
'my_function=path:init_function',
],
},
)
In the path the init_function must be defined::
def init_function(unittest=False):
...
.. warning::

    Use the unittest parameter to define whether the function must be called
    or not
Easily make a read only model
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In some cases you want your model to be:
* readonly: No modification, No deletion::
@register(...)
class MyModel(Mixin.ReadOnly):
...
* forbid modification: No modification but can delete::
@register(...)
class MyModel(Mixin.ForbidUpdate):
...
* forbid deletion: No deletion but can modify::
@register(...)
class MyModel(Mixin.ForbidDelete):
...
Plugin
------
Plugins are used at a low level; they are not used in the bloks, because there
the Models can be overloaded by declarations.
Define a new plugin
~~~~~~~~~~~~~~~~~~~
A plugin can be a class or a function::
class MyPlugin:
pass
Add the plugin definition in the configuration::
@Configuration.add('plugins')
def add_plugins(self, group):
group.add_argument('--my-option', dest='plugin_name',
type=AnyBlokPlugin,
default='path:MyPlugin')
Use the plugin::
plugin = Configuration.get('plugin_name')
**anyblok.model.plugin**
------------------------
This is a hook to add new features to Models; it is already used for:
* hybrid_method
* table and mapper args
* event
* Sqlalchemy event
* cache / classmethod_cache
Start by implementing the plugin (see
:class:`ModelPluginBase <anyblok.model.plugins.ModelPluginBase>`)::
from anyblok.model.plugins import ModelPluginBase
class MyPlugin(ModelPluginBase):
...
Then, declare it in ``setup.py``::
setup(
...
entry_points={
...
'anyblok.model.plugin': [
'myplugin=path:MyPlugin',
],
...
},
...
)
**anyblok.model.factory**
-------------------------
This factory is used to:
* provide the core classes needed to build the model
* build the model
Start by implementing the factory (see
:class:`BaseFactory <anyblok.model.factory.BaseFactory>`)::
from anyblok.model.factory import BaseFactory
class MyFactory(BaseFactory):
def insert_core_bases(self, bases, properties):
...
def build_model(self, modelname, bases, properties):
...
In your bloks you can use your factory::
@register(Model, factory=MyFactory)
class MyModel:
...
**Engine's events**
-------------------
Engine events are used to define SQLAlchemy event listeners on the engine.
These events are declared by entry point:
* **anyblok.engine.event** : For all dialects
* **anyblok.engine.event.postgres** : only for postgresql
* **anyblok.engine.event.mysql** : only for MySQL
* **anyblok.engine.event.mssql** : only for MsSQL
Example with the **mysql_no_autocommit** listener
**anyblok.event**::
from sqlalchemy import event
def mysql_no_autocommit(engine):
def mysql_set_no_autocommit(dbapi_con, connection_record):
cur = dbapi_con.cursor()
cur.execute("SET autocommit=0;")
cur.execute("SET SESSION sql_mode='TRADITIONAL';")
cur = None
event.listen(engine, 'connect', mysql_set_no_autocommit)
**setup.py**::
setup(
entry_points={
'anyblok.engine.event.mysql': [
'mysql-no-autocommit=anyblok.event:mysql_no_autocommit',
],
},
)
.. note::
The SQLAlchemy documentation for the `core events <https://docs.sqlalchemy.org/en/14/core/events.html?highlight=event#connection-pool-events>`_
**Session's events**
--------------------
Session events are used to define SQLAlchemy event listeners on the session.
These events are declared by entry point:
* **anyblok.session.event** : For all dialects
* **anyblok.session.event.postgresql** : only for postgresql
* **anyblok.session.event.mysql** : only for MySQL
* **anyblok.session.event.mssql** : only for MsSQL
Example::
from sqlalchemy import event
def do_something(session):
def something(sess, transaction, connection):
pass
event.listen(session, 'after_begin', something)
**setup.py**::
setup(
entry_points={
'anyblok.session.event': [
'do-something=path:do_something',
],
},
)
.. note::
The SQLAlchemy documentation for the `session events <https://docs.sqlalchemy.org/en/14/orm/events.html#session-events>`_
.. This file is a part of the AnyBlok project
..
.. Copyright (C) 2015 Jean-Sebastien SUZANNE <[email protected]>
..
.. This Source Code Form is subject to the terms of the Mozilla Public License,
.. v. 2.0. If a copy of the MPL was not distributed with this file,You can
.. obtain one at http://mozilla.org/MPL/2.0/.
.. contents::
Basic usage
===========
To demonstrate, we will write a simple application; let's call it
``WorkApp``.
Here are the Models we'll create, with their fields.
Anyblok being an ORM framework, these will be Python classes, backed
by database tables.
* Employee
- name: employee's name
- office (Room): the room where the employee works
- position: employee position (manager, developer...)
* Room
- number: describe the room in the building
- address: postal address
- employees: men and women working in that room
* Address
- street
- zipcode
- city
- rooms: room list
* Position
- name: position name
.. _basedoc_bloks:
Bloks
-----
Within AnyBlok, all business logic objects, among them in the first place
:ref:`Models <basedoc_models>` must be declared as part of some Blok.
Bloks themselves are subclasses of the :class:`Blok <anyblok.blok.Blok>`
base class. They have basic metadata attributes (author, version, dependencies…) and
methods to import the business logic objects declarations.
Bloks also bear the methods for installation, update and removal.
Here's a very minimal (and pretty much useless) valid Blok::
from anyblok.blok import Blok
class MyFirstBlok(Blok):
""" This is valid blok """
To demonstrate the extreme modularity that can be achieved with
Anyblok, we'll organize the application in four different bloks:
**Office blok**
File tree::
workapp
├── (...)
└── office_blok
├── __init__.py
└── office.py
``__init__.py`` file::
from anyblok.blok import Blok
class OfficeBlok(Blok):
version = '1.0.0'
author = 'Suzanne Jean-Sébastien'
logo = 'relative/path'
def install(self):
"""This room will be always present after the blok installation
"""
address = self.anyblok.Address.insert(
street='la Tour Eiffel',
street2='5 avenue Anatole France',
zip='75007',
city='Paris'
)
self.anyblok.Room.insert(number=1, address=address)
def install_demo(self):
"""Extra data to add once blok is installed if database was created
with ``--with-demo``.
"""
address = self.anyblok.Address.insert(street='14-16 rue Soleillet',
zip='75020', city='Paris')
self.anyblok.Room.insert(number=308, address=address)
def update(self, latest_version):
"""Method called when blok is installed or updated to let
a chance to add data or configuration.
"""
if latest_version is None:
self.install()
def update_demo(self, latest_version):
"""Method called when blok is installed or updated if database
was created with the `--with-demo` parameter in order to add demo
data to quickly present product with examples or to
populate database with data that could be use in test case.
This method is called after ``update``.
"""
if latest_version is None:
self.install_demo()
@classmethod
def import_declaration_module(cls):
from . import office
So for instance, in this example, we'll import the ``office`` module
(which defines ``Address`` and ``Room`` Models, :ref:`see below <basedoc_models>`) and at the time of
first installation (detected by ``latest_version`` being ``None``),
we'll create an ``Address`` and a ``Room`` instance right away, as
base data.
.. note:: this anticipates a bit on the :ref:`Model <basedoc_models>`
base usage.
**Position blok**
File tree::
workapp
├── (...)
└── position_blok
├── __init__.py
└── position.py
``__init__.py`` file::
from anyblok.blok import Blok
class PositionBlok(Blok):
version = '1.0.0'
def install_demo(self):
self.anyblok.Position.multi_insert({'name': 'CTO'},
{'name': 'CEO'},
{'name': 'Administrative Manager'},
{'name': 'Project Manager'},
{'name': 'Developer'})
def update_demo(self, latest_version):
if latest_version is None:
self.install_demo()
@classmethod
def import_declaration_module(cls):
from . import position # noqa
Same here, the installation automatically creates some data, in this
case ``Position`` instances.
**Employee blok**
Bloks can have requirements. Each blok define its dependencies:
* required:
list of the bloks that must be installed (and loaded at
startup) before
* optional:
list of bloks that will be installed before the present
one, if they are available in the application.
File tree::
employee_blok
├── __init__.py
├── config.py
└── employee.py
``__init__.py`` file::
from anyblok.blok import Blok
class EmployeeBlok(Blok):
version = '1.0.0'
required = ['office']
optional = ['position']
def install(self):
room = self.anyblok.Room.query().filter(
self.anyblok.Room.number == 308).first()
employees = [dict(name=employee, room=room)
for employee in ('Georges Racinet',
'Christophe Combelles',
'Sandrine Chaufournais',
'Pierre Verkest',
'Franck Bret',
"Simon André",
'Florent Jouatte',
'Clovis Nzouendjou',
"Jean-Sébastien Suzanne")]
self.anyblok.Employee.multi_insert(*employees)
def update_demo(self, latest_version):
if latest_version is None:
self.install()
@classmethod
def import_declaration_module(cls):
from . import config
from . import employee
**EmployeePosition blok**:
Some bloks can be installed automatically if some specific other bloks are
installed. They are called conditional bloks.
File tree::
employee_position_blok
├── __init__.py
└── employee.py
``__init__.py`` file::
from anyblok.blok import Blok
class EmployeePositionBlok(Blok):
version = '1.0.0'
priority = 200
conditional = [
'employee',
'position',
]
def install(self):
Employee = self.anyblok.Employee
position_by_employee = {
'Georges Racinet': 'CTO',
'Christophe Combelles': 'CEO',
'Sandrine Chaufournais': u"Administrative Manager",
'Pierre Verkest': 'Project Manager',
'Franck Bret': 'Project Manager',
u"Simon André": 'Developer',
'Florent Jouatte': 'Developer',
'Clovis Nzouendjou': 'Developer',
u"Jean-Sébastien Suzanne": 'Developer',
}
for employee, position in position_by_employee.items():
Employee.query().filter(Employee.name == employee).update({
'position_name': position})
def update_demo(self, latest_version):
if latest_version is None:
self.install()
@classmethod
def import_declaration_module(cls):
from . import employee # noqa
.. warning::
There are no strong dependencies between conditional blok and bloks,
so the priority number of the conditional blok must be bigger than bloks
defined in the `conditional` list. Bloks are loaded by dependencies
and priorities, so a blok with a small dependency/priority will be loaded
before a blok with a higher dependency/priority.
.. _declare_blok:
Bloks registration
------------------
Now that we have our Bloks, they must be registered through the ``bloks`` setuptools `entry point
<https://setuptools.readthedocs.io/en/latest/setuptools.html#entry-points>`_::
setup(
# (...)
entry_points={
'bloks': [
'office=workapp.office_blok:OfficeBlok',
'employee=workapp.employee_blok:EmployeeBlok',
'position=workapp.position_blok:PositionBlok',
'employee-position=workapp.employee_position_blok:EmployeePositionBlok',
],
},
)
.. _basedoc_models:
Models
------
With AnyBlok, most of the business logic is organized as Models.
There are two types of Model:
* SQL: They bear Fields, and correspond to a table in the database,
that's automatically created and updated if needed.
* Non SQL: No persistent data, but still useful to attach methods onto
them, which then could be overridden by downstream Bloks.
To declare a Model, use the ``Declarations.register`` decorator::
from anyblok import Declarations
@Declarations.register(Declarations.Model)
class AAnyBlokModel:
""" The first Model of our application """
.. note:: At this point, it is important to realize that this Model
class won't be used directly in this form, which is but a
Declaration. It will actually be just one element of
a whole inheritance hierarchy, which AnyBlok constructs for each
database, according to its installed Bloks. This is the fundamental
way AnyBlok's flexibility works (see :ref:`basedoc_override`).
Here's an example SQL model, with just one Column::
from anyblok import Declarations
from anyblok.column import String
register = Declarations.register
Model = Declarations.Model
@register(Model)
class ASQLModel:
acolumn = String(label="The first column", primary_key=True)
This Model will be backed by the ``asqlmodel`` table, whose rows will
correspond to Model instances.
Once the application has started, the fully assembled Model class is
available within the Registry, which itself can be accessed in various ways, depending
on the context.
In particular, the Registry is available on any Model
instance as the ``anyblok`` attribute. So, for instance, from a method of another
Model, we could create an instance of ``ASQLModel`` in this way::
def mymethod(self):
self.anyblok.ASQLModel.insert(acolumn="Foo")
Another example would be the ``install()`` methods of our
:ref:`basedoc_bloks` above.
.. note:: There is a Registry instance for each database, and it holds for each
Model the resulting concrete class after all overrides
have been applied.
.. warning::
SQL Models must have a primary key made of one or more columns
(those flagged with ``primary_key=True``)
.. note::
The table name depends on the registry tree. Here the table is ``asqlmodel``.
If a new model is defined under ASQLModel (example UnderModel, whose table
would be ``asqlmodel_undermodel``), the registry model will be stored
as Model.ASQLModel.UnderModel
Let's then proceed with our more concrete example:
**office_blok.office**::
from anyblok import Declarations
from anyblok.column import Integer, String
from anyblok.relationship import Many2One
register = Declarations.register
Model = Declarations.Model
@register(Model)
class Address:
id = Integer(label="Identifier", primary_key=True)
street = String(label="Street", nullable=False)
zip = String(label="Zip", nullable=False)
city = String(label="City", nullable=False)
def __str__(self):
return "%s %s %s" % (self.street, self.zip, self.city)
@register(Model)
class Room:
id = Integer(label="Identifier", primary_key=True)
number = Integer(label="Number of the room", nullable=False)
address = Many2One(label="Address", model=Model.Address, nullable=False,
one2many="rooms")
def __str__(self):
return "Room %d at %s" % (self.number, self.address)
The relationships can also define the opposite relation. Here the ``address`` Many2One relation
also declares the ``rooms`` One2Many relation on the Address Model.
A Many2One or One2One relationship is backed by a local column.
The ``column_names`` attribute allows you to choose the linked column; if this
attribute is missing then the column name is "'model.table'.'remote_column'".
If the linked column does not exist, the relationship creates the
column with the same type as the remote column.
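For example, to name the generated column explicitly, an illustrative variant
of the ``address`` relation above could be::

    address = Many2One(label="Address", model=Model.Address,
                       column_names="address_id", nullable=False,
                       one2many="rooms")
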
**position_blok.position**::
from anyblok import Declarations
from anyblok.column import String
register = Declarations.register
Model = Declarations.Model
@register(Model)
class Position:
name = String(label="Position", primary_key=True)
def __str__(self):
return self.name
**employee_blok.employee**::
from anyblok import Declarations
from anyblok.column import String
from anyblok.relationship import Many2One
register = Declarations.register
Model = Declarations.Model
@register(Model)
class Employee:
name = String(label="Number of the room", primary_key=True)
room = Many2One(label="Office", model=Model.Room, one2many="employees")
def __str__(self):
return "%s in %s" % (self.name, self.room)
.. _basedoc_override:
Overriding Models
-----------------
If one declares two Models with the same name, the
second Model will subclass the first one in the final assembled Model
class. This is mostly interesting when the two
declarations belong to different bloks.
**employee_position_blok.employee**::
from anyblok import Declarations
from anyblok.relationship import Many2One
register = Declarations.register
Model = Declarations.Model
@register(Model)
class Employee:
position = Many2One(label="Position", model=Model.Position, nullable=False)
def __str__(self):
res = super(Employee, self).__str__()
return "%s (%s)" % (res, self.position)
Standalone executables
----------------------
If the AnyBlok application is an HTTP server running through some WSGI compatibility
layer, such as AnyBlok / Pyramid, one does not need to care about
running processes: the WSGI server provides them already.
But in other cases, including background processing alongside HTTP
workers, we need to set up executables.
Add entries in the argparse configuration
+++++++++++++++++++++++++++++++++++++++++
Some applications may require options. Options are grouped by
category. And the application chooses the option category to display.
**employee_blok.config**::
from anyblok.config import Configuration
@Configuration.add('message', label="This is the group message")
def add_interpreter(parser, configuration):
parser.add_argument('--message-before', dest='message_before')
parser.add_argument('--message-after', dest='message_after')
Create the executable
+++++++++++++++++++++
The application can be a simple script or a setuptools script. For a setuptools
script, add this in the ``setup.py``::
setup(
...
entry_points={
'console_scripts': ['exampleblok=exampleblok.scripts:exampleblok'],
'bloks': bloks,
},
)
The script must display:
* the provided ``message_before``
* the lists of the employee by address and by room
* the provided ``message_after``
**scripts.py**::
import anyblok
from logging import getLogger
from anyblok.config import Configuration
logger = getLogger()
def exampleblok():
# Initialise the application, with a name and a version number
# select the group of options to display
# return a registry if the database is selected
anyblok_registry = anyblok.start(
'Example Blok', argparse_groups=['message', 'logging'])
if not anyblok_registry:
return
message_before = Configuration.get('message_before')
message_after = Configuration.get('message_after')
if message_before:
logger.info(message_before)
for address in anyblok_registry.Address.query().all():
for room in address.rooms:
for employee in room.employees:
logger.info(employee)
if message_after:
logger.info(message_after)
**Display the help of your application**::
jssuzanne:anyblok jssuzanne$ ./bin/exampleblok -h
usage: exampleblok [-h]
[--logging-level {NOTSET,DEBUG,INFO,WARNING,ERROR,CRITICAL}]
[--logging-level-qualnames LOGGING_LEVEL_QUALNAMES [LOGGING_LEVEL_QUALNAMES ...]]
[--logging-config-file LOGGING_CONFIGFILE]
[--logging-json-config-file JSON_LOGGING_CONFIGFILE]
[--logging-yaml-config-file YAML_LOGGING_CONFIGFILE]
[-c CONFIGFILE] [--without-auto-migration]
[--db-name DB_NAME] [--db-driver-name DB_DRIVER_NAME]
[--db-user-name DB_USER_NAME] [--db-password DB_PASSWORD]
[--db-host DB_HOST] [--db-port DB_PORT] [--db-echo]
[options] -- other arguments
optional arguments:
-h, --help show this help message and exit
-c CONFIGFILE Relative path of the config file
--without-auto-migration
Logging:
--logging-level {NOTSET,DEBUG,INFO,WARNING,ERROR,CRITICAL}
--logging-level-qualnames LOGGING_LEVEL_QUALNAMES [LOGGING_LEVEL_QUALNAMES ...]
Limit the log level on a qualnames list
--logging-config-file LOGGING_CONFIGFILE
Relative path of the logging config file
--logging-json-config-file JSON_LOGGING_CONFIGFILE
Relative path of the logging config file (json). Only
if the logging config file doesn't filled
--logging-yaml-config-file YAML_LOGGING_CONFIGFILE
Relative path of the logging config file (yaml). Only
if the logging and json config file doesn't filled
Database:
--db-name DB_NAME Name of the database
--db-driver-name DB_DRIVER_NAME
the name of the database backend. This name will
correspond to a module in sqlalchemy/databases or a
third party plug-in
--db-user-name DB_USER_NAME
The user name
--db-password DB_PASSWORD
database password
--db-host DB_HOST The name of the host
--db-port DB_PORT The port number
--db-echo
**Create an empty database and call the script**::
jssuzanne:anyblok jssuzanne$ createdb anyblok
jssuzanne:anyblok jssuzanne$ ./bin/exampleblok -c anyblok.cfg --message-before "Get the employee ..." --message-after "End ..."
2014-1129 10:54:27 INFO - anyblok:root - Registry.load
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Blok 'anyblok-core' loaded
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Assemble 'Model' entry
2014-1129 10:54:27 INFO - anyblok:alembic.migration - Context impl PostgresqlImpl.
2014-1129 10:54:27 INFO - anyblok:alembic.migration - Will assume transactional DDL.
2014-1129 10:54:27 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'system_cache_id_seq' as owned by integer column 'system_cache(id)', assuming SERIAL and omitting
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Initialize 'Model' entry
2014-1129 10:54:27 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Install the blok 'anyblok-core'
2014-1129 10:54:27 INFO - anyblok:root - Registry.reload
2014-1129 10:54:27 INFO - anyblok:root - Registry.load
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Blok 'anyblok-core' loaded
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Blok 'office' loaded
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Assemble 'Model' entry
2014-1129 10:54:27 INFO - anyblok:alembic.migration - Context impl PostgresqlImpl.
2014-1129 10:54:27 INFO - anyblok:alembic.migration - Will assume transactional DDL.
2014-1129 10:54:27 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'address_id_seq' as owned by integer column 'address(id)', assuming SERIAL and omitting
2014-1129 10:54:27 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'system_cache_id_seq' as owned by integer column 'system_cache(id)', assuming SERIAL and omitting
2014-1129 10:54:27 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'room_id_seq' as owned by integer column 'room(id)', assuming SERIAL and omitting
2014-1129 10:54:27 INFO - anyblok:anyblok.registry - Initialize 'Model' entry
2014-1129 10:54:28 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Install the blok 'office'
2014-1129 10:54:28 INFO - anyblok:root - Registry.reload
2014-1129 10:54:28 INFO - anyblok:root - Registry.load
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'anyblok-core' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'office' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'position' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Assemble 'Model' entry
2014-1129 10:54:28 INFO - anyblok:alembic.migration - Context impl PostgresqlImpl.
2014-1129 10:54:28 INFO - anyblok:alembic.migration - Will assume transactional DDL.
2014-1129 10:54:28 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'address_id_seq' as owned by integer column 'address(id)', assuming SERIAL and omitting
2014-1129 10:54:28 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'system_cache_id_seq' as owned by integer column 'system_cache(id)', assuming SERIAL and omitting
2014-1129 10:54:28 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'room_id_seq' as owned by integer column 'room(id)', assuming SERIAL and omitting
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Initialize 'Model' entry
2014-1129 10:54:28 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Install the blok 'position'
2014-1129 10:54:28 INFO - anyblok:root - Registry.reload
2014-1129 10:54:28 INFO - anyblok:root - Registry.load
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'anyblok-core' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'office' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'position' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Blok 'employee' loaded
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Assemble 'Model' entry
2014-1129 10:54:28 INFO - anyblok:alembic.migration - Context impl PostgresqlImpl.
2014-1129 10:54:28 INFO - anyblok:alembic.migration - Will assume transactional DDL.
2014-1129 10:54:28 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'system_cache_id_seq' as owned by integer column 'system_cache(id)', assuming SERIAL and omitting
2014-1129 10:54:28 INFO - anyblok:anyblok.registry - Initialize 'Model' entry
2014-1129 10:54:29 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Install the blok 'employee'
2014-1129 10:54:29 INFO - anyblok:root - Registry.reload
2014-1129 10:54:29 INFO - anyblok:root - Registry.load
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Blok 'anyblok-core' loaded
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Blok 'office' loaded
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Blok 'position' loaded
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Blok 'employee' loaded
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Blok 'employee-position' loaded
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Assemble 'Model' entry
2014-1129 10:54:29 INFO - anyblok:alembic.migration - Context impl PostgresqlImpl.
2014-1129 10:54:29 INFO - anyblok:alembic.migration - Will assume transactional DDL.
2014-1129 10:54:29 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'system_cache_id_seq' as owned by integer column 'system_cache(id)', assuming SERIAL and omitting
2014-1129 10:54:29 INFO - anyblok:alembic.autogenerate.compare - Detected added column 'employee.position_name'
2014-1129 10:54:29 WARNING - anyblok:anyblok.migration - (IntegrityError) column "position_name" contains null values
'ALTER TABLE employee ALTER COLUMN position_name SET NOT NULL' {}
2014-1129 10:54:29 INFO - anyblok:anyblok.registry - Initialize 'Model' entry
2014-1129 10:54:29 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Install the blok 'employee-position'
2014-1129 10:54:30 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'anyblok-core'
2014-1129 10:54:30 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'office'
2014-1129 10:54:30 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'position'
2014-1129 10:54:30 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'employee'
2014-1129 10:54:30 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'employee-position'
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Get the employee ...
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Sandrine Chaufournais in Room 308 at 14-16 rue Soleillet 75020 Paris (Administrative Manager)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Christophe Combelles in Room 308 at 14-16 rue Soleillet 75020 Paris (CEO)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Clovis Nzouendjou in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Florent Jouatte in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Simon André in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Jean-Sébastien Suzanne in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Georges Racinet in Room 308 at 14-16 rue Soleillet 75020 Paris (CTO)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Pierre Verkest in Room 308 at 14-16 rue Soleillet 75020 Paris (Project Manager)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - Franck Bret in Room 308 at 14-16 rue Soleillet 75020 Paris (Project Manager)
2014-1129 10:54:30 INFO - anyblok:exampleblok.scripts - End ...
The registry is loaded twice:
* The first load installs the bloks ``anyblok-core``, ``office``, ``position`` and ``employee``
* The second load installs the conditional blok ``employee-position`` and runs a migration to add the field ``position_name``
**Call the script again**::
jssuzanne:anyblok jssuzanne$ ./bin/exampleblok -c anyblok.cfg --message-before "Get the employee ..." --message-after "End ..."
2014-1129 10:57:52 INFO - anyblok:root - Registry.load
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Blok 'anyblok-core' loaded
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Blok 'office' loaded
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Blok 'position' loaded
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Blok 'employee' loaded
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Blok 'employee-position' loaded
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Assemble 'Model' entry
2014-1129 10:57:52 INFO - anyblok:alembic.migration - Context impl PostgresqlImpl.
2014-1129 10:57:52 INFO - anyblok:alembic.migration - Will assume transactional DDL.
2014-1129 10:57:52 INFO - anyblok:alembic.ddl.postgresql - Detected sequence named 'system_cache_id_seq' as owned by integer column 'system_cache(id)', assuming SERIAL and omitting
2014-1129 10:57:52 INFO - anyblok:alembic.autogenerate.compare - Detected NOT NULL on column 'employee.position_name'
2014-1129 10:57:52 INFO - anyblok:anyblok.registry - Initialize 'Model' entry
2014-1129 10:57:52 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'anyblok-core'
2014-1129 10:57:52 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'office'
2014-1129 10:57:52 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'position'
2014-1129 10:57:52 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'employee'
2014-1129 10:57:52 INFO - anyblok:anyblok.bloks.anyblok_core.declarations.system.blok - Load the blok 'employee-position'
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Get the employee ...
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Sandrine Chaufournais in Room 308 at 14-16 rue Soleillet 75020 Paris (Administrative Manager)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Christophe Combelles in Room 308 at 14-16 rue Soleillet 75020 Paris (CEO)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Clovis Nzouendjou in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Florent Jouatte in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Simon André in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Jean-Sébastien Suzanne in Room 308 at 14-16 rue Soleillet 75020 Paris (Developer)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Georges Racinet in Room 308 at 14-16 rue Soleillet 75020 Paris (CTO)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Pierre Verkest in Room 308 at 14-16 rue Soleillet 75020 Paris (Project Manager)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - Franck Bret in Room 308 at 14-16 rue Soleillet 75020 Paris (Project Manager)
2014-1129 10:57:52 INFO - anyblok:exampleblok.scripts - End ...
The registry is loaded only once, because the bloks are already installed
Builtin generic scripts
+++++++++++++++++++++++
Anyblok provides some helper generic console scripts out of the box:
* anyblok_createdb
* anyblok_updatedb
* anyblok_interpreter
.. note::
if IPython is in the sys.modules then the interpreter is an IPython interpreter
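For instance, assuming a configuration file ``app.cfg`` that describes the
database connection, a typical workflow could be (option names per the scripts'
own help)::

    anyblok_createdb -c app.cfg --install-bloks office position employee
    anyblok_updatedb -c app.cfg --update-bloks office
    anyblok_interpreter -c app.cfg
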
TODO: I know it's not a setuptools documentation but it could be kind to show
a complete minimalist example of `setup.py` with requires (to anyblok).
We could also display the full tree from root.
A direct link to download the full working example.
.. _basedoc_tests:
Writing and launching tests
---------------------------
We want to foster a very test friendly culture in the AnyBlok
community, that's why we cover tests writing and launching in this
"Basic usage" page.
That being said, such a dynamic framework represents a challenge for
tests, because the application constructs, e.g., application Models,
must *not* be imported directly. Instead, a proper Registry must be
set up one way or another before the test launcher kicks in, and that
interferes wildly with coverage reports.
Also, the Anyblok Registry being tightly tied to a database, we need
to set it up before hand (most common in application tests) or manage
it from the tests (mostly meant for the framework tests, but could
find its use for some applications or middleware).
.. note:: all of this means that the tests we're discussing aren't
stricto sensu unit tests, but rather integration
tests. Nevertheless, we casually speak of them as unit tests
if they stay lightweight and are about testing individual
AnyBlok components.
Nothing prevents application developers to also write true unit
tests, perhaps for subroutines that don't interact with the
database at all.
To address these challenges, AnyBlok ships with helper pytest fixtures.
.. _basedoc_testcases:
Writing tests with pytest
+++++++++++++++++++++++++
.. note:: See: `Pytest documentation <https://docs.pytest.org/en/latest/>`_
To start correctly you will need a ``conftest.py`` file.
Generally, you just want to import the conftest from the bloks you need
in your context, for example in our case::
from anyblok.conftest import * # noqa: F401,F403
Here you have an example to write a basic test class::
class TestRoom:
"""Test Room model"""
def test_create_room(self, rollback_registry):
registry_anyblok = rollback_registry
room_count = registry_anyblok.Room.query().count()
room = registry_anyblok.Room.insert(
name="A1",
capacity=25,
)
assert registry_anyblok.Room.query().count() == room_count + 1
assert room.name == "A1"
.. note:: For advanced examples, you can refer to our `developer guide <https://anyblok.github.io/anyblok-book/en/>`_
Launching tests with the pytest plugin
++++++++++++++++++++++++++++++++++++++
Summary: use this if you need accurate coverage results. This is a
good fit for Continuous Integration (CI).
AnyBlok comes with a `pytest <https://pypi.org/project/pytest/>`_
plugin right away. Once the testing database is set up, and described
by proper environment variables or :ref:`default configuration files
<basedoc_conf_files_default>`, you can test your bloks with the
``--with-anyblok-bloks`` option.
Here's an example, adapted from AnyBlok's ``.travis.yml``::
export ANYBLOK_DATABASE_NAME=travis_ci_test
export ANYBLOK_DATABASE_DRIVER=postgresql
export ANYBLOK_DATABASE_USER=postgres
anyblok_createdb --install-all-bloks
py.test --cov-report= --cov=anyblok anyblok/bloks
Typical usage is with a :ref:`configuration file <basedoc_conf_files>`::
ANYBLOK_CONFIG_FILE=tests.cfg pytest anyblok/bloks
.. _basedoc_conf_files:
Configuration files
-------------------
Custom or builtin AnyBlok console scripts accept the ``-c`` parameter,
to specify a configuration file instead of passing all the options in the
command line. Example::
anyblok_createdb -c myapp.cfg
Syntax
++++++
The configuration file allows you to load all the initialisation variables::
[AnyBlok]
key = value
You can extend an existing config file::
[AnyBlok]
extend = ``path of the configfile``
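For instance, a minimal configuration file for a local PostgreSQL database might look like the
following sketch; the option names mirror the database options used by the test runner later in
this document (``db_name``, ``db_driver_name``, and so on) and should be adapted to your environment::

    [AnyBlok]
    db_name = myapp
    db_driver_name = postgresql
    db_user_name = postgres
    db_password = secret
    db_host = localhost
    db_port = 5432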
The logging configuration is also loaded; see the `logging configuration file format
<https://docs.python.org/3/library/logging.config.html#configuration-file-format>`_::
[AnyBlok]
logging_configfile = ``name of the config file``
# json_logging_configfile = logging config file write with json
# yaml_logging_configfile = logging config file write with yaml
[loggers]
keys=root,anyblok
[handlers]
keys=consoleHandler
[formatters]
keys=consoleFormatter
[logger_root]
level=INFO
handlers=consoleHandler
[logger_anyblok]
level=INFO
handlers=consoleHandler
qualname=anyblok
propagate=1
[handler_consoleHandler]
class=StreamHandler
level=INFO
formatter=consoleFormatter
args=(sys.stdout,)
[formatter_consoleFormatter]
class=anyblok.logging.consoleFormatter
format=%(database)s:%(levelname)s - %(message)s
datefmt=
.. _basedoc_conf_files_default:
Default configuration files
+++++++++++++++++++++++++++
You can define a default *system* or *user* configuration file depending on
your *OS*:
* *linux*
- *system*: /etc/xdg/AnyBlok/conf.cfg
- *user*: /home/``user name``/.config/AnyBlok/conf.cfg
* *mac os x*
- *system*: /Library/Application Support/AnyBlok/conf.cfg
- *user*: /Users/``user name``/Library/Application Support/AnyBlok/conf.cfg
.. note::
This also works on *Windows*, see https://pypi.python.org/pypi/appdirs. The
entries used are:
* *system*: site_config_dir
* *user*: user_config_dir
These configuration files are loaded before the specific configuration file. If
a configuration file does not exist, no error is raised.
| AnyBlok | /AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/doc/basic_usage.rst | basic_usage.rst |
import warnings
from os import pardir, walk
from os.path import join, normpath, relpath
from nose.plugins import Plugin
def isindir(path, dirpath):
# normpath simplifies stuff like a/../c but doesn't follow symlinks
# that's what we need. Nose will feed us absolute paths, btw
return not relpath(normpath(path), normpath(dirpath)).startswith(pardir)
class Arg2OptOptions:
def __init__(self, options):
self.options = options
def _get_args(self):
return False
def _get_kwargs(self):
keys = [
"configfile",
"db_name",
"db_driver_name",
"db_user_name",
"db_password",
"db_host",
"db_port",
]
return [(x, getattr(self.options, x)) for x in keys] + [
("withoutautomigration", True)
]
def __getattr__(self, name, default=None):
return getattr(self.options, name, default)
class AnyBlokPlugin(Plugin):
name = "anyblok-bloks"
score = 100
def __init__(self):
super(AnyBlokPlugin, self).__init__()
self.registryLoaded = False
self.AnyBlokOptions = None
def options(self, parser, env):
super(AnyBlokPlugin, self).options(parser, env)
parser.add_option("--anyblok-configfile", dest="configfile")
parser.add_option(
"--anyblok-db-name",
dest="db_name",
default=env.get("ANYBLOK_DATABASE_NAME"),
help="Name of the database",
)
parser.add_option(
"--anyblok-db-driver-name",
dest="db_driver_name",
default=env.get("ANYBLOK_DATABASE_DRIVER"),
help="the name of the database backend. This name "
"will correspond to a module in "
"sqlalchemy/databases or a third party plug-in",
)
parser.add_option(
"--anyblok-db-user-name",
dest="db_user_name",
default=env.get("ANYBLOK_DATABASE_USER"),
help="The user name",
)
parser.add_option(
"--anyblok-db-password",
dest="db_password",
default=env.get("ANYBLOK_DATABASE_PASSWORD"),
help="database password",
)
parser.add_option(
"--anyblok-db-host",
dest="db_host",
default=env.get("ANYBLOK_DATABASE_HOST"),
help="The name of the host",
)
parser.add_option(
"--anyblok-db-port",
dest="db_port",
default=env.get("ANYBLOK_DATABASE_PORT"),
help="The port number",
)
parser.add_option(
"--anyblok-db-url",
dest="db_url",
default=env.get("ANYBLOK_DATABASE_URL"),
help="Complete URL for connection with the database",
)
def configure(self, options, conf):
super(AnyBlokPlugin, self).configure(options, conf)
if self.enabled:
warnings.simplefilter("default")
self.AnyBlokOptions = Arg2OptOptions(options)
def load_registry(self):
if not self.enabled or self.registryLoaded:
return
from anyblok import (
configuration_post_load,
load_init_function_from_entry_points,
)
from anyblok.blok import BlokManager
from anyblok.common import return_list
from anyblok.config import Configuration, get_db_name
from anyblok.registry import RegistryManager
# Load the registry here not in configuration,
# because the configurations are not loaded in order of score
self.registryLoaded = True
load_init_function_from_entry_points(unittest=True)
Configuration.load_config_for_test()
Configuration.parse_options(self.AnyBlokOptions)
configuration_post_load()
BlokManager.load()
db_name = get_db_name()
registry = RegistryManager.get(db_name)
if not registry:
return
installed_bloks = registry.System.Blok.list_by_state("installed")
selected_bloks = (
return_list(Configuration.get("selected_bloks")) or installed_bloks
)
unwanted_bloks = return_list(Configuration.get("unwanted_bloks")) or []
self.authorized_blok_paths = set(
BlokManager.getPath(b)
for b in BlokManager.list()
if b in selected_bloks and b not in unwanted_bloks
)
test_dirs = self.authorized_blok_test_dirs = set()
for startpath in self.authorized_blok_paths:
for root, dirs, _ in walk(startpath):
if "tests" in dirs:
test_dirs.add(join(root, "tests"))
registry.close() # free the registry to force create it again
def file_from_authorized_blok_tests(self, file_path):
return any(
isindir(file_path, tp) for tp in self.authorized_blok_test_dirs
)
def wantModule(self, module):
self.load_registry()
return True
def wantFile(self, file_path, package=None):
self.load_registry()
return (
self.enabled
and file_path.endswith(".py")
and self.file_from_authorized_blok_tests(file_path)
)
def wantDirectory(self, path):
self.load_registry()
return self.enabled and any(
isindir(path, bp) for bp in self.authorized_blok_paths
)
| AnyBlok | /AnyBlok-2.1.0.tar.gz/AnyBlok-2.1.0/anyblok_nose/plugins.py | plugins.py |
# AnyDeck Python Library
___
#### Author: Thomas Amore Jr
#### License: MIT
___
## Purpose:
Generates a deck of virtually any type of cards; drawing, adding, removing, and
shuffling are handled by the deck object. Useful for creating a game or application
that uses any deck of cards which must be created and tracked throughout game play.
___
## Installation:
pip install AnyDeck
___
## Usage:
### Definitions:
* **Special Cards**: One off cards that are added to the parent deck
* **Wild Cards**: Added to the end of each child deck
* **Child Deck**: An individual deck of regular and wild cards
* **Parent Deck**: The all-encompassing deck that you will ultimately interact with. Includes all child decks and special cards
### Order of Card Insertion at Deck Creation:
1. Retained, unused, cards
2. Special Cards
3. Regular Cards
4. Wild Cards
### Deck Creation:
The simplest usage is to instantiate the class without arguments. With this call you will have a standard deck of 52 cards in a list of 'Card' objects referred to as the 'deck'.
``` python
cards = AnyDeck()
```
Now let's take a look at the deck...
``` python
for card in cards.deck:
print(f'{card.face} of {card.suit}')
```
Notice that the cards are not shuffled. Shuffling can be handled in two ways.
* During initiation of the deck
``` python
cards = AnyDeck(shuffled=True)
```
* At any time
``` python
cards.shuffle()
```
Instead of using the default cards you can get a _custom deck_ of regular cards by passing arguments.
For example:
1. Create a deck of cards for the faces listed in 'cards'. A card will be created for each card in each suit provided.
In this case a deck will be created with 8 cards, all with the suit of 'Tarot'
``` python
cards = AnyDeck(suits='Tarot',
cards=('Strength', 'The Moon', 'Justice', 'The Hermit', 'The Fool',
'The Sun', 'The Tower', 'Temperance'))
```
2. Create a set of multiple decks of regular cards added into one parent deck. The following will yield a deck of 96
cards numbered 1 to 12 for each suit provided for the number of decks provided.
``` python
cards.new_deck(decks=2,
suits=('Red', 'Blue', 'Yellow', 'Green'),
cards=('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'))
```
3. Let's do that again but with some wild cards. This time each child deck contains a 'wild' card with a face of 'Skip'.
This results in a deck of 98 cards: the same as before but with two 'Skip' cards (one for each child deck).
``` python
cards.new_deck(decks=2,
suits=('Red', 'Blue', 'Yellow', 'Green'),
cards=('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'),
wilds='Skip')
```
4. Let's add a total of 4 'Skip' cards and 8 'Joker' cards to the original deck. In this case we will utilize the
'retain' argument of the new_deck method.
* First, we create the base deck of 96 cards:
``` python
cards.new_deck(decks=2,
suits=('Red', 'Blue', 'Yellow', 'Green'),
cards=('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'))
```
* Now we can add the 'Skip' cards by retaining the original deck and adding the new deck into it by creating
four child decks with a face of 'Skip' and a suit of 'Wild'. Since we passed the suits and cards arguments the
default cards will not be generated. When the new_deck is generated it will retain the cards created from the
first deck due to the retain flag being True.
``` python
cards.new_deck(decks=4,
retain_unused = True,
suits='Wild',
cards='Skip')
```
* Finally, to add the eight jokers we do the same thing again. This time, though, we will simply override the
adding of the default cards so that we can make a deck of eight child decks (each consisting of a single card),
again using the retained deck to keep all the already generated cards.
``` python
cards.new_deck(decks=8,
retain_unused = True,
wilds='Joker',
override_defaults=True)
```
* Notice that what we did was first create a deck of 96 standard cards, and then a deck of 4 'Skip' cards,
and finally a deck of 8 'Joker' cards. Using retain we kept the cards from each deck as a new deck was added,
so we end up with a deck of 108 cards. This is the same as....
``` python
cards.new_deck(decks=2,
suits=['Red', 'Blue', 'Yellow', 'Green'],
cards=['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'],
wilds=['Skip', 'Skip', 'Joker', 'Joker', 'Joker', 'Joker'])
```
....since each child deck gets one of each wild card provided to the argument. Thus, each child deck
would have 2 Skip and 4 Joker cards. With that we would end up with the same 108 cards only in a slightly
different order (see 'Order of card insertion at deck creation')
5. "Special Cards" are passed as Card objects and are added to the top of the deck regardless of the amount of
child decks added. In this example a special card with just the face value of "Old Maid" is created and then
passed to the new_deck method. This example will create the special card and then two decks of regular cards that
were added by face.
``` pythonls
old_maid_card = Card(face="Old Maid")
cards.new_deck(decks=2,
cards=('Alto Annie', 'Slap on Sam', 'Billy Blaze', 'Heap Big Talk', 'Clancy Clown', 'Crazy Cop',
'Loggin Larry', 'Greenthumb Gert', 'Diver Dan', 'Freddie Falloff', 'Baker Benny',
'Tumbledown Tess', 'Hayseed Hank', 'Postman Pete', 'Fifi Fluff', 'Bagpipe Barney',
'Milkman Mo', 'Careless Carrie'),
special_cards=old_maid_card)
```
### Drawing Cards:
There are two main ways to draw cards.
1. A single card draw is done simply with:
```python
card = cards.draw()
```
* This will return a single card object from the top of the deck. See the method arguments for more information on
where in the deck the card should come from; a combined example follows this list.
2. Multiple complete hands can be drawn at once with a single statement. The number of cards in each hand is required. In
this case the hand size is 5 cards.
```python
hands = cards.draw_hand(5)
```
* A call to this method will return a list containing lists of cards. Further arguments can produce more than one
hand, choose to alternate the dealing of the cards, and handle situations where the deck has run out. Refer to the
Classes and Methods section.
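Putting these together, here is a short sketch (variable names are illustrative) that draws a single card from a
random position and then deals four hands of five cards each:
```python
cards = AnyDeck(shuffled=True)

# Draw one card from a random position in the unused deck
random_card = cards.draw(position='random')

# Deal four hands of five cards each, alternating between hands
hands = cards.draw_hand(5, hands=4, alternating=True)
for hand in hands:
    print([f'{card.face} of {card.suit}' for card in hand])
```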
### Replacing Used Cards:
At any time, the cards which have already been drawn can be returned to the bottom of the deck with:
```python
cards.replace_used_cards()
```
The default behavior is not to shuffle the cards but to simply place them back into the deck. An argument can be
passed to shuffle the used pile before putting the cards on the bottom as well as shuffling the entire deck after
replacing the used cards. Refer to the Classes and Methods section.
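For example, to shuffle the used pile before it is returned and then reshuffle the whole deck afterwards (both
flags default to False):
```python
cards.replace_used_cards(shuffle_used=True, shuffle=True)
print(cards.remaining_cards)
```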
### Replacing card values from a dictionary:
Card values can be set by referring to a dictionary passed to the dict_to_value method. A dictionary with keys equal
to the face of the cards and values equal to the int value of the card is required.
```python
community_chest_cards = AnyDeck()
community_chest_dict = {'You Inherit $100': 100,
'Doctor\'s Fee Pay $50': -50,
'Get Out of Jail Free': 0}
community_chest_cards.new_deck(suits='Community Chest',
cards=[card for card in community_chest_dict])
community_chest_cards.dict_to_value(community_chest_dict)
```
The above uses a comprehension to create the list of cards for the new deck. Next, the dict_to_value
method is called with the dictionary, which sets each matching card's value to the value listed in the dictionary.
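Continuing the example above, a card drawn from that deck now carries the value taken from the dictionary:
```python
card = community_chest_cards.draw()
print(f'{card.face}: {card.value}')
```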
## Class - Card:
### Attributes:
* **face:** (str) The human readable text of a card (ie: "2" or "Ace") (Default - None)
* **suit:** (str) The human readable text of a card suit (ie: "Spades" or "Red") (Default - None)
* **value:** (int) Value assigned to card (Default - 0)
* **suit_id:** (int) Key sequentially assigned to suits as they are added to a deck (Default - 0)
* **suit_order_num:** (int) The order of the card in the suit it belongs to (Default - 0)
* **child_order_num:** (int) The order of the card in the child deck (Default - None)
* **unique_card_num:** (int) Unique number assigned to each card as they are added to the deck (Default - None)
* **child_deck_num:** (int) The sequence number of the child deck (Default - None)
## Class - AnyDeck:
### **Attributes:**
* **total_cards:** (int) Returns the number of cards which make up the entire deck at creation
* **remaining_cards:** (int) Returns the number of cards remaining in the deck
* **deck_info:** (dict) Returns a dictionary with information about the total deck
### Methods:
### AnyDeck():
During the initialization of the class the arguments are passed to the new_deck method. If the class is
instantiated without cards and suits then a default deck will be generated, which is a standard deck of US playing cards.
**Arguments:**
decks: (int) The number of duplicate decks to compile into the parent deck (Default - 1)
shuffled: (bool) Should the deck be shuffled (Default - False)
wilds: (list) Strings in the list will be added as 'face' to wild cards (Default - None)
(str) String will be added as 'face' to the wild card (Default - None)
(int) Will create int number of wild cards with the 'face' of 'Wild' (Default - None)
suits: (list) Strings in the list will be added as suits in the regular deck (Default - None)
(str) String will be added as the only suit in the regular deck (Default - None)
cards: (list) Strings in the list will be added as cards in the regular deck (Default - None)
(str) String will be added as the only card in the regular deck (Default - None)
special_cards: (Card) One off special cards to be added to the parent deck
override_defaults: (bool) Allows you to override adding the default cards and suits so that only
special or wild cards are added to the deck without any regular cards. (Default - False)
### new_deck():
Creates a new deck of cards from the provided arguments. If no cards and no suits are provided then a default deck
of 2 through 10, Jack, Queen, King and Ace is generated. Wild and special cards can be added to a default deck. If
you instead need a deck with no regular cards you can set 'override_defaults'. The retain arguments allow for
keeping cards between calls to new_deck. Used cards are cards which have already been drawn from the deck. Retaining
used cards will simply maintain the used pile. Unused cards are cards which are still in the deck when new_deck is
called. Retained unused cards are kept at the top of the deck and any new cards are added below them.
Note that wild cards are added with a default suit of 'Wild' and a value of 0.
**Arguments:**
decks: (int) The number of duplicate decks to compile into the parent deck (DEFAULT = 1)
shuffled: (bool) Should the deck be shuffled (DEFAULT = False)
wilds: (list) Strings in the list will be added as 'face' to wild cards (DEFAULT = None)
(str) String will be added as 'face' to the wild card (DEFAULT = None)
(int) Will create int number of wild cards with the 'face' of 'Wild' (DEFAULT = None)
suits: (list) Strings in the list will be added as suits in the regular deck (DEFAULT = None)
(str) String will be added as the only suit in the regular deck (DEFAULT = None)
cards: (list) Strings in the list will be added as cards in the regular deck (DEFAULT = None)
(str) String will be added as the only card in the regular deck (DEFAULT = None)
special_cards: (Card) One off special cards to be added to the parent deck
override_defaults: (bool) Allows you to override adding the default cards and suits so that only
special or wild cards are added to the deck without any regular cards. (DEFAULT = False)
retain_unused:
retain_used:
**Returns:**
Nothing
---
### draw():
Returns a Card object from the deck from the argument provided position. When a card is drawn it is added to the internal
'used_cards' list. At the application level the card does not have to be returned to be considered 'used'.
**Arguments:**
position: (int) Returns card from the index position of the currently unused deck
(str) Returns card from named position (DEFAULT = 'top')
('random') Returns card from a random position of the currently unused deck
('top') Returns card from the top of the unused deck
('bottom') Returns card from the bottom of the unused deck
('middle') Returns card from the middle of the unused deck
**Returns**:
Card - the card drawn from the deck, or None if the deck is empty
---
### draw_hand():
Draws multiple cards into individual hands based on the arguments provided. The alternating argument controls whether
each hand is given one card in turn or each hand is dealt all of its cards before moving on to the next hand.
Should the unused deck run out of cards, the refill argument can be set to add the used cards back into
the active deck so that dealing of the required number of cards can continue. Further, with refill set,
a new deck identical to the original deck will be created and dealing will continue should all cards be
drawn. If refill is set to False, cards are drawn until the deck is depleted and the returned
list will contain only the cards dealt so far, in the order implied by the other arguments.
**Arguments**:
cards: (int) REQUIRED: number of cards requested for each hand
hands: (int) Number of hands requested (DEFAULT = 1)
alternating: (bool) (DEFAULT = True)
(True) Draws cards through alternating hands before beginning the first deck again
(False) Draws all cards to a hand before moving on to subsequent hands
refill: (bool) (DEFAULT = True)
(True) Refill the deck if there are not enough cards to complete the draw
(False) Returns the list up until that last available card
**Returns:**
List of Lists of Cards
---
### add_card():
Add a Card object to the deck from the argument provided position.
**Arguments:**
card: (Card) REQUIRED: Card object to be added
position: (int) Add card to the index position of the currently unused deck
(str) (DEFAULT = 'bottom')
('random') Add card to a random position of the currently unused deck
('top') Add card to the top of the unused deck
('bottom') Add card to the bottom of the unused deck
('middle') Add card to the middle of the unused deck
**Returns:**
Nothing
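A minimal usage sketch, adding a one-off card to the top of an existing deck:
```python
joker = Card(face='Joker', suit='Wild')
cards.add_card(joker, position='top')
```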
---
### shuffle():
Shuffles the unused cards in the deck
**Arguments:**
None
**Returns:**
Nothing
---
### replace_used_cards():
Puts the used cards back into the bottom of the active deck.
**Arguments:**
shuffle_used: (bool) Shuffles the used cards before putting them back into the deck. Calling this
will also clear the list of used cards. (DEFAULT = False)
shuffle: (bool) Shuffles the new deck which now includes the used and unused cards. (DEFAULT = False)
**Returns:**
Nothing
---
### dict_to_value():
Takes a dictionary of face: int value pairs. The unused deck is looped through and if a dictionary key matches
the face of a card, that card's value is updated.
**Arguments:**
value_dict: (dict) Dictionary of key/values to update the int value of a card
**Returns:**
Nothing
---
### get_deck_info():
Returns a dictionary containing the 'child_decks', 'suits', 'cards', 'wilds', 'special_cards' and
'total_cards' of the current deck.
**Arguments:**
None
**Returns:**
dict
---
### clear_values():
Sets the integer value of all unused cards to 'None'
**Arguments:**
None
**Returns:**
Nothing
---
| AnyDeck | /AnyDeck-0.1.8.tar.gz/AnyDeck-0.1.8/README.md | README.md |
import random
DEFAULT_SUITS = ('Hearts', 'Clubs', 'Diamonds', 'Spades')
DEFAULT_CARDS = ('2', '3', '4', '5', '6', '7', '8', '9', '10', 'Jack', 'Queen', 'King', 'Ace')
class Card:
def __init__(self,
face=None,
suit=None,
value=0,
suit_id=0,
suit_order_num=0,
child_order_num=None,
unique_card_num=None,
child_deck_num=None):
self.face = face
self.suit = suit
self.value = value
self.suit_id = suit_id
self.suit_order_num = suit_order_num
self.child_order_num = child_order_num
self.unique_card_num = unique_card_num
self.child_deck_num = child_deck_num
class AnyDeck:
def __init__(self,
decks=1,
shuffled=False,
wilds=None,
suits=None,
cards=None,
special_cards=None,
override_defaults=False):
if suits is None and cards is None and not override_defaults:
suits, cards = DEFAULT_SUITS, DEFAULT_CARDS
self._deck = []
self._used_cards = []
self._child_decks = decks
self._shuffled = bool(shuffled)
self._suits = self.__validate_input(suits)
self._cards = self.__validate_input(cards)
if type(wilds) is int:
wilds = ['Wild'] * wilds
self._wilds = self.__validate_input(wilds)
self._special_cards = self.__validate_card(special_cards)
self.new_deck(decks=self._child_decks,
shuffled=self._shuffled,
wilds=self._wilds,
suits=self._suits,
cards=self._cards,
special_cards=self._special_cards,
override_defaults=override_defaults)
self._total_cards = len(self._deck)
self._remaining_cards = len(self._deck)
self.deck_info = self.get_deck_info()
self._full_deck = self._deck.copy()
@property
def deck(self):
return self._deck
@property
def used_cards(self):
return self._used_cards
@property
def total_cards(self):
return self._total_cards
@property
def remaining_cards(self):
self._remaining_cards = len(self._deck)
return self._remaining_cards
@property
def full_deck(self):
return self._full_deck
def new_deck(self,
decks=1,
shuffled=False,
wilds=None,
suits=None,
cards=None,
special_cards=None,
override_defaults=False,
retain_unused=False,
retain_used=False):
if suits is None and cards is None and not override_defaults:
suits, cards = DEFAULT_SUITS, DEFAULT_CARDS
retained = []
retained_info = dict()
if retain_unused:
retained = [card for card in self._deck]
retained_info = self.get_deck_info()
if not retain_used:
self._used_cards.clear()
self._deck.clear()
self._child_decks = decks
self._shuffled = bool(shuffled)
self._suits = self.__validate_input(suits)
self._cards = self.__validate_input(cards)
if type(wilds) is int:
wilds = ['Wild'] * wilds
self._wilds = self.__validate_input(wilds)
self._special_cards = self.__validate_card(special_cards)
for card in retained:
card.unique_card_num = len(self._deck) + 1
self._deck.append(card)
for card in self._special_cards:
card.unique_card_num = len(self._deck) + 1
self._deck.append(card)
if len(self._suits) == 0:
self._suits = ['']
for child_deck_num in range(decks):
deck_order_num = 1
for suit_id, suit in enumerate(self._suits):
for suit_order_num, face in enumerate(self._cards):
card = Card(face=face,
suit=suit,
value=self.__value_from_face(face),
suit_id=suit_id + 1,
suit_order_num=suit_order_num + 1,
child_order_num=deck_order_num,
unique_card_num=len(self._deck) + 1,
child_deck_num=child_deck_num + 1)
if suit == '':
card.suit = None
deck_order_num += 1
self._deck.append(card)
if self._wilds:
for wild in self._wilds:
card = Card(face=wild,
suit='Wild',
suit_order_num=0,
suit_id=0,
child_order_num=deck_order_num,
unique_card_num=len(self._deck) + 1,
child_deck_num=child_deck_num + 1)
deck_order_num += 1
self._deck.append(card)
self._total_cards = len(self._deck)
if shuffled:
self.shuffle()
if retain_unused:
self._child_decks += retained_info.get('child_decks')
self._suits = list(set(self._suits + retained_info.get('suits')))
self._cards = list(set(self._cards + retained_info.get('cards')))
self._wilds = list(set(self._wilds + retained_info.get('wilds')))
self._special_cards = list(set(self._special_cards + retained_info.get('special_cards')))
self.deck_info = self.get_deck_info()
self._full_deck = self._deck.copy()
def draw(self, position='top'):
if len(self._deck) == 0:
return None
if type(position) is int:
drawn_card = self._deck.pop(int(position))
elif position.lower() == 'random':
drawn_card = self._deck.pop(random.randrange(0, len(self._deck)))
elif position.lower() == 'top':
drawn_card = self._deck.pop(0)
elif position.lower() == 'middle':
drawn_card = self._deck.pop(int(len(self._deck) / 2))
elif position.lower() == 'bottom':
drawn_card = self._deck.pop(len(self._deck) - 1)
else:
raise Exception('position argument is invalid')
self._used_cards.append(drawn_card)
self._remaining_cards = len(self._deck)
return drawn_card
def draw_hand(self, cards, hands=1, alternating=True, refill=True):
drawn_list = []
if hands > 0:
for _ in range(hands):
drawn_list.append([])
if alternating:
for i in range(cards):
for j in range(hands):
if self.remaining_cards > 0:
drawn_list[j].append(self.draw())
elif self.remaining_cards <= 0 and refill:
self.replace_used_cards(shuffle=True)
if len(self.deck) <= 0:
for card in self._full_deck:
self._deck.append(card)
self.shuffle()
drawn_list[j].append(self.draw())
else:
return drawn_list
else:
for i in range(hands):
for j in range(cards):
if self.remaining_cards > 0:
drawn_list[i].append(self.draw())
elif self.remaining_cards <= 0 and refill:
self.replace_used_cards(shuffle=True)
if len(self.deck) <= 0:
for card in self._full_deck:
self._deck.append(card)
self.shuffle()
drawn_list[i].append(self.draw())
else:
return drawn_list
return drawn_list
def add_card(self, card, position='bottom'):
if type(card) is Card:
self._total_cards += 1
card.unique_card_num = self._total_cards
if type(position) is int:
self._deck.insert(int(position), card)
elif position.lower() == 'random':
self._deck.insert(random.randrange(0, len(self._deck)), card)
elif position.lower() == 'top':
self._deck.insert(0, card)
elif position.lower() == 'middle':
self._deck.insert(int(len(self._deck) / 2), card)
elif position.lower() == 'bottom':
self._deck.append(card)
else:
raise Exception('position argument is invalid')
else:
raise Exception('added card must be of type \'Card\'')
def shuffle(self):
random.shuffle(self._deck)
self._shuffled = True
def replace_used_cards(self, shuffle=False, shuffle_used=False):
if shuffle_used:
random.shuffle(self._used_cards)
for card in self._used_cards:
self._deck.append(card)
if shuffle:
random.shuffle(self._deck)
self._remaining_cards = len(self._deck)
self._used_cards.clear()
def dict_to_value(self, value_dict):
for card in self._deck:
if card.face in value_dict:
card.value = value_dict.get(card.face)
def get_deck_info(self):
deck_info = {'child_decks': self._child_decks,
'suits': sorted(self._suits),
'cards': sorted(self._cards),
'wilds': sorted(self._wilds),
'special_cards': [sc.face for sc in self._special_cards if type(self._special_cards) is not None],
'total_cards': self._total_cards}
return deck_info
def clear_values(self):
for card in self._deck:
card.value = None
@staticmethod
def __value_from_face(face):
try:
if face.upper() in ['ACE']:
value = 11
elif face.upper() in ['KING', 'QUEEN', 'JACK']:
value = 10
elif str(face).isnumeric():
value = int(face)
else:
value = 0
except ValueError:
value = 0
return value
@staticmethod
def __validate_input(validate):
output = []
if validate is not None:
if type(validate) is str:
output.append(validate)
else:
output = [item for item in validate if type(item) is str]
if len(output) != len(validate):
raise Exception('Non string item input')
return output
@staticmethod
def __validate_card(validate):
output = []
if validate is not None:
if type(validate) is Card:
output.append(validate)
else:
output = [item for item in validate if type(item) is Card]
if len(output) != len(validate):
raise Exception('Non card object input')
return output
| AnyDeck | /AnyDeck-0.1.8.tar.gz/AnyDeck-0.1.8/src/anydeck/anydeck.py | anydeck.py |
.. |af| replace:: AnyFilter
====
|af|
====
|af| is a simple base class for defining data filters. It provides the
following functionality:
* stores configurations in JSON
* retains previous versions of configurations with user info and timestamp
* isolates custom code
The intent is to create a subclass of the ``Filter`` class in any case where custom
code has to be written. This keeps the custom code out of the primary
workflow and codebase, and allows ``Filter`` subclasses to be inserted and toggled
as needed, while storing configurations for the filters outside the codebase
and the application's primary database for portability and ease of maintenance.
Motivation
==========
The problem which led to this solution is the need to consume data from
end users. Usually, and especially when the users are clients, this data can
not be relied upon to meet the input specifications of your system.
This usually leads one of these sub-optimal solutions:
* Have custom scripts to pre-process data per client
* Adding a bunch of ``if`` statements, or other similar logic to the core product
* Attempting to make transformation scripts generic enough to re-use, thus
making them less useful for their primary purpose and harder to debug
* Hard-coding data into transformation scripts
There is often a custom transformation per client or project,
so these solutions do not scale well.
The goals of |af| are:
* Minimize the amount of custom code in the primary codebase
* Store configurations outside the application's database in a portable format
* Allow updates of configuration data without deploying code (privileged users
may even edit a configuration via some sort of GUI)
Benefits
========
* create pluggable data filters
* store configurations outside your application's database
* easily back up and restore configurations
* easily duplicate configurations across servers
Planned features
================
* Export and import configs
* Convert configs to and from HTML forms for easy front-end functionality
* Easily revert to a prior config
* Comprehensive unit tests
Sample usage
============
.. code-block:: python
#!/usr/bin/env python
"""
This is a simple example of the use of the Filter class. In this case, a
dictionary has some keys renamed. This is a trivial example; filters
can be as complex as required.
"""
from anyfilter import Filter
class NameFilter(Filter):
"""
A filter that changes the names of dictionary keys.
'data' should be an iterable of dictionaries
"""
def __call__(self, data):
"""
The contents of this function are the least-important part of
this demo. This is where your custom code will go, doing whatever
it is you need with whatever config format and content you need.
"""
for rec in data:
# The config for this filter is a dictionary where the
# key is the key name to replace, and the value is the new name.
# update values in "data" dict
for key, value in self.config.items():
if key in rec:
rec[value] = rec[key]
del rec[key]
return data
if __name__ == '__main__':
import os # for dealing with the environment variable manually
# set environment variable for demo purposes
original_envvar = os.environ.get('FILTER_CONFIG_DIR', '')
os.environ['FILTER_CONFIG_DIR'] = '/tmp'
# Instantiate subclass. The only argument is the uid of the subject
# of the filter. For example, if you need to store different rules
# per user of your site, you might use the user's primary key here.
# This allows storage of configs per filter *and* per user.
name_filter = NameFilter('foo')
# Set some filter items. This normally won't be a part of the flow.
# It's here for demo purposes. In normal usage, the config would
# already be set and probably rarely updated.
name_filter.config = {
'dog': 'canine',
'cat': 'feline',
'horse': 'equine',
}
name_filter.save_config(user='example')
data = [{
'cat': 'meow',
'dog': 'woof',
'horse': 'neigh',
'foo': 'bar',
}]
print(data)              # original
print(name_filter(data)) # altered
# Put it back like we found it, just to be good citizens.
os.environ['FILTER_CONFIG_DIR'] = original_envvar
Sample output
=============
::
[{'horse': 'neigh', 'foo': 'bar', 'dog': 'woof', 'cat': 'meow'}]
[{'equine': 'neigh', 'feline': 'meow', 'canine': 'woof', 'foo': 'bar'}]
| AnyFilter | /AnyFilter-0.05.tar.gz/AnyFilter-0.05/README.rst | README.rst |
from datetime import datetime
import os
import json
class Filter(object):
def __init__(self, uid):
"""
Initialize the filter. This requires a UID which is used to determine
which configuration file is to be used. This also requires that the
FILTER_CONFIG_DIR environment variable be set.
If the config file is successfully located, it is loaded into
the namespace as self.config for use by the __call__ method.
To use the Filter class, subclass it and define a __call__ method.
The __call__ method should accept an iterable of dictionaries and
return the same iterable of dictionaries, potentially altered.
"""
# test that FILTER_CONFIG_DIR exists and is a valid path.
filter_dir = os.environ.get('FILTER_CONFIG_DIR', '')
if filter_dir == '':
raise ValueError("FILTER_CONFIG_DIR must be set.")
if not os.path.isdir(filter_dir):
raise ValueError("FILTER_CONFIG_DIR is invalid.")
# this is the key to use to find the config
config_key = "{0}_{1}.json".format(self.__class__.__name__, uid)
self.config_file = os.path.join(filter_dir, config_key)
self.config = self.get_config()
def get_raw_config(self):
"""
Grab full JSON file, return empty list if not there.
"""
if os.path.isfile(self.config_file):
with open(self.config_file, 'r') as raw:
raw_json = raw.read()
try:
configs = json.loads(raw_json)
except ValueError as ex:
# config empty or corrupted; return default
return []
return configs
else:
return []
def get_config(self):
"""
Get config dict for instance.
"""
config_list = self.get_raw_config()
if len(config_list) == 0:
config = {}
else:
# the config_dict has three keys:
# config: actual config dict
# created_date: date config saved
# user: user who saved config
config_list.sort(key=lambda x: x['created_date'])
config_dict = config_list[-1]
config = config_dict['config']
return config
def save_config(self, user):
"""
Save the config if it's been changed, or do nothing.
"""
old_config = self.get_config()
if self.config == old_config:
return True
raw = self.get_raw_config()
raw.append({
'created_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'user': user,
'config': self.config,
})
# write updated config
with open(self.config_file, 'w') as output:
output.write(json.dumps(raw))
def update_config(self, data, user):
"""
Accepts a HTML form post data and updates the config dict
using the values parsed from it.
"""
# This class's name; used to parse POST data.
class_name = self.__class__.__name__
# Extract just the keys from the POST that pertain to this filter.
raw = filter(lambda x: x.startswith(class_name), data)
# Turn key/value pairs into config dict.
config = {}
for key in raw:
key_prefix = "{0}_key".format(class_name)
val_prefix = "{0}_val".format(class_name)
if not key.startswith(key_prefix):
continue
# Chop off just the unique part of the key (which should match
# the unique part of the value), so key/value pairs can be
# made from the POST data.
suffix = key.split(key_prefix)[-1]
key = data["{0}{1}".format(key_prefix, suffix)].strip()
val = data["{0}{1}".format(val_prefix, suffix)].strip()
config[key] = val
self.config = config
self.save_config(user=user)
| AnyFilter | /AnyFilter-0.05.tar.gz/AnyFilter-0.05/anyfilter/__init__.py | __init__.py |
import os
import simplejson
from twisted.internet import defer
from twisted.python import log
from twisted.internet import error
class PersistentQueue(object):
"""
Implementation of a queue for the failsafe processing of items
through an API call.
Note that L{PersistentQueue.save} needs explicitly be called for
the queue to be saved.
"""
# Private variables
_queue = None
_state_file = None
def __init__(self, state_file = None):
self._queue = []
if state_file:
self._state_file = state_file
self._state_file = os.path.expanduser(self._state_file)
if os.path.exists(self._state_file):
self._queue = simplejson.load(open(self._state_file, 'r'))
def save(self):
"""
Saves the current queue to the state file. When the queue is
empty, it is not saved and the state file is thrown away.
"""
if len(self._queue):
log.msg("Saving submit queue state.")
simplejson.dump( self._queue, open(self._state_file, 'w'))
elif os.path.exists(self._state_file):
os.unlink(self._state_file)
def add(self, item):
"""
Adds an item to the queue.
"""
self._queue.append(item)
def size(self):
"""
Returns the current size of the queue.
"""
return len(self._queue)
def processBatch(self, callable, max=10):
"""
Process the next batch of items which are waiting to be
sent. For every item, the callable is called which is expected
to return a deferred.
This function itself returns a deferred which will fire when
the entire current batch has completed. Return value of this
deferred is a (success count, fail count) tuple.
"""
if not self._queue:
log.msg("Nothing in the queue...")
return defer.succeed(True)
items, self._queue = self._queue[:max], self._queue[max:]
log.msg("Submitting %d item(s)" % len(items))
ds = []
for item in items:
ds.append(callable(item))
l = defer.DeferredList(ds, consumeErrors = True)
def cb(result):
i = 0
success_count = 0
fail_count = 0
for state, r in result:
if not state or not r:
# submission of item failed, re-add it to queue
self._queue.append(items[i])
log.err("Submit of %s failed!" % items[i])
log.err(r)
fail_count += 1
else:
# submission succeeded
success_count += 1
i += 1
return success_count, fail_count
l.addCallback(cb)
return l
class Task(object):
value = None
status = 'new'
def __init__(self, value):
self.value = value
class TaskNotAssociatedError(Exception):
"""
This task is not associated with this queue.
"""
class TaskQueue(object):
"""
An event driven task queue.
Values may be added as usual to this queue. When an attempt is
made to retrieve a value when the queue is empty, a Deferred is
returned which will fire when a value becomes available.
"""
def __init__(self):
self.waiting = []
self._tasks = set()
self.pending = []
def createTask(self, value):
task = Task(value)
task.queue = self
self._tasks.add(task)
return task
def _enqueue(self, task):
"""
Enqueue a task.
If a consumer is waiting, its callback is called with the task,
otherwise it is in the queue of pending tasks.
"""
if self.waiting:
task.status = 'in_progress'
self.waiting.pop(0).callback(task)
else:
self.pending.append(task)
def put(self, value):
"""
Create a new task and add it the queue.
When retrieving the enqueued task, the value is stored in the
C{value} attribute of the task instance.
@param value: The value that represents the task.
@return: The new task.
"""
task = self.createTask(value)
self._enqueue(task)
return task
def get(self):
"""
Attempt to retrieve and remove a task from the queue.
The returned task will contain the value as it was queued with L{put}
in the C{value} attribute. As the queue keeps track of created tasks,
it is required to call L{retry}, L{fail} or L{done} after
processing the task.
@return: A Deferred which fires with the next task available in the
queue.
"""
if self.pending:
task = self.pending.pop(0)
task.status = 'in_progress'
return defer.succeed(task)
else:
d = defer.Deferred()
self.waiting.append(d)
return d
def retry(self, task):
"""
Retry a task.
The task, gotten through L{get}, is requeued for later retry.
"""
if not task in self._tasks:
raise TaskNotAssociatedError()
task.status = 'retryable'
self._enqueue(task)
def fail(self, task):
"""
Fail a task.
The task, gotten through L{get}, is not requeued for later retry,
but kept in L{tasks} for later inspection. The task can be retried
by calling L{retry}.
"""
if not task in self._tasks:
raise TaskNotAssociatedError()
task.status = 'failed'
def succeed(self, task):
"""
Succeed a task.
The task, gotten through L{get}, is not requeued and removed from
its record of tasks in L{tasks}.
"""
if not task in self._tasks:
raise TaskNotAssociatedError()
task.status = 'done'
self._tasks.remove(task)
class SQLiteTaskQueue(object):
"""
An event driven task queue.
Values may be added as usual to this queue. When an attempt is
made to retrieve a value when the queue is empty, a Deferred is
returned which will fire when a value becomes available.
"""
def __init__(self, connection):
self._connection = connection
self._cursor = connection.cursor()
self.waiting = []
self.pending = []
self.fillQueue()
def fillQueue(self):
try:
self._cursor.execute("""SELECT rowid, value, status FROM tasks
WHERE status != 'failed'""")
except self._connection.OperationalError:
# table does not exist. Try to create it
self._cursor.execute("""CREATE TABLE tasks (value text,
status text)""")
self._connection.commit()
else:
for rowid, value, status in self._cursor:
try:
task = Task(simplejson.loads(value))
except ValueError:
log.msg("Invalid task in storage: %d, %r" % (rowid, value))
continue
task.identifier = rowid
task.status = status
task.queue = self
self._enqueue(task)
def createTask(self, value):
task = Task(value)
self._cursor.execute("""INSERT INTO tasks (value, status)
VALUES (?, 'new')""",
(simplejson.dumps(value),))
self._connection.commit()
task.identifier = self._cursor.lastrowid
task.queue = self
return task
def _enqueue(self, task):
"""
Enqueue a task.
If a consumer is waiting, its callback is called with the task,
otherwise it is in the queue of pending tasks.
"""
if self.waiting:
task.status = 'in_progress'
self.waiting.pop(0).callback(task)
else:
self.pending.append(task)
def put(self, value):
"""
Create a new task and add it the queue.
When retrieving the enqueued task, the value is stored in the
C{value} attribute of the task instance.
@param value: The value that represents the task.
@return: The new task.
"""
task = self.createTask(value)
self._enqueue(task)
return task
def get(self):
"""
Attempt to retrieve and remove a task from the queue.
The returned task will contain the value as it was queued with L{put}
in the C{value} attribute. As the queue keeps track of created tasks,
it is required to call L{retry}, L{fail} or L{done} after
processing the task.
@return: A Deferred which fires with the next task available in the
queue.
"""
if self.pending:
task = self.pending.pop(0)
task.status = 'in_progress'
return defer.succeed(task)
else:
d = defer.Deferred()
self.waiting.append(d)
return d
def retry(self, task):
"""
Retry a task.
The task, gotten through L{get}, is requeued for later retry.
"""
if task.queue != self:
raise TaskNotAssociatedError()
self._cursor.execute("""UPDATE tasks SET status='retryable'
WHERE rowid=?""",
(task.identifier,))
if self._cursor.rowcount < 1:
raise TaskNotAssociatedError()
self._connection.commit()
task.status = 'retryable'
self._enqueue(task)
def fail(self, task):
"""
Fail a task.
The task, gotten through L{get}, is not requeued for later retry,
but kept in L{tasks} for later inspection. The task can be retried
by calling L{retry}.
"""
if task.queue != self:
raise TaskNotAssociatedError()
self._cursor.execute("UPDATE tasks SET status='failed' WHERE rowid=?",
(task.identifier,))
if self._cursor.rowcount < 1:
raise TaskNotAssociatedError()
self._connection.commit()
task.status = 'failed'
def succeed(self, task):
"""
Succeed a task.
The task, gotten through L{get}, is not requeued and removed from
its record of tasks in L{tasks}.
"""
if task.queue != self:
raise TaskNotAssociatedError()
self._cursor.execute("""DELETE FROM tasks WHERE rowid=?""",
(task.identifier,))
if self._cursor.rowcount < 1:
raise TaskNotAssociatedError()
self._connection.commit()
task.status = 'done'
class RetryError(Exception):
"""
Container of a failure to signal that retries are possible.
"""
def __init__(self, failure):
Exception.__init__(self, failure)
self.subFailure = failure
class TaskQueueRunner(object):
"""
Basic submission queue runner.
This runner makes no assumptions on the types of tasks and retry
behaviour. Once L{run} is called, it reschedules itself according
to L{delay}.
"""
clock = None
delay = 0
def __init__(self, queue, callable):
self.queue = queue
self.callable = callable
if self.clock is None:
from twisted.internet import reactor
self.clock = reactor
self.run()
def run(self):
def succeed(_, task):
self.queue.succeed(task)
def retry(failure, task):
failure.trap(RetryError)
log.err(failure.value.subFailure.value, "Retrying task")
self.queue.retry(task)
def fail(failure, task):
log.err(failure.value, "Failing task")
self.queue.fail(task)
def call(task):
d = self.callable(task.value)
d.addCallback(succeed, task)
d.addErrback(retry, task)
d.addErrback(fail, task)
return d
d = self.queue.get()
d.addCallback(call)
d.addCallback(lambda _: self.clock.callLater(self.delay, self.run))
class APIQueuer(object):
def __init__(self, queue, controller):
self.queue = queue
self.controller = controller
self.runner = TaskQueueRunner(self.queue, self.doMethod)
def add(self, method, args):
value = (method, args)
task = self.queue.put(value)
def doMethod(self, value):
def trapConnectError(failure):
failure.trap(error.ConnectError,
error.TimeoutError,
error.ConnectionClosed)
self.runner.delay = 5
raise RetryError(failure)
def succeeded(result):
self.runner.delay = 0
method, args = value
d = defer.maybeDeferred(getattr(self.controller, method), args)
d.addCallback(succeeded)
d.addErrback(trapConnectError)
return d
| AnyMetaAPI | /AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/queue.py | queue.py |
import httplib
import os
import sys
import tempfile
import urllib
from xml.dom import minidom
from pydataportability.xrds import parser as xrdsparser
from oauth.oauth import OAuthError
class OAuthDiscovery:
@staticmethod
def discover(uri):
"""
Discover the OAuth {request,authorize,access} endpoints and
possibly the static consumer key at the given URI.
"""
xml = OAuthDiscovery.__discoverXRDS(uri)
if xml is None:
raise OAuthError("Could not discover XRDS file")
fn = tempfile.mktemp()
fp = open(fn, 'w')
fp.write(xml)
fp.close()
result = {}
parser = xrdsparser.XRDSParser(open(fn, 'r'))
os.unlink(fn)
for service in parser.services:
if service.type == 'http://oauth.net/discovery/1.0/consumer-identity/oob':
result['oob_uri'] = service.uris[0].uri
elif service.type == 'http://oauth.net/core/1.0/endpoint/request':
result['request_uri'] = service.uris[0].uri
elif service.type == 'http://oauth.net/core/1.0/endpoint/access':
result['access_uri'] = service.uris[0].uri
elif service.type == 'http://oauth.net/core/1.0/endpoint/authorize':
result['authorize_uri'] = service.uris[0].uri
elif service.type == 'http://oauth.net/discovery/1.0/consumer-identity/static':
if hasattr(service, 'localid'):
# pydataportability.xrds < 0.2
result['static_key'] = service.localid.text
elif hasattr(service, 'localids'):
# pydataportability.xrds >= 0.2
result['static_key'] = service.localids[0].localid
return result
@staticmethod
def __discoverXRDS(uri, recur = 0):
if recur > 10:
return None
try:
body, headers = OAuthDiscovery.__request(uri)
except Exception:
raise OAuthError("HTTP Error discovering")
headers = dict(headers)
if "content-type" in headers and headers['content-type'] == 'application/xrds+xml':
return body
location = None
if 'x-xrds-location' in headers:
location = headers['x-xrds-location']
elif 'location' in headers:
location = headers['location']
if location is None or location == uri:
return None
return OAuthDiscovery.__discoverXRDS(location, recur + 1)
@staticmethod
def __request(uri):
host = urllib.splithost(urllib.splittype(uri)[1])[0]
conn = httplib.HTTPConnection(host)
headers = {'Accept': 'application/xrds+xml'}
conn.request('GET', uri, '', headers)
response = conn.getresponse()
return response.read(), response.getheaders()
| AnyMetaAPI | /AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/api/discovery.py | discovery.py |
import ConfigParser
import os, sys
from base import AnyMetaException
class AnyMetaRegistryException(Exception):
pass
class AnyMetaRegistry:
__instance = None
cfgfile = None
config = None
@staticmethod
def getInstance():
"""
Get the registry singleton, creating it if it's the first
time.
"""
if AnyMetaRegistry.__instance is None:
AnyMetaRegistry.__instance = AnyMetaRegistry()
return AnyMetaRegistry.__instance
def __init__(self, cfgfile = None):
"""
Initializes the registry singleton.
"""
if cfgfile is not None:
self.cfgfile = cfgfile
else:
userfile = os.path.expanduser("~/.anymeta")
if os.path.exists(userfile):
self.cfgfile = userfile
elif os.path.exists("/etc/anymeta.conf"):
self.cfgfile = "/etc/anymeta.conf"
else:
# create userfile
self.cfgfile = userfile
open(userfile, "w").close()
self.config = ConfigParser.ConfigParser()
self.config.read([self.cfgfile])
def get(self, key):
"""
Get a registry entry from the config file.
"""
s = self.config.sections()
if key not in s:
raise AnyMetaRegistryException("Unknown config section '%s'" % key)
return {'entrypoint': self.config.get(key, 'entrypoint'),
'oauth': {'c_key': self.config.get(key, 'c_key'),
'c_sec': self.config.get(key, 'c_sec'),
't_key': self.config.get(key, 't_key'),
't_sec': self.config.get(key, 't_sec')
},
'comment': self.config.get(key, 'comment')}
def getAll(self):
"""
Return all registry entries from the config file.
"""
all = {}
for k in self.config.sections():
all[k] = self.get(k)
return all
def set(self, key, entrypoint, oauth, comment = ""):
"""
Set a registry entry in the config file.
"""
if key not in self.config.sections():
self.config.add_section(key)
self.config.set(key, 'entrypoint', entrypoint)
self.config.set(key, 'comment', comment)
try:
self.config.set(key, 'c_key', oauth['c_key'])
self.config.set(key, 'c_sec', oauth['c_sec'])
self.config.set(key, 't_key', oauth['t_key'])
self.config.set(key, 't_sec', oauth['t_sec'])
except KeyError:
raise AnyMetaRegistryException('Incomplete oauth data')
def delete(self, key):
"""
Delete a registry item from the config file.
"""
self.config.remove_section(key)
def save(self):
"""
Saves the config file to ~/.anymeta.
"""
fp = open(self.cfgfile, 'w')
self.config.write(fp)
fp.close()
def exists(self, key):
return key in self.config.sections()
def register_interactive(self, key, entrypoint, comment = "", c_key = None, c_sec = None, callback = None):
"""
Convenience function which creates a new OAuth key to use with
AnyMeta, by doing discovery of the end points, authorizing the
request interactively (the call prints instructions and
pauses), and afterwards, saving the key in the registry.
"""
if self.exists(key):
print "*** Registration key '%s' already exists, skipping... ***" % key
print
return
if callback is None:
def wait_for_url(url):
print "*" * 60
print "Please go to the following URL to authorize your request."
print "When you're done, press ENTER here to finish."
print
print ">>> ", url
print
print "*" * 60
sys.stdin.readline()
callback = wait_for_url
from discovery import OAuthDiscovery
import oauth.oauth as oauth
import httplib, urllib
sigmethod = oauth.OAuthSignatureMethod_HMAC_SHA1()
connection = httplib.HTTPConnection(urllib.splithost(urllib.splittype(entrypoint)[1])[0])
oauthinfo = OAuthDiscovery.discover(entrypoint)
# Create the consumer
if c_key is None and not 'static_key' in oauthinfo:
raise AnyMetaRegistryException("No consumer key given and no static consumer key discovered")
if c_key is not None:
consumer = oauth.OAuthConsumer(c_key, c_sec)
else:
consumer = oauth.OAuthConsumer(oauthinfo['static_key'], '')
# Get request token
oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer, http_url=oauthinfo['request_uri'])
oauth_request.sign_request(sigmethod, consumer, None)
connection.request(oauth_request.http_method, oauthinfo['request_uri'], headers=oauth_request.to_header())
token = oauth.OAuthToken.from_string(connection.getresponse().read())
# Authorize request token (interactively)
oauth_request = oauth.OAuthRequest.from_token_and_callback(token=token, callback=None, http_url=oauthinfo['authorize_uri'])
url = oauth_request.to_url()
callback(url)
oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer, token=token, http_url=oauthinfo['access_uri'])
oauth_request.sign_request(sigmethod, consumer, token)
connection.request(oauth_request.http_method, oauthinfo['access_uri'], headers=oauth_request.to_header())
s = connection.getresponse().read()
token = oauth.OAuthToken.from_string(s)
oauth = {'c_key': consumer.key,
'c_sec': consumer.secret,
't_key': token.key,
't_sec': token.secret }
print "Saving... ",
self.set(key, entrypoint, oauth, comment)
self.save()
print "done!"
def get():
"""
Shortcut to call the registry with anymeta.registry.get()
"""
return AnyMetaRegistry.getInstance()
| AnyMetaAPI | /AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/api/registry.py | registry.py |
from oauth.oauth import OAuthSignatureMethod, OAuthToken, OAuthConsumer, OAuthRequest, OAuthSignatureMethod_HMAC_SHA1, escape, _utf8_str
import httplib, urllib, simplejson, base64, os
class APIMethodPart(object):
api = None
part = ''
def __init__(self, api, part):
self.api = api
self.part = part
def __getattr__(self, part):
return APIMethodPart(self.api, self.part + '.' + part)
def __call__(self, **kwargs):
return self.api.doMethod(self.part, kwargs)
class AnyMetaException(Exception):
code = None
message = None
def __init__(self, code, msg):
self.code = code
self.message = msg
Exception.__init__(self, "%d: %s" % (code, msg))
pass
class AnyMetaAPI(object):
entrypoint = None
consumer = None
token = None
sigmethod = None
_getPage = None
last_headers = None
def __init__(self, entrypoint, oauth, **args):
"""
Initializes the AnyMeta API
"""
self.entrypoint = entrypoint
self.oauth = oauth
self.consumer = OAuthConsumer(oauth['c_key'], oauth['c_sec'])
self.token = OAuthToken(oauth['t_key'], oauth['t_sec'])
self.sigmethod = OAuthSignatureMethod_HMAC_SHA1()
engine = args.get('engine', 'httplib')
self._getPage = getattr(self, '_getPage_%s' % engine)
@staticmethod
def from_registry(key, **args):
"""
Create an AnyMeta instance by looking into the local key registry.
"""
from registry import AnyMetaRegistry
registry = AnyMetaRegistry.getInstance()
r = registry.get(key)
return AnyMetaAPI(r['entrypoint'], r['oauth'], **args)
def __getattr__(self, base):
return APIMethodPart(self, base)
def exp(self, args):
return self.__expand_parameters(args)
def __expand_parameters(self, arg, key = "", prefix = ""):
"""
Flatten parameters into a URL-compliant array.
"""
newargs = {}
if prefix == "":
nextprefix = "%s"
else:
nextprefix = "[%s]"
if type(arg) == dict:
for k in arg.keys():
n = self.__expand_parameters(arg[k], k, nextprefix % k)
for k2 in n.keys():
newargs[prefix+k2] = n[k2]
elif type(arg) == list:
for i in range(len(arg)):
n = self.__expand_parameters(arg[i], str(i), nextprefix % str(i))
for k in n.keys():
newargs[prefix+k] = n[k]
else:
if prefix == key:
nextprefix = "%s"
newargs[nextprefix % key] = arg
return newargs
def __parse_parameters(self, args):
"""
Convert arguments to AnyMeta compliance.
This includes some handy magic for uploading files: if a string starts
with an '@', and it will be considered a file, and replaced by the
base64 data of the file.
"""
if type(args) != dict:
return args
for k in args.keys():
if (type(args[k]) == str or type(args[k]) == unicode) \
and len(args[k]) > 0 and args[k][0] == "@":
filename = args[k][1:]
if filename == os.path.abspath(filename):
# only accept absolute paths
fp = open(args[k][1:], "r")
data = base64.b64encode("".join(fp.readlines()))
fp.close()
args[k] = data
elif type(args[k]) == dict:
args[k] = self.__parse_parameters(args[k])
elif type(args[k]) == list:
for i in range(len(args[k])):
args[k][i] = self.__parse_parameters(args[k][i])
return args
def doMethod(self, method, parameters, http_method="POST", headers=None, data=None, format='json'):
"""
Call the specified AnyMeta method. Currently, all requests are
done as POST."""
parameters = self.__parse_parameters(parameters)
parameters['method'] = method
parameters['format'] = format
parameters = self.__expand_parameters(parameters)
request = OAuthRequest.from_consumer_and_token(self.consumer,
token=self.token,
http_method=http_method,
http_url=self.entrypoint,
parameters=parameters)
request.sign_request(self.sigmethod, self.consumer, self.token)
headers = headers or {}
if data is None:
headers['Content-Type'] = 'application/x-www-form-urlencoded'
url = self.entrypoint
postdata = '&'.join(['%s=%s' % (escape(_utf8_str(k)),
escape(_utf8_str(v))) \
for k, v in request.parameters.iteritems()])
else:
encodedParameters = '&'.join(['%s=%s' % (escape(str(k)),
escape(str(v)))
for k, v in parameters.iteritems()
if not k.startswith('oauth_')])
url = "%s?%s" % (self.entrypoint, encodedParameters)
postdata = data
headers.update(request.to_header())
return self._getPage(http_method, str(url), postdata, headers, format)
def _processPage(self, page, format):
if format != 'json':
return page
try:
result = simplejson.loads(unicode(page))
except ValueError, e:
raise AnyMetaException(0, "API error: %s\n\n%s" % (e, page))
if type(result) == dict and 'err' in result:
err = result['err']['-attrib-']
raise AnyMetaException(err['code'], err['msg'])
return result
def _getPage_httplib(self, http_method, url, body, headers, format):
host = urllib.splithost(urllib.splittype(url)[1])[0]
conn = httplib.HTTPConnection(host)
conn.request(http_method, url, body=body, headers=headers)
response = conn.getresponse()
self.last_headers = dict(response.getheaders())
page = response.read()
return self._processPage(page, format)
def _getPage_twisted(self, http_method, url, body, headers, format):
from twisted.web import client
d = client.getPage(url, method=http_method, postdata=body,
headers=headers)
d.addCallback(self._processPage, format)
return d
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/api/base.py
|
base.py
|
# regular imports
import sys
import re
from optparse import OptionParser
from anymeta import __version__
from anymeta.api import AnyMetaAPI
from anymeta.api.registry import AnyMetaRegistry, AnyMetaRegistryException
def usage():
"""
Show usage message and exit.
"""
print "any-registry %s" % __version__
print "Usage %s [opts] <command> [cmdoptions]" % sys.argv[0]
print
print "Command is one of:"
print " list - List all registry entries"
print " gui - Show graphical user interface (linux only)"
print " add <id> <url> - Add API endpoint"
print " del <id> - Remove API endpoint"
print " cli <id> - Commandline interface"
print
sys.exit(1)
def print_list(r):
"""
Print a list entries in the given registry.
"""
all = r.getAll()
for entry in all.keys():
print "%-20s- %s" % (entry, all[entry]['entrypoint'])
print
def main():
"""
Main entry point for cli access.
"""
parser = OptionParser()
parser.add_option("-f", "--file", help="Registry file", action='store')
(options, args) = parser.parse_args()
registry = AnyMetaRegistry(options.file)
if len(args) < 1:
usage()
cmd = args[0]
args = args[1:]
print "Registry: %s" % registry.cfgfile
print
if cmd == "list":
print_list(registry)
print "OK"
elif cmd == "gui":
from anymeta.gtk import registry
d = registry.RegistryDialog()
d.run_as_main()
elif cmd == "cli":
try:
id, = tuple(args)
except Exception, e:
usage()
try:
r = registry.get(id)
except AnyMetaRegistryException, e:
print "No such entry"
exit()
welcomemsg = "Use the 'api' python variable to access %s (%s)" % (id, r['entrypoint'])
api = AnyMetaAPI.from_registry(id)
result = api.doMethod("anymeta.user.info", {})
welcomemsg += "\n\nLogged in as %s." % result['title']
try:
            # Forces the ImportError fallback below, skipping the twisted.conch
            # console path in favour of the plain interpreter.
            raise ImportError()
from twisted.conch import stdio
from twisted.internet import reactor, stdio as sio
from twisted.conch.insults.insults import ServerProtocol
class WelcomeManhole(stdio.ConsoleManhole):
def initializeScreen(self):
self.terminal.reset()
self.terminal.write("\n%s\n\n" % welcomemsg)
self.terminal.write(self.ps[self.pn])
self.setInsertMode()
import tty, termios, os
fd = sys.__stdin__.fileno()
oldSettings = termios.tcgetattr(fd)
tty.setraw(fd)
try:
p = stdio.ServerProtocol(WelcomeManhole, {'api': api})
sio.StandardIO(p)
reactor.run()
finally:
termios.tcsetattr(fd, termios.TCSANOW, oldSettings)
os.write(fd, "\r\x1bc\r")
except ImportError,e :
import os
print welcomemsg
os.system("python -i -c\"from anymeta.api import AnyMetaAPI; api = AnyMetaAPI.from_registry('%s');\"" % id)
print "Bye!"
print
elif cmd == "add":
try:
id, url = tuple(args)
except Exception, e:
usage()
if not re.match(r'^https?://', url):
url = "http://" + url
if url[-1:] != "/":
url += "/"
if not re.match(r'^.*services/rest/', url):
url += "services/rest/"
try:
r = registry.get(id)
print "Already registered"
print
print "'%s' is already linked, to %s" % (id, r['entrypoint'])
print
sys.exit(1)
except AnyMetaRegistryException, e:
pass
try:
registry.register_interactive(id, url)
except Exception, e:
print "Error registering: ", e
print
print "Please provide a valid AnyMeta endpoint as second argument."
print
sys.exit(1)
print "OK"
elif cmd == "del":
try:
(id,) = tuple(args)
except Exception, e:
usage()
try:
r = registry.get(id)
except AnyMetaRegistryException, e:
print "Unknown id: " + id
print
sys.exit(1)
registry.delete(id)
registry.save()
print "OK"
else:
usage()
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/api/cli.py
|
cli.py
|
import cgi
import urllib
import time
import random
import urlparse
import hmac
import binascii
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class OAuthError(RuntimeError):
"""Generic exception class."""
    def __init__(self, message='OAuth error occurred.'):
self.message = message
def build_authenticate_header(realm=''):
"""Optional WWW-Authenticate header (401 error)"""
return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
def escape(s):
"""Escape a URL including any /."""
return urllib.quote(s, safe='~')
def _utf8_str(s):
"""Convert unicode to utf-8."""
if isinstance(s, unicode):
return s.encode("utf-8")
else:
return str(s)
def generate_timestamp():
"""Get seconds since epoch (UTC)."""
return int(time.time())
def generate_nonce(length=8):
"""Generate pseudorandom number."""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
def generate_verifier(length=8):
"""Generate pseudorandom number."""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
class OAuthConsumer(object):
"""Consumer of OAuth authentication.
OAuthConsumer is a data type that represents the identity of the Consumer
via its shared secret with the Service Provider.
"""
key = None
secret = None
def __init__(self, key, secret):
self.key = key
self.secret = secret
class OAuthToken(object):
"""OAuthToken is a data type that represents an End User via either an access
or request token.
key -- the token
secret -- the token secret
"""
key = None
secret = None
callback = None
callback_confirmed = None
verifier = None
def __init__(self, key, secret):
self.key = key
self.secret = secret
def set_callback(self, callback):
self.callback = callback
self.callback_confirmed = 'true'
def set_verifier(self, verifier=None):
if verifier is not None:
self.verifier = verifier
else:
self.verifier = generate_verifier()
def get_callback_url(self):
if self.callback and self.verifier:
# Append the oauth_verifier.
parts = urlparse.urlparse(self.callback)
scheme, netloc, path, params, query, fragment = parts[:6]
if query:
query = '%s&oauth_verifier=%s' % (query, self.verifier)
else:
query = 'oauth_verifier=%s' % self.verifier
return urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
return self.callback
def to_string(self):
data = {
'oauth_token': self.key,
'oauth_token_secret': self.secret,
}
if self.callback_confirmed is not None:
data['oauth_callback_confirmed'] = self.callback_confirmed
return urllib.urlencode(data)
def from_string(s):
""" Returns a token from something like:
oauth_token_secret=xxx&oauth_token=xxx
"""
params = cgi.parse_qs(s, keep_blank_values=False)
key = params['oauth_token'][0]
secret = params['oauth_token_secret'][0]
token = OAuthToken(key, secret)
try:
token.callback_confirmed = params['oauth_callback_confirmed'][0]
except KeyError:
pass # 1.0, no callback confirmed.
return token
from_string = staticmethod(from_string)
def __str__(self):
return self.to_string()
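# Example (sketch): OAuthToken serialization round-trip.  Key and secret are
# made-up values; the parameter order in the serialized string may vary.
#
#     t = OAuthToken('tok', 'secret')
#     s = t.to_string()                  # e.g. 'oauth_token_secret=secret&oauth_token=tok'
#     t2 = OAuthToken.from_string(s)
#     assert (t2.key, t2.secret) == ('tok', 'secret')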
class OAuthRequest(object):
"""OAuthRequest represents the request and can be serialized.
OAuth parameters:
- oauth_consumer_key
- oauth_token
- oauth_signature_method
- oauth_signature
- oauth_timestamp
- oauth_nonce
- oauth_version
- oauth_verifier
... any additional parameters, as defined by the Service Provider.
"""
parameters = None # OAuth parameters.
http_method = HTTP_METHOD
http_url = None
version = VERSION
def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
self.http_method = http_method
self.http_url = http_url
self.parameters = parameters or {}
def set_parameter(self, parameter, value):
self.parameters[parameter] = value
def get_parameter(self, parameter):
try:
return self.parameters[parameter]
except:
raise OAuthError('Parameter not found: %s' % parameter)
def _get_timestamp_nonce(self):
return self.get_parameter('oauth_timestamp'), self.get_parameter(
'oauth_nonce')
def get_nonoauth_parameters(self):
"""Get any non-OAuth parameters."""
parameters = {}
for k, v in self.parameters.iteritems():
# Ignore oauth parameters.
if k.find('oauth_') < 0:
parameters[k] = v
return parameters
def to_header(self, realm=''):
"""Serialize as a header for an HTTPAuth request."""
auth_header = 'OAuth realm="%s"' % realm
# Add the oauth parameters.
if self.parameters:
for k, v in self.parameters.iteritems():
if k[:6] == 'oauth_':
auth_header += ', %s="%s"' % (k, escape(str(v)))
return {'Authorization': auth_header}
def to_postdata(self):
"""Serialize as post data for a POST request."""
return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) \
for k, v in self.parameters.iteritems()])
def to_url(self):
"""Serialize as a URL for a GET request."""
return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())
def get_normalized_parameters(self):
"""Return a string that contains the parameters that must be signed."""
params = self.parameters
try:
# Exclude the signature if it exists.
del params['oauth_signature']
except:
pass
# Escape key values before sorting.
key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v))) \
for k,v in params.items()]
# Sort lexicographically, first after key, then after value.
key_values.sort()
# Combine key value pairs into a string.
return '&'.join(['%s=%s' % (k, v) for k, v in key_values])
def get_normalized_http_method(self):
"""Uppercases the http method."""
return self.http_method.upper()
def get_normalized_http_url(self):
"""Parses the URL and rebuilds it to be scheme://host/path."""
parts = urlparse.urlparse(self.http_url)
scheme, netloc, path = parts[:3]
# Exclude default port numbers.
if scheme == 'http' and netloc[-3:] == ':80':
netloc = netloc[:-3]
elif scheme == 'https' and netloc[-4:] == ':443':
netloc = netloc[:-4]
return '%s://%s%s' % (scheme, netloc, path)
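    # Worked example (sketch): default ports are stripped and the query string
    # is dropped when normalizing the URL for the signature base string.
    #
    #     r = OAuthRequest(http_url='http://example.com:80/services/rest/?method=x')
    #     r.get_normalized_http_url()    # -> 'http://example.com/services/rest/'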
def sign_request(self, signature_method, consumer, token):
"""Set the signature parameter to the result of build_signature."""
# Set the signature method.
self.set_parameter('oauth_signature_method',
signature_method.get_name())
# Set the signature.
self.set_parameter('oauth_signature',
self.build_signature(signature_method, consumer, token))
def build_signature(self, signature_method, consumer, token):
"""Calls the build signature method within the signature method."""
return signature_method.build_signature(self, consumer, token)
def from_request(http_method, http_url, headers=None, parameters=None,
query_string=None):
"""Combines multiple parameter sources."""
if parameters is None:
parameters = {}
# Headers
if headers and 'Authorization' in headers:
auth_header = headers['Authorization']
# Check that the authorization header is OAuth.
if auth_header[:6] == 'OAuth ':
auth_header = auth_header[6:]
try:
# Get the parameters from the header.
header_params = OAuthRequest._split_header(auth_header)
parameters.update(header_params)
except:
raise OAuthError('Unable to parse OAuth parameters from '
'Authorization header.')
# GET or POST query string.
if query_string:
query_params = OAuthRequest._split_url_string(query_string)
parameters.update(query_params)
# URL parameters.
param_str = urlparse.urlparse(http_url)[4] # query
url_params = OAuthRequest._split_url_string(param_str)
parameters.update(url_params)
if parameters:
return OAuthRequest(http_method, http_url, parameters)
return None
from_request = staticmethod(from_request)
def from_consumer_and_token(oauth_consumer, token=None,
callback=None, verifier=None, http_method=HTTP_METHOD,
http_url=None, parameters=None):
if not parameters:
parameters = {}
defaults = {
'oauth_consumer_key': oauth_consumer.key,
'oauth_timestamp': generate_timestamp(),
'oauth_nonce': generate_nonce(),
'oauth_version': OAuthRequest.version,
}
defaults.update(parameters)
parameters = defaults
if token:
parameters['oauth_token'] = token.key
if token.callback:
parameters['oauth_callback'] = token.callback
# 1.0a support for verifier.
if verifier:
parameters['oauth_verifier'] = verifier
elif callback:
# 1.0a support for callback in the request token request.
parameters['oauth_callback'] = callback
return OAuthRequest(http_method, http_url, parameters)
from_consumer_and_token = staticmethod(from_consumer_and_token)
def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD,
http_url=None, parameters=None):
if not parameters:
parameters = {}
parameters['oauth_token'] = token.key
if callback:
parameters['oauth_callback'] = callback
return OAuthRequest(http_method, http_url, parameters)
from_token_and_callback = staticmethod(from_token_and_callback)
def _split_header(header):
"""Turn Authorization: header into parameters."""
params = {}
parts = header.split(',')
for param in parts:
# Ignore realm parameter.
if param.find('realm') > -1:
continue
# Remove whitespace.
param = param.strip()
# Split key-value.
param_parts = param.split('=', 1)
# Remove quotes and unescape the value.
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
return params
_split_header = staticmethod(_split_header)
def _split_url_string(param_str):
"""Turn URL string into parameters."""
parameters = cgi.parse_qs(param_str, keep_blank_values=False)
for k, v in parameters.iteritems():
parameters[k] = urllib.unquote(v[0])
return parameters
_split_url_string = staticmethod(_split_url_string)
class OAuthServer(object):
"""A worker to check the validity of a request against a data store."""
timestamp_threshold = 300 # In seconds, five minutes.
version = VERSION
signature_methods = None
data_store = None
def __init__(self, data_store=None, signature_methods=None):
self.data_store = data_store
self.signature_methods = signature_methods or {}
def set_data_store(self, data_store):
self.data_store = data_store
def get_data_store(self):
return self.data_store
def add_signature_method(self, signature_method):
self.signature_methods[signature_method.get_name()] = signature_method
return self.signature_methods
def fetch_request_token(self, oauth_request):
"""Processes a request_token request and returns the
request token on success.
"""
try:
# Get the request token for authorization.
token = self._get_token(oauth_request, 'request')
except OAuthError:
# No token required for the initial token request.
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
try:
callback = self.get_callback(oauth_request)
except OAuthError:
callback = None # 1.0, no callback specified.
self._check_signature(oauth_request, consumer, None)
# Fetch a new token.
token = self.data_store.fetch_request_token(consumer, callback)
return token
def fetch_access_token(self, oauth_request):
"""Processes an access_token request and returns the
access token on success.
"""
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
try:
verifier = self._get_verifier(oauth_request)
except OAuthError:
verifier = None
# Get the request token.
token = self._get_token(oauth_request, 'request')
self._check_signature(oauth_request, consumer, token)
new_token = self.data_store.fetch_access_token(consumer, token, verifier)
return new_token
def verify_request(self, oauth_request):
"""Verifies an api call and checks all the parameters."""
# -> consumer and token
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
# Get the access token.
token = self._get_token(oauth_request, 'access')
self._check_signature(oauth_request, consumer, token)
parameters = oauth_request.get_nonoauth_parameters()
return consumer, token, parameters
def authorize_token(self, token, user):
"""Authorize a request token."""
return self.data_store.authorize_request_token(token, user)
def get_callback(self, oauth_request):
"""Get the callback URL."""
return oauth_request.get_parameter('oauth_callback')
def build_authenticate_header(self, realm=''):
"""Optional support for the authenticate header."""
return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
def _get_version(self, oauth_request):
"""Verify the correct version request for this server."""
try:
version = oauth_request.get_parameter('oauth_version')
except:
version = VERSION
if version and version != self.version:
raise OAuthError('OAuth version %s not supported.' % str(version))
return version
def _get_signature_method(self, oauth_request):
"""Figure out the signature with some defaults."""
try:
signature_method = oauth_request.get_parameter(
'oauth_signature_method')
except:
signature_method = SIGNATURE_METHOD
try:
# Get the signature method object.
signature_method = self.signature_methods[signature_method]
except:
signature_method_names = ', '.join(self.signature_methods.keys())
            raise OAuthError('Signature method %s not supported, try one of '
                'the following: %s' % (signature_method, signature_method_names))
return signature_method
def _get_consumer(self, oauth_request):
consumer_key = oauth_request.get_parameter('oauth_consumer_key')
consumer = self.data_store.lookup_consumer(consumer_key)
if not consumer:
raise OAuthError('Invalid consumer.')
return consumer
def _get_token(self, oauth_request, token_type='access'):
"""Try to find the token for the provided request token key."""
token_field = oauth_request.get_parameter('oauth_token')
token = self.data_store.lookup_token(token_type, token_field)
if not token:
raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
return token
def _get_verifier(self, oauth_request):
return oauth_request.get_parameter('oauth_verifier')
def _check_signature(self, oauth_request, consumer, token):
timestamp, nonce = oauth_request._get_timestamp_nonce()
self._check_timestamp(timestamp)
self._check_nonce(consumer, token, nonce)
signature_method = self._get_signature_method(oauth_request)
try:
signature = oauth_request.get_parameter('oauth_signature')
except:
raise OAuthError('Missing signature.')
# Validate the signature.
valid_sig = signature_method.check_signature(oauth_request, consumer,
token, signature)
if not valid_sig:
key, base = signature_method.build_signature_base_string(
oauth_request, consumer, token)
raise OAuthError('Invalid signature. Expected signature base '
'string: %s' % base)
built = signature_method.build_signature(oauth_request, consumer, token)
def _check_timestamp(self, timestamp):
"""Verify that timestamp is recentish."""
timestamp = int(timestamp)
now = int(time.time())
lapsed = abs(now - timestamp)
if lapsed > self.timestamp_threshold:
raise OAuthError('Expired timestamp: given %d and now %s has a '
'greater difference than threshold %d' %
(timestamp, now, self.timestamp_threshold))
def _check_nonce(self, consumer, token, nonce):
"""Verify that the nonce is uniqueish."""
nonce = self.data_store.lookup_nonce(consumer, token, nonce)
if nonce:
raise OAuthError('Nonce already used: %s' % str(nonce))
class OAuthClient(object):
"""OAuthClient is a worker to attempt to execute a request."""
consumer = None
token = None
def __init__(self, oauth_consumer, oauth_token):
self.consumer = oauth_consumer
self.token = oauth_token
def get_consumer(self):
return self.consumer
def get_token(self):
return self.token
def fetch_request_token(self, oauth_request):
"""-> OAuthToken."""
raise NotImplementedError
def fetch_access_token(self, oauth_request):
"""-> OAuthToken."""
raise NotImplementedError
def access_resource(self, oauth_request):
"""-> Some protected resource."""
raise NotImplementedError
class OAuthDataStore(object):
"""A database abstraction used to lookup consumers and tokens."""
def lookup_consumer(self, key):
"""-> OAuthConsumer."""
raise NotImplementedError
def lookup_token(self, oauth_consumer, token_type, token_token):
"""-> OAuthToken."""
raise NotImplementedError
def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
"""-> OAuthToken."""
raise NotImplementedError
def fetch_request_token(self, oauth_consumer, oauth_callback):
"""-> OAuthToken."""
raise NotImplementedError
def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier):
"""-> OAuthToken."""
raise NotImplementedError
def authorize_request_token(self, oauth_token, user):
"""-> OAuthToken."""
raise NotImplementedError
class OAuthSignatureMethod(object):
"""A strategy class that implements a signature method."""
def get_name(self):
"""-> str."""
raise NotImplementedError
def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
"""-> str key, str raw."""
raise NotImplementedError
def build_signature(self, oauth_request, oauth_consumer, oauth_token):
"""-> str."""
raise NotImplementedError
def check_signature(self, oauth_request, consumer, token, signature):
built = self.build_signature(oauth_request, consumer, token)
return built == signature
class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
def get_name(self):
return 'HMAC-SHA1'
def build_signature_base_string(self, oauth_request, consumer, token):
sig = (
escape(oauth_request.get_normalized_http_method()),
escape(oauth_request.get_normalized_http_url()),
escape(oauth_request.get_normalized_parameters()),
)
key = '%s&' % escape(consumer.secret)
if token:
key += escape(token.secret)
raw = '&'.join(sig)
return key, raw
def build_signature(self, oauth_request, consumer, token):
"""Builds the base signature string."""
key, raw = self.build_signature_base_string(oauth_request, consumer,
token)
# HMAC object.
try:
import hashlib # 2.5
hashed = hmac.new(key, raw, hashlib.sha1)
except:
import sha # Deprecated
hashed = hmac.new(key, raw, sha)
# Calculate the digest base 64.
return binascii.b2a_base64(hashed.digest())[:-1]
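# Sketch (illustrative; keys and URL are made up): signing a request with
# HMAC-SHA1.  The signing key is 'consumer_secret&token_secret' and the raw
# string is 'METHOD&URL&sorted-parameters', all URL-escaped.
#
#     consumer = OAuthConsumer('ckey', 'csecret')
#     token = OAuthToken('tkey', 'tsecret')
#     request = OAuthRequest.from_consumer_and_token(
#         consumer, token=token, http_method='POST',
#         http_url='http://example.com/services/rest/',
#         parameters={'method': 'anymeta.user.info'})
#     request.sign_request(OAuthSignatureMethod_HMAC_SHA1(), consumer, token)
#     request.parameters['oauth_signature']    # base64-encoded HMAC-SHA1 digest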
class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
def get_name(self):
return 'PLAINTEXT'
def build_signature_base_string(self, oauth_request, consumer, token):
"""Concatenates the consumer key and secret."""
sig = '%s&' % escape(consumer.secret)
if token:
sig = sig + escape(token.secret)
return sig, sig
def build_signature(self, oauth_request, consumer, token):
key, raw = self.build_signature_base_string(oauth_request, consumer,
token)
return key
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/api/oauth/oauth.py
|
oauth.py
|
import cgi
import simplejson
from time import gmtime, strftime
from twisted.internet import defer
from twisted.python import log
from twisted.web2 import http, http_headers, resource, responsecode
from twisted.web2.stream import readStream
from twisted.words.protocols.jabber.error import StanzaError
from twisted.words.protocols.jabber.jid import internJID as JID
from twisted.words.xish import domish
from wokkel import disco, pubsub
from wokkel.generic import parseXml
from anymeta.api.base import AnyMetaException
NS_ATOM = 'http://www.w3.org/2005/Atom'
MIME_ATOM_ENTRY = http_headers.MimeType('application',
'atom+xml',
{'type': 'entry',
'charset': 'utf-8'})
MIME_JSON = http_headers.MimeType('application', 'json')
class Error(Exception):
pass
class BadRequestError(Error):
pass
class MissingMediaTypeError(Error):
pass
class UnsupportedMediaTypeError(Error):
pass
class ResourceNotLocal(Error):
pass
class NodeNotFound(Error):
"""
Node not found.
"""
class NotSubscribed(Error):
"""
Entity is not subscribed to this node.
"""
class SubscriptionExists(Error):
"""
There already exists a subscription to this node.
"""
class Forbidden(Error):
pass
class XMPPURIParseError(ValueError):
"""
Raised when a given XMPP URI couldn't be properly parsed.
"""
_excToHTTPStatusMap = {
NodeNotFound:
(responsecode.FORBIDDEN, "Node not found"),
NotSubscribed:
(responsecode.FORBIDDEN, "No such subscription found"),
SubscriptionExists:
(responsecode.FORBIDDEN, "Subscription already exists"),
BadRequestError:
(responsecode.BAD_REQUEST, "Bad request"),
MissingMediaTypeError:
(responsecode.BAD_REQUEST, "Media type not specified"),
UnsupportedMediaTypeError:
(responsecode.UNSUPPORTED_MEDIA_TYPE, "Unsupported media type"),
XMPPURIParseError:
(responsecode.BAD_REQUEST, "Malformed XMPP URI"),
}
def excToHTTPStatus(failure):
"""
Convert an exception to an appropriate HTTP status response.
"""
e = failure.trap(*_excToHTTPStatusMap.keys())
code, description = _excToHTTPStatusMap[e]
msg = str(failure.value)
if msg:
description = "%s: %s" % (description, msg)
return http.StatusResponse(code, description)
def getServiceAndNode(uri):
"""
Given an XMPP URI, extract the publish subscribe service JID and node ID.
"""
try:
scheme, rest = uri.split(':', 1)
except ValueError:
raise XMPPURIParseError("No URI scheme component")
if scheme != 'xmpp':
raise XMPPURIParseError("Unknown URI scheme")
if rest.startswith("//"):
raise XMPPURIParseError("Unexpected URI authority component")
try:
entity, query = rest.split('?', 1)
except ValueError:
raise XMPPURIParseError("No URI query component")
if not entity:
raise XMPPURIParseError("Empty URI path component")
try:
service = JID(entity)
except Exception, e:
raise XMPPURIParseError("Invalid JID: %s" % e)
params = cgi.parse_qs(query)
try:
nodeIdentifier = params['node'][0]
except (KeyError, ValueError):
nodeIdentifier = ''
return service, nodeIdentifier
def getXMPPURI(service, nodeIdentifier):
"""
Construct an XMPP URI from a service JID and node identifier.
"""
return "xmpp:%s?;node=%s" % (service.full(), nodeIdentifier or '')
def extractAtomEntries(items):
"""
Extract atom entries from a list of publish-subscribe items.
@param items: List of L{domish.Element}s that represent publish-subscribe
items.
@type items: C{list}
"""
atomEntries = []
for item in items:
# ignore non-items (i.e. retractions)
if item.name != 'item':
continue
atomEntry = None
for element in item.elements():
# extract the first element that is an atom entry
if element.uri == NS_ATOM and element.name == 'entry':
atomEntry = element
break
if atomEntry:
atomEntries.append(atomEntry)
return atomEntries
def constructFeed(service, nodeIdentifier, entries, title):
nodeURI = getXMPPURI(service, nodeIdentifier)
now = strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
# Collect the received entries in a feed
feed = domish.Element((NS_ATOM, 'feed'))
feed.addElement('title', content=title)
feed.addElement('id', content=nodeURI)
feed.addElement('updated', content=now)
for entry in entries:
feed.addChild(entry)
return feed
class WebStreamParser(object):
def __init__(self):
self.elementStream = domish.elementStream()
self.elementStream.DocumentStartEvent = self.docStart
self.elementStream.ElementEvent = self.elem
self.elementStream.DocumentEndEvent = self.docEnd
self.done = False
def docStart(self, elem):
self.document = elem
def elem(self, elem):
self.document.addChild(elem)
def docEnd(self):
self.done = True
def parse(self, stream):
def endOfStream(result):
if not self.done:
raise Exception("No more stuff?")
else:
return self.document
d = readStream(stream, self.elementStream.parse)
d.addCallback(endOfStream)
return d
class AnyMetaPubSubAPI(object):
"""
Abstraction of the services provided by the anyMeta PubSub module.
@ivar apis: Mapping of domains to L{AnyMetaAPI} instances.
@type apis: C{dict}
"""
_errorMap = {
'pubsub.subscribers.subscribe': {
4: NodeNotFound,
5: ResourceNotLocal,
},
'pubsub.subscribers.unsubscribe': {
4: NodeNotFound,
5: NotSubscribed,
},
}
def __init__(self, apis):
self.apis = apis
def _mapErrors(self, failure, method):
failure.trap(AnyMetaException)
e = failure.value
try:
raise self._errorMap[method][e.code](str(e))
except KeyError:
if e.code == 99:
raise Forbidden(str(e))
else:
raise Error(str(e))
def getSubscribers(self, service, nodeIdentifier):
"""
Get the subscribers to a node.
@param service: JID of the service the node is located at.
@type service: L{jid.JID}
@param nodeIdentifier: Identifier of the node.
@type: C{unicode}
@return: Deferred that fires with a C{list} of L{jid.JID}s.
@rtype: L{defer.Deferred}
"""
def cb(result):
for sub in result:
if sub['status'] == 'subscribed':
yield JID('%s/%s' % (sub['jid_entity'],
sub['jid_resource']))
method = 'pubsub.subscribers.bynode'
d = self.apis[service.host].doMethod(method, {'node': nodeIdentifier})
d.addCallback(cb)
d.addErrback(self._mapErrors, method)
return d
def getSubscriptions(self, service, entity):
"""
Get the subscriptions for an entity.
@param service: JID of the service the nodes are located at.
@type service: L{jid.JID}
@param entity: The entity to request the subscriptions for.
@type entity: L{jid.JID}
@return: Deferred that fires with a generator yielding
L{pubsub.Subscription}s.
@rtype: L{defer.Deferred}
"""
def cb(result):
for sub in result:
subscriber = JID('%s/%s' % (entity,
sub['jid_resource']))
yield pubsub.Subscription(sub['node'],
subscriber,
sub['status'])
method = 'pubsub.subscribers.bysubscriber'
d = self.apis[service.host].doMethod(method,
{'jid_entity': entity.full()})
d.addCallback(cb)
d.addErrback(self._mapErrors, method)
return d
def subscribe(self, service, nodeIdentifier, subscriber):
"""
Subscribe entity to a node.
@return: A deferred that fires with the subscription state as
L{pubsub.Subscription}.
@rtype: L{defer.Deferred}
"""
def cb(result):
subscription = pubsub.Subscription(nodeIdentifier,
subscriber,
result['status'])
subscription.new = result.get('created', False)
return subscription
method = 'pubsub.subscribers.subscribe'
d = self.apis[service.host].doMethod(method,
{'node': nodeIdentifier,
'jid_entity': subscriber.userhost(),
'jid_resource': subscriber.resource or ''})
d.addCallback(cb)
d.addErrback(self._mapErrors, method)
return d
def unsubscribe(self, service, nodeIdentifier, subscriber):
"""
Unsubscribe an entity from a node.
@return: A deferred that fires when unsubscription is complete.
@rtype: L{defer.Deferred}
"""
method = 'pubsub.subscribers.unsubscribe'
d = self.apis[service.host].doMethod(method,
{'node': nodeIdentifier,
'jid_entity': subscriber.userhost(),
'jid_resource': subscriber.resource or ''})
d.addErrback(self._mapErrors, method)
return d
def items(self, service, nodeIdentifier):
"""
Retrieve items published to a node.
"""
def cb(data):
payload = parseXml(data)
return [pubsub.Item('current', payload)]
method = 'pubsub.items'
d = self.apis[service.host].doMethod(method,
{'node': nodeIdentifier},
format='atom')
d.addCallback(cb)
d.addErrback(self._mapErrors, method)
return d
def notify(self, recipient, service, nodeIdentifier,
payload, contentType, headers):
method = 'pubsub.notify'
nodeURI = getXMPPURI(service, nodeIdentifier)
requestHeaders = {'Referer': nodeURI.encode('utf-8'),
'Content-Type': "%s;charset=utf-8" % contentType}
if 'Collection' in headers:
requestHeaders['Collection'] = ','.join(headers['Collection'])
postdata = payload.toXml().encode('utf-8')
d = self.apis[recipient.host].doMethod(method,
{'uri': nodeURI},
headers=requestHeaders,
data=postdata)
d.addErrback(self._mapErrors, method)
d.addErrback(log.err)
return d
def delete(self, recipient, service, nodeIdentifier, redirectURI):
print "In delete"
print recipient, service, nodeIdentifier, redirectURI
method = 'pubsub.notify'
nodeURI = getXMPPURI(service, nodeIdentifier)
requestHeaders = {'Referer': nodeURI.encode('utf-8'),
'Event': 'DELETED'}
if redirectURI:
requestHeaders['Link'] = \
'<%s>; rel=alternate' % redirectURI.encode('utf-8')
d = self.apis[recipient.host].doMethod(method,
{'uri': nodeURI},
headers=requestHeaders,
data=None)
d.addErrback(self._mapErrors, method)
d.addErrback(log.err)
return d
class AnyMetaBackend(pubsub.PubSubResource):
"""
Publish-subscribe backend to anyMeta.
@ivar api: The anyMeta PubSub API.
@type api: L{AnyMetaPubSubAPI}.
"""
features = ["persistent-items",
"retrieve_items",
"retrieve_subscriptions",
"subscribe",
]
discoIdentity = disco.DiscoIdentity('pubsub', 'service',
'anyMeta publish-subscribe service')
pubsubService = None
_errorMap = {
NodeNotFound: ('item-not-found', None, None),
ResourceNotLocal: ('feature-not-implemented',
'unsupported',
'subscribe'),
NotSubscribed: ('unexpected-request', 'not-subscribed', None),
}
def __init__(self, api):
self.api = api
def _mapErrors(self, failure):
e = failure.trap(*self._errorMap.keys())
condition, pubsubCondition, feature = self._errorMap[e]
try:
msg = failure.value.msg
except:
msg = None
if pubsubCondition:
exc = pubsub.PubSubError(condition, pubsubCondition, feature, msg)
else:
exc = StanzaError(condition, text=msg)
raise exc
def subscribe(self, request):
"""
Request the subscription of an entity to a pubsub node.
@return: A deferred that fires with the subscription state as
L{pubsub.Subscription}.
@rtype: L{defer.Deferred}
"""
def notify(items, subscription):
if not items:
return
notifications = [(request.subscriber, [subscription], items)]
self.pubsubService.notifyPublish(request.recipient,
request.nodeIdentifier,
notifications)
def checkNewSubscription(subscription):
if (subscription.state == 'subscribed' and subscription.new):
d = self.api.items(request.recipient, request.nodeIdentifier)
d.addCallback(notify, subscription)
d.addErrback(log.err)
return subscription
if request.subscriber.userhostJID() != request.sender.userhostJID():
return defer.fail(Forbidden())
d = self.api.subscribe(request.recipient, request.nodeIdentifier,
request.subscriber)
d.addCallback(checkNewSubscription)
d.addErrback(self._mapErrors)
return d
def unsubscribe(self, request):
"""
Cancel the subscription of an entity to a pubsub node.
@return: A deferred that fires when unsubscription is complete.
@rtype: L{defer.Deferred}
"""
if request.subscriber.userhostJID() != request.sender.userhostJID():
return defer.fail(Forbidden())
d = self.api.unsubscribe(request.recipient, request.nodeIdentifier,
request.subscriber)
d.addErrback(self._mapErrors)
return d
def subscriptions(self, request):
"""
Get current subscriptions for an entity.
@return: Deferred that fires with a generator yielding
L{pubsub.Subscription}s.
@rtype: L{defer.Deferred}
"""
entity = request.sender.userhostJID()
d = self.api.getSubscriptions(request.recipient, entity)
d.addErrback(self._mapErrors)
return d
def items(self, request):
"""
Called upon an items request by a remote entity.
"""
d = self.api.items(request.recipient, request.nodeIdentifier)
d.addErrback(self._mapErrors)
return d
def notifyPublish(self, service, nodeIdentifier, subscriptions, items):
"""
Send out notifications for items published to a node.
@param service: JID of the service the node is located at.
@type service: L{jid.JID}
@param nodeIdentifier: Identifier of the node.
@type: C{unicode}
@rtype: L{defer.Deferred}
"""
def createNotifications():
for subscription in subscriptions:
yield (subscription.subscriber,
[subscription],
items)
self.pubsubService.notifyPublish(service,
nodeIdentifier,
createNotifications())
def notifyDelete(self, service, nodeIdentifier, subscribers,
redirectURI=None):
"""
Send out notifications for items published to a node.
@param service: JID of the service the node is located at.
@type service: L{jid.JID}
@param nodeIdentifier: Identifier of the node.
@type: C{unicode}
@rtype: L{defer.Deferred}
"""
self.pubsubService.notifyDelete(service,
nodeIdentifier,
subscribers,
redirectURI)
def checkMime(request, mimeType):
"""
Check the MIME type of the request.
"""
ctype = request.headers.getHeader('content-type')
if not ctype:
raise MissingMediaTypeError()
if ctype != mimeType:
ctypeString = http_headers.generateContentType(ctype)
raise UnsupportedMediaTypeError(ctypeString)
return request
def loadJSONFromStream(request):
"""
Load a JSON object from the stream of a web request.
"""
content = []
def loadJSON(content):
try:
return simplejson.loads(content)
except Exception:
log.err()
raise BadRequestError("Passed document is not proper JSON")
d = readStream(request.stream, content.append)
d.addCallback(lambda _: ''.join((str(item) for item in content)))
d.addCallback(loadJSON)
return d
class NotifyPublishResource(resource.Resource):
"""
A resource to publish to a publish-subscribe node.
"""
def __init__(self, backend):
self.backend = backend
http_GET = None
def http_POST(self, request):
"""
Respond to a POST request to create a new item.
"""
def subscriptionsFromDict(subscriptions, nodeIdentifier):
for subscription in subscriptions:
subscriber = JID("%s/%s" % (subscription['jid_entity'],
subscription['jid_resource']))
if 'node' in subscription:
node = subscription['node']
else:
node = nodeIdentifier
yield pubsub.Subscription(node,
subscriber,
subscription['status'])
def createPubSubItems(items):
for item in items:
try:
content = item['payload']
except KeyError:
payload = None
else:
payload = parseXml(content.encode('utf-8'))
itemIdentifier = item.get('id', 'current')
yield pubsub.Item(itemIdentifier, payload)
def doNotify(params):
try:
uri = params['uri']
except KeyError:
raise BadRequestError("Missing 'uri' parameter")
else:
service, nodeIdentifier = getServiceAndNode(uri)
try:
subscriptions = params['subscriptions']
except KeyError:
raise BadRequestError("Missing 'subscriptions' parameter")
else:
subscriptions = subscriptionsFromDict(subscriptions,
nodeIdentifier)
try:
items = params['items']
except KeyError:
raise BadRequestError("Missing 'items' parameter")
else:
items = list(createPubSubItems(items))
return self.backend.notifyPublish(service, nodeIdentifier,
subscriptions, items)
d = defer.succeed(request)
d.addCallback(checkMime, MIME_JSON)
d.addCallback(loadJSONFromStream)
d.addCallback(doNotify)
d.addCallback(lambda _: http.Response(responsecode.NO_CONTENT))
d.addErrback(excToHTTPStatus)
return d
class NotifyDeleteResource(resource.Resource):
"""
A resource to publish to a publish-subscribe node.
"""
def __init__(self, backend):
self.backend = backend
http_GET = None
def http_POST(self, request):
"""
Respond to a POST request to create a new item.
"""
def doNotify(params):
try:
uri = params['uri']
except KeyError:
raise BadRequestError("Missing 'uri' parameter")
else:
service, nodeIdentifier = getServiceAndNode(uri)
try:
subscriptions = params['subscriptions']
except KeyError:
raise BadRequestError("Missing 'subscriptions' parameter")
else:
subscriptions = (JID("%s/%s" % (subscription['jid_entity'],
subscription['jid_resource']))
for subscription in subscriptions)
redirectURI = params.get('redirect_uri', None)
return self.backend.notifyDelete(service, nodeIdentifier,
subscriptions, redirectURI)
d = defer.succeed(request)
d.addCallback(checkMime, MIME_JSON)
d.addCallback(loadJSONFromStream)
d.addCallback(doNotify)
d.addCallback(lambda _: http.Response(responsecode.NO_CONTENT))
d.addErrback(excToHTTPStatus)
return d
class AnyMetaPubSubClient(pubsub.PubSubClient):
"""
Publish-subscribe client for anyMeta.
    Received item notifications are POSTed back to anyMeta over the API.
"""
def __init__(self, api):
self.api = api
def itemsReceived(self, event):
"""
Fire up HTTP client to do callback
"""
atomEntries = extractAtomEntries(event.items)
# Don't notify if there are no atom entries
if not atomEntries:
return
if len(atomEntries) == 1:
contentType = 'application/atom+xml;type=entry'
payload = atomEntries[0]
else:
contentType = 'application/atom+xml;type=feed'
payload = constructFeed(event.sender,
event.nodeIdentifier,
atomEntries,
title='Received item collection')
self.api.notify(event.recipient, event.sender, event.nodeIdentifier,
payload, contentType, event.headers)
def deleteReceived(self, event):
"""
Fire up HTTP client to do callback
"""
print "deleteReceived"
print event.__dict__
self.api.delete(event.recipient, event.sender, event.nodeIdentifier,
event.redirectURI)
class RemoteSubscribeBaseResource(resource.Resource):
"""
Base resource for remote pubsub node subscription and unsubscription.
This resource accepts POST request with a JSON document that holds a
dictionary with the key C{uri} the XMPP URI of the publish-subscribe node.
This class should be inherited with L{backendMethod} overridden.
@cvar backendMethod: The name of the method to be called with
the JID of the pubsub service, the node identifier
and the callback URI as received in the HTTP POST
request to this resource.
"""
clientMethod = None
def __init__(self, client):
self.client = client
http_GET = None
def http_POST(self, request):
def gotRequest(params):
subscriber = JID(params['subscriber'])
uri = params['uri']
service, nodeIdentifier = getServiceAndNode(uri)
method = getattr(self.client, self.clientMethod)
d = method(service, nodeIdentifier, subscriber, sender=subscriber)
return d
d = defer.succeed(request)
d.addCallback(checkMime, MIME_JSON)
d.addCallback(loadJSONFromStream)
d.addCallback(gotRequest)
d.addCallback(lambda _: http.Response(responsecode.NO_CONTENT))
d.addErrback(excToHTTPStatus)
return d
class RemoteSubscribeResource(RemoteSubscribeBaseResource):
"""
Resource to subscribe to a remote publish-subscribe node.
"""
clientMethod = 'subscribe'
class RemoteUnsubscribeResource(RemoteSubscribeBaseResource):
"""
Resource to unsubscribe from a remote publish-subscribe node.
"""
clientMethod = 'unsubscribe'
class RemoteItemsResource(resource.Resource):
"""
Resource for retrieving items from a remote pubsub node.
"""
def __init__(self, client):
self.client = client
def render(self, request):
try:
maxItems = int(request.args.get('max_items', [0])[0]) or None
except ValueError:
return http.StatusResponse(responsecode.BAD_REQUEST,
"The argument max_items has an invalid value.")
try:
uri = request.args['uri'][0]
except KeyError:
return http.StatusResponse(responsecode.BAD_REQUEST,
"No URI for the remote node provided.")
try:
subscriber = JID(request.args['subscriber'][0])
except KeyError:
return http.StatusResponse(responsecode.BAD_REQUEST,
"No URI for the remote node provided.")
try:
service, nodeIdentifier = getServiceAndNode(uri)
except XMPPURIParseError:
return http.StatusResponse(responsecode.BAD_REQUEST,
"Malformed XMPP URI: %s" % uri)
def respond(items):
"""Create a feed out the retrieved items."""
contentType = http_headers.MimeType('application',
'atom+xml',
{'type': 'feed'})
atomEntries = extractAtomEntries(items)
feed = constructFeed(service, nodeIdentifier, atomEntries,
"Retrieved item collection")
payload = feed.toXml().encode('utf-8')
return http.Response(responsecode.OK, stream=payload,
headers={'Content-Type': contentType})
def trapNotFound(failure):
failure.trap(StanzaError)
if not failure.value.condition == 'item-not-found':
raise failure
return http.StatusResponse(responsecode.NOT_FOUND,
"Node not found")
d = self.client.items(service, nodeIdentifier, maxItems,
sender=subscriber)
d.addCallback(respond)
d.addErrback(trapNotFound)
return d
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/xmpp/service.py
|
service.py
|
from ConfigParser import ConfigParser
from twisted.application import service, strports
from twisted.python import usage
from twisted.web2 import channel, log, resource, server
from twisted.web2.tap import Web2Service
from wokkel.component import InternalComponent, Router
from wokkel.disco import DiscoHandler
from wokkel.generic import FallbackHandler, VersionHandler
from wokkel.pubsub import PubSubService
from wokkel.server import ServerService, XMPPS2SServerFactory
from anymeta import manhole
from anymeta.api.base import AnyMetaAPI
from anymeta.xmpp import service as anyservice
__version__ = '0.0.1'
class Options(usage.Options):
optParameters = [
('port', None, '5269', 'Server-to-server port'),
('config', 'c', '~/.anyxmpp.conf', 'Configuration file'),
('webport', None, '8088', 'Web port'),
('manhole-port', None, 'tcp:2224:interface=127.0.0.1',
'Manhole port'),
]
optFlags = [
('verbose', 'v', 'Show traffic'),
]
def postOptions(self):
import os
parser = ConfigParser()
cfgfile = os.path.expanduser(self['config'])
if not os.path.exists(cfgfile):
raise Exception("Missing configuration file: %s " % cfgfile)
parser.read(cfgfile)
self['config'] = parser
def makeService(config):
s = service.MultiService()
# Set up anyMeta service
apis = {}
domains = set()
for domain in config['config'].sections():
domains.add(domain)
entry = config['config'].get(domain, 'api_key')
engine = 'twisted'
apis[domain] = AnyMetaAPI.from_registry(entry, engine=engine)
api = anyservice.AnyMetaPubSubAPI(apis)
# Set up XMPP server
router = Router()
serverService = ServerService(router)
if config["verbose"]:
serverService.logTraffic = True
s2sFactory = XMPPS2SServerFactory(serverService)
if config["verbose"]:
s2sFactory.logTraffic = True
s2sService = strports.service(config['port'], s2sFactory)
s2sService.setServiceParent(s)
# Set up XMPP server-side component
cs = InternalComponent(router)
cs.setName('component')
cs.setServiceParent(s)
if config["verbose"]:
cs.logTraffic = True
FallbackHandler().setHandlerParent(cs)
VersionHandler('anyXMPP', __version__).setHandlerParent(cs)
DiscoHandler().setHandlerParent(cs)
# Set up domains
for domain in domains:
serverService.domains.add(domain)
cs.domains.add(domain)
# Hook up XMPP Publish-subscribe service adaptor to the backend
bs = anyservice.AnyMetaBackend(api)
ps = PubSubService(bs)
ps.setHandlerParent(cs)
bs.pubsubService = ps
# Hook up XMPP Publish-subscribe client adaptor to the backend
pc = anyservice.AnyMetaPubSubClient(api)
pc.setHandlerParent(cs)
# Set up web service
root = resource.Resource()
# Set up resources that exposes the backend
root.child_notify = anyservice.NotifyPublishResource(bs)
root.child_delete = anyservice.NotifyDeleteResource(bs)
# Set up resources for accessing remote pubsub nodes.
root.child_subscribe = anyservice.RemoteSubscribeResource(pc)
root.child_unsubscribe = anyservice.RemoteUnsubscribeResource(pc)
root.child_items = anyservice.RemoteItemsResource(pc)
if config["verbose"]:
root = log.LogWrapperResource(root)
site = server.Site(root)
w = strports.service(config['webport'], channel.HTTPFactory(site))
if config["verbose"]:
logObserver = log.DefaultCommonAccessLoggingObserver()
w2s = Web2Service(logObserver)
w.setServiceParent(w2s)
w = w2s
w.setServiceParent(s)
# Set up a manhole
namespace = {'service': s,
'component': cs,
'backend': bs,
'site': site,
'webService': w,
'root': root}
manholeFactory = manhole.getFactory(namespace, admin='admin')
manholeService = strports.service(config['manhole-port'], manholeFactory)
manholeService.setServiceParent(s)
return s
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/xmpp/tap.py
|
tap.py
|
import gtk, base
class AvailabilityWidget (gtk.VBox):
"""
A widget which represents the state of the L{AvailabilityService}
it connects to.
It consists of a series of rows, for each component a row with the
component's caption and an icon representing its state.
Below that is a text area which shows help texts for every
component that is not in the"OK" state.
"""
stock_map = { base.OK: "gtk-apply",
base.NOTICE: "gtk-about",
base.WARNING: "gtk-dialog-warning",
base.ERROR: "gtk-stop",
None: "gtk-about" }
images = None
def __init__(self, service):
"""
Initialize the widget. In the constructor the
L{AvailabilityService} is given as only argument.
"""
gtk.VBox.__init__(self)
self.service = service
self.service.widgets.append(self)
self.set_size_request(200, 160)
self.images = {}
self.rebuild()
self.refresh()
def rebuild(self):
"""
        Rebuild the widget. Call this when the number of components in the L{AvailabilityService} has changed.
"""
for c in self.get_children():
self.remove(c)
self.images = {}
for c in self.service.components:
v = gtk.HBox()
l = gtk.Label(c.caption)
l.set_property("xalign", 0)
v.pack_start(l, True, True, 10)
im = gtk.Image()
im.set_from_stock("gtk-missing-image", gtk.ICON_SIZE_MENU)
v.pack_start(im, False, True, 10)
self.pack_start(v, False, False, 10)
self.images[c.name] = im
self.pack_start(gtk.HSeparator(), False, True, 0)
self.textbox = gtk.Label("fasd fasdljf asdklfj asdklfj lads")
self.textbox.set_property("use_markup", True)
self.textbox.set_property("xpad", 10)
self.textbox.set_property("xalign", 0)
self.textbox.set_property("yalign", 0)
self.textbox.set_property("wrap", True)
self.textbox.set_property("width_request", 160)
self.pack_start(self.textbox, True, True, 10)
def refresh(self):
"""
Refresh the state of the widget. This is called automatically
by the L{AvailabilityService} when a change in the
availability has happened.
"""
state = dict(self.service.getStateFull())
for name in self.images:
self.images[name].set_from_stock(self.stock_map[state[name]['state']], gtk.ICON_SIZE_MENU)
txt = ""
for name in state:
if state[name]['state'] == base.OK:
continue # do not add "OK" states to the explanation texts
txt += "<b>" + state[name]['caption'] + "</b>\n"
txt += state[name]['message'].strip()
txt += "\n\n"
self.textbox.set_label(txt)
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/availability/gui.py
|
gui.py
|
from twisted.application import service
from zope.interface import implements
from fizzjik import event, interfaces
# the different availability levels that are known.
OK = 0
NOTICE = 1
WARNING = 2
ERROR = 3
class AvailabilityChangedEvent(event.Event):
"""
A generic event which is thrown when the availability of the
system has changed. In the "data" field of the event, the
component is given which was responsible for the change.
"""
pass
class AvailabilityService (service.MultiService):
"""
A service which manages several availability components which each
contribute to the general availability of the system.
"""
implements(interfaces.IController)
components = None
event_mappings = None
widgets = None
components_by_name = None
def __init__(self):
service.MultiService.__init__(self)
self.components = []
self.event_mappings = []
self.widgets = []
self.components_by_name = {}
def getState(self):
"""
Return list (component name, state) tuples in which state is
one of OK, NOTICE, WARNING, ERROR
"""
return [(c.name, c.getState()) for c in self.components]
def getPercentage(self):
"""
Returns the availability of the system, measured as a
percentage. 100% = fully functional, 0% = completely broken.
"""
if not len(self.components):
return 100.
total = ERROR * len(self.components)
av = total - sum([x[1] for x in self.getState()])
return 100 * av/float(total)
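    # Worked example (sketch): with two components, one OK (0) and one
    # WARNING (2), total = ERROR * 2 = 6 and av = 6 - (0 + 2) = 4, so
    # getPercentage() reports 100 * 4 / 6.0, roughly 66.7%.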
def getStateFull(self):
"""
Returns a dictionary with full state information per
component. For each component it returns a dict with: state,
caption, message, solution.
"""
result = []
for c in self.components:
r = dict(state = c.getState())
r['caption'] = c.caption
r['message'], r['solution'] = c.getHelp()
r['component'] = c
result.append((c.name, r))
return result
def addComponent(self, component):
"""
Add a L{AvailabilityComponent} to the monitoring service.
"""
self.components.append(component)
self.components_by_name[component.name] = component
component.setParent(self)
def registerObservers(self, hub):
"""
Implementation function for L{fizzjik.interfaces.IController}:
registers all events from the controllers and sets up the
dispatcher functions in the hub.
"""
for evt, cb in self.event_mappings:
hub.addObserver(evt, cb)
pass
def addEvent(self, evt, cb):
"""
Internal function, called by the L{AvailabilityComponent}s to setup event handlers.
"""
self.event_mappings.append((evt, cb))
def observe(self, event):
"""
Convenience function which passes "observe" through to parent (which is supposed to be a L{fizzjik.Hub})
"""
self.parent.observe(event)
def availabilityChanged(self, component):
"""
Called by any of the components when the availability
changed. This fires an L{AvailabilityChangedEvent}.
"""
self.parent.observe(AvailabilityChangedEvent(component))
for w in self.widgets:
w.refresh()
def __getitem__(self, k):
"""
Retrieve the component
"""
return self.components_by_name[k]
class AvailabilityComponent:
"""
A component which is part of the system availability.
"""
# general name for the component, e.g., 'power', 'internet'. Lowercase, no spaces.
name = None
# caption of this component
caption = "Genereric availability component"
    # the parent (an AvailabilityService); set by AvailabilityService.addComponent
parent = None
def __init__(self, **kw):
if 'name' in kw:
self.name = kw['name']
def setParent(self, p):
"""
Set parent. This is the place where to hook up events and
start services for the component.
"""
self.parent = p
def getState(self):
"""
Return one of OK, NOTICE, WARNING, ERROR
"""
raise Exception("Implement me")
def getHelp(self):
"""
        Return a tuple of two texts: error message, possible
        solution. If there is no solution, None should be
        returned instead of a message.
"""
raise Exception("Implement me")
if __name__ == "__builtin__":
from fizzjik import hub
from twisted.application import service
application = service.Application("test")
hub = hub.Hub()
hub.setServiceParent(application)
# Create service and add some components which we wanna check
s = AvailabilityService()
import component
    s.addComponent(component.PowerComponent())
    s.addComponent(component.InternetComponent())
s.setServiceParent(hub)
import dbus.mainloop.glib
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/availability/base.py
|
base.py
|
from twisted.internet import task
from twisted.python import log
import base
class PowerComponent(base.AvailabilityComponent):
"""
Availability component which monitors the power of the computer.
"""
name = "power"
caption = "Power connection"
powerAvailable = True
powerLow = False
def setParent(self, parent):
base.AvailabilityComponent.setParent(self, parent)
from fizzjik.input import power
self.parent.addEvent(power.PowerAvailableEvent, self.onPowerAvailable)
self.parent.addEvent(power.LowPowerEvent, self.onLowPower)
svc = power.PowerMonitor()
svc.setServiceParent(self.parent)
def onPowerAvailable(self, event):
changed = self.powerAvailable != event.data
self.powerAvailable = event.data
if changed:
self.parent.availabilityChanged(self)
def onLowPower(self, event):
changed = self.powerLow != event.data
on_battery = event.data
self.powerLow = on_battery
if changed:
self.parent.availabilityChanged(self)
def getState(self):
if self.powerLow:
return base.ERROR
if not self.powerAvailable:
return base.WARNING
return base.OK
def getHelp(self):
if self.powerLow:
return "The computer is very low on power!", "Try finding an elextricity outlet and connect the computer so it can recharge."
if not self.powerAvailable:
return "Running on battery power.", None
return "Power is connected.", None
class InternetComponent(base.AvailabilityComponent):
"""
Availability component which monitors for a working connection to
the internet.
"""
name = "internet"
caption = "Internet connection"
severity = base.ERROR # how bad is it that there is no internet?
connection_present = None
def __init__(self, **kwargs):
base.AvailabilityComponent.__init__(self, **kwargs)
if 'severity' in kwargs:
self.severity = kwargs['severity']
def setParent(self, parent):
base.AvailabilityComponent.setParent(self, parent)
from fizzjik.input import network
self.parent.addEvent(network.NetworkConnectionPresentEvent, self.onConnectionAdded)
self.parent.addEvent(network.NetworkConnectionAddedEvent, self.onConnectionAdded)
self.parent.addEvent(network.NetworkConnectionRemovedEvent, self.onConnectionRemoved)
svc = network.NetworkConnectionSensor()
svc.immediate = True
svc.setServiceParent(self.parent)
def onConnectionRemoved(self, e):
changed = self.connection_present
self.connection_present = False
if changed:
self.parent.availabilityChanged(self)
def onConnectionAdded(self, e):
changed = not self.connection_present
self.connection_present = True
if changed:
self.parent.availabilityChanged(self)
def getState(self):
if not self.connection_present:
return self.severity
return base.OK
def getHelp(self):
if not self.connection_present:
return "There is no internet connection.", "Try to find an ethernet cable and plug it in the ethernet port of the computer. Alternatively, configure the computer to use the wireless network."
return "Internet is available.", None
class RFIDReaderComponent(base.AvailabilityComponent):
"""
Component which watches for connected RFID readers.
Options:
- Minimum / maximum / specific number of readers
- List of required serial numbers
"""
name = "rfid"
caption = "RFID readers"
severity = base.ERROR # how bad is it if conditions are not applied?
min_readers = None
max_readers = None
num_readers = None
serials = None
connected_readers = None
greedy = False
def __init__(self, **kw):
if 'min_readers' in kw:
self.min_readers = kw['min_readers']
if 'max_readers' in kw:
self.max_readers = kw['max_readers']
if 'num_readers' in kw:
self.num_readers = kw['num_readers']
if 'serials' in kw:
self.serials = kw['serials']
if 'greedy' in kw:
self.greedy = kw['greedy']
self.connected_readers = []
def setParent(self, parent):
base.AvailabilityComponent.setParent(self, parent)
from fizzjik.input import sonmicro
self.parent.addEvent(sonmicro.SonMicroMifareSensorAddedEvent, self.readerAdded)
self.parent.addEvent(sonmicro.SonMicroMifareSensorRemovedEvent, self.readerRemoved)
svc = sonmicro.SonMicroMifareSensorMonitor(greedy = self.greedy)
svc.setServiceParent(self.parent)
def readerAdded(self, e):
serial = e.data.serial
if serial in self.connected_readers:
return
self.connected_readers.append(serial)
self.parent.availabilityChanged(self)
def readerRemoved(self, e):
serial = e.data
if serial not in self.connected_readers:
return
self.connected_readers.remove(serial)
self.parent.availabilityChanged(self)
def getState(self):
if not self.serials:
# Not checking on specific serial numbers; check min,max and num.
if self.min_readers is not None and len(self.connected_readers) < self.min_readers:
return self.severity
if self.max_readers is not None and len(self.connected_readers) > self.max_readers:
return self.severity
if self.num_readers is not None and len(self.connected_readers) != self.num_readers:
return self.severity
return base.OK
else:
# check for some specific readers.
if set(self.connected_readers) != set(self.serials):
return self.severity
return base.OK
def getHelp(self):
if not self.serials:
# Not checking on specific serial numbers; check min,max and num.
if self.min_readers is not None and len(self.connected_readers) < self.min_readers:
return "There need to be at least %d reader(s) connected." % self.min_readers, "Connect at least %d more reader(s)." % (self.min_readers - len(self.connected_readers))
if self.max_readers is not None and len(self.connected_readers) > self.max_readers:
return "There need to be at maximum %d reader(s) connected." % self.min_readers, "Disconnect %d more reader(s)." % (self.max_readers - len(self.connected_readers))
if self.num_readers is not None and len(self.connected_readers) != self.num_readers:
return "There need to be precisely %d reader(s) connected." % self.num_readers, "Please connect the right number of readers."
return "Readers are configured.", None
else:
con = set(self.connected_readers)
need = set(self.serials)
if con != need:
return "You need to connect specifically the following reader(s): %s" % (", ".join(self.serials)), None
return "Readers are configured.", None
class AnymetaAPIComponent(base.AvailabilityComponent):
"""
Checks whether the Anymeta API can be reached using a given
C{AnyMetaAPI} instance and whether it does not return an erroneous
result.
"""
name = "anymeta"
caption = "Anymeta connection"
# the API to use
api = None
call = "anymeta.user.info"
call_args = None
# LoopingCall
lc = None
state = None
info = None
severity = base.ERROR
def __init__(self, **kw):
base.AvailabilityComponent.__init__(self, **kw)
if "api" in kw:
self.api = kw['api']
if "call" in kw:
self.call = kw['call']
if "call_args" in kw:
self.call_args = kw['call_args']
else:
self.call_args = {}
self.state = self.severity
def setAPI(self, api):
if api == self.api:
return
self.state = self.severity
self.api = api
self.info = None
if self.lc and self.lc.running:
self.lc.stop()
self._checkStart()
def setParent(self, parent):
base.AvailabilityComponent.setParent(self, parent)
self._checkStart()
def _checkStart(self):
if not self.api:
return
if self.lc and self.lc.running:
self.lc.stop()
self.lc = task.LoopingCall(self._checkAnymeta)
self.lc.start(30)
def _checkAnymeta(self):
def connected(result):
changed = self.state != base.OK
self.state = base.OK
self.info = result
if changed:
self.parent.availabilityChanged(self)
def not_connected(failure):
log.err(failure)
self.info = None
changed = self.state != self.severity
self.state = self.severity
if changed:
self.parent.availabilityChanged(self)
self.api.doMethod(self.call, self.call_args).addCallback(connected).addErrback(not_connected)
def getState(self):
if not self.api:
return base.NOTICE
return self.state
def getHelp(self):
if not self.api:
return "No Anymeta connection has been configured.", "Choose an AnyMeta site to connect to."
if self.state != base.OK:
return "Anymeta connection failed.", None
return "Anymeta ok.", None
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/availability/component.py
|
component.py
|
import gtk
from twisted.internet import reactor
class AutocompleteWidget (gtk.HBox):
"""
Widget which handles auto-completion for AnyMeta lookups.
This widget contains a L{gtk.Entry} with autocompletion support,
and has a small image next to it, which shows the current status
of the lookup, e.g. whether a valid item has been selected or not.
@ivar api: L{anymeta.api.AnyMetaAPI} instance which is used for the autocompletion.
@ivar timeout: After what time to do the lookup (in seconds)
@ivar kind: Filter Anymeta lookups on this kind.
"""
api = None
timeout = 0.5
kind = None
# Private variables
store = None
compl = None
by_id = None
by_title = None
_call = None
_oldtxt = None
id = None  # currently selected item id; stays None until a valid item is chosen
def __init__(self, api, kind = None):
gtk.HBox.__init__(self)
self.api = api
if kind:
self.kind = kind
self.store = gtk.TreeStore(int, str)
self.by_id = {}
self.by_title = {}
self.compl = gtk.EntryCompletion()
self.compl.set_model(self.store)
self.compl.set_popup_completion(True)
self.compl.set_inline_completion(True)
self.compl.set_text_column(1)
self.compl.connect("match-selected", self.matchSelected)
self.combo = gtk.Entry()
self.combo.set_completion(self.compl)
self.combo.connect("changed", self.comboChanged)
self.add(self.combo)
self.img = gtk.Image()
self.img.set_from_stock("gtk-no", gtk.ICON_SIZE_MENU)
self.add(self.img)
def get_value(self):
"""
Return the currently selected ID, or None if there is no valid value.
"""
return self.id
def set_value(self, id):
"""
Set the current value of this widget to the given id. It uses
C{anymeta.predicates.get} to get a title for the given ID.
"""
def res(r):
self.id = id
self._oldtxt = r['result']
self.combo.set_text(r['result'])
self.img.set_from_stock("gtk-yes", gtk.ICON_SIZE_MENU)
self.id = None
self.combo.set_text("")
self.img.set_from_stock("gtk-no", gtk.ICON_SIZE_MENU)
self.api.anymeta.predicates.get(id=id, field='text.title').addCallback(res)
def matchSelected(self, compl, filter, iter):
"""
Signal handler for the C{match-selected} signal, which is
emitted by the completion popup after selecting an item.
"""
self.id = filter.get_value(iter, 0)
self.checkMatch()
def checkMatch(self, *a):
"""
Checks if the current text in the entry matches with an item
that has been retrieved from the site.
"""
ttl = self.combo.get_text()
if not self.id and ttl in self.by_title:
# It matches
self.id = self.by_title[ttl]
if self.id:
self.img.set_from_stock("gtk-yes", gtk.ICON_SIZE_MENU)
else:
self.img.set_from_stock("gtk-no", gtk.ICON_SIZE_MENU)
pass
def comboChanged(self, *a):
"""
Event is fired whenever the text in our entry box has
changed. It first looks in the local cache for a match based
on the current text (using C{checkMatch}), if this fails it
will do a C{anymeta.search.live} call to the API, populating
the completion model with the results.
"""
self.id = None
self.checkMatch()
txt = self.combo.get_text()
if len(txt) < 3 or txt == self._oldtxt:
return
self._oldtxt = txt
def result(r):
for res in r:
if res['id'] in self.by_id:
continue
self.store.append(None, [int(res['id']), res['title']])
self.by_id[res['id']] = res
if res['title'] not in self.by_title:
self.by_title[res['title']] = res['id']
self.checkMatch()
if not self.id:
# force popup if we have no direct match
self.combo.emit("changed")
if self._call and self._call.active():
self._call.cancel()
arg = {'search': txt}
if self.kind:
arg['kind'] = self.kind
self._call = reactor.callLater(self.timeout, lambda : self.api.anymeta.search.live(**arg).addCallback(result))
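# Editor-added usage sketch -- not part of the original module. It assumes
# `api` is an already-configured anymeta.api.AnyMetaAPI instance (see the
# class docstring above); constructing one is outside the scope of this file.
def _example_usage(api):
# Embed the widget in a window; the `kind` filter value is illustrative.
win = gtk.Window()
widget = AutocompleteWidget(api, kind="person")
win.add(widget)
win.show_all()
# The caller can later read widget.get_value(): the selected AnyMeta item
# id, or None while no valid item has been chosen.
return widget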
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/gtk/autocomplete.py
|
autocomplete.py
|
import re
import gtk
from anymeta.api.oauth.oauth import OAuthError
from anymeta.api.registry import AnyMetaRegistry
def sibpath(frm, to):
"""
Clone from Twisted's sibpath.
"""
import os
return os.path.join(os.path.dirname(frm), to)
class GladeDialog (gtk.Dialog):
"""
A generic class for dialogs based on glade.
"""
gladefile = None
top_widget = None
builder = None
registry = None
def __init__(self, registry = None):
gtk.Dialog.__init__(self)
self.set_modal(True)
if not registry:
self.registry = AnyMetaRegistry()
else:
self.registry = registry
self.builder = gtk.Builder()
self.builder.add_from_file(sibpath(__file__, self.gladefile))
self.builder.connect_signals(self)
w = self.builder.get_object(self.top_widget)
w.reparent(self.vbox)
w.show()
self.connect("destroy", self.stop)
self.connect("delete_event", self.stop)
def stop(self, *args):
self.emit("response", 0)
self.destroy()
class RegistryDialog (GladeDialog):
"""
Dialog which shows a list of APIs from the AnyMeta registry. It
has the possibility to select an API, add a new one, or edit an
existing one.
"""
gladefile = "registry.glade"
top_widget = "top_widget"
current_id = None
_run_as_main = False
def __init__(self, *args, **kwargs):
GladeDialog.__init__(self, *args, **kwargs)
self.set_title("AnyMeta sites")
self.set_size_request(640, 400)
self.site_store = gtk.ListStore(str, str) # self.builder.get_object("site_store")
self.site_view = self.builder.get_object("site_view")
self.site_view.set_model(self.site_store)
col = gtk.TreeViewColumn("Site")
cell = gtk.CellRendererText()
col.pack_start(cell, True)
col.set_min_width(120)
col.add_attribute(cell, "text", 0)
self.site_view.append_column(col)
col = gtk.TreeViewColumn("Base URL")
cell = gtk.CellRendererText()
col.pack_start(cell, True)
col.add_attribute(cell, "text", 1)
col.set_expand(True)
self.site_view.append_column(col)
sel = self.site_view.get_selection()
sel.connect("changed", self.selection_changed)
self._refresh_sites()
self.selection_changed()
def selection_changed(self, sel = None):
if sel:
store, iter = sel.get_selected()
if not sel or not iter:
self.current_id = None
self.builder.get_object("edit_button").set_sensitive(False)
self.builder.get_object("remove_button").set_sensitive(False)
if not self._run_as_main:
self.builder.get_object("use_button").set_sensitive(False)
else:
self.current_id = store.get_value(iter, 0)
self.builder.get_object("edit_button").set_sensitive(True)
self.builder.get_object("remove_button").set_sensitive(True)
self.builder.get_object("use_button").set_sensitive(True)
def run_as_main(self):
self._run_as_main = True
self.builder.get_object("use_button").set_label("Quit")
self.builder.get_object("use_button").set_sensitive(True)
self.run()
def _refresh_sites(self):
self.site_store.clear()
entries = self.registry.getAll()
for entry in entries:
site = entries[entry]
self.site_store.append([entry, site["entrypoint"]])
def quit(self, *args):
self.hide()
self.destroy()
def on_use_button_clicked(self, w):
self.emit("response", 1)
self.id = self.current_id
self.destroy()
def on_add_button_clicked (self, w):
d = SiteDialog(self.registry)
d.set_title("Add site")
if not d.run():
return
self._refresh_sites()
def on_edit_button_clicked (self, w):
d = SiteDialog(self.registry)
d.set_title("Edit site")
d.edit(self.current_id, self.registry.get(self.current_id))
self._refresh_sites()
def on_remove_button_clicked (self, w):
msg = "Are you sure you want to remove site '%s'?" % self.current_id
d = gtk.MessageDialog(self, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO, msg)
d.set_title("Confirm removal")
if d.run() == gtk.RESPONSE_YES:
self.registry.delete(self.current_id)
d.destroy()
self._refresh_sites()
def on_site_view_row_activated(self, *args):
self.emit("response", 1)
self.id = self.current_id
self.destroy()
class SiteDialog (GladeDialog):
"""
Dialog for editing/adding a single API entry.
"""
gladefile = "registry.glade"
top_widget = "site_dialog_root"
id = None
def on_save_button_clicked (self, w):
self._sanify_url_box()
new_id = self.builder.get_object("site_id").get_text()
self.url = self.builder.get_object("url").get_text()
if self.id:
# editing
if self.id != new_id:
# rename old id to new id
cfg = self.registry.get(self.id)
self.registry.set(new_id, cfg['entrypoint'], cfg['oauth'], cfg['comment'])
self.registry.delete(self.id)
self.registry.save()
self.id = new_id
else:
def wait_open_url(url):
d = URLDialog(url)
d.run()
d.destroy()
# adding
try:
self.registry.register_interactive(new_id, self.url, "", None, None, wait_open_url)
except OAuthError, e:
gtk.MessageDialog(self, gtk.DIALOG_MODAL, gtk.MESSAGE_ERROR, gtk.BUTTONS_OK, e.message).run()
return
self.id = new_id
self.emit("response", 1)
self.hide()
def edit(self, id, site):
self.id = id
self.builder.get_object("site_id").set_text(id)
self.builder.get_object("url").set_text(site["entrypoint"])
self.builder.get_object("url").set_sensitive(False)
return self.run()
def _sanify_url_box(self):
"""
Sanify the URL to conform to the REST API entry point.
Very AnyMeta specific - should be replaced with proper
discovery, some day.
"""
url = self.builder.get_object("url").get_text()
if not re.match(r'^https?://', url):
url = "http://" + url
if url[-1:] != "/":
url += "/"
if not re.match(r'^.*services/rest/', url):
url += "services/rest/"
self.builder.get_object("url").set_text(url)
def __init__(self, *args, **kwargs):
GladeDialog.__init__(self, *args, **kwargs)
self.set_size_request(400, 130)
class URLDialog (GladeDialog):
"""
The dialog for the OAuth authorization.
"""
gladefile = "registry.glade"
top_widget = "url_dialog_root"
def __init__(self, url):
GladeDialog.__init__(self)
self.builder.get_object("urlbutton").set_uri(url)
self.set_title("Authorization required")
def on_auth_button_clicked(self, b):
self.emit("response", 1)
self.hide()
__all__ = ["RegistryDialog", "SiteDialog", "URLDialog"]
|
AnyMetaAPI
|
/AnyMetaAPI-1.15.tar.gz/AnyMetaAPI-1.15/anymeta/gtk/registry.py
|
registry.py
|
AnyQt
-----
PyQt/PySide compatibility layer.
Features:
* At the top level AnyQt exports a Qt5 compatible module namespace along with
some minimal renames to better support portability between different
versions
* Which Qt api/backend is chosen can be controlled by a QT_API env variable
* The api can be chosen/forced programmatically (as long as no
PyQt4/PyQt5/PySide/PySide2 has been imported yet)
* Provides an optional compatibility import hook that denies imports from the
conflicting Qt api, or intercepts and fakes Qt4 api imports so that a Qt5
compatible API is used (some monkey patching is involved).
The documentation is hosted at https://anyqt.readthedocs.io/en/stable/
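A minimal usage sketch (editor-added; it assumes at least one supported
binding, such as PyQt5, is installed):

    import os
    os.environ.setdefault("QT_API", "pyqt5")  # pick the backend before Qt is imported
    from AnyQt.QtWidgets import QApplication, QLabel
    app = QApplication([])
    label = QLabel("Hello from AnyQt")
    label.show()
    app.exec_()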
|
AnyQt
|
/AnyQt-0.2.0.tar.gz/AnyQt-0.2.0/README.txt
|
README.txt
|
from aishu import setting
from aishu.datafaker.profession.entity import name, switch, ip, timestamp, ml, kai, objectManager, index, agent ,id ,port, DataManage, databaseInfo
from aishu.datafaker.profession.entity import zx,testMail, arPort
from aishu.datafaker.profession.entity.ParaDateFiled import ParaDateFiledServer
from aishu.datafaker.profession.entity.RetrievesAssociated import ParaDateAnyRobotServer
from aishu.datafaker.profession.entity import CreateTestPort
def filed(key,inquire=[]):
"""
:param key: name of the registered data-faker entry
:param inquire: optional list of ids forwarded to the entity/KPI/agent helpers
:return: the generated value, or False if the key is not registered
"""
SERVICE_KPI_ = {
'AnyRobotNameIDwh': name.date().getNameWhipptree,
'AnyRobotNameID': name.date().getName,
'AnyRobotOtherNameID': name.date().getName,
'Closed': switch.date().getSwClosed,
'Open': switch.date().getSwOpen,
'UUid': id.date().getUUid,
'IpVR': ip.date().getIpVR,
'IPError': ip.date().getIpError,
'startTime': timestamp.date().getStartTime,
'endTime': timestamp.date().getEndTime,
'lastHourTime': timestamp.date().getLastHourTime,
'last24HourTime': timestamp.date().getLast24HourTime,
'lastTime':timestamp.date().getLastTime,
'getEtlPortOld': port.date().getEtlPortOld,
'getEtlPortNew': port.date().getEtlPortNew,
'getEtlPortIll': port.date().getEtlPortIll,
'enity': ml.machine(inquire).inquire,
'entityHost': kai.machine(inquire).inquireEntity,
'serviceKpiAlert': kai.machine(inquire).inquireServiceKpi,
'businessKpiIdAndServiceId':kai.machine(inquire).inquireBusinessKPIAndServiceId,
'ServiceBusinessID':kai.machine(inquire).ServiceBusiness,
'pensInfo':kai.machine(inquire).inquirePens,
'testHostIP':ip.date().getTestHostIP,
'service_host_ip': ip.date().get_host_ip,
'service_host_name': ip.date().get_host_name,
'rarserRuleName': objectManager.date().getRuleNameId,
'dashboardId': objectManager.date().getDashboardId,
'searchId': objectManager.date().getSearchId,
'visualizationId': objectManager.date().getVisualizationId,
'indexId':index.date().getIndexId,
'indexList':index.date().getIndexName,
'indexList_as':index.date().getIndexTypeAs,
'DateTime':timestamp.date().getDateTime,
'agentPort': agent.machine(inquire).getAgentPort,
'AlertAgentPort': agent.machine(inquire).getIntPort,
'intPort':agent.machine(inquire).getIntPort,
'adminID':id.date().getAdminID,
'DefaultLogGroupID':id.date().getDefaultLogGroupID,
'asLogWareID':id.date().getAsLogWareID,
'httpUrl':kai.machine(inquire).getAlertHttpUrl,
'lastFiveYearTime':timestamp.date().getLastFiveYearTime,
'alertmergeID': ParaDateFiledServer().getUUid,
'fromID': ParaDateFiledServer().getFromTime,
'ToID': ParaDateFiledServer().getToTime,
'StartDateID': ParaDateFiledServer().getStartDate,
'EndDateID': ParaDateFiledServer().getEndDate,
'TimeRangeID': ParaDateFiledServer().getTimeRangeId,
'RangeUnitID': ParaDateFiledServer().getRangeUnitId,
'TimeLabelID': ParaDateFiledServer().getTimeLabelId,
'serviceID': ParaDateAnyRobotServer(key).getServiceId,
'KpiID': ParaDateAnyRobotServer(key).getKpiId,
'kpiNameID': ParaDateFiledServer().getkpiNameID,
'kpiNameId': ParaDateFiledServer().kpiNameId,
'SavedSearchID': ParaDateAnyRobotServer(key).getSavedSearchId,
'SavedSearchNameID': ParaDateAnyRobotServer(key).getSavedSearchNameId,
'SavedSearchLogGroupID': ParaDateAnyRobotServer(key).getSavedSearchLogGroupId,
'SavedSearchLogLibraryID': ParaDateAnyRobotServer(key).getSavedSearchLogLibraryId,
'AlertRuleNamesID': ParaDateAnyRobotServer(key).getAlertRuleNamesId,
'AlertScenarioID': ParaDateAnyRobotServer(key).getAlertScenarioId,
'DeleteAlertRuleNamesID': ParaDateAnyRobotServer(key).getDeleteAlertRuleNamesId,
'UpdateTimeID': ParaDateFiledServer().getUpdateTime,
'UtcStartID': ParaDateFiledServer().getUtcStartTime,
'UtcEndID': ParaDateFiledServer().getUtcEndTime,
's_id': zx.search().createSearchId,
'InspectionID': zx.search().createInspectionID,
'InspectionTaskID': zx.search().createInspectionTaskID,
'ExportFileID': zx.search().createExportFileID,
'RepeatInspectionName': zx.search().getRepeatInspectionName,
'agentDeployIp': ip.date().getAentHostIp,
'http_port': CreateTestPort.Port().httpport,
'syslog_port': CreateTestPort.Port().syslogPort,
'FutureTime': ParaDateFiledServer().getFutureTime,
'RoleId_user': DataManage.DataManage_Storage().RoleId_user,
'mailUser': testMail.date().getMailUser,
'mailPass': testMail.date().getMailPass,
'ADUser': testMail.date().getADUser,
'ADPass': testMail.date().getADPass,
'm_id': arPort.inputs().createarPortId,
'now_times': ParaDateFiledServer().now_time,
'five_feature_times': ParaDateFiledServer().five_feature_time,
'getlocalhostIp':ip.date().getlocalhostIp,
'getNowYear':timestamp.date().getNowYear,
'getSelfDatabase':databaseInfo.data().getSelfDatabase,
'getSelfDatabasePassword':databaseInfo.data().getSelfDatabasePassword,
'getSelfDatabaseUser':databaseInfo.data().getSelfDatabaseUser,
'getSelfDatabaseUrl':databaseInfo.data().getSelfDatabaseUrl,
'getSelfDatabasePort':databaseInfo.data().getSelfDatabasePort,
'getOpenlogPort': port.date().getOpenlogPort
}
switcher = SERVICE_KPI_
if switcher.get(key) is not None:
return switcher[key]()
else:
return False
if __name__ == '__main__':
date = filed("getOpenlogPort")
print(date)
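# Editor-added sketch (not part of the original file): entries such as
# 'serviceKpiAlert', 'entityHost' and 'agentPort' also read the optional
# `inquire` list of ids, which is forwarded to the kai/ml/agent helpers;
# simple entries ignore it.
if __name__ == '__main__':
print(filed('UUid'))                             # fresh uuid4 string
print(filed('serviceKpiAlert', inquire=['1']))   # '1' is a placeholder service id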
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/getFiledValue.py
|
getFiledValue.py
|
import jsonpath
import requests,json
from aishu import setting
from aishu.datafaker.profession.entity import id
from aishu.datafaker.profession.entity import name,timestamp,zx
from aishu.public.db_select import select
from aishu.public import urlJoin
class search(object):
def createSearchId(self):
path = "/v1/search/submit"
url =urlJoin.url(path)
logGroup = id.date().getDefaultLogGroupID()
startTime = timestamp.date().getLastHourTime()
endTime = timestamp.date().getEndTime()
payload = [
{
"logGroup": logGroup,
"query": "*",
"sort": [
{
"@timestamp": "desc"
}
],
"size": 10,
"needFieldList": True,
"filters": {
"must": [
{
"@timestamp": {
"from": startTime,
"to": endTime
}
}
],
"must_not": []
}
}
]
headers = setting.header
rsp = requests.request("POST", url, headers=headers, data = json.dumps(payload))
s_id = jsonpath.jsonpath(rsp.json(), '$..{name}'.format(name='id'))
if isinstance(s_id,bool):
return False
else:
return s_id[0]
def createInspectionID(self):
path = "/manager/inspections/inspection"
url = urlJoin.url(path)
assigned = id.date().getAdminID()
number = name.date().getName()
time = timestamp.date().getStartTime()
payload = {
"assign": assigned,
"name": number,
"info": "This is a test case by AT",
"ctime": time
}
headers = setting.header
rsp = requests.request("POST", url, headers=headers, data = json.dumps(payload))
InspectionID = jsonpath.jsonpath(rsp.json(), '$..{name}'.format(name='id'))
if isinstance(InspectionID,bool):
return False
else:
return InspectionID[0]
def createInspectionTaskID(self):
path = "/manager/inspections/task"
url = urlJoin.url(path)
InspectionID = zx.search().createInspectionID()
number = name.date().getName()
payload = {
"name": number,
"position": "localUpload",
"info": "step one: find the target page ;step two:write the check result",
"inspectionID": InspectionID
}
headers = setting.header
rsp = requests.request("POST", url, headers=headers, data = json.dumps(payload))
InspectionTaskID = jsonpath.jsonpath(rsp.json(), '$..{name}'.format(name='id'))
if isinstance(InspectionTaskID,bool):
return False
else:
return InspectionTaskID[0]
def createExportFileID(self):
path = "/manager/export".format(id=zx.search().createSearchId())
url = urlJoin.url(path)
start = timestamp.date().getLastFiveYearTime()
end = timestamp.date().getStartTime()
user = id.date().getAdminID()
loggroup = id.date().getDefaultLogGroupID()
payload = {
"logtype": "list",
"filename": "列表",
"timezone": 8,
"user": user,
"fileType": "txt",
"fileCode": "UTF-8",
"query": [
{
"logGroup": loggroup,
"query": "*",
"size": 207,
"needFieldList": True,
"filters": {
"must": [
{
"@timestamp": {
"from": start,
"to": end
}
}
],
"must_not": []
}
}
],
"fields": [
"_source"
],
"indexs": [
"fe5b7f96-443a-11e7-a467-000c29253e90"
]
}
headers = setting.header
rsp = requests.request("POST", url, headers=headers, data=json.dumps(payload))
ExportFileID = jsonpath.jsonpath(rsp.json(), '$..{name}'.format(name='id'))
if isinstance(ExportFileID, bool):
return False
else:
return ExportFileID[0]
def getRepeatInspectionName(self):
sql = 'select name from Inspection'
sqldata = select(sql)
if not (sqldata):
return False
return sqldata[0][0]
# def getContextID(self):
# url = "http://{ip}/v1/search/fetch/{id}".format(ip=setting.host, id=zx.search().createSearchId())
# headers = setting.header
# time.sleep(10)
# rsp = requests.request("GET", url, headers=headers)
# print(rsp)
# ContextID = jsonpath.jsonpath(rsp.json(), '$..{name}'.format(name='_id'))
# if isinstance(ContextID, bool):
# return False
# else:
# return ContextID
# def getOrdinaryID(self):
# sql = 'select userId from `User` where loginName!="admin"'
# sqldata = select(sql)
# if not (sqldata):
# return False
#
# return sqldata[0][0]
if __name__ == '__main__':
date = search().createSearchId()
print(date)
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/zx.py
|
zx.py
|
import jsonpath
import random
import requests,json
from aishu import setting
from aishu.datafaker.profession.entity import id
from aishu.datafaker.profession.entity import name,timestamp,zx
from aishu.public.db_select import select
from aishu.public import urlJoin
class DataManage_Storage(object):
# Return a role id that is already assigned to a user
def RoleId_user1(self):
path = '/manager/role'
url = urlJoin.url(path)
headers = setting.header
data = {
"roleName": str(random.choice(range(10, 999))) + '_' + str(random.choice(range(10, 999))),
"permissions": [
{
"permissionId": "ID_MAINPAGE",
"parentId": "",
"isLeaf": 1,
"name": "ID_MAINPAGE",
"checked": True
},
{
"permissionId": "ID_SYSTEM_MANAGER_SYSTEM_LICENSE",
"parentId": "ID_SYSTEM_MANAGER",
"isLeaf": 1,
"name": "ID_SYSTEM_MANAGER_SYSTEM_LICENSE",
"checked": False
}
],
"resource": {
"logGroup": [
"fe5b7f96-443a-11e7-a467-000c29253e90"
],
"desensitize": 0,
"jobTemplate": [],
"dashboard": {
"dashboardId": [],
"mainPageId": ""
}
},
"description": "应用给用户的角色",
"defaultLogGroupID": "fe5b7f96-443a-11e7-a467-000c29253e90"
}
res = requests.post(url, headers=headers, data=json.dumps(data))
sql = 'select roleId from Role where description = "应用给用户的角色"'
sqldata = select(sql)
if not (sqldata):
return False
return sqldata[0]
def RoleId_user(self):
path = '/manager/user'
url = urlJoin.url(path)
headers = setting.header
data ={
"displayName": str(random.choice(range(10, 999))),
"loginName": str(random.choice(range(10, 999))),
"roleId": self.RoleId_user1(),
"status": 1,
"wechatStatus": 0,
"emailVerifyStatus": 0,
"description": "不能删除的用户"
}
res = requests.post(url, headers=headers, data=json.dumps(data))
roleId = jsonpath.jsonpath(res.json(), '$..{name}'.format(name='roleId'))
if isinstance(roleId,bool):
return False
else:
return roleId
if __name__ == '__main__':
a = DataManage_Storage()
a.RoleId_user()
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/DataManage.py
|
DataManage.py
|
from aishu import setting
from aishu.datafaker.profession.entity import date
def sql(key):
"""
Registry of the SQL statements backing the data services.
:param key: name of the registered SQL entry
:return: the SQL statement string (setting.database is also updated), or False if the key is not registered
"""
switcher = {
'SavedSearchID': {'sql':"select id from Kibana where type = 'search';",'database':'AnyRobot'},
'SavedSearchNameID': {'sql':"select title from Kibana where id = '{id}';".format(id="" if not date.saved_search_Id_List else date.saved_search_Id_List[0]),'database':'AnyRobot'},
'serviceID': {'sql':"select id from KAIService;",'database':'AnyRobot'},
'KpiID': {'sql':"select id from KAIKpi where serviceId = '{id}';".format(id="" if not date.Service_Id_List else date.Service_Id_List[0]),'database':'AnyRobot'},
'SavedSearchLogGroupID': {'sql':"select json_extract(json_extract(json_extract(searchSourceJSON,'$.filter[0]'),'$.meta'),'$.index') from Kibana where id = '{id}';".format(id="" if not date.saved_search_Id_List else date.saved_search_Id_List[0]),'database':'AnyRobot'},
'SavedSearchLogLibraryID': {'sql':"select json_extract(json_extract(json_extract(searchSourceJSON,'$.filter[0]'),'$.meta'),'$.value') from Kibana where id = '{id}';".format(id="" if not date.saved_search_Id_List else date.saved_search_Id_List[0]),'database':'AnyRobot'},
'AlertRuleNamesID': {'sql':"select alert_rule_name from RuleEngineAlert;",'database':'AnyRobot'},
'AlertScenarioID': {'sql':"select id from RuleEngineAlertScenario;",'database':'AnyRobot'},
'DeleteAlertRuleNamesID': {'sql':"select alert_rule_names from RuleEngineAlertScenario where id = '{id}';".format(id="" if not date.AlertScenario_Id_List else date.AlertScenario_Id_List[0]),'database':'AnyRobot'}
}
if switcher.get(key) is not None:
if switcher[key].get('database') is not None:
if len(switcher[key]['database']) == 0:
setting.database = 'AnyRobot'
else:
setting.database = switcher[key]['database']
return switcher[key]['sql']
else:
return False
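# Editor-added usage sketch (not part of the original file): sql() returns the
# registered statement for a key and, as a side effect, points setting.database
# at the database that statement should run against.
if __name__ == '__main__':
stmt = sql('serviceID')
print(stmt)              # "select id from KAIService;"
print(setting.database)  # 'AnyRobot'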
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/AssociationMapping.py
|
AssociationMapping.py
|
import time
import random, uuid
import datetime
from aishu.datafaker.profession.entity import date
class ParaDateFiledServer(object):
# def __init__(self,info):
# self.info = info
"""
Generate values for interface (API) field parameters
"""
def getName(self):
name = str(random.choice(range(10, 999))) + '_' + str(random.choice(range(10, 999)))
return name
def getSwClosed(self):
return 'disable'
def getSwOpen(self):
return 'enable'
def getIp(self):
ipList = ["192.168.84.105",
"192.168.84.192",
"192.168.84.217",
"192.168.84.108",
"192.168.84.107",
"192.168.84.182",
"192.168.84.193",
"192.168.84.109",
"192.168.84.175",
"192.168.84.60",
"192.168.84.61",
"192.168.84.63",
"192.168.84.62",
"192.168.84.64",
"192.168.84.65",
"192.168.84.66"]
return random.choice(ipList)
def getPort(self):
# Valid system ports: 20010-20099, plus 162, 514 and 5140
portList = [port for port in range(20010, 20100)]
portList.append(162)
portList.append(514)
portList.append(5140)
port = random.choice(portList)
return port
def getIpVR(self):
return '{ip1}.{ip2}.{ip3}.{ip4}'.format(ip1=10, ip2=random.choice(range(10, 250)),
ip3=random.choice(range(10, 250)), ip4=random.choice(range(10, 250)))
def getIpError(self):
return '{ip1}.{ip2}.{ip3}.{ip4}'.format(ip1='abc', ip2=random.choice(range(10, 99)),
ip3=random.choice(range(10, 99)), ip4=random.choice(range(10, 99)))
def getUUid(self):
uuidStr = uuid.uuid4()
return str(uuidStr)
def getStartTime(self):
return int(round(time.time() * 1000))
def getEndTime(self):
return int(round(time.time() * 1000))
def getUpdateTime(self):
return int(round(time.time()))
def getEtlPortIll(self):
portList = [port for port in range(10000, 20000)]
port = random.choice(portList)
return port
def getFromTime(self):
timeList = []
differenceTime = 60 * 1000
ToTime = self.getEndTime()
timeList.append(ToTime - 12 * 60 * differenceTime)
timeList.append(ToTime - 24 * 60 * differenceTime)
startTime = random.choice(timeList)
date.FromTime_List.append(startTime)
date.ToTime_List.append(ToTime)
return startTime
def getFutureTime(self):
timeList = []
differenceTime = 60 * 1000
ToTime = self.getStartTime()
timeList.append(ToTime + 24 * 60 * differenceTime)
FutureTime = random.choice(timeList)
date.FromTime_List.append(FutureTime)
date.ToTime_List.append(ToTime)
return FutureTime
def getToTime(self):
return 0 if not date.ToTime_List else date.ToTime_List[0]
def getUtcStartTime(self):
year_1 = str(int(time.strftime("%Y", time.localtime())) - 1)
year_2 = str(int(time.strftime("%Y", time.localtime())) - 2)
year_3 = str(int(time.strftime("%Y", time.localtime())) - 3)
year_4 = str(int(time.strftime("%Y", time.localtime())) - 4)
year_5 = str(int(time.strftime("%Y", time.localtime())) - 5)
time_list = [year_1, year_2, year_3, year_4, year_5]
info = {
'year': random.choice(time_list),
'month': time.strftime("%m", time.localtime()),
'day': time.strftime("%d", time.localtime()),
'hour': time.strftime("%H", time.localtime()),
'minute': time.strftime("%M", time.localtime()),
'second': time.strftime("%S", time.localtime()),
}
ut_time = info['year'] + '-' + info['month'] + '-' + info['day'] + ' ' + info['hour'] + ':' + info['minute'] + ':' + info['second']
bj_time = datetime.datetime.strptime(ut_time, "%Y-%m-%d %H:%M:%S") - datetime.timedelta(hours=8)
bj_time_info = str(bj_time).split(' ')
bj_time_year = str(bj_time_info[0]).split('-')
bj_time_host = str(bj_time_info[1]).split(':')
return bj_time_year[0] + '-' + bj_time_year[1] + '-' + bj_time_year[2] + 'T' + bj_time_host[0] + ':' + bj_time_host[1] + ':' + bj_time_host[2] + '.000Z'
def getUtcEndTime(self):
info = {
'year': str(int(time.strftime("%Y", time.localtime())) + 1),
'month': time.strftime("%m", time.localtime()),
'day': time.strftime("%d", time.localtime()),
'hour': time.strftime("%H", time.localtime()),
'minute': time.strftime("%M", time.localtime()),
'second': time.strftime("%S", time.localtime()),
}
ut_time = info['year'] + '-' + info['month'] + '-' + info['day'] + ' ' + info['hour'] + ':' + info['minute'] + ':' + info['second']
bj_time = datetime.datetime.strptime(ut_time, "%Y-%m-%d %H:%M:%S") - datetime.timedelta(hours=8)
bj_time_info = str(bj_time).split(' ')
bj_time_year = str(bj_time_info[0]).split('-')
bj_time_host = str(bj_time_info[1]).split(':')
return bj_time_year[0] + '-' + bj_time_year[1] + '-' + bj_time_year[2] + 'T' + bj_time_host[0] + ':' + bj_time_host[1] + ':' + bj_time_host[2] + '.000Z'
def getStartDate(self):
startdate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
return startdate
def getEndDate(self):
enddate = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d %H:%M:%S")
return enddate
def getkpiNameID(self):
name = str(random.choice(range(10, 999))) + '_' + str(random.choice(range(10, 999)))
return name
def kpiNameId(self):
name = str(random.choice(range(10, 999))) + '_' + str(random.choice(range(10, 999)))
return name
def getTimeRangeId(self):
FromID = 0 if not date.FromTime_List else date.FromTime_List[0]
ToID = 0 if not date.ToTime_List else date.ToTime_List[0]
time_diff = ToID - FromID
time_unit = {
date.time_diff_value[0]: {
'timeRange': 30,
'rangeUnit': 'm',
'timeLabel': 'last30Minutes'
},
date.time_diff_value[1]: {
'timeRange': 6,
'rangeUnit': 'h',
'timeLabel': 'last6Hours'
},
date.time_diff_value[2]: {
'timeRange': 12,
'rangeUnit': 'h',
'timeLabel': 'last12Hours'
},
date.time_diff_value[3]: {
'timeRange': 24,
'rangeUnit': 'h',
'timeLabel': 'last24Hours'
}
}
time_Info = time_unit[str(time_diff)]
timeRange_Info = time_Info['timeRange']
return timeRange_Info
def getRangeUnitId(self):
FromID = 0 if not date.FromTime_List else date.FromTime_List[0]
ToID = 0 if not date.ToTime_List else date.ToTime_List[0]
time_diff = ToID - FromID
time_unit = {
date.time_diff_value[0]: {
'timeRange': 30,
'rangeUnit': 'm',
'timeLabel': 'last30Minutes'
},
date.time_diff_value[1]: {
'timeRange': 6,
'rangeUnit': 'h',
'timeLabel': 'last6Hours'
},
date.time_diff_value[2]: {
'timeRange': 12,
'rangeUnit': 'h',
'timeLabel': 'last12Hours'
},
date.time_diff_value[3]: {
'timeRange': 24,
'rangeUnit': 'h',
'timeLabel': 'last24Hours'
}
}
time_Info = time_unit[str(time_diff)]
rangeUnit_Info = time_Info['rangeUnit']
return rangeUnit_Info
def getTimeLabelId(self):
FromID = 0 if not date.FromTime_List else date.FromTime_List[0]
ToID = 0 if not date.ToTime_List else date.ToTime_List[0]
time_diff = ToID - FromID
time_unit = {
date.time_diff_value[0]: {
'timeRange': 30,
'rangeUnit': 'm',
'timeLabel': 'last30Minutes'
},
date.time_diff_value[1]: {
'timeRange': 6,
'rangeUnit': 'h',
'timeLabel': 'last6Hours'
},
date.time_diff_value[2]: {
'timeRange': 12,
'rangeUnit': 'h',
'timeLabel': 'last12Hours'
},
date.time_diff_value[3]: {
'timeRange': 24,
'rangeUnit': 'h',
'timeLabel': 'last24Hours'
}
}
time_Info = time_unit[str(time_diff)]
timeLabel_Info = time_Info['timeLabel']
return timeLabel_Info
def now_time(self):
now_times = time.strftime("%H:%M", time.localtime())
return now_times
def five_feature_time(self):
five_feature_times = (datetime.datetime.now()+datetime.timedelta(minutes=5)).strftime('%H:%M')
return five_feature_times
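# Editor-added usage sketch (not part of the original module): getFromTime()
# records a randomly chosen 12h/24h window in date.FromTime_List /
# date.ToTime_List, which getToTime() and the TimeRange/RangeUnit/TimeLabel
# helpers read back; getStartDate()/getEndDate() give formatted bounds.
if __name__ == '__main__':
srv = ParaDateFiledServer()
window_start = srv.getFromTime()   # epoch milliseconds, 12 or 24 hours in the past
window_end = srv.getToTime()       # matching end timestamp recorded by getFromTime()
print(window_start, window_end)
print(srv.getStartDate(), srv.getEndDate())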
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/ParaDateFiled.py
|
ParaDateFiled.py
|
import requests
from aishu.public.db_select import select
from aishu import setting
from aishu.public import urlJoin
class machine(object):
def __init__(self, search):
self.search = search
def inquireServiceKpi(self):
serviceKpi = []
if not (self.search):
return False
for key in self.search:
sql = 'SELECT serviceId,id From KAIKpi where KAIKpi.serviceId={serverID}'.format(serverID=key)
date = select(sql)
for sqlDate in date:
serviceKpi.append(list(sqlDate))
# When the query finds no data, return an error (False)
if not (serviceKpi):
return False
context = [
{"key": serviceKpi[0], "operator": "matches", "value": ["2", "0", "3", "4", "5", "6"], "type": "kpi"},
{"key": serviceKpi[1], "operator": "matches", "value": ["0", "2", "3", "4", "5", "6"], "type": "kpi"},
{"key": serviceKpi[2], "operator": "matches", "value": ["2", "0", "3", "4", "5", "6"], "type": "kpi"}
]
return context
def inquireEntity(self):
entityhostIps = []
if not (self.search):
return False
for key in self.search:
sql = 'SELECT fieldsCondition From KAIEntity where id={EntityId}'.format(EntityId=key)
sqldata = select(sql)
for key in sqldata:
for entityRules in key:
entityRules = eval(entityRules)
for entityRule in entityRules:
entityhostIps.append(entityRule['value'])
# When the query finds no data, return an error (False)
if not (entityhostIps):
return False
data = [{"fieldsCondition": [{"host": {"matches": entityhostIps}}]}]
return data
def inquireBusinessKPIAndServiceId(self):
id=[]
if not (self.search):
return False
for key in self.search:
sql = 'SELECT serviceId,id From KAIKpi where KAIKpi.serviceId={serverID}'.format(serverID=key)
sqldata = select(sql)
for i in sqldata:
id.append(list(i))
if not (id):
return False
data = [{"id":id[0][1],"type":"kpi"},{"id":id[0][0],"type":"service"}]
return data
def ServiceBusiness(self):
nameAndId=[]
if not (self.search):
return False
for key in self.search:
sql='SELECT KAIKpi.`name`,KAIKpi.id,KAIService.`name`,KAIKpi.serviceId FROM KAIService INNER JOIN KAIKpi ON KAIService.id = KAIKpi.serviceId WHERE KAIService.id = {serverID}'.format(serverID=key)
sqldata = select(sql)
for info in sqldata:
nameAndId=list(info)
if not (nameAndId):
return False
data = [
{
"type": 0,
"rect": {
"x": 442,
"y": 265.5,
"width": 70,
"height": 100,
"center": {
"x": 477,
"y": 315.5
},
"ex": 512,
"ey": 365.5
},
"lineWidth": 1,
"rotate": 0,
"offsetRotate": 0,
"globalAlpha": 1,
"dash": 0,
"strokeStyle": "#000000",
"font": {
"color": "#000000",
"fontFamily": "\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial",
"fontSize": 12,
"lineHeight": 1.5,
"fontStyle": "normal",
"fontWeight": "normal",
"textAlign": "center",
"textBaseline": "middle",
"background": ""
},
"animateCycleIndex": 0,
"events": [],
"eventFns": [
"link",
"doAnimate",
"doFn",
"doWindowFn"
],
"id": "ab82928",
"name": "people",
"tags": [],
"lineDashOffset": 0,
"textOffsetX": 0,
"textOffsetY": 0,
"animateType": "",
"visible": True,
"data": "",
"zRotate": 0,
"anchors": [
{
"x": 442,
"y": 315.5,
"direction": 4
},
{
"x": 477,
"y": 265.5,
"direction": 1
},
{
"x": 512,
"y": 315.5,
"direction": 2
},
{
"x": 477,
"y": 365.5,
"direction": 3
}
],
"rotatedAnchors": [
{
"x": 442,
"y": 315.5,
"direction": 4
},
{
"x": 477,
"y": 265.5,
"direction": 1
},
{
"x": 512,
"y": 315.5,
"direction": 2
},
{
"x": 477,
"y": 365.5,
"direction": 3
}
],
"animateDuration": 0,
"animateFrames": [],
"z": None,
"borderRadius": 0,
"iconSize": 0,
"imageAlign": "center",
"gradientAngle": 0,
"gradientRadius": 0.01,
"paddingTop": 0,
"paddingBottom": 0,
"paddingLeft": 0,
"paddingRight": 0,
"paddingLeftNum": 0,
"paddingRightNum": 0,
"paddingTopNum": 0,
"paddingBottomNum": 0,
"textRect": {
"x": 0,
"y": 0,
"width": 0,
"height": 0,
"center": {
"x": 0,
"y": 0
},
"ex": 0,
"ey": 0
},
"fullTextRect": {
"x": 0,
"y": 0,
"width": 0,
"height": 0,
"center": {
"x": 0,
"y": 0
},
"ex": 0,
"ey": 0
},
"iconRect": {
"x": 0,
"y": 0,
"width": 0,
"height": 0,
"center": {
"x": 0,
"y": 0
},
"ex": 0,
"ey": 0
},
"elementRendered": False,
"TID": "ec085cd",
"lastImage": None,
"dockWatchers": [
{
"x": 477,
"y": 315.5
},
{
"x": 0,
"y": 276.5
},
{
"x": 70,
"y": 276.5
},
{
"x": 70,
"y": 376.5
},
{
"x": 0,
"y": 376.5
}
]
},
{
"type": 0,
"rect": {
"x": 622,
"y": 210,
"width": 200,
"height": 80,
"center": {
"x": 722,
"y": 250
},
"ex": 822,
"ey": 290
},
"lineWidth": 1,
"rotate": 0,
"offsetRotate": 0,
"globalAlpha": 1,
"dash": 0,
"strokeStyle": "transparent",
"fillStyle": "#ffffff",
"font": {
"color": "#000000",
"fontFamily": "\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial",
"fontSize": 12,
"lineHeight": 1.5,
"fontStyle": "normal",
"fontWeight": "normal",
"textAlign": "center",
"textBaseline": "middle"
},
"animateCycleIndex": 0,
"events": [],
"eventFns": [
"link",
"doAnimate",
"doFn",
"doWindowFn"
],
"id": "531cfaa9",
"name": "echarts",
"tags": [],
"lineDashOffset": 0,
"textOffsetX": 0,
"textOffsetY": 0,
"animateType": "",
"hideInput": True,
"visible": True,
"data": {
"text": "{serviceName}_服务健康分数".format(serviceName=nameAndId[2]),
"echarts": {
"option": {
"title": {
"text": "服务健康分数",
"subtext": "",
"left": "center",
"top": "bottom",
"textStyle": {
"fontFamily": "Microsoft YaHei",
"fontSize": 12,
"fontWeight": "normal",
"color": "#000000",
"lineHeight": 12,
"rich": {
"e": {
"color": "red"
}
}
},
"subtextStyle": {
"fontSize": 12,
"color": "red"
}
},
"tooltip": {
"trigger": "axis",
"padding": [
8,
12
]
},
"grid": [
{
"left": 2,
"right": "32.3%",
"borderColor": "transparent",
"top": 2,
"bottom": 24,
"backgroundColor": "#ffe1a8",
"show": True
},
{
"left": 2,
"right": "33%",
"bottom": 24,
"backgroundColor": "transparent",
"borderColor": "transparent",
"show": True
},
{
"x": "68%",
"right": 2,
"left": "68%",
"top": 2,
"bottom": 24,
"backgroundColor": "#ffe1a8",
"borderColor": "transparent",
"show": True
}
],
"xAxis": [
{
"gridIndex": 0,
"type": "category",
"data": [
1610935980000,
1610936100000,
1610936220000
],
"show": False
},
{
"show": False
}
],
"yAxis": [
{
"gridIndex": 0,
"type": "value",
"show": False
},
{
"show": False
}
],
"series": [
{
"animation": False,
"hoverAnimation": False,
"data": [
70,
70,
70
],
"smooth": True,
"showSymbol": True,
"yAxisIndex": 0,
"symbol": "none",
"xAxisIndex": 0,
"symbolSize": 0,
"type": "line",
"lineStyle": {
"color": "#000000"
},
"itemStyle": {
"color": "#000000"
}
},
{
"tooltip": {
"trigger": "item",
"formatter": "70",
"padding": [
8,
12
],
"position": [
"80%",
"-50%"
]
},
"color": "#000000",
"animation": False,
"avoidLabelOverlap": True,
"hoverAnimation": False,
"data": [
{
"value": 100,
"name": "--"
}
],
"name": "",
"radius": [
"46%",
"46%"
],
"label": {
"show": True,
"position": "center",
"fontSize": 20,
"formatter": "70",
"rich": {
"n": {
"fontSize": 12
}
}
},
"center": [
"83%",
"44%"
],
"type": "pie"
}
],
"severity": [
3,
3,
3
],
"type": "service"
},
"rect": {
"width": 200,
"height": 80
}
},
"data": {
"dataName": "{serviceName}_服务健康分数".format(serviceName=nameAndId[2]),
"id": nameAndId[3],
"isNotFound": False,
"parentId": "{serviceId}".format(serviceId=nameAndId[3]),
"type": "service"
}
},
"zRotate": 0,
"anchors": [
{
"x": 622,
"y": 250,
"direction": 4
},
{
"x": 722,
"y": 210,
"direction": 1
},
{
"x": 822,
"y": 250,
"direction": 2
},
{
"x": 722,
"y": 290,
"direction": 3
}
],
"rotatedAnchors": [
{
"x": 622,
"y": 250,
"direction": 4
},
{
"x": 722,
"y": 210,
"direction": 1
},
{
"x": 822,
"y": 250,
"direction": 2
},
{
"x": 722,
"y": 290,
"direction": 3
}
],
"animateDuration": 0,
"animateFrames": [],
"borderRadius": 0,
"iconSize": None,
"imageAlign": "center",
"gradientAngle": 0,
"gradientRadius": 0.01,
"paddingTop": 0,
"paddingBottom": 0,
"paddingLeft": 0,
"paddingRight": 0,
"elementId": "e406b09",
"paddingLeftNum": 0,
"paddingRightNum": 0,
"paddingTopNum": 0,
"paddingBottomNum": 0,
"textRect": {
"x": 622,
"y": 270,
"width": 200,
"height": 20,
"center": {
"x": 722,
"y": 280
},
"ex": 822,
"ey": 290
},
"fullTextRect": {
"x": 622,
"y": 210,
"width": 200,
"height": 80,
"center": {
"x": 722,
"y": 250
},
"ex": 822,
"ey": 290
},
"iconRect": {
"x": 622,
"y": 210,
"width": 200,
"height": 60,
"center": {
"x": 722,
"y": 240
},
"ex": 822,
"ey": 270
},
"fullIconRect": {
"x": 622,
"y": 210,
"width": 200,
"height": 80,
"center": {
"x": 722,
"y": 250
},
"ex": 822,
"ey": 290
},
"elementRendered": True,
"TID": "ec085cd",
"elementLoaded": True,
"dockWatchers": [
{
"x": 722,
"y": 250
},
{
"x": 622,
"y": 210
},
{
"x": 822,
"y": 210
},
{
"x": 822,
"y": 290
},
{
"x": 622,
"y": 290
}
]
},
{
"type": 0,
"rect": {
"x": 622,
"y": 336.5,
"width": 200,
"height": 80,
"center": {
"x": 722,
"y": 376.5
},
"ex": 822,
"ey": 416.5
},
"lineWidth": 1,
"rotate": 0,
"offsetRotate": 0,
"globalAlpha": 1,
"dash": 0,
"strokeStyle": "transparent",
"fillStyle": "#ffffff",
"font": {
"color": "#000000",
"fontFamily": "\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial",
"fontSize": 12,
"lineHeight": 1.5,
"fontStyle": "normal",
"fontWeight": "normal",
"textAlign": "center",
"textBaseline": "middle"
},
"animateCycleIndex": 0,
"events": [],
"eventFns": [
"link",
"doAnimate",
"doFn",
"doWindowFn"
],
"id": "83348a3",
"name": "echarts",
"tags": [],
"lineDashOffset": 0,
"textOffsetX": 0,
"textOffsetY": 0,
"animateType": "",
"hideInput": True,
"visible": True,
"data": {
"text": "{serviceName}_{KPIName}".format(serviceName=nameAndId[2],KPIName=nameAndId[0]),
"echarts": {
"option": {
"title": {
"text": "{KPIName}".format(KPIName=nameAndId[0]),
"subtext": "",
"left": "center",
"top": "bottom",
"textStyle": {
"fontFamily": "Microsoft YaHei",
"fontSize": 12,
"fontWeight": "normal",
"color": "#000000",
"lineHeight": 12,
"rich": {
"e": {
"color": "red"
}
}
},
"subtextStyle": {
"fontSize": 12,
"color": "red"
}
},
"tooltip": {
"trigger": "axis",
"padding": [
8,
12
]
},
"grid": [
{
"left": 2,
"right": "32.3%",
"borderColor": "transparent",
"top": 2,
"bottom": 24,
"backgroundColor": "#ffe1a8",
"show": True
},
{
"left": 2,
"right": "33%",
"bottom": 24,
"backgroundColor": "transparent",
"borderColor": "transparent",
"show": True
},
{
"x": "68%",
"right": 2,
"left": "68%",
"top": 2,
"bottom": 24,
"backgroundColor": "#ffe1a8",
"borderColor": "transparent",
"show": True
}
],
"xAxis": [
{
"gridIndex": 0,
"type": "category",
"data": [
1610935980000,
1610936100000,
1610936220000
],
"show": False
},
{
"show": False
}
],
"yAxis": [
{
"gridIndex": 0,
"type": "value",
"show": False
},
{
"show": False
}
],
"series": [
{
"animation": False,
"hoverAnimation": False,
"data": [
345,
355,
383
],
"smooth": True,
"showSymbol": True,
"yAxisIndex": 0,
"symbol": "none",
"xAxisIndex": 0,
"symbolSize": 0,
"type": "line",
"lineStyle": {
"color": "#000000"
},
"itemStyle": {
"color": "#000000"
}
},
{
"tooltip": {
"trigger": "item",
"formatter": "383条",
"padding": [
8,
12
],
"position": [
"80%",
"-50%"
]
},
"color": "#000000",
"animation": False,
"avoidLabelOverlap": True,
"hoverAnimation": False,
"data": [
{
"value": 100,
"name": "--"
}
],
"name": "",
"radius": [
"46%",
"46%"
],
"label": {
"show": True,
"position": "center",
"fontSize": 20,
"formatter": "38...",
"rich": {
"n": {
"fontSize": 12
}
}
},
"center": [
"83%",
"44%"
],
"type": "pie"
}
],
"severity": [
3,
3,
3
],
"type": "kpi"
},
"rect": {
"width": 200,
"height": 80
}
},
"data": {
"dataName": "{serviceName}_{KPIName}".format(serviceName=nameAndId[2],KPIName=nameAndId[0]),
"id": nameAndId[1],
"isNotFound": False,
"parentId": "{serviceId}".format(serviceId=nameAndId[3]),
"type": "kpi"
}
},
"zRotate": 0,
"anchors": [
{
"x": 622,
"y": 376.5,
"direction": 4
},
{
"x": 722,
"y": 336.5,
"direction": 1
},
{
"x": 822,
"y": 376.5,
"direction": 2
},
{
"x": 722,
"y": 416.5,
"direction": 3
}
],
"rotatedAnchors": [
{
"x": 622,
"y": 376.5,
"direction": 4
},
{
"x": 722,
"y": 336.5,
"direction": 1
},
{
"x": 822,
"y": 376.5,
"direction": 2
},
{
"x": 722,
"y": 416.5,
"direction": 3
}
],
"animateDuration": 0,
"animateFrames": [],
"borderRadius": 0,
"iconSize": None,
"imageAlign": "center",
"gradientAngle": 0,
"gradientRadius": 0.01,
"paddingTop": 0,
"paddingBottom": 0,
"paddingLeft": 0,
"paddingRight": 0,
"elementId": "58cbaa",
"paddingLeftNum": 0,
"paddingRightNum": 0,
"paddingTopNum": 0,
"paddingBottomNum": 0,
"textRect": {
"x": 622,
"y": 396.5,
"width": 200,
"height": 20,
"center": {
"x": 722,
"y": 406.5
},
"ex": 822,
"ey": 416.5
},
"fullTextRect": {
"x": 622,
"y": 336.5,
"width": 200,
"height": 80,
"center": {
"x": 722,
"y": 376.5
},
"ex": 822,
"ey": 416.5
},
"iconRect": {
"x": 622,
"y": 336.5,
"width": 200,
"height": 60,
"center": {
"x": 722,
"y": 366.5
},
"ex": 822,
"ey": 396.5
},
"fullIconRect": {
"x": 622,
"y": 336.5,
"width": 200,
"height": 80,
"center": {
"x": 722,
"y": 376.5
},
"ex": 822,
"ey": 416.5
},
"elementRendered": True,
"TID": "ec085cd",
"elementLoaded": True,
"dockWatchers": [
{
"x": 722,
"y": 376.5
},
{
"x": 622,
"y": 336.5
},
{
"x": 822,
"y": 336.5
},
{
"x": 822,
"y": 416.5
},
{
"x": 622,
"y": 416.5
}
]
},
{
"type": 1,
"rect": {
"x": 0,
"y": 0,
"width": 0,
"height": 0,
"center": {
"x": 0,
"y": 0
},
"ex": 0,
"ey": 0
},
"lineWidth": 1,
"rotate": 0,
"offsetRotate": 0,
"globalAlpha": 1,
"dash": 0,
"strokeStyle": "#000000",
"fillStyle": "",
"font": {
"color": "",
"fontFamily": "\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial",
"fontSize": 12,
"lineHeight": 1.5,
"fontStyle": "normal",
"fontWeight": "normal",
"textAlign": "center",
"textBaseline": "middle",
"background": "#fff"
},
"animateCycleIndex": 0,
"events": [],
"eventFns": [
"link",
"doAnimate",
"doFn",
"doWindowFn"
],
"id": "d561b63",
"name": "curve",
"tags": [],
"lineDashOffset": 0,
"textOffsetX": 0,
"textOffsetY": 0,
"visible": True,
"data": "",
"controlPoints": [
{
"x": 592,
"y": 315.5,
"direction": 2,
"anchorIndex": 2,
"id": "ab82928"
},
{
"x": 542,
"y": 250,
"direction": 4,
"anchorIndex": 0,
"id": "531cfaa9"
}
],
"fromArrowSize": 5,
"toArrowSize": 5,
"borderWidth": 0,
"borderColor": "#000000",
"animateColor": "",
"animateSpan": 1,
"animatePos": 0,
"isAnimate": False,
"animateFromSize": 0,
"animateToSize": 0,
"animateDotSize": 3,
"from": {
"x": 512,
"y": 315.5,
"direction": 2,
"anchorIndex": 2,
"id": "ab82928"
},
"to": {
"x": 622,
"y": 250,
"direction": 4,
"anchorIndex": 0,
"id": "531cfaa9"
},
"fromArrow": "",
"toArrow": "triangleSolid",
"textRect": None,
"TID": "ec085cd"
},
{
"type": 1,
"rect": {
"x": 0,
"y": 0,
"width": 0,
"height": 0,
"center": {
"x": 0,
"y": 0
},
"ex": 0,
"ey": 0
},
"lineWidth": 1,
"rotate": 0,
"offsetRotate": 0,
"globalAlpha": 1,
"dash": 0,
"strokeStyle": "#000000",
"fillStyle": "",
"font": {
"color": "",
"fontFamily": "\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial",
"fontSize": 12,
"lineHeight": 1.5,
"fontStyle": "normal",
"fontWeight": "normal",
"textAlign": "center",
"textBaseline": "middle",
"background": "#fff"
},
"animateCycleIndex": 0,
"events": [],
"eventFns": [
"link",
"doAnimate",
"doFn",
"doWindowFn"
],
"id": "730113e",
"name": "curve",
"tags": [],
"lineDashOffset": 0,
"textOffsetX": 0,
"textOffsetY": 0,
"visible": True,
"data": "",
"controlPoints": [
{
"x": 592,
"y": 315.5,
"direction": 2,
"anchorIndex": 2,
"id": "ab82928"
},
{
"x": 542,
"y": 376.5,
"direction": 4,
"anchorIndex": 0,
"id": "83348a3"
}
],
"fromArrowSize": 5,
"toArrowSize": 5,
"borderWidth": 0,
"borderColor": "#000000",
"animateColor": "",
"animateSpan": 1,
"animatePos": 0,
"isAnimate": False,
"animateFromSize": 0,
"animateToSize": 0,
"animateDotSize": 3,
"from": {
"x": 512,
"y": 315.5,
"direction": 2,
"anchorIndex": 2,
"id": "ab82928"
},
"to": {
"x": 622,
"y": 376.5,
"direction": 4,
"anchorIndex": 0,
"id": "83348a3"
},
"fromArrow": "",
"toArrow": "triangleSolid",
"textRect": None,
"TID": "ec085cd"
}
]
return data
def inquirePens(self):
nameAndId=[]
if not (self.search):
return False
for key in self.search:
sql='SELECT KAIKpi.`name`,KAIKpi.id,KAIService.`name`,KAIKpi.serviceId FROM KAIService INNER JOIN KAIKpi ON KAIService.id = KAIKpi.serviceId WHERE KAIService.id = {serverID}'.format(serverID=key)
sqldata = select(sql)
for info in sqldata:
nameAndId=list(info)
if not (nameAndId):
return False
data=[
{"type":0, "rect":{"x":546,"y":268,"width":582,"height":117,"center":{"x":837,"y":326.5},"ex":1128,"ey":385}, "lineWidth":1, "rotate":0, "offsetRotate":0,"globalAlpha":1,"dash":0,"strokeStyle":"transparent","fillStyle":"#ffffff", "font":{"color":"#000000","fontFamily":"\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial","fontSize":12,"lineHeight":1.5,"fontStyle":"normal","fontWeight":"normal","textAlign":"center","textBaseline":"middle"}, "animateCycleIndex":0,"events":[],"eventFns":["link","doAnimate","doFn","doWindowFn"],"id":"d0c90c4","name":"echarts","tags":[],"lineDashOffset":0,"textOffsetX":0,"textOffsetY":0,"animateType":"","hideInput":True,"visible":True,
"data":{"text":"{serviceName}_{KPIName}".format(serviceName=nameAndId[2],KPIName=nameAndId[0]),"echarts":{"option":{"title":{"text":"KPI251_606","subtext":"","left":"center","top":"bottom","textStyle":{"fontFamily":"Microsoft YaHei","fontSize":12,"fontWeight":"normal","color":"#000000","lineHeight":12,"rich":{"e":{"color":"red"}}},"subtextStyle":{"fontSize":12,"color":"red"}},"tooltip":{"trigger":"axis","padding":[8,12]},"grid":[{"left":2,"right":"32.3%","borderColor":"transparent","top":2,"bottom":24,"backgroundColor":"rgba(0, 0, 0, 0.15)","show":True},{"left":2,"right":"33%","bottom":24,"backgroundColor":"transparent","borderColor":"transparent","show":True},{"x":"68%","right":2,"left":"68%","top":2,"bottom":24,"backgroundColor":"rgba(0, 0, 0, 0.15)","borderColor":"transparent","show":True}],"xAxis":[{"gridIndex":0,"type":"category","data":[],"show":False},{"show":False}],"yAxis":[{"gridIndex":0,"type":"value","show":False},{"show":False}],"series":[{"animation":False,"hoverAnimation":False,"data":[],"smooth":True,"showSymbol":True,"yAxisIndex":0,"symbol":"none","xAxisIndex":0,"symbolSize":0,"type":"line","lineStyle":{"color":"#000000"},"itemStyle":{"color":"#000000"}},{"tooltip":{"trigger":"item","formatter":"N/A条","padding":[8,12],"position":["80%","-50%"]},"color":"#000000","animation":False,"avoidLabelOverlap":True,"hoverAnimation":False,"data":[{"value":100,"name":"--"}],"name":"","radius":["46%","46%"],"label":{"show":True,"position":"center","fontSize":20,"formatter":"N/A条","rich":{"n":{"fontSize":12}}},"center":["83%","44%"],"type":"pie"}],"severity":[],"type":"kpi"},"rect":{"width":200,"height":80}},"data":{"dataName":"{serviceName}_{KPIName}".format(serviceName=nameAndId[2],KPIName=nameAndId[0]),"id":nameAndId[1],"isNotFound":False,"parentId":"{serviceId}".format(serviceId=nameAndId[3]),"type":"kpi"}},"zRotate":0,"anchors":[{"x":546,"y":326.5,"direction":4},{"x":837,"y":268,"direction":1},{"x":1128,"y":326.5,"direction":2},{"x":837,"y":385,"direction":3}],"rotatedAnchors":[{"x":546,"y":326.5,"direction":4},{"x":837,"y":268,"direction":1},{"x":1128,"y":326.5,"direction":2},{"x":837,"y":385,"direction":3}],"animateDuration":0,"animateFrames":[],"borderRadius":0,"iconSize":None,"imageAlign":"center","gradientAngle":0,"gradientRadius":0.01,"paddingTop":0,"paddingBottom":0,"paddingLeft":0,"paddingRight":0,"elementId":"8c5cca5","paddingLeftNum":0,"paddingRightNum":0,"paddingTopNum":0,"paddingBottomNum":0,"textRect":{"x":546,"y":355.75,"width":582,"height":29.25,"center":{"x":837,"y":370.375},"ex":1128,"ey":385},"fullTextRect":{"x":546,"y":268,"width":582,"height":117,"center":{"x":837,"y":326.5},"ex":1128,"ey":385},"iconRect":{"x":546,"y":268,"width":582,"height":87.75,"center":{"x":837,"y":311.875},"ex":1128,"ey":355.75},"fullIconRect":{"x":546,"y":268,"width":582,"height":117,"center":{"x":837,"y":326.5},"ex":1128,"ey":385},"elementRendered":True,"TID":"c576c97","elementLoaded":True,"dockWatchers":[{"x":837,"y":326.5},{"x":306,"y":274},{"x":888,"y":274},{"x":888,"y":391},{"x":306,"y":391}]},
{"type":0,"rect":{"x":161,"y":286.5,"width":200,"height":80,"center":{"x":261,"y":326.5},"ex":361,"ey":366.5},"lineWidth":1,"rotate":0,"offsetRotate":0,"globalAlpha":1,"dash":0,"strokeStyle":"transparent","fillStyle":"#ffffff","font":{"color":"#000000","fontFamily":"\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial","fontSize":12,"lineHeight":1.5,"fontStyle":"normal","fontWeight":"normal","textAlign":"center","textBaseline":"middle"},"animateCycleIndex":0,"events":[],"eventFns":["link","doAnimate","doFn","doWindowFn"],"id":"f1f3b2c","name":"echarts","tags":[],"lineDashOffset":0,"textOffsetX":0,"textOffsetY":0,"animateType":"","hideInput":True,"visible":True,
"data":{"text":"{serviceName}_服务健康分数".format(serviceName=nameAndId[2]), "echarts":{"option":{"title":{"text":"服务健康分数","subtext":"","left":"center","top":"bottom","textStyle":{"fontFamily":"Microsoft YaHei","fontSize":12,"fontWeight":"normal","color":"#000000","lineHeight":12,"rich":{"e":{"color":"red"}}},"subtextStyle":{"fontSize":12,"color":"red"}},"tooltip":{"trigger":"axis","padding":[8,12]},"grid":[{"left":2,"right":"32.3%","borderColor":"transparent","top":2,"bottom":24,"backgroundColor":"rgba(0, 0, 0, 0.15)","show":True},{"left":2,"right":"33%","bottom":24,"backgroundColor":"transparent","borderColor":"transparent","show":True},{"x":"68%","right":2,"left":"68%","top":2,"bottom":24,"backgroundColor":"rgba(0, 0, 0, 0.15)","borderColor":"transparent","show":True}],"xAxis":[{"gridIndex":0,"type":"category","data":[],"show":False},{"show":False}],"yAxis":[{"gridIndex":0,"type":"value","show":False},{"show":False}],"series":[{"animation":False,"hoverAnimation":False,"data":[],"smooth":True,"showSymbol":True,"yAxisIndex":0,"symbol":"none","xAxisIndex":0,"symbolSize":0,"type":"line","lineStyle":{"color":"#000000"},"itemStyle":{"color":"#000000"}},{"tooltip":{"trigger":"item","formatter":"N/A","padding":[8,12],"position":["80%","-50%"]},"color":"#000000","animation":False,"avoidLabelOverlap":True,"hoverAnimation":False,"data":[{"value":100,"name":"--"}],"name":"","radius":["46%","46%"],"label":{"show":True,"position":"center","fontSize":20,"formatter":"N/A","rich":{"n":{"fontSize":12}}},"center":["83%","44%"],"type":"pie"}],"severity":[],"type":"service"},"rect":{"width":200,"height":80}},"data":{"dataName":"{serviceName}_服务健康分数".format(serviceName=nameAndId[2]),"id":nameAndId[3],"isNotFound":False,"parentId":"{serviceId}".format(serviceId=nameAndId[3]),"type":"service"}},"zRotate":0,"anchors":[{"x":161,"y":326.5,"direction":4},{"x":261,"y":286.5,"direction":1},{"x":361,"y":326.5,"direction":2},{"x":261,"y":366.5,"direction":3}],"rotatedAnchors":[{"x":161,"y":326.5,"direction":4},{"x":261,"y":286.5,"direction":1},{"x":361,"y":326.5,"direction":2},{"x":261,"y":366.5,"direction":3}],"animateDuration":0,"animateFrames":[],"borderRadius":0,"iconSize":None,"imageAlign":"center","gradientAngle":0,"gradientRadius":0.01,"paddingTop":0,"paddingBottom":0,"paddingLeft":0,"paddingRight":0,"elementId":"aa0e60f","paddingLeftNum":0,"paddingRightNum":0,"paddingTopNum":0,"paddingBottomNum":0,"textRect":{"x":161,"y":346.5,"width":200,"height":20,"center":{"x":261,"y":356.5},"ex":361,"ey":366.5},"fullTextRect":{"x":161,"y":286.5,"width":200,"height":80,"center":{"x":261,"y":326.5},"ex":361,"ey":366.5},"iconRect":{"x":161,"y":286.5,"width":200,"height":60,"center":{"x":261,"y":316.5},"ex":361,"ey":346.5},"fullIconRect":{"x":161,"y":286.5,"width":200,"height":80,"center":{"x":261,"y":326.5},"ex":361,"ey":366.5},"elementRendered":True,"TID":"c576c97","elementLoaded":True,"dockWatchers":[{"x":261,"y":326.5},{"x":-17,"y":292.5},{"x":183,"y":292.5},{"x":183,"y":372.5},{"x":-17,"y":372.5}]},
{"type":1,"rect":{"x":0,"y":0,"width":0,"height":0,"center":{"x":0,"y":0},"ex":0,"ey":0},"lineWidth":1,"rotate":0,"offsetRotate":0,"globalAlpha":1,"dash":0,"strokeStyle":"#000000","fillStyle":"","font":{"color":"","fontFamily":"\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial","fontSize":12,"lineHeight":1.5,"fontStyle":"normal","fontWeight":"normal","textAlign":"center","textBaseline":"middle","background":"#fff"},"animateCycleIndex":0,"events":[],"eventFns":["link","doAnimate","doFn","doWindowFn"],"id":"335f03","name":"curve","tags":[],"lineDashOffset":0,"textOffsetX":0,"textOffsetY":0,"visible":True,"data":"","controlPoints":[{"x":422,"y":326.5,"direction":2,"anchorIndex":2,"id":"f1f3b2c"},{"x":485,"y":326.5,"direction":4,"anchorIndex":0,"id":"d0c90c4"}],"fromArrowSize":5,"toArrowSize":5,"borderWidth":0,"borderColor":"#000000","animateColor":"","animateSpan":1,"animatePos":0,"isAnimate":False,"animateFromSize":0,"animateToSize":0,"animateDotSize":3,"from":{"x":361,"y":326.5,"direction":2,"anchorIndex":2,"id":"f1f3b2c"},"to":{"x":546,"y":326.5,"direction":4,"anchorIndex":0,"id":"d0c90c4"},"fromArrow":"","toArrow":"straightLine","textRect":None,"TID":"c576c97"},
{"type":0,"rect":{"x":0,"y":276.5,"width":70,"height":100,"center":{"x":35,"y":326.5},"ex":70,"ey":376.5},"lineWidth":1,"rotate":0,"offsetRotate":0,"globalAlpha":1,"dash":0,"strokeStyle":"#000000","font":{"color":"#000000","fontFamily":"\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial","fontSize":12,"lineHeight":1.5,"fontStyle":"normal","fontWeight":"normal","textAlign":"center","textBaseline":"middle"},"animateCycleIndex":0,"events":[],"eventFns":["link","doAnimate","doFn","doWindowFn"],"id":"ab82928","name":"people","tags":[],"lineDashOffset":0,"textOffsetX":0,"textOffsetY":0,"animateType":"","visible":True,"data":"","zRotate":0,"anchors":[{"x":0,"y":326.5,"direction":4},{"x":35,"y":276.5,"direction":1},{"x":70,"y":326.5,"direction":2},{"x":35,"y":376.5,"direction":3}],"rotatedAnchors":[{"x":0,"y":326.5,"direction":4},{"x":35,"y":276.5,"direction":1},{"x":70,"y":326.5,"direction":2},{"x":35,"y":376.5,"direction":3}],"animateDuration":0,"animateFrames":[],"borderRadius":0,"iconSize":None,"imageAlign":"center","gradientAngle":0,"gradientRadius":0.01,"paddingTop":0,"paddingBottom":0,"paddingLeft":0,"paddingRight":0,"paddingLeftNum":0,"paddingRightNum":0,"paddingTopNum":0,"paddingBottomNum":0,"textRect":{"x":0,"y":0,"width":0,"height":0,"center":{"x":0,"y":0},"ex":0,"ey":0},"fullTextRect":{"x":0,"y":0,"width":0,"height":0,"center":{"x":0,"y":0},"ex":0,"ey":0},"iconRect":{"x":0,"y":0,"width":0,"height":0,"center":{"x":0,"y":0},"ex":0,"ey":0},"elementRendered":False,"TID":"c576c97"},
{"type":1,"rect":{"x":0,"y":0,"width":0,"height":0,"center":{"x":0,"y":0},"ex":0,"ey":0},"lineWidth":1,"rotate":0,"offsetRotate":0,"globalAlpha":1,"dash":0,"strokeStyle":"#000000","fillStyle":"","font":{"color":"","fontFamily":"\"Hiragino Sans GB\", \"Microsoft YaHei\", \"Helvetica Neue\", Helvetica, Arial","fontSize":12,"lineHeight":1.5,"fontStyle":"normal","fontWeight":"normal","textAlign":"center","textBaseline":"middle","background":"#fff"},"animateCycleIndex":0,"events":[],"eventFns":["link","doAnimate","doFn","doWindowFn"],"id":"19931a69","name":"curve","tags":[],"lineDashOffset":0,"textOffsetX":0,"textOffsetY":0,"visible":True,"data":"","controlPoints":[{"x":100,"y":326.5,"direction":2,"anchorIndex":2,"id":"ab82928"},{"x":131,"y":326.5,"direction":4,"anchorIndex":0,"id":"f1f3b2c"}],"fromArrowSize":5,"toArrowSize":5,"borderWidth":0,"borderColor":"#000000","animateColor":"","animateSpan":1,"animatePos":0,"isAnimate":False,"animateFromSize":0,"animateToSize":0,"animateDotSize":3,"from":{"x":70,"y":326.5,"direction":2,"anchorIndex":2,"id":"ab82928"},"to":{"x":161,"y":326.5,"direction":4,"anchorIndex":0,"id":"f1f3b2c"},"fromArrow":"","toArrow":"straightLine","textRect":None,"TID":"c576c97"}
]
return data
def getAlertHttpUrl(self):
if not (self.search):
return False
path = '/etl/input/list?start=0&limit=-1&order=create_time&by=desc'
url = urlJoin.url(path)
headers = setting.header
ports = []
rsp = requests.get(url=url,headers=headers)
inputList=rsp.json()
for key in self.search:
for input in inputList:
if input['id'] == key:
ports.append(input['port'])
break
if len(ports) != 1:
return False
return 'http://{ip}:{port}'.format(ip=setting.testEnv_config['ip'],port=ports[0])
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/kai.py
|
kai.py
|
import random,json
from aishu.datafaker.profession.entity import date
from aishu.datafaker.profession.entity.AssociationMapping import sql
from aishu.public.db_select import select
class ParaDateAnyRobotServer(object):
def __init__(self,key):
self.key = key
self.sql = sql(self.key)
def getSavedSearchId(self):
ar_id_list = []
if isinstance(self.sql, bool):
return 0
Id_date = select(self.sql)
if len(Id_date) == 0:
return 0
for value in Id_date:
ar_id_list.append(value[0])
SavedSearch_Id = random.choice(ar_id_list)
date.saved_search_Id_List.append(SavedSearch_Id)
return date.saved_search_Id_List[0]
def getSavedSearchNameId(self):
# look up the Name corresponding to the given id
if isinstance(self.sql, bool):
return 0
Name_date = select(self.sql)
if len(Name_date) == 0:
return 0
return Name_date[0][0]
def getSavedSearchLogGroupId(self):
# look up the SavedSearchLogGroupId corresponding to the given id
if isinstance(self.sql, bool):
return 0
date = select(self.sql)
if len(date) == 0:
return 0
return date[0][0].replace("\"", "")
def getSavedSearchLogLibraryId(self):
# look up the corresponding SavedSearchLogLibraryId
if isinstance(self.sql, bool):
return 0
date = select(self.sql)
if len(date) == 0:
return 0
return date[0][0].replace("\"", "")
def getAlertRuleNamesId(self):
ar_id_list = []
if isinstance(self.sql, bool):
return 0
date = select(self.sql)
if len(date) == 0:
return 0
for value in date:
ar_id_list.append(value[0])
return random.choice(ar_id_list)
def getAlertScenarioId(self):
ar_id_list = []
if isinstance(self.sql, bool):
return 0
Id_date = select(self.sql)
if len(Id_date) == 0:
return 0
for value in Id_date:
ar_id_list.append(value[0])
SavedSearch_Id = random.choice(ar_id_list)
date.AlertScenario_Id_List.append(SavedSearch_Id)
return date.AlertScenario_Id_List[0]
def getDeleteAlertRuleNamesId(self):
# look up the rule-policy names corresponding to the given scenario-policy id
if isinstance(self.sql, bool):
return 0
Name_date = select(self.sql)
if len(Name_date) == 0:
return 0
filter = json.loads(Name_date[0][0])[0]
return filter
def getServiceId(self):
ar_id_list = []
if isinstance(self.sql, bool):
return 0
Id_date = select(self.sql)
if len(Id_date) == 0:
return 0
for value in Id_date:
ar_id_list.append(value[0])
date.Service_Id_List.append(random.choice(ar_id_list))
return date.Service_Id_List[0]
def getKpiId(self):
# look up the KPI id corresponding to the given service
if isinstance(self.sql, bool):
return 0
Name_date = select(self.sql)
if len(Name_date) == 0:
return 0
return Name_date[0][0]
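# Illustrative usage sketch (not part of the original module); the key must be one registered
# in AssociationMapping.sql, e.g.:
# ParaDateAnyRobotServer('AlertRuleID').getAlertRuleNamesId()  # -> a random RuleEngineAlert id, or 0 if none exist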
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/RetrievesAssociated.py
|
RetrievesAssociated.py
|
import random
import time
import requests,json
from aishu.public import urlJoin
from aishu.public.operationJson import OperetionJson
from aishu.setting import header
class date(object):
def getPort(self):
# valid system ports: 20010-20099, plus 162, 514 and 5140
portList = [port for port in range(20010, 20100)]
portList.append(162)
portList.append(514)
portList.append(5140)
port = random.choice(portList)
return port
def getEtlPort(self):
path = "/etl/input/list?start=0&limit=-1"
payload = {}
headers = header
response = requests.request("GET", urlJoin.url(path), headers=headers, data=payload)
date = response.json()
a = OperetionJson(date)
value = a.get_value('port')
if value:
return value
else:
return []
def getEtlPortOld(self):
data = self.getEtlPort()
if len(data) == 0:
port = 0
return port
else:
port = random.choice(data)
return port
def getEtlPortNew(self):
oldNew = self.getEtlPort()
count = 0
flag = True
while flag and count < 10:
newPort = self.getPort()
count = count + 1
if newPort not in oldNew:
flag = False
return newPort
return ''
def getEtlPortIll(self):
portList = [port for port in range(10000, 20000)]
port = random.choice(portList)
return port
def getOpenlogPort(self):
path = "/etl/input/list?start=0&limit=-1"
payload = {}
headers = header
res = requests.request("GET", urlJoin.url(path), headers=headers, data=payload)
data = res.json()
# look for a matching port in AR; if none is found, create the input and return its port number.
for port_info in data:
if port_info['type'] == 'testtransfer' and port_info['protocol'] == 'tcp' and port_info['status'] == 1:
return port_info['port']
new_port = self.getEtlPortNew()
create_input_data = {
"community": [],
"port": f"{new_port}",
"protocol": "tcp",
"ruleName": None,
"status": 1,
"tagsID": [],
"tags": [],
"timezone": "Asia/Shanghai",
"type": "testtransfer",
"charset": "UTF-8"
}
path1 = "/etl/input"
res1 = requests.request("POST", urlJoin.url(path1), headers=headers, data=json.dumps(create_input_data))
time.sleep(60)
if res1.status_code != 200:
return ''
return new_port
if __name__ == '__main__':
print(date().getOpenlogPort())
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/profession/entity/port.py
|
port.py
|
from aishu import setting
from aishu.datafaker.profession.entity import ip
def sql(key):
"""
Registry of the SQL statement used by each data-service key.
:param key:
:return:
"""
switcher = {
'OpensearchSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%opensearch服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'HostServiceSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%AR主机服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'ServiceSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%AR服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'MySystemMetric':{
'sql':"SELECT `groupId` FROM loggroup where groupName LIKE '%mysystemmetric%' order by createTime desc;",
'database': 'anyrobot'
},
'CPUSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%Linux主机CPU服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'MemorySLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%Linux主机内存服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'LoadSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%Linux主机平均负载服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'IOSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%Linux主机IO利用率服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'DiskSLO': {
'sql': "SELECT `id` FROM kai_service where name LIKE '%Linux主机磁盘利用率服务SLO%' order by create_time desc;",
'database': 'anyrobot'
},
'UserID':{'sql':'select userId from User where loginName = "admin";','database':'anyrobot'},
'MLID': {'sql': "select id from MLJob ;", 'database': 'anyrobotml'},
'entityID': {'sql': "select id from KAIEntity;", 'database': 'anyrobot'},
'groupID': {'sql': "select id from KAIEntityGroup;", 'database': 'anyrobot'},
'AlertRuleID': {'sql': "select id from RuleEngineAlert;", 'database': 'anyrobot'},
'kpiID': {'sql': "select id from KAIKpi;", 'database': 'anyrobot'},
'LogTypeID': {'sql': "select dataType from LogWareHouse;", 'database': 'anyrobot'},
'AddEntityID': {
'sql': "select entityId from KAIEntityCondition where conditionValues = '192.168.84.26' AND conditionKeys = 'host';",
'database': 'anyrobot'},
'KpiTemplateID': {'sql': "select id from KAIKpiTemplate;", 'database': 'anyrobot'},
'KpiTemplateID1': {'sql': "select id from KAIKpiTemplate;", 'database': 'anyrobot'},
'KpiTemplateID2': {'sql': "select id from KAIKpiTemplate;", 'database': 'anyrobot'},
'logWareHouseID': {'sql': "SELECT id From LogWareHouse where LENGTH(id)!=8;", 'database': 'anyrobot'},
'logWareHouseId': {'sql': "SELECT id From LogWareHouse where LENGTH(id)!=8;", 'database': 'anyrobot'},
'wareHouseName': {'sql': "SELECT wareHouseName From LogWareHouse;", 'database': 'anyrobot'},
'dataType': {'sql': "SELECT dataType From LogWareHouse where LENGTH(id)!=8;", 'database': 'anyrobot'},
'indexID': {'sql': "SELECT id From IndexParams;", 'database': 'anyrobot'},
'indexName': {'sql': "SELECT indexName From IndexParams;", 'database': 'anyrobot'},
'StreamId': {'sql': "SELECT id From DataStream;", 'database': 'anyrobot'},
'LogGroupIdPare': {'sql': 'SELECT groupId From LogGroup where GroupName!="所有日志";', 'database': 'anyrobot'},
'RoleId': {'sql': 'SELECT roleId From Role where roleName ="admin";', 'database': 'anyrobot'},
'RoleId_Notadmin': {'sql': 'SELECT roleId From Role where roleName != "admin" AND roleName != "user";', 'database': 'anyrobot'},
'tagGroupID': {'sql': "SELECT id From TagGroup;", 'database': 'anyrobot'},
'tagID': {'sql': "SELECT id From Tag;", 'database': 'anyrobot'},
'HostID': {'sql': "SELECT id From AgentHost;", 'database': 'anyrobot'},
'HostIp': {'sql': "SELECT ip From AgentHost;", 'database': 'anyrobot'},
'openapiID': {'sql': "SELECT id From OpenAPIManager;", 'database': 'anyrobotopenLog'},
'UserID_Notadmin': {'sql': 'select userId from User where loginName != "admin" AND status != 0;', 'database': 'anyrobot'},
'JDBCCollectorId': {
'sql': "select collectorId from JDBCCollectorConfig where JDBCCollectorConfig.type='mysqljdbc' AND JDBCCollectorConfig.`sql`='select * from AgentHost';",
'database': 'anyrobot'},
'vSphereID': {
'sql': "SELECT collectorId FROM CollectorConfig WHERE collectorType='vSphere' AND config LIKE '%hlaio.aishu.cn%';",
'database': 'anyrobot'},
'vcenterCollectorId': {
'sql': "SELECT collectorId FROM CollectorConfig WHERE collectorType='vCenter' AND config LIKE '%hlaio.aishu.cn%';",
'database': 'anyrobot'},
'MySQLCollectorId': {'sql': "SELECT collectorId FROM CollectorConfig WHERE collectorType='MySQL Performance';",
'database': 'anyrobot'},
'OracleCollectorId': {
'sql': "SELECT collectorId FROM CollectorConfig WHERE collectorType='Oracle Performance';",
'database': 'anyrobot'},
'AIXCollectorId': {'sql': "SELECT collectorId FROM CollectorConfig WHERE collectorType='AIX Errpt';",
'database': 'anyrobot'},
'CMDCollectorId': {'sql': "SELECT collectorId FROM CollectorConfig WHERE collectorType='Command Result';",
'database': 'anyrobot'},
'CollectorId': {'sql': "SELECT collectorId FROM CollectorConfig;", 'database': 'anyrobot'},
'DBConnectID': {'sql': "SELECT id FROM DBConnect;", 'database': 'anyrobot'},
'AuthID': {'sql': "SELECT id FROM AgentHostAuth;", 'database': 'anyrobot'},
'authName': {'sql': "SELECT `name` FROM AgentHostAuth;", 'database': 'anyrobot'},
'TemplateID': {'sql': "SELECT id FROM AgentConfigTemplate;", 'database': 'anyrobot'},
'AgentInputTemplateID': {'sql': "SELECT id FROM AgentConfigTemplate WHERE category='input';",
'database': 'anyrobot'},
'AgentOutTemplateID': {'sql': "SELECT id FROM AgentConfigTemplate WHERE category='output';",
'database': 'anyrobot'},
'InputTemplateName': {'sql': "SELECT `name` FROM AgentConfigTemplate WHERE category='input';",
'database': 'anyrobot'},
'OutputTemplateName': {'sql': "SELECT `name` FROM AgentConfigTemplate WHERE category='output';",
'database': 'anyrobot'},
'AgentGroupID': {'sql': "SELECT id FROM AgentGroup;", 'database': 'anyrobot'},
'AgentJobTemplateID': {'sql': "SELECT id FROM AgentJobTemplate", 'database': 'anyrobot'},
'JobID': {'sql': "SELECT id FROM AgentJobInfo;", 'database': 'anyrobot'},
'uploadID': {'sql': "SELECT id FROM Upload;", 'database': 'anyrobot'},
'uninstallHostID': {'sql':"SELECT id From AgentHost WHERE ip='{ip}';".format(ip=ip.date().getAentHostIp()),'database': 'anyrobot'},
'entitygroupId': {'sql':"SELECT id From KAIEntityGroup ;",'database': 'anyrobot'},
'serviceKpiId': {'sql':"SELECT id From KAIKpi ;",'database': 'anyrobot'},
'serviceHeathId': {'sql':"SELECT serviceId From KAIHealth ;",'database': 'anyrobot'},
'KAIAlertId': {'sql':"SELECT id From KAIAlert ;",'database': 'anyrobot'},
'KAIBusinessId': {'sql':"SELECT id From KAIBusiness ;",'database': 'anyrobot'},
'graphName': {'sql':"SELECT graph_name From graph ;",'database': 'anyrobot'},
'ScheduleTaskId': {'sql':"SELECT id From ScheduleTask ;",'database': 'anyrobot'},
'ScheduleTaskId1': {'sql':"SELECT id From ScheduleTask ;",'database': 'anyrobot'},
'UserId': {'sql':"SELECT userId From User ;",'database': 'anyrobot'},
'UserId2': {'sql':"SELECT userId From User ;",'database': 'anyrobot'},
'alertLogId': {'sql':"SELECT alert_scenario_rule_id From RuleEngineAlertLog ;",'database': 'anyrobot'},
'RuleEngineEnableId': {'sql':"SELECT id From RuleEngineAlertScenario Where status = 1;",'database': 'anyrobot'},
'RuleEngineDisableId': {'sql':"SELECT id From RuleEngineAlertScenario Where status = 0;",'database': 'anyrobot'},
'KAIAlertEnableId': {'sql':"SELECT id From KAIAlert Where status = 1 ;",'database': 'anyrobot'},
'KAIAlertDisableId': {'sql':"SELECT id From KAIAlert Where status = 0 ;",'database': 'anyrobot'},
'ScheduleTaskEnableId': {'sql':"SELECT id From ScheduleTask Where status = 1 ;",'database': 'anyrobot'},
'ScheduleTaskDisableId': {'sql':"SELECT id From ScheduleTask Where status = 0 ;",'database': 'anyrobot'},
'ReprotId': {'sql': "SELECT report_id From report WHERE is_share_allowed = 1;", 'database': 'anyrobot'},
'ReprotIdDisable': {'sql': "SELECT report_id From report WHERE is_share_allowed = 0;", 'database': 'anyrobot'},
'ReprotName': {'sql': "SELECT `name` From report ;", 'database': 'anyrobot'},
'ReprotType': {'sql': "SELECT `type` From report ;", 'database': 'anyrobot'},
'CorrelationSearchesId': {'sql': "SELECT `id` From correlate_search ;", 'database': 'anyrobot'}
}
if switcher.get(key) is not None:
if switcher[key].get('database') is not None:
if len(switcher[key]['database']) == 0:
setting.database = 'anyrobot'
else:
setting.database = switcher[key]['database']
return switcher[key]['sql']
else:
return False
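# Illustrative example (based on the switcher above): sql('UserID') returns
# 'select userId from User where loginName = "admin";' and sets setting.database
# to 'anyrobot' as a side effect; unknown keys return False.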
|
AnyRobot
|
/AnyRobot-1.5.4.tar.gz/AnyRobot-1.5.4/aishu/datafaker/anyrobot/sqlMapping.py
|
sqlMapping.py
|
import abc
from enum import Enum, EnumMeta, _EnumDict, auto
from typing import List, Callable, AnyStr, Set, TypeVar, Type, Any
SEP_ATTR = "__sep__"
CONVERTER_ATTR = "__converter__"
ITEM_TYPE_ATTR = '__item_type__'
class BaseStrEnumItem(metaclass=abc.ABCMeta):
sep: AnyStr
converter: Callable[[AnyStr], AnyStr]
@abc.abstractmethod
def __init__(self, sep: AnyStr = None, converter: Callable[[AnyStr], AnyStr] = None):
self.sep = sep
self.converter = converter
@abc.abstractmethod
def generate_value(self, name: str) -> AnyStr:
pass
class BaseAnyStrEnum(Enum):
__sep__: AnyStr = None
__converter__: Callable[[str], AnyStr] = None
__item_type__: Type[BaseStrEnumItem] = None
@classmethod
def filter(cls,
contains: AnyStr = None, *,
contained_in: AnyStr = None,
startswith: AnyStr = None,
endswith: AnyStr = None,
case_sensitive: bool = False,
intersection: bool = True,
inverse: bool = False) -> Set['StrEnum']:
"""
:param contains: filter all enum members which are contain some substring
:param startswith: filter all enum members which are start with some substring
:param endswith: filter all enum members which are end with some substring
:param contained_in: filter all enum members which are substrings of some string
:param case_sensitive: defines whether found values must match case of given string
:param inverse: if True, all enum members except found will be returned
:param intersection: indicates whether function should return all found objects or their interception
:return: all found enums
"""
def prepare(value):
if case_sensitive:
return value
return value.lower()
found_sets: List[set] = []
if contains:
contains = prepare(contains)
found_sets.append({e for e in cls if contains in prepare(e)})
if startswith:
startswith = prepare(startswith)
found_sets.append({e for e in cls if prepare(e).startswith(startswith)})
if endswith:
endswith = prepare(endswith)
found_sets.append({e for e in cls if prepare(e).endswith(endswith)})
if contained_in:
contained_in = prepare(contained_in)
found_sets.append({e for e in cls if prepare(e) in contained_in})
if not found_sets:
return set()
if intersection:
found = found_sets[0].intersection(*found_sets[1:])
else:
found = found_sets[0].union(*found_sets[1:])
if inverse:
return {e for e in cls} - found
return found
def _generate_next_value_(*_):
return auto()
class AnyStrEnumMeta(EnumMeta):
# It's here to avoid 'got an unexpected keyword argument' TypeError
@classmethod
def __prepare__(mcs, *args, sep: AnyStr = None, converter: Callable[[AnyStr], AnyStr] = None, **kwargs):
return super().__prepare__(*args, **kwargs)
def __new__(mcs, cls, bases, class_dict, sep: AnyStr = None, converter: Callable[[AnyStr], AnyStr] = None):
mixin_type, base_enum = mcs._get_mixins_(bases)
if not issubclass(base_enum, BaseAnyStrEnum):
raise TypeError(f'Unexpected Enum type \'{base_enum.__name__}\'. '
f'Only {BaseAnyStrEnum.__name__} and its subclasses are allowed')
elif not issubclass(mixin_type, (str, bytes)):
raise TypeError(f'Unexpected mixin type \'{mixin_type.__name__}\'. '
f'Only str, bytes and their subclasses are allowed')
# Resolving Item class for mixin_type
item_type: Type[BaseStrEnumItem] = class_dict.get(ITEM_TYPE_ATTR, base_enum.__item_type__)
if item_type is None:
raise NotImplementedError(f'{cls} must implement {ITEM_TYPE_ATTR}')
elif not issubclass(item_type, BaseStrEnumItem):
raise TypeError(f'{item_type.__name__} must be type of {BaseStrEnumItem.__name__}')
# Trying to get sep and converter from class dict and base enum class
if sep is None:
sep = class_dict.get(SEP_ATTR) or base_enum.__sep__
if converter is None:
converter = class_dict.get(CONVERTER_ATTR) or base_enum.__converter__
item: BaseStrEnumItem = item_type(sep=sep, converter=converter)
new_class_dict = _EnumDict()
for name, type_hint in class_dict.get('__annotations__', {}).items():
if name.startswith('_') or name in class_dict:
continue
mcs.check_type_equals(type_hint, mixin_type)
value = item.generate_value(name)
new_class_dict[name] = value
mcs.check_type_equals(type(value), mixin_type)
for name, value in class_dict.items():
if isinstance(value, BaseStrEnumItem):
value = value.generate_value(name)
elif isinstance(value, auto):
value = item.generate_value(name)
if not name.startswith('_'):
mcs.check_type_equals(type(value), mixin_type)
new_class_dict[name] = value
new_class_dict[SEP_ATTR] = sep
new_class_dict[CONVERTER_ATTR] = converter
new_class_dict[ITEM_TYPE_ATTR] = item_type
return super().__new__(mcs, cls, bases, new_class_dict)
@staticmethod
def check_type_equals(type_to_check: Any, allowed_type: Type[Any]):
if isinstance(type_to_check, TypeVar):
if len(type_to_check.__constraints__) > 1:
raise TypeError(f'Only {allowed_type.__name__} is allowed, '
f'not {type_to_check} {type_to_check.__constraints__}')
elif type_to_check.__constraints__[0] is not allowed_type:
raise TypeError(f'Unexpected type {type_to_check.__constraints__[0].__name__}, '
f'allowed type: {allowed_type.__name__}')
elif type_to_check is not allowed_type:
raise TypeError(f'Unexpected type {getattr(type_to_check, "__name__", type_to_check)}'
f', allowed type: {allowed_type.__name__}')
class StrItem(BaseStrEnumItem):
# https://youtrack.jetbrains.com/issue/PY-24426
# noinspection PyMissingConstructor
def __init__(self, sep: AnyStr = None, converter: Callable[[str], str] = None):
self.sep = sep
self.converter = converter
def generate_value(self, name: str) -> str:
if self.converter:
name = self.converter(name)
if self.sep:
name = name.replace('_', self.sep)
return name
class BytesItem(BaseStrEnumItem):
# https://youtrack.jetbrains.com/issue/PY-24426
# noinspection PyMissingConstructor
def __init__(self, sep: AnyStr = None, converter: Callable[[bytes], bytes] = None):
self.sep = sep
self.converter = converter
def generate_value(self, name: str) -> bytes:
name = bytes(name, 'utf8')
if self.converter:
name = self.converter(name)
if self.sep:
name = name.replace(b'_', self.sep)
return name
auto_str = StrItem
auto_bytes = BytesItem
class StrEnum(str, BaseAnyStrEnum, metaclass=AnyStrEnumMeta):
__sep__: str = None
__converter__: Callable[[str], str] = None
__item_type__ = StrItem
def __str__(self):
return self.value
class BytesEnum(bytes, BaseAnyStrEnum, metaclass=AnyStrEnumMeta):
__sep__: bytes = None
__converter__: Callable[[bytes], bytes] = None
__item_type__: Type[BaseStrEnumItem] = BytesItem
def __str__(self):
return str(self.value)
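# A minimal usage sketch (illustrative only, not shipped with the library):
#
# class Color(StrEnum, sep='-'):
#     dark_red: str
#     sky_blue: str
#
# Color.dark_red.value == 'dark-red'   # '_' in the member name is replaced by the sep
# str(Color.sky_blue) == 'sky-blue'    # __str__ returns the generated value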
|
AnyStrEnum
|
/AnyStrEnum-0.2.0-py3-none-any.whl/anystrenum/anystrenum.py
|
anystrenum.py
|
import functools
from typing import Callable
try:
import inflection
except ImportError:
raise RuntimeError(f'"inflection" module not found, install it with pip install -U anystrenum[inflection]')
from anystrenum import StrEnum, BytesEnum
__all__ = [
'CamelizeStrEnum',
'TitelizeStrEnum',
'SingularizeStrEnum',
'DasherizeStrEnum',
'HumanizeStrEnum',
'PluralizeStrEnum',
'TableizeStrEnum',
'UnderscoreStrEnum',
'CamelizeByteEnum',
'DasherizeByteEnum',
'HumanizeByteEnum',
'PluralizeByteEnum',
'SingularizeByteEnum',
'TableizeByteEnum',
'TitelizeByteEnum',
'UnderscoreByteEnum'
]
def to_bytes(func: Callable[[str], str]):
@functools.wraps(func)
def new_func(byte_string: bytes):
return func(byte_string.decode()).encode()
return new_func
class CamelizeStrEnum(StrEnum):
__converter__ = inflection.camelize
class TitelizeStrEnum(StrEnum):
__converter__ = inflection.titleize
class HumanizeStrEnum(StrEnum):
__converter__ = inflection.humanize
class DasherizeStrEnum(StrEnum):
__converter__ = inflection.dasherize
class UnderscoreStrEnum(StrEnum):
__converter__ = inflection.underscore
class PluralizeStrEnum(StrEnum):
__converter__ = inflection.pluralize
class SingularizeStrEnum(StrEnum):
__converter__ = inflection.singularize
class TableizeStrEnum(StrEnum):
__converter__ = inflection.tableize
class CamelizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.camelize)
class TitelizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.titleize)
class HumanizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.humanize)
class DasherizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.dasherize)
class UnderscoreByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.underscore)
class PluralizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.pluralize)
class SingularizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.singularize)
class TableizeByteEnum(BytesEnum):
__converter__ = to_bytes(inflection.tableize)
|
AnyStrEnum
|
/AnyStrEnum-0.2.0-py3-none-any.whl/anystrenum/inflected.py
|
inflected.py
|
Any Valid
=========
The AnyValid class is a wrapper class to be used with, for example, [formencode.validators](http://www.formencode.org/en/latest/modules/validators.html#module-formencode.validators), that lets you partially specify what kind of objects it should match. To see the usefulness of this, it might be easier to first have a look at mock's ANY object ([from unittest.mock's documentation](http://docs.python.org/3/library/unittest.mock.html#any)):
> Sometimes you may need to make assertions about some of the arguments in a call to mock, but either not care about some of the arguments or want to pull them individually out of call_args and make more complex assertions on them.
> To ignore certain arguments you can pass in objects that compare equal to everything. Calls to assert_called_with() and assert_called_once_with() will then succeed no matter what was passed in.
> ```python
>>>
>>> mock = Mock(return_value=None)
>>> mock('foo', bar=object())
>>> mock.assert_called_once_with('foo', bar=ANY)
```
> ANY can also be used in comparisons with call lists like mock_calls:
> ```python
>>>
>>> m = MagicMock(return_value=None)
>>> m(1)
>>> m(1, 2)
>>> m(object())
>>> m.mock_calls == [call(1), call(1, 2), ANY]
True
```
Now, what if you would like to make certain assertions about an argument, but perhaps don't know the exact value, or want to avoid certain values (for example ```None```). This is where AnyValid might come in handy. It provides a really simple way to leverage all the great work that has been put into formencode's validators, so that your testing code can make advanced assertions while being easy to read and maintain.
Examples
========
Simple argument matching:
```python
>>>
>>> from mock import Mock
>>> from any_valid import AnyValid, Int, String
>>>
>>> def check_call(foo, bar):
... try:
... mock = Mock(return_value=None)
... mock(foo, bar=bar)
... mock.assert_called_once_with(AnyValid(String(min_length=3)),
... bar=AnyValid(Int(min=2)))
... except AssertionError:
... return False
... return True
...
>>> check_call('fo', 1)
False
>>> check_call(8, 0)
False
>>> check_call('foo', 2)
True
```
Matching a loosely defined dict argument:
```python
>>> from any_valid import AnyValid, Number, OneOf
>>> valid_input = {
... 'core_temperature': AnyValid(Number(min=35, max=41.5)),
... 'protocol': AnyValid(OneOf(['https', 'http'])),
... }
>>> mock = Mock(return_value=None)
>>> mock({'core_temperature': 36.8, 'protocol': 'https'})
>>> mock.assert_called_with(valid_input)
>>>
```
|
AnyValid
|
/AnyValid-1.0.0.tar.gz/AnyValid-1.0.0/README.md
|
README.md
|
# Anyside Python API
Welcome to the Anyside Python API!<br />
[Node.js API](https://www.npmjs.com/package/anyside)
In order to use the Anyside API you will need an API key. You can get your API key by creating an account on [anyside.com](https://anyside.com/).
## Installation
To install the Python API and get started you can simply pip install Anyside into your project.
```sh
$ pip install Anyside
```
## Query Domain
When making a request, you must pass an Anyside domain name.
The response will contain the wallets connected with Anyside.
```python
from anyside import Anyside
anyside = Anyside(api_key='YOUR API KEY')
domain = anyside.query_domain(domain="QUERY DOMAIN NAME Eg. John.any")
print(domain)
"""
#Response Example:
{'Bitcoin': [{'address': '0xBTC-SOMESTUFF', 'name': 'Bitcoin'}],
'Solana': [{'address': '0x...', 'name': 'Solana'}]}
"""
```
## Wallet lookup
When making a request, you must pass a valid wallet address.
The response will contain the Anyside domain.
```python
from anyside import Anyside
anyside = Anyside(api_key='YOUR API KEY')
lookupWallet = anyside.lookup_wallet(wallet_address="0xaeA38149566430Anyside7321B04Anyside")
print(lookupWallet)
"""
#Response Example:
{'domain': 'John.any'}
"""
```
Please subscribe on [Anyside](https://anyside.com/) to receive project updates.
You can also follow us on [Twitter](https://twitter.com/AnysideNames) and [Discord](https://discord.com/invite/MKDBhDEtUn).
|
Anyside
|
/Anyside-0.0.6.tar.gz/Anyside-0.0.6/README.md
|
README.md
|
# AoE2ScenarioParser
This is a project for editing parts of an `aoe2scenario` file from **Age of Empires 2 Definitive Edition** outside
the in-game editor.
# Getting started
[GitHub Pages]: https://ksneijders.github.io/AoE2ScenarioParser/
## Quick links
- [Installing AoE2ScenarioParser](https://ksneijders.github.io/AoE2ScenarioParser/installation/)
- [Getting Started](https://ksneijders.github.io/AoE2ScenarioParser/getting_started/)
- [Hello World Example](https://ksneijders.github.io/AoE2ScenarioParser/hello_world/)
- [Discord Server](https://discord.gg/DRUtmugXT3)
- [API Docs](https://ksneijders.github.io/AoE2ScenarioParser/api_docs/aoe2_scenario/)
## Documentation
Documentation can be found on **[GitHub Pages]**.
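For a quick impression of the workflow, here is a minimal sketch based on the documented API (the file paths are placeholders and exact method names may differ between versions):

```python
from AoE2ScenarioParser.scenarios.aoe2_de_scenario import AoE2DEScenario

# Read an existing scenario file (placeholder path)
scenario = AoE2DEScenario.from_file("my_scenario.aoe2scenario")

# Add a trigger with a display-instructions effect
trigger = scenario.trigger_manager.add_trigger("Hello World")
trigger.new_effect.display_instructions(message="Hello World!")

# Write the result back to a new file
scenario.write_to_file("my_scenario_edited.aoe2scenario")
```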
# Discord
If you have any questions regarding the parser, [join the discord]!
[join the discord]: https://discord.gg/DRUtmugXT3
# Progress
Current up-to-date progress can be found on the [changelog.md] page. (Check the `dev` branch for non-released progress).
[changelog.md]: https://github.com/KSneijders/AoE2ScenarioParser/blob/dev/CHANGELOG.md
## Features:
| | View | Add | Edit | Remove |
|------------|--------------------|---------------------|--------------------|---------------------|
| Triggers | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| Conditions | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| Effects | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| Units | n/a *<sup>1</sup> | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| Map | n/a *<sup>1</sup> | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: |
| Players | n/a *<sup>1</sup> | :heavy_check_mark:* | :heavy_check_mark: | :heavy_check_mark:* |
*: You can disable or enable players like in the in-game editor (min 1, max 8).
*<sup>1</sup>: There's no overall formatting function to display these, though they can still be printed.
## Support:
| Scenario file version | Read | Write | Reference |
|-----------------------|--------------------|--------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 1.36 | :heavy_check_mark: | :heavy_check_mark: | Version since the **release of the Definitive Edition** |
| 1.37 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [35584] (February 27th, 2020) <br> **Unchanged in**: <br> - Update [36202] (March 30th, 2020) <br> - Update [36906] (April 29th, 2020) <br> - Update [37650] (May 27th, 2020) <br> - Hotfix [37906] (June 2nd, 2020) <br> - Update [39284] (July 20th, 2020) <br> - Hotfix [39515] (July 27th, 2020) <br> - Update [40220] (August 24th, 2020) <br> - Update [40874] (September 22nd, 2020) <br> - Update [41855] (October 20th, 2020) |
| 1.40 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [42848] (November 17th, 2020) <br> **Unchanged in**: <br> - Hotfix [43210] (November 24th, 2020) |
| 1.41 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [44725] (January 25th, 2021) <br> **Unchanged in**: <br> - Hotfix [44834] (January 28th, 2021)<br> - Hotfix [45185] (February 11th, 2021)<br> - Update [46265] (March 24th, 2021)<br> - Update [47820] (May 3rd, 2021) |
| 1.42 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [50292] (July 6th, 2021) <br> **Unchanged in**: <br> - Hotfix [50700] (July 13th, 2021) |
| 1.43 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [51737] (August 10th, 2021) |
| 1.44 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [54480] (October 5th, 2021) <br> **Unchanged in**: <br> - Hotfix [54684] (October 6th, 2021) |
| 1.45 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [56005] (November 17th, 2021) <br> **Unchanged in**: <br> - Update [58259] (January 31st, 2022) |
| 1.46 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [61321] (April 27th, 2022) <br> **Unchanged in**: <br> - Update [63482] (June 28th, 2022) <br> - Hotfix [63581] (July 6th, 2022) |
| 1.47 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [66692] (August 29th, 2022) <br> **Unchanged in**: <br> - Update [73855] (December 7th, 2022) <br> - Update 75350 (Console update) (January 31st, 2023) <br> - Update [78174] (March 8th, 2023) <br> - Hotfix [78757] (March 8th, 2023) |
| 1.48 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [81058] (April 11th, 2023) <br> **Unchanged in**: <br> - Hotfix [82587] (April 28th, 2023) |
| 1.49 | :heavy_check_mark: | :heavy_check_mark: | Version since game update: [83607] (May 16th, 2023) |
[35584]: https://www.ageofempires.com/news/aoe2de-update-35584/
[36202]: https://www.ageofempires.com/news/aoe2de-update-36202/
[36906]: https://www.ageofempires.com/news/aoe2de-update-36906/
[37650]: https://www.ageofempires.com/news/aoe2de-update-37650/
[37906]: https://www.ageofempires.com/news/aoe2de-hotfix-37906/
[39284]: https://www.ageofempires.com/news/aoe2de-update-39284/
[39515]: https://www.ageofempires.com/news/aoe2de-hotfix-39515/
[40220]: https://www.ageofempires.com/news/aoe2de-update-40220/
[40874]: https://www.ageofempires.com/news/aoe2de-update-40874/
[41855]: https://www.ageofempires.com/news/aoe2de-update-41855/
[42848]: https://www.ageofempires.com/news/aoe2de-update-42848/
[43210]: https://www.ageofempires.com/news/aoe2de-hotfix-43210/
[44725]: https://www.ageofempires.com/news/aoeiide-update-44725/
[44834]: https://www.ageofempires.com/news/aoeiide-update-44725/#hotfix-44834
[45185]: https://www.ageofempires.com/news/aoe2de-hotfix-45185/
[46265]: https://www.ageofempires.com/news/aoe2de-update-46295/
[47820]: https://www.ageofempires.com/news/aoe2de-update-47820/
[50292]: https://www.ageofempires.com/news/aoe2de-update-50292/
[50700]: https://www.ageofempires.com/news/aoe2de-update-50292/#hotfix-50700
[51737]: https://www.ageofempires.com/news/aoeiide-update-51737/
[54480]: https://www.ageofempires.com/news/aoeii-de-update-54480/
[54684]: https://www.ageofempires.com/news/aoeii-de-update-54480/#hotfix-54684
[56005]: https://www.ageofempires.com/news/aoeii_de_update_56005/
[58259]: https://www.ageofempires.com/news/aoe-ii-de-update-58259/
[61321]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-61321/
[63482]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-63482/
[63581]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-63482#Hotfix-63581
[66692]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-66692/
[73855]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-73855/
[78174]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-78174/
[78757]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-78174#641ecbac39a80
[81058]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-81058/
[83607]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-update-83607/
[82587]: https://www.ageofempires.com/news/age-of-empires-ii-definitive-edition-hotfix-82587/
# Authors
- Kerwin Sneijders (Main Author)
- [Alian713](https://github.com/Divy1211) (Dataset Wizard)
# License
MIT License: Please see the [LICENSE file].
[license file]: https://github.com/KSneijders/AoE2ScenarioParser/blob/dev/LICENSE
|
AoE2ScenarioParser
|
/AoE2ScenarioParser-0.1.56.tar.gz/AoE2ScenarioParser-0.1.56/README.md
|
README.md
|
# AoE2ScenarioRms
A library built on top of the [AoE2ScenarioParser].
Allows you to add `triggers` and `XS` to your AoE2:DE scenarios which will add logic to allow for random resource
placements each play-through.
> Keep in mind that this project is still very much a **WORK-IN-PROGRESS**
[AoE2ScenarioParser]: https://github.com/KSneijders/AoE2ScenarioParser
## Example
Please check out the example [here](https://github.com/KSneijders/AoE2ScenarioRms/tree/main/examples).
(no real docs atm)
This project is still a work-in-progress, so everything can change without notice (and most likely will).
If you'd like to try anyway, for now just clone this repository to the root of your source folder and import from there.
Make sure `AoE2ScenarioParser` is installed: [link](https://github.com/KSneijders/AoE2ScenarioParser).
## Rough todo:
1. ~~Change tile selection to `random.choice(all_possible_tiles)` instead of looking for completely random tiles
Potentially conditionally? Or with parameter? As big surfaces are faster with completely random tiles~~
2. ~~Move XS logic to own class(es)~~
3. ~~Move Debug logic to own class~~
4. ~~Change `asr.write()` as triggers are always added directly, so why isn't the script?~~
5. ~~Add docs, docstrings and tests~~
6. ~~Allow XS to log the amount that spawned successfully, so you can limit the amount of spawns for performance~~
7. More (?)
## Potential Ideas:
1. Player areas :monkaS:
2. Scale with map size (hardcoded on parser level as map_size cannot be changed dynamically)
3. ~~Support larger objects (currently only 1x1 is supported)~~
4. Automatically figure out what to remove based on CreateObjectConfig configs
5. Add ability to mock the XS spawning process to estimate the amount of necessary successful spawn attempts
6. Ability to bind ID to list of create objects and be able to differentiate distance to each group
7. (Somehow) remove spawn order bias. Currently, the earlier a spawn happens, the more chance it has to succeed because
the map isn't filled up yet.
8. More (?)
---
**Suggestions are always welcome!**
# Authors
- Kerwin Sneijders (Main Author)
# License
MIT License: Please see the [LICENSE file].
[license file]: https://github.com/KSneijders/AoE2ScenarioRms/blob/main/LICENSE
|
AoE2ScenarioRms
|
/AoE2ScenarioRms-0.2.5.tar.gz/AoE2ScenarioRms-0.2.5/README.md
|
README.md
|
class Internal():
def __init__(self):
self.Utils = Internal.Utils(__name__)
self.Utils.main_file_check()
class Utils():
def __init__(self,name) -> None:
import os,sys
self.os,self.sys = os,sys
self.name = name
def clear_console(self) -> None:
self.os.system("cls" if self.os.name in ["nt"] else "clear")
def main_file_check(self) -> None:
if self.name == "__main__": print("You cannot run this file individually."); self.sys.exit(1)
class Chars():
def Quote() -> str:
return "\""
Internal = Internal()
Internal.Utils.main_file_check()
class Locations():
def root() -> str:
return "root"
def header() -> str:
return "header"
def body() -> str:
return "body"
class HTML():
def __init__(self) -> None:
Internal.Utils.main_file_check()
pass
def title(title:str) -> str:
return f"<title>{title}</title>"
class Site():
def __init__(self) -> None:
Internal.Utils.main_file_check()
self.elements = [ ]
self.header_elements = [ ]
self.body_elements = [ ]
self.css_classes = { }
def add_element(self,location:str,element:str) -> None:
# compare against the location strings returned by Locations (the unbound methods would never match)
if location == Locations.root():
self.elements.append(element)
if location == Locations.header():
self.header_elements.append(element)
if location == Locations.body():
self.body_elements.append(element)
def build(self) -> None:
Elements = ""
for Element in self.header_elements:
Elements = f"{Elements}{Element}"
Header = ""
for Element in self.header_elements:
Header = f"{Header}{Element}"
Body = ""
for Element in self.body_elements:
Body = f"{Body}{Element}"
self._html = f"""<!DOCTYPE html>
<html>
<head>
{Header}
</head>
<body>
{Body}
</body>
</html>
"""
def html(self) -> str:
return self._html
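# Illustrative usage sketch (assumes the module is imported, since running it directly exits):
# site = Site()
# site.add_element(Locations.header(), HTML.title("My Page"))
# site.add_element(Locations.body(), "<h1>Hello</h1>")
# site.build()
# print(site.html())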
|
Aod-PyHTML
|
/Aod_PyHTML-0.4.2.tar.gz/Aod_PyHTML-0.4.2/PyHTML/pyhtml.py
|
pyhtml.py
|
import asyncio
import hashlib
import json
import random
import string
import sys
import time
import aiohttp
from pythonds3.graphs import Graph
class AofexApi:
A_apis = []  # all registered instances of this class
contract_url = 'http://oapi-contract.aofex.io'
@classmethod
def get_session(cls, session: aiohttp.ClientSession):
cls.session = session
@classmethod
async def Exit(cls):
'''
Safely shut down the async API by closing the shared aiohttp session.
:return:
'''
if hasattr(cls, 'session'):
await cls.session.close()
def __init__(self, token='', secret_key=''):
self._token_ = token
self._secret_key_ = secret_key
type(self).A_apis.append(self)
# exchange-wide ("big wallet") aggregation across all AofexApi accounts
@classmethod
async def exchange_wallet(cls):
'''
Aggregate the wallets of all Aofex accounts.
:return:
'''
big_wallet = {}
tasks = []
for A_api in cls.A_apis:
async def currencies_statitics(A_api=A_api):
'''
Asynchronously sum the balance of every currency across all Aofex accounts.
:param user:
:return:
'''
reslut = await A_api.wallet(show_all=1)
if reslut['errno'] == 0:  # refresh this account's cached wallet info
A_api.wallet_msg = reslut['result']
# accumulate the per-currency amounts
for res in reslut['result']:
# only count currencies with a non-zero balance
if float(res['available']) + float(res["frozen"]) > 0:
big_wallet[(res['currency'] if res['currency'] != 'BCHABC' else 'BCH')] = \
big_wallet.get((res['currency'] if res['currency'] != 'BCHABC' else 'BCH'), 0) + \
float(res['available']) + float(res["frozen"])
tasks.append(currencies_statitics())
if tasks: await asyncio.wait(tasks)
return big_wallet
@classmethod
async def exchange_wallet2(cls):
'''
Exchange-wallet aggregation that shares a single ClientSession for all requests.
:return:
'''
big_wallet = {}
# issue all requests and handle the responses within a single session
async with aiohttp.ClientSession() as session:
tasks = []
for api in cls.A_apis:
async def get_account_wallet(api=api):
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/wallet/list'
args['method'] = 'GET'
args['data'] = dict()
# args['data']['currency'] = currency
args['data']['show_all'] = 1
if not 'timeout' in args:
args['timeout'] = 60
if not 'method' in args:
args['method'] = 'GET'
else:
args['method'] = args['method'].upper()
# header setup
if not 'headers' in args:
args['headers'] = {}
if not 'user-agent' in args['headers']:
args['headers'][
'user-agent'] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
if not 'data' in args:
args['data'] = {}
args['headers'].update(api.mkHeader(args['data']))
wallet_dict = {}
wallet_dict.update(url=args['url'], params=args['data'], headers=args['headers'],
timeout=int(args['timeout']))
result = {}
async with session.get(**wallet_dict) as r:
result['content'] = await r.text()
result['code'] = r.status
if result['code'] == 200:
account_wallet = json.loads(result['content'])
else:
account_wallet = result
if account_wallet['errno'] == 0:  # refresh this account's cached wallet info
api.wallet_msg = account_wallet['result']
# accumulate the per-currency amounts
for res in account_wallet['result']:
# only count currencies with a non-zero balance
if float(res['available']) + float(res["frozen"]) > 0:
big_wallet[(res['currency'] if res['currency'] != 'BCHABC' else 'BCH')] = \
big_wallet.get((res['currency'] if res['currency'] != 'BCHABC' else 'BCH'), 0) + \
float(res['available']) + float(res["frozen"])
tasks.append(get_account_wallet())
if tasks: await asyncio.wait(tasks)
return big_wallet
# generic HTTP request helper
async def request(self, args):
if not 'timeout' in args:
args['timeout'] = 60
if not 'method' in args:
args['method'] = 'GET'
else:
args['method'] = args['method'].upper()
# header setup
if not 'headers' in args:
args['headers'] = {}
if not 'user-agent' in args['headers']:
args['headers'][
'user-agent'] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
# Cookies
cookies = {}
if 'cookies' in args:
cookies = args['cookies']
if not 'data' in args:
args['data'] = {}
result = {}
args['headers'].update(self.mkHeader(args['data']))
if (not hasattr(type(self), 'session')) or type(self).session.closed:
if hasattr(type(self), 'session') and not type(self).session.closed:
asyncio.ensure_future(type(self).session.close())
type(self).session = aiohttp.ClientSession()  # shared network session
if args['method'] == 'GET':
async with type(self).session.get(args['url'],
params=args['data'],
headers=args['headers'],
timeout=int(args['timeout']),
cookies=cookies) as r:
result['content'] = await r.text()
elif args['method'] == 'POST':
async with type(self).session.post(args['url'],
data=args['data'],
headers=args['headers'],
timeout=int(args['timeout']),
cookies=cookies) as r:
result['content'] = await r.text()
else:
return
result['code'] = r.status
# ck = {}
# for cookie in r.cookies:
# ck.update({cookie.name: cookie.value})
# result['cookies'] = ck
# result['headers'] = r.headers
# result['content'] = r.text()
return result
# build signed HTTP headers
def mkHeader(self, data: dict):
ran_str = ''.join(random.sample(string.ascii_letters + string.digits, 5))
Nonce = "%d_%s" % (int(time.time()), ran_str)
header = dict()
header['Token'] = self._token_
header['Nonce'] = Nonce
header['Signature'] = self.sign(Nonce, data)
return header
# signature generation
def sign(self, Nonce, data: dict):
tmp = list()
tmp.append(self._token_)
tmp.append(self._secret_key_)
tmp.append(Nonce)
for d, x in data.items():
tmp.append(str(d) + "=" + str(x))
return hashlib.sha1(''.join(sorted(tmp)).encode("utf8")).hexdigest()
async def kline(self, symbol, period, size):
# fetch candlestick (kline) data
"""
{
"errno": 0,
"errmsg": "success",
"result": {
"symbol":"EOS-USDT",
"period":"1min",
"ts":"1499223904680",
"data": [{
"id": K线id,
"amount": 成交量,
"count": 成交笔数,
"open": 开盘价,
"close": 收盘价,当K线为最晚的一根时,是最新成交价
"low": 最低价,
"high": 最高价,
"vol": 成交额, 即 sum(每一笔成交价 * 该笔的成交量)
} ]
}
}
:param symbol: 如BTC_USDT 交易对
:param type: K线类型:1min, 5min, 15min, 30min, 1hour, 6hour, 12hour, 1day, 1week
:param size: 获取数量,范围:[1,2000]
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/market/kline'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['period'] = period
args['data']['size'] = size
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'])
else:
return result
async def _calculate_symbol_route(self, symbol: str):
'''
Find a cross-market route of trading pairs that can be used to derive the price of `symbol`.
:param symbol: trading pair to price
:return:
'''
symbols_result = await (asyncio.create_task if sys.version >= '3.7' else asyncio.ensure_future)(self.symbols())
if symbols_result['errno'] == 0:
A_symbols = [A_symbol['symbol'] for A_symbol in symbols_result['result']]
# graph of currency pairs
currency_pairs = Graph()
# build an undirected graph from all listed pairs
for A_symbol in A_symbols:
base, quote = A_symbol.split('-')
currency_pairs.add_edge(base, quote)
currency_pairs.add_edge(quote, base)
c1, c2 = symbol.split('-')
start = currency_pairs.get_vertex(c1)
end = currency_pairs.get_vertex(c2)
# BFS to find the shortest conversion path
start.distance = 0
start.previous = None
vert_queue = [start]
while vert_queue:
current_vert = vert_queue.pop(0)
for neigh in current_vert.get_neighbors():
if neigh.color == "white":
neigh.color = "gray"
neigh.distance = current_vert.distance + 1
neigh.previous = current_vert
vert_queue.append(neigh)
current_vert.color = "black"
if current_vert.get_key() == c2:
route = []
while bool(current_vert.previous):
route.append(
f'{current_vert.get_key()}-{current_vert.previous.get_key()}' if f'{current_vert.get_key()}-{current_vert.previous.get_key()}' in A_symbols else f'{current_vert.previous.get_key()}-{current_vert.get_key()}')
current_vert = current_vert.previous
return route[::-1]
async def newest_price(self, symbol: str):
depth_task = (asyncio.create_task if sys.version >= '3.7' else asyncio.ensure_future)(
self.kline(symbol, '1mon', 1))
if (await depth_task)['errno'] == 0:
try:
return float((await depth_task)['result']['data'][0]['close'])
except:
pass
else:
price = 1
expected_base = symbol.split('-')[0]
try:
price_route = await self._calculate_symbol_route(symbol)
except:
pass
else:
if bool(price_route):
price_tasks = [(asyncio.create_task if sys.version >= '3.7' else asyncio.ensure_future)(
self.newest_price(route_symbol)) for route_symbol in price_route]
for i in range(len(price_route)):
price *= (await price_tasks[i]) ** (1 if f'{expected_base}-' in price_route[i] else -1)
expected_base = price_route[i].split('-')[1]
return price
async def kline_contract(self, symbol, period, size=None):
"""
"data":[{
"id": K线id,
"amount": 成交量,
"count": 成交笔数,
"open": 开盘价,
"close": 收盘价,当K线为最晚的一根时,是最新成交价
"low": 最低价,
"high": 最高价,
"vol": 成交额, 即 sum(每一笔成交价 * 该笔的成交量)
}]
:param symbol : BTC-USDT 交易对
:param period : K线类型:1min, 5min, 15min, 30min, 1hour, 6hour, 12hour, 1day, 1week
:param size : 获取数量,范围:[1,2000]
"""
args = dict()
args['url'] = '{}/openApi/contract/kline'.format(self.contract_url)
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['period'] = period
args['data']['size'] = size
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'])
else:
return result
async def depth(self, symbol):
"""
{
"errno": 0,
"errmsg": "success",
"result": {
"symbol":"EOS-USDT",
"ts":1499223904680,
"bids": [[7964, 0.0678], // [price, amount]
[7963, 0.9162],...]
"asks": [ [7979, 0.0736],
[7980, 1.0292],...]
}
}
:param symbol: trading pair, e.g. BTC-USDT
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/market/depth'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'])
else:
return result
async def trades(self, symbol, size):
"""
{
"errno":0,
"errmsg":"success",
"result":{
"symbol":"EOS-USDT",
"ts":"1499223904680",
"data": [{
"id":17592256642623,
"amount":0.04,
"price":1997,
"direction":"buy",
"ts":1502448920106
},....
] }
}
:param symbol: trading pair, e.g. BTC-USDT
:param size: number of trades to fetch, range [1, 2000]
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/market/trade'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['size'] = size
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'])
else:
return result
async def symbols(self):
"""
{"errno": 0,
"errmsg": "success",
"result": [
{
"id":1223,
"symbol": "BTC-USDT",
"base_currency": "BTC",
"quote_currency": "USDT",
"min_size": 0.0000001,
"max_size": 10000,
"min_price": 0.001,
"max_price":1000,
"maker_fee":0.002,
"taker_fee":0.002
},
] }
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/market/symbols'
args['method'] = 'GET'
args['data'] = dict()
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'])
else:
return result
async def wallet(self, show_all=1):
"""
{
"errno": 0,
"errmsg": "success",
"result": [
{ "currency": "BTC",
"available": "0.2323",
"frozen": "0"
}, ]
}
:param show_all: whether to return all currencies (1: yes; if omitted, only currencies with a balance are returned)
:return:
"""
# query my assets
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/wallet/list'
args['method'] = 'GET'
args['data'] = dict()
# args['data']['currency'] = currency
args['data']['show_all'] = show_all
result = await self.request(args)
if result['code'] == 200:
wallet_msg = json.loads(result['content'])
if wallet_msg['errno'] == 0 and wallet_msg['result']:
return wallet_msg
raise ConnectionError(f"Fail to get wallet!\nAccount:{self.short_name}")
async def wallet2(self, currency):
"""
{
"errno": 0,
"errmsg": "success",
"result": [
{ "currency": "BTC",
"available": "0.2323",
"frozen": "0"
}, ]
}
:param currency: currency code to look up, e.g. BTC
:param show_all: whether to return all currencies (1: yes; if omitted, only currencies with a balance are returned)
:return:
"""
# query my assets (filtered to one currency)
# args = dict()
# args['url'] = 'https://oapi.aofex.io/openApi/wallet/list'
# args['method'] = 'GET'
# args['data'] = dict()
# args['data']['currency'] = currency
# # args['data']['show_all'] = show_all
#
# result = self.request(args)
#
# if result['code'] == 200:
# return json.loads(result['content'].decode('utf-8'))
# else:
# return result
res = await self.wallet(1)
print(res)
for record in res['result']:
if record['currency'] == currency:
ret = []
ret.append(record)
ret2 = {}
ret2['result'] = ret
return ret2
return None
async def rate(self, symbol):
"""
{
"errno": 0,
"errmsg": "success",
"result": {
"maker_fee": 0.00025,
"taker_fee":0.00026
}
}
:param symbol: trading pair
:return:
"""
# query trading fee rates
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/rate'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def add_asyncio(self, symbol, type, amount, price):
# place an order (fire-and-forget variant)
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/add'
args['method'] = 'POST'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['type'] = type
args['data']['amount'] = amount
args['data']['price'] = price
print('Asyncio start')
await self.request(args)
# await loop.run_in_executor(None, self.request, args)
async def add(self, symbol, type, amount, price):
"""
{
"errno": 0,
"errmsg": "success",
"result": {
"order_sn": "BL786401542840282676"
}
}
:param symbol: trading pair, e.g. BTC-USDT
:param type: order type: buy-market (market buy), sell-market (market sell), buy-limit (limit buy), sell-limit (limit sell)
:param amount: for limit orders, the order quantity; for market buys, how much quote currency (e.g. USDT) to spend; for market sells, how much base currency (e.g. BTC) to sell
:param price: order price; omit for market orders
:return:
"""
# place an order
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/add'
args['method'] = 'POST'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['type'] = type
args['data']['amount'] = amount
args['data']['price'] = price
result = await self.request(args)
if result['code'] == 200:
# count_amount_1m()
print(symbol + '(' + type + ' )' + '-->' + 'amount:' + str(amount) + ',' + 'price:' + str(price))
print(symbol + '(' + type + ' )' + '-->' + str(json.loads(result['content'].decode('utf-8'))))
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def cancel(self, order_ids):
"""
# Note: a success response only means the cancel request was accepted; check the order details to confirm the cancellation.
{
"errno": 0,
"errmsg": "success",
"result":{
"success":["1","3"],
"failed":["2","4"]
}
}
:param order_ids: order ids, comma-separated for batch cancellation
:return:
"""
# cancel orders
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/cancel'
args['method'] = 'POST'
args['data'] = dict()
args['data']['order_ids'] = order_ids
# args['data']['symbol'] = symbol
result = await self.request(args)
if result['code'] == 200:
re = json.loads(result['content'].decode('utf-8'))
cancel_num = len(re['result']['success'])
print(cancel_num)
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def cancel2(self, symbol):
"""
# Note: a success response only means the cancel request was accepted; check the order details to confirm the cancellation.
{
"errno": 0,
"errmsg": "success",
"result":{
"success":["1","3"],
"failed":["2","4"]
}
}
:param symbol: trading pair whose open orders should be cancelled
:return:
"""
# cancel all orders for a symbol
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/cancel'
args['method'] = 'POST'
args['data'] = dict()
args['data']['symbol'] = symbol
result = await self.request(args)
if result['code'] == 200:
res = json.loads(result['content'].decode('utf-8'))
print(res)
return res
else:
return result
async def currentList(self, symbol):
# Current (open) orders
"""
{
"errno": 0,
"errmsg": "success",
"result": [{
"order_id":121,
"order_sn":"BL123456789987523",
"symbol":"MCO-BTC",
"ctime":"2018-10-02 10:33:33",
"type":"2",
"side":"buy",
"price":"0.123456",
"number":"1.0000",
"total_price":"0.123456",
"deal_number":"0.00000",
"deal_price":"0.00000",
"status":1 17.
}, ...
}
:param symbol: trading pair (required when querying a single pair; omit to query all pairs)
:param type: 1 = buy, 2 = sell; omit to query all
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/currentList'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
# args['data']['type'] = type
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def currentList2(self, symbol, direct='next', limit=100):
# Current (open) orders
"""
{
"errno": 0,
"errmsg": "success",
"result": [{
"order_id":121,
"order_sn":"BL123456789987523",
"symbol":"MCO-BTC",
"ctime":"2018-10-02 10:33:33",
"type":"2",
"side":"buy",
"price":"0.123456",
"number":"1.0000",
"total_price":"0.123456",
"deal_number":"0.00000",
"deal_price":"0.00000",
"status":1 17.
}, ...
}
:param symbol: trading pair (required when querying a single pair; omit to query all pairs)
:param type: 1 = buy, 2 = sell; omit to query all
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/currentList'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['direct'] = direct
args['data']['limit'] = limit
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def currentList3(self, symbol, order_sn=None, direct='next', limit=100):
# Current (open) orders
"""
{
"errno": 0,
"errmsg": "success",
"result": [{
"order_id":121,
"order_sn":"BL123456789987523",
"symbol":"MCO-BTC",
"ctime":"2018-10-02 10:33:33",
"type":"2",
"side":"buy",
"price":"0.123456",
"number":"1.0000",
"total_price":"0.123456",
"deal_number":"0.00000",
"deal_price":"0.00000",
"status":1 17.
}, ...
}
:param symbol: trading pair (required when querying a single pair; omit to query all pairs)
:param type: 1 = buy, 2 = sell; omit to query all
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/currentList'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
args['data']['from'] = order_sn
args['data']['direct'] = direct
args['data']['limit'] = limit
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def historyList(self, symbol, limit, fromid, direct):
# My historical orders
"""
{
"errno": 0,
"errmsg": "success",
"result": [{
"order_id":121,
"order_sn":"BL123456789987523",
"symbol":"MCO-BTC",
"ctime":"2018-10-02 10:33:33",
"type":"2",
"side":"buy",
"price":"0.123456",
"number":"1.0000",
"total_price":"0.123456",
"deal_number":"0.00000",
"deal_price":"0.00000",
"status":1 17.
}, ...
}
:param symbol: trading pair, e.g. BTC-USDT
:param type: 1 = buy, 2 = sell; omit to query all
:param fromid: starting order_id for the query, e.g. 122
:param direct: query direction (default prev); prev: backwards, time (or ID) descending; next: forwards, time (or ID) ascending. Example with IDs 1,2,3,4,5: from=4, prev returns 3,2,1; next returns only 5.
:param limit: page size; default 20, maximum 100
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/historyList'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
# args['data']['type'] = type
args['data']['from'] = fromid
args['data']['direct'] = direct
args['data']['limit'] = limit
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def historyList2(self, symbol, limit):
# My historical orders
"""
{
"errno": 0,
"errmsg": "success",
"result": [{
"order_id":121,
"order_sn":"BL123456789987523",
"symbol":"MCO-BTC",
"ctime":"2018-10-02 10:33:33",
"type":"2",
"side":"buy",
"price":"0.123456",
"number":"1.0000",
"total_price":"0.123456",
"deal_number":"0.00000",
"deal_price":"0.00000",
"status":1 17.
}, ...
}
:param symbol: trading pair, e.g. BTC-USDT
:param type: 1 = buy, 2 = sell; omit to query all
:param fromid: starting order_id for the query, e.g. 122
:param direct: query direction (default prev); prev: backwards, time (or ID) descending; next: forwards, time (or ID) ascending. Example with IDs 1,2,3,4,5: from=4, prev returns 3,2,1; next returns only 5.
:param limit: page size; default 20, maximum 100
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/historyList'
args['method'] = 'GET'
args['data'] = dict()
args['data']['symbol'] = symbol
# args['data']['type'] = type
# args['data']['from'] = fromid
# args['data']['direct'] = direct
args['data']['limit'] = limit
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def detail(self, order_sn):
# Order / trade detail
"""
{
"errno": 0,
"errmsg": "success",
"result":{
"entrust":{
"order_id":121,
"order_sn":"BL123456789987523",
"symbol":"MCO-BTC",
"ctime":"2018-10-02 10:33:33",
"type":"2",
"side":"buy",
"price":"0.123456",
"number":"1.0000",
"total_price":"0.123456",
"deal_number":"0.00000",
"deal_price":"0.00000",
"status":1
},
}
:param order_sn: order number
:return:
"""
args = dict()
args['url'] = 'https://oapi.aofex.io/openApi/entrust/detail'
args['method'] = 'GET'
args['data'] = dict()
args['data']['order_sn'] = order_sn
result = await self.request(args)
if result['code'] == 200:
return json.loads(result['content'].decode('utf-8'))
else:
return result
async def currentList4(self, symbol, number):
current_list = []
res_last = await self.currentList(symbol)
print(res_last)
if res_last.get('result', 0):
length = len(res_last['result']) - 1
last_id = res_last['result'][length]['order_sn']
for i in res_last['result']:
if i['status'] == 1 or i['status'] == 2:
current_list.append(i['order_sn'])
flag = True
while flag:
start_id = last_id
res = await self.currentList3(symbol, start_id, 'prev', 100)
if len(res.get('result', [])) == 1:
break
if res.get('result', 0):
length = len(res['result']) - 1
last_id = res['result'][length]['order_sn']
for i in res['result']:
if i['status'] == 1 or i['status'] == 2:
if i['order_sn'] not in current_list:
current_list.append(i['order_sn'])
if len(current_list) >= number:
break
else:
flag = False
print(current_list)
print('current_list: ', len(current_list))
return current_list
else:
return current_list
async def currentList4_order_details(self, symbol, number):
current_list = []
res_last = await self.currentList(symbol)
if res_last.get('result', 0):
length = len(res_last['result']) - 1
last_id = res_last['result'][length]['order_sn']
for i in res_last['result']:
if i['status'] == 1 or i['status'] == 2:
current_list.append(i)
flag = True
while flag:
start_id = last_id
res = await self.currentList3(symbol, start_id, 'prev', 100)
if len(res.get('result', [])) == 1:
break
if res.get('result', 0):
length = len(res['result']) - 1
last_id = res['result'][length]['order_sn']
for i in res['result']:
if i['status'] == 1 or i['status'] == 2:
if i['order_sn'] not in current_list:
current_list.append(i)
if len(current_list) >= number:
break
else:
flag = False
return current_list
else:
return current_list
async def currentList5(self, symbol, number, side):
current_list = []
res_last = await self.currentList(symbol)
# print(res_last)
if res_last.get('result', 0):
length = len(res_last['result']) - 1
last_id = res_last['result'][length]['order_sn']
for i in res_last['result']:
if (i['status'] == 1 or i['status'] == 2) and i['side'] == side:
current_list.append(i['order_sn'])
if (i['status'] == 1 or i['status'] == 2) and side is None:
current_list.append(i['order_sn'])
flag = True
while flag:
start_id = last_id
res = await self.currentList3(symbol, start_id, 'prev', 100)
if len(res.get('result', [])) == 1:
break
if res.get('result', 0):
length = len(res['result']) - 1
last_id = res['result'][length]['order_sn']
for i in res['result']:
if (i['status'] == 1 or i['status'] == 2) and i['side'] == side:
if i['order_sn'] not in current_list:
current_list.append(i['order_sn'])
if (i['status'] == 1 or i['status'] == 2) and side is None:
if i['order_sn'] not in current_list:
current_list.append(i['order_sn'])
if len(current_list) >= number:
break
else:
flag = False
# print(current_list)
print('current_list: ', len(current_list))
return current_list
else:
return current_list
if __name__ == '__main__':
async def test():
ao = AofexApi()
price = await ao.newest_price('TVC-RLY')
print(price)
await ao.Exit()
asyncio.run(test())
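# A minimal usage sketch (not part of the SDK, never executed) showing how the
# methods above fit together: place a limit order, page through open orders with
# currentList3, then cancel it. The symbol and order values are placeholders; it
# assumes AofexApi() is constructed with valid credentials and that Exit() closes
# the underlying HTTP session, as in test() above.
async def _example_order_flow():
    api = AofexApi()
    try:
        # Place a limit buy; the order number comes back under result.order_sn.
        res = await api.add('BTC-USDT', 'buy-limit', 0.001, 10000)
        order_sn = res.get('result', {}).get('order_sn')
        # Page backwards through open orders, 100 per page (same idea as currentList4).
        page = await api.currentList3('BTC-USDT', order_sn, 'prev', 100)
        print(page.get('result', []))
        # A success response only means the cancel request was accepted (see cancel()).
        if order_sn:
            await api.cancel(order_sn)
    finally:
        await api.Exit()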
|
Aofex-SDK
|
/Aofex_SDK-1.1.1-py3-none-any.whl/Aofex_SDK/Aofex_SDK.py
|
Aofex_SDK.py
|
from __future__ import absolute_import
import os.path
import sys
import webbrowser
#
def pythonpath_init():
"""
Prepare "sys.path" for import resolution.
@return: None.
"""
# Get this file's directory path
my_dir = os.path.dirname(os.path.abspath(__file__))
# Remove some paths from "sys.path" to avoid unexpected import resolution.
# For each path in the list
for path in ['', '.', my_dir]:
# If the path is in "sys.path"
if path in sys.path:
# Remove the path from "sys.path"
sys.path.remove(path)
# Add "src" directory to "sys.path".
# This is the import resolution we want.
# Get "src" directory path
src_dir = os.path.dirname(my_dir)
# If "src" directory path is not in "sys.path"
if src_dir not in sys.path:
# Add "src" directory to "sys.path"
sys.path.insert(0, src_dir)
#
def check_dependency_packages():
"""
Check whether dependency packages have been installed.
Print hint message if a package is not installed.
@return: True if all packages have been installed, otherwise False.
"""
# Whether all dependency packages have been installed
result = True
#
try:
# Import package
import tkinter
# Make linter happy
tkinter = tkinter
except ImportError:
# Get message
msg = 'Error: Package "tkinter" is not installed.\n'
# Print message
sys.stderr.write(msg)
# Set result
result = False
#
try:
# Import package
import win32con
# Make linter happy
win32con = win32con
except ImportError:
# Get message
msg = 'Error: Package "pywin32" is not installed.\n'
# Print message
sys.stderr.write(msg)
# Download page URL
url = 'https://sourceforge.net/projects/pywin32/files/pywin32/'
# Open download page
webbrowser.open(url)
# Set result
result = False
# Return whether all dependency packages have been installed
return result
#
def main(args=None):
"""
Program entry function.
Call "pythonpath_init" to prepare "sys.path" for import resolution.
Then call "main_wrap" to implement functionality.
@param args: Command arguments list.
@return: Exit code.
"""
# If not all dependency packages are installed
if not check_dependency_packages():
# Return non-zero exit code
return 1
# Prepare "sys.path" for import resolution
pythonpath_init()
# Import "main_wrap" function
from aoikregistryeditor.mediator import main_wrap
# Call "main_wrap" function
return main_wrap(args=args)
# If this module is the main module
if __name__ == '__main__':
# Call "main" function
sys.exit(main())
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/__main__.py
|
__main__.py
|
from __future__ import absolute_import
from argparse import ArgumentParser
import sys
from tkinter import Tk
from traceback import format_exc
from .aoikimportutil import load_obj
from .registry_editor import RegistryEditor
from .tkinterutil.label import LabelVidget
#
def get_cmdargs_parser():
"""
Create command arguments parser.
@return: Command arguments parser.
"""
# Create command arguments parser
parser = ArgumentParser()
# Specify arguments
#
menu_conf_uri_default = 'aoikregistryeditor.menu_config::MENU_CONFIG'
parser.add_argument(
'-m', '--menu-conf',
dest='menu_config_uri',
default=menu_conf_uri_default,
metavar='MENU_CONF',
help='Menu config object URI. Default is `{}`.'.format(
menu_conf_uri_default
),
)
#
parser.add_argument(
'--menu-conf-default',
dest='print_menu_conf_default',
action='store_true',
help='Print default menu config module.',
)
#
ui_config_func_uri_default = 'aoikregistryeditor.ui_config::configure_ui'
parser.add_argument(
'-u', '--ui-conf',
dest='ui_config_func_uri',
default=ui_config_func_uri_default,
metavar='UI_CONF',
help='UI config function URI. Default is `{}`.'.format(
ui_config_func_uri_default
),
)
#
parser.add_argument(
'--ui-conf-default',
dest='print_ui_conf_default',
action='store_true',
help='Print default UI config module.',
)
#
field_editor_factory_uri_default = \
'aoikregistryeditor.field_editor_config::field_editor_factory'
parser.add_argument(
'-f', '--field-editor',
dest='field_editor_factory_uri',
default=field_editor_factory_uri_default,
metavar='FACTORY',
help='Field editor factory URI. Default is `{}`.'.format(
field_editor_factory_uri_default
),
)
#
parser.add_argument(
'--field-editor-default',
dest='print_field_editor_config_default',
action='store_true',
help='Print default field editor factory config module.',
)
# Return the command arguments parser
return parser
#
def main_core(args=None, step_func=None):
"""
The main function that implements the core functionality.
@param args: Command arguments list.
@param step_func: A function to set step information for the upper context.
@return: Exit code.
"""
# If step function is not given
if step_func is None:
# Raise error
raise ValueError('Argument `step_func` is not given')
# If step function is given.
# Set step info
step_func(title='Parse command arguments')
# Create command arguments parser
args_parser = get_cmdargs_parser()
# If arguments are not given
if args is None:
# Use command arguments
args = sys.argv[1:]
# Parse command arguments
args = args_parser.parse_args(args)
# If print default menu config module
if args.print_menu_conf_default:
# Set step info
step_func(title='Print default menu config module')
# Import default menu config module
from . import menu_config as config_module
# Print default menu config module's content
sys.stdout.write(open(config_module.__file__).read())
# Exit
return
# If not print default menu config module.
# If print default UI config module
if args.print_ui_conf_default:
# Set step info
step_func(title='Print default UI config module')
# Import default UI config module
from . import ui_config as config_module
# Print default UI config module's content
sys.stdout.write(open(config_module.__file__).read())
# Exit
return
# If not print default UI config module.
# If print default field editor factory config module
if args.print_field_editor_config_default:
# Set step info
step_func(title='Print default field editor factory config module')
# Import default field editor config module
from . import field_editor_config as config_module
# Print default field editor config module's content
sys.stdout.write(open(config_module.__file__).read())
# Exit
return
# If not print default field editor factory config module.
# Set step info
step_func(title='Create TK root')
# Create TK root
tk = Tk()
# Add window title
tk.title('AoikRegistryEditor')
# Set step info
step_func(title='Create status bar label')
# Create status bar label
status_bar_label = LabelVidget(master=tk)
# Create status bar set function
def status_bar_set(text):
status_bar_label.config(text=text)
# Set step info
step_func(title='Load field editor factory')
# Get field editor factory function URI
field_editor_factory_uri = args.field_editor_factory_uri
# Load field editor factory function
field_editor_config_module, field_editor_factory = load_obj(
field_editor_factory_uri,
mod_name='aoikregistryeditor._field_editor_config',
retn_mod=True,
)
# Set step info
step_func(title='Create registry editor')
# Create registry editor
editor = RegistryEditor(
field_editor_factory=field_editor_factory,
status_bar_set=status_bar_set,
master=tk,
)
# Set step info
step_func(title='Load menu config')
# Get menu config URI
menu_config_uri = args.menu_config_uri
# Load menu config
menu_config_module, menu_config = load_obj(
menu_config_uri,
mod_name='aoikregistryeditor._menu_config',
retn_mod=True,
)
# Set step info
step_func(title='Create menu tree')
# Create menu tree
menutree = editor.menutree_create(specs=menu_config)
# Set step info
step_func(title='Add menu tree to root window')
# Add the menu tree's top menu to root window
tk.config(menu=menutree.menu_top())
# Set step info
step_func(title='Get UI config info dict')
# Get UI config info dict
ui_info = dict(
tk=tk,
menutree=menutree,
status_bar_label=status_bar_label,
editor=editor,
path_bar_label=editor._path_bar_label,
path_bar=editor._path_bar,
child_keys_labelframe=editor._child_keys_labelframe,
child_keys_listbox=editor._child_keys_listbox,
fields_labelframe=editor._fields_labelframe,
fields_listbox=editor._fields_listbox,
field_editor_labelframe=editor._field_editor_labelframe,
field_add_label=editor._field_add_label,
field_del_label=editor._field_del_label,
field_load_label=editor._field_load_label,
field_save_label=editor._field_save_label,
field_add_dialog=editor._field_add_dialog,
)
# Set step info
step_func(title='Load UI config function')
# Get UI config function URI
ui_config_func_uri = args.ui_config_func_uri
# Load UI config function
ui_config_module, ui_config_func = load_obj(
ui_config_func_uri,
mod_name='aoikregistryeditor._ui_config',
retn_mod=True,
)
# Set step info
step_func(title='Call UI config function')
# Call UI config function
ui_config_func(ui_info)
# Set step info
step_func(title='Run TK event loop')
# Run TK event loop
tk.mainloop()
#
def main_wrap(args=None):
"""
The main function that provides exception handling.
Call "main_core" to implement the core functionality.
@param args: Command arguments list.
@return: Exit code.
"""
# A dict that contains step info
step_info = {
'title': '',
'exit_code': 0
}
# A function that updates step info
def step_func(title=None, exit_code=None):
# If title is not None
if title is not None:
# Update title
step_info['title'] = title
# If exit code is not None
if exit_code is not None:
# Update exit code
step_info['exit_code'] = exit_code
#
try:
# Call "main_core" to implement the core functionality
return main_core(args=args, step_func=step_func)
# Catch keyboard interrupt
except KeyboardInterrupt:
# Return without error
return 0
# Catch other exceptions
except Exception:
# Get step title
step_title = step_info.get('title', '')
# Get traceback
tb_msg = format_exc()
# If step title is not empty
if step_title:
# Get message
msg = '# Error: {}\n---\n{}---\n'.format(step_title, tb_msg)
else:
# Get message
msg = '# Error\n---\n{}---\n'.format(tb_msg)
# Output message
sys.stderr.write(msg)
# Get exit code
exit_code = step_info.get('exit_code', 1)
# Return exit code
return exit_code
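# A minimal sketch (not part of the program, purely illustrative) of the
# step-tracking pattern used by "main_wrap" above: the callee reports its current
# step through "step_func", so the caller can prefix any traceback with the step
# title that was active when the failure happened.
def _step_pattern_demo():
    step_info = {'title': ''}

    def step_func(title=None):
        if title is not None:
            step_info['title'] = title

    try:
        step_func(title='Open file')
        raise IOError('simulated failure inside this step')
    except Exception:
        # The caller now knows the failure happened during the "Open file" step.
        return 'Error during step: {}'.format(step_info['title'])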
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/mediator.py
|
mediator.py
|
from __future__ import absolute_import
import os.path
from tkinter import PhotoImage
from tkinter.ttk import Sizegrip
from tkinter.ttk import Style
import aoikregistryeditor.static
#
def configure_ui(info):
"""
UI config function.
@param info: UI config info dict.
@return: None.
"""
# Background color
bg_color = 'white smoke'
# Create ttk style object
STYLE = Style()
# Configure TFrame style's background
STYLE.configure(
'TFrame',
background=bg_color,
)
# Configure TLabelframe style's background
STYLE.configure(
'TLabelframe',
background=bg_color,
)
# Configure TLabelframe.Label style's background
STYLE.configure(
'TLabelframe.Label',
background=bg_color,
)
# Configure TLabel style's background
STYLE.configure(
'TLabel',
background=bg_color,
)
# Configure TRadiobutton style's background
STYLE.configure(
'TRadiobutton',
background=bg_color,
)
# Get TK root window
tk = info['tk']
# Set window title
tk.title('AoikRegistryEditor')
# Set window geometry
tk.geometry('1280x720')
# Configure layout weights for children.
# Row 0 is for registry editor.
tk.rowconfigure(0, weight=1)
# Row 1 is for status bar
tk.rowconfigure(1, weight=0)
# Use only one column
tk.columnconfigure(0, weight=1)
# Get menu tree
menutree = info['menutree']
# Add `File` menu
menutree.add_menu(pid='/', id='File', index=0)
# Add `Exit` command
menutree.add_command(pid='/File', id='Exit', command=tk.quit)
# Get status bar label
status_bar_label = info['status_bar_label']
# Set status bar label's main frame's height
status_bar_label.widget().config(height=20)
# Set status bar label's background
status_bar_label.config(background='#F0F0F0')
# Lay out the status bar label
status_bar_label.grid(
in_=tk,
row=2,
column=0,
sticky='NSEW',
padx=(5, 0),
)
# Create size grip
sizegrip = Sizegrip(master=tk)
# Lay out the size grip
sizegrip.grid(
in_=tk,
row=2,
column=0,
sticky='E',
)
# Get registry editor
editor = info['editor']
# Lay out the registry editor
editor.grid(
row=0,
column=0,
sticky='NSEW',
)
# Set registry editor's inner padding
editor.config(padding=10)
# Get path bar label
path_bar_label = info['path_bar_label']
# Get static files' directory path
static_dir = os.path.dirname(
os.path.abspath(aoikregistryeditor.static.__file__)
)
# Get path bar label's normal state image file path
image_path = os.path.join(static_dir, 'path_bar_label_normal.png')
# Load path bar label's normal state image file
path_bar_label._normal_image = PhotoImage(file=image_path)
# Get path bar label's disabled state image file path
image_path = os.path.join(static_dir, 'path_bar_label_disabled.png')
# Load path bar label's disabled state image file
path_bar_label._disabled_image = PhotoImage(file=image_path)
# Set path bar label's images
path_bar_label.config(
image=(
path_bar_label._normal_image,
'disabled', path_bar_label._disabled_image,
)
)
# Get path bar textfield
path_bar = info['path_bar']
# Set path bar textfield's font
path_bar.config(font=('Consolas', 12))
# Set path bar textfield's outer padding
path_bar.grid(padx=(3, 0))
# Get child keys labelframe
child_keys_labelframe = info['child_keys_labelframe']
# Set child keys labelframe's outer padding
child_keys_labelframe.grid(pady=(5, 0))
# Set child keys labelframe's inner padding
child_keys_labelframe.config(padding=5)
# Get child keys listbox
child_keys_listbox = info['child_keys_listbox']
# Set child keys listbox's font
child_keys_listbox.config(font=('Consolas', 12))
# Get fields labelframe
fields_labelframe = info['fields_labelframe']
# Set fields labelframe's outer padding
fields_labelframe.grid(padx=(10, 0), pady=(5, 0))
# Set fields labelframe's inner padding
fields_labelframe.config(padding=5)
# Get fields listbox
fields_listbox = info['fields_listbox']
# Set fields listbox's font
fields_listbox.config(font=('Consolas', 12))
# Create event handler to set fields listbox background
def _fields_listbox_set_background():
# If fields listbox is not empty
if fields_listbox.size() > 0:
# Set background color for non-empty listbox
fields_listbox.config(background='white')
# If fields listbox is empty
else:
# Set background color for empty listbox
fields_listbox.config(background='gainsboro')
# Call the event handler to initialize the background color
_fields_listbox_set_background()
# Add the event handler to fields listbox
fields_listbox.handler_add(
fields_listbox.ITEMS_CHANGE_DONE,
_fields_listbox_set_background
)
# Get field editor labelframe
field_editor_labelframe = info['field_editor_labelframe']
# Set field editor labelframe's outer padding
field_editor_labelframe.grid(padx=(10, 0), pady=(5, 0))
# Set field editor labelframe's inner padding
field_editor_labelframe.config(padding=5)
# Get field add label
field_add_label = info['field_add_label']
# Set field add label's main frame size
field_add_label.widget().config(width=40, height=40)
# Get field add label's normal state image file path
image_path = os.path.join(static_dir, 'field_add_normal.png')
# Load field add label's normal state image file
field_add_label._normal_image = PhotoImage(file=image_path)
# Get field add label's active state image file path
image_path = os.path.join(static_dir, 'field_add_active.png')
# Load field add label's active state image file
field_add_label._active_image = PhotoImage(file=image_path)
# Get field add label's hover state image file path
image_path = os.path.join(static_dir, 'field_add_hover.png')
# Load field add label's hover state image file
field_add_label._hover_image = PhotoImage(file=image_path)
# Set field add label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_add_label.config(
image=(
field_add_label._normal_image,
'!disabled active', field_add_label._active_image,
'!disabled hover', field_add_label._hover_image,
)
)
# Get field delete label
field_del_label = info['field_del_label']
# Set field delete label's main frame size
field_del_label.widget().config(width=40, height=40)
# Get field delete label's normal state image file path
image_path = os.path.join(static_dir, 'field_del_normal.png')
# Load field delete label's normal state image file
field_del_label._normal_image = PhotoImage(file=image_path)
# Get field delete label's active state image file path
image_path = os.path.join(static_dir, 'field_del_active.png')
# Load field delete label's active state image file
field_del_label._active_image = PhotoImage(file=image_path)
# Get field delete label's hover state image file path
image_path = os.path.join(static_dir, 'field_del_hover.png')
# Load field delete label's hover state image file
field_del_label._hover_image = PhotoImage(file=image_path)
# Set field delete label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_del_label.config(
image=(
field_del_label._normal_image,
'!disabled active', field_del_label._active_image,
'!disabled hover', field_del_label._hover_image,
)
)
# Get field load label
field_load_label = info['field_load_label']
# Set field load label's main frame size
field_load_label.widget().config(width=40, height=40)
# Get field load label's normal state image file path
image_path = os.path.join(static_dir, 'field_load_normal.png')
# Load field load label's normal state image file
field_load_label._normal_image = PhotoImage(file=image_path)
# Get field load label's active state image file path
image_path = os.path.join(static_dir, 'field_load_active.png')
# Load field load label's active state image file
field_load_label._active_image = PhotoImage(file=image_path)
# Get field load label's hover state image file path
image_path = os.path.join(static_dir, 'field_load_hover.png')
# Load field load label's hover state image file
field_load_label._hover_image = PhotoImage(file=image_path)
# Set field load label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_load_label.config(
image=(
field_load_label._normal_image,
'!disabled active', field_load_label._active_image,
'!disabled hover', field_load_label._hover_image,
)
)
# Get field save label
field_save_label = info['field_save_label']
# Set field save label's main frame size
field_save_label.widget().config(width=40, height=40)
# Get field save label's normal state image file path
image_path = os.path.join(static_dir, 'field_save_normal.png')
# Load field save label's normal state image file
field_save_label._normal_image = PhotoImage(file=image_path)
# Get field save label's active state image file path
image_path = os.path.join(static_dir, 'field_save_active.png')
# Load field save label's active state image file
field_save_label._active_image = PhotoImage(file=image_path)
# Get field save label's hover state image file path
image_path = os.path.join(static_dir, 'field_save_hover.png')
# Load field save label's hover state image file
field_save_label._hover_image = PhotoImage(file=image_path)
# Set field save label's images.
# Notice `disabled` state is excluded from other states.
# Notice `active` state takes precedence over `hover` state.
field_save_label.config(
image=(
field_save_label._normal_image,
'!disabled active', field_save_label._active_image,
'!disabled hover', field_save_label._hover_image,
)
)
# Get field add dialog
field_add_dialog = info['field_add_dialog']
# Set field add dialog's geometry
field_add_dialog.toplevel().geometry('300x110')
# Set field add dialog to not resizable
field_add_dialog.toplevel().resizable(width=False, height=False)
# Set field add dialog's background
field_add_dialog.toplevel().config(background=bg_color)
# Set field add dialog's main frame's outer padding
field_add_dialog.main_frame().grid(padx=5, pady=5)
# Set field add dialog's confirm button's outer padding
field_add_dialog.confirm_button().grid(pady=(15, 0))
# Set field add dialog's cancel button's outer padding
field_add_dialog.cancel_button().grid(pady=(15, 0))
# Set field add dialog's field add type label's outer padding
editor._field_add_type_label.grid(
pady=(10, 0),
)
# Set field add dialog's field add type radio buttons frame's outer padding
editor._field_add_type_rbuttons_frame.grid(
padx=(3, 0),
pady=(10, 0),
)
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/ui_config.py
|
ui_config.py
|
from __future__ import absolute_import
import pywintypes
from win32api import RegCloseKey
from win32api import RegDeleteValue
from win32api import RegEnumKeyEx
from win32api import RegEnumValue
from win32api import RegOpenKeyEx
from win32api import RegQueryValueEx
from win32api import RegSetValueEx
from win32con import KEY_ALL_ACCESS
from win32con import KEY_READ
from win32con import KEY_WOW64_64KEY
from win32con import HKEY_CLASSES_ROOT
from win32con import HKEY_CURRENT_CONFIG
from win32con import HKEY_CURRENT_USER
from win32con import HKEY_LOCAL_MACHINE
from win32con import HKEY_USERS
from win32con import HWND_BROADCAST
from win32con import SMTO_ABORTIFHUNG
from win32con import WM_SETTINGCHANGE
from win32gui import SendMessageTimeout
from .eventor import Eventor
#
def send_WM_SETTINGCHANGE():
"""
Send WM_SETTINGCHANGE to notify registry changes.
@return: None.
"""
#
try:
# Send WM_SETTINGCHANGE to notify registry changes
SendMessageTimeout(
HWND_BROADCAST,
WM_SETTINGCHANGE,
0,
'Environment',
SMTO_ABORTIFHUNG, # Return fast if receiving thread hangs
10, # Timeout in milliseconds
)
# If have error
except pywintypes.error:
# Ignore
pass
# Map registry hive name to hive integer
_HIVE_NAME_TO_INT = {
'HKEY_CLASSES_ROOT': HKEY_CLASSES_ROOT,
'HKEY_CURRENT_CONFIG': HKEY_CURRENT_CONFIG,
'HKEY_CURRENT_USER': HKEY_CURRENT_USER,
'HKEY_LOCAL_MACHINE': HKEY_LOCAL_MACHINE,
'HKEY_USERS': HKEY_USERS,
}
#
def _hive_name_to_int(name):
"""
Map registry hive name to hive integer.
@param name: Hive name.
@return: Hive integer, or None if the hive name not exists.
"""
# Map registry hive name to hive integer
return _HIVE_NAME_TO_INT.get(name, None)
#
def _regkey_handle_get(path, mask=None):
"""
Get registry key handle.
@param path: Registry key path.
@param mask: Permission mask.
@return: Registry key handle, or raise error if failed.
"""
# Split the registry key path into hive name and no-hive path
hive_name, _, nohive_path = path.partition('\\')
# If no-hive path is empty,
# it means path given is solely hive name.
if nohive_path == '':
# Set no-hive path to None
nohive_path = None
# Get hive integer
hive_int = _hive_name_to_int(hive_name)
# If hive integer is not found
if hive_int is None:
# Get error message
msg = 'Invalid registry key path: {}'.format(path)
# Raise error
raise ValueError(msg)
# If hive integer is found.
# If permission mask is not given
if mask is None:
# Set default permission mask
mask = KEY_ALL_ACCESS | KEY_WOW64_64KEY
# Get registry key handle.
# May raise `pywintypes.error`.
regkey_handle = RegOpenKeyEx(
hive_int,
nohive_path,
0, # Always 0
mask,
)
# Return the registry key handle
return regkey_handle
#
def regkey_get(path, mask=None):
"""
Create RegKey object for given registry key path.
@param path: Registry key path.
@param mask: Permission mask.
@return: RegKey object, or None if failed getting the registry key handle.
"""
# If the registry key path is root key path
if path == RegKey.ROOT:
# Return RootRegKey object
return RootRegKey()
# If the registry key path is not root key path.
#
try:
# Get registry key handle
regkey_handle = _regkey_handle_get(path, mask=mask)
# If have error
except Exception:
# Set registry key handle to None
regkey_handle = None
# If failed getting registry key handle
if regkey_handle is None:
# Return None
return None
# If not failed getting registry key handle.
# Create RegKey object
regkey = RegKey(
handle=regkey_handle,
path=path,
)
# Return the RegKey object
return regkey
#
def regkey_exists(path):
"""
Test whether given registry key path exists, and user permissions are
granted to read the registry key.
@param path: Registry key path.
@return: Boolean.
"""
# Create RegKey object for given registry key path
regkey = regkey_get(path, mask=KEY_READ)
# If the RegKey object is created,
# it means the registry key path exists.
if regkey is not None:
# Close the RegKey object
regkey.close()
# Return True
return True
# If the RegKey object is not created,
# it means the registry key path not exists,
# or user permissions are not granted to read the registry key.
else:
# Return False
return False
#
def regkey_parent_path(path):
"""
Get given registry key path's parent registry key path.
@param path: Registry key path.
@return: Parent registry key path.
"""
# If the key path is root key path or hive key path
if path == RegKey.ROOT or path in RegKey.HKEYS:
# Return the root key path
return RegKey.ROOT
# If the key path is not root key path or hive key path
# Get parent key path.
# Assume the given path has path separator in it.
parent_path, sep, child_part = path.rpartition('\\')
# Return the parent key path
return parent_path
#
def regkey_child_names(path):
"""
Get given registry key path's child key names list.
@param path: Registry key path.
@return: Child key names list.
"""
# If the key path is root key path
if path == RegKey.ROOT:
# Return hive key names
return RegKey.HKEYS
# If the key path is not root key path.
# Create RegKey object for given registry key path
regkey = regkey_get(path)
# If the RegKey object is not created
if regkey is None:
# Return None
return None
# If the RegKey object is created.
else:
# Return child key names list
return regkey.child_names()
#
class RegVal(object):
"""
RegVal represents a registry key's field.
"""
def __init__(self, regkey, name, type):
"""
Initialize object.
@param regkey: RegKey object of the registry key containing the field.
@param name: Field name.
@param type: Field type.
@return: None.
"""
# RegKey object
self._regkey = regkey
# Field name
self._name = name
# Field type
self._type = type
def __str__(self):
"""
Get string of the object.
@return: String of the object.
"""
# Return the field name
return self._name
def name(self):
"""
Get field name.
@return: Field name
"""
# Return the field name
return self._name
def name_set(self, name):
"""
Set field name.
@param name: Field name to set.
@return: None.
"""
# Set the field name
self._name = name
def type(self):
"""
Get field type.
@return: Field type
"""
# Return the field type
return self._type
def type_set(self, type):
"""
Set field type.
@param type: Field type to set.
@return: None.
"""
# Set the field type
self._type = type
def data(self):
"""
Get field data.
@return: Field data.
"""
# Read field data from registry.
# Return the field data.
return self._regkey.field_data(name=self._name)
def data_set(self, data):
"""
Set field data.
@param data: Field data to set.
@return: None
"""
# Write field data to registry
success = self._regkey.field_write(
name=self._name,
type=self._type,
data=data,
)
# If have no success
if not success:
# Raise error
raise ValueError(data)
def delete(self):
"""
Delete the field.
@return: Whether the operation is successful.
"""
# Delete the field.
# Return whether the operation is successful.
return self._regkey.field_delete(self._name)
#
class RegKey(object):
"""
RegKey represents a registry key.
"""
# Root path
ROOT = ''
# Hive names list
HKEYS = tuple(_HIVE_NAME_TO_INT.keys())
def __init__(self, handle, path):
"""
Initialize object.
@param handle: Registry key handle.
@param path: Registry key path.
@return: None.
"""
# Registry key handle
self._handle = handle
# Registry key path
self._path = path
def __str__(self):
"""
Get string of the object.
@return: String of the object.
"""
# Return the key path
return self._path
def path(self):
"""
Get key path.
@return: Key path
"""
# Return the key path
return self._path
def child_names(self):
"""
Get child key names list.
@return: Child key names list.
"""
# Ensure registry key handle is set
assert self._handle
# Child key names list
child_name_s = []
# Get child key info tuples
info_tuple_s = RegEnumKeyEx(self._handle)
# For each child key info tuple
for info_tuple in info_tuple_s:
# Get child key name
child_name = info_tuple[0]
# Add the child key name to child key names list
child_name_s.append(child_name)
# Return the child key names list
return child_name_s
def child_paths(self):
"""
Get child key paths list.
Notice this method assumes the key is not root key.
@return: Child key paths list.
"""
# Return child key paths list
return [self._path + '\\' + name for name in self.child_names()]
def fields(self):
"""
Get key fields list. Each field is a RegVal object.
@return: Key fields list.
"""
# Ensure registry key handle is set
assert self._handle
# Fields list
field_s = []
# Field index
field_index = 0
# For each field index
while True:
#
try:
# Get field name and type.
# May raise `pywintypes.error`.
field_name, _, field_type = RegEnumValue(
self._handle,
field_index,
)
# If have error,
# it means no more field
except pywintypes.error:
# Stop the loop
break
# If have no error.
# Create RegVal object
field = RegVal(
regkey=self,
name=field_name,
type=field_type,
)
# Add the RegVal object to fields list
field_s.append(field)
# Increment field index
field_index += 1
# Return the fields list
return field_s
def _field_data_type_tuple(self, name):
"""
Get field data and type tuple: (data, type).
@return: Field data and type tuple: (data, type), or None if have
error.
"""
# Ensure registry key handle is set
assert self._handle
#
try:
# Return field data and type tuple: (data, type)
return RegQueryValueEx(self._handle, name)
# If have error
except pywintypes.error:
# Return None
return None
def field_type(self, name):
"""
Get field type.
@param name: Field name.
@return: Field type, or None if have error.
"""
# Get field data and type tuple
data_type_tuple = self._field_data_type_tuple(name)
# If have error
if data_type_tuple is None:
# Return None
return None
# If have no error
else:
# Get the field type
_, field_type = data_type_tuple
# Return the field type
return field_type
def field_data(self, name):
"""
Get field data.
@param name: Field name.
@return: Field data, or None if have error.
"""
# Get field data and type tuple
data_type_tuple = self._field_data_type_tuple(name)
# If have error
if data_type_tuple is None:
# Return None
return None
# If have no error
else:
# Get the field data
field_data, _ = data_type_tuple
# Return the field data
return field_data
def field_write(self, name, type, data):
"""
Write field.
@param name: Field name.
@param type: Field type.
@param data: Field data.
@return: Whether the operation is successful.
"""
# Ensure registry key handle is set
assert self._handle
#
try:
# Write field
RegSetValueEx(
self._handle,
name,
0,
type,
data,
)
# Send WM_SETTINGCHANGE to notify registry changes
send_WM_SETTINGCHANGE()
# If have no error.
# Return the operation is successful
return True
# If have error
except pywintypes.error:
# Return the operation is not successful
return False
def field_delete(self, name):
"""
Delete field.
@param name: Field name.
@return: Whether the operation is successful.
"""
# Ensure registry key handle is set
assert self._handle
#
try:
# Delete field
RegDeleteValue(
self._handle,
name,
)
# Send WM_SETTINGCHANGE to notify registry changes
send_WM_SETTINGCHANGE()
# If have no error.
# Return the operation is successful
return True
# If have error
except pywintypes.error:
# Return the operation is not successful
return False
def close(self):
"""
Close the registry key handle.
@return: None.
"""
# If the registry key handle is closed
if self.closed():
# Raise error
raise ValueError('Already closed')
# If the registry key handle is not closed
else:
# Close the registry key handle
RegCloseKey(self._handle)
# Set the registry key handle to None
self._handle = None
def closed(self):
"""
Test whether the registry key handle is closed.
@return: Boolean.
"""
# Test whether the registry key handle is closed
return self._handle is None
#
class RootRegKey(RegKey):
"""
RootRegKey represents registry root key that contains the hive keys.
"""
def __init__(self):
"""
Initialize object.
@return: None.
"""
# Initialize RegKey.
# Registry key handle is None for root key.
# Registry key path is empty for root key.
RegKey.__init__(self, handle=None, path='')
def child_names(self):
"""
Get child key names list.
@return: Child key names list.
"""
# Return hive names for root key
return RegKey.HKEYS
def child_paths(self):
"""
Get child key paths list.
@return: Child key paths list.
"""
# Return hive names for root key
return RegKey.HKEYS
def fields(self):
"""
Get key fields list. Each field is a RegVal object.
@return: Key fields list.
"""
# Return empty list for root key
return []
def field_type(self, name):
# Raise error for root key
raise ValueError("Root key has no fields.")
def field_data(self, name):
# Raise error for root key
raise ValueError("Root key has no fields.")
def field_write(self, name, type, data):
# Raise error for root key
raise ValueError("Root key has no fields.")
def close(self):
"""
Close the registry key handle.
@return: None.
"""
# Do nothing for root key
pass
def closed(self):
"""
Test whether the registry key handle is closed.
@return: Boolean.
"""
# Return False for root key
return False
#
class RegKeyPathNavigator(Eventor):
"""
RegKeyPathNavigator has an active registry key path, and provides methods
to change the active registry key path.
"""
# Event notified when active key path is to be changed
PATH_CHANGE_SOON = 'PATH_CHANGE_SOON'
# Event notified when active key path is changed
PATH_CHANGE_DONE = 'PATH_CHANGE_DONE'
# Registry root key path
ROOT = ''
#
def __init__(self, path=None):
"""
Initialize object.
@param path: Active key path. Default is root key path.
@return: None.
"""
# Initialize Eventor
Eventor.__init__(self)
# Active key path
self._path = path if path is not None else self.ROOT
# Go to the active path
self.go_to_path(self._path)
def regkey(self):
"""
Get RegKey object for the active key path.
@return: RegKey object for the active key path.
"""
# Return RegKey object for the active key path
return regkey_get(self.path())
def path(self):
"""
Get the active key path.
@return: Active key path.
"""
# Return the active key path
return self._path
def parent_path(self):
"""
Get the active key path's parent key path.
@return: Active key path's parent key path.
"""
# Return the active key path's parent key path
return regkey_parent_path(self._path)
def child_path(self, child_name):
"""
Get the active key path's child key path, given the child key name.
@param child_name: Child key name.
@return: Active key path's child key path.
"""
# If the active key path is root key path
if self._path == self.ROOT:
# Use child name as child path
child_path = child_name
# If the active key path is not root key path
else:
# Add separator between parent key path and child key name
child_path = self._path + '\\' + child_name
# Return the child key path
return child_path
def child_names(self):
"""
Get the active key path's child key names list.
@return: Active key path's child key names list.
"""
# Return the active key path's child key names list
return regkey_child_names(self._path)
def go_to_path(self, path, check=False):
"""
Go to given key path.
@param path: Key path to go to.
@param check: Whether check if the key path exists, and raise error if
the key path not exists.
@return: New active key path.
"""
# If check key path
if check:
# If the key path not exists
if not regkey_exists(path):
# Raise error
raise ValueError(path)
# If the key path exists.
# Notify pre-change event
self.handler_notify(self.PATH_CHANGE_SOON, self)
# Set new active path
self._path = path
# Notify post-change event
self.handler_notify(self.PATH_CHANGE_DONE, self)
# Return the new active path
return self._path
def go_to_root(self, check=False):
"""
Go to root key path.
@param check: Whether check if the key path exists, and raise error if
the key path not exists.
@return: New active key path.
"""
# Go to root key path
return self.go_to_path(self.ROOT, check=check)
def go_to_parent(self, check=False):
"""
Go to the active key path's parent key path.
@param check: Whether check if the key path exists, and raise error if
the key path not exists.
@return: New active key path.
"""
# Get the active key path's parent key
parent_path = self.parent_path()
# Go to the active key path's parent key
return self.go_to_path(parent_path, check=check)
def go_to_child(self, child_name, check=False):
"""
Go to the active key path's child key path, given the child key name.
@param child_name: Child key name.
@param check: Whether check if the key path exists, and raise error if
the key path not exists.
@return: New active key path.
"""
# Get the active key path's child key path
child_path = self.child_path(child_name)
# Go to the active key path's child key path
return self.go_to_path(child_path, check=check)
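# A minimal usage sketch (not part of the module, never executed): open a key
# read-only, list its child keys and fields, then navigate with
# RegKeyPathNavigator. It assumes the HKEY_CURRENT_USER\Environment key exists,
# as it does on a standard Windows installation.
def _registry_usage_demo():
    regkey = regkey_get('HKEY_CURRENT_USER\\Environment', mask=KEY_READ)
    if regkey is None:
        return
    try:
        # Child key names directly under the key
        print(regkey.child_names())
        # Each field is a RegVal object wrapping name, type and data
        for field in regkey.fields():
            print(field.name(), field.type(), field.data())
    finally:
        regkey.close()
    # The navigator keeps an active path and notifies PATH_CHANGE_* handlers on change
    navigator = RegKeyPathNavigator(path='HKEY_CURRENT_USER')
    navigator.go_to_child('Environment', check=True)
    print(navigator.path())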
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/registry.py
|
registry.py
|
from __future__ import absolute_import
from argparse import ArgumentTypeError
import itertools
import re
import sys
#/
__version__ = '0.2'
#/
def str_nonempty(txt):
if txt != '':
return txt
else:
raise ArgumentTypeError('Empty value is not allowed.')
#/
def str_strip_nonempty(txt):
#/
txt = txt.strip()
#/
if txt != '':
return txt
else:
raise ArgumentTypeError('Empty value is not allowed.')
#/
def bool_0or1(txt):
if txt == '0':
return False
elif txt == '1':
return True
else:
raise ArgumentTypeError('|%s| is not 0 or 1.' % txt)
#/
def float_lt0(txt):
try:
val = float(txt)
assert val < 0
except Exception:
raise ArgumentTypeError('|%s| is not a negative number.' % txt)
return val
#/
def float_le0(txt):
try:
val = float(txt)
assert val <= 0
except Exception:
raise ArgumentTypeError('|%s| is not zero or a negative number.' % txt)
return val
#/
def float_gt0(txt):
try:
val = float(txt)
assert val > 0
except Exception:
raise ArgumentTypeError('|%s| is not a positive number.' % txt)
return val
#/
def float_ge0(txt):
try:
val = float(txt)
assert val >= 0
except Exception:
raise ArgumentTypeError('|%s| is not zero or a positive number.' % txt)
return val
#/
def int_lt0(txt):
try:
val = int(txt)
assert val < 0
except Exception:
raise ArgumentTypeError('|%s| is not a negative integer.' % txt)
return val
#/
def int_le0(txt):
try:
val = int(txt)
assert val <= 0
except Exception:
raise ArgumentTypeError('|%s| is not zero or a negative integer.' % txt)
return val
#/
def int_gt0(txt):
try:
val = int(txt)
assert val > 0
except Exception:
raise ArgumentTypeError('|%s| is not a positive integer.' % txt)
return val
#/
def int_ge0(txt):
try:
val = int(txt)
assert val >= 0
except Exception:
raise ArgumentTypeError('|%s| is not zero or a positive integer.' % txt)
return val
#/
def ensure_exc(parser, spec, args=None):
#/
if args is None:
args = sys.argv[1:]
#/
if isinstance(spec, list):
spec_s = spec
elif isinstance(spec, tuple):
spec_s = [spec]
else:
assert False, spec
#/
for spec in spec_s:
#/
spec_len = len(spec)
#/
if spec_len < 2:
continue
#/
#assert spec_len >= 2
if spec_len == 2:
#/
s0, s1 = spec
#/ if is special syntax e.g. ['-a', ['-b', '-c']]
if isinstance(s1, (list, tuple)):
#/ transform to pairs [('-a', '-b'), ('-a', '-c')]
pair_s = [(s0, x) for x in s1]
#/ if is regular syntax e.g. ['-a', '-b']
else:
#/ transform to pairs [('-a', '-b')]
pair_s = [spec]
#/ if is regular syntax e.g. ['-a', '-b', '-c']
else:
#/ transform to pairs [('-a', '-b'), ('-a', '-c'), ('-b', '-c')]
pair_s = list(itertools.combinations(spec, 2))
#/
for pair in pair_s:
#/
arg_a, arg_b = pair
arg_a_rec = re.compile('^%s($|=|[0-9])' % arg_a)
arg_b_rec = re.compile('^%s($|=|[0-9])' % arg_b)
#/
if any(map(lambda x: bool(arg_a_rec.search(x)), args))\
and any(map(lambda x: bool(arg_b_rec.search(x)), args)):
#/
msg = 'argument %s: not allowed with argument %s' % (arg_a, arg_b)
parser.error(msg)
## raise error
#/
def ensure_one_arg_specs_to_arg_names(specs):
#/
arg_name_s = []
for arg_spec_x in specs:
if isinstance(arg_spec_x, str):
#/
arg_name_s.append(arg_spec_x)
#/
elif isinstance(arg_spec_x, (list, tuple)):
#/
arg_name_s.append(arg_spec_x[0])
#/
else:
assert False, arg_spec_x
#/
return arg_name_s
#/
def ensure_one(parser, spec, args=None):
#/
if args is None:
args = sys.argv[1:]
#/
if isinstance(spec, list):
spec_s = spec
elif isinstance(spec, tuple):
spec_s = [spec]
else:
assert False, spec
#/
for spec in spec_s:
#/
spec_pass = False
#/
arg_spec_s = spec
for arg_spec in arg_spec_s:
#/
sub_spec = None
#/
if isinstance(arg_spec, str):
#/
arg_name = arg_spec
sub_spec = None
#/
elif isinstance(arg_spec, (list, tuple)):
#/
arg_name = arg_spec[0]
sub_spec = arg_spec[1]
#/
else:
assert False, arg_spec
#/
arg_name_rec = re.compile('^%s($|=|[0-9])' % arg_name)
#/
arg_name_exists = any(map(lambda x: bool(arg_name_rec.search(x)), args))
if arg_name_exists:
#/
if isinstance(arg_spec_s, tuple):
#/
exc_arg_name_s = ensure_one_arg_specs_to_arg_names(arg_spec_s)
#/
exc_spec = tuple(exc_arg_name_s)
#/
ensure_exc(parser=parser, spec=exc_spec, args=args)
#/
if sub_spec is not None:
ensure_spec(parser=parser, spec=sub_spec, args=args)
#/
spec_pass = True
break
#/
if not spec_pass:
arg_name_s = ensure_one_arg_specs_to_arg_names(arg_spec_s)
msg = """one of the arguments %s is required""" % (', '.join(arg_name_s))
parser.error(msg)
## raise error
#/
def ensure_two(parser, spec, args=None):
#/
if args is None:
args = sys.argv[1:]
#/
if isinstance(spec, list):
spec_s = spec
elif isinstance(spec, tuple):
spec_s = [spec]
else:
assert False, spec
#/
for spec in spec_s:
#/
arg_a_spec, arg_b_spec = spec
#/
if isinstance(arg_a_spec, (list, tuple)):
arg_a_s = arg_a_spec
else:
arg_a_s = [arg_a_spec]
#/
for arg_a in arg_a_s:
#/
arg_a_rec = re.compile('^%s($|=|[0-9])' % arg_a)
#/
arg_a_exists = any(bool(arg_a_rec.search(arg)) for arg in args)
#/
if arg_a_exists:
#/
if isinstance(arg_b_spec, (list, tuple)):
#/
arg_b_s = arg_b_spec
else:
#/
arg_b_s = [arg_b_spec]
#/
arg_b_rec_s = [re.compile('^%s($|=|[0-9])' % arg_b) for arg_b in arg_b_s]
#/
if isinstance(arg_b_spec, tuple):
req_all_arg_bs = True
else:
req_all_arg_bs = False
#/
arg_b_exists = False
for arg_b_rec in arg_b_rec_s:
#/
arg_b_exists = any(bool(arg_b_rec.search(arg)) for arg in args)
#/
if arg_b_exists:
if not req_all_arg_bs:
break
else:
if req_all_arg_bs:
break
#/
if not arg_b_exists:
#/
if isinstance(arg_b_spec, tuple):
#/
msg = 'argument %s: requires all of the arguments %s' % (arg_a, ', '.join(arg_b_spec))
parser.error(msg)
## raise error
#/
elif isinstance(arg_b_spec, list):
#/
msg = 'argument %s: requires one of the arguments %s' % (arg_a, ', '.join(arg_b_spec))
parser.error(msg)
## raise error
else:
#/
msg = 'argument %s: requires argument %s' % (arg_a, arg_b_spec)
parser.error(msg)
## raise error
#/
SPEC_DI_K_EXC = 'exc'
SPEC_DI_K_ONE = 'one'
SPEC_DI_K_TWO = 'two'
def ensure_spec(parser, spec, args=None):
#/
if args is None:
args = sys.argv[1:]
#/
one_spec = spec.get(SPEC_DI_K_ONE, None)
if one_spec is not None:
ensure_one(parser=parser, spec=one_spec, args=args)
#/
two_spec = spec.get(SPEC_DI_K_TWO, None)
if two_spec is not None:
ensure_two(parser=parser, spec=two_spec, args=args)
#/
exc_spec = spec.get(SPEC_DI_K_EXC, None)
if exc_spec is not None:
ensure_exc(parser=parser, spec=exc_spec, args=args)
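#/ A minimal usage sketch (not part of the module; the argument names are purely
## illustrative): "ensure_spec" combines the three checkers above. The spec below
## requires exactly one of -a/-b, makes -c require -d, and makes -x and -y
## mutually exclusive. parser.error() is called if a constraint is violated.
def _ensure_spec_demo():
    from argparse import ArgumentParser
    parser = ArgumentParser()
    for opt in ('-a', '-b', '-c', '-d', '-x', '-y'):
        parser.add_argument(opt, action='store_true')
    spec = {
        SPEC_DI_K_ONE: [('-a', '-b')],
        SPEC_DI_K_TWO: [('-c', '-d')],
        SPEC_DI_K_EXC: [('-x', '-y')],
    }
    #/ these arguments satisfy the spec, so no error is raised
    ensure_spec(parser=parser, spec=spec, args=['-a', '-c', '-d'])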
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/aoikargutil.py
|
aoikargutil.py
|
from __future__ import absolute_import
import os.path
import sys
import webbrowser
#
def pythonpath_init():
"""
Prepare "sys.path" for import resolution.
@return: None.
"""
# Get this file's directory path
my_dir = os.path.dirname(os.path.abspath(__file__))
# Remove some paths from "sys.path" to avoid unexpected import resolution.
# For each path in the list
for path in ['', '.', my_dir]:
# If the path is in "sys.path"
if path in sys.path:
# Remove the path from "sys.path"
sys.path.remove(path)
# Add "src" directory to "sys.path".
# This is the import resolution we want.
# Get "src" directory path
src_dir = os.path.dirname(my_dir)
# If "src" directory path is not in "sys.path"
if src_dir not in sys.path:
# Add "src" directory to "sys.path"
sys.path.insert(0, src_dir)
#
def check_dependency_packages():
"""
Check whether dependency packages have been installed.
Print hint message if a package is not installed.
@return: True if all packages have been installed, otherwise False.
"""
# Whether all dependency packages have been installed
result = True
#
try:
# Import package
import tkinter
# Make linter happy
tkinter = tkinter
except ImportError:
# Get message
msg = 'Error: Package "tkinter" is not installed.\n'
# Print message
sys.stderr.write(msg)
# Set result
result = False
#
try:
# Import package
import win32con
# Make linter happy
win32con = win32con
except ImportError:
# Get message
msg = 'Error: Package "pywin32" is not installed.\n'
# Print message
sys.stderr.write(msg)
# Download page URL
url = 'https://sourceforge.net/projects/pywin32/files/pywin32/'
# Open download page
webbrowser.open(url)
# Set result
result = False
# Return whether all dependency packages have been installed
return result
#
def main(args=None):
"""
Program entry function.
Call "pythonpath_init" to prepare "sys.path" for import resolution.
Then call "main_wrap" to implement functionality.
@param args: Command arguments list.
@return: Exit code.
"""
# If not all dependency packages are installed
if not check_dependency_packages():
# Return non-zero exit code
return 1
# Prepare "sys.path" for import resolution
pythonpath_init()
# Import "main_wrap" function
from aoikregistryeditor.mediator import main_wrap
# Call "main_wrap" function
return main_wrap(args=args)
# If this module is the main module
if __name__ == '__main__':
# Call "main" function
sys.exit(main())
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/aoikregistryeditor.py
|
aoikregistryeditor.py
|
from __future__ import absolute_import
import imp
import os.path
import sys
try:
from urllib.request import urlopen ## Py3
except ImportError:
from urllib2 import urlopen ## Py2
#/
__version__ = '0.2.3'
#/ define |exec_| and |raise_| that are 2*3 compatible.
##
## Modified from |six|:
## https://bitbucket.org/gutworth/six/src/cc9fce6016db076497454f9352e55b4758ccc07c/six.py?at=default#cl-632
##
## ---BEG
if sys.version_info[0] == 2:
#/
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
#/
exec_("""def raise_(exc, tb=None):
raise exc, None, tb
""")
else:
#/
exec_ = eval('exec')
#/
def raise_(exc, tb=None):
if tb is not None and exc.__traceback__ is not tb:
raise exc.with_traceback(tb)
else:
raise exc
## ---END
#/
def add_to_sys_modules(mod_name, mod_obj=None):
"""Add a module object to |sys.modules|.
@param mod_name: module name, used as key to |sys.modules|.
If |mod_name| is |a.b.c| while modules |a| and |a.b| are not existing,
empty modules will be created for |a| and |a.b| as well.
@param mod_obj: a module object.
If None, an empty module object will be created.
"""
#/
mod_sname_s = mod_name.split('.')
#/
parent_mod_name = ''
parent_mod_obj = None
for mod_sname in mod_sname_s:
#/
if parent_mod_name == '':
cur_mod_name = mod_sname
else:
cur_mod_name = parent_mod_name + '.' + mod_sname
#/
if cur_mod_name == mod_name:
#/
cur_mod_obj = mod_obj
else:
#/
cur_mod_obj = sys.modules.get(cur_mod_name, None)
#/
if cur_mod_obj is None:
#/ create an empty module
cur_mod_obj = imp.new_module(cur_mod_name)
#/
sys.modules[cur_mod_name] = cur_mod_obj
#/
if parent_mod_obj is not None:
setattr(parent_mod_obj, mod_sname, cur_mod_obj)
#/
parent_mod_name = cur_mod_name
parent_mod_obj = cur_mod_obj
#/
def import_module_by_code(mod_code, mod_name, sys_add=True, sys_use=True):
"""Create a module object by code.
@param mod_code: the code that the module contains.
@param mod_name: module name.
@param sys_use: whether use an existing module with the same name in |sys.modules|,
instead of creating a new one.
@param sys_add: whether add the module object to |sys.modules|.
If |sys_add| is on, |mod_name| is used as key to |sys.modules|.
If |sys_add| is on, and if |mod_name| is |a.b.c| while modules
|a| and |a.b| are not existing, empty modules will be created
for |a| and |a.b| as well.
"""
#/
mod_obj_old = sys.modules.get(mod_name, None)
#/
if mod_obj_old is not None and sys_use:
return mod_obj_old
#/
mod_obj = imp.new_module(mod_name)
#/ 3plQeic
exec_(mod_code, mod_obj.__dict__, mod_obj.__dict__)
#/
if sys_add:
#/
add_to_sys_modules(mod_name=mod_name, mod_obj=mod_obj)
#/
return mod_obj
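#/
## Usage sketch (illustrative only; the module name and attribute below are
## hypothetical): creating a module object from a code string.
##
##   mod = import_module_by_code('ANSWER = 42', mod_name='demo_codemod')
##   assert mod.ANSWER == 42
##   assert sys.modules['demo_codemod'] is mod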
#/
def import_module_by_name(mod_name,
ns_dir=None,
sys_use=True,
sys_add=True,
):
"""Import a module by module name.
@param mod_name: module name in Python namespace.
@param ns_dir: load from which namespace dir.
Namespace dir means the dir is considered as if it's in |sys.path|.
If |ns_dir| is specified, only load from that dir.
Otherwise load from any namespace dirs in |sys.path|.
"""
#/
if ns_dir is None:
#/
try:
mod_obj_old = sys.modules[mod_name]
except KeyError:
mod_obj_old = None
#/
if sys_use:
#/
if mod_obj_old is not None:
return mod_obj_old
#/
#/ 3pRKQd1
#/ if not want to use existing module in "sys.modules", need re-import
## by calling "__import__" at 2eys2rL. But "__import__" will return
## existing module in "sys.modules", so we must delete existing module
## before calling "__import__".
else:
#/
try:
del sys.modules[mod_name]
except KeyError:
pass
#/
try:
#/ 2eys2rL
__import__(mod_name)
## raise ImportError if the module not exists.
## raise any error from the imported module.
except Exception:
#/
if mod_obj_old is not None:
#/ restore to "sys.modules" the old module deleted at 3pRKQd1
sys.modules[mod_name] = mod_obj_old
#/
raise
#/
mod_obj = sys.modules[mod_name]
#/
if not sys_add:
#/
par_mod = None
rdot_idx = mod_name.rfind('.')
if rdot_idx != -1:
#/
par_mod_name = mod_name[0:rdot_idx]
mod_sname = mod_name[rdot_idx+1:]
#/ can None
par_mod = sys.modules.get(par_mod_name, None)
#/
if mod_obj_old is not None:
#/ restore to "sys.modules" the old module deleted at 3pRKQd1
sys.modules[mod_name] = mod_obj_old
#/ restore to parent module's attribute the old module deleted
## at 3pRKQd1
if par_mod is not None \
and getattr(par_mod, mod_sname, None) is mod_obj:
try:
setattr(par_mod, mod_sname, mod_obj_old)
except AttributeError:
pass
#/
else:
#/ delete from "sys.modules" the module newly loaded at 2eys2rL.
try:
del sys.modules[mod_name]
except KeyError:
pass
#/
if par_mod is not None \
and getattr(par_mod, mod_sname, None) is mod_obj:
#/ delete from parent module's attribute the module
## newly loaded at 2eys2rL.
try:
delattr(par_mod, mod_sname)
except AttributeError:
pass
#/
return mod_obj
#/
#assert ns_dir is not None
#/
mod_file_name_s = mod_name.split('.')
## |file_name| means the bare name, without extension.
##
## E.g. 'a.b.c' to ['a', 'b', 'c']
#/
parent_mod_name = '' ## change in each iteration below
mod_file_dir = ns_dir ## change in each iteration below
for mod_file_name in mod_file_name_s:
#/
if parent_mod_name == '':
parent_mod_obj = None
mod_name = mod_file_name
else:
parent_mod_obj = sys.modules[parent_mod_name]
mod_name = parent_mod_name + '.' + mod_file_name
#/
if parent_mod_obj:
__import__(mod_name)
mod_obj = sys.modules[mod_name]
else:
file_handle = None
try:
#/
tup = imp.find_module(mod_file_name, [mod_file_dir])
## raise ImportError
#/
mod_obj = imp.load_module(mod_name, *tup)
## raise any error from the imported module.
#/
file_handle = tup[0]
finally:
if file_handle is not None:
file_handle.close()
#/
parent_mod_name = mod_name
mod_file_dir = os.path.join(mod_file_dir, mod_file_name)
#/
return mod_obj
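#/
## Usage sketch (illustrative): importing a standard library module by name
## returns the module object, much like a plain import statement.
##
##   json_mod = import_module_by_name('json')
##   assert json_mod is sys.modules['json']
##
## Passing |ns_dir| instead loads the module from that directory as if the
## directory were on |sys.path|.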
#/
def import_module_by_path(mod_path, mod_name, sys_add=True, sys_use=True):
"""Import a module by module file path.
@param mod_path: module file path.
@param mod_name: module name to be imported as.
@param sys_use: see func |import_module_by_code|'s same name arg.
@param sys_add: see func |import_module_by_code|'s same name arg.
"""
#/
mod_code = open(mod_path).read()
## raise error
#/
mod_obj = import_module_by_code(
mod_code=mod_code,
mod_name=mod_name,
sys_use=sys_use,
sys_add=sys_add,
)
## raise error
#/
mod_obj.__file__ = mod_path
#/
return mod_obj
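#/
## Usage sketch (illustrative; the file path and module name are
## hypothetical): loading a module from a file path under a chosen name.
##
##   mod = import_module_by_path('/tmp/demo_mod.py', mod_name='demo_mod')
##   assert mod.__file__ == '/tmp/demo_mod.py'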
#/
def import_module_by_http(uri, mod_name, sys_use=True, sys_add=True):
"""Download module code via HTTP and create the module object from the code.
@param uri: HTTP URI of the module file.
@param mod_name: module name to be imported as.
@param sys_use: see func |import_module_by_code|'s same name arg.
@param sys_add: see func |import_module_by_code|'s same name arg.
"""
#/
resp = urlopen(uri)
## raise error
#/
mod_code = resp.read()
## raise error
#/
mod_obj = import_module_by_code(
mod_code=mod_code,
mod_name=mod_name,
sys_use=sys_use,
sys_add=sys_add,
)
## raise error
#/
return mod_obj
#/
def uri_split(uri, mod_attr_sep='::'):
#/
uri_part_s = uri.split(mod_attr_sep, 1)
## use |split| instead of |partition| to be compatible with Python 2.4-
if len(uri_part_s) == 2:
mod_uri, attr_chain = uri_part_s
else:
mod_uri = uri_part_s[0]
attr_chain = None
#/
if uri.startswith('http://'):
#/
prot = 'http'
#/ mod_uri is file url
#mod_uri = mod_uri
#/
elif uri.startswith('https://'):
prot = 'https'
#/ mod_uri is file url
#mod_uri = mod_uri
#/
elif mod_uri.startswith('py://'):
#/
prot = 'py'
#/ mod_uri is module name
mod_uri = mod_uri[5:]
#/
elif mod_uri.startswith('file://'):
#/
prot = 'file'
#/ mod_uri is file path
mod_uri = mod_uri[7:]
#/
elif mod_uri.endswith('.py'):
## This means if no protocol prefix is present, and the uri ends with |.py|,
## then consider the uri as module file path instead of module name.
#/
prot = 'file'
#/ mod_uri is file path
#mod_uri = mod_uri
else:
#/
prot = 'py'
#/ mod_uri is module name
#mod_uri = mod_uri
#/
res = (prot, mod_uri, attr_chain)
return res
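#/
## Behavior sketch derived from the branches above (example values are
## illustrative):
##
##   uri_split('a/b/c.py::x.y.z')      #=> ('file', 'a/b/c.py', 'x.y.z')
##   uri_split('a.b.c::x.y.z')         #=> ('py', 'a.b.c', 'x.y.z')
##   uri_split('py://a.b.c')           #=> ('py', 'a.b.c', None)
##   uri_split('file:///tmp/m.py')     #=> ('file', '/tmp/m.py', None)
##   uri_split('http://host/m.py::f')  #=> ('http', 'http://host/m.py', 'f')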
#/
def getattr_chain(obj, attr_chain, sep='.'):
"""Get the last attribute of a specified chain of attributes from a specified object.
E.g. |getattr_chain(x, 'a.b.c')| is equivalent to |x.a.b.c|.
@param obj: an object
@param attr_chain: a chain of attribute names
@param sep: separator for the chain of attribute names
"""
#/
if sep is None:
sep = '.'
#/
attr_name_s = attr_chain.split(sep)
#/
new_obj = obj
for attr_name in attr_name_s:
new_obj = getattr(new_obj, attr_name)
#/
return new_obj
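#/
## Usage sketch (illustrative): |getattr_chain(os, 'path.join')| returns the
## same object as |os.path.join|.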
#/
def load_obj(
uri,
mod_name=None,
sys_use=True,
sys_add=True,
mod_attr_sep='::',
attr_chain_sep='.',
retn_mod=False,
uri_parts=None,
):
"""Load an object from a module (specified by module name in Python namespace)
or from a module file (specified by module file path).
@param uri: an uri specifying which object to load.
An |uri| consists of two parts: |module uri| and |attr chain|,
e.g. |a/b/c.py::x.y.z| or |a.b.c::x.y.z|
#/ module uri
|a/b/c.py| or |a.b.c| is the |module uri| part.
Can be either a file path or a module name in Python namespace.
Whether it is a file path is determined by whether it ends with |.py|.
#/ attr chain
|x.y.z| is attribute chain on the module object specified by module uri.
@param mod_name: module name to be imported as.
Only applies when |uri| specifies a module file path, not a module name.
If None, the module file's name is used.
E.g. |path/to/hello.py| gets module name |hello|.
@param sys_use: see func |import_module_by_code|'s same name arg.
@param sys_add: see func |import_module_by_code|'s same name arg.
@param mod_attr_sep: separator between the module uri part and the attr chain part.
@param attr_chain_sep: separator between attribute names in the attr chain.
@param retn_mod: whether to return a tuple of (module object, attribute object)
instead of only the attribute object.
"""
#/
if uri_parts is None:
uri_parts = uri_split(uri=uri, mod_attr_sep=mod_attr_sep)
prot, mod_uri, attr_chain = uri_parts
#/
if prot == 'py':
## This means the uri specifies a module name, e.g. |a.b.c|
#/
mod_name_to_load = mod_uri
## avoid naming collision with func arg |mod_name|.
##
## arg |mod_name| is not used when importing by module name.
## the name of the module to import is specified in arg |uri|.
#/
mod_obj = import_module_by_name(mod_name_to_load,
sys_use=sys_use,
sys_add=sys_add,
)
## raise error
else:
## This means the uri specifies a module file path, e.g. |/a/b/c.py|
#/
mod_file_path = mod_uri
#/
if not mod_name:
_, mod_file_name = os.path.split(mod_file_path)
mod_name, _ = os.path.splitext(mod_file_name)
#/
mod_obj = import_module_by_path(mod_file_path,
mod_name=mod_name,
sys_use=sys_use,
sys_add=sys_add,
)
## raise error
#/
if not attr_chain:
if retn_mod:
return mod_obj, None
else:
return mod_obj
#/
#assert attr_chain
attr_obj = getattr_chain(
obj=mod_obj,
attr_chain=attr_chain,
sep=attr_chain_sep,
)
## raise error
#/
if retn_mod:
return mod_obj, attr_obj
else:
return attr_obj
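#/
## Usage sketch (illustrative; paths and names are hypothetical):
##
##   #/ load attribute |dumps| from module |json| specified by module name
##   dumps = load_obj('json::dumps')
##
##   #/ load a module object from a file path, importing it as |demo_mod|
##   mod = load_obj('/tmp/demo_mod.py', mod_name='demo_mod')
##
##   #/ return both the module object and the attribute object
##   mod, dumps = load_obj('json::dumps', retn_mod=True)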
#/
def load_obj_http(
uri,
mod_name=None,
sys_use=True,
sys_add=True,
mod_attr_sep='::',
attr_chain_sep='.',
retn_mod=False,
uri_parts=None,
):
"""Load an object from a remote module file downloaded via HTTP.
@param uri: specify the remote module file's location and which attribute object to load.
#/ load the module object
https://localhost/aoikimportutil/aoikimportutil.py
#/ load the module object, get its attribute object |load_obj_http|.
https://localhost/aoikimportutil/aoikimportutil.py::load_obj_http
@param mod_name: module name to be imported as.
@param sys_use: see func |import_module_by_code|'s same name arg.
@param sys_add: see func |import_module_by_code|'s same name arg.
@param mod_attr_sep: see func |load_obj|'s same name arg.
@param attr_chain_sep: see func |load_obj|'s same name arg.
@param retn_mod: see func |load_obj|'s same name arg.
"""
#/
if uri_parts is None:
uri_parts = uri_split(uri=uri, mod_attr_sep=mod_attr_sep)
_, file_url, attr_chain = uri_parts
#/
if not mod_name:
## |None| or |''|
#/ use file name as module name
_, file_name = os.path.split(file_url)
mod_name, _ = os.path.splitext(file_name)
#/ should not happen, but just in case
if not mod_name:
raise ValueError('Module name can not be inferred from the URI.\n URI is |%s|' % uri)
#/
#assert mod_name
mod_obj = import_module_by_http(
uri=file_url,
mod_name=mod_name,
sys_use=sys_use,
sys_add=sys_add,
)
#/
if not attr_chain:
if retn_mod:
return mod_obj, None
else:
return mod_obj
#/
#assert attr_chain
attr_obj = getattr_chain(
obj=mod_obj,
attr_chain=attr_chain,
sep=attr_chain_sep,
)
## raise error
#/
if retn_mod:
return mod_obj, attr_obj
else:
return attr_obj
#/
def load_obj_local_or_remote(
uri,
mod_name=None,
sys_use=True,
sys_add=True,
mod_attr_sep='::',
attr_chain_sep='.',
retn_mod=False,
):
"""Load an object from local or remote (using HTTP).
Whether it's local or remote depends on
whether the |uri| starts with |http://| or |https://|.
Local loading is done via func |load_obj|.
Remote loading is done via func |load_obj_http|.
@param uri: see func |load_obj| or |load_obj_http|'s same name arg.
@param mod_name: see func |load_obj| or |load_obj_http|'s same name arg.
@param sys_use: see func |import_module_by_code|'s same name arg.
@param sys_add: see func |import_module_by_code|'s same name arg.
@param mod_attr_sep: see func |load_obj| or |load_obj_http|'s same name arg.
@param attr_chain_sep: see func |load_obj| or |load_obj_http|'s same name arg.
@param retn_mod: see func |load_obj|'s same name arg.
"""
#/
uri_parts = uri_split(uri=uri, mod_attr_sep=mod_attr_sep)
prot = uri_parts[0]
#/
if prot in ('py', 'file'):
#/
return load_obj(
uri,
mod_name=mod_name,
sys_use=sys_use,
sys_add=sys_add,
mod_attr_sep=mod_attr_sep,
attr_chain_sep=attr_chain_sep,
retn_mod=retn_mod,
uri_parts=uri_parts,
)
#/
elif prot in ('http', 'https'):
#/
return load_obj_http(
uri,
mod_name=mod_name,
sys_use=sys_use,
sys_add=sys_add,
mod_attr_sep=mod_attr_sep,
attr_chain_sep=attr_chain_sep,
retn_mod=retn_mod,
uri_parts=uri_parts,
)
#/
else:
#/
assert 0, uri
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/aoikimportutil.py
|
aoikimportutil.py
|
from __future__ import absolute_import
#
def _create_menu_config():
"""
Create menu config.
@return: Menu config list.
"""
# Registry key paths
ENV_PREFIX = \
r'HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager'
ENV = ENV_PREFIX + r'\Environment'
ENV_PATH = ENV_PREFIX + r'\Environment->PATH'
ENV_PATHEXT = ENV_PREFIX + r'\Environment->PATHEXT'
ENV_PYTHONPATH = ENV_PREFIX + r'\Environment->PYTHONPATH'
ENV_PYTHONIOENCODING = ENV_PREFIX + r'\Environment->PYTHONIOENCODING'
CU_ENV = r'HKEY_CURRENT_USER\Environment'
CU_VOLATILE_ENV = r'HKEY_CURRENT_USER\Volatile Environment'
CV = r'HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion'
EXPLORER_FILEEXTS = CV + r'\Explorer\FileExts'
# Create menu config list
menu_config = [
# Hive
dict(pid='/', id='Hive', type='menu'),
dict(pid='/Hive', id='ROOT', key=''),
dict(pid='/Hive', id='HKEY_CLASSES_ROOT'),
dict(pid='/Hive', id='HKEY_CURRENT_CONFIG'),
dict(pid='/Hive', id='HKEY_CURRENT_USER'),
dict(pid='/Hive', id='HKEY_LOCAL_MACHINE'),
dict(pid='/Hive', id='HKEY_USERS'),
# Environment
dict(pid='/', id='Environment', type='menu'),
dict(pid='/Environment', id=ENV),
dict(pid='/Environment', id=ENV_PATH),
dict(pid='/Environment', id=ENV_PATHEXT),
dict(pid='/Environment', id=ENV_PYTHONPATH),
dict(pid='/Environment', id=ENV_PYTHONIOENCODING),
dict(pid='/Environment', id='Separator1', type='separator'),
dict(pid='/Environment', id=CU_ENV),
dict(pid='/Environment', id=CU_VOLATILE_ENV),
# File Types
dict(pid='/', id='File Types', type='menu'),
dict(pid='/File Types', id='HKEY_CLASSES_ROOT'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\*'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\*\shell'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\AllFilesystemObjects'),
dict(
pid='/File Types',
id=r'HKEY_CLASSES_ROOT\AllFilesystemObjects\shell',
),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\Folder'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\Folder\shell'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\Directory'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\Directory\shell'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\DesktopBackground'),
dict(
pid='/File Types',
id=r'HKEY_CLASSES_ROOT\DesktopBackground\shell',
),
dict(pid='/File Types', id='Separator1', type='separator'),
dict(pid='/File Types', id=EXPLORER_FILEEXTS),
dict(pid='/File Types', id='Separator2', type='separator'),
dict(pid='/File Types', id=r'HKEY_CLASSES_ROOT\.txt'),
]
# Return the menu config list
return menu_config
# Create menu specs
MENU_CONFIG = _create_menu_config()
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/menu_config.py
|
menu_config.py
|
from __future__ import absolute_import
from tkinter import IntVar
from tkinter import messagebox
from tkinter.constants import ACTIVE
from tkinter.constants import DISABLED
from tkinter.constants import NORMAL
from tkinter.ttk import Frame
from tkinter.ttk import Label
from tkinter.ttk import LabelFrame
from tkinter.ttk import Radiobutton
from win32con import KEY_ALL_ACCESS
from win32con import KEY_READ
from win32con import KEY_WRITE
from .registry import RegKeyPathNavigator
from .registry import regkey_exists
from .registry import regkey_get
from .tkinterutil.label import LabelVidget
from .tkinterutil.listbox import ListboxVidget
from .tkinterutil.menu import MenuTree
from .tkinterutil.text import TextVidget
from .tkinterutil.text import EntryVidget
from .tkinterutil.toplevel import center_window
from .tkinterutil.toplevel import DialogVidget
from .tkinterutil.toplevel import get_window_center
from .tkinterutil.vidget import Vidget
#
class FieldEditor(object):
"""
Field editor interface class.
"""
def field_is_supported(self, field):
"""
Test whether given field is supported by the field editor.
@param field: Field's RegVal object.
@return: Boolean.
"""
# Raise error
raise NotImplementedError()
def data(self):
"""
Get data in the field editor.
@return: Data in the field editor.
"""
# Raise error
raise NotImplementedError()
def data_set(self, data):
"""
Set data in the field editor.
@param data: Data to set.
@return: None.
"""
# Raise error
raise NotImplementedError()
def enable(self, enabled):
"""
Enable or disable the field editor.
@param enabled: Whether to enable.
@return: None.
"""
# Raise error
raise NotImplementedError()
def enabled(self):
"""
Test whether the field editor is enabled.
@return: Boolean.
"""
# Raise error
raise NotImplementedError()
def widget(self):
"""
Get the field editor's widget.
@return: Field editor's widget.
"""
# Raise error
raise NotImplementedError()
def destroy(self):
"""
Destroy the field editor.
@return: None.
"""
# Raise error
raise NotImplementedError()
#
class FilteredFieldEditor(FieldEditor):
"""
FilteredFieldEditor contains a TextVidget.
It applies get filter when getting field data from registry, and applies
set filter when setting field data to registry.
"""
def __init__(
self,
field,
get_filter,
set_filter,
master,
normal_bg=None,
disabled_bg=None,
):
"""
Initialize object.
@param field: Field's RegVal object.
@param get_filter: Get filter.
@param set_filter: Set filter.
@param master: Master widget.
@param normal_bg: Normal state background color.
@param disabled_bg: Disabled state background color.
"""
# Field's RegVal object
self._field = field
# Create text vidget
self._text_vidget = TextVidget(master=master)
# Get filter
self._get_filter = get_filter
# Set filter
self._set_filter = set_filter
# Normal state background color
self._normal_bg = normal_bg
# Disabled state background color
self._disabled_bg = disabled_bg
# Whether the field editor is enabled
self._enabled = True
def field(self):
"""
Get field's RegVal object.
@return: Field's RegVal object.
"""
# Return the field's RegVal object
return self._field
def field_set(self, field):
"""
Set field's RegVal object.
@param field: Field's RegVal object to set.
@return: None.
"""
# Set the field's RegVal object
self._field = field
def field_is_supported(self, field):
"""
Test whether given field is supported by the field editor.
@param field: Field's RegVal object.
@return: Boolean.
"""
# Get field type
field_type = field.type()
# Test whether the field type is `String` or `Extended String`
# 1: String.
# 2: Extended String.
return field_type in [1, 2]
def text_vidget(self):
"""
Get the text vidget.
@return: Text vidget.
"""
# Return the text vidget
return self._text_vidget
def data(self):
"""
Get data in the field editor.
@return: Data in the field editor.
"""
# Get the text vidget's data
data = self._text_vidget.text()
# Apply get filter on the data
data = self._get_filter(data)
# Return the filtered data
return data
def data_set(self, data):
"""
Set data in the field editor.
@param data: Data to set.
@return: None.
"""
# Apply set filter on the data
data = self._set_filter(data)
#
self._text_vidget.text_set(data)
def enable(self, enabled):
"""
Enable or disable the field editor.
@param enabled: Whether to enable.
@return: None.
"""
# Set the enabled state
self._enabled = enabled
# If the state is enabled
if self._enabled:
# Set text vidget's state to normal
self._text_vidget.config(state=NORMAL)
# If normal state background color is given
if self._normal_bg is not None:
# Set text vidget's normal state background color
self._text_vidget.config(background=self._normal_bg)
# If the state is not enabled
else:
# If disabled state background color is given
if self._disabled_bg is not None:
# Set text vidget's disabled state background color
self._text_vidget.config(background=self._disabled_bg)
# Set text vidget's state to disabled
self._text_vidget.config(state=DISABLED)
def enabled(self):
"""
Test whether the field editor is enabled.
@return: Boolean.
"""
# Return whether the field editor is enabled
return self._enabled
def widget(self):
"""
Get the field editor's widget.
@return: Field editor's widget.
"""
# Return the field editor's widget
return self._text_vidget.widget()
def destroy(self):
"""
Destroy the field editor.
@return: None.
"""
# Hide the text vidget
self._text_vidget.grid_forget()
# Destroy the text vidget
self._text_vidget.destroy()
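#
# Factory sketch (illustrative; not part of the original module): a minimal
# `field_editor_factory` compatible with the contract described at 5WMYV
# below, reusing the old editor when it still supports the field.
#
#   def demo_field_editor_factory(field, old_editor, master):
#       # Reuse the old editor if it exists and supports the field
#       if old_editor is not None \
#               and field is not None \
#               and old_editor.field_is_supported(field):
#           old_editor.field_set(field)
#           return old_editor
#       # Otherwise create a plain pass-through FilteredFieldEditor
#       return FilteredFieldEditor(
#           field=field,
#           get_filter=(lambda data: data),
#           set_filter=(lambda data: data),
#           master=master,
#       )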
#
class RegistryEditor(Vidget):
def __init__(
self,
field_editor_factory,
status_bar_set,
master=None,
):
"""
Initialize object.
@param field_editor_factory: Field editor factory function.
The factory function should take these arguments (see 5WMYV):
- field: Field's RegVal object.
- old_editor: Old field editor object.
- master: Master widget for the new field editor's widget.
The factory function should return a field editor that supports methods
in the FieldEditor interface class.
@param status_bar_set: Status bar set function.
@param master: Master widget.
@return: None.
"""
# Initialize Vidget.
# Create main frame widget.
Vidget.__init__(self, master=master)
# Field editor factory function
self._field_editor_factory = field_editor_factory
# Status bar set function
self._status_bar_set = status_bar_set
# Create registry key path navigator
self._path_nav = RegKeyPathNavigator()
# Create registry key path bar textfield
self._path_bar = EntryVidget(master=self.widget())
# Create child keys listbox
self._child_keys_listbox = ListboxVidget(master=self.widget())
# Child keys listbox's active index cache.
# Key is registry key path.
# Value is active child key index.
self._child_keys_listbox_indexcur_memo = {}
# Create fields listbox
self._fields_listbox = ListboxVidget(
master=self.widget(),
)
# Field editor
self._field_editor = None
# Create `field add` label
self._field_add_label = LabelVidget(master=self.widget())
# Create `field del` label
self._field_del_label = LabelVidget(master=self.widget())
# Create `field load` label
self._field_load_label = LabelVidget(master=self.widget())
# Create `field save` label
self._field_save_label = LabelVidget(master=self.widget())
# Create `field add` dialog
self._field_add_dialog = DialogVidget(
master=self.widget(),
confirm_buttion_text='Ok',
cancel_buttion_text='Cancel',
)
# Create `field add` dialog's view frame
self._field_add_frame = Frame(
master=self._field_add_dialog.toplevel()
)
# Create `field add` dialog's `field name` label
self._field_add_name_label = Label(master=self._field_add_frame)
# Create `field add` dialog's `field name` textfield
self._field_add_name_textfield = EntryVidget(
master=self._field_add_frame
)
# Create `field add` dialog's `field type` label
self._field_add_type_label = Label(
master=self._field_add_frame
)
# Create `field add` dialog's `field type` radio buttons frame
self._field_add_type_rbuttons_frame = Frame(
master=self._field_add_frame,
)
# Create `field add` dialog's radio button variable
self._field_add_type_var = IntVar()
# Create `field add` dialog's radio button for `String` field type
self._field_add_type_v_string_rbutton = Radiobutton(
master=self._field_add_type_rbuttons_frame,
text='String',
variable=self._field_add_type_var,
value=1,
)
# Create `field add` dialog's radio button for `Extended String` field
# type
self._field_add_type_v_extstr_rbutton = Radiobutton(
master=self._field_add_type_rbuttons_frame,
text='Extended String',
variable=self._field_add_type_var,
value=2,
)
# Set `field add` dialog's radio button variable's initial value
self._field_add_type_var.set(1)
# Bind widget event handlers
self._widget_bind()
# Update widget
self._widget_update()
# Go to root key path
self._path_nav.go_to_root()
def _widget_bind(self):
"""
Bind widget event handlers.
@return: None.
"""
# Path bar textfield adds text change event handler
self._path_bar.handler_add(
self._path_bar.TEXT_CHANGE_DONE,
self._path_bar_on_text_change
)
# Path bar textfield adds navigator path change event handler
self._path_nav.handler_add(
self._path_nav.PATH_CHANGE_DONE,
self._path_bar_on_nav_path_change
)
# Child keys listbox adds click event handler
self._child_keys_listbox.handler_add(
'<Button-1>',
self._child_keys_listbox_on_click
)
# Child keys listbox adds double click event handler
self._child_keys_listbox.handler_add(
'<Double-Button-1>',
self._child_keys_listbox_on_double_click
)
# Child keys listbox adds right click event handler
self._child_keys_listbox.handler_add(
'<Button-3>',
self._child_keys_listbox_on_right_click
)
# Child keys listbox adds navigator path change event handler
self._path_nav.handler_add(
self._path_nav.PATH_CHANGE_DONE,
self._child_keys_listbox_on_nav_path_change
)
# Fields listbox adds navigator path change event handler
self._path_nav.handler_add(
self._path_nav.PATH_CHANGE_DONE,
self._fields_listbox_on_nav_pathcur_change
)
# Field editor adds `fields listbox items change` event handler
self._fields_listbox.handler_add(
self._fields_listbox.ITEMS_CHANGE_DONE,
self._field_editor_update
)
# Field editor adds `fields listbox itemcur change` event handler
self._fields_listbox.handler_add(
self._fields_listbox.ITEMCUR_CHANGE_DONE,
self._field_editor_update
)
# `Field add label` adds click event handler
self._field_add_label.bind(
'<Button-1>',
self._field_add_label_on_click,
)
# `Field add label` adds click release event handler
self._field_add_label.bind(
'<ButtonRelease-1>',
self._field_add_label_on_click_release,
)
# `Field add label` adds `child keys listbox items change` event
# handler
self._child_keys_listbox.handler_add(
self._child_keys_listbox.ITEMS_CHANGE_DONE,
self._field_add_label_update,
)
# `Field add label` adds `child keys listbox indexcur change` event
# handler
self._child_keys_listbox.handler_add(
self._child_keys_listbox.ITEMCUR_CHANGE_DONE,
self._field_add_label_update,
)
# `Field delete label` adds click event handler
self._field_del_label.bind(
'<Button-1>',
self._field_del_label_on_click,
)
# `Field delete label` adds click release event handler
self._field_del_label.bind(
'<ButtonRelease-1>',
self._field_del_label_on_click_release,
)
# `Field delete label` adds `fields listbox items change` event handler
self._fields_listbox.handler_add(
self._fields_listbox.ITEMS_CHANGE_DONE,
self._field_del_label_update,
)
# `Field delete label` adds `fields listbox indexcur change` event
# handler
self._fields_listbox.handler_add(
self._fields_listbox.ITEMCUR_CHANGE_DONE,
self._field_del_label_update
)
# `Field load label` adds click event handler
self._field_load_label.bind(
'<Button-1>',
self._field_load_label_on_click,
)
# `Field load label` adds click release event handler
self._field_load_label.bind(
'<ButtonRelease-1>',
self._field_load_label_on_click_release,
)
# `Field save label` adds click event handler
self._field_save_label.bind(
'<Button-1>',
self._field_save_label_on_click,
)
# `Field save label` adds click release event handler
self._field_save_label.bind(
'<ButtonRelease-1>',
self._field_save_label_on_click_release,
)
def _widget_update(self):
"""
Update widget config and layout.
@return: None.
"""
# Configure layout weights for children.
# Row 0 is for path bar.
self.widget().rowconfigure(0, weight=0)
# Row 1 is for child keys listbox, fields listbox, and field editor
self.widget().rowconfigure(1, weight=1)
# Column 0 is for child keys listbox
self.widget().columnconfigure(0, weight=1, uniform='a')
# Column 1 is for fields listbox
self.widget().columnconfigure(1, weight=1, uniform='a')
# Column 2 is for field editor
self.widget().columnconfigure(2, weight=2, uniform='a')
# Create path bar frame
self._path_bar_frame = Frame(master=self.widget())
# Lay out path bar frame
self._path_bar_frame.grid(
row=0,
column=0,
columnspan=3,
sticky='NSEW',
)
# Configure layout weights for children.
# Use only one row.
self._path_bar_frame.rowconfigure(0, weight=1)
# Column 0 is for path bar label
self._path_bar_frame.columnconfigure(0, weight=0)
# Column 1 is for path bar textfield
self._path_bar_frame.columnconfigure(1, weight=1)
# Create path bar label
self._path_bar_label = Label(master=self.widget())
# Configure path bar label
self._path_bar_label.config(text='Key:')
# Lay out path bar label
self._path_bar_label.grid(
in_=self._path_bar_frame,
row=0,
column=0,
sticky='NSEW',
)
# Raise path bar textfield's z-index
self._path_bar.tkraise()
# Lay out path bar textfield
self._path_bar.grid(
in_=self._path_bar_frame,
row=0,
column=1,
sticky='NSEW',
)
# Create child keys labelframe
self._child_keys_labelframe = LabelFrame(master=self.widget())
# Configure child keys labelframe
self._child_keys_labelframe.config(text='Child Keys')
# Lay out child keys labelframe
self._child_keys_labelframe.grid(
in_=self.widget(),
row=1,
column=0,
sticky='NSEW',
)
# Configure layout weights for children.
# Use only one row.
self._child_keys_labelframe.rowconfigure(0, weight=1)
# Use only one column.
self._child_keys_labelframe.columnconfigure(0, weight=1)
# Raise child keys listbox's z-index
self._child_keys_listbox.tkraise()
# Lay out child keys listbox
self._child_keys_listbox.grid(
in_=self._child_keys_labelframe,
row=0,
column=0,
sticky='NSEW',
)
# Create fields labelframe
self._fields_labelframe = LabelFrame(master=self.widget())
# Configure fields labelframe
self._fields_labelframe.config(text='Fields')
# Lay out fields labelframe
self._fields_labelframe.grid(
in_=self.widget(),
row=1,
column=1,
sticky='NSEW',
)
# Configure layout weights for children.
# Row 0 is for field add label and field delete label.
self._fields_labelframe.rowconfigure(0, weight=0)
# Row 1 is for fields listbox
self._fields_labelframe.rowconfigure(1, weight=1)
# Use only one column
self._fields_labelframe.columnconfigure(0, weight=1)
# Raise fields listbox's z-index
self._fields_listbox.tkraise()
# Lay out fields listbox
self._fields_listbox.grid(
in_=self._fields_labelframe,
row=1,
column=0,
sticky='NSEW',
)
# Raise field add label's z-index
self._field_add_label.tkraise()
# Lay out field add label
self._field_add_label.grid(
in_=self._fields_labelframe,
row=0,
column=0,
sticky='W',
)
# Raise field delete label's z-index
self._field_del_label.tkraise()
# Lay out field delete label
self._field_del_label.grid(
in_=self._fields_labelframe,
row=0,
column=0,
sticky='W',
# Add left padding to appear as the second label in the row
padx=(40, 0),
)
# Create field editor labelframe
self._field_editor_labelframe = LabelFrame(master=self.widget())
# Lay out field editor labelframe
self._field_editor_labelframe.grid(
in_=self.widget(),
row=1,
column=2,
sticky='NSEW',
)
# Configure layout weights for children.
# Row 0 is for field load label and field save label.
self._field_editor_labelframe.rowconfigure(0, weight=0)
# Row 1 is for field editor
self._field_editor_labelframe.rowconfigure(1, weight=1)
# Use only one column
self._field_editor_labelframe.columnconfigure(0, weight=1)
# Raise field load label's z-index
self._field_load_label.tkraise()
# Lay out field load label
self._field_load_label.grid(
in_=self._field_editor_labelframe,
row=0,
column=0,
sticky='W',
)
# Raise field save label's z-index
self._field_save_label.tkraise()
# Lay out field save label
self._field_save_label.grid(
in_=self._field_editor_labelframe,
row=0,
column=0,
sticky='W',
# Add left padding to appear as the second label in the row
padx=(40, 0),
)
# Configure layout weights for children.
# Row 0 is for field name.
self._field_add_frame.rowconfigure(0, weight=0)
# Row 1 is for field type.
self._field_add_frame.rowconfigure(1, weight=0)
# Column 0 is for prompt labels
self._field_add_frame.columnconfigure(0, weight=0)
# Column 1 is for textfield and radio buttons.
self._field_add_frame.columnconfigure(1, weight=1)
# Configure `field add` dialog's field name prompt label
self._field_add_name_label.config(text='Field Name:')
# Lay out `field add` dialog's field name prompt label
self._field_add_name_label.grid(
in_=self._field_add_frame,
row=0,
column=0,
sticky='NSEW',
)
# Lay out `field add` dialog's field name textfield
self._field_add_name_textfield.grid(
in_=self._field_add_frame,
row=0,
column=1,
sticky='NSEW',
padx=(5, 0)
)
# Configure `field add` dialog's field type prompt label
self._field_add_type_label.config(text='Field Type:')
# Lay out `field add` dialog's field type prompt label
self._field_add_type_label.grid(
in_=self._field_add_frame,
row=1,
column=0,
sticky='NSEW',
)
# Lay out `field add` dialog's field type radio buttons frame
self._field_add_type_rbuttons_frame.grid(
in_=self._field_add_frame,
row=1,
column=1,
sticky='NSEW',
)
# Lay out `field add` dialog's `String` field type radio button
self._field_add_type_v_string_rbutton.grid(
in_=self._field_add_type_rbuttons_frame,
row=0,
column=0,
sticky='W',
)
# Lay out `field add` dialog's `Extended String` field type radio
# button
self._field_add_type_v_extstr_rbutton.grid(
in_=self._field_add_type_rbuttons_frame,
row=0,
column=1,
sticky='W',
)
# Set `field add` dialog's view widget
self._field_add_dialog.view_set(self._field_add_frame)
# Set `field add` dialog's title
self._field_add_dialog.title('Create field')
def _path_nav_goto(self, path):
"""
Go to registry key path. Show error dialog if failed.
@param path: Registry key path to go to.
@return: Whether successful.
"""
#
try:
# Set path navigator to go to given key path
self._path_nav.go_to_path(path, check=True)
# If have error
except ValueError:
# Show error dialog
messagebox.showwarning(
'Error',
'Cannot open key: `{}`.'.format(path)
)
# Return not successful
return False
# If have no error,
# return successful
return True
def _path_bar_update(self):
"""
Update registry key path bar.
@return: None.
"""
# Get path bar's registry key path
key_path = self._path_bar.text()
# If the registry key path not exists
if not regkey_exists(key_path):
# Set path bar label's state to disabled
self._path_bar_label.config(state=DISABLED)
# If the registry key path exists
else:
# Set path bar label's state to normal
self._path_bar_label.config(state=NORMAL)
def _path_bar_on_text_change(self):
"""
Registry key path bar's text change event handler.
@return: None.
"""
# Update path bar
self._path_bar_update()
# Get path bar's registry key path
key_path = self._path_bar.text()
# If the registry key path exists
if regkey_exists(key_path):
# Set path navigator to go to the registry key path
self._path_nav_goto(key_path)
# If the registry key path not exists,
# do nothing.
def _path_bar_on_nav_path_change(self):
"""
Registry key path bar's `path navigator path change` event handler.
@return: None.
"""
# If the path bar is changing
if self._path_bar.is_changing():
# Do nothing to avoid circular call
return
# If the path bar is not changing.
# Get the path navigator's key path
nav_key_path = self._path_nav.path()
# If the path navigator's key path is not EQ the path bar's key path
if nav_key_path != self._path_bar.text():
# Update the path bar's text
self._path_bar.text_set(text=nav_key_path, notify=False)
# Update the path bar
self._path_bar_update()
def _child_keys_listbox_indexcur_remember(self):
"""
Remember active registry key path's child keys listbox active index.
@return: None.
"""
# Get the path navigator's active registry key path
key_path = self._path_nav.path()
# Get child keys listbox's active index
indexcur = self._child_keys_listbox.indexcur()
# If the active registry key path is not root path,
# and the child keys listbox's active item is `go up` (see 6PMTJ)
if key_path != self._path_nav.ROOT and indexcur == 0:
# Do not remember
return
else:
# Remember the child keys listbox active index
self._child_keys_listbox_indexcur_memo[key_path] = indexcur
def _child_keys_listbox_indexcur_recover(self):
"""
Recover active registry key path's child keys listbox active index.
@return: None.
"""
# Get active registry key path
key_path = self._path_nav.path()
# Get remembered child keys listbox active index for the active
# registry key path
memo_index = self._child_keys_listbox_indexcur_memo.get(key_path, None)
# If have no remembered child keys listbox active index
if memo_index is None:
# Do nothing
return
# If have remembered child keys listbox active index.
# Get child keys listbox last index
index_last = self._child_keys_listbox.index_last()
# If the remembered active index is EQ child keys listbox's active
# index
if memo_index == self._child_keys_listbox.indexcur():
# Do nothing
return
# If the remembered active index is GT the last index,
# it means the remembered active index is no longer valid.
elif memo_index > index_last:
try:
# Delete the remembered active index
del self._child_keys_listbox_indexcur_memo[key_path]
# If have KeyError
except KeyError:
# Ignore
pass
# If the remembered active index is not GT the last index
else:
# Set child keys listbox to the remembered active index
self._child_keys_listbox.indexcur_set(
memo_index,
notify=True
)
def _child_keys_listbox_on_click(self, event):
# Remember active registry key path's child keys listbox active index
self._child_keys_listbox_indexcur_remember()
def _child_keys_listbox_on_double_click(self, event):
"""
Child keys listbox double click event handler.
@param event: Tkinter event object.
@return: None.
"""
# Remember active registry key path's child keys listbox active index
self._child_keys_listbox_indexcur_remember()
# Get active key path
old_key_path = self._path_nav.path()
# If double clicked item is `go up` (see 6PMTJ)
if old_key_path != self._path_nav.ROOT \
and self._child_keys_listbox.indexcur() == 0:
# Go to parent key path
success = self._path_nav_goto(self._path_nav.parent_path())
# If have success
if success:
# Get old key name
_, _, old_key_name = old_key_path.rpartition('\\')
# If the old key name is not empty
if old_key_name:
# For each child key names in the child keys listbox
for index, child_key_name in enumerate(
self._child_keys_listbox.items()):
# If the child key name is EQ the old key name
if child_key_name == old_key_name:
# Set the index to active
self._child_keys_listbox.indexcur_set(
index=index,
notify=True,
)
# Stop finding
break
# If have no success,
# do nothing.
# If double clicked item is not `go up` (see 6PMTJ)
else:
# Get child key name
child_key_name = self._child_keys_listbox.itemcur()
# Get child key path
child_key_path = self._path_nav.child_path(child_key_name)
# Go to the child key path
self._path_nav_goto(child_key_path)
def _child_keys_listbox_on_right_click(self, event):
"""
Child keys listbox right click event handler.
@param event: Tkinter event object.
@return: None.
"""
# Get active key path
key_path = self._path_nav.path()
# If the active key path is not root key path
if key_path != self._path_nav.ROOT:
# Go to parent key path
self._path_nav_goto(self._path_nav.parent_path())
# If the active key path is root key path,
# do nothing.
def _child_keys_listbox_on_nav_path_change(self):
"""
Child keys listbox's `path navigator path change` event handler.
@return: None.
"""
# Get active key path
key_path = self._path_nav.path()
# Get path navigator's child key names
child_key_name_s = self._path_nav.child_names()
# If child key names is None,
# it means failed opening the key.
if child_key_name_s is None:
# Show error dialog
messagebox.showwarning(
'Error',
'Cannot read child keys of key: `{}`'.format(key_path)
)
# Set child key names to empty
child_key_name_s = []
# If child key names is not None
else:
# Copy the child key names to a new list
child_key_name_s = list(child_key_name_s)
# Get status message
status_msg = 'Key: `{}`'.format(key_path)
# Set status message to status bar
self._status_bar_set(status_msg)
# Sort child key names
child_key_name_s = list(
sorted(child_key_name_s, key=(lambda x: x.lower()))
)
# 6PMTJ
# If the active key path is not root path
if key_path != self._path_nav.ROOT:
# Insert `go up` item to the child key names list
child_key_name_s.insert(0, '..')
# If the active key path is root path,
# do not insert `go up` item.
# Set the child key names to child keys listbox
self._child_keys_listbox.items_set(child_key_name_s, notify=True)
# Recover remembered active index
self._child_keys_listbox_indexcur_recover()
def _fields_listbox_on_nav_pathcur_change(self):
"""
Fields listbox's `path navigator path change` event handler.
@return: None.
"""
# Get active key path
key_path = self._path_nav.path()
# For each permission mask from larger permission to smaller permission
for mask in [KEY_ALL_ACCESS, KEY_WRITE, KEY_READ]:
# Try getting RegKey object with the permission mask
regkey = regkey_get(path=key_path, mask=mask)
# If have success
if regkey is not None:
# Stop trying
break
# If the RegKey object is None,
# it means the key path can not be opened.
if regkey is None:
# Show error dialog
messagebox.showwarning(
'Error',
'Cannot read fields of key: `{}`'.format(key_path)
)
# Set fields listbox to empty
self._fields_listbox.items_set([], notify=True)
# If the RegKey object is not None.
else:
# Get the registry key's fields
field_s = regkey.fields()
# If the registry key have fields
if field_s:
# Sort the fields by field name
field_s = list(
sorted(field_s, key=(lambda x: x.name().lower()))
)
# Set the fields to the fields listbox
self._fields_listbox.items_set(field_s, notify=True)
# Set fields listbox's indexcur to 0
self._fields_listbox.indexcur_set(0, notify=True)
else:
# Set fields listbox to empty
self._fields_listbox.items_set([], notify=True)
def _field_editor_update(self):
"""
Update field editor.
@return: None.
"""
# Get old field editor
old_field_editor = self._field_editor
# Get fields listbox's active registry field
field = self._fields_listbox.itemcur()
# 5WMYV
# Create new field editor.
# Notice the factory function may return the old editor object.
self._field_editor = self._field_editor_factory(
field=field,
old_editor=old_field_editor,
master=self._field_editor_labelframe,
)
# Set enabled flag on
is_enabled = True
# Field data.
# Use `None.__class__` to distinguish it from the None returned at 5GN0P.
field_data = None.__class__
# If have no active registry field
if field is None:
# Set enabled flag off
is_enabled = False
# If have active registry field
else:
# Get active key path
key_path = self._path_nav.path()
# Get status message
status_msg = 'Key: `{}->{}`'.format(key_path, field.name())
# Set status message to status bar
self._status_bar_set(status_msg)
# Test whether the field is supported by the field editor
is_enabled = self._field_editor.field_is_supported(field)
# If the field is supported by the field editor
if is_enabled:
# 5GN0P
# Read field data from registry
field_data = field.data()
# If field data is None
if field_data is None:
# Set enabled flag off
is_enabled = False
else:
# Set enabled flag on
is_enabled = True
# If the field is not supported by the field editor,
# no need read field data from registry.
# If enabled flag is on
if is_enabled:
# Get field editor labelframe's label
labelframe_text = 'Field `{}`'.format(field.name())
# Set field editor labelframe's label
self._field_editor_labelframe.config(text=labelframe_text)
# Set field editor to enabled
self._field_editor.enable(True)
# Set field editor data
self._field_editor.data_set(field_data)
# Set field load label's state to normal
self._field_load_label.config(state=NORMAL)
# Set field save label's state to normal
self._field_save_label.config(state=NORMAL)
# If enabled flag is off
else:
# Set field editor labelframe's label
self._field_editor_labelframe.config(text='Field')
# Set field editor data to empty
self._field_editor.data_set('')
# Set field editor to disabled
self._field_editor.enable(False)
# Set field load label's state to disabled
self._field_load_label.config(state=DISABLED)
# Set field save label's state to disabled
self._field_save_label.config(state=DISABLED)
# Lay out field editor
self._field_editor.widget().grid(
in_=self._field_editor_labelframe,
row=1,
column=0,
columnspan=2,
sticky='NSEW',
pady=(0, 10),
)
# If old field editor is not new field editor
if old_field_editor is not self._field_editor:
# If old field editor is not None
if old_field_editor is not None:
# Destroy old field editor
old_field_editor.destroy()
# If failed reading field data at 5GN0P
if field_data is None:
# Show error dialog
messagebox.showwarning(
'Error',
'Failed reading field data.'
)
def _field_add_label_update(self):
"""
Update field add label.
@return: None.
"""
# If active key path is root key path
if self._path_nav.path() == self._path_nav.ROOT:
# Set field add label's state to disabled
self._field_add_label.config(state=DISABLED)
# If active key path is not root key path
else:
# Set field add label's state to normal
self._field_add_label.config(state=NORMAL)
def _field_add_label_on_click(self, event):
"""
Field add label click event handler.
@event: Tkinter event object.
@return: None.
"""
# If field add label is in disabled state
if self._field_add_label.instate([DISABLED]):
# Do nothing
return
# If field add label is not in disabled state
else:
# Set field add label's state to active
self._field_add_label.config(state=ACTIVE)
def _field_add_label_on_click_release(self, event):
"""
Field add label click release event handler.
@event: Tkinter event object.
@return: None.
"""
# If field add label is in disabled state
if self._field_add_label.instate([DISABLED]):
# Do nothing
return
# If field add label is not in disabled state.
# Set field add label's state to normal
self._field_add_label.config(state=NORMAL)
# If active key path is root key path
if self._path_nav.path() == self._path_nav.ROOT:
# Do nothing
return
# If active key path is not root key path.
# Get active key path's RegKey object
regkey = self._path_nav.regkey()
# If have no RegKey object,
# it means the active key path is not accessible
if regkey is None:
# Do nothing
return
# If have RegKey object.
# Create confirm button event handler
def confirm_handler():
"""
`field add` dialog's confirm button event handler.
@return: None.
"""
# Get field name.
# Notice empty string is valid field name.
field_name = self._field_add_name_textfield.text()
# Get field type
field_type = self._field_add_type_var.get()
# If the field type is not valid.
# 1: String.
# 2: Extended String.
if field_type not in [1, 2]:
# Raise error
raise ValueError(field_type)
# If the field type is valid.
# For each field in fields listbox
for field in self._fields_listbox.items():
# If the field name exists
if field.name().lower() == field_name.lower():
# Show error dialog
messagebox.showwarning(
'Error',
'Field name exists: `{}`.'.format(field_name)
)
# Ignore.
# Notice the `field add` dialog is still showing.
return
# If the field name not exists.
# Create registry field
success = regkey.field_write(
name=field_name,
type=field_type,
data='',
)
# If have no success
if not success:
# Show error dialog
messagebox.showwarning(
'Error',
'Failed creating field: `{}`.'.format(field_name)
)
# Notice the `field add` dialog is still showing.
# If have success
else:
# Update fields listbox
self._fields_listbox_on_nav_pathcur_change()
# For each field in fields listbox
for index, field in enumerate(self._fields_listbox.items()):
# If the field's name is EQ the newly created field name
if field.name().lower() == field_name.lower():
# Set the index to active
self._fields_listbox.indexcur_set(
index=index,
notify=True,
)
# Stop finding
break
# Hide `field add` dialog
self._field_add_dialog.withdraw()
# Release focus grab on `field add` dialog
self._field_add_dialog.grab_release()
# Set field name textfield to empty
self._field_add_name_textfield.text_set('')
# Set focus on the field name textfield
self._field_add_name_textfield.text_widget().focus()
# Set field type to `String`
self._field_add_type_var.set(1)
# Set confirm handler
self._field_add_dialog.confirm_handler_set(confirm_handler)
# Set focus grab on `field add` dialog
self._field_add_dialog.grab_set()
# Show `field add` dialog
self._field_add_dialog.deiconify()
# Center `field add` dialog around the main window
center_window(
self._field_add_dialog.toplevel(),
point=get_window_center(self.widget().winfo_toplevel()),
)
def _field_del_label_update(self):
"""
Update field delete label.
@return: None.
"""
# If fields listbox has no active index
if self._fields_listbox.indexcur() == -1:
# Set field delete label's state to disabled
self._field_del_label.config(state=DISABLED)
# If fields listbox has active index
else:
# Set field delete label's state to normal
self._field_del_label.config(state=NORMAL)
def _field_del_label_on_click(self, event):
"""
Field delete label click event handler.
@event: Tkinter event object.
@return: None.
"""
# If field delete label is in disabled state
if self._field_del_label.instate([DISABLED]):
# Do nothing
return
# If field delete label is not in disabled state
else:
# Set field delete label's state to active
self._field_del_label.config(state=ACTIVE)
def _field_del_label_on_click_release(self, event):
"""
Field delete label click release event handler.
@event: Tkinter event object.
@return: None.
"""
# If field delete label is in disabled state
if self._field_del_label.instate([DISABLED]):
# Do nothing
return
# If field delete label is not in disabled state.
# Set field delete label's state to normal
self._field_del_label.config(state=NORMAL)
# Get active field
field = self._fields_listbox.itemcur()
# If have no active field
if field is None:
# Ignore the click event
return
# If have active field.
# Get field name
field_name = field.name()
# Show confirmation dialog
is_confirmed = messagebox.askokcancel(
title='Delete field',
message='Delete field `{}`?'.format(field_name)
)
# If the operation is canceled
if not is_confirmed:
# Do nothing
return
# If the operation is not canceled.
# Get old active index
old_indexcur = self._fields_listbox.indexcur()
# Delete the registry field
success = field.delete()
# If have no success
if not success:
# Show error dialog
messagebox.showwarning(
'Error',
'Failed deleting field `{}`.'.format(field_name)
)
# If have success
else:
# Update fields listbox
self._fields_listbox_on_nav_pathcur_change()
# Get fields listbox's last index
index_last = self._fields_listbox.index_last()
# If old active index is not valid
if old_indexcur > index_last:
# Use last index as new active index
indexcur = index_last
# If old active index is valid
else:
# Use old indexcur as new active index
indexcur = old_indexcur
# Set new indexcur
self._fields_listbox.indexcur_set(
indexcur,
notify=True,
)
def _field_load_label_on_click(self, event):
"""
Field load label click event handler.
@event: Tkinter event object.
@return: None.
"""
# If field load label is in disabled state
if self._field_load_label.instate([DISABLED]):
# Do nothing
return
# If field load label is not in disabled state
else:
# Set field load label's state to active
self._field_load_label.config(state=ACTIVE)
def _field_load_label_on_click_release(self, event):
"""
Field load label click release event handler.
@event: Tkinter event object.
@return: None.
"""
# If field load label is in disabled state
if self._field_load_label.instate([DISABLED]):
# Do nothing
return
# If field load label is not in disabled state.
else:
# Set field load label's state to normal
self._field_load_label.config(state=NORMAL)
# Update field editor
self._field_editor_update()
def _field_save_label_on_click(self, event):
"""
Field save label click event handler.
@event: Tkinter event object.
@return: None.
"""
# If field save label is in disabled state
if self._field_save_label.instate([DISABLED]):
# Do nothing
return
# If field save label is not in disabled state
else:
# Set field save label's state to active
self._field_save_label.config(state=ACTIVE)
def _field_save_label_on_click_release(self, event):
"""
Field save label click release event handler.
@event: Tkinter event object.
@return: None.
"""
# If field save label is in disabled state
if self._field_save_label.instate([DISABLED]):
# Do nothing
return
# If field save label is not in disabled state.
# Set field save label's state to normal
self._field_save_label.config(state=NORMAL)
# Get fields listbox's active field
field = self._fields_listbox.itemcur()
# If have no active field
if field is None:
# Do nothing
return
# If have active field
else:
try:
# Get field editor data
data = self._field_editor.data()
# Write data to registry field
field.data_set(data=data)
# If have error
except Exception:
# Show error dialog
messagebox.showwarning(
'Error',
"Failed writing data to registry."
)
def menutree_create(self, specs, id_sep=None):
"""
Create menu tree by specs.
@param specs: A list of spec dicts. Each spec dict can have these keys:
- pid: Parent menu item ID.
- id: Item ID.
- type: Item type, one of ['menu', 'separator', 'command'].
Default is `command`.
- id_is_full: Whether `id` is full ID. Default is False.
If `id` is not full ID, the full ID is generated by concatenating
`pid`, `id_sep`, and `id`.
- label: Item label. Default is to use the `id` value.
- key: Registry key path. Used if `type` is 'command'.
The registry key path can contain a field name pointer `->` (see
2T5EK).
@param id_sep: ID parts separator used when converting a relative ID to
full ID. Default is `/`.
@return: MenuTree object.
"""
# Create menu tree
menutree = MenuTree(master=self.widget())
# ID parts separator
id_sep = id_sep if id_sep is not None else '/'
# For each spec dict
for spec in specs:
# Get item type.
# Default is `command`.
item_type = spec.get('type', 'command')
# Get item PID
pid = spec['pid']
# Get item ID
id = spec['id']
# Get whether item ID is full ID.
# Default is False.
id_is_full = spec.get('id_is_full', False)
# Get item label.
# Default is use `id` value.
label = spec.get('label', id)
# If the item type is `menu`
if item_type == 'menu':
# Create menu item
menutree.add_menu(
pid=pid,
id=id,
id_is_full=id_is_full,
id_sep=id_sep,
label=label,
)
# If the item type is `separator`
elif item_type == 'separator':
# Create separator item
menutree.add_separator(
pid=pid,
id=id,
id_is_full=id_is_full,
)
# If the item type is `command`
elif item_type == 'command':
# Get registry key path
key_path = spec.get('key', None)
# If registry key path is not given
if key_path is None:
# Use given item ID as registry key path
key_path = id
# Get whether item ID is full ID.
# Default is False.
id_is_full = spec.get('id_is_full', False)
# Get menu item label.
# Default is use full item ID.
label = spec.get('label', None)
# 2T5EK
# If the key path contains field name pointer `->`
if key_path.find('->') != -1:
# Split the key path to real key path and field name
key_path, _, field_name = key_path.partition('->')
# If the key path not contains field name pointer `->`
else:
# Set field name to None
field_name = None
# Create click event handler
def on_click(
key_path=key_path,
field_name=field_name,
):
"""
Command item's click event handler.
@param key_path: Registry key path.
@param field_name: Registry key's field name.
@return: None.
"""
# Go to the key path
success = self._path_nav_goto(key_path)
# If have no success
if not success:
# Ignore
return
# If have success.
# If field name is specified in the spec
if field_name is not None:
# For each field in fields listbox
for index, field in enumerate(
self._fields_listbox.items()
):
# If the field's name is EQ field name in the spec
if field.name().lower() == field_name.lower():
# Set the index to active
self._fields_listbox.indexcur_set(
index,
focus=True,
notify=True,
)
# Stop finding
break
# If field name is not specified in the spec,
# no need set active item for fields listbox.
# Create command item
menutree.add_command(
pid=pid,
id=id,
command=on_click,
id_is_full=id_is_full,
id_sep=id_sep,
label=label,
)
# If the item type is something else
else:
# Raise error
raise ValueError(item_type)
# Return the menu tree
return menutree
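# Usage sketch (illustrative; `editor` is an already constructed
# RegistryEditor and the spec values are hypothetical):
#
#   editor.menutree_create(specs=[
#       dict(pid='/', id='Environment', type='menu'),
#       dict(pid='/Environment',
#            id=r'HKEY_CURRENT_USER\Environment->PATH'),
#   ])
#
# The `->` pointer selects the `PATH` field after navigating to the key.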
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/registry_editor.py
|
registry_editor.py
|
from __future__ import absolute_import
#
class Event(object):
"""
Event object that contains event info. These attributes are available:
- event: Event name.
- arg: Event argument.
- notifier: Event notifier.
"""
def __init__(
self,
event,
arg=None,
notifier=None,
):
"""
Initialize object.
@param event: Event name.
@param arg: Event argument.
@param notifier: Event notifier.
@return: None.
"""
# Event name
self.event = event
# Event argument
self.arg = arg
# Event notifier
self.notifier = notifier
#
class _EventHandlerWrapper(object):
"""
Event handler wrapper that calls original event handler with or without
event argument, according to `need_arg` value.
"""
def __init__(self, handler, need_arg):
"""
Initialize object.
@param handler: Original event handler.
@param need_arg: Whether original event handler needs event argument.
@return: None.
"""
# Original event handler
self.handler = handler
# Whether original event handler needs event argument
self._need_arg = need_arg
def __eq__(self, other):
"""
Equality operator.
@param other: The other object.
@return: True if wrapped handlers are equal, otherwise False.
"""
# If the other object is not of the same type
if not isinstance(other, self.__class__):
# Return False
return False
# If the other object is of the same type
else:
# Test whether wrapped handlers are equal
return self.handler == other.handler
def __call__(self, arg):
"""
Event handler function.
@param arg: Event argument.
@return: Original event handler's result.
"""
# If original event handler needs event argument
if self._need_arg:
# Call original event handler with argument.
# Return call result.
return self.handler(arg)
        # If original event handler does not need event argument
else:
# Call original event handler without argument.
# Return call result.
return self.handler()
#
class Eventor(object):
"""
Eventor provides methods for registering event handlers and notifying them
of events.
"""
def __init__(self):
"""
Initialize object.
@return: None.
"""
# Create event handlers dict.
# Key is event name.
# Value is a list of handlers for the event.
self._event_handlers = {}
def handler_add(self, event, handler, need_arg=False):
"""
Add event handler for an event.
@param event: Event name. `None` means every event.
@param handler: Event handler.
@param need_arg: Whether the event handler needs event argument.
@return: None.
"""
# Create event handler wrapper
handler_wrapper = _EventHandlerWrapper(handler, need_arg=need_arg)
# If handlers list for the event has not been created
if event not in self._event_handlers:
# Create handlers list for the event.
# Add the handler wrapper to the handlers list.
self._event_handlers[event] = [handler_wrapper]
# If handlers list for the event has been created
else:
# If the handler wrapper has been added before
if handler_wrapper in self._event_handlers[event]:
# Get error message
msg = """Handler `{}` has already been added for event\
`{}`.""".format(handler, event)
# Raise error
raise ValueError(msg)
# If the handler wrapper has not been added before
else:
# Add the handler wrapper to the handlers list.
self._event_handlers[event].append(handler_wrapper)
def handler_remove(self, handler):
"""
Remove event handler.
@param handler: Event handler to remove.
@return: None.
"""
# `Remove infos` list.
# Each info is a tuple: (handler_wrapper, handler_list, event).
remove_info_s = []
# For each event name
for event in self._event_handlers:
# Get handlers list for the event
handler_wrapper_s = self._event_handlers[event]
# For each handler wrapper
for handler_wrapper in handler_wrapper_s:
# If the handler wrapper should be removed
if handler_wrapper.handler == handler:
# Add `remove info` to `remove infos` list
remove_info_s.append(
(handler_wrapper, handler_wrapper_s, event)
)
# If `remove infos` list is empty
if not remove_info_s:
# Return
return
# If `remove infos` list is not empty.
# For each `remove info`
for remove_info in remove_info_s:
# Get handler wrapper, handlers list, and event name
handler_wrapper, handler_wrapper_s, event = remove_info
# Remove the handler wrapper from the handlers list
handler_wrapper_s.remove(handler_wrapper)
            # If the handlers list is empty
            if not handler_wrapper_s:
# Remove the handlers list
del self._event_handlers[event]
def handler_remove_all(self):
"""
Remove all event handlers.
@return: None.
"""
# Set event handlers dict to empty
self._event_handlers = {}
def handler_notify(
self,
event,
arg=None,
notifier=None,
need_info=False,
):
"""
Notify event handlers of given event.
@param event: Event name.
@param arg: Event argument.
@param notifier: Event notifier. Default is `self`.
Event notifier is used only if `need_info` is True.
@param need_info: Whether need create event info object and pass the
event info object as event argument to event handlers.
@return: None.
"""
# If the event has no handlers,
# and there are no `None` handlers listening on every event.
if event not in self._event_handlers \
and None not in self._event_handlers:
# Return
return
# If the event has handlers,
# or there are `None` handlers listening on every event.
# If need event info object
if need_info:
# Create event info object.
# Use the event info object as event argument.
arg = Event(
event=event,
arg=arg,
notifier=notifier if notifier is not None else self,
)
# If not need event info object
else:
# Use the event argument as-is
arg = arg
# If the event has handlers,
if event in self._event_handlers:
# For each handler in the event's handlers list
for handler in self._event_handlers[event]:
# Call the handler
handler(arg)
# If there are `None` handlers listening on every event
if None in self._event_handlers:
# For each handler in the `None` handlers list
for handler in self._event_handlers[None]:
# Call the handler
handler(arg)
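#
# Minimal usage sketch (not part of the original module): register handlers
# with `handler_add` and notify them with `handler_notify`. The event names
# and arguments used here are arbitrary.
if __name__ == '__main__':
    # Create eventor
    eventor = Eventor()
    # Add a handler that needs the event argument
    eventor.handler_add('saved', lambda arg: print('saved:', arg), need_arg=True)
    # Add a catch-all handler registered for every event (event name `None`)
    eventor.handler_add(None, lambda: print('something happened'))
    # Notify handlers of the `saved` event, passing an argument
    eventor.handler_notify('saved', arg='file.txt')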
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/eventor.py
|
eventor.py
|
from __future__ import absolute_import
from tkinter.ttk import Frame
#
class Vidget(object):
"""
Vidget contains a widget, instead of being a widget itself using
inheritance. The benefit is that `An editor has a GUI` is more natural than
`An editor is a GUI`.
Vidget delegates widget-related methods, e.g. `grid`, `pack`, and `place`,
to the widget contained, so that the Vidget object can be used just like a
widget.
"""
def __init__(
self,
master=None,
widget=None,
config_target=None,
):
"""
Initialize object.
@param master: Master widget.
@param widget: Main widget. If not given, will create a Frame widget.
@param config_target: The widget to call `config` method on. Default is
the main widget.
@return: None.
"""
# Master widget
self._vidget_master = master
# Config target widget
self._vidget_config_target = config_target
# If main widget is given
if widget is not None:
# Set main widget
self._vidget_widget = widget
# If main widget is not given
else:
# Create default main Frame widget
self._vidget_widget = Frame(master=master)
# Configure children layout weights
self._vidget_widget.rowconfigure(0, weight=1)
self._vidget_widget.columnconfigure(0, weight=1)
def master(self):
"""
Get the master widget.
@return: Master widget.
"""
# Return the master widget
return self._vidget_master
def widget(self):
"""
Get the main widget.
@return: Main widget.
"""
# Return the main widget
return self._vidget_widget
def config(self, *args, **kwargs):
"""
Call `config` method on the config target widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
        @return: Call result.
"""
# Call `config` method on the config target widget
return self.config_target().config(*args, **kwargs)
def config_target(self):
"""
Get the config target widget.
@return: Config target widget. Default is the main widget.
"""
# If the config target widget is given
if self._vidget_config_target is not None:
# Return the config target widget
return self._vidget_config_target
# If the config target widget is not given
else:
# Return the main widget
return self.widget()
def config_target_set(self, target):
"""
Set the config target widget.
@param target: Config target widget. `None` means the main widget.
@return: None.
"""
# Set the config target widget
self._vidget_config_target = target
def state(self, *args, **kwargs):
"""
Call `state` method on the config target widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `state` method on the config target widget
return self.config_target().state(*args, **kwargs)
def instate(self, *args, **kwargs):
"""
Call `instate` method on the config target widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `instate` method on the config target widget
return self.config_target().instate(*args, **kwargs)
def bind(self, *args, **kwargs):
"""
Call `bind` method on the config target widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `bind` method on the config target widget
return self.config_target().bind(*args, **kwargs)
def tkraise(self, *args, **kwargs):
"""
Call `tkraise` method on the main widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `tkraise` method on the main widget
return self.widget().tkraise(*args, **kwargs)
def lower(self, *args, **kwargs):
"""
Call `lower` method on the main widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `lower` method on the main widget
return self.widget().lower(*args, **kwargs)
def grid(self, *args, **kwargs):
"""
Call `grid` method on the main widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `grid` method on the main widget
return self.widget().grid(*args, **kwargs)
def pack(self, *args, **kwargs):
"""
Call `pack` method on the main widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `pack` method on the main widget
return self.widget().pack(*args, **kwargs)
def place(self, *args, **kwargs):
"""
Call `place` method on the main widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `place` method on the main widget
return self.widget().place(*args, **kwargs)
def grid_forget(self):
"""
Call `grid_forget` method on the main widget.
@return: Call result.
"""
# Call `grid_forget` method on the main widget
return self.widget().grid_forget()
def grid_propagate(self, value):
"""
Call `grid_propagate` method on the main widget.
@param value: Whether propagate.
@return: Call result.
"""
# Call `grid_propagate` method on the main widget
return self.widget().grid_propagate(value)
def pack_forget(self):
"""
Call `pack_forget` method on the main widget.
@return: Call result.
"""
# Call `pack_forget` method on the main widget
return self.widget().pack_forget()
def pack_propagate(self, value):
"""
Call `pack_propagate` method on the main widget.
@param value: Whether propagate.
@return: Call result.
"""
# Call `pack_propagate` method on the main widget
return self.widget().pack_propagate(value)
def place_forget(self):
"""
Call `place_forget` method on the main widget.
@return: Call result.
"""
# Call `place_forget` method on the main widget
return self.widget().place_forget()
def destroy(self):
"""
Call `destroy` method on the main widget.
@return: Call result.
"""
# Call `destroy` method on the main widget
return self.widget().destroy()
def after(self, *args, **kwargs):
"""
Call `after` method on the main widget.
@param args: Positional arguments.
@param kwargs: Keyword arguments.
@return: Call result.
"""
# Call `after` method on the main widget
return self.widget().after(*args, **kwargs)
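#
# Minimal usage sketch (not part of the original module): wrap an existing
# Tkinter widget in a Vidget and use the delegated geometry and config
# methods. The label text and grid options are arbitrary.
if __name__ == '__main__':
    from tkinter import Tk
    from tkinter.ttk import Label
    # Create root window
    root = Tk()
    # Create a Vidget whose main widget is a ttk Label
    vidget = Vidget(master=root, widget=Label(master=root, text='Hello'))
    # Lay out the Vidget just like a widget, via the delegated `grid` method
    vidget.grid(row=0, column=0, sticky='NSEW')
    # Configure the label via the config target (defaults to the main widget)
    vidget.config(text='Hello Vidget')
    # Enter main loop
    root.mainloop()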
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/tkinterutil/vidget.py
|
vidget.py
|
from __future__ import absolute_import
from tkinter import Listbox
from tkinter.constants import DISABLED
from tkinter.constants import END
from tkinter.constants import HORIZONTAL
from tkinter.constants import VERTICAL
from tkinter.ttk import Scrollbar
from .eventor import Eventor
from .vidget import Vidget
#
class _HiddenScrollbar(Scrollbar):
"""
Scrollbar that hides if slider's both ends reached extreme position.
"""
def set(self, lo, hi):
"""
Set scrollbar slider's end positions.
@param lo: Low end position. A float value between 0.0 and 1.0.
@param hi: High end position. A float value between 0.0 and 1.0.
@return: None.
"""
# If scrollbar slider's both ends reached extreme position
if float(lo) <= 0.0 and float(hi) >= 1.0:
# Hide the scrollbar
self.grid_remove()
# If not scrollbar slider's both ends reached extreme position
else:
# Show the scrollbar
self.grid()
# Call super version
Scrollbar.set(self, lo, hi)
#
class ListboxVidget(Vidget, Eventor):
"""
ListboxVidget contains a Listbox widget. It adds the following abilities:
- Store items of any type, unlike Listbox widget that only stores texts.
- Remember selected item even if the listbox widget lost focus.
- Notify pre-change and post-change events.
"""
# Error raised when trying to change the listbox while a change is going on
class CircularCallError(ValueError):
pass
# Error raised when trying to change the listbox while it is disabled
class DisabledError(ValueError):
pass
# Event notified when the listbox's items are to be changed
ITEMS_CHANGE_SOON = 'ITEMS_CHANGE_SOON'
# Event notified when the listbox's items are changed
ITEMS_CHANGE_DONE = 'ITEMS_CHANGE_DONE'
# Event notified when the listbox's active item is to be changed
ITEMCUR_CHANGE_SOON = 'ITEMCUR_CHANGE_SOON'
# Event notified when the listbox's active item is changed
ITEMCUR_CHANGE_DONE = 'ITEMCUR_CHANGE_DONE'
# Events list
EVENTS = (
ITEMS_CHANGE_SOON,
ITEMS_CHANGE_DONE,
ITEMCUR_CHANGE_SOON,
ITEMCUR_CHANGE_DONE,
)
def __init__(
self,
items=None,
item_to_text=None,
normal_bg='',
normal_fg='',
active_bg='sky blue',
active_fg='white',
selected_bg='steel blue',
selected_fg='white',
master=None,
):
"""
Initialize object.
@param items: Items list.
@param item_to_text: Item-to-text function. Default is `str`.
@param normal_bg: Unselected item background color.
@param normal_fg: Unselected item foreground color.
@param active_bg: Active item background color. `Active` means the item
is selected (in general meaning) but the listbox has no focus.
@param active_fg: Active item foreground color. `Active` means the item
is selected (in general meaning) but the listbox has no focus.
@param selected_bg: Selected item background color. `Selected` means
the item is selected (in general meaning) and the listbox has focus.
@param selected_fg: Selected item foreground color. `Selected` means
the item is selected (in general meaning) and the listbox has focus.
@param master: Master widget.
@return: None.
"""
# Initialize Vidget.
# Create main frame widget.
Vidget.__init__(
self,
master=master,
)
# Initialize Eventor
Eventor.__init__(self)
        # If items list is given
        if items is not None:
            # If items list is not a list
            if not isinstance(items, list):
                # Raise error
                raise TypeError(items)
        # Items list.
        # Use the given items list, or an empty list if not given.
        self._items = items if items is not None else []
# Item-to-text function. Default is `str`.
self._item_to_text = item_to_text if item_to_text is not None else str
        # Unselected item foreground color
        self._normal_fg = normal_fg
        # Unselected item background color
        self._normal_bg = normal_bg
        # Active item foreground color
        self._active_fg = active_fg
        # Active item background color
        self._active_bg = active_bg
        # Selected item foreground color
        self._selected_fg = selected_fg
        # Selected item background color
        self._selected_bg = selected_bg
# Whether the listbox is changing
self._is_changing = False
# Active index. `-1` means void, i.e. no item is active.
self._indexcur = -1
# Whether active index is being reset to same value
self._is_resetting = False
# Create listbox widget
self._listbox = Listbox(
master=self.widget(),
relief='groove',
activestyle='none',
highlightthickness=0,
# Active index cache only supports single-selection mode for now.
# See 2N6OR.
selectmode='single',
)
# Set the listbox widget as config target
self.config_target_set(self._listbox)
# Create x-axis scrollbar
self._scrollbar_xview = _HiddenScrollbar(
self.widget(),
orient=HORIZONTAL,
)
# Create y-axis scrollbar
self._scrollbar_yview = _HiddenScrollbar(
self.widget(),
orient=VERTICAL,
)
# Mount scrollbars
self._listbox.config(xscrollcommand=self._scrollbar_xview.set)
self._listbox.config(yscrollcommand=self._scrollbar_yview.set)
self._scrollbar_xview.config(command=self._listbox.xview)
self._scrollbar_yview.config(command=self._listbox.yview)
# Bind single-click event handler
self._listbox.bind('<Button-1>', self._on_single_click)
# Bind double-click event handler
self._listbox.bind('<Double-Button-1>', self._on_double_click)
# Update listbox widget
self._listbox_widget_update(keep_active=False)
# Update widget
self._widget_update()
def _widget_update(self):
"""
Update widget.
@return: None.
"""
# Row 0 for listbox and y-axis scrollbar
self.widget().rowconfigure(0, weight=1)
# Row 1 for x-axis scrollbar
self.widget().rowconfigure(1, weight=0)
# Column 0 for listbox and x-axis scrollbar
self.widget().columnconfigure(0, weight=1)
# Column 1 for y-axis scrollbar
self.widget().columnconfigure(1, weight=0)
# Lay out listbox
self._listbox.grid(row=0, column=0, sticky='NSEW')
# Lay out x-axis scrollbar
self._scrollbar_xview.grid(row=1, column=0, sticky='EW')
# Lay out y-axis scrollbar
self._scrollbar_yview.grid(row=0, column=1, sticky='NS')
def is_enabled(self):
"""
Test whether the listbox is enabled.
@return: Boolean.
"""
# Return whether the listbox is enabled
return self._listbox.config('state')[4] != DISABLED
def is_changing(self):
"""
Test whether the listbox is changing.
@return: Boolean.
"""
# Return whether the listbox is changing
return self._is_changing
def is_resetting(self):
"""
Test whether the listbox is setting active index to the same value.
@return: Boolean.
"""
# Return whether the listbox is setting active index to the same value
return self._is_resetting
def size(self):
"""
Get number of items.
@return: Number of items.
"""
# Return number of items
return len(self._items)
def items(self):
"""
Get items list.
Notice do not change the list outside.
@return: Items list.
"""
# Return items list
return self._items
def items_set(
self,
items,
notify=True,
keep_active=False,
):
"""
Set items list.
Notice do not change the list outside.
@param items: Items list.
@param notify: Whether notify pre-change and post-change events.
@param keep_active: Whether keep or clear active index.
@return: None.
"""
# If the items is not list
if not isinstance(items, list):
# Raise error
raise TypeError(items)
# If the items is list.
# If the listbox is disabled
if not self.is_enabled():
# Raise error
raise ListboxVidget.DisabledError()
# If the listbox is not disabled.
# If the listbox is changing
if self._is_changing:
# Raise error
raise ListboxVidget.CircularCallError()
# If the listbox is not changing.
# Set changing flag on
self._is_changing = True
# If notify events
if notify:
# Notify pre-change event
self.handler_notify(self.ITEMS_CHANGE_SOON)
# Store the new items
self._items = items
# Update listbox widget
self._listbox_widget_update(
keep_active=keep_active
)
# If notify events
if notify:
# Notify post-change event
self.handler_notify(self.ITEMS_CHANGE_DONE)
# Set changing flag off
self._is_changing = False
def index_is_valid(self, index):
"""
Test whether given index is valid. Notice -1 is not valid.
@param index: Index to test.
@return: Boolean.
"""
# Test whether given index is valid
        return 0 <= index < self.size()
def index_is_valid_or_void(self, index):
"""
Test whether given index is valid or is -1.
@param index: Index to test.
@return: Boolean.
"""
# Test whether given index is valid or is -1
return index == -1 or self.index_is_valid(index)
def index_first(self):
"""
Get the first item's index.
@return: First item's index, or -1 if the listbox is empty.
"""
# Return the first item's index
return 0 if self.size() > 0 else -1
def index_last(self):
"""
Get the last item's index.
@return: Last item's index, or -1 if the listbox is empty.
"""
# Return the last item's index
return self.size() - 1
def indexcur(self, internal=False, raise_error=False):
"""
Get the active index.
        @param internal: See 2N6OR.
        @param raise_error: Whether raise IndexError if no active index.
        @return: The active index. If no active index, either return -1, or
        raise IndexError if `raise_error` is True.
"""
# Get active indexes
indexcurs = self._indexcurs(internal=internal)
# If have active indexes
if indexcurs:
# Return the first active index
return indexcurs[0]
# If no active indexes
else:
# If raise error
if raise_error:
# Raise error
raise IndexError(-1)
# If not raise error
else:
# Return -1
return -1
def _indexcurs(self, internal=False):
"""
Get active indexes list.
2N6OR
@param internal: Whether use listbox widget's selected indexes, instead
of cached active index.
Notice listbox widget has no selected indexes if it has no focus.
Notice using cached active index only supports single-selection mode,
which means the result list has at most one index.
@return: Active indexes list.
"""
# If use listbox widget's selected indexes
if internal:
# Return listbox widget's selected indexes list
return [int(x) for x in self._listbox.curselection()]
# If not use listbox widget's selected indexes
else:
# If cached active index is valid
if self.index_is_valid(self._indexcur):
# Return a list with the cached active index
return [self._indexcur]
# If cached active index is not valid
else:
# Return empty list
return []
def indexcur_set(
self,
index,
focus=False,
notify=True,
notify_arg=None,
):
"""
Set active index.
@param index: The index to set.
@param focus: Whether set focus on the listbox widget.
@param notify: Whether notify pre-change and post-change events.
@param notify_arg: Event argument.
@return: None.
"""
# If the index is not valid or -1
if not self.index_is_valid_or_void(index):
# Raise error
raise IndexError(index)
# If the index is valid or is -1.
# If the listbox is not enabled
if not self.is_enabled():
# Raise error
raise ListboxVidget.DisabledError()
# If the listbox is enabled.
# If the listbox is changing
if self._is_changing:
# Raise error
raise ListboxVidget.CircularCallError()
# If the listbox is not changing.
# Set changing flag on
self._is_changing = True
# Get old active index
old_indexcur = self._indexcur
# Set resetting flag on if new and old indexes are equal
self._is_resetting = (index == old_indexcur)
# If notify events
if notify:
# Notify pre-change event
self.handler_notify(self.ITEMCUR_CHANGE_SOON, notify_arg)
# If old active index is valid
if self.index_is_valid(old_indexcur):
# Set old active item's background color to normal color
self._listbox.itemconfig(old_indexcur, background=self._normal_bg)
# Set old active item's foreground color to normal color
self._listbox.itemconfig(old_indexcur, foreground=self._normal_fg)
# Cache new active index
self._indexcur = index
# Clear listbox widget's selection
self._listbox.selection_clear(0, END)
# Set listbox widget's selection
self._listbox.selection_set(index)
# Set listbox widget's activated index
self._listbox.activate(index)
# If new active index is valid
if index != -1:
# Set new active item's background color to active color
self._listbox.itemconfig(index, background=self._active_bg)
# Set new active item's foreground color to active color
self._listbox.itemconfig(index, foreground=self._active_fg)
# If set focus
if focus:
# Set focus on the listbox widget
self._listbox.focus_set()
# If new active index is valid
if index != -1:
# Make the active item visible
self._listbox.see(index)
# If notify events
if notify:
# Notify post-change event
self.handler_notify(self.ITEMCUR_CHANGE_DONE, notify_arg)
# Set resetting flag off
self._is_resetting = False
# Set changing flag off
self._is_changing = False
def indexcur_set_by_event(
self,
event,
focus=False,
notify=True,
notify_arg=None,
):
"""
Set active index using a Tkinter event object that contains coordinates
of the active item.
@param event: Tkinter event object.
@param focus: Whether set focus on the listbox widget.
@param notify: Whether notify pre-change and post-change events.
@param notify_arg: Event argument.
@return: None.
"""
        # Get the listbox item index nearest to the event's y coordinate
index = self._listbox.nearest(event.y)
# If the index is not valid
if not self.index_is_valid_or_void(index):
# Ignore the event
return
# If the index is valid
else:
# Set the index as active index
self.indexcur_set(
index=index,
focus=focus,
notify=notify,
notify_arg=notify_arg,
)
def item(self, index):
"""
        Get item at given index.
        @param index: Item index.
        @return: Item at given index. Raise IndexError if the index is not
        valid.
"""
return self.items()[index]
def itemcur(self, internal=False, raise_error=False):
"""
Get the active item.
@param internal: See 2N6OR.
@param raise_error: Whether raise error if no active item.
        @return: The active item. If no active item, raise IndexError if
        `raise_error` is True, otherwise return None.
"""
# Get active index.
# May raise IndexError if `raise_error` is True.
indexcur = self.indexcur(
internal=internal,
raise_error=raise_error,
)
# If no active index
if indexcur == -1:
# Return None
return None
# If have active index
else:
# Return the active item
return self.items()[indexcur]
def item_insert(
self,
item,
index=None,
notify=True,
keep_active=True,
):
"""
Insert item at given index.
@param item: Item to insert.
@param index: Index to insert. `None` means active index, and if no
active index, insert at the end.
@param notify: Whether notify pre-change and post-change events.
@param keep_active: Whether keep or clear active index.
@return: None.
"""
# If notify events
if notify:
# Notify pre-change events
self.handler_notify(self.ITEMCUR_CHANGE_SOON)
self.handler_notify(self.ITEMS_CHANGE_SOON)
# Get old active index
active_index = self.indexcur()
        # If the index is None,
        # it means use active index.
        if index is None:
            # Use active index.
            # If there is no active index, append at the end instead, because
            # `list.insert(-1, ...)` would insert before the last item.
            index = active_index if active_index != -1 else self.size()
# Insert the item to the items list
self._items.insert(index, item)
# If old active index is valid
if active_index != -1:
# If old active index is GE the inserted index
if active_index >= index:
# Shift active index by one
active_index += 1
# If old active index is not GE the inserted index, use it as-is.
# Set new active index
self.indexcur_set(index=active_index, notify=False)
# Update listbox widget
self._listbox_widget_update(
keep_active=keep_active
)
# If notify events
if notify:
# Notify post-change events
self.handler_notify(self.ITEMS_CHANGE_DONE)
self.handler_notify(self.ITEMCUR_CHANGE_DONE)
def item_remove(
self,
index,
notify=True,
keep_active=True,
):
"""
Remove item at given index.
@param index: Index to remove.
@param notify: Whether notify pre-change and post-change events.
@param keep_active: Whether keep or clear active index.
@return: None.
"""
# If the index is not valid
if not self.index_is_valid(index):
# Raise error
raise ValueError(index)
# If the index is valid.
# If notify events
if notify:
# Notify pre-change events
self.handler_notify(self.ITEMCUR_CHANGE_SOON)
self.handler_notify(self.ITEMS_CHANGE_SOON)
# Get old active index
active_index = self.indexcur()
# Remove item at the index
del self._items[index]
# If old active index is valid
if active_index != -1:
# Get the last index
index_last = self.index_last()
# If old active index is GT the last index
if active_index > index_last:
# Use the last index as new active index
active_index = index_last
# If old active index is not GT the last index, use it as-is.
# Set new active index
self.indexcur_set(index=active_index, notify=False)
# Update listbox widget
self._listbox_widget_update(
keep_active=keep_active
)
# If notify events
if notify:
# Notify post-change events
self.handler_notify(self.ITEMS_CHANGE_DONE)
self.handler_notify(self.ITEMCUR_CHANGE_DONE)
def handler_add(
self,
event,
handler,
need_arg=False,
):
"""
Add event handler for an event.
If the event is ListboxVidget event, add the event handler to Eventor.
If the event is not ListboxVidget event, add the event handler to
listbox widget.
Notice this method overrides `Eventor.handler_add` in order to add
non-ListboxVidget event handler to listbox widget.
@param event: Event name.
@param handler: Event handler.
@param need_arg: Whether the event handler needs event argument.
@return: None.
"""
# If the event is ListboxVidget event
if event in self.EVENTS:
# Add the event handler to Eventor
return Eventor.handler_add(
self,
event=event,
handler=handler,
need_arg=need_arg,
)
# If the event is not ListboxVidget event,
# it is assumed to be Tkinter widget event.
else:
# Add the event handler to listbox widget
return self.bind(
event=event,
handler=handler,
)
def bind(
self,
event,
handler,
):
"""
Add event handler to listbox widget.
ListboxVidget internally uses `<Button-1>` and `<Double-Button-1>` to
capture active index changes. So if the given event is `<Button-1>` or
`<Double-Button-1>`, the given handler will be wrapped.
@param event: Event name.
@param handler: Event handler.
@return: None.
"""
# If the event is not `<Button-1>` or `<Double-Button-1>`
if event not in ['<Button-1>', '<Double-Button-1>']:
# Add the event handler to listbox widget
self._listbox.bind(event, handler)
# If the event is `<Button-1>` or `<Double-Button-1>`
else:
# Create event handler wrapper
def handler_wrapper(e):
"""
Event handler wrapper that sets new active index and then calls
the wrapped event handler.
Setting new active index is needed because when this handler is
called by Tkinter, the active index of the listbox is still
old.
@param e: Tkinter event object.
@return: None.
"""
# Set new active index
self.indexcur_set_by_event(e, notify=True)
# Call the wrapped event handler
handler(e)
# Add the event handler wrapper to the listbox widget
self._listbox.bind(event, handler_wrapper)
def _on_single_click(self, event):
"""
`<Button-1>` event handler that updates active index.
@param event: Tkinter event object.
@return: None.
"""
# Updates active index
self.indexcur_set_by_event(event, notify=True)
def _on_double_click(self, event):
"""
`<Double-Button-1>` event handler that updates active index.
@param event: Tkinter event object.
@return: None.
"""
# Updates active index
self.indexcur_set_by_event(event, notify=True)
def _listbox_widget_update(
self,
keep_active,
):
"""
Update listbox widget's items and selection.
@param keep_active: Whether keep or clear active index.
@return: None.
"""
# Remove old items from listbox widget
self._listbox.delete(0, END)
# Insert new items into listbox widget.
# For each ListboxVidget items.
for index, item in enumerate(self.items()):
# Get item text
item_text = self._item_to_text(item)
# Insert the item text into listbox widget
self._listbox.insert(index, item_text)
# Set the item's normal background color
self._listbox.itemconfig(index, background=self._normal_bg)
# Set the item's normal foreground color
self._listbox.itemconfig(index, foreground=self._normal_fg)
# Set the item's selected background color
self._listbox.itemconfig(index, selectbackground=self._selected_bg)
# Set the item's selected foreground color
self._listbox.itemconfig(index, selectforeground=self._selected_fg)
# If keep active index
if keep_active:
# Use old active index
indexcur = self._indexcur
# If not keep active index
else:
# Set active index to -1
indexcur = self._indexcur = -1
# Clear old selection
self._listbox.selection_clear(0, END)
# Set new selection.
# `-1` works.
self._listbox.selection_set(indexcur)
# Set new active index.
# `-1` works.
self._listbox.activate(indexcur)
# If new active index is valid
if indexcur != -1:
# Set active background color
self._listbox.itemconfig(indexcur, background=self._active_bg)
# Set active foreground color
self._listbox.itemconfig(indexcur, foreground=self._active_fg)
# Make the active item visible
self._listbox.see(indexcur)
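#
# Minimal usage sketch (not part of the original module): create a
# ListboxVidget holding non-string items and react to active-item changes.
# The item values and event handler are arbitrary.
if __name__ == '__main__':
    from tkinter import Tk
    # Create root window
    root = Tk()
    # Configure root layout weights
    root.rowconfigure(0, weight=1)
    root.columnconfigure(0, weight=1)
    # Create listbox vidget storing integer items, displayed via `str`
    listbox = ListboxVidget(items=[1, 2, 3], item_to_text=str, master=root)
    # Lay out the listbox vidget
    listbox.grid(row=0, column=0, sticky='NSEW')
    # React to active item changes notified by the vidget
    listbox.handler_add(
        listbox.ITEMCUR_CHANGE_DONE,
        lambda: print('active item:', listbox.itemcur()),
    )
    # Select the second item programmatically
    listbox.indexcur_set(1, notify=True)
    # Enter main loop
    root.mainloop()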
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/tkinterutil/listbox.py
|
listbox.py
|
from __future__ import absolute_import
from collections import OrderedDict
from tkinter import Menu
from tkinter.constants import END
#
class MenuTree(object):
"""
    MenuTree provides methods to operate on a tree of menu items.
These methods refer to menu items using custom string IDs, instead of
internal object references.
Notice do not change the menu tree outside.
"""
# Info dict keys.
# Item ID key
INFO_K_ID = 'INFO_K_ID'
# Item ID key for top menu
INFO_K_ID_V_TOP = '/'
# Item PID key
INFO_K_PID = 'INFO_K_PID'
# Item widget key
INFO_K_ITEM_WIDGET = 'INFO_K_ITEM_WIDGET'
# Item index key
INFO_K_ITEM_INDEX = 'INFO_K_ITEM_INDEX'
#
def __init__(
self,
id_to_full=None,
id_to_label=None,
id_sep=None,
master=None,
):
"""
Initialize object.
@param id_to_full: Function converts relative ID to full ID.
@param id_to_label: Function converts ID to menu label.
@param id_sep: ID parts separator.
@param master: Master widget.
@return: None.
"""
# Master widget
self._master = master
# ID parts separator.
self._id_sep = id_sep if id_sep is not None else '/'
# Function converts relative ID to full ID
self._id_to_full = id_to_full \
if id_to_full is not None else self._id_to_full_default
# Function converts ID to menu label
self._id_to_label = id_to_label \
if id_to_label is not None else self._id_to_label_default
# Dict that contains menu item infos.
# Key is menu item ID.
# Value is menu item info dict.
self._id_to_info = OrderedDict()
# Create top menu.
# `tearoff` is not allowed.
self._menu_top = Menu(master=self._master, tearoff=False)
# Create top menu's info dict
self._id_to_info[self.INFO_K_ID_V_TOP] = {
# Top menu's ID is `/`
self.INFO_K_ID: self.INFO_K_ID_V_TOP,
# Top menu's PID is None
self.INFO_K_PID: None,
# Top menu's item widget
self.INFO_K_ITEM_WIDGET: self._menu_top,
# Top menu's item index is None
self.INFO_K_ITEM_INDEX: None,
}
def master(self):
"""
Get the master widget.
@return: Master widget.
"""
# Return the master widget
return self._master
def menu_top(self):
"""
Get the top menu widget.
Notice do not change the menu tree outside.
@return: Top menu widget.
"""
# Return the top menu widget
return self._menu_top
def item_widget(self, id):
"""
Get item widget by item ID.
@param id: Item ID.
@return: Item widget.
"""
# Get item info dict
info = self._id_to_info.get(id, None)
# If item info dict not exists
if info is None:
# Raise error
raise ValueError('Item ID not exists: `{}`'.format(id))
# If item info dict exists
else:
# Return the item widget
return info[self.INFO_K_ITEM_WIDGET]
def item_index(self, id):
"""
Get item index as child of a parent menu.
@param id: Item ID.
@return: Item index as child of a parent menu.
"""
# Get item info dict
info = self._id_to_info.get(id, None)
# If item info dict not exists
if info is None:
# Raise error
raise ValueError('Item ID not exists: `{}`'.format(id))
# If item info dict exists
else:
# Return the item index as child of a parent menu
return info[self.INFO_K_ITEM_INDEX]
def item_ids(self):
"""
Get item IDs list.
@return: Item IDs list.
"""
# Return the item IDs list.
return list(self._id_to_info.keys())
def item_exists(self, id):
"""
Test whether given item ID exists.
@param id: Item ID.
@return: Boolean.
"""
# Return whether given item ID exists
return id in self._id_to_info
def item_is_menu(self, id):
"""
Test whether given item ID refers to menu item.
@param id: Item ID.
@return: Boolean.
"""
# Get item info dict
info = self._id_to_info.get(id, None)
# If item info dict not exists
if info is None:
# Raise error
raise ValueError('Item ID not exists: `{}`'.format(id))
# If item info dict exists
else:
# Get item widget
widget = info[self.INFO_K_ITEM_WIDGET]
# Return whether the item widget is menu
return isinstance(widget, Menu)
def item_child_indexes(self, id):
"""
Get given menu item's child item indexes.
@param id: Menu item ID.
@return: Menu item's child item indexes.
"""
# If given item ID not refers to menu item
if not self.item_is_menu(id):
# Raise error
raise ValueError('Item ID not refers to menu: `{}`'.format(id))
# If given item ID refers to menu item.
# Child indexes list
child_index_s = []
# For each item info dict
for info in self._id_to_info.values():
# Get the item PID
pid = info[self.INFO_K_PID]
# If the item's parent item is the given menu item.
if pid == id:
# Get the item's index
index = info[self.INFO_K_ITEM_INDEX]
# Add the item's index to the child indexes list
child_index_s.append(index)
# Sort the child indexes list
child_index_s.sort()
# Return the child indexes list
return child_index_s
def item_child_index_last(self, id):
"""
        Get given menu item's last child item index.
        Notice that if child items have been added to the given menu item
        without using methods of this class, this may give a different result
        than `self._item_child_index_last_internal`.
@param id: Menu item ID.
@return: Menu item's last child item index, or -1 if no child item.
"""
# If given item ID not refers to menu item
if not self.item_is_menu(id):
# Raise error
raise ValueError('Item ID not refers to menu: `{}`'.format(id))
# If given item ID refers to menu item.
# Get given menu item's child item indexes.
# Can be empty.
index_s = self.item_child_indexes(id=id)
# If have child item
if index_s:
# Return the last child item index
return index_s[-1]
# If no child item
else:
# Return -1
return -1
def _item_child_index_last_internal(self, id):
"""
Get given menu item's last child item index, according to the menu
widget's internal state.
@param id: Menu item ID.
@return: Menu item's last child item index, or -1 if no child item.
"""
# Get item info dict
info = self._id_to_info.get(id, None)
# If item info dict not exists
if info is None:
# Raise error
raise ValueError('Item ID not exists: `{}`'.format(id))
# If item info dict exists
else:
# Get widget
widget = info[self.INFO_K_ITEM_WIDGET]
# If the widget is not menu
if not isinstance(widget, Menu):
# Raise error
raise ValueError('Item ID not refers to menu: `{}`'.format(id))
# If the widget is menu.
# Get the last child item index.
# Can be None.
index_end = widget.index(END)
# If no child item
if index_end is None:
# Return -1
return -1
# If have child item
else:
# Return the last child item index
return index_end
def menu(self, id):
"""
Get menu widget by item ID.
@param id: Menu item ID.
@return: Menu widget.
"""
# Get item info dict
info = self._id_to_info.get(id, None)
# If item info dict not exists
if info is None:
# Raise error
raise ValueError('Item ID not exists: `{}`'.format(id))
# If item info dict exists
else:
# Get item widget
widget = info[self.INFO_K_ITEM_WIDGET]
# If the item widget is not menu widget
if not isinstance(widget, Menu):
# Raise error
raise ValueError('Item ID not refers to menu: `{}`'.format(id))
# If the item widget is menu widget
else:
# Return the menu widget
return widget
def _add_info_dict(self, pid, id, index, widget):
"""
Add item info dict. Should be called after adding given widget to the
parent menu referred to by `pid`.
@param pid: Menu item PID.
        @param id: Menu item ID.
        @param index: Item index as child of the parent menu.
        @param widget: Menu widget, or one of ['command', 'separator'].
@return: None.
"""
# If item ID exists
if self.item_exists(id):
# Raise error
raise ValueError('Item ID exists: `{}`'.format(id))
# If item ID not exists.
# If PID not refers to a menu
if not self.item_is_menu(pid):
# Raise error
raise ValueError('Item ID not refers to menu: `{}`'.format(pid))
# If PID refers to a menu.
# Get parent menu's last child index
index_last = self.item_child_index_last(pid)
# Get parent menu's internal last child index
index_last_internal = self._item_child_index_last_internal(pid)
# If the two indexes are not off by one,
# it means this method is not called after adding given widget to the
# parent menu referred to by `pid`.
if index_last != index_last_internal - 1:
# Raise error
raise ValueError(
'Menu item has been modified outside: `{}`'.format(pid)
)
# If the two indexes are off by one.
# If given index is not valid
if not (0 <= index <= index_last + 1):
# Raise error
raise ValueError('Item index is not valid: `{}`'.format(index))
# Shift indexes of other child item info dicts.
# For each item info dict.
for info in self._id_to_info.values():
# Get the item's PID
pid_x = info[self.INFO_K_PID]
# If the item's PID is EQ given PID
if pid_x == pid:
# Get the item's index
index_x = info[self.INFO_K_ITEM_INDEX]
# If the item's index is GE given index
if index_x >= index:
# Shift the item's index by 1
info[self.INFO_K_ITEM_INDEX] = index_x + 1
# Add item info dict for the item
self._id_to_info[id] = {
self.INFO_K_ID: id,
self.INFO_K_PID: pid,
self.INFO_K_ITEM_INDEX: index,
self.INFO_K_ITEM_WIDGET: widget,
}
def add_item(
self,
widget_factory,
pid,
id,
id_is_full=False,
id_sep=None,
index=None,
label=None,
):
"""
Add an item to a parent menu item.
@param widget_factory: Widget factory.
@param pid: Menu item PID.
@param id: Menu item ID.
@param id_is_full: Whether given item ID is full.
@param id_sep: ID parts separator.
        @param index: Item index as child of a parent menu. Default is the end.
        @param label: Menu label. Default is derived from the item ID.
@return: None.
"""
# If given PID not refers to menu widget
if not self.item_is_menu(pid):
# Raise error
raise ValueError('Item ID not refers to menu: `{}`'.format(pid))
# If given PID refers to menu widget.
# ID parts separator
id_sep = id_sep if id_sep is not None else self._id_sep
# If given item ID is full
if id_is_full:
# Use the given item ID as full ID
full_id = id
# If given item ID is not full
else:
# Convert the relative ID to full ID
full_id = self._id_to_full(id=id, pid=pid, id_sep=id_sep)
# If the item ID exists
if self.item_exists(full_id):
# Raise error
raise ValueError('Item ID exists: `{}`'.format(full_id))
# If the item ID not exists.
# If menu label is not given
if label is None:
# Convert the item ID to menu label
label = self._id_to_label(id=full_id, id_sep=id_sep)
# Get last item index
last_index = self.item_child_index_last(pid)
# Get internal last item index
last_index_internal = self._item_child_index_last_internal(pid)
# If the two indexes are not consistent
if last_index != last_index_internal:
# Raise error
raise ValueError(
'Menu item has been modified outside: `{}`'.format(pid)
)
# If the two indexes are consistent.
# If index is not given
if index is None:
# Set index to last item index plus one
index = last_index + 1
# If index is not valid
if not (0 <= index <= last_index + 1):
# Raise error
raise ValueError('Item index is not valid: `{}`'.format(index))
# If index is valid.
# Get parent menu
parent_menu = self.menu(pid)
# Create widget
widget = widget_factory(
parent_menu=parent_menu,
index=index,
label=label,
)
# Add item info dict
self._add_info_dict(
pid=pid,
id=full_id,
index=index,
widget=widget,
)
def remove_item(
self,
id,
):
"""
Remove an item.
If the item is a menu, remove its child items recursively.
@param id: Item ID.
@return: None.
"""
# If given item ID is top menu ID
if id == self.INFO_K_ID_V_TOP:
# Raise error
raise ValueError('Cannot remove top menu: `{}`'.format(id))
# If given item ID is not top menu ID.
# Get item info dict
info = self._id_to_info.get(id, None)
# If item info dict not exists
if info is None:
# Raise error
raise ValueError('Item ID not exists: `{}`'.format(id))
# If item info dict exists
# Get item PID
item_pid = info[self.INFO_K_PID]
# Get item index
item_index = info[self.INFO_K_ITEM_INDEX]
# Get parent menu
parent_menu = self.menu(item_pid)
# Remove item at the index
parent_menu.delete(item_index)
# For each x item id
for x_item_id in self.item_ids():
            # If the x item id no longer exists.
            # This is possible because the code below removes child items
            # recursively.
if not self.item_exists(x_item_id):
# Ignore the x item id
continue
# If the x item id exists.
# Get x item's info dict
x_info = self._id_to_info[x_item_id]
# Get x item's PID
x_pid = x_info[self.INFO_K_PID]
# If x item's PID is EQ ID of the item to remove,
# it means it is a child item of the item to remove.
if x_pid == id:
# Remove the child item
self.remove_item(
x_item_id,
)
# If x item's PID is EQ PID of the item to remove,
# it means it is a sibling item of the item to remove.
elif x_pid == item_pid:
# Get x item's index
x_index = x_info[self.INFO_K_ITEM_INDEX]
# If x item's index is GT index of the item to remove
if x_index > item_index:
                    # Decrease x item's index by one
x_info[self.INFO_K_ITEM_INDEX] = x_index - 1
# If x item is something else
else:
# Ignore the x item
continue
# Delete the item's info dict
del self._id_to_info[id]
def add_menu(
self,
pid,
id,
id_is_full=False,
id_sep=None,
index=None,
label=None,
):
"""
Add menu item.
@param pid: Menu item PID.
@param id: Menu item ID.
@param id_is_full: Whether given item ID is full.
@param id_sep: ID parts separator.
@param index: Item index as child of a parent menu. Default is the end.
@param label: Menu label.
@return: None.
"""
# Create widget factory
def widget_factory(**kwargs):
"""
Widget factory function.
@param kwargs: Keyword arguments given by caller method `add_item`.
@return: Menu widget.
"""
# Get parent menu
parent_menu = kwargs['parent_menu']
# Create menu widget.
# `tearoff` is not allowed.
menu = Menu(master=parent_menu, tearoff=False)
# Get item index
index = kwargs['index']
# Get item label
label = kwargs['label']
# Insert the menu widget into the parent menu
parent_menu.insert_cascade(index=index, menu=menu, label=label)
# Return the menu widget
return menu
# Add item
self.add_item(
widget_factory=widget_factory,
pid=pid,
id=id,
id_is_full=id_is_full,
id_sep=id_sep,
index=index,
label=label,
)
def add_command(
self,
pid,
id,
command,
id_is_full=False,
id_sep=None,
index=None,
label=None,
):
"""
Add command item.
@param pid: Menu item PID.
@param id: Menu item ID.
@param command: Command function.
@param id_is_full: Whether given item ID is full.
@param id_sep: ID parts separator.
@param index: Item index as child of a parent menu. Default is the end.
@param label: Menu label.
@return: None.
"""
# Create widget factory
def widget_factory(**kwargs):
"""
Widget factory function.
@param kwargs: Keyword arguments given by caller method `add_item`.
@return: Internal widget type name `command`.
"""
# Get parent menu
parent_menu = kwargs['parent_menu']
# Get item index
index = kwargs['index']
# Get item label
label = kwargs['label']
# Insert command widget into the parent menu
parent_menu.insert_command(
index=index,
label=label,
command=command,
)
# Return the internal widget type name
return 'command'
# Add item
self.add_item(
widget_factory=widget_factory,
pid=pid,
id=id,
id_is_full=id_is_full,
id_sep=id_sep,
index=index,
label=label,
)
def add_separator(
self,
pid,
id,
id_is_full=False,
id_sep=None,
index=None,
):
"""
Add separator item.
@param pid: Menu item PID.
@param id: Menu item ID.
@param id_is_full: Whether given item ID is full.
@param id_sep: ID parts separator.
@param index: Item index as child of a parent menu. Default is the end.
@return: None.
"""
# Create widget factory
def widget_factory(**kwargs):
"""
Widget factory function.
@param kwargs: Keyword arguments given by caller method `add_item`.
@return: Internal widget type name `separator`.
"""
# Get parent menu
parent_menu = kwargs['parent_menu']
# Get index
index = kwargs['index']
# Insert separator widget into the parent menu
parent_menu.insert_separator(index=index)
# Return the internal widget name
return 'separator'
# Add item
self.add_item(
widget_factory=widget_factory,
pid=pid,
id=id,
id_is_full=id_is_full,
id_sep=id_sep,
index=index,
)
def _id_to_full_default(self, id, pid, id_sep):
"""
Default function that converts relative ID to full ID.
        E.g. `Exit` -> `/File/Exit`
@param id: Menu item relative ID.
@param pid: Menu item PID.
@param id_sep: ID parts separator.
@return: Item full ID.
"""
# If parent menu is top menu
if pid == self.INFO_K_ID_V_TOP:
# If the separator is EQ top menu ID, e.g. `/`
if id_sep == self.INFO_K_ID_V_TOP:
# Not add separator between parent ID and child relative ID
return pid + id
# If parent menu is not top menu,
# or parent menu is top menu but the separator is not EQ top menu ID.
# Add separator between parent ID and child relative ID
return pid + id_sep + id
def _id_to_label_default(self, id, id_sep):
"""
Default function that converts ID to menu label.
        E.g. `/File/Exit` -> `Exit`
@param id: Menu item ID.
@param id_sep: ID parts separator.
@return: Menu label.
"""
# Split the item ID into prefix and label
prefix, _, label = id.rpartition(id_sep)
# Return the label
return label
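#
# Minimal usage sketch (not part of the original module): build a small menu
# tree and attach it to a Tk window. The menu IDs and the command callback
# are arbitrary.
if __name__ == '__main__':
    from tkinter import Tk
    # Create root window
    root = Tk()
    # Create menu tree
    menutree = MenuTree(master=root)
    # Add a `File` menu under the top menu (ID `/`)
    menutree.add_menu(pid='/', id='File')
    # Add a command item under `/File`; its full ID becomes `/File/Exit`
    menutree.add_command(
        pid='/File',
        id='Exit',
        command=root.destroy,
    )
    # Add a separator before the `Exit` item
    menutree.add_separator(pid='/File', id='Separator', index=0)
    # Attach the top menu to the root window
    root.config(menu=menutree.menu_top())
    # Enter main loop
    root.mainloop()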
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/tkinterutil/menu.py
|
menu.py
|
from __future__ import absolute_import
from tkinter import Spinbox
from tkinter import Text
from tkinter.ttk import Combobox
from tkinter.ttk import Entry
from tkinter.ttk import Scrollbar
from tkinter.constants import ACTIVE
from tkinter.constants import DISABLED
from tkinter.constants import END
from tkinter.constants import NORMAL
from .eventor import Eventor
from .vidget import Vidget
#
class _HiddenScrollbar(Scrollbar):
"""
Scrollbar that hides if slider's both ends reached extreme position.
"""
def set(self, lo, hi):
"""
Set scrollbar slider's end positions.
@param lo: Low end position. A float value between 0.0 and 1.0.
@param hi: High end position. A float value between 0.0 and 1.0.
@return: None.
"""
# If scrollbar slider's both ends reached extreme position
if float(lo) <= 0.0 and float(hi) >= 1.0:
# Hide the scrollbar
self.grid_remove()
# If not scrollbar slider's both ends reached extreme position
else:
# Show the scrollbar
self.grid()
# Call super version
Scrollbar.set(self, lo, hi)
#
class EntryVidget(
Vidget,
Eventor,
):
"""
EntryVidget contains a main Frame and an Entry widget.
The entry widget takes all space of the main frame.
The main frame can be used to configure the size of the entry widget.
EntryVidget adds the following abilities:
- Simplify the use of validator function.
    - Notify pre-change and post-change events.
"""
# Default validator function
_DEFAULT_VALIDATOR = (lambda x: True)
# Event notified when text is to be changed
TEXT_CHANGE_SOON = 'TEXT_CHANGE_SOON'
# Event notified when text is changed
TEXT_CHANGE_DONE = 'TEXT_CHANGE_DONE'
def __init__(
self,
text=None,
validator=None,
widget_type=None,
master=None,
):
"""
Initialize object.
@param text: Initial text. Default is empty.
@param validator: Validator function that determines whether text
entered by user or set by `text_set` method is valid. The validator
function takes the new value as argument and returns True if the new
value is valid.
@param widget_type: One of ['Entry', 'Spinbox', 'Combobox']. Default is
'Entry'.
@param master: Master widget.
@return: None.
"""
# Initialize Vidget.
# Create main frame widget.
Vidget.__init__(self, master=master)
# Initialize Eventor
Eventor.__init__(self)
# If widget type is None or `Entry`
if widget_type is None or widget_type == 'Entry':
# Create Entry widget
self._text_widget = Entry(master=self.widget())
# If widget type is `Spinbox`
elif widget_type == 'Spinbox':
# Create Spinbox widget
self._text_widget = Spinbox(master=self.widget())
# If widget type is `Combobox`
elif widget_type == 'Combobox':
# Create Combobox widget
self._text_widget = Combobox(master=self.widget())
# If widget type is something else
else:
# Raise error
raise ValueError(widget_type)
# Set the text widget as config target
self.config_target_set(self._text_widget)
# Whether the text widget's value is changing
self._is_changing = False
# Old widget state
self._old_widget_state = NORMAL
# Validator function
self._validator = validator \
if validator is not None else EntryVidget._DEFAULT_VALIDATOR
# Create validator wrapper
self._validator_wrapper = self._validator_wrapper_create()
# Register the validator wrapper with Tkinter. Get reference ID.
self._validator_wrapper_ref_id = \
self.text_widget().winfo_toplevel().register(
self._validator_wrapper
)
# Mount the validator wrapper to the text widget
self._validator_wrapper_mount()
# If the text widget is Combobox
if isinstance(self._text_widget, Combobox):
# Bind selected event to event handler
self._text_widget.bind(
'<<ComboboxSelected>>', self._on_combobox_selected
)
# Cached text
self._text = self._text_widget.get()
# Set initial text
self.text_set(text if text is not None else '', notify=False)
# Update widget
self._widget_update()
def _widget_update(self):
"""
Update widget config and layout.
@return: None.
"""
# Do not use children to compute main frame's geometry
self.widget().grid_propagate(False)
# Configure layout weights for children
self.widget().rowconfigure(0, weight=1)
self.widget().columnconfigure(0, weight=1)
# Lay out the text widget to take all space of the main frame
self._text_widget.grid(
in_=self.widget(),
row=0,
column=0,
sticky='NSEW',
)
def text_widget(self):
"""
Get the text widget.
@return: Text widget.
"""
# Return the text widget
return self._text_widget
def text(self):
"""
Get cached text.
        `self._text` and `self._text_widget.get()` usually give the same value.
        But from within the validator wrapper at 3Q7EB, when the new value is
        being validated, the new value is only available in `self._text`.
        The Tkinter widget's internal value has not been updated yet.
@return: Cached text.
"""
# Return the cached text
return self._text
def text_set(
self,
text,
notify=True,
notify_arg=None,
is_validator=False,
):
"""
Set text.
@param text: Text to set.
@param notify: Whether notify text change events.
@param notify_arg: Event argument.
@param is_validator: Whether caller is validator.
@return: None.
"""
# If the text is not valid
if not self.text_is_valid(text):
# Raise error
raise ValueError('Text is not valid: {}'.format(text))
# If the text is valid.
# If the text is changing
if self._is_changing:
# Raise error
raise ValueError('Text is changing')
# If the text is not changing.
# Set text changing flag on
self._is_changing = True
# If notify event
if notify:
# Notify pre-change event
self.handler_notify(
self.TEXT_CHANGE_SOON,
arg=notify_arg,
need_info=True,
)
# Cache the text
self._text = text
# If caller is not validator,
# need change text widget's value.
if not is_validator:
# Unmount the validator wrapper before changing text widget's value
self._validator_wrapper_unmount()
# Set text widget to NORMAL state
self.state_set(NORMAL)
# Delete the old text from text widget.
# This will not trigger validation because the validator wrapper
# has been unmounted.
self._text_widget.delete(0, END)
# Insert the new text into text widget.
self._text_widget.insert(0, text)
# Set text widget to previous state
self.state_set_back()
# Mount the validator wrapper after changing text widget's value
self._validator_wrapper_mount()
        # If caller is validator,
        # no need to change text widget's value.
# If the cached text is not EQ text widget's value
if self._text != self._text_widget.get():
# If caller is not validator
if not is_validator:
# Set changing flag off
self._is_changing = False
# Raise error
raise ValueError(
'Inconsistent state. `{}` != `{}`'.format(
repr(self._text),
repr(self._text_widget.get()),
)
)
# If caller is validator,
# this is normal because text widget's value will be updated after
# the validator returns.
# If notify event
if notify:
# Notify post-change event
self.handler_notify(
self.TEXT_CHANGE_DONE,
arg=notify_arg,
need_info=True,
)
# Set changing flag off
self._is_changing = False
def enabled(self):
"""
Test whether the text widget is not in DISABLED state.
@return: Boolean.
"""
# Get the text widget's state. One of [NORMAL, DISABLED, ACTIVE].
state = str(self.text_widget()['state'])
# Test whether the text widget is not in DISABLED state
return state != DISABLED
def disabled(self):
"""
Test whether the text widget is in DISABLED state.
@return: Boolean.
"""
# Get the text widget's state. One of [NORMAL, DISABLED, ACTIVE].
state = str(self.text_widget()['state'])
# Test whether the text widget is in DISABLED state
return state == DISABLED
def state_set(self, state):
"""
Set the text widget's state.
@param state: State to set.
@return: None.
"""
# If given state is not valid
if state not in [NORMAL, DISABLED, ACTIVE]:
# Raise error
raise ValueError(state)
# If given state is valid.
# Store old state
self._old_widget_state = str(self.text_widget()['state'])
# Set new state
self.text_widget()['state'] = state
def state_set_back(self):
"""
Set the text widget to old state.
@return: None.
"""
# If old state is not valid
if self._old_widget_state not in [NORMAL, DISABLED, ACTIVE]:
# Raise error
raise ValueError(self._old_widget_state)
# If old state is valid.
# Set the text widget to old state
self.text_widget()['state'] = self._old_widget_state
def is_changing(self):
"""
Test whether the text widget's value is changing.
@return: Boolean.
"""
# Return whether the text widget's value is changing
return self._is_changing
def text_is_valid(self, text):
"""
Test whether given text is valid according to validator.
@param text: Text to test.
@return: Boolean.
"""
# Return whether given text is valid according to validator
return self._validator(text)
def _validator_wrapper_create(self):
"""
Create validator wrapper.
The wrapper calls original validator to validate the new text value.
        If the new text value is valid, the wrapper will cache the new value
        (Tkinter updates the text widget itself after the wrapper returns) and
        notify text change events.
@return: Validator function wrapper.
"""
# 3Q7EB
# Create validator wrapper
def validator_wrapper(new_value):
# If the text widget is changing
if self._is_changing:
# Raise error
raise ValueError('Text is changing')
# If the validator function is not given
if self._validator is None:
# Set validation result to True
is_valid = True
# If the validator function is given
else:
try:
# Get validation result
is_valid = self._validator(new_value)
# If have error
except Exception:
# Set validation result to False
is_valid = False
# If the new value is valid
if is_valid:
# If the text widget is changing
if self._is_changing:
# Raise error
raise ValueError('Text is changing')
# Set the text widget's value.
# Notify text change events.
self.text_set(
new_value,
notify=True,
is_validator=True,
)
# If the text widget is changing
if self._is_changing:
# Raise error
raise ValueError('Text is changing')
# If the new value is not valid,
# do nothing.
# Return the validation result
return is_valid
# Return the validator wrapper
return validator_wrapper
def _validator_wrapper_mount(self):
"""
Mount the validator wrapper to the text widget.
@return: None.
"""
# Mount the validator wrapper to the text widget
self.text_widget().config(
# Validation type
validate='key',
# Validator function reference ID, and argument type.
# Argument type `%P` means the new value.
validatecommand=(self._validator_wrapper_ref_id, '%P')
)
def _validator_wrapper_unmount(self):
"""
Unmount the validator wrapper from the text widget.
@return: None.
"""
# Unmount the validator wrapper from the text widget.
# Notice `validatecommand=None` does not work.
self.text_widget().config(validatecommand='')
def _on_combobox_selected(self, event):
"""
Combobox selected event handler.
@param event: Tkinter event object.
@return: None.
"""
# Get new value
new_value = self.text_widget().get()
        # If a validator is given and the new value is not valid
        if self._validator is not None and not self._validator(new_value):
# Set old value back
self.text_widget().set(self._text)
# If the new value is valid
else:
# Set the new value.
# Notify text change events.
self.text_set(new_value, notify=True)
#
class TextVidget(Vidget):
"""
TextVidget contains a main Frame widget, a Text widget and a Scrollbar
widget.
"""
def __init__(
self,
text=None,
master=None,
):
"""
Initialize object.
@param text: Initial text. Default is empty.
@param master: Master widget.
@return: None.
"""
# Initialize Vidget.
# Create main frame widget.
Vidget.__init__(self, master=master)
# Create scrollbar widget
self._scrollbar = _HiddenScrollbar(master=self.widget())
# Create text widget
self._text_widget = Text(
master=self.widget(),
# Enable undo
undo=True,
# Auto add undo separators
autoseparators=True,
# Unlimited number of undos
maxundo=-1,
)
# Set the text widget as config target
self.config_target_set(self._text_widget)
# Mount the scrollbar
self._text_widget.config(yscrollcommand=self._scrollbar.set)
self._scrollbar.config(command=self._text_widget.yview)
# Update widget
self._widget_update()
# Set initial text
self.text_add(text if text is not None else '')
def _widget_update(self):
"""
Update widget.
@return: None.
"""
# Configure children layout weights
self.widget().rowconfigure(0, weight=1)
# Column 0 is for the text widget
self.widget().columnconfigure(0, weight=1)
# Column 1 is for the scrollbar widget
self.widget().columnconfigure(1, weight=0)
# Lay out the text widget
self._text_widget.grid(
row=0,
column=0,
sticky='NSEW',
)
# Lay out the scrollbar widget
self._scrollbar.grid(
row=0,
column=1,
sticky='NSEW',
)
def scrollbar_widget(self):
"""
Get the scrollbar widget.
@return: Scrollbar widget.
"""
# Return the scrollbar widget
return self._scrollbar
def text_widget(self):
"""
Get the text widget.
@return: Text widget.
"""
# Return the text widget
return self._text_widget
def text(self):
"""
Get text.
@return: Text.
"""
# Return the text
return self._text_widget.get('1.0', END + '-1c')
def text_set(self, text):
"""
Set text.
@param text: Text to set.
@return: None.
"""
# Delete old text
self._text_widget.delete('1.0', END)
# Add new text
self.text_add(text)
def text_add(self, text):
"""
Add text.
@param text: Text to add.
@return: None.
"""
# Add the text to the end
self._text_widget.insert(END, text)
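
# --- Illustrative usage sketch (added for this document, not part of the
# original wheel). A minimal demo of TextVidget; it assumes Vidget.widget()
# returns the main Frame widget, as it is used elsewhere in this package.
if __name__ == '__main__':
    from tkinter import Tk

    # Create the Tk root window
    demo_root = Tk()

    # Create the text vidget with some initial text
    demo_vidget = TextVidget(text='Hello, world.', master=demo_root)

    # Lay out the vidget's main frame widget to fill the window
    demo_vidget.widget().pack(fill='both', expand=True)

    # Append more text and read the full content back
    demo_vidget.text_add('\nAppended line.')
    print(demo_vidget.text())

    # Enter the Tk main loop
    demo_root.mainloop()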
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/tkinterutil/text.py
|
text.py
|
from __future__ import absolute_import
from tkinter import Toplevel
from tkinter.ttk import Button
from tkinter.ttk import Frame
#
def get_window_center(window):
"""
Get a window widget's center point (cx, cy).
@param window: Window widget.
@return: Center point (cx, cy).
"""
# Get center x
cx = window.winfo_x() + (window.winfo_width() // 2)
# Get center y
cy = window.winfo_y() + (window.winfo_height() // 2)
# Return the center point
return cx, cy
#
def center_window(window, point=None):
"""
Center a window widget around a point.
Modified from `http://stackoverflow.com/a/10018670`.
@param window: Window widget.
@param point: Point to center around. Default is screen center point.
@return: None.
"""
# Get content width
content_width = window.winfo_width()
# Get frame width
outframe_width = window.winfo_rootx() - window.winfo_x()
# Get window full width
window_width = content_width + 2 * outframe_width
# Get content height
content_height = window.winfo_height()
# Get title bar height
titlebar_height = window.winfo_rooty() - window.winfo_y()
# Get window full height
window_height = content_height + titlebar_height + outframe_width
# If center point is given
if point:
# Use the center point's x and y
cx, cy = point
# If center point is not given
else:
# Use screen center point's x
cx = window.winfo_screenwidth() // 2
# Use screen center point's y
cy = window.winfo_screenheight() // 2
# Get position x
x = cx - (window_width // 2)
# Get position y
y = cy - (window_height // 2)
# Set the window's geometry
window.geometry('{}x{}+{}+{}'.format(content_width, content_height, x, y))
#
class ToplevelVidget(object):
"""
ToplevelVidget contains a Toplevel widget.
"""
def __init__(
self,
close_handler=None,
master=None,
):
"""
Initialize object.
@param close_handler: Window close button event handler.
@param master: Master widget.
@return: None.
"""
# Create toplevel widget
self._toplevel = Toplevel(master=master)
# Hide the toplevel widget
self._toplevel.withdraw()
# Window close button event handler
self._close_handler = close_handler \
if close_handler is not None else self._close_handler_default
# Register window close button event handler
self._toplevel.protocol('WM_DELETE_WINDOW', self._close_handler)
def toplevel(self):
"""
Get the toplevel widget.
@return: toplevel widget.
"""
return self._toplevel
def __getattr__(self, name):
"""
Delegate attribute lookup to the toplevel widget.
@return: Attribute value, or raise AttributeError.
"""
return getattr(self._toplevel, name)
def _close_handler_default(self):
"""
Default window close button event handler.
@return: None.
"""
# Hide the toplevel widget
self._toplevel.withdraw()
# Release grab on the toplevel widget
self._toplevel.grab_release()
#
class DialogVidget(ToplevelVidget):
"""
DialogVidget contains a Toplevel widget, a main Frame widget, a custom view
widget, and two button widgets for `Confirm` and `Cancel`.
"""
def __init__(
self,
view_widget=None,
confirm_handler=None,
confirm_buttion_text='Confirm',
cancel_handler=None,
cancel_buttion_text='Cancel',
close_handler=None,
master=None,
):
"""
Initialize object.
@param view_widget: Custom view widget.
@param confirm_handler: Confirm button event handler.
@param confirm_buttion_text: Confirm button text.
@param cancel_handler: Cancel button event handler.
@param cancel_buttion_text: Cancel button text.
@param close_handler: Window close button event handler.
@param master: Master widget.
@return: None.
"""
# Initialize ToplevelVidget
ToplevelVidget.__init__(
self,
close_handler=close_handler,
master=master,
)
# Create main frame
self._frame = Frame(master=self._toplevel)
# Custom view widget
self._view_widget = view_widget
# Confirm button event handler
self._confirm_handler = confirm_handler \
if confirm_handler is not None else self._confirm_handler_default
# Create confirm button
self._confirm_button = Button(
master=self._toplevel,
text=confirm_buttion_text,
command=self._confirm_handler,
)
# Cancel button event handler
self._cancel_handler = cancel_handler \
if cancel_handler is not None else self._cancel_handler_default
# Create cancel button
self._cancel_button = Button(
master=self._toplevel,
text=cancel_buttion_text,
command=self._cancel_handler,
)
# If the view widget is given
if self._view_widget is not None:
# Set view widget
self.view_set(self._view_widget)
# Update widget
self._widget_update()
def _widget_update(self):
"""
Update widget.
@return: None.
"""
# Configure layout weights for children
self._toplevel.rowconfigure(0, weight=1)
self._toplevel.columnconfigure(0, weight=1)
# Lay out the main frame widget
self._frame.grid(
in_=self._toplevel,
row=0,
column=0,
sticky='NSEW',
)
# Do not use children to compute main frame's geometry info
self._frame.grid_propagate(False)
# Configure layout weights for children.
# Row 0 is for the view widget.
self._frame.rowconfigure(0, weight=1)
        # Row 1 is for the confirm and cancel button widgets.
        self._frame.rowconfigure(1, weight=0)
# Use only one column
self._frame.columnconfigure(0, weight=1)
# Lay out the confirm button
self._confirm_button.grid(
in_=self._frame,
row=1,
column=0,
sticky='W',
)
# Lay out the cancel button
self._cancel_button.grid(
in_=self._frame,
row=1,
column=0,
sticky='E',
)
def main_frame(self):
"""
Get the main frame widget.
@return: Main frame widget.
"""
# Return the main frame widget
return self._frame
def view_set(self, widget):
"""
Set view widget.
@param widget: View widget.
@return: None.
"""
# Hide old view widget
if self._view_widget is not None:
self._view_widget.grid_forget()
# Store new view widget
self._view_widget = widget
# Lay out new view widget
self._view_widget.grid(
in_=self._frame,
row=0,
column=0,
sticky='NSEW',
)
def confirm_button(self):
"""
Get the confirm button widget.
@return: Confirm button widget.
"""
# Return the confirm button widget
return self._confirm_button
def confirm_handler_set(self, handler):
"""
Set confirm button event handler.
        @param handler: Confirm button event handler.
@return: None.
"""
# Store confirm button event handler
self._confirm_handler = handler
# Set confirm button event handler
self._confirm_button.config(command=self._confirm_handler)
def _confirm_handler_default(self):
"""
Default confirm button event handler.
@return: None.
"""
# Do nothing
pass
def cancel_button(self):
"""
Get the cancel button widget.
@return: Cancel button widget.
"""
# Return the cancel button widget
return self._cancel_button
def cancel_handler_set(self, handler):
"""
Set cancel button event handler.
        @param handler: Cancel button event handler.
@return: None.
"""
# Store cancel button event handler
self._cancel_handler = handler
# Set cancel button event handler
self._cancel_button.config(command=self._cancel_handler)
def _cancel_handler_default(self):
"""
Default cancel button event handler.
@return: None.
"""
# Hide the toplevel widget
self._toplevel.withdraw()
# Release grab on the toplevel widget
self._toplevel.grab_release()
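
# --- Illustrative usage sketch (added for this document, not part of the
# original wheel). A minimal demo of DialogVidget and center_window; the
# widget size and texts are placeholders.
if __name__ == '__main__':
    from tkinter import Tk
    from tkinter.ttk import Label

    # Create the Tk root window
    demo_root = Tk()

    # Create a dialog whose confirm button just prints a message
    demo_dialog = DialogVidget(
        confirm_handler=lambda: print('Confirmed'),
        master=demo_root,
    )

    # Use a simple label as the dialog's view widget
    demo_dialog.view_set(Label(master=demo_dialog.main_frame(), text='Proceed?'))

    # Give the dialog a size, show it, and center it on the screen
    demo_dialog.toplevel().geometry('240x120')
    demo_dialog.toplevel().deiconify()
    demo_dialog.toplevel().update_idletasks()
    center_window(demo_dialog.toplevel())

    # Enter the Tk main loop
    demo_root.mainloop()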
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/tkinterutil/toplevel.py
|
toplevel.py
|
from __future__ import absolute_import
#
class Event(object):
"""
Event object that contains event info. These attributes are available:
- event: Event name.
- arg: Event argument.
- notifier: Event notifier.
"""
def __init__(
self,
event,
arg=None,
notifier=None,
):
"""
Initialize object.
@param event: Event name.
@param arg: Event argument.
@param notifier: Event notifier.
@return: None.
"""
# Event name
self.event = event
# Event argument
self.arg = arg
# Event notifier
self.notifier = notifier
#
class _EventHandlerWrapper(object):
"""
Event handler wrapper that calls original event handler with or without
event argument, according to `need_arg` value.
"""
def __init__(self, handler, need_arg):
"""
Initialize object.
@param handler: Original event handler.
@param need_arg: Whether original event handler needs event argument.
@return: None.
"""
# Original event handler
self.handler = handler
# Whether original event handler needs event argument
self._need_arg = need_arg
def __eq__(self, other):
"""
Equality operator.
@param other: The other object.
@return: True if wrapped handlers are equal, otherwise False.
"""
# If the other object is not of the same type
if not isinstance(other, self.__class__):
# Return False
return False
# If the other object is of the same type
else:
# Test whether wrapped handlers are equal
return self.handler == other.handler
def __call__(self, arg):
"""
Event handler function.
@param arg: Event argument.
@return: Original event handler's result.
"""
# If original event handler needs event argument
if self._need_arg:
# Call original event handler with argument.
# Return call result.
return self.handler(arg)
        # If the original event handler does not need the event argument
else:
# Call original event handler without argument.
# Return call result.
return self.handler()
#
class Eventor(object):
"""
Eventor provides methods for registering event handlers and notifying them
of events.
"""
def __init__(self):
"""
Initialize object.
@return: None.
"""
# Create event handlers dict.
# Key is event name.
# Value is a list of handlers for the event.
self._event_handlers = {}
def handler_add(self, event, handler, need_arg=False):
"""
Add event handler for an event.
@param event: Event name. `None` means every event.
@param handler: Event handler.
@param need_arg: Whether the event handler needs event argument.
@return: None.
"""
# Create event handler wrapper
handler_wrapper = _EventHandlerWrapper(handler, need_arg=need_arg)
# If handlers list for the event has not been created
if event not in self._event_handlers:
# Create handlers list for the event.
# Add the handler wrapper to the handlers list.
self._event_handlers[event] = [handler_wrapper]
# If handlers list for the event has been created
else:
# If the handler wrapper has been added before
if handler_wrapper in self._event_handlers[event]:
# Get error message
msg = """Handler `{}` has already been added for event\
`{}`.""".format(handler, event)
# Raise error
raise ValueError(msg)
# If the handler wrapper has not been added before
else:
# Add the handler wrapper to the handlers list.
self._event_handlers[event].append(handler_wrapper)
def handler_remove(self, handler):
"""
Remove event handler.
@param handler: Event handler to remove.
@return: None.
"""
# `Remove infos` list.
# Each info is a tuple: (handler_wrapper, handler_list, event).
remove_info_s = []
# For each event name
for event in self._event_handlers:
# Get handlers list for the event
handler_wrapper_s = self._event_handlers[event]
# For each handler wrapper
for handler_wrapper in handler_wrapper_s:
# If the handler wrapper should be removed
if handler_wrapper.handler == handler:
# Add `remove info` to `remove infos` list
remove_info_s.append(
(handler_wrapper, handler_wrapper_s, event)
)
# If `remove infos` list is empty
if not remove_info_s:
# Return
return
# If `remove infos` list is not empty.
# For each `remove info`
for remove_info in remove_info_s:
# Get handler wrapper, handlers list, and event name
handler_wrapper, handler_wrapper_s, event = remove_info
# Remove the handler wrapper from the handlers list
handler_wrapper_s.remove(handler_wrapper)
            # If the handlers list is now empty
            if not handler_wrapper_s:
                # Remove the handlers list
                del self._event_handlers[event]
def handler_remove_all(self):
"""
Remove all event handlers.
@return: None.
"""
# Set event handlers dict to empty
self._event_handlers = {}
def handler_notify(
self,
event,
arg=None,
notifier=None,
need_info=False,
):
"""
Notify event handlers of given event.
@param event: Event name.
@param arg: Event argument.
@param notifier: Event notifier. Default is `self`.
Event notifier is used only if `need_info` is True.
@param need_info: Whether need create event info object and pass the
event info object as event argument to event handlers.
@return: None.
"""
# If the event has no handlers,
# and there are no `None` handlers listening on every event.
if event not in self._event_handlers \
and None not in self._event_handlers:
# Return
return
# If the event has handlers,
# or there are `None` handlers listening on every event.
# If need event info object
if need_info:
# Create event info object.
# Use the event info object as event argument.
arg = Event(
event=event,
arg=arg,
notifier=notifier if notifier is not None else self,
)
# If not need event info object
else:
# Use the event argument as-is
arg = arg
# If the event has handlers,
if event in self._event_handlers:
# For each handler in the event's handlers list
for handler in self._event_handlers[event]:
# Call the handler
handler(arg)
# If there are `None` handlers listening on every event
if None in self._event_handlers:
# For each handler in the `None` handlers list
for handler in self._event_handlers[None]:
# Call the handler
handler(arg)
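
# --- Illustrative usage sketch (added for this document, not part of the
# original wheel). A minimal demo of registering handlers and notifying an
# event; the event name and argument are placeholders.
if __name__ == '__main__':
    # Create an eventor
    demo_eventor = Eventor()

    # Handler that takes no event argument
    def on_saved():
        print('saved')

    # Handler that receives the event info object
    def on_any_event(event_info):
        print('event:', event_info.event, 'arg:', event_info.arg)

    # Register a handler for a specific event
    demo_eventor.handler_add('saved', on_saved)

    # Register a handler for every event (`None` means every event)
    demo_eventor.handler_add(None, on_any_event, need_arg=True)

    # Notify handlers, wrapping the argument in an Event info object
    demo_eventor.handler_notify('saved', arg={'path': 'demo.txt'}, need_info=True)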
|
AoikRegistryEditor
|
/AoikRegistryEditor-0.1.0-py3-none-any.whl/aoikregistryeditor/tkinterutil/eventor.py
|
eventor.py
|
from CharmCord.Functions import *
from CharmCord.all_functions import all_Funcs, date_funcs
from datetime import datetime as D_T
from pytz import timezone
timezones = (
timezone('EST'),
timezone('UTC'),
timezone('US/Pacific')
)
class FunctionHandler:
def __init__(self):
self.funcs = {}
def register_functions(self):
for line in all_Funcs:
function = eval(line.replace("$", ""))
self.funcs[line.replace("\n", "").lower()] = function
async def execute_functions(self, keyword, args, context):
if keyword in date_funcs:
return await self.funcs[keyword](args, context, timezones, format_datetime)
return await self.funcs[keyword](args, context)
async def noArguments(entry: str, Functions, context):
from .all_functions import no_arg_Funcs
for func in no_arg_Funcs:
if func in entry:
entry = entry.replace(func, str(await Functions.execute_functions(func.lower(), None, context)))
return entry
def slashArgs(args, Code):
if '$slashArgs' in Code:
while "$slashArgs" in str(Code):
count = 0
end = None
balance = 0
start = Code.index("$slashArgs") + 10
look = Code[start:len(Code)]
for i in look:
if i == "[":
start = count
count += 1
balance += 1
continue
if i == "]":
end = count
balance -= 1
count += 1
if balance == 0 and start is not None and end is not None:
try:
# Replace $args with arguments
Code = str(Code).replace(f"$slashArgs[{look[start + 1:end]}]", args[int(look[start + 1:end]) - 1])
break
except IndexError:
raise SyntaxError(F"$slashArgs[{int(look[start + 1:end])}] Not Provided")
return Code
async def findBracketPairs(entry: str, Functions, context):
test = [line.strip() for line in entry.split("\n") if len(line.strip()) >= 3]
starts = 0
for i in test:
if i.strip().startswith("$") and i[-1] != "]":
try:
test[starts] = (test[starts].strip() + " " + test[starts + 1].strip()).strip()
test.remove(test[starts + 1])
starts += 1
except IndexError:
starts -= 1
test[starts] = (test[starts].strip() + " " + test[starts + 1].strip()).strip()
test.remove(test[starts + 1])
starts += 1
elif i.endswith("]") and i[0].strip() != "$":
test[starts] = (test[starts - 1].strip() + " " + test[starts].strip()).strip()
test.remove(test[starts - 1])
starts += 1
elif i[-1].strip() != "]" and i[0].strip() != "$":
test[starts] = (test[starts - 1] + " " + test[starts].strip())
test.remove(test[starts - 1])
else:
continue
try:
if test[-1].endswith("]") and test[-2][-1] != "]":
test[-2] = test[-2] + " " + test[-1].strip()
test.remove(test[-1])
        except IndexError:
            pass
if len(test) == 0:
test = [line.strip() for line in entry.split("\n") if len(line.strip()) >= 3]
for code in test:
code = code.strip()
first = None
last = None
count = 0
balance1 = 0
for i in code:
if i == "[" and first is None:
first = count
balance1 += 1
count += 1
continue
if i == "[":
balance1 += 1
elif i == "]":
last = count
balance1 -= 1
if first is not None and last is not None and balance1 == 0:
break
count += 1
argument = str(code[first + 1:last])
keyword = code[0:first]
find = [first, last, keyword, argument, context]
while "[" in str(argument) and "]" in str(argument) and "$" in str(argument):
count = 0
start = None
end = None
balance = 0
for i in argument:
if i == "$" and start is None and argument[count + 1] != '$':
start = count
elif i == "[":
balance += 1
elif i == "]":
end = count
balance -= 1
count += 1
if balance == 0 and start is not None and end is not None:
break
if start != 0:
argument = argument[:start] + str(await findBracketPairs(argument[start:end + 1], Functions, context)) + argument[end + 1:]
else:
argument = str(await findBracketPairs(argument, Functions, context)) + argument[end + 1:]
find = [first, last, keyword, argument, context]
if find[2].lower() in Functions.funcs:
name = await Functions.execute_functions(find[2].lower(), find[3], find[4])
else:
name = find[2]
try:
return name
except Exception as e:
raise Exception(f"Error at: {e}")
def ifs(args):
    choices = ["==", ">=", "<=", "<", ">", "!="]
if "$if" in args:
if args.count("$if") > 1:
raise SyntaxError("Too many $if")
pass
else:
return
while "$if" in args:
start = args[args.index("$if[") + 4:]
count = 1
counter = 0
for i in start:
if i == "[":
count += 1
counter += 1
elif i == "]":
count -= 1
counter += 1
end = counter
counter += 1
if count == 0:
break
statement = args[args.index("$if[") + 4:end + 8]
for i in choices:
if i in args:
if i in ["==", "!="]:
vals = statement.split(i)
val1 = vals[0]
val2 = vals[1]
else:
vals = statement.split(i)
val1 = int(vals[0])
val2 = int(vals[1])
test = eval(f"val1 {i} val2")
if test:
args = args.replace(f"$if[{statement}]\n", "")
return args
else:
return False
return False
def checkArgs(args, Code):
if '$args' in Code:
while "$args" in str(Code):
count = 0
end = None
balance = 0
start = Code.index("$args") + 5
look = Code[start:len(Code)]
for i in look:
if i == "[":
start = count
count += 1
balance += 1
continue
if i == "]":
end = count
balance -= 1
count += 1
if balance == 0 and start is not None and end is not None:
try:
# Replace $args with arguments
Code = str(Code).replace(f"$args[{look[start + 1:end]}]", args[int(look[start + 1:end]) - 1])
break
except IndexError:
raise SyntaxError(F"$args[{int(look[start + 1:end])}] Not Provided")
return Code
async def checkArgCheck(args, Code, Context):
if "$argCheck" in Code:
if Code.count("$argCheck") > 1:
raise Exception("Too many $argCheck in a single command | Max is 1!")
start = Code.index("$argCheck[") + 10
area = Code[start:]
try:
if ";" in area[:area.index("]")]:
argTotal = area[:area.index(";")]
warning = area[area.index(";") + 1:area.index("]")]
if len(args) < int(argTotal):
await Context.channel.send(warning)
return 'Failed'
Code = Code.replace(f"$argCheck[{argTotal}{area[area.index(';'):area.index(']')]}]\n", "")
return Code
else:
argTotal = area[:area.index("]")]
if len(args) < int(argTotal):
return 'Failed'
Code = Code.replace(f"$argCheck[{argTotal}]\n", "")
return Code
except Exception as e:
print(e)
raise SyntaxError("Not enough arguments in $argCheck!")
return Code
def format_datetime(datetime: D_T, FORM: str, TIMEZONE):
UnformatedDatetime = datetime.astimezone(TIMEZONE)
UnformatedDatetimeTuple = (
UnformatedDatetime.year, UnformatedDatetime.month, UnformatedDatetime.day, UnformatedDatetime.hour, UnformatedDatetime.minute,
UnformatedDatetime.second, UnformatedDatetime.microsecond)
year, month, day, hour, minute, second, microsecond = UnformatedDatetimeTuple
AM_PM = "AM" if hour < 12 else "PM"
hour = hour if hour < 12 else hour - 12
FORM = FORM.lower().strip()
if FORM == "full":
desiredDateForm = f"USA: {month}/{day}/{year} at {hour} :{minute} :{second} :{microsecond} {AM_PM}"
elif FORM == "year":
desiredDateForm = str(year)
elif FORM == "month":
desiredDateForm = str(month)
elif FORM == "day":
desiredDateForm = str(day)
elif FORM == "hour":
desiredDateForm = str(hour)
elif FORM == "minute":
desiredDateForm = str(minute)
elif FORM == "second":
desiredDateForm = str(second)
elif FORM == "microsecond":
desiredDateForm = str(microsecond)
elif FORM == "ampm":
desiredDateForm = AM_PM
else:
desiredDateForm = "ERROR"
return desiredDateForm
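
# --- Illustrative usage sketch (added for this document, not part of the
# original package). A minimal demo of two helpers defined above; `D_T` and
# `timezone` are the module-level imports from the top of this file, and the
# command string is a placeholder.
if __name__ == '__main__':
    # Format the current UTC time in two ways
    now = D_T.now(timezone('UTC'))
    print(format_datetime(now, 'full', timezone('EST')))
    print(format_datetime(now, 'year', timezone('UTC')))

    # Substitute a single `$args[...]` placeholder in a command code string
    print(checkArgs(['world'], 'Hello $args[1]'))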
|
Aoipy
|
/Aoipy-0.15.0-py3-none-any.whl/CharmCord/tools.py
|
tools.py
|
import discord
from discord.ext import commands
from CharmCord.tools import FunctionHandler
from .CommandHandler import load_commands
import asyncio
global TotalFuncs
global bots
global all_commands
global all_vars
class CharmCord:
# Global variables
global bots
global all_commands
global all_vars
def __init__(self, prefix, case_insensitive, intents: tuple, activity, help_command, load_command_dir):
# Global variables
global bots
global all_commands
# Initialize Start class
self.prefix = prefix
self.case_insensitive = case_insensitive
self.intented = intents
self._help_command = help_command
self._clients = ''
self.intent = ''
self._activity = activity
all_commands = {}
self.all_variables = {}
# Determine intents
if "all" in self.intented:
self.intent = discord.Intents.all()
elif "default" in self.intented:
self.intent = discord.Intents.default()
else:
self.intent = discord.Intents.default()
# Enable specific intents
if "message" in self.intented:
self.intent.message_content = True
if "members" in self.intented:
self.intent.members = True
if "presences" in self.intented:
self.intent.presences = True
# Create bot instances
if self._activity is None:
self._clients = commands.Bot(command_prefix=self.prefix, case_insensitive=self.case_insensitive, intents=self.intent,
help_command=self._help_command)
bots = self._clients
else:
self._clients = commands.Bot(command_prefix=self.prefix, case_insensitive=self.case_insensitive, intents=self.intent,
activity=self._activity,
help_command=self._help_command)
bots = self._clients
try:
load_commands(load_command_dir)
except FileNotFoundError:
pass
import json
try:
with open("variables.json", "r") as var:
pass
except FileNotFoundError:
with open("variables.json", "w") as var:
go = {"STRD": True}
json.dump(go, var)
# super().__init__(intents=self.intent)
def run(self, token: str):
bots.run(token)
def variables(self, vars: dict):
global all_vars
for key, value in vars.items():
self.all_variables[key] = value
all_vars = self.all_variables
def slashCommand(self, Name: str, Code: str, Args: list, Description: str):
from .SlashCommands import SlashCommands
sl = SlashCommands().slashCommand
sl(
Name=Name,
Code=Code,
Args=Args,
Description=Description.lower()
)
def command(self, Name: str, Code: str, Aliases=[]):
from .Commands import Commands
co = Commands().command
co(
Name=Name,
Code=Code,
Aliases=Aliases
)
def onChannelUpdated(self, Code):
@bots.event
async def on_guild_channel_update(before, after):
from CharmCord.Functions.Events.oldChannel import options as old
from CharmCord.Functions.Events.newChannel import options as new
for i in old.keys():
old[i] = eval(f"before.{i}")
if Code is not None:
from CharmCord.tools import findBracketPairs
await findBracketPairs(Code, TotalFuncs, None)
def onChannelDeleted(self, Code=None):
@bots.event
async def on_guild_channel_delete(channel):
from CharmCord.Functions.Events.deletedChannel import options
options["name"] = channel.name
options['id'] = channel.id
# more options coming
if Code is not None:
from CharmCord.tools import findBracketPairs
await findBracketPairs(Code, TotalFuncs, None)
def onReady(self, Code):
@bots.event
async def on_ready():
from CharmCord.tools import findBracketPairs
await findBracketPairs(Code, TotalFuncs, None)
def CharmClient(prefix: str, case_insensitive: bool = False, intents: tuple = ("default",), activity=None, help_command=None,
load_command_dir="commands"):
# Global variables
global bots
global TotalFuncs
global all_commands
# Initialize FunctionHandler and register functions
Functions = FunctionHandler()
TotalFuncs = Functions
Functions.register_functions()
# Create Start instance and return working bot
_final = CharmCord(prefix, case_insensitive, intents, activity, help_command, load_command_dir)
working = _final
return working
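
# --- Illustrative usage sketch (added for this document, not part of the
# original package). A minimal demo of building a bot with CharmClient; the
# `$placeholderOnReady[]` code string and the token are placeholders, not
# real CharmCord functions.
if __name__ == '__main__':
    # Build a bot with default intents plus message content
    demo_bot = CharmClient(prefix='!', intents=('default', 'message'))

    # Register a ready-event handler; the Code string is a placeholder
    # written in CharmCord's `$function[...]` syntax
    demo_bot.onReady(Code='$placeholderOnReady[]')

    # Supply a real bot token to actually start the bot
    # demo_bot.run('YOUR_BOT_TOKEN')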
|
Aoipy
|
/Aoipy-0.15.0-py3-none-any.whl/CharmCord/Classes/CharmCord.py
|
CharmCord.py
|
The Aoss_Tower_a1_Conversion package takes the ASCII data in aosstower
and converts it into NetCDF4 files. This conversion allows calibration
factors to be applied so the values are converted to geophysical units.
Typical usage looks like:
from datetime import datetime
from convertFromASCIIToNETCDF import bundler
# Enter some datetime objects
myDatetime = datetime(2003, 6, 5, 0, 0, 0)
myDatetime2 = datetime(2004, 5, 14, 0, 0, 0)
bundler.writeRange(myDatetime, myDatetime2)
Bundler can either convert a range of ASCII files between a given start
and end date, or convert all ASCII files from 2003-05-28 to the present
day.
All paths and date formats are generated by the Util class. If the user
wants to modify them, they must change Util's values.
Aoss_Tower_a1_Conversion
|
/Aoss_Tower_a1_Conversion-1.7.0.tar.gz/Aoss_Tower_a1_Conversion-1.7.0/docs/README.txt
|
README.txt
|