code (string, 110-64.5k chars) | apis (list) | extract_api (string, 123-69.9k chars)
---|---|---|
from datetime import datetime
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Right(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Province(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Amphoe(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    province_id: int
    name: str
class Tambon(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    amphoe_id: int
    name: str
class Religion(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class National(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Occupation(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class MaritalStatus(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class AcademicDegree(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Allergy(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Vehicle(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Language(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Relationship(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class IdType(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class FeedbackType(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class VisibilityLevel(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Module(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    detail: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class ModuleFunction(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    detail: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
|
[
"sqlmodel.Field"
] |
[((136, 147), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (145, 147), False, 'from fastapi import APIRouter\n'), ((209, 246), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (214, 246), False, 'from sqlmodel import Field, SQLModel\n'), ((325, 362), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (330, 362), False, 'from sqlmodel import Field, SQLModel\n'), ((439, 476), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (444, 476), False, 'from sqlmodel import Field, SQLModel\n'), ((574, 611), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (579, 611), False, 'from sqlmodel import Field, SQLModel\n'), ((709, 746), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (714, 746), False, 'from sqlmodel import Field, SQLModel\n'), ((825, 862), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (830, 862), False, 'from sqlmodel import Field, SQLModel\n'), ((943, 980), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (948, 980), False, 'from sqlmodel import Field, SQLModel\n'), ((1064, 1101), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1069, 1101), False, 'from sqlmodel import Field, SQLModel\n'), ((1186, 1223), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1191, 1223), False, 'from sqlmodel import Field, SQLModel\n'), ((1301, 1338), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1306, 1338), False, 'from sqlmodel import Field, SQLModel\n'), ((1416, 1453), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1421, 1453), False, 'from sqlmodel import Field, SQLModel\n'), ((1532, 1569), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1537, 1569), False, 'from sqlmodel import Field, SQLModel\n'), ((1652, 1689), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1657, 1689), False, 'from sqlmodel import Field, SQLModel\n'), ((1766, 1803), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1771, 1803), False, 'from sqlmodel import Field, SQLModel\n'), ((1886, 1923), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1891, 1923), False, 'from sqlmodel import Field, SQLModel\n'), ((2009, 2046), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2014, 2046), False, 'from sqlmodel import Field, SQLModel\n'), ((2123, 2160), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2128, 2160), False, 'from sqlmodel import Field, SQLModel\n'), ((2368, 2405), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, 
primary_key=True)\n', (2373, 2405), False, 'from sqlmodel import Field, SQLModel\n')]
|
from typing import Optional
from sqlmodel import Field, SQLModel, create_engine
class Team(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    headquarters: str
class Hero(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    secret_name: str
    age: Optional[int] = None
    team_id: Optional[int] = Field(default=None, foreign_key="team.id")
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
    SQLModel.metadata.create_all(engine)
def main():
    create_db_and_tables()
if __name__ == "__main__":
    main()
|
[
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] |
[((541, 577), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (554, 577), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((141, 178), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (146, 178), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((275, 312), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (280, 312), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((408, 450), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (413, 450), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((612, 648), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (640, 648), False, 'from sqlmodel import Field, SQLModel, create_engine\n')]
|
"""initial2
Revision ID: 9d9a<PASSWORD>dbfd7
Revises: <PASSWORD>
Create Date: 2021-11-01 04:28:38.426261
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '9d9a746db<PASSWORD>'
down_revision = 'a<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('images',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_images_id'), 'images', ['id'], unique=False)
    op.create_index(op.f('ix_images_url'), 'images', ['url'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_images_url'), table_name='images')
    op.drop_index(op.f('ix_images_id'), table_name='images')
    op.drop_table('images')
    # ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((994, 1017), 'alembic.op.drop_table', 'op.drop_table', (['"""images"""'], {}), "('images')\n", (1007, 1017), False, 'from alembic import op\n'), ((561, 590), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (584, 590), True, 'import sqlalchemy as sa\n'), ((617, 637), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (621, 637), False, 'from alembic import op\n'), ((691, 712), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (695, 712), False, 'from alembic import op\n'), ((885, 906), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (889, 906), False, 'from alembic import op\n'), ((947, 967), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (951, 967), False, 'from alembic import op\n'), ((452, 464), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (462, 464), True, 'import sqlalchemy as sa\n'), ((504, 538), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (536, 538), False, 'import sqlmodel\n')]
|
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import AbstractSet, Any, Dict, Mapping, Optional, Sequence, Union
from pydantic.fields import Undefined, UndefinedType
from sqlalchemy import Column
from sqlmodel import Field
from dbgen.utils.typing import NoArgAnyCallable
def Attribute(
    default: Any = Undefined,
    *,
    default_factory: Optional[NoArgAnyCallable] = None,
    alias: str = None,
    title: str = None,
    description: str = None,
    exclude: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any] = None,
    include: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any] = None,
    const: bool = None,
    gt: float = None,
    ge: float = None,
    lt: float = None,
    le: float = None,
    multiple_of: float = None,
    min_items: int = None,
    max_items: int = None,
    min_length: int = None,
    max_length: int = None,
    allow_mutation: bool = True,
    regex: str = None,
    primary_key: bool = False,
    foreign_key: Optional[Any] = None,
    nullable: Union[bool, UndefinedType] = Undefined,
    index: Union[bool, UndefinedType] = Undefined,
    sa_column: Union[Column, UndefinedType] = Undefined,
    sa_column_args: Union[Sequence[Any], UndefinedType] = Undefined,
    sa_column_kwargs: Union[Mapping[str, Any], UndefinedType] = Undefined,
    schema_extra: Optional[Dict[str, Any]] = None,
) -> Any:
    field = Field(
        default,
        default_factory=default_factory,
        alias=alias,
        title=title,
        description=description,
        exclude=exclude,
        include=include,
        const=const,
        gt=gt,
        ge=ge,
        lt=lt,
        le=le,
        multiple_of=multiple_of,
        min_items=min_items,
        max_items=max_items,
        min_length=min_length,
        max_length=max_length,
        allow_mutation=allow_mutation,
        regex=regex,
        primary_key=primary_key,
        foreign_key=foreign_key,
        nullable=nullable,
        index=index,
        sa_column=sa_column,
        sa_column_args=sa_column_args,
        sa_column_kwargs=sa_column_kwargs,
        schema_extra=schema_extra,
    )
    return field
|
[
"sqlmodel.Field"
] |
[((1965, 2523), 'sqlmodel.Field', 'Field', (['default'], {'default_factory': 'default_factory', 'alias': 'alias', 'title': 'title', 'description': 'description', 'exclude': 'exclude', 'include': 'include', 'const': 'const', 'gt': 'gt', 'ge': 'ge', 'lt': 'lt', 'le': 'le', 'multiple_of': 'multiple_of', 'min_items': 'min_items', 'max_items': 'max_items', 'min_length': 'min_length', 'max_length': 'max_length', 'allow_mutation': 'allow_mutation', 'regex': 'regex', 'primary_key': 'primary_key', 'foreign_key': 'foreign_key', 'nullable': 'nullable', 'index': 'index', 'sa_column': 'sa_column', 'sa_column_args': 'sa_column_args', 'sa_column_kwargs': 'sa_column_kwargs', 'schema_extra': 'schema_extra'}), '(default, default_factory=default_factory, alias=alias, title=title,\n description=description, exclude=exclude, include=include, const=const,\n gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, min_items=\n min_items, max_items=max_items, min_length=min_length, max_length=\n max_length, allow_mutation=allow_mutation, regex=regex, primary_key=\n primary_key, foreign_key=foreign_key, nullable=nullable, index=index,\n sa_column=sa_column, sa_column_args=sa_column_args, sa_column_kwargs=\n sa_column_kwargs, schema_extra=schema_extra)\n', (1970, 2523), False, 'from sqlmodel import Field\n')]
|
import numpy as nm
import six
from sfepy import data_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la
hyperelastic_data = {}
def post_process(out, pb, state, extend=False):
    if isinstance(state, dict):
        pass
    else:
        pb.update_materials_flag = 2
        stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
                             mode='el_avg')
        out['cauchy_stress'] = Struct(name='output_data',
                                      mode='cell',
                                      data=stress,
                                      dofs=None)
        strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
                             mode='el_avg')
        out['green_strain'] = Struct(name='output_data',
                                    mode='cell',
                                    data=strain,
                                    dofs=None)
        pb.update_materials_flag = 0
        if pb.conf.options.get('recover_micro', False):
            happ = pb.homogen_app
            if pb.ts.step == 0:
                rname = pb.conf.options.recovery_region
                rcells = pb.domain.regions[rname].get_cells()
                sh = hyperelastic_data['homog_mat_shape']
                happ.app_options.store_micro_idxs = sh[1] * rcells
            else:
                hpb = happ.problem
                recovery_hook = hpb.conf.options.get('recovery_hook', None)
                if recovery_hook is not None:
                    recovery_hook = hpb.conf.get_function(recovery_hook)
                    rname = pb.conf.options.recovery_region
                    rcoors = []
                    for ii in happ.app_options.store_micro_idxs:
                        key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
                        if key in happ.micro_state_cache:
                            rcoors.append(happ.micro_state_cache[key])
                    recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)
    return out
def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
    if problem.update_materials_flag == 2 and mode == 'qp':
        out = hyperelastic_data['homog_mat']
        return {k: nm.array(v) for k, v in six.iteritems(out)}
    elif problem.update_materials_flag == 0 or not mode == 'qp':
        return
    output('get_homog_mat')
    dim = problem.domain.mesh.dim
    update_var = problem.conf.options.mesh_update_variables[0]
    state_u = problem.equations.variables[update_var]
    state_u.field.clear_mappings()
    family_data = problem.family_data(state_u, term.region,
                                      term.integral, term.integration)
    mtx_f = family_data.mtx_f.reshape((coors.shape[0],)
                                      + family_data.mtx_f.shape[-2:])
    out = get_homog_coefs_nonlinear(ts, coors, mode, mtx_f,
                                    term=term, problem=problem,
                                    iteration=problem.iiter, **kwargs)
    out['E'] = 0.5 * (la.dot_sequences(mtx_f, mtx_f, 'ATB') - nm.eye(dim))
    hyperelastic_data['time'] = ts.step
    hyperelastic_data['homog_mat_shape'] = family_data.det_f.shape[:2]
    hyperelastic_data['homog_mat'] = \
        {k: nm.array(v) for k, v in six.iteritems(out)}
    return out
def ulf_iteration_hook(pb, nls, vec, it, err, err0):
    vec = pb.equations.make_full_vec(vec)
    pb.equations.set_variables_from_state(vec)
    update_var = pb.conf.options.mesh_update_variables[0]
    state_u = pb.equations.variables[update_var]
    nods = state_u.field.get_dofs_in_region(state_u.field.region, merge=True)
    coors = pb.domain.get_mesh_coors().copy()
    coors[nods, :] += state_u().reshape(len(nods), state_u.n_components)
    if len(state_u.field.mappings0) == 0:
        state_u.field.save_mappings()
    state_u.field.clear_mappings()
    pb.set_mesh_coors(coors, update_fields=False, actual=True,
                      clear_all=False)
    pb.iiter = it
    pb.update_materials_flag = True
    pb.update_materials()
    pb.update_materials_flag = False
class MyEvalResidual(object):
    def __init__(self, problem, matrix_hook=None):
        self.problem = problem
        self.matrix_hook = problem.matrix_hook
    def eval_residual(self, vec, is_full=False):
        if not is_full:
            vec = self.problem.equations.make_full_vec(vec)
        vec_r = self.problem.equations.eval_residuals(vec * 0)
        return vec_r
def ulf_init(pb):
    pb.family_data = HyperElasticULFamilyData()
    pb.init_solvers()
    pb.nls.fun = MyEvalResidual(pb).eval_residual
    pb.nls_iter_hook = ulf_iteration_hook
    pb.domain.mesh.coors_act = pb.domain.mesh.coors.copy()
    pb_vars = pb.get_variables()
    pb_vars['u'].init_data()
    pb.update_materials_flag = True
    pb.iiter = 0
options = {
    'output_dir': 'output',
    'mesh_update_variables': ['u'],
    'nls_iter_hook': ulf_iteration_hook,
    'pre_process_hook': ulf_init,
    'micro_filename': 'examples/homogenization/nonlinear_homogenization.py',
    'recover_micro': True,
    'recovery_region': 'Recovery',
    'post_process_hook': post_process,
}
materials = {
    'solid': 'get_homog',
}
fields = {
    'displacement': ('real', 'vector', 'Omega', 1),
}
variables = {
    'u': ('unknown field', 'displacement'),
    'v': ('test field', 'displacement', 'u'),
}
filename_mesh = data_dir + '/meshes/2d/its2D.mesh'
regions = {
    'Omega': 'all',
    'Left': ('vertices in (x < 0.001)', 'facet'),
    'Bottom': ('vertices in (y < 0.001 )', 'facet'),
    'Recovery': ('cell 49, 81', 'cell'),
}
ebcs = {
    'l': ('Left', {'u.all': 0.0}),
    'b': ('Bottom', {'u.all': 'move_bottom'}),
}
centre = nm.array([0, 0], dtype=nm.float64)
def move_bottom(ts, coor, **kwargs):
    from sfepy.linalg import rotation_matrix2d
    vec = coor[:, 0:2] - centre
    angle = 3 * ts.step
    print('angle:', angle)
    mtx = rotation_matrix2d(angle)
    out = nm.dot(vec, mtx) - vec
    return out
functions = {
    'move_bottom': (move_bottom,),
    'get_homog': (get_homog_mat,),
}
equations = {
    'balance_of_forces':
    """dw_nonsym_elastic.1.Omega(solid.A, v, u)
     = - dw_lin_prestress.1.Omega(solid.S, v)""",
}
solvers = {
    'ls': ('ls.scipy_direct', {}),
    'newton': ('nls.newton', {
        'eps_a': 1e-3,
        'eps_r': 1e-3,
        'i_max': 20,
    }),
    'ts': ('ts.simple', {
        't0': 0,
        't1': 1,
        'n_step': 3 + 1,
    })
}
|
[
"sfepy.base.base.Struct",
"sfepy.linalg.rotation_matrix2d",
"sfepy.homogenization.micmac.get_homog_coefs_nonlinear",
"sfepy.linalg.dot_sequences",
"sfepy.base.base.output",
"sfepy.terms.terms_hyperelastic_ul.HyperElasticULFamilyData"
] |
[((5887, 5921), 'numpy.array', 'nm.array', (['[0, 0]'], {'dtype': 'nm.float64'}), '([0, 0], dtype=nm.float64)\n', (5895, 5921), True, 'import numpy as nm\n'), ((2515, 2538), 'sfepy.base.base.output', 'output', (['"""get_homog_mat"""'], {}), "('get_homog_mat')\n", (2521, 2538), False, 'from sfepy.base.base import Struct, output\n'), ((2994, 3111), 'sfepy.homogenization.micmac.get_homog_coefs_nonlinear', 'get_homog_coefs_nonlinear', (['ts', 'coors', 'mode', 'mtx_f'], {'term': 'term', 'problem': 'problem', 'iteration': 'problem.iiter'}), '(ts, coors, mode, mtx_f, term=term, problem=\n problem, iteration=problem.iiter, **kwargs)\n', (3019, 3111), False, 'from sfepy.homogenization.micmac import get_homog_coefs_nonlinear\n'), ((4686, 4712), 'sfepy.terms.terms_hyperelastic_ul.HyperElasticULFamilyData', 'HyperElasticULFamilyData', ([], {}), '()\n', (4710, 4712), False, 'from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData\n'), ((6102, 6126), 'sfepy.linalg.rotation_matrix2d', 'rotation_matrix2d', (['angle'], {}), '(angle)\n', (6119, 6126), False, 'from sfepy.linalg import rotation_matrix2d\n'), ((575, 638), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'stress', 'dofs': 'None'}), "(name='output_data', mode='cell', data=stress, dofs=None)\n", (581, 638), False, 'from sfepy.base.base import Struct, output\n'), ((898, 961), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'strain', 'dofs': 'None'}), "(name='output_data', mode='cell', data=strain, dofs=None)\n", (904, 961), False, 'from sfepy.base.base import Struct, output\n'), ((3418, 3429), 'numpy.array', 'nm.array', (['v'], {}), '(v)\n', (3426, 3429), True, 'import numpy as nm\n'), ((6137, 6153), 'numpy.dot', 'nm.dot', (['vec', 'mtx'], {}), '(vec, mtx)\n', (6143, 6153), True, 'import numpy as nm\n'), ((2386, 2397), 'numpy.array', 'nm.array', (['v'], {}), '(v)\n', (2394, 2397), True, 'import numpy as nm\n'), ((3202, 3239), 'sfepy.linalg.dot_sequences', 'la.dot_sequences', (['mtx_f', 'mtx_f', '"""ATB"""'], {}), "(mtx_f, mtx_f, 'ATB')\n", (3218, 3239), True, 'import sfepy.linalg as la\n'), ((3242, 3253), 'numpy.eye', 'nm.eye', (['dim'], {}), '(dim)\n', (3248, 3253), True, 'import numpy as nm\n'), ((3442, 3460), 'six.iteritems', 'six.iteritems', (['out'], {}), '(out)\n', (3455, 3460), False, 'import six\n'), ((2410, 2428), 'six.iteritems', 'six.iteritems', (['out'], {}), '(out)\n', (2423, 2428), False, 'import six\n')]
|
import uuid
from logging import getLogger
from typing import Optional
from fastapi import UploadFile
from sqlmodel import select, Session
from .models import User
logger = getLogger("uvicorn.error")
def get_user(username: str, session: Session) -> Optional[User]:
    statement = select(User).where(User.username == username)
    user = session.exec(statement).first()
    if user:
        return user
    return None
def save_file(file: UploadFile) -> str:
    filename = uuid.uuid4()
    path = f"static/{filename}"
    with open(path, "wb") as f:
        content = file.file.read()
        f.write(content)
    return path
|
[
"sqlmodel.select"
] |
[((175, 201), 'logging.getLogger', 'getLogger', (['"""uvicorn.error"""'], {}), "('uvicorn.error')\n", (184, 201), False, 'from logging import getLogger\n'), ((480, 492), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (490, 492), False, 'import uuid\n'), ((285, 297), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (291, 297), False, 'from sqlmodel import select, Session\n')]
|
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
def test_tutorial(clear_sqlmodel):
    from docs_src.tutorial.where import tutorial010 as mod
    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    calls = []
    new_print = get_testing_print_function(calls)
    with patch("builtins.print", new=new_print):
        mod.main()
    assert calls == [
        [{"name": "Tarantula", "secret_name": "<NAME>", "age": 32, "id": 4}],
        [{"name": "<NAME>", "secret_name": "<NAME>", "age": 35, "id": 5}],
        [
            {
                "name": "<NAME>",
                "secret_name": "<NAME>",
                "age": 93,
                "id": 7,
            }
        ],
    ]
|
[
"sqlmodel.create_engine"
] |
[((267, 296), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (280, 296), False, 'from sqlmodel import create_engine\n'), ((373, 411), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (378, 411), False, 'from unittest.mock import patch\n'), ((421, 431), 'docs_src.tutorial.where.tutorial010.main', 'mod.main', ([], {}), '()\n', (429, 431), True, 'from docs_src.tutorial.where import tutorial010 as mod\n')]
|
#!/usr/bin/env python
"""
Plot quadrature points for the given geometry and integration order.
"""
from optparse import OptionParser
import sfepy.postprocess.plot_quadrature as pq
usage = '%prog [options]\n' + __doc__.rstrip()
helps = {
    'geometry' :
    'reference element geometry, one of "2_3", "2_4", "3_4", "3_8"'
    ' [default: %default]',
    'order' :
    'quadrature order [default: %default]',
    'min_radius' :
    'min. radius of points corresponding to the min. weight'
    ' [default: %default]',
    'max_radius' :
    'max. radius of points corresponding to the max. weight'
    ' [default: %default]',
    'show_colorbar' :
    'show colorbar for quadrature weights'
}
def main():
    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('-g', '--geometry', metavar='name',
                      action='store', dest='geometry',
                      default='2_4', help=helps['geometry'])
    parser.add_option('-n', '--order', metavar='order', type=int,
                      action='store', dest='order',
                      default=2, help=helps['order'])
    parser.add_option('-r', '--min-radius', metavar='float', type=float,
                      action='store', dest='min_radius',
                      default=10, help=helps['min_radius'])
    parser.add_option('-R', '--max-radius', metavar='float', type=float,
                      action='store', dest='max_radius',
                      default=50, help=helps['max_radius'])
    parser.add_option('-c', '--show-colorbar',
                      action='store_true', dest='show_colorbar',
                      default=False, help=helps['show_colorbar'])
    options, args = parser.parse_args()
    if len(args) != 0:
        parser.print_help(),
        return
    pq.plot_quadrature(None, options.geometry, options.order,
                       options.min_radius, options.max_radius,
                       options.show_colorbar)
    pq.plt.show()
if __name__ == '__main__':
    main()
|
[
"sfepy.postprocess.plot_quadrature.plt.show",
"sfepy.postprocess.plot_quadrature.plot_quadrature"
] |
[((722, 764), 'optparse.OptionParser', 'OptionParser', ([], {'usage': 'usage', 'version': '"""%prog"""'}), "(usage=usage, version='%prog')\n", (734, 764), False, 'from optparse import OptionParser\n'), ((1782, 1907), 'sfepy.postprocess.plot_quadrature.plot_quadrature', 'pq.plot_quadrature', (['None', 'options.geometry', 'options.order', 'options.min_radius', 'options.max_radius', 'options.show_colorbar'], {}), '(None, options.geometry, options.order, options.\n min_radius, options.max_radius, options.show_colorbar)\n', (1800, 1907), True, 'import sfepy.postprocess.plot_quadrature as pq\n'), ((1953, 1966), 'sfepy.postprocess.plot_quadrature.plt.show', 'pq.plt.show', ([], {}), '()\n', (1964, 1966), True, 'import sfepy.postprocess.plot_quadrature as pq\n')]
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import pickle
from tempfile import TemporaryFile
import numpy as np
from megengine.core import Buffer, Parameter, tensor
from megengine.test import assertTensorClose
def test_tensor_serialization():
    def tensor_eq(a, b):
        assert a.dtype == b.dtype
        assert a.device == b.device
        assert a.requires_grad == b.requires_grad
        assertTensorClose(a, b)
    with TemporaryFile() as f:
        data = np.random.randint(low=0, high=7, size=[233])
        a = tensor(data, device="xpux", dtype=np.int32)
        pickle.dump(a, f)
        f.seek(0)
        b = pickle.load(f)
        tensor_eq(a, b)
    with TemporaryFile() as f:
        a = Parameter(np.random.random(size=(233, 2)).astype(np.float32))
        pickle.dump(a, f)
        f.seek(0)
        b = pickle.load(f)
        assert isinstance(b, Parameter)
        tensor_eq(a, b)
    with TemporaryFile() as f:
        a = Buffer(np.random.random(size=(2, 233)).astype(np.float32))
        pickle.dump(a, f)
        f.seek(0)
        b = pickle.load(f)
        assert isinstance(b, Buffer)
        tensor_eq(a, b)
|
[
"megengine.core.tensor",
"megengine.test.assertTensorClose"
] |
[((733, 756), 'megengine.test.assertTensorClose', 'assertTensorClose', (['a', 'b'], {}), '(a, b)\n', (750, 756), False, 'from megengine.test import assertTensorClose\n'), ((767, 782), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (780, 782), False, 'from tempfile import TemporaryFile\n'), ((804, 848), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(7)', 'size': '[233]'}), '(low=0, high=7, size=[233])\n', (821, 848), True, 'import numpy as np\n'), ((861, 904), 'megengine.core.tensor', 'tensor', (['data'], {'device': '"""xpux"""', 'dtype': 'np.int32'}), "(data, device='xpux', dtype=np.int32)\n", (867, 904), False, 'from megengine.core import Buffer, Parameter, tensor\n'), ((913, 930), 'pickle.dump', 'pickle.dump', (['a', 'f'], {}), '(a, f)\n', (924, 930), False, 'import pickle\n'), ((961, 975), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (972, 975), False, 'import pickle\n'), ((1010, 1025), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (1023, 1025), False, 'from tempfile import TemporaryFile\n'), ((1114, 1131), 'pickle.dump', 'pickle.dump', (['a', 'f'], {}), '(a, f)\n', (1125, 1131), False, 'import pickle\n'), ((1162, 1176), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1173, 1176), False, 'import pickle\n'), ((1251, 1266), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (1264, 1266), False, 'from tempfile import TemporaryFile\n'), ((1352, 1369), 'pickle.dump', 'pickle.dump', (['a', 'f'], {}), '(a, f)\n', (1363, 1369), False, 'import pickle\n'), ((1400, 1414), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1411, 1414), False, 'import pickle\n'), ((1054, 1085), 'numpy.random.random', 'np.random.random', ([], {'size': '(233, 2)'}), '(size=(233, 2))\n', (1070, 1085), True, 'import numpy as np\n'), ((1292, 1323), 'numpy.random.random', 'np.random.random', ([], {'size': '(2, 233)'}), '(size=(2, 233))\n', (1308, 1323), True, 'import numpy as np\n')]
|
import typing as t
from sqlmodel import SQLModel, Field, Relationship
from datetime import datetime
from .discussions import DB_Discussion
class DB_Post(SQLModel, table=True):
    __tablename__ = 'posts'
    id: t.Optional[int] = Field(default=None, primary_key=True)
    """The ID of the post. This is handled by the database."""
    discussion_id: int = Field(foreign_key='discussions.id')
    discussion: t.Optional[DB_Discussion] = Relationship(back_populates='posts')
    """Discussion that this post belongs to."""
    number: int = Field(default=1)
    """The number/order of the post in the discussion."""
    created_at: datetime = Field(default=datetime.utcnow())
    """When was this post created. Default is now."""
    type: str = Field(max_length=100, default='comment')
    """The type of the post. Can be `'comment'` for standard post."""
    content: t.Text
    """The post's content, in HTML."""
    edited_at: t.Optional[datetime]
    """When was the post edited at?"""
    hidden_at: t.Optional[datetime]
    """When was the post hidden at?"""
    ip_address: t.Optional[str] = Field(max_length=45)
    """The IP address of the user that created the post."""
    is_private: bool = Field(default=False)
    """Whether or not the post is private."""
    is_approved: bool = Field(default=True)
    """Whether or not the post is approved."""
|
[
"sqlmodel.Relationship",
"sqlmodel.Field"
] |
[((235, 272), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (240, 272), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((362, 397), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""discussions.id"""'}), "(foreign_key='discussions.id')\n", (367, 397), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((442, 478), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""posts"""'}), "(back_populates='posts')\n", (454, 478), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((546, 562), 'sqlmodel.Field', 'Field', ([], {'default': '(1)'}), '(default=1)\n', (551, 562), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((751, 791), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)', 'default': '"""comment"""'}), "(max_length=100, default='comment')\n", (756, 791), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1108, 1128), 'sqlmodel.Field', 'Field', ([], {'max_length': '(45)'}), '(max_length=45)\n', (1113, 1128), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1213, 1233), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (1218, 1233), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1304, 1323), 'sqlmodel.Field', 'Field', ([], {'default': '(True)'}), '(default=True)\n', (1309, 1323), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((662, 679), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (677, 679), False, 'from datetime import datetime\n')]
|
import megengine as mge
import megengine.functional as F
import numpy as np
def bilinear_sampler(img, coords, mode="bilinear", mask=False):
"""Wrapper for grid_sample, uses pixel coordinates"""
H, W = img.shape[-2:]
img = F.remap(img, coords, border_mode="constant")
if mask:
mask = (
(coords[:, :, :, 0:1] < 0)
| (coords[:, :, :, 0:1] > W - 1)
| (coords[:, :, :, 1:2] < 0)
| (coords[:, :, :, 1:2] > H - 1)
)
mask = F.logical_not(mask)
return img, mask.astype("float32")
return img
def coords_grid(batch, ht, wd):
x_grid, y_grid = np.meshgrid(np.arange(wd), np.arange(ht))
y_grid, x_grid = mge.tensor(y_grid, dtype="float32"), mge.tensor(
x_grid, dtype="float32"
)
coords = F.stack([x_grid, y_grid], axis=0)
coords = F.repeat(F.expand_dims(coords, axis=0), batch, axis=0)
return coords
def manual_pad(x, pady, padx):
if pady > 0:
u = F.repeat(x[:, :, 0:1, :], pady, axis=2)
d = F.repeat(x[:, :, -1:, :], pady, axis=2)
x = F.concat([u, x, d], axis=2)
if padx > 0:
l = F.repeat(x[:, :, :, 0:1], padx, axis=3)
r = F.repeat(x[:, :, :, -1:], padx, axis=3)
x = F.concat([l, x, r], axis=3)
return x
|
[
"megengine.functional.remap",
"megengine.functional.stack",
"megengine.tensor",
"megengine.functional.expand_dims",
"megengine.functional.concat",
"megengine.functional.repeat",
"megengine.functional.logical_not"
] |
[((237, 281), 'megengine.functional.remap', 'F.remap', (['img', 'coords'], {'border_mode': '"""constant"""'}), "(img, coords, border_mode='constant')\n", (244, 281), True, 'import megengine.functional as F\n'), ((805, 838), 'megengine.functional.stack', 'F.stack', (['[x_grid, y_grid]'], {'axis': '(0)'}), '([x_grid, y_grid], axis=0)\n', (812, 838), True, 'import megengine.functional as F\n'), ((508, 527), 'megengine.functional.logical_not', 'F.logical_not', (['mask'], {}), '(mask)\n', (521, 527), True, 'import megengine.functional as F\n'), ((654, 667), 'numpy.arange', 'np.arange', (['wd'], {}), '(wd)\n', (663, 667), True, 'import numpy as np\n'), ((669, 682), 'numpy.arange', 'np.arange', (['ht'], {}), '(ht)\n', (678, 682), True, 'import numpy as np\n'), ((705, 740), 'megengine.tensor', 'mge.tensor', (['y_grid'], {'dtype': '"""float32"""'}), "(y_grid, dtype='float32')\n", (715, 740), True, 'import megengine as mge\n'), ((742, 777), 'megengine.tensor', 'mge.tensor', (['x_grid'], {'dtype': '"""float32"""'}), "(x_grid, dtype='float32')\n", (752, 777), True, 'import megengine as mge\n'), ((861, 890), 'megengine.functional.expand_dims', 'F.expand_dims', (['coords'], {'axis': '(0)'}), '(coords, axis=0)\n', (874, 890), True, 'import megengine.functional as F\n'), ((987, 1026), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, 0:1, :]', 'pady'], {'axis': '(2)'}), '(x[:, :, 0:1, :], pady, axis=2)\n', (995, 1026), True, 'import megengine.functional as F\n'), ((1039, 1078), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, -1:, :]', 'pady'], {'axis': '(2)'}), '(x[:, :, -1:, :], pady, axis=2)\n', (1047, 1078), True, 'import megengine.functional as F\n'), ((1091, 1118), 'megengine.functional.concat', 'F.concat', (['[u, x, d]'], {'axis': '(2)'}), '([u, x, d], axis=2)\n', (1099, 1118), True, 'import megengine.functional as F\n'), ((1148, 1187), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, :, 0:1]', 'padx'], {'axis': '(3)'}), '(x[:, :, :, 0:1], padx, axis=3)\n', (1156, 1187), True, 'import megengine.functional as F\n'), ((1200, 1239), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, :, -1:]', 'padx'], {'axis': '(3)'}), '(x[:, :, :, -1:], padx, axis=3)\n', (1208, 1239), True, 'import megengine.functional as F\n'), ((1252, 1279), 'megengine.functional.concat', 'F.concat', (['[l, x, r]'], {'axis': '(3)'}), '([l, x, r], axis=3)\n', (1260, 1279), True, 'import megengine.functional as F\n')]
|
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
import pytest
from psycopg import connect as pg3_connect
from sqlalchemy import MetaData
from sqlmodel import Session, create_engine, text
from dbgen.configuration import config
from dbgen.core.entity import BaseEntity
from dbgen.core.metadata import meta_registry
@pytest.fixture()
def clear_registry():
    # Clear the tables in the metadata for the default base model
    BaseEntity.metadata.clear()
    # Clear the Models associated with the registry, to avoid warnings
    BaseEntity._sa_registry.dispose()
    yield
    BaseEntity.metadata.clear()
    BaseEntity._sa_registry.dispose()
@pytest.fixture(scope="module")
def sql_engine():
    dsn = os.environ.get('TEST_DSN', config.main_dsn)
    engine = create_engine(dsn)
    return engine
@pytest.fixture(scope="function")
def connection(sql_engine):
    """sql_engine connection"""
    metadata = MetaData()
    metadata.reflect(sql_engine)
    metadata.drop_all(sql_engine)
    connection = sql_engine.connect()
    yield connection
    connection.close()
@pytest.fixture(scope="function")
def session(connection):
    transaction = connection.begin()
    session = Session(bind=connection, autocommit=False, autoflush=True)
    yield session
    transaction.rollback()
    transaction.close()
    session.close()
@pytest.fixture(scope="function")
def seed_db(connection):
    connection.execute(text("CREATE table users (id serial primary key, name text);"))
    for user in range(100):
        connection.execute(text(f"INSERT into users(name) values ('user_{user}');"))
    connection.commit()
    yield
    connection.execute(text("drop table users;"))
    connection.commit()
@pytest.fixture(scope="function")
def make_db(connection):
    pass
    metadata = MetaData()
    metadata.reflect(connection)
    metadata.drop_all(connection)
    BaseEntity.metadata.create_all(connection)
    connection.commit()
    yield
    BaseEntity.metadata.drop_all(connection)
    connection.commit()
@pytest.fixture(scope="function")
def raw_connection(make_db, sql_engine):
    raw = sql_engine.raw_connection()
    yield raw
    raw.close()
@pytest.fixture(scope="function")
def raw_pg3_connection(make_db, sql_engine):
    connection = pg3_connect(str(sql_engine.url))
    yield connection
    connection.close()
@pytest.fixture
def debug_logger():
    custom_logger = logging.getLogger("dbgen")
    custom_logger.propagate = True
    custom_logger.setLevel(logging.DEBUG)
    log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s Test"
    formatter = logging.Formatter(log_format)
    console_handler = logging.StreamHandler(stream=sys.stdout)
    console_handler.setFormatter(formatter)
    custom_logger.addHandler(console_handler)
    return custom_logger
@pytest.fixture(scope='function')
def recreate_meta(connection):
    connection.execute(text(f'create schema if not exists {config.meta_schema}'))
    meta_registry.metadata.drop_all(connection)
    meta_registry.metadata.create_all(connection)
    yield
|
[
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.text"
] |
[((903, 919), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (917, 919), False, 'import pytest\n'), ((1232, 1262), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1246, 1262), False, 'import pytest\n'), ((1388, 1420), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1402, 1420), False, 'import pytest\n'), ((1659, 1691), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1673, 1691), False, 'import pytest\n'), ((1919, 1951), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1933, 1951), False, 'import pytest\n'), ((2288, 2320), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2302, 2320), False, 'import pytest\n'), ((2602, 2634), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2616, 2634), False, 'import pytest\n'), ((2747, 2779), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2761, 2779), False, 'import pytest\n'), ((3386, 3418), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (3400, 3418), False, 'import pytest\n'), ((1012, 1039), 'dbgen.core.entity.BaseEntity.metadata.clear', 'BaseEntity.metadata.clear', ([], {}), '()\n', (1037, 1039), False, 'from dbgen.core.entity import BaseEntity\n'), ((1115, 1148), 'dbgen.core.entity.BaseEntity._sa_registry.dispose', 'BaseEntity._sa_registry.dispose', ([], {}), '()\n', (1146, 1148), False, 'from dbgen.core.entity import BaseEntity\n'), ((1163, 1190), 'dbgen.core.entity.BaseEntity.metadata.clear', 'BaseEntity.metadata.clear', ([], {}), '()\n', (1188, 1190), False, 'from dbgen.core.entity import BaseEntity\n'), ((1195, 1228), 'dbgen.core.entity.BaseEntity._sa_registry.dispose', 'BaseEntity._sa_registry.dispose', ([], {}), '()\n', (1226, 1228), False, 'from dbgen.core.entity import BaseEntity\n'), ((1291, 1334), 'os.environ.get', 'os.environ.get', (['"""TEST_DSN"""', 'config.main_dsn'], {}), "('TEST_DSN', config.main_dsn)\n", (1305, 1334), False, 'import os\n'), ((1348, 1366), 'sqlmodel.create_engine', 'create_engine', (['dsn'], {}), '(dsn)\n', (1361, 1366), False, 'from sqlmodel import Session, create_engine, text\n'), ((1496, 1506), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (1504, 1506), False, 'from sqlalchemy import MetaData\n'), ((1768, 1826), 'sqlmodel.Session', 'Session', ([], {'bind': 'connection', 'autocommit': '(False)', 'autoflush': '(True)'}), '(bind=connection, autocommit=False, autoflush=True)\n', (1775, 1826), False, 'from sqlmodel import Session, create_engine, text\n'), ((2371, 2381), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (2379, 2381), False, 'from sqlalchemy import MetaData\n'), ((2453, 2495), 'dbgen.core.entity.BaseEntity.metadata.create_all', 'BaseEntity.metadata.create_all', (['connection'], {}), '(connection)\n', (2483, 2495), False, 'from dbgen.core.entity import BaseEntity\n'), ((2534, 2574), 'dbgen.core.entity.BaseEntity.metadata.drop_all', 'BaseEntity.metadata.drop_all', (['connection'], {}), '(connection)\n', (2562, 2574), False, 'from dbgen.core.entity import BaseEntity\n'), ((2977, 3003), 'logging.getLogger', 'logging.getLogger', (['"""dbgen"""'], {}), "('dbgen')\n", (2994, 3003), False, 'import logging\n'), ((3175, 3204), 'logging.Formatter', 'logging.Formatter', (['log_format'], {}), '(log_format)\n', (3192, 3204), False, 
'import logging\n'), ((3227, 3267), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stdout'}), '(stream=sys.stdout)\n', (3248, 3267), False, 'import logging\n'), ((3536, 3579), 'dbgen.core.metadata.meta_registry.metadata.drop_all', 'meta_registry.metadata.drop_all', (['connection'], {}), '(connection)\n', (3567, 3579), False, 'from dbgen.core.metadata import meta_registry\n'), ((3584, 3629), 'dbgen.core.metadata.meta_registry.metadata.create_all', 'meta_registry.metadata.create_all', (['connection'], {}), '(connection)\n', (3617, 3629), False, 'from dbgen.core.metadata import meta_registry\n'), ((2000, 2062), 'sqlmodel.text', 'text', (['"""CREATE table users (id serial primary key, name text);"""'], {}), "('CREATE table users (id serial primary key, name text);')\n", (2004, 2062), False, 'from sqlmodel import Session, create_engine, text\n'), ((2234, 2259), 'sqlmodel.text', 'text', (['"""drop table users;"""'], {}), "('drop table users;')\n", (2238, 2259), False, 'from sqlmodel import Session, create_engine, text\n'), ((3473, 3530), 'sqlmodel.text', 'text', (['f"""create schema if not exists {config.meta_schema}"""'], {}), "(f'create schema if not exists {config.meta_schema}')\n", (3477, 3530), False, 'from sqlmodel import Session, create_engine, text\n'), ((2119, 2175), 'sqlmodel.text', 'text', (['f"""INSERT into users(name) values (\'user_{user}\');"""'], {}), '(f"INSERT into users(name) values (\'user_{user}\');")\n', (2123, 2175), False, 'from sqlmodel import Session, create_engine, text\n')]
|
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
expected_calls = [
    [
        [
            {
                "id": 7,
                "name": "Captain North America",
                "secret_name": "<NAME>",
                "age": 93,
            }
        ]
    ]
]
def test_tutorial(clear_sqlmodel):
    from docs_src.tutorial.offset_and_limit import tutorial003 as mod
    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    calls = []
    new_print = get_testing_print_function(calls)
    with patch("builtins.print", new=new_print):
        mod.main()
    assert calls == expected_calls
|
[
"sqlmodel.create_engine"
] |
[((502, 531), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (515, 531), False, 'from sqlmodel import create_engine\n'), ((608, 646), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (613, 646), False, 'from unittest.mock import patch\n'), ((656, 666), 'docs_src.tutorial.offset_and_limit.tutorial003.main', 'mod.main', ([], {}), '()\n', (664, 666), True, 'from docs_src.tutorial.offset_and_limit import tutorial003 as mod\n')]
|
from time import sleep
from sqlmodel import select
from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.metrics import prom_metrics
from icon_governance.models.preps import Prep
from icon_governance.utils.rpc import convert_hex_int, getStake, post_rpc_json
def get_prep_stake(session):
    result = session.execute(select(Prep))
    preps = result.scalars().all()
    for prep in preps:
        prep.stake = convert_hex_int(post_rpc_json(getStake(prep.address))["stake"]) / 1e18
        session.merge(prep)
        session.commit()
def prep_stake_cron(session):
    while True:
        logger.info("Starting stake cron")
        get_prep_stake(session)
        logger.info("Prep stake ran.")
        prom_metrics.preps_stake_cron_ran.inc()
        sleep(settings.CRON_SLEEP_SEC)
if __name__ == "__main__":
    from icon_governance.db import session_factory
    get_prep_stake(session_factory())
|
[
"sqlmodel.select"
] |
[((370, 382), 'sqlmodel.select', 'select', (['Prep'], {}), '(Prep)\n', (376, 382), False, 'from sqlmodel import select\n'), ((645, 679), 'icon_governance.log.logger.info', 'logger.info', (['"""Starting stake cron"""'], {}), "('Starting stake cron')\n", (656, 679), False, 'from icon_governance.log import logger\n'), ((720, 750), 'icon_governance.log.logger.info', 'logger.info', (['"""Prep stake ran."""'], {}), "('Prep stake ran.')\n", (731, 750), False, 'from icon_governance.log import logger\n'), ((759, 798), 'icon_governance.metrics.prom_metrics.preps_stake_cron_ran.inc', 'prom_metrics.preps_stake_cron_ran.inc', ([], {}), '()\n', (796, 798), False, 'from icon_governance.metrics import prom_metrics\n'), ((807, 837), 'time.sleep', 'sleep', (['settings.CRON_SLEEP_SEC'], {}), '(settings.CRON_SLEEP_SEC)\n', (812, 837), False, 'from time import sleep\n'), ((938, 955), 'icon_governance.db.session_factory', 'session_factory', ([], {}), '()\n', (953, 955), False, 'from icon_governance.db import session_factory\n'), ((494, 516), 'icon_governance.utils.rpc.getStake', 'getStake', (['prep.address'], {}), '(prep.address)\n', (502, 516), False, 'from icon_governance.utils.rpc import convert_hex_int, getStake, post_rpc_json\n')]
|
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import validator
from datetime import datetime, date
from fastapi import HTTPException
import re
class AppUser(SQLModel, table=True):
"""Create an SQLModel for users"""
id: Optional[int] = Field(default=None, primary_key=True)
username: str
first_name: str
last_name: str
email: str
role_id: int
team_id: Optional[int] = None
start_date: date
created_at: datetime
updated_at: datetime
is_active: bool
__table_args__ = {"schema": "app_db"}
@validator("first_name", always=True)
def valid_first_name(cls, first_name):
assert first_name.replace(
" ", ""
).isalpha(), "only alphabet letters allowed in first name"
if first_name[0].isupper() == False:
raise HTTPException(
status_code=400, detail="first name should start with a capital letter"
)
return first_name
@validator("last_name", always=True)
def valid_last_name(cls, ln_input):
assert ln_input.replace(
" ", ""
).isalpha(), "only alphabet letters allowed in last name"
return ln_input
@validator("email", always=True)
def valid_email(cls, email_input):
regex = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
assert re.fullmatch(regex, email_input), "email format incorrect"
return email_input
|
[
"sqlmodel.Field"
] |
[((286, 323), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (291, 323), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((587, 623), 'pydantic.validator', 'validator', (['"""first_name"""'], {'always': '(True)'}), "('first_name', always=True)\n", (596, 623), False, 'from pydantic import validator\n'), ((1001, 1036), 'pydantic.validator', 'validator', (['"""last_name"""'], {'always': '(True)'}), "('last_name', always=True)\n", (1010, 1036), False, 'from pydantic import validator\n'), ((1226, 1257), 'pydantic.validator', 'validator', (['"""email"""'], {'always': '(True)'}), "('email', always=True)\n", (1235, 1257), False, 'from pydantic import validator\n'), ((1383, 1415), 're.fullmatch', 're.fullmatch', (['regex', 'email_input'], {}), '(regex, email_input)\n', (1395, 1415), False, 'import re\n'), ((852, 943), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""first name should start with a capital letter"""'}), "(status_code=400, detail=\n 'first name should start with a capital letter')\n", (865, 943), False, 'from fastapi import HTTPException\n')]
|
from select import select
from app.schemas.common import (
    IGetResponseBase,
    IPostResponseBase,
    IDeleteResponseBase,
)
from app.utils.text_nlp import analyze_text
from app.schemas.text_inference import (
    TextInferenceCreate,
    TextInferenceRead,
)
from fastapi_pagination import Page, Params
from sqlmodel.ext.asyncio.session import AsyncSession
from fastapi import APIRouter, Depends, HTTPException, Query
from app.api import deps
from app import crud
from app.models import TextInference
from app.models import TextInferenceBase
from app.models.user import User
from sqlmodel import select
router = APIRouter()
@router.get(
    "/text-classification-inferences/",
    response_model=IGetResponseBase[Page[TextInferenceRead]],
)
async def get_text_classification_inferences(
    params: Params = Depends(),
    db_session: AsyncSession = Depends(deps.get_db),
    current_user: User = Depends(deps.get_current_active_user),
):
    inferences = await crud.text_inference.get_multi_paginated(
        db_session, params=params
    )
    return IGetResponseBase[Page[TextInferenceRead]](data=inferences)
@router.get(
    "/text-classification-inferences/order_by_created_at/",
    response_model=IGetResponseBase[Page[TextInferenceRead]],
)
async def text_classification_inferences_order_by_created_at(
    params: Params = Depends(),
    db_session: AsyncSession = Depends(deps.get_db),
    current_user: User = Depends(deps.get_current_active_user),
):
    query = select(TextInference).order_by(TextInference.created_at)
    inferences = await crud.text_inference.get_multi_paginated(
        db_session, query=query, params=params
    )
    return IGetResponseBase[Page[TextInferenceRead]](data=inferences)
@router.post(
    "/text-classification-predict/", response_model=IPostResponseBase[TextInferenceRead]
)
async def predict(
    request: TextInferenceBase,
    db_session: AsyncSession = Depends(deps.get_db),
    current_user: User = Depends(deps.get_current_active_user),
):
    text = request.text
    result = await analyze_text(text)
    text = result[0]
    res = result[1]
    inference = TextInferenceCreate(text=text, result=res)
    my_inference = await crud.text_inference.create_inference(
        db_session, obj_in=inference, user_id=current_user.id
    )
    return IPostResponseBase(data=TextInferenceRead.from_orm(my_inference))
|
[
"sqlmodel.select"
] |
[((620, 631), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (629, 631), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((818, 827), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (825, 827), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((860, 880), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (867, 880), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((907, 944), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (914, 944), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1345, 1354), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (1352, 1354), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1387, 1407), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1394, 1407), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1434, 1471), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1441, 1471), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1921, 1941), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1928, 1941), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1968, 2005), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1975, 2005), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((2131, 2173), 'app.schemas.text_inference.TextInferenceCreate', 'TextInferenceCreate', ([], {'text': 'text', 'result': 'res'}), '(text=text, result=res)\n', (2150, 2173), False, 'from app.schemas.text_inference import TextInferenceCreate, TextInferenceRead\n'), ((972, 1038), 'app.crud.text_inference.get_multi_paginated', 'crud.text_inference.get_multi_paginated', (['db_session'], {'params': 'params'}), '(db_session, params=params)\n', (1011, 1038), False, 'from app import crud\n'), ((1568, 1647), 'app.crud.text_inference.get_multi_paginated', 'crud.text_inference.get_multi_paginated', (['db_session'], {'query': 'query', 'params': 'params'}), '(db_session, query=query, params=params)\n', (1607, 1647), False, 'from app import crud\n'), ((2054, 2072), 'app.utils.text_nlp.analyze_text', 'analyze_text', (['text'], {}), '(text)\n', (2066, 2072), False, 'from app.utils.text_nlp import analyze_text\n'), ((2200, 2296), 'app.crud.text_inference.create_inference', 'crud.text_inference.create_inference', (['db_session'], {'obj_in': 'inference', 'user_id': 'current_user.id'}), '(db_session, obj_in=inference, user_id=\n current_user.id)\n', (2236, 2296), False, 'from app import crud\n'), ((1488, 1509), 'sqlmodel.select', 'select', (['TextInference'], {}), '(TextInference)\n', (1494, 1509), False, 'from sqlmodel import select\n'), ((2341, 2381), 'app.schemas.text_inference.TextInferenceRead.from_orm', 'TextInferenceRead.from_orm', (['my_inference'], {}), '(my_inference)\n', (2367, 2381), False, 'from app.schemas.text_inference import TextInferenceCreate, TextInferenceRead\n')]
|
import os
from fastapi import FastAPI
from sqlmodel import create_engine, SQLModel
from .configurations import env
from .models import * # init models package
class AppFactory(object):
    def __init__(self):
        self._app = None
    @staticmethod
    def _get_all_router():
        from pigeon.blog.services.routers import __all_routers__
        return __all_routers__
    def _apply_router(self):
        if not isinstance(self._app, FastAPI):
            raise RuntimeError("self._app isn't initialized.")
        routers = AppFactory._get_all_router()
        for r in routers:
            self._app.include_router(r)
    def _ensure_sql(self):
        if not isinstance(self._app, FastAPI):
            return
        @self._app.on_event("startup")
        def sql_startup():
            engine = get_engine()
            SQLModel.metadata.create_all(engine)
        @self._app.on_event("shutdown")
        def sql_shutdown():
            pass
    def __call__(self, *args, **kwargs):
        self._app = FastAPI(
            title="Pigeon Blog",
        )
        self._apply_router()
        self._ensure_sql()
        return self._app
|
[
"sqlmodel.SQLModel.metadata.create_all"
] |
[((1026, 1054), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Pigeon Blog"""'}), "(title='Pigeon Blog')\n", (1033, 1054), False, 'from fastapi import FastAPI\n'), ((841, 877), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (869, 877), False, 'from sqlmodel import create_engine, SQLModel\n')]
|
#!/usr/bin/env python3
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
from typing import Callable, Union
import megengine as mge
import megengine.functional as F
import megengine.module as M
from .activations import activation
__all__ = ["conv2d", "norm2d", "pool2d", "gap2d", "linear", "SE", "DropPath"]
def conv2d(
w_in: int,
w_out: int,
k: int,
*,
stride: int = 1,
dilation: int = 1,
groups: int = 1,
bias: bool = False,
) -> M.Conv2d:
"""Helper for building a conv2d layer.
It will calculate padding automatically.
Args:
w_in: input width.
w_out: output width.
k: kernel size.
stride: stride. Default: ``1``
dilation: dilation. Default: ``1``
groups: groups. Default: ``1``
bias: enable bias or not. Default: ``False``
Returns:
A conv2d module.
"""
assert k % 2 == 1, "Only odd size kernels supported to avoid padding issues."
s, p, d, g, b = stride, (k - 1) * dilation // 2, dilation, groups, bias
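    # Added note (illustration): padding is (k - 1) * dilation // 2, so for k=3 and dilation=1
    # the padding is 1 and spatial size is preserved when stride=1, e.g. conv2d(64, 128, 3)
    # builds M.Conv2d(64, 128, 3, stride=1, padding=1, dilation=1, groups=1, bias=False).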
return M.Conv2d(w_in, w_out, k, stride=s, padding=p, dilation=d, groups=g, bias=b)
def norm2d(name: Union[str, Callable], w_in: int, **kwargs) -> M.Module:
"""Helper for building a norm2d layer.
Args:
        name: normalization name, supports ``None``, ``"BN"``, ``"GN"``, ``"IN"``, ``"LN"``
and ``"SyncBN"``.
w_in: input width.
Returns:
A norm2d module.
"""
if name is None:
return M.Identity()
if callable(name):
return name(w_in, **kwargs)
if isinstance(name, str):
norm_funcs = {
"BN": M.BatchNorm2d,
"GN": M.GroupNorm,
"IN": M.InstanceNorm,
"LN": M.LayerNorm,
"SyncBN": M.SyncBatchNorm,
}
if name in norm_funcs.keys():
return norm_funcs[name](w_in, **kwargs)
raise ValueError(f"Norm name '{name}' not supported")
def pool2d(k: int, *, stride: int = 1, name: str = "max") -> M.Module:
"""Helper for building a pool2d layer.
Args:
k: kernel size.
stride: stride. Default: ``1``
name: pooling name, supports ``"avg"`` and ``"max"``.
Returns:
A pool2d module.
"""
assert k % 2 == 1, "Only odd size kernels supported to avoid padding issues."
pool_funcs = {
"avg": M.AvgPool2d,
"max": M.MaxPool2d,
}
if name not in pool_funcs.keys():
raise ValueError(f"Pool name '{name}' not supported")
return pool_funcs[name](k, stride=stride, padding=(k - 1) // 2)
def gap2d(shape=1) -> M.AdaptiveAvgPool2d:
"""Helper for building a gap2d layer.
Args:
shape: output shape. Default: ``1``
Returns:
A gap2d module.
"""
return M.AdaptiveAvgPool2d(shape)
def linear(w_in: int, w_out: int, *, bias: bool = False) -> M.Linear:
"""Helper for building a linear layer.
Args:
w_in: input width.
w_out: output width.
bias: enable bias or not. Default: ``False``
Returns:
A linear module.
"""
return M.Linear(w_in, w_out, bias=bias)
class SE(M.Module):
"""Squeeze-and-Excitation (SE) block: AvgPool, FC, Act, FC, Sigmoid.
Args:
w_in: input width.
w_se: se width.
act_name: activation name.
approx_sigmoid: approximated sigmoid function.
Attributes:
        avg_pool: gap2d layer.
        f_ex: sequential which combines conv2d -> act -> conv2d -> sigmoid.
"""
def __init__(self, w_in: int, w_se: int, act_name: str, approx_sigmoid: bool = False):
super().__init__()
self.avg_pool = gap2d()
self.f_ex = M.Sequential(
conv2d(w_in, w_se, 1, bias=True),
activation(act_name),
conv2d(w_se, w_in, 1, bias=True),
activation("hsigmoid") if approx_sigmoid else M.Sigmoid(),
)
def forward(self, x: mge.Tensor) -> mge.Tensor:
return x * self.f_ex(self.avg_pool(x))
class DropPath(M.Dropout):
"""DropPath block.
Args:
drop_prob: the probability to drop (set to zero) each path.
"""
def forward(self, x: mge.Tensor):
if not self.training or self.drop_prob == 0.0:
return x
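        # Added note: the mask has one entry per sample (shape (N, 1, ..., 1)), so whole residual
        # paths are dropped; F.dropout also rescales the kept paths by 1 / (1 - drop_prob).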
shape = (x.shape[0],) + (1,) * (x.ndim - 1)
mask = F.ones(shape)
mask = F.dropout(mask, self.drop_prob, training=self.training)
return x * mask
|
[
"megengine.module.AdaptiveAvgPool2d",
"megengine.module.Identity",
"megengine.module.Sigmoid",
"megengine.module.Linear",
"megengine.functional.dropout",
"megengine.functional.ones",
"megengine.module.Conv2d"
] |
[((1058, 1133), 'megengine.module.Conv2d', 'M.Conv2d', (['w_in', 'w_out', 'k'], {'stride': 's', 'padding': 'p', 'dilation': 'd', 'groups': 'g', 'bias': 'b'}), '(w_in, w_out, k, stride=s, padding=p, dilation=d, groups=g, bias=b)\n', (1066, 1133), True, 'import megengine.module as M\n'), ((2780, 2806), 'megengine.module.AdaptiveAvgPool2d', 'M.AdaptiveAvgPool2d', (['shape'], {}), '(shape)\n', (2799, 2806), True, 'import megengine.module as M\n'), ((3100, 3132), 'megengine.module.Linear', 'M.Linear', (['w_in', 'w_out'], {'bias': 'bias'}), '(w_in, w_out, bias=bias)\n', (3108, 3132), True, 'import megengine.module as M\n'), ((1500, 1512), 'megengine.module.Identity', 'M.Identity', ([], {}), '()\n', (1510, 1512), True, 'import megengine.module as M\n'), ((4325, 4338), 'megengine.functional.ones', 'F.ones', (['shape'], {}), '(shape)\n', (4331, 4338), True, 'import megengine.functional as F\n'), ((4354, 4409), 'megengine.functional.dropout', 'F.dropout', (['mask', 'self.drop_prob'], {'training': 'self.training'}), '(mask, self.drop_prob, training=self.training)\n', (4363, 4409), True, 'import megengine.functional as F\n'), ((3881, 3892), 'megengine.module.Sigmoid', 'M.Sigmoid', ([], {}), '()\n', (3890, 3892), True, 'import megengine.module as M\n')]
|
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSON
from sqlmodel import Field, Relationship
from api.db.models.base import BaseModel, BaseTable
class OutOfBandBase(BaseModel):
msg_type: str = Field(nullable=False)
msg: dict = Field(default={}, sa_column=Column(JSON))
sender_id: uuid.UUID = None
recipient_id: uuid.UUID = None
sandbox_id: uuid.UUID = None
action: Optional[str] = Field(nullable=True)
class OutOfBand(OutOfBandBase, BaseTable, table=True):
__tablename__ = "out_of_band"
    # optional in the base model; required on save
sender_id: uuid.UUID = Field(foreign_key="line_of_business.id")
recipient_id: uuid.UUID = Field(foreign_key="line_of_business.id")
sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id")
# relationships
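    # Added note: sender_id and recipient_id both reference line_of_business.id, so each
    # Relationship needs an explicit primaryjoin to tell SQLAlchemy which foreign key to use.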
sender: Optional["Lob"] = Relationship( # noqa: F821
sa_relationship_kwargs={
"primaryjoin": "OutOfBand.sender_id==Lob.id",
"lazy": "joined",
}
)
recipient: Optional["Lob"] = Relationship( # noqa: F821
sa_relationship_kwargs={
"primaryjoin": "OutOfBand.recipient_id==Lob.id",
"lazy": "joined",
}
)
class Config:
arbitrary_types_allowed = True
class OutOfBandCreate(OutOfBandBase):
pass
class OutOfBandRead(OutOfBandBase):
id: uuid.UUID
created_at: datetime
updated_at: datetime
class OutOfBandUpdate(BaseModel):
id: uuid.UUID
name: Optional[str] = None
action: Optional[str] = None
|
[
"sqlmodel.Relationship",
"sqlmodel.Field"
] |
[((297, 318), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (302, 318), False, 'from sqlmodel import Field, Relationship\n'), ((505, 525), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)'}), '(nullable=True)\n', (510, 525), False, 'from sqlmodel import Field, Relationship\n'), ((683, 723), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""line_of_business.id"""'}), "(foreign_key='line_of_business.id')\n", (688, 723), False, 'from sqlmodel import Field, Relationship\n'), ((754, 794), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""line_of_business.id"""'}), "(foreign_key='line_of_business.id')\n", (759, 794), False, 'from sqlmodel import Field, Relationship\n'), ((823, 854), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""sandbox.id"""'}), "(foreign_key='sandbox.id')\n", (828, 854), False, 'from sqlmodel import Field, Relationship\n'), ((906, 1011), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'primaryjoin': 'OutOfBand.sender_id==Lob.id', 'lazy': 'joined'}"}), "(sa_relationship_kwargs={'primaryjoin':\n 'OutOfBand.sender_id==Lob.id', 'lazy': 'joined'})\n", (918, 1011), False, 'from sqlmodel import Field, Relationship\n'), ((1104, 1212), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'primaryjoin': 'OutOfBand.recipient_id==Lob.id', 'lazy': 'joined'}"}), "(sa_relationship_kwargs={'primaryjoin':\n 'OutOfBand.recipient_id==Lob.id', 'lazy': 'joined'})\n", (1116, 1212), False, 'from sqlmodel import Field, Relationship\n'), ((363, 375), 'sqlalchemy.Column', 'Column', (['JSON'], {}), '(JSON)\n', (369, 375), False, 'from sqlalchemy import Column\n')]
|
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, select
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
secret_name: str
age: Optional[int] = None
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def create_heroes():
hero_1 = Hero(name="Deadpond", secret_name="<NAME>")
hero_2 = Hero(name="Spider-Boy", secret_name="<NAME>")
hero_3 = Hero(name="Rusty-Man", secret_name="<NAME>", age=48)
hero_4 = Hero(name="Tarantula", secret_name="<NAME>", age=32)
hero_5 = Hero(name="<NAME>", secret_name="<NAME>", age=35)
hero_6 = Hero(name="<NAME>", secret_name="<NAME>", age=36)
hero_7 = Hero(name="Captain North America", secret_name="<NAME>", age=93)
with Session(engine) as session:
session.add(hero_1)
session.add(hero_2)
session.add(hero_3)
session.add(hero_4)
session.add(hero_5)
session.add(hero_6)
session.add(hero_7)
session.commit()
def select_heroes():
with Session(engine) as session:
statement = select(Hero).where(Hero.id == 1)
results = session.exec(statement)
hero = results.first()
print("Hero:", hero)
def main():
create_db_and_tables()
create_heroes()
select_heroes()
if __name__ == "__main__":
main()
|
[
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.create_engine"
] |
[((351, 387), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (364, 387), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((158, 195), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (163, 195), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((422, 458), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (450, 458), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((944, 959), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (951, 959), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1226, 1241), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1233, 1241), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1274, 1286), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1280, 1286), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')]
|
"""Add schools
Revision ID: 423e059e8b64
Revises: 58d2280520b8
Create Date: 2022-02-12 07:44:42.189067+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "423e059e8b64"
down_revision = "58d2280520b8"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"schools",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.add_column("applications", sa.Column("school_id", sa.Integer(), nullable=False))
op.create_foreign_key(None, "applications", "schools", ["school_id"], ["id"])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "applications", type_="foreignkey")
op.drop_column("applications", "school_id")
op.drop_table("schools")
# ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((710, 787), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""schools"""', "['school_id']", "['id']"], {}), "(None, 'applications', 'schools', ['school_id'], ['id'])\n", (731, 787), False, 'from alembic import op\n'), ((912, 972), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""applications"""'], {'type_': '"""foreignkey"""'}), "(None, 'applications', type_='foreignkey')\n", (930, 972), False, 'from alembic import op\n'), ((977, 1020), 'alembic.op.drop_column', 'op.drop_column', (['"""applications"""', '"""school_id"""'], {}), "('applications', 'school_id')\n", (991, 1020), False, 'from alembic import op\n'), ((1025, 1049), 'alembic.op.drop_table', 'op.drop_table', (['"""schools"""'], {}), "('schools')\n", (1038, 1049), False, 'from alembic import op\n'), ((581, 610), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (604, 610), True, 'import sqlalchemy as sa\n'), ((463, 475), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (473, 475), True, 'import sqlalchemy as sa\n'), ((520, 554), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (552, 554), False, 'import sqlmodel\n'), ((675, 687), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (685, 687), True, 'import sqlalchemy as sa\n')]
|
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from collections import OrderedDict
from enum import Enum
from functools import cmp_to_key
from typing import Set # pylint: disable=unused-import
from typing import Callable, Dict, Sequence
import numpy as np
from megengine import Tensor
from megengine.functional import sqrt
from ..converter_ir.ir_graph import IRGraph
from .ir_op import (
AddOpr,
Conv2dOpr,
ConvRelu2dOpr,
Deconv2dOpr,
DropoutOpr,
ExpOpr,
FlattenOpr,
FuseMulAdd3Opr,
GetSubTensorOpr,
HardSigmoidOpr,
HardSwishOpr,
IdentityOpr,
LeakyReluOpr,
MulOpr,
OpBase,
PadOpr,
ReduceOpr,
ReluOpr,
ReshapeOpr,
ResizeOpr,
SoftmaxOpr,
SqueezeOpr,
SubOpr,
TanHOpr,
TransposeOpr,
TrueDivOpr,
_PoolOpr,
)
from .ir_tensor import AxisOrder, IRTensor
class TransformerRule(Enum):
# general rules
NOPE = 1
# for TFLite
REDUCE_AXIS_AS_INPUT = 100
REMOVE_RESHAPE_INPUT = 101
# FUSE_FOR_RELU6 pass should happen before FUSE_ACTIVATION
FUSE_FOR_RELU6 = 102 ##
EXPAND_CONVRELU = 102.1
CONV_ADD_ZERO_BIAS = 103
FUSE_FOR_CONV_BIAS = 103.1
FUSE_CONV_BN = 104
DECONV_ADD_ZERO_BIAS = 105
    # DEPTHWISE_CONV_RESHAPE_WEIGHT requires RESHAPE_BIAS_TO_1DIM
DEPTHWISE_CONV_RESHAPE_WEIGHT = 106
FUSE_SOFTMAX = 107
# RESHAPE_BIAS_TO_1DIM should happen before DECONV_SHAPE_AS_INPUT
RESHAPE_BIAS_TO_1DIM = 108
DECONV_SHAPE_AS_INPUT = 109
FUSE_ASTYPE = 110 ##
PADDING_FOR_CONV_AND_POOLING = 111
TRANSPOSE_PATTERN_AS_INPUT = 112
# FUSE_FOR_LEAKY_RELU should happen before EXPAND_MUL_ADD3
FUSE_FOR_LEAKY_RELU = 113
EXPAND_MUL_ADD3 = 114
EXPAND_ADD_SIGMOID = 115 ##
FUSE_FOR_DECONV_BIAS = 117
FUSE_FOR_FULLY_CONNECTED = 118 ##
# for TFLite Converter
SLICE_PARAMS_AS_INPUTS_AND_MAKE_SQUEEZE = 119
RESIZE_PARAMS_AS_INPUT = 120
REPLACE_FLATTEN_TO_RESHAPE = 120.1
# remove reshape
REMOVE_RESHAPE_REALTED_OP = 121
REMOVE_DROPOUT = 122
FUSE_ACTIVATION = 123
REMOVE_IDENTITY = 124
REMOVE_RELU = 125
REMOVE_UNRELATED_IROP = 130
ADD_FAKE_HSIGMOID_OUT = 131
RENAME_CAFFE_LAYER_TENSOR = 132
def cmp_rules(a, b):
if a.value < b.value:
return -1
if a.value > b.value:
return 1
return 0
class IRTransform:
def __init__(self, transformer_options):
if not isinstance(transformer_options, Sequence):
transformer_options = [
transformer_options,
]
# bias of depthwise_conv must be 1 dim
if TransformerRule.DEPTHWISE_CONV_RESHAPE_WEIGHT in transformer_options:
if TransformerRule.RESHAPE_BIAS_TO_1DIM not in transformer_options:
transformer_options.append(TransformerRule.RESHAPE_BIAS_TO_1DIM)
self.trans_options = sorted(transformer_options, key=cmp_to_key(cmp_rules))
def transform(self, ir_graph):
for option in self.trans_options:
TRANSFORMMAP[option](ir_graph)
return ir_graph
TRANSFORMMAP: Dict[Enum, Callable] = {}
def _register_tranformation_rule(transformer_option):
def callback(impl):
        TRANSFORMMAP[transformer_option] = impl
        return impl
return callback
def cal_pad_mode(tm_opr):
out_shape = tm_opr.out_tensors[0].shape
inp_shape = tm_opr.inp_tensors[0].shape
if out_shape[2:] == inp_shape[2:]:
return "SAME"
else:
return "VALID"
@_register_tranformation_rule(TransformerRule.REMOVE_RESHAPE_INPUT)
def _remove_reshape_input(net):
for op in net.all_oprs:
if not isinstance(op, ReshapeOpr):
continue
if len(op.inp_tensors) == 2:
del op.inp_tensors[1]
@_register_tranformation_rule(TransformerRule.TRANSPOSE_PATTERN_AS_INPUT)
def _transpose_pattern_as_input(net):
for op in net.all_oprs:
if not isinstance(op, TransposeOpr):
continue
perm_tensor = IRTensor(
name=op.inp_tensors[0].name + "_perm",
shape=np.array(op.pattern).shape,
dtype=np.int32,
np_data=np.array(op.pattern, dtype=np.int32),
owner_opr=op,
q_type=np.int32,
axis=None,
)
op.add_inp_tensors(perm_tensor)
@_register_tranformation_rule(TransformerRule.REDUCE_AXIS_AS_INPUT)
def _reduce_axis_as_input(net):
for op in net.all_oprs:
if not isinstance(op, ReduceOpr):
continue
axis_tensor = IRTensor(
name=op.inp_tensors[0].name + "_axis",
shape=[1],
dtype=np.int32,
np_data=np.array(op.axis, dtype=np.int32),
owner_opr=op,
q_type=np.int32,
axis=None,
)
op.add_inp_tensors(axis_tensor)
@_register_tranformation_rule(TransformerRule.PADDING_FOR_CONV_AND_POOLING)
def _make_padding(net: IRGraph):
def have_padding(opr):
if isinstance(opr, Conv2dOpr):
if cal_pad_mode(opr) == "SAME":
return False
if hasattr(opr, "padding") and (opr.padding[0] > 0 or opr.padding[1] > 0):
return True
return False
insert_intended = OrderedDict() # type: OrderedDict
for op in net.all_oprs:
if not isinstance(op, (Conv2dOpr, _PoolOpr)):
continue
if have_padding(op):
assert op.inp_tensors[0].ndim == 4, "ERROR: unsupported padding mode"
np_data = np.array(
[
0,
0,
op.padding[0],
op.padding[0],
op.padding[1],
op.padding[1],
0,
0,
],
dtype=np.int32,
)
new_tensor_id = max(net._tensor_ids) + 1
pad_in_tensor = IRTensor(
name=op.inp_tensors[0].name + "_paddings",
shape=[4, 2],
dtype=np.int32,
owner_opr=None,
np_data=np_data,
q_type=np.int32,
axis=None,
)
net.add_tensor(new_tensor_id, pad_in_tensor)
shape = list(op.inp_tensors[0].shape)
new_tensor_id = max(net._tensor_ids) + 1
pad_out_tensor = IRTensor(
name=op.inp_tensors[0].name + "_pad_out",
shape=[
shape[0],
shape[1],
shape[2] + op.padding[0] * 2,
shape[3] + op.padding[1] * 2,
],
dtype=op.inp_tensors[0].dtype,
)
if (
hasattr(op.inp_tensors[0], "scale")
and op.inp_tensors[0].scale is not None
):
pad_out_tensor.scale = op.inp_tensors[0].scale
pad_out_tensor.q_dtype = op.inp_tensors[0].q_dtype
if hasattr(op.inp_tensors[0], "zero_point"):
pad_out_tensor.zero_point = op.inp_tensors[0].zero_point
net.add_tensor(new_tensor_id, pad_out_tensor)
pad_opr = PadOpr()
pad_opr.inp_tensors = [op.inp_tensors[0], pad_in_tensor]
index = op.inp_tensors[0].user_opr.index(op)
op.inp_tensors[0].user_opr[index] = pad_opr
pad_opr.out_tensors = [pad_out_tensor]
pad_out_tensor.owner_opr = pad_opr
op.inp_tensors = [pad_out_tensor] + op.inp_tensors[1:]
pad_out_tensor.user_opr.append(op)
index = net._opr_ids.index(id(op))
insert_intended[index] = (id(pad_opr), pad_opr)
for index, generated_pair in list(insert_intended.items())[::-1]:
net._opr_ids.insert(index, generated_pair[0])
net.all_oprs.insert(index, generated_pair[1])
@_register_tranformation_rule(TransformerRule.DECONV_SHAPE_AS_INPUT)
def _deconv_shape_as_input(net: IRGraph):
for op in net.all_oprs:
if not isinstance(op, Deconv2dOpr):
continue
result_shape = op.out_tensors[0].shape
np_data = np.array(
[result_shape[0], result_shape[2], result_shape[3], result_shape[1],],
dtype=np.int32,
)
new_tensor_id = max(net._tensor_ids) + 1
shape_symvar = IRTensor(
name=op.inp_tensors[0].name + "_deconv_out_shape",
shape=[4],
dtype=np.int32,
owner_opr=op,
np_data=np_data,
q_type=np.int32,
axis=None,
)
shape_tensor = net.get_tensor(new_tensor_id, shape_symvar)
if len(op.inp_tensors) == 2:
op.inp_tensors = [
shape_tensor,
op.inp_tensors[1],
op.inp_tensors[0],
]
else:
op.inp_tensors = [
shape_tensor,
op.inp_tensors[1],
op.inp_tensors[0],
op.inp_tensors[2],
]
@_register_tranformation_rule(TransformerRule.RESIZE_PARAMS_AS_INPUT)
def _resize_params_as_input(net):
for op in net.all_oprs:
if not isinstance(op, ResizeOpr):
continue
if len(op.inp_tensors) == 2:
continue
out_size_tensor = IRTensor(
name=op.inp_tensors[0].name + "_out_size",
shape=(2,),
dtype=np.int32,
np_data=np.array(op.out_size, dtype=np.int32),
q_type=np.int32,
axis=None,
)
op.add_inp_tensors(out_size_tensor)
@_register_tranformation_rule(TransformerRule.CONV_ADD_ZERO_BIAS)
def _add_bias_for_conv(net: IRGraph):
for op in net.all_oprs:
if not isinstance(op, Conv2dOpr):
continue
if len(op.inp_tensors) == 3:
continue
weight_shape = op.inp_tensors[1].shape
bias_shape = (
weight_shape[0]
if len(weight_shape) == 4
else weight_shape[0] * weight_shape[1]
)
bias_shape = (1, bias_shape, 1, 1)
bias = np.zeros(bias_shape, dtype=np.float32)
bias_tensor = IRTensor(
name=op.inp_tensors[0].name + "_bias",
shape=bias_shape,
dtype=np.float32,
np_data=bias,
axis=AxisOrder.NCHW,
)
if op.inp_tensors[0].scale and op.inp_tensors[1].scale:
bias_tensor.set_qparams(
op.inp_tensors[0].scale * op.inp_tensors[1].scale, 0
)
bias_tensor.q_dtype = "int32"
op.inp_tensors.append(bias_tensor)
@_register_tranformation_rule(TransformerRule.DECONV_ADD_ZERO_BIAS)
def _add_bias_for_deconv(net: IRGraph):
for op in net.all_oprs:
if not isinstance(op, Deconv2dOpr):
continue
if len(op.inp_tensors) == 3:
continue
weight_shape = op.inp_tensors[1].shape
bias_shape = (
weight_shape[1]
if len(weight_shape) == 4
else weight_shape[0] * weight_shape[2]
)
bias_shape = (1, bias_shape, 1, 1)
bias = np.zeros(bias_shape, dtype=np.float32)
bias_tensor = IRTensor(
name=op.inp_tensors[0].name + "_bias",
shape=bias_shape,
dtype=np.float32,
np_data=bias,
axis=AxisOrder.NCHW,
)
if op.inp_tensors[0].scale and op.inp_tensors[1].scale:
bias_tensor.set_qparams(
op.inp_tensors[0].scale * op.inp_tensors[1].scale, 0
)
bias_tensor.q_dtype = "int32"
op.inp_tensors.append(bias_tensor)
@_register_tranformation_rule(TransformerRule.RESHAPE_BIAS_TO_1DIM)
def _reshape_bias_to_1dim(net: IRGraph):
for op in net.all_oprs:
if not isinstance(op, (Deconv2dOpr, Conv2dOpr)):
continue
if len(op.inp_tensors) == 2:
continue
bias = op.inp_tensors[2]
if bias.ndim == 4:
bias.shape = (bias.shape[1],)
bias.np_data = bias.np_data.reshape(-1)
@_register_tranformation_rule(TransformerRule.DEPTHWISE_CONV_RESHAPE_WEIGHT)
def _depthwise_conv_reshape_weight(net: IRGraph):
# general group conv is not supported for TFLite
for op in net.all_oprs:
if not isinstance(op, Conv2dOpr):
continue
if op.groups == 1:
continue
weight = op.inp_tensors[1] # G, oc/G, ic/G, kh, kw
ic, cm = weight.shape[1] * op.groups, weight.shape[2]
h, w = weight.shape[3:5]
weight.shape = (ic, cm, h, w) # oc, ic/G, kh, kw
weight.np_data = weight.np_data.reshape(ic, cm, h, w)
@_register_tranformation_rule(TransformerRule.FUSE_ACTIVATION)
def _fuse_activation(net):
delete_intended = []
for op_id, op in zip(net._opr_ids, net.all_oprs):
if isinstance(op, (ReluOpr, TanHOpr)):
prev_ops = net.find_inp_oprs(op)
if len(prev_ops) == 0:
continue
prev_op = prev_ops[0]
if not isinstance(prev_op, OpBase):
continue
if prev_op.activation != "IDENTITY" or prev_op.name == "Deconv2d":
continue
activation = op.name.upper()
prev_op.activation = activation
prev_op.out_tensors = op.out_tensors
for t in prev_op.out_tensors:
t.owner_opr = prev_op
delete_intended.append(net._opr_ids.index(op_id))
for delete_idx in delete_intended[::-1]:
net.delete_ops(delete_idx)
@_register_tranformation_rule(TransformerRule.SLICE_PARAMS_AS_INPUTS_AND_MAKE_SQUEEZE)
def _make_slice_as_inputs(net: IRGraph):
for op in net.all_oprs:
if not isinstance(op, GetSubTensorOpr):
continue
ndim = op.inp_tensors[0].ndim
def make_input(axis, param, init_value):
# make inputs: begin, end and step.
ret = [init_value] * ndim # pylint:disable=cell-var-from-loop
for k, v in zip(axis, param):
ret[k] = v
ret = IRTensor(
name=op.name + "_fake_input", # pylint:disable=cell-var-from-loop
shape=[len(ret)],
dtype=np.int32,
np_data=np.array(ret, dtype=np.int32),
owner_opr=op, # pylint:disable=cell-var-from-loop
q_type=np.int32,
)
return ret
begins_tensor = make_input(op.axis, op.begin_params, 0)
ends_tensor = make_input(op.axis, op.end_params, np.iinfo(np.int32).max)
steps_tensor = make_input(op.axis, op.step_params, 1)
op.inp_tensors = [op.inp_tensors[0], begins_tensor, ends_tensor, steps_tensor]
        # TFLite slice does not support squeeze axis, so insert a squeeze opr here.
# infer actual output shape of tflite slice
desired_out_shape = op.out_tensors[0].shape
actual_out_shape = [1] * ndim
idx = 0
for i in range(ndim):
if i in op.squeeze_axis:
continue
actual_out_shape[i] = desired_out_shape[idx]
idx += 1
slice_out_tensor = IRTensor(
name=op.name + "fake_output",
shape=actual_out_shape,
dtype=op.out_tensors[0].dtype,
q_type=op.out_tensors[0].q_dtype,
owner_opr=op,
)
old_out = op.out_tensors
op.out_tensors = [slice_out_tensor]
squeeze = SqueezeOpr(op.squeeze_axis)
squeeze.inp_tensors = [slice_out_tensor]
squeeze.out_tensors = old_out
idx = net._opr_ids.index(id(op)) + 1
net.add_op(squeeze, idx)
# caffe transformer rules
class PatternNode:
def __init__(self, type, is_output=False, const_value=None):
self.op = None
self.type = type
self.inp_oprs = []
self.inp_const = []
self.inp_tensors = []
self.is_output = is_output
self.const_value = const_value
def check_const_value(self, op):
inp_tensors = [v.np_data for v in op.inp_tensors]
for const in self.const_value:
idx = const[0]
if idx == -1:
find = False
for index, v in enumerate(inp_tensors):
if np.array_equal(const[1], v):
find = True
del inp_tensors[index]
break
if not find:
return False
elif not np.array_equal(const[1], inp_tensors[idx]):
return False
return True
get_type = lambda op: type(op).__name__
def match(node, opr):
node_queue = [node]
opr_queue = [opr]
matched_opr = set()
matched_node = set()
while len(node_queue) != 0:
cur_node = node_queue.pop(0)
cur_opr = opr_queue.pop(0)
if cur_node.type != get_type(cur_opr) and cur_node.type != "*" or cur_opr.skip:
return False
if cur_node.op == None:
cur_node.op = cur_opr
if cur_node.const_value != None:
if not cur_node.check_const_value(cur_opr):
return False
elif cur_node.op != cur_opr:
return False
matched_opr.add(cur_opr)
matched_node.add(cur_node)
for i, var in enumerate(cur_opr.inp_tensors):
if var.np_data is not None:
cur_node.inp_const.append([i, var.np_data])
else:
cur_node.inp_tensors.append([i, var])
if len(cur_node.inp_oprs) == 0:
continue
if len(cur_node.inp_oprs) != len(cur_opr.inp_oprs):
return False
for i, j in zip(cur_node.inp_oprs, cur_opr.inp_oprs):
node_queue.append(i)
opr_queue.append(j)
for n in matched_node:
if n.is_output:
continue
for op in n.op.out_oprs:
if op not in matched_opr:
return False
return True
def get_softmax_axis(ndim: int) -> int:
if ndim in (0, 1, 3):
return 0
return 1
@_register_tranformation_rule(TransformerRule.FUSE_SOFTMAX)
def _fuse_softmax(net: IRGraph):
matches = OrderedDict() # type: OrderedDict
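    # Added note: the matched chain Reduce(MAX) -> Sub -> Exp -> Reduce(SUM) -> TrueDiv
    # computes exp(x - max(x)) / sum(exp(x - max(x))), i.e. a numerically stable softmax,
    # so it is collapsed into a single SoftmaxOpr below.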
for op in net.all_oprs:
if not isinstance(op, TrueDivOpr):
continue
try:
prev_op = net.find_inp_oprs(op)[1]
cur_index = net._opr_ids.index(id(op))
if (
not isinstance(prev_op, ReduceOpr)
or prev_op.mode != "SUM"
or prev_op.axis != get_softmax_axis(prev_op.inp_tensors[0].ndim)
or net._opr_ids.index(id(prev_op)) != cur_index - 1
):
continue
prev_op = net.find_inp_oprs(op)[0]
if (
not isinstance(prev_op, ExpOpr)
or net._opr_ids.index(id(prev_op)) != cur_index - 2
):
continue
prev_op = net.find_inp_oprs(prev_op)[0]
if (
not isinstance(prev_op, SubOpr)
or net._opr_ids.index(id(prev_op)) != cur_index - 3
):
continue
prev_op = net.find_inp_oprs(prev_op)[1]
if (
not isinstance(prev_op, ReduceOpr)
or prev_op.mode != "MAX"
or prev_op.axis != get_softmax_axis(prev_op.inp_tensors[0].ndim)
or net._opr_ids.index(id(prev_op)) != cur_index - 4
):
continue
except IndexError: # doesn't match
continue
softmax_opr = SoftmaxOpr(axis=get_softmax_axis(prev_op.inp_tensors[0].ndim))
softmax_opr.beta = 1
softmax_opr.inp_tensors = prev_op.inp_tensors[:1]
for i in softmax_opr.inp_tensors:
i.user_opr.append(softmax_opr)
softmax_opr.out_tensors = op.out_tensors
softmax_out_oprs = net.find_out_oprs(op)
matches[id(prev_op)] = (id(prev_op), softmax_opr, softmax_out_oprs)
for original_id, generated_pair in list(matches.items())[::-1]:
index = net._opr_ids.index(original_id)
for out_op in generated_pair[2]:
generated_pair[1].out_tensors[0].user_opr.append(out_op)
del net._opr_ids[index : index + 5]
del net.all_oprs[index : index + 5]
net._opr_ids.insert(index, generated_pair[0])
net.all_oprs.insert(index, generated_pair[1])
@_register_tranformation_rule(TransformerRule.FUSE_FOR_LEAKY_RELU)
def _fuse_leaky_relu(net: IRGraph):
"""
Elemwise(ADD) + Elemwise(MUL) + Elemwise(MAX) + Elemwise(MIN) -> LeakyRelu
"""
for opr in net.all_oprs:
if (
opr.name == "Add"
and len(net.find_inp_oprs(opr)) == 2
and net.find_inp_oprs(opr)[0].name == "Max"
and net.find_inp_oprs(opr)[1].name == "Mul"
):
max_op = net.find_inp_oprs(opr)[0]
mul_op = net.find_inp_oprs(opr)[1]
if not mul_op.inp_tensors[1].shape == (1,):
continue
if not max_op.inp_tensors[1].shape == (1,):
continue
if (
len(net.find_inp_oprs(mul_op)) != 1
or net.find_inp_oprs(mul_op)[0].name != "Min"
or net.find_inp_oprs(mul_op)[0].inp_tensors[1].shape != (1,)
):
continue
min_op = net.find_inp_oprs(mul_op)[0]
if not min_op.inp_tensors[1].shape == (1,):
continue
if max_op.inp_tensors[0] != min_op.inp_tensors[0]:
continue
leaky_relu = LeakyReluOpr(
negative_slope=float(mul_op.inp_tensors[1].np_data)
)
leaky_relu.inp_tensors = [max_op.inp_tensors[0]]
max_op.inp_tensors[0].user_opr.remove(max_op)
max_op.inp_tensors[0].user_opr.remove(min_op)
max_op.inp_tensors[0].user_opr.append(leaky_relu)
leaky_relu.out_tensors = opr.out_tensors
opr.out_tensors[0].owner_opr = leaky_relu
index = net.all_oprs.index(max_op)
del net.all_oprs[index : index + 4]
del net._opr_ids[index : index + 4]
net.add_op(leaky_relu, index)
@_register_tranformation_rule(TransformerRule.FUSE_FOR_CONV_BIAS)
def _fuse_for_conv_bias(net: IRGraph):
"""
ConvolutionForward + Elemwise(ADD) -> ConvForwardBias
"""
for opr in net.all_oprs:
if (
opr.name == "Conv2d"
and len(net.find_out_oprs(opr)) == 1
and net.find_out_oprs(opr)[0].name == "Add"
):
bias_op = net.find_out_oprs(opr)[0]
if not (
(
bias_op.inp_tensors[1].np_data is not None
and len(bias_op.inp_tensors[1].np_data.reshape(-1))
== opr.inp_tensors[1].shape[0]
)
or (
(
bias_op.inp_tensors[0].np_data is not None
and len(bias_op.inp_tensors[0].np_data.reshape(-1))
== opr.inp_tensors[1].shape[0]
)
)
):
continue
bias_idx = 0 if bias_op.inp_tensors[0].np_data is not None else 1
if len(opr.inp_tensors) == 2:
opr.inp_tensors.append(bias_op.inp_tensors[bias_idx])
else:
bias_shape = opr.inp_tensors[2].np_data.shape
add_tensor = bias_op.inp_tensors[bias_idx].np_data
if add_tensor.shape != bias_shape:
add_tensor = add_tensor.reshape(bias_shape)
opr.inp_tensors[2].np_data += add_tensor
if bias_op in opr.out_tensors[0].user_opr:
opr.out_tensors[0].user_opr.remove(bias_op)
bias_out_op = net.find_out_oprs(bias_op)
if len(bias_out_op) > 0:
for op in bias_out_op:
op.inp_tensors[0] = opr.out_tensors[0]
opr.out_tensors[0].user_opr.append(op)
else:
# last op of the graph
assert bias_op.out_tensors[0] in net.graph_outputs
index = net.graph_outputs.index(bias_op.out_tensors[0])
net.graph_outputs[index] = opr.out_tensors[0]
opr.activation = bias_op.activation
index = net.all_oprs.index(bias_op)
del net.all_oprs[index]
del net._opr_ids[index]
@_register_tranformation_rule(TransformerRule.FUSE_FOR_DECONV_BIAS)
def _fuse_for_deconv_bias(net: IRGraph):
for opr in net.all_oprs:
if (
opr.name == "Deconv2d"
and len(net.find_out_oprs(opr)) == 1
and net.find_out_oprs(opr)[0].name == "Add"
):
bias_op = net.find_out_oprs(opr)[0]
if not (
(
bias_op.inp_tensors[1].np_data is not None
and len(bias_op.inp_tensors[1].np_data.reshape(-1))
== opr.inp_tensors[1].shape[1]
)
or (
(
bias_op.inp_tensors[0].np_data is not None
and len(bias_op.inp_tensors[0].np_data.reshape(-1))
== opr.inp_tensors[1].shape[1]
)
)
):
continue
bias_idx = 0 if bias_op.inp_tensors[0].np_data is not None else 1
if len(opr.inp_tensors) == 3: # shape, weight, input, bias
opr.inp_tensors.append(bias_op.inp_tensors[bias_idx])
else:
bias_shape = opr.inp_tensors[3].np_data.shape
add_tensor = bias_op.inp_tensors[bias_idx].np_data
if add_tensor.shape != bias_shape:
add_tensor = add_tensor.reshape(bias_shape)
opr.inp_tensors[3].np_data += add_tensor
if bias_op in opr.out_tensors[0].user_opr:
opr.out_tensors[0].user_opr.remove(bias_op)
bias_out_op = net.find_out_oprs(bias_op)
if len(bias_out_op) > 0:
for op in bias_out_op:
op.inp_tensors[0] = opr.out_tensors[0]
opr.out_tensors[0].user_opr.append(op)
else:
# last op of the graph
assert bias_op.out_tensors[0] in net.graph_outputs
index = net.graph_outputs.index(bias_op.out_tensors[0])
net.graph_outputs[index] = opr.out_tensors[0]
opr.activation = bias_op.activation
index = net.all_oprs.index(bias_op)
del net.all_oprs[index]
del net._opr_ids[index]
@_register_tranformation_rule(TransformerRule.EXPAND_MUL_ADD3)
def _expand_mul_add3(net: IRGraph):
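    # Added note: FuseMulAdd3 computes inp0 * inp1 + inp2 in a single opr; it is expanded here
    # into an explicit MulOpr followed by an AddOpr (the target formats are assumed to have
    # no fused equivalent).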
for op in net.all_oprs:
if not isinstance(op, FuseMulAdd3Opr):
continue
last_op = net.find_inp_oprs(op)
assert len(last_op) == 1
mul_out_tensor = IRTensor(
name=op.inp_tensors[0].name + "_mul_out",
shape=op.inp_tensors[0].shape,
dtype=op.inp_tensors[0].dtype,
)
new_tensor_id = max(net._tensor_ids) + 1
net.add_tensor(new_tensor_id, mul_out_tensor)
mul_op = MulOpr()
mul_out_tensor.owner_opr = mul_op
mul_op.inp_tensors = op.inp_tensors[:2]
for o in mul_op.inp_tensors:
index = o.user_opr.index(op)
o.user_opr[index] = mul_op
mul_op.out_tensors = [mul_out_tensor]
add_op = AddOpr()
add_op.inp_tensors = [mul_out_tensor, op.inp_tensors[2]]
mul_out_tensor.user_opr.append(add_op)
add_op.out_tensors = op.out_tensors
index = net._opr_ids.index(id(op))
net.delete_ops(index)
net.add_op(mul_op, index)
net.add_op(add_op, index + 1)
@_register_tranformation_rule(TransformerRule.REPLACE_FLATTEN_TO_RESHAPE)
def _replace_flatten_to_reshape(net: IRGraph):
for opr in net.all_oprs:
if isinstance(opr, FlattenOpr):
out_shape = tuple(list(opr.inp_tensors[0].shape[: opr.start_axis]) + [-1])
reshape_op = ReshapeOpr(out_shape=out_shape)
reshape_op.inp_tensors = opr.inp_tensors
for t in reshape_op.inp_tensors:
idx = t.user_opr.index(opr)
t.user_opr[idx] = reshape_op
reshape_op.out_tensors = opr.out_tensors
for t in reshape_op.out_tensors:
t.owner_opr = reshape_op
net.replace_op(opr, reshape_op)
@_register_tranformation_rule(TransformerRule.REMOVE_RESHAPE_REALTED_OP)
def _remove_reshape_tensors(net: IRGraph):
for opr in net.all_oprs:
if isinstance(opr, ReshapeOpr) and len(opr.inp_tensors) > 1:
opr.inp_tensors = opr.inp_tensors[:1]
@_register_tranformation_rule(TransformerRule.REMOVE_DROPOUT)
def _remove_dropout(net: IRGraph):
for opr in net.all_oprs:
for idx, inp in enumerate(opr.inp_tensors):
owner_opr = inp.owner_opr
if isinstance(owner_opr, DropoutOpr) and owner_opr.drop_prob == 0:
opr.inp_tensors[idx] = owner_opr.inp_tensors[0]
for idx, out in enumerate(net.graph_outputs):
owner_opr = out.owner_opr
if isinstance(owner_opr, DropoutOpr) and owner_opr.drop_prob == 0:
net.graph_outputs[idx] = owner_opr.inp_tensors[0]
@_register_tranformation_rule(TransformerRule.REMOVE_RELU)
def _remove_relu(net: IRGraph):
for opr in net.all_oprs:
for idx, inp in enumerate(opr.inp_tensors):
owner_opr = inp.owner_opr
if isinstance(owner_opr, ReluOpr):
opr.inp_tensors[idx] = owner_opr.inp_tensors[0]
for idx, out in enumerate(net.graph_outputs):
owner_opr = out.owner_opr
if isinstance(owner_opr, ReluOpr):
net.graph_outputs[idx] = owner_opr.inp_tensors[0]
visited_tensor = set() # type: set
def _dfs_recursive(op_set, tensor):
owner_opr = tensor.owner_opr
op_set.add(owner_opr)
if tensor in visited_tensor:
return
visited_tensor.add(tensor)
if isinstance(owner_opr, IRGraph) or owner_opr is None:
return
for tt in owner_opr.inp_tensors:
_dfs_recursive(op_set, tt)
@_register_tranformation_rule(TransformerRule.REMOVE_UNRELATED_IROP)
def _remove_unrelated_op(net: IRGraph):
match_sets = set() # type: Set[OpBase]
for out_tensor in net.graph_outputs:
_dfs_recursive(match_sets, out_tensor)
remove_idx = []
for opr in net.all_oprs:
if opr not in match_sets:
index = net._opr_ids.index(id(opr))
remove_idx.append(index)
for i in remove_idx[::-1]:
net.delete_ops(i)
@_register_tranformation_rule(TransformerRule.ADD_FAKE_HSIGMOID_OUT)
def _add_fake_hsigmoid_tensor(net: IRGraph):
for opr in net.all_oprs:
if isinstance(opr, (HardSwishOpr, HardSigmoidOpr)):
add_3_out_tensor = IRTensor(
opr.out_tensors[0].name + "_fake_add3_out",
opr.inp_tensors[0].shape,
opr.inp_tensors[0].dtype,
q_type=opr.inp_tensors[0].q_dtype,
scale=opr.inp_tensors[0].scale,
zero_point=opr.inp_tensors[0].zero_point,
)
opr.add_inp_tensors(add_3_out_tensor)
relu6_out_tensor = IRTensor(
opr.out_tensors[0].name + "_relu6_out",
opr.inp_tensors[0].shape,
opr.inp_tensors[0].dtype,
q_type=opr.inp_tensors[0].q_dtype,
scale=opr.inp_tensors[0].scale,
zero_point=opr.inp_tensors[0].zero_point,
)
opr.add_inp_tensors(relu6_out_tensor)
if isinstance(opr, HardSwishOpr):
div6_out_tensor = IRTensor(
opr.out_tensors[0].name + "_div_out",
opr.inp_tensors[0].shape,
opr.inp_tensors[0].dtype,
q_type=opr.inp_tensors[0].q_dtype,
scale=opr.inp_tensors[0].scale,
zero_point=opr.inp_tensors[0].zero_point,
)
opr.add_inp_tensors(div6_out_tensor)
def fold_conv_bn(
conv_weight, conv_bias, conv_groups, gamma, beta, bn_mean, bn_var, eps
):
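    # Added note: using the BN running statistics, BatchNorm(conv(x, W) + b) equals
    # conv(x, w_fold) + b_fold for the returned tensors (a per-channel affine fold).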
conv_bias = conv_bias.reshape(1, -1, 1, 1)
gamma = gamma.reshape(1, -1, 1, 1)
beta = beta.reshape(1, -1, 1, 1)
bn_mean = bn_mean.reshape(1, -1, 1, 1)
bn_var = bn_var.reshape(1, -1, 1, 1)
# bn_istd = 1 / bn_std
bn_istd = 1.0 / sqrt(bn_var + eps) # type: ignore[attr-defined]
# w_fold = gamma / bn_std * W
scale_factor = gamma * bn_istd
if conv_groups == 1:
w_fold = conv_weight * scale_factor.reshape(-1, 1, 1, 1)
else:
w_fold = conv_weight * scale_factor.reshape(conv_groups, -1, 1, 1, 1)
# b_fold = gamma * (b - bn_mean) / bn_std + beta
b_fold = beta + gamma * (conv_bias - bn_mean) * bn_istd
return w_fold, b_fold
@_register_tranformation_rule(TransformerRule.FUSE_CONV_BN)
def _fuse_conv_bn(net: IRGraph):
for opr in net.all_oprs:
if (
opr.name == "BatchNormalization"
and len(net.find_inp_oprs(opr)) == 1
and net.find_inp_oprs(opr)[0].name == "Conv2d"
and len(net.find_out_oprs(net.find_inp_oprs(opr)[0])) == 1
and net.find_out_oprs(net.find_inp_oprs(opr)[0])[0] == opr
):
gamma = (
Tensor(opr.weight) # type: ignore[attr-defined]
if opr.weight is not None # type: ignore[attr-defined]
else Tensor(opr.inp_tensors[1].np_data)
)
beta = (
Tensor(opr.bias) # type: ignore[attr-defined]
if opr.bias is not None # type: ignore[attr-defined]
else Tensor(opr.inp_tensors[2].np_data)
)
bn_mean = (
Tensor(opr.mean) # type: ignore[attr-defined]
if opr.mean is not None # type: ignore[attr-defined]
else Tensor(opr.inp_tensors[3].np_data)
)
bn_var = (
Tensor(opr.var) # type: ignore[attr-defined]
if opr.var is not None # type: ignore[attr-defined]
else Tensor(opr.inp_tensors[4].np_data)
)
conv_op = net.find_inp_oprs(opr)[0]
conv_weight = conv_op.inp_tensors[1].np_data
if len(conv_op.inp_tensors) == 2:
# add conv bias tensor
weight_shape = conv_op.inp_tensors[1].shape
bias_shape = (
weight_shape[0]
if len(weight_shape) == 4
else weight_shape[0] * weight_shape[1]
)
bias_shape = (1, bias_shape, 1, 1)
conv_bias = IRTensor(
name=conv_op.inp_tensors[0].name + "_bias",
shape=bias_shape,
dtype=np.float32,
np_data=np.zeros(bias_shape, dtype=np.float32),
owner_opr=conv_op,
)
if conv_op.inp_tensors[0].scale and conv_op.inp_tensors[1].scale:
conv_bias.set_qparams(
conv_op.inp_tensors[0].scale * conv_op.inp_tensors[1].scale, 0
)
conv_bias.q_dtype = "int32"
conv_op.inp_tensors.append(conv_bias)
conv_bias = conv_op.inp_tensors[2].np_data.reshape(1, -1, 1, 1)
w_fold, b_fold = fold_conv_bn(
conv_weight,
conv_bias,
conv_op.groups,
gamma,
beta,
bn_mean,
bn_var,
opr.eps, # type: ignore[attr-defined]
)
conv_op.inp_tensors[1].np_data = w_fold.numpy()
conv_op.inp_tensors[2].np_data = b_fold.numpy()
# delete bn opr
conv_op.out_tensors[0] = opr.out_tensors[-1]
conv_op.out_tensors[0].owner_opr = conv_op
index = net._opr_ids.index(id(opr))
net.delete_ops(index)
@_register_tranformation_rule(TransformerRule.REMOVE_IDENTITY)
def _remove_identity(net: IRGraph):
delete_intended = []
for op_id, opr in zip(net._opr_ids, net.all_oprs):
if not isinstance(opr, IdentityOpr):
continue
user_ops = net.find_out_oprs(opr)
for user in user_ops:
idx = user.inp_tensors.index(opr.out_tensors[0])
user.inp_tensors[idx] = opr.inp_tensors[0]
idx = opr.inp_tensors[0].user_opr.index(opr)
opr.inp_tensors[0].user_opr[idx] = user
delete_intended.append(net._opr_ids.index(op_id))
for delete_idx in delete_intended[::-1]:
net.delete_ops(delete_idx)
@_register_tranformation_rule(TransformerRule.EXPAND_CONVRELU)
def _expand_conv_relu(net: IRGraph):
for opr in net.all_oprs:
if not isinstance(opr, ConvRelu2dOpr):
continue
conv_op = Conv2dOpr(
stride=opr.stride,
padding=opr.padding,
dilation=opr.dilation,
groups=opr.groups,
)
conv_op.inp_tensors = opr.inp_tensors
for t in conv_op.inp_tensors:
idx = t.user_opr.index(opr)
t.user_opr[idx] = conv_op
conv_out_tensor = IRTensor(
name=opr.inp_tensors[0].name + "_conv_out",
shape=opr.out_tensors[0].shape,
dtype=opr.out_tensors[0].dtype,
scale=opr.out_tensors[0].scale,
zero_point=opr.out_tensors[0].zero_point,
q_type=opr.out_tensors[0].q_dtype,
owner_opr=conv_op,
)
conv_op.out_tensors = [conv_out_tensor]
conv_out_tensor.owner_opr = conv_op
idx = net.all_oprs.index(opr)
net.add_op(conv_op, idx)
relu_op = ReluOpr()
relu_op.inp_tensors = conv_op.out_tensors
conv_out_tensor.user_opr.append(relu_op)
relu_op.out_tensors = opr.out_tensors
for t in relu_op.out_tensors:
t.owner_opr = relu_op
net.replace_op(opr, relu_op)
|
[
"megengine.Tensor",
"megengine.functional.sqrt"
] |
[((5519, 5532), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5530, 5532), False, 'from collections import OrderedDict\n'), ((18529, 18542), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (18540, 18542), False, 'from collections import OrderedDict\n'), ((8433, 8532), 'numpy.array', 'np.array', (['[result_shape[0], result_shape[2], result_shape[3], result_shape[1]]'], {'dtype': 'np.int32'}), '([result_shape[0], result_shape[2], result_shape[3], result_shape[1\n ]], dtype=np.int32)\n', (8441, 8532), True, 'import numpy as np\n'), ((10396, 10434), 'numpy.zeros', 'np.zeros', (['bias_shape'], {'dtype': 'np.float32'}), '(bias_shape, dtype=np.float32)\n', (10404, 10434), True, 'import numpy as np\n'), ((11433, 11471), 'numpy.zeros', 'np.zeros', (['bias_shape'], {'dtype': 'np.float32'}), '(bias_shape, dtype=np.float32)\n', (11441, 11471), True, 'import numpy as np\n'), ((33030, 33048), 'megengine.functional.sqrt', 'sqrt', (['(bn_var + eps)'], {}), '(bn_var + eps)\n', (33034, 33048), False, 'from megengine.functional import sqrt\n'), ((5791, 5893), 'numpy.array', 'np.array', (['[0, 0, op.padding[0], op.padding[0], op.padding[1], op.padding[1], 0, 0]'], {'dtype': 'np.int32'}), '([0, 0, op.padding[0], op.padding[0], op.padding[1], op.padding[1],\n 0, 0], dtype=np.int32)\n', (5799, 5893), True, 'import numpy as np\n'), ((3220, 3241), 'functools.cmp_to_key', 'cmp_to_key', (['cmp_rules'], {}), '(cmp_rules)\n', (3230, 3241), False, 'from functools import cmp_to_key\n'), ((4441, 4477), 'numpy.array', 'np.array', (['op.pattern'], {'dtype': 'np.int32'}), '(op.pattern, dtype=np.int32)\n', (4449, 4477), True, 'import numpy as np\n'), ((4955, 4988), 'numpy.array', 'np.array', (['op.axis'], {'dtype': 'np.int32'}), '(op.axis, dtype=np.int32)\n', (4963, 4988), True, 'import numpy as np\n'), ((9740, 9777), 'numpy.array', 'np.array', (['op.out_size'], {'dtype': 'np.int32'}), '(op.out_size, dtype=np.int32)\n', (9748, 9777), True, 'import numpy as np\n'), ((14872, 14890), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (14880, 14890), True, 'import numpy as np\n'), ((33947, 33965), 'megengine.Tensor', 'Tensor', (['opr.weight'], {}), '(opr.weight)\n', (33953, 33965), False, 'from megengine import Tensor\n'), ((34089, 34123), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[1].np_data'], {}), '(opr.inp_tensors[1].np_data)\n', (34095, 34123), False, 'from megengine import Tensor\n'), ((34175, 34191), 'megengine.Tensor', 'Tensor', (['opr.bias'], {}), '(opr.bias)\n', (34181, 34191), False, 'from megengine import Tensor\n'), ((34313, 34347), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[2].np_data'], {}), '(opr.inp_tensors[2].np_data)\n', (34319, 34347), False, 'from megengine import Tensor\n'), ((34402, 34418), 'megengine.Tensor', 'Tensor', (['opr.mean'], {}), '(opr.mean)\n', (34408, 34418), False, 'from megengine import Tensor\n'), ((34540, 34574), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[3].np_data'], {}), '(opr.inp_tensors[3].np_data)\n', (34546, 34574), False, 'from megengine import Tensor\n'), ((34628, 34643), 'megengine.Tensor', 'Tensor', (['opr.var'], {}), '(opr.var)\n', (34634, 34643), False, 'from megengine import Tensor\n'), ((34764, 34798), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[4].np_data'], {}), '(opr.inp_tensors[4].np_data)\n', (34770, 34798), False, 'from megengine import Tensor\n'), ((4365, 4385), 'numpy.array', 'np.array', (['op.pattern'], {}), '(op.pattern)\n', (4373, 4385), True, 'import numpy as np\n'), ((14582, 14611), 'numpy.array', 
'np.array', (['ret'], {'dtype': 'np.int32'}), '(ret, dtype=np.int32)\n', (14590, 14611), True, 'import numpy as np\n'), ((16601, 16628), 'numpy.array_equal', 'np.array_equal', (['const[1]', 'v'], {}), '(const[1], v)\n', (16615, 16628), True, 'import numpy as np\n'), ((16826, 16868), 'numpy.array_equal', 'np.array_equal', (['const[1]', 'inp_tensors[idx]'], {}), '(const[1], inp_tensors[idx])\n', (16840, 16868), True, 'import numpy as np\n'), ((35512, 35550), 'numpy.zeros', 'np.zeros', (['bias_shape'], {'dtype': 'np.float32'}), '(bias_shape, dtype=np.float32)\n', (35520, 35550), True, 'import numpy as np\n')]
|
"""Initial model generation
Revision ID: a2ced875a244
Revises:
Create Date: 2021-10-28 09:24:53.225445
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'a2ced875a244'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('category',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('playlists', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_category_id'), 'category', ['id'], unique=False)
op.create_index(op.f('ix_category_name'), 'category', ['name'], unique=False)
op.create_index(op.f('ix_category_playlists'), 'category', ['playlists'], unique=False)
op.create_table('user',
sa.Column('entity_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('last_login', sa.DateTime(), nullable=True),
sa.Column('admin', sa.Boolean(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.Column('password', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_admin'), 'user', ['admin'], unique=False)
op.create_index(op.f('ix_user_created_at'), 'user', ['created_at'], unique=False)
op.create_index(op.f('ix_user_entity_id'), 'user', ['entity_id'], unique=False)
op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
op.create_index(op.f('ix_user_last_login'), 'user', ['last_login'], unique=False)
op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=False)
op.create_index(op.f('ix_user_password'), 'user', ['password'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_user_password'), table_name='user')
op.drop_index(op.f('ix_user_name'), table_name='user')
op.drop_index(op.f('ix_user_last_login'), table_name='user')
op.drop_index(op.f('ix_user_id'), table_name='user')
op.drop_index(op.f('ix_user_entity_id'), table_name='user')
op.drop_index(op.f('ix_user_created_at'), table_name='user')
op.drop_index(op.f('ix_user_admin'), table_name='user')
op.drop_table('user')
op.drop_index(op.f('ix_category_playlists'), table_name='category')
op.drop_index(op.f('ix_category_name'), table_name='category')
op.drop_index(op.f('ix_category_id'), table_name='category')
op.drop_table('category')
# ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((2554, 2575), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (2567, 2575), False, 'from alembic import op\n'), ((2784, 2809), 'alembic.op.drop_table', 'op.drop_table', (['"""category"""'], {}), "('category')\n", (2797, 2809), False, 'from alembic import op\n'), ((626, 655), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (649, 655), True, 'import sqlalchemy as sa\n'), ((682, 704), 'alembic.op.f', 'op.f', (['"""ix_category_id"""'], {}), "('ix_category_id')\n", (686, 704), False, 'from alembic import op\n'), ((760, 784), 'alembic.op.f', 'op.f', (['"""ix_category_name"""'], {}), "('ix_category_name')\n", (764, 784), False, 'from alembic import op\n'), ((842, 871), 'alembic.op.f', 'op.f', (['"""ix_category_playlists"""'], {}), "('ix_category_playlists')\n", (846, 871), False, 'from alembic import op\n'), ((1403, 1432), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1426, 1432), True, 'import sqlalchemy as sa\n'), ((1459, 1480), 'alembic.op.f', 'op.f', (['"""ix_user_admin"""'], {}), "('ix_user_admin')\n", (1463, 1480), False, 'from alembic import op\n'), ((1535, 1561), 'alembic.op.f', 'op.f', (['"""ix_user_created_at"""'], {}), "('ix_user_created_at')\n", (1539, 1561), False, 'from alembic import op\n'), ((1621, 1646), 'alembic.op.f', 'op.f', (['"""ix_user_entity_id"""'], {}), "('ix_user_entity_id')\n", (1625, 1646), False, 'from alembic import op\n'), ((1705, 1723), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (1709, 1723), False, 'from alembic import op\n'), ((1775, 1801), 'alembic.op.f', 'op.f', (['"""ix_user_last_login"""'], {}), "('ix_user_last_login')\n", (1779, 1801), False, 'from alembic import op\n'), ((1861, 1881), 'alembic.op.f', 'op.f', (['"""ix_user_name"""'], {}), "('ix_user_name')\n", (1865, 1881), False, 'from alembic import op\n'), ((1935, 1959), 'alembic.op.f', 'op.f', (['"""ix_user_password"""'], {}), "('ix_user_password')\n", (1939, 1959), False, 'from alembic import op\n'), ((2135, 2159), 'alembic.op.f', 'op.f', (['"""ix_user_password"""'], {}), "('ix_user_password')\n", (2139, 2159), False, 'from alembic import op\n'), ((2198, 2218), 'alembic.op.f', 'op.f', (['"""ix_user_name"""'], {}), "('ix_user_name')\n", (2202, 2218), False, 'from alembic import op\n'), ((2257, 2283), 'alembic.op.f', 'op.f', (['"""ix_user_last_login"""'], {}), "('ix_user_last_login')\n", (2261, 2283), False, 'from alembic import op\n'), ((2322, 2340), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (2326, 2340), False, 'from alembic import op\n'), ((2379, 2404), 'alembic.op.f', 'op.f', (['"""ix_user_entity_id"""'], {}), "('ix_user_entity_id')\n", (2383, 2404), False, 'from alembic import op\n'), ((2443, 2469), 'alembic.op.f', 'op.f', (['"""ix_user_created_at"""'], {}), "('ix_user_created_at')\n", (2447, 2469), False, 'from alembic import op\n'), ((2508, 2529), 'alembic.op.f', 'op.f', (['"""ix_user_admin"""'], {}), "('ix_user_admin')\n", (2512, 2529), False, 'from alembic import op\n'), ((2594, 2623), 'alembic.op.f', 'op.f', (['"""ix_category_playlists"""'], {}), "('ix_category_playlists')\n", (2598, 2623), False, 'from alembic import op\n'), ((2666, 2690), 'alembic.op.f', 'op.f', (['"""ix_category_name"""'], {}), "('ix_category_name')\n", (2670, 2690), False, 'from alembic import op\n'), ((2733, 2755), 'alembic.op.f', 'op.f', (['"""ix_category_id"""'], {}), "('ix_category_id')\n", (2737, 2755), False, 'from alembic import 
op\n'), ((439, 473), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (471, 473), False, 'import sqlmodel\n'), ((519, 553), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (551, 553), False, 'import sqlmodel\n'), ((592, 604), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (602, 604), True, 'import sqlalchemy as sa\n'), ((969, 1003), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1001, 1003), False, 'import sqlmodel\n'), ((1044, 1078), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1076, 1078), False, 'import sqlmodel\n'), ((1125, 1138), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1136, 1138), True, 'import sqlalchemy as sa\n'), ((1185, 1198), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1196, 1198), True, 'import sqlalchemy as sa\n'), ((1239, 1251), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1249, 1251), True, 'import sqlalchemy as sa\n'), ((1290, 1302), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1300, 1302), True, 'import sqlalchemy as sa\n'), ((1346, 1380), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1378, 1380), False, 'import sqlmodel\n')]
|
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import UniqueConstraint
from sqlmodel import Field, Relationship
from pydantic_factories import ModelFactory, Use
from faker import Faker
from api.db.models.base import BaseModel, BaseTable
class StudentBase(BaseModel):
name: str = Field(index=True, nullable=False)
sandbox_id: uuid.UUID = None
# faber line of business data for student degree credentials
degree: Optional[str] = Field(default=None, nullable=True)
age: Optional[int] = Field(default=None, nullable=True)
student_id: Optional[str] = Field(default=None, nullable=True)
date: Optional[datetime] = Field(default=None, nullable=True)
# track invitation information
# this is for this LOB to track this entity in Traction
invitation_state: Optional[str] = Field(default=None, nullable=True)
connection_id: Optional[uuid.UUID] = Field(default=None)
# for matching this student with their traction tenant
# this would not be in this LOB data at all!!!
# the entity/person/business that this record represents
# would be tracking this in their system/data
wallet_id: Optional[uuid.UUID] = None
alias: Optional[str] = Field(default=None, nullable=True)
class Student(StudentBase, BaseTable, table=True):
__table_args__ = (UniqueConstraint("name", "sandbox_id"),)
sandbox: Optional["Sandbox"] = Relationship(back_populates="students") # noqa: F821
sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id")
wallet_id: uuid.UUID = Field(default=None, nullable=True)
class StudentCreate(StudentBase):
pass
class StudentRead(StudentBase):
id: uuid.UUID
created_at: datetime
updated_at: datetime
degree: Optional[str] = None
age: Optional[int] = None
student_id: Optional[str] = None
date: Optional[datetime] = None
class StudentUpdate(StudentBase):
name: Optional[str] = None
# FACTORIES
class StudentCreateFactory(ModelFactory):
__model__ = StudentCreate
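    # Added note: Use(...) wraps a callable that pydantic-factories invokes to build the field,
    # so `name` below is generated by Faker's name() provider rather than as an arbitrary string.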
name = Use(Faker().name)
degree = None
age = None
student_id = None
date = None
wallet_id = None
alias = None
invitation_state = None
connection_id = None
|
[
"sqlmodel.Relationship",
"sqlmodel.Field"
] |
[((325, 358), 'sqlmodel.Field', 'Field', ([], {'index': '(True)', 'nullable': '(False)'}), '(index=True, nullable=False)\n', (330, 358), False, 'from sqlmodel import Field, Relationship\n'), ((486, 520), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (491, 520), False, 'from sqlmodel import Field, Relationship\n'), ((546, 580), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (551, 580), False, 'from sqlmodel import Field, Relationship\n'), ((613, 647), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (618, 647), False, 'from sqlmodel import Field, Relationship\n'), ((679, 713), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (684, 713), False, 'from sqlmodel import Field, Relationship\n'), ((848, 882), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (853, 882), False, 'from sqlmodel import Field, Relationship\n'), ((924, 943), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (929, 943), False, 'from sqlmodel import Field, Relationship\n'), ((1235, 1269), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (1240, 1269), False, 'from sqlmodel import Field, Relationship\n'), ((1422, 1461), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""students"""'}), "(back_populates='students')\n", (1434, 1461), False, 'from sqlmodel import Field, Relationship\n'), ((1505, 1536), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""sandbox.id"""'}), "(foreign_key='sandbox.id')\n", (1510, 1536), False, 'from sqlmodel import Field, Relationship\n'), ((1564, 1598), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (1569, 1598), False, 'from sqlmodel import Field, Relationship\n'), ((1345, 1383), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (1361, 1383), False, 'from sqlalchemy import UniqueConstraint\n'), ((2053, 2060), 'faker.Faker', 'Faker', ([], {}), '()\n', (2058, 2060), False, 'from faker import Faker\n')]
|
from typing import Optional, List
from sqlalchemy import String
from sqlalchemy.sql.schema import Column
from sqlmodel import SQLModel, Field, Relationship
class CustomerProductLink(SQLModel, table=True):
customer_id: Optional[int] = Field(
default=None, foreign_key='customer.id', primary_key=True
)
product_id: Optional[int] = Field(
default=None, foreign_key='product.id', primary_key=True
)
class AddressBase(SQLModel):
street_name: str
house_number: str
city: str
zip_code: str
class Address(AddressBase, table=True):
id: int = Field(default=None, primary_key=True)
customers: List['Customer'] = Relationship(back_populates='address')
class AddressOut(AddressBase):
pass
class AddressIn(AddressBase):
pass
class CustomerBase(SQLModel):
first_name: str
last_name: str
birth_date: str
gender: str
mobile_number: str
email: str
class Customer(CustomerBase, table=True):
id: int = Field(default=None, primary_key=True)
address_id: Optional[int] = Field(default=None, foreign_key='address.id')
address: Optional[Address] = Relationship(back_populates='customers',
sa_relationship_kwargs={'lazy': 'selectin'})
mobile_number: str = Field(sa_column=Column('mobile_number', String, unique=True))
email: str = Field(sa_column=Column('email', String, unique=True))
products: List['Product'] = Relationship(back_populates='customers', link_model=CustomerProductLink,
sa_relationship_kwargs={'lazy': 'selectin'})
class CustomerOut(CustomerBase):
id: int
address: Optional[AddressOut]
class CustomerIn(CustomerBase):
address: Optional[AddressIn]
class ProductBase(SQLModel):
name: Optional[str] = None
class Product(ProductBase, table=True):
id: int = Field(default=None, primary_key=True)
name: str = Field(sa_column=Column('name', String, unique=True))
customers: List[Customer] = Relationship(back_populates='products', link_model=CustomerProductLink)
class ProductOut(ProductBase):
id: int
name: str
class ProductIn(ProductBase):
name: str
class ProductUpdate(ProductBase):
product_id: int
|
[
"sqlmodel.Relationship",
"sqlmodel.Field"
] |
[((241, 305), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""customer.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='customer.id', primary_key=True)\n", (246, 305), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((352, 415), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""product.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='product.id', primary_key=True)\n", (357, 415), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((592, 629), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (597, 629), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((664, 702), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""address"""'}), "(back_populates='address')\n", (676, 702), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((989, 1026), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (994, 1026), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1059, 1104), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""address.id"""'}), "(default=None, foreign_key='address.id')\n", (1064, 1104), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1138, 1227), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""customers"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='customers', sa_relationship_kwargs={'lazy':\n 'selectin'})\n", (1150, 1227), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1461, 1582), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""customers"""', 'link_model': 'CustomerProductLink', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='customers', link_model=CustomerProductLink,\n sa_relationship_kwargs={'lazy': 'selectin'})\n", (1473, 1582), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1890, 1927), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1895, 1927), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2030, 2101), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""products"""', 'link_model': 'CustomerProductLink'}), "(back_populates='products', link_model=CustomerProductLink)\n", (2042, 2101), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1311, 1355), 'sqlalchemy.sql.schema.Column', 'Column', (['"""mobile_number"""', 'String'], {'unique': '(True)'}), "('mobile_number', String, unique=True)\n", (1317, 1355), False, 'from sqlalchemy.sql.schema import Column\n'), ((1390, 1426), 'sqlalchemy.sql.schema.Column', 'Column', (['"""email"""', 'String'], {'unique': '(True)'}), "('email', String, unique=True)\n", (1396, 1426), False, 'from sqlalchemy.sql.schema import Column\n'), ((1960, 1995), 'sqlalchemy.sql.schema.Column', 'Column', (['"""name"""', 'String'], {'unique': '(True)'}), "('name', String, unique=True)\n", (1966, 1995), False, 'from sqlalchemy.sql.schema import Column\n')]
|
r"""
Biot problem - deformable porous medium with the no-penetration boundary
condition on a boundary region enforced using Lagrange multipliers.
The non-penetration condition is enforced weakly using the Lagrange
multiplier :math:`\lambda`. There is also a rigid body movement
constraint imposed on the :math:`\Gamma_{outlet}` region using the
linear combination boundary conditions.
Find :math:`\ul{u}`, :math:`p` and :math:`\lambda` such that:
.. math::
\int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
- \int_{\Omega} p\ \alpha_{ij} e_{ij}(\ul{v})
+ \int_{\Gamma_{walls}} \lambda \ul{n} \cdot \ul{v}
= 0
\;, \quad \forall \ul{v} \;,
\int_{\Omega} q\ \alpha_{ij} e_{ij}(\ul{u})
+ \int_{\Omega} K_{ij} \nabla_i q \nabla_j p
= 0
\;, \quad \forall q \;,
\int_{\Gamma_{walls}} \hat\lambda \ul{n} \cdot \ul{u}
= 0
\;, \quad \forall \hat\lambda \;,
\ul{u} \cdot \ul{n} = 0 \mbox{ on } \Gamma_{walls} \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import absolute_import
from examples.multi_physics.biot_npbc import (cinc_simple, define_regions,
get_pars)
def define():
from sfepy import data_dir
filename = data_dir + '/meshes/3d/cylinder.mesh'
output_dir = 'output'
return define_input(filename, output_dir)
def post_process(out, pb, state, extend=False):
from sfepy.base.base import Struct
dvel = pb.evaluate('ev_diffusion_velocity.2.Omega( m.K, p )',
mode='el_avg')
out['dvel'] = Struct(name='output_data', var_name='p',
mode='cell', data=dvel, dofs=None)
stress = pb.evaluate('ev_cauchy_stress.2.Omega( m.D, u )',
mode='el_avg')
out['cauchy_stress'] = Struct(name='output_data', var_name='u',
mode='cell', data=stress, dofs=None)
return out
def define_input(filename, output_dir):
filename_mesh = filename
options = {
'output_dir' : output_dir,
'output_format' : 'vtk',
'post_process_hook' : 'post_process',
## 'file_per_var' : True,
'ls' : 'ls',
'nls' : 'newton',
}
functions = {
'cinc_simple0' : (lambda coors, domain:
cinc_simple(coors, 0),),
'cinc_simple1' : (lambda coors, domain:
cinc_simple(coors, 1),),
'cinc_simple2' : (lambda coors, domain:
cinc_simple(coors, 2),),
'get_pars' : (lambda ts, coors, mode=None, **kwargs:
get_pars(ts, coors, mode,
output_dir=output_dir, **kwargs),),
}
regions, dim = define_regions(filename_mesh)
fields = {
'displacement': ('real', 'vector', 'Omega', 1),
'pressure': ('real', 'scalar', 'Omega', 1),
'multiplier': ('real', 'scalar', 'Walls', 1),
}
variables = {
'u' : ('unknown field', 'displacement', 0),
'v' : ('test field', 'displacement', 'u'),
'p' : ('unknown field', 'pressure', 1),
'q' : ('test field', 'pressure', 'p'),
'ul' : ('unknown field', 'multiplier', 2),
'vl' : ('test field', 'multiplier', 'ul'),
}
ebcs = {
'inlet' : ('Inlet', {'p.0' : 1.0, 'u.all' : 0.0}),
'outlet' : ('Outlet', {'p.0' : -1.0}),
}
lcbcs = {
'rigid' : ('Outlet', {'u.all' : None}, None, 'rigid'),
}
materials = {
'm' : 'get_pars',
}
equations = {
'eq_1' :
"""dw_lin_elastic.2.Omega( m.D, v, u )
- dw_biot.2.Omega( m.alpha, v, p )
+ dw_non_penetration.2.Walls( v, ul )
= 0""",
'eq_2' :
"""dw_biot.2.Omega( m.alpha, u, q )
+ dw_diffusion.2.Omega( m.K, q, p )
= 0""",
'eq_3' :
"""dw_non_penetration.2.Walls( u, vl )
= 0""",
}
solvers = {
'ls' : ('ls.scipy_direct', {}),
'newton' : ('nls.newton', {}),
}
return locals()
|
[
"sfepy.base.base.Struct"
] |
[((1657, 1732), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'var_name': '"""p"""', 'mode': '"""cell"""', 'data': 'dvel', 'dofs': 'None'}), "(name='output_data', var_name='p', mode='cell', data=dvel, dofs=None)\n", (1663, 1732), False, 'from sfepy.base.base import Struct\n'), ((1889, 1966), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'var_name': '"""u"""', 'mode': '"""cell"""', 'data': 'stress', 'dofs': 'None'}), "(name='output_data', var_name='u', mode='cell', data=stress, dofs=None)\n", (1895, 1966), False, 'from sfepy.base.base import Struct\n'), ((2822, 2851), 'examples.multi_physics.biot_npbc.define_regions', 'define_regions', (['filename_mesh'], {}), '(filename_mesh)\n', (2836, 2851), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2398, 2419), 'examples.multi_physics.biot_npbc.cinc_simple', 'cinc_simple', (['coors', '(0)'], {}), '(coors, 0)\n', (2409, 2419), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2497, 2518), 'examples.multi_physics.biot_npbc.cinc_simple', 'cinc_simple', (['coors', '(1)'], {}), '(coors, 1)\n', (2508, 2518), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2596, 2617), 'examples.multi_physics.biot_npbc.cinc_simple', 'cinc_simple', (['coors', '(2)'], {}), '(coors, 2)\n', (2607, 2617), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2704, 2762), 'examples.multi_physics.biot_npbc.get_pars', 'get_pars', (['ts', 'coors', 'mode'], {'output_dir': 'output_dir'}), '(ts, coors, mode, output_dir=output_dir, **kwargs)\n', (2712, 2762), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n')]
|
from sqlmodel import Session
from sfm.database import engine
from sfm.config import get_settings
from sfm.utils import verify_api_auth_token
from fastapi import Depends, HTTPException
from fastapi.security import HTTPBearer, HTTPBasicCredentials
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
app_settings = get_settings()
security = HTTPBearer()
def get_db(): # pragma: no cover
db = Session(engine)
try:
yield db
finally:
db.close()
def has_access(
credentials: HTTPBasicCredentials = Depends(security),
): # pragma: no cover
token = credentials.credentials
verified = verify_api_auth_token(token)
if verified:
return True
else:
raise HTTPException(status_code=403, detail="Incorrect Credentials")
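# Illustrative usage sketch (assumes a FastAPI "app" object defined elsewhere):
# the dependency can protect a route, e.g.
#     @app.get("/metrics", dependencies=[Depends(has_access)])
#     def read_metrics(db: Session = Depends(get_db)):
#         ...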
|
[
"sqlmodel.Session"
] |
[((302, 353), 'passlib.context.CryptContext', 'CryptContext', ([], {'schemes': "['bcrypt']", 'deprecated': '"""auto"""'}), "(schemes=['bcrypt'], deprecated='auto')\n", (314, 353), False, 'from passlib.context import CryptContext\n'), ((369, 383), 'sfm.config.get_settings', 'get_settings', ([], {}), '()\n', (381, 383), False, 'from sfm.config import get_settings\n'), ((395, 407), 'fastapi.security.HTTPBearer', 'HTTPBearer', ([], {}), '()\n', (405, 407), False, 'from fastapi.security import HTTPBearer, HTTPBasicCredentials\n'), ((453, 468), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (460, 468), False, 'from sqlmodel import Session\n'), ((585, 602), 'fastapi.Depends', 'Depends', (['security'], {}), '(security)\n', (592, 602), False, 'from fastapi import Depends, HTTPException\n'), ((678, 706), 'sfm.utils.verify_api_auth_token', 'verify_api_auth_token', (['token'], {}), '(token)\n', (699, 706), False, 'from sfm.utils import verify_api_auth_token\n'), ((768, 830), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""Incorrect Credentials"""'}), "(status_code=403, detail='Incorrect Credentials')\n", (781, 830), False, 'from fastapi import Depends, HTTPException\n')]
|
import os
from dotenv import load_dotenv
from dateutil.parser import parse
from sqlmodel import Session, select, SQLModel, create_engine
import requests
from youtube.models import YouTube
load_dotenv()
YT_CHANNEL = os.environ["YT_CHANNEL"]
YOUTUBE_API_KEY = os.environ["YOUTUBE_API_KEY"]
DATABASE_URL = os.environ["DATABASE_URL"]
YOUTUBE_VIDEO = "youtube#video"
BASE_URL = (
"https://www.googleapis.com/youtube/v3/search?key={key}"
"&channelId={channel}&part=snippet,id&order=date&maxResults=20"
)
engine = create_engine(DATABASE_URL, echo=False)
def get_session():
with Session(engine) as session:
yield session
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def get_videos_from_channel(channel: str = YT_CHANNEL) -> list[dict]:
base_url = BASE_URL.format(key=YOUTUBE_API_KEY,
channel=channel)
next_page, url = None, base_url
videos = []
while True:
if next_page is not None:
url = base_url + f"&pageToken={next_page}"
response = requests.get(url).json()
for vid in response["items"]:
if vid["id"]["kind"] != "youtube#video":
continue
videos.append(vid)
if "nextPageToken" not in response:
break
next_page = response["nextPageToken"]
return videos
def insert_youtube_videos(session: Session, videos: list[dict]) -> None:
num_inserted = 0
for video in videos:
video_id = video["id"]["videoId"]
title = video["snippet"]["title"]
description = video["snippet"]["description"]
thumb = video["snippet"]["thumbnails"]["medium"]["url"]
published = video["snippet"]["publishTime"]
statement = select(YouTube).where(YouTube.video_id == video_id)
results = session.exec(statement)
if results.first() is not None:
continue
youtube = YouTube(
video_id=video_id,
title=title,
description=description,
thumb=thumb,
published=parse(published),
)
session.add(youtube)
num_inserted += 1
session.commit()
statement = select(YouTube)
results = session.exec(statement)
total_records = len(results.all())
print(f"Total records: {total_records} (newly inserted: {num_inserted})")
if __name__ == "__main__":
create_db_and_tables()
videos = get_videos_from_channel()
with Session(engine) as session:
insert_youtube_videos(session, videos)
|
[
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.select"
] |
[((191, 204), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (202, 204), False, 'from dotenv import load_dotenv\n'), ((522, 561), 'sqlmodel.create_engine', 'create_engine', (['DATABASE_URL'], {'echo': '(False)'}), '(DATABASE_URL, echo=False)\n', (535, 561), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((676, 712), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (704, 712), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2206, 2221), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (2212, 2221), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((592, 607), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (599, 607), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2481, 2496), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2488, 2496), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((1062, 1079), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1074, 1079), False, 'import requests\n'), ((1760, 1775), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (1766, 1775), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2083, 2099), 'dateutil.parser.parse', 'parse', (['published'], {}), '(published)\n', (2088, 2099), False, 'from dateutil.parser import parse\n')]
|
"""
The Dirichlet, periodic and linear combination boundary condition
classes, as well as the initial condition class.
"""
from __future__ import absolute_import
import numpy as nm
from sfepy.base.base import basestr, Container, Struct
from sfepy.discrete.functions import Function
import six
def get_condition_value(val, functions, kind, name):
"""
Check a boundary/initial condition value type and return the value or
corresponding function.
"""
if type(val) == str:
if functions is not None:
try:
fun = functions[val]
except IndexError:
raise ValueError('unknown function %s given for %s %s!'
% (val, kind, name))
else:
raise ValueError('no functions given for %s %s!' % (kind, name))
elif (isinstance(val, Function) or nm.isscalar(val)
or isinstance(val, nm.ndarray)):
fun = val
else:
raise ValueError('unknown value type for %s %s!'
% (kind, name))
return fun
def _get_region(name, regions, bc_name):
try:
region = regions[name]
except IndexError:
msg = "no region '%s' used in condition %s!" % (name, bc_name)
raise IndexError(msg)
return region
class Conditions(Container):
"""
Container for various conditions.
"""
@staticmethod
def from_conf(conf, regions):
conds = []
for key, cc in six.iteritems(conf):
times = cc.get('times', None)
if 'ebc' in key:
region = _get_region(cc.region, regions, cc.name)
cond = EssentialBC(cc.name, region, cc.dofs, key=key,
times=times)
elif 'epbc' in key:
rs = [_get_region(ii, regions, cc.name) for ii in cc.region]
cond = PeriodicBC(cc.name, rs, cc.dofs, cc.match, key=key,
times=times)
elif 'lcbc' in key:
if isinstance(cc.region, basestr):
rs = [_get_region(cc.region, regions, cc.name), None]
else:
rs = [_get_region(ii, regions, cc.name)
for ii in cc.region]
cond = LinearCombinationBC(cc.name, rs, cc.dofs,
cc.dof_map_fun, cc.kind,
key=key,
times=times,
arguments=cc.get('arguments', None))
elif 'ic' in key:
region = _get_region(cc.region, regions, cc.name)
cond = InitialCondition(cc.name, region, cc.dofs, key=key)
else:
raise ValueError('unknown condition type! (%s)' % key)
conds.append(cond)
obj = Conditions(conds)
return obj
def group_by_variables(self, groups=None):
"""
        Group boundary conditions of each variable. Each condition in a
        group is a single condition.
Parameters
----------
groups : dict, optional
If present, update the `groups` dictionary.
Returns
-------
out : dict
The dictionary with variable names as keys and lists of
single condition instances as values.
"""
if groups is None:
out = {}
else:
out = groups
for cond in self:
for single_cond in cond.iter_single():
vname = single_cond.dofs[0].split('.')[0]
out.setdefault(vname, Conditions()).append(single_cond)
return out
def canonize_dof_names(self, dofs):
"""
Canonize the DOF names using the full list of DOFs of a
variable.
"""
for cond in self:
cond.canonize_dof_names(dofs)
def sort(self):
"""
Sort boundary conditions by their key.
"""
self._objs.sort(key=lambda a: a.key)
self.update()
def zero_dofs(self):
"""
Set all boundary condition values to zero, if applicable.
"""
for cond in self:
if isinstance(cond, EssentialBC):
cond.zero_dofs()
def _canonize(dofs, all_dofs):
"""
Helper function.
"""
vname, dd = dofs.split('.')
if dd == 'all':
cdofs = all_dofs
elif dd[0] == '[':
cdofs = [vname + '.' + ii.strip()
for ii in dd[1:-1].split(',')]
else:
cdofs = [dofs]
return cdofs
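# Example: with all_dofs = ['u.0', 'u.1', 'u.2'], _canonize('u.all', all_dofs)
# returns the full list, _canonize('u.[0,2]', all_dofs) returns ['u.0', 'u.2'],
# and _canonize('u.1', all_dofs) returns ['u.1'].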
class Condition(Struct):
"""
Common boundary condition methods.
"""
def __init__(self, name, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.is_single = False
def iter_single(self):
"""
Create a single condition instance for each item in self.dofs
and yield it.
"""
for dofs, val in six.iteritems(self.dofs):
single_cond = self.copy(name=self.name)
single_cond.is_single = True
single_cond.dofs = [dofs, val]
yield single_cond
def canonize_dof_names(self, dofs):
"""
Canonize the DOF names using the full list of DOFs of a
variable.
Assumes single condition instance.
"""
self.dofs[0] = _canonize(self.dofs[0], dofs)
class EssentialBC(Condition):
"""
    Essential boundary condition.
Parameters
----------
name : str
The boundary condition name.
region : Region instance
The region where the boundary condition is applied.
dofs : dict
The boundary condition specification defining the constrained
DOFs and their values.
key : str, optional
The sorting key.
times : list or str, optional
The list of time intervals or a function returning True at time
steps, when the condition applies.
"""
def __init__(self, name, region, dofs, key='', times=None):
Condition.__init__(self, name=name, region=region, dofs=dofs, key=key,
times=times)
def zero_dofs(self):
"""
Set all essential boundary condition values to zero.
"""
if self.is_single:
self.dofs[1] = 0.0
else:
new_dofs = {}
for key in six.iterkeys(self.dofs):
new_dofs[key] = 0.0
self.dofs = new_dofs
class PeriodicBC(Condition):
"""
    Periodic boundary condition.
Parameters
----------
name : str
The boundary condition name.
regions : list of two Region instances
The master region and the slave region where the DOFs should match.
dofs : dict
The boundary condition specification defining the DOFs in the master
region and the corresponding DOFs in the slave region.
match : str
The name of function for matching corresponding nodes in the
two regions.
key : str, optional
The sorting key.
times : list or str, optional
The list of time intervals or a function returning True at time
steps, when the condition applies.
"""
def __init__(self, name, regions, dofs, match, key='', times=None):
Condition.__init__(self, name=name, regions=regions, dofs=dofs,
match=match, key=key, times=times)
def canonize_dof_names(self, dofs):
"""
Canonize the DOF names using the full list of DOFs of a
variable.
Assumes single condition instance.
"""
self.dofs[0] = _canonize(self.dofs[0], dofs)
self.dofs[1] = _canonize(self.dofs[1], dofs)
class LinearCombinationBC(Condition):
"""
    Linear combination boundary condition.
Parameters
----------
name : str
The boundary condition name.
regions : list of two Region instances
The constrained (master) DOFs region and the new (slave) DOFs
region. The latter can be None if new DOFs are not field variable DOFs.
dofs : dict
The boundary condition specification defining the constrained
DOFs and the new DOFs (can be None).
dof_map_fun : str
The name of function for mapping the constrained DOFs to new DOFs (can
be None).
kind : str
The linear combination condition kind.
key : str, optional
The sorting key.
times : list or str, optional
The list of time intervals or a function returning True at time
steps, when the condition applies.
arguments: tuple, optional
Additional arguments, depending on the condition kind.
"""
def __init__(self, name, regions, dofs, dof_map_fun, kind, key='',
times=None, arguments=None):
Condition.__init__(self, name=name, regions=regions, dofs=dofs,
dof_map_fun=dof_map_fun, kind=kind,
key=key, times=times, arguments=arguments)
def get_var_names(self):
"""
Get names of variables corresponding to the constrained and new DOFs.
"""
names = [self.dofs[0].split('.')[0]]
if self.dofs[1] is not None:
names.append(self.dofs[1].split('.')[0])
return names
def canonize_dof_names(self, dofs0, dofs1=None):
"""
Canonize the DOF names using the full list of DOFs of a
variable.
Assumes single condition instance.
"""
self.dofs[0] = _canonize(self.dofs[0], dofs0)
if self.dofs[1] is not None:
self.dofs[1] = _canonize(self.dofs[1], dofs1)
class InitialCondition(Condition):
"""
    Initial condition.
Parameters
----------
name : str
The initial condition name.
region : Region instance
The region where the initial condition is applied.
dofs : dict
The initial condition specification defining the constrained
DOFs and their values.
key : str, optional
The sorting key.
"""
def __init__(self, name, region, dofs, key=''):
Condition.__init__(self, name=name, region=region, dofs=dofs, key=key)
|
[
"sfepy.base.base.Struct.__init__"
] |
[((1475, 1494), 'six.iteritems', 'six.iteritems', (['conf'], {}), '(conf)\n', (1488, 1494), False, 'import six\n'), ((4772, 4814), 'sfepy.base.base.Struct.__init__', 'Struct.__init__', (['self'], {'name': 'name'}), '(self, name=name, **kwargs)\n', (4787, 4814), False, 'from sfepy.base.base import basestr, Container, Struct\n'), ((5015, 5039), 'six.iteritems', 'six.iteritems', (['self.dofs'], {}), '(self.dofs)\n', (5028, 5039), False, 'import six\n'), ((869, 885), 'numpy.isscalar', 'nm.isscalar', (['val'], {}), '(val)\n', (880, 885), True, 'import numpy as nm\n'), ((6435, 6458), 'six.iterkeys', 'six.iterkeys', (['self.dofs'], {}), '(self.dofs)\n', (6447, 6458), False, 'import six\n')]
|
r"""
Poisson equation.
This example demonstrates parametric study capabilities of Application
classes. In particular (written in the strong form):
.. math::
c \Delta t = f \mbox{ in } \Omega,
t = 2 \mbox{ on } \Gamma_1 \;,
t = -2 \mbox{ on } \Gamma_2 \;,
f = 1 \mbox{ in } \Omega_1 \;,
f = 0 \mbox{ otherwise,}
where :math:`\Omega` is a square domain, :math:`\Omega_1 \in \Omega` is
a circular domain.
Now let's see what happens if :math:`\Omega_1` diameter changes.
Run::
$ ./simple.py <this file>
and then look in 'output/r_omega1' directory, try for example::
$ ./postproc.py output/r_omega1/circles_in_square*.vtk
Remark: this simple case could also be achieved by defining
:math:`\Omega_1` by a time-dependent function and solving the static
problem as a time-dependent problem. However, the approach below is much
more general.
Find :math:`t` such that:
.. math::
\int_{\Omega} c \nabla s \cdot \nabla t
= 0
\;, \quad \forall s \;.
"""
from __future__ import absolute_import
import os
import numpy as nm
from sfepy import data_dir
from sfepy.base.base import output
# Mesh.
filename_mesh = data_dir + '/meshes/2d/special/circles_in_square.vtk'
# Options. The value of 'parametric_hook' is the function that does the
# parametric study.
options = {
'nls' : 'newton', # Nonlinear solver
'ls' : 'ls', # Linear solver
'parametric_hook' : 'vary_omega1_size',
'output_dir' : 'output/r_omega1',
}
# Domain and subdomains.
default_diameter = 0.25
regions = {
'Omega' : 'all',
'Gamma_1' : ('vertices in (x < -0.999)', 'facet'),
'Gamma_2' : ('vertices in (x > 0.999)', 'facet'),
'Omega_1' : 'vertices by select_circ',
}
# FE field defines the FE approximation: 2_3_P1 = 2D, P1 on triangles.
field_1 = {
'name' : 'temperature',
'dtype' : 'real',
'shape' : (1,),
'region' : 'Omega',
'approx_order' : 1,
}
# Unknown and test functions (FE sense).
variables = {
't' : ('unknown field', 'temperature', 0),
's' : ('test field', 'temperature', 't'),
}
# Dirichlet boundary conditions.
ebcs = {
't1' : ('Gamma_1', {'t.0' : 2.0}),
't2' : ('Gamma_2', {'t.0' : -2.0}),
}
# Material coefficient c and source term value f.
material_1 = {
'name' : 'coef',
'values' : {
'val' : 1.0,
}
}
material_2 = {
'name' : 'source',
'values' : {
'val' : 10.0,
}
}
# Numerical quadrature and the equation.
integral_1 = {
'name' : 'i',
'order' : 2,
}
equations = {
'Poisson' : """dw_laplace.i.Omega( coef.val, s, t )
= dw_volume_lvf.i.Omega_1( source.val, s )"""
}
# Solvers.
solver_0 = {
'name' : 'ls',
'kind' : 'ls.scipy_direct',
}
solver_1 = {
'name' : 'newton',
'kind' : 'nls.newton',
'i_max' : 1,
'eps_a' : 1e-10,
'eps_r' : 1.0,
'macheps' : 1e-16,
'lin_red' : 1e-2, # Linear system error < (eps_a * lin_red).
'ls_red' : 0.1,
'ls_red_warp' : 0.001,
'ls_on' : 1.1,
'ls_min' : 1e-5,
'check' : 0,
'delta' : 1e-6,
}
functions = {
'select_circ': (lambda coors, domain=None:
select_circ(coors[:,0], coors[:,1], 0, default_diameter),),
}
# Functions.
def select_circ( x, y, z, diameter ):
"""Select circular subdomain of a given diameter."""
r = nm.sqrt( x**2 + y**2 )
out = nm.where(r < diameter)[0]
n = out.shape[0]
if n <= 3:
raise ValueError( 'too few vertices selected! (%d)' % n )
return out
def vary_omega1_size( problem ):
"""Vary size of \Omega1. Saves also the regions into options['output_dir'].
Input:
problem: Problem instance
Return:
a generator object:
1. creates new (modified) problem
2. yields the new (modified) problem and output container
3. use the output container for some logging
4. yields None (to signal next iteration to Application)
"""
from sfepy.discrete import Problem
from sfepy.solvers.ts import get_print_info
output.prefix = 'vary_omega1_size:'
diameters = nm.linspace( 0.1, 0.6, 7 ) + 0.001
ofn_trunk, output_format = problem.ofn_trunk, problem.output_format
output_dir = problem.output_dir
join = os.path.join
conf = problem.conf
cf = conf.get_raw( 'functions' )
n_digit, aux, d_format = get_print_info( len( diameters ) + 1 )
for ii, diameter in enumerate( diameters ):
output( 'iteration %d: diameter %3.2f' % (ii, diameter) )
cf['select_circ'] = (lambda coors, domain=None:
select_circ(coors[:,0], coors[:,1], 0, diameter),)
conf.edit('functions', cf)
problem = Problem.from_conf(conf)
problem.save_regions( join( output_dir, ('regions_' + d_format) % ii ),
['Omega_1'] )
region = problem.domain.regions['Omega_1']
if not region.has_cells():
raise ValueError('region %s has no cells!' % region.name)
ofn_trunk = ofn_trunk + '_' + (d_format % ii)
problem.setup_output(output_filename_trunk=ofn_trunk,
output_dir=output_dir,
output_format=output_format)
out = []
yield problem, out
out_problem, state = out[-1]
filename = join( output_dir,
('log_%s.txt' % d_format) % ii )
fd = open( filename, 'w' )
log_item = '$r(\Omega_1)$: %f\n' % diameter
fd.write( log_item )
fd.write( 'solution:\n' )
nm.savetxt(fd, state())
fd.close()
yield None
|
[
"sfepy.base.base.output",
"sfepy.discrete.Problem.from_conf"
] |
[((3347, 3371), 'numpy.sqrt', 'nm.sqrt', (['(x ** 2 + y ** 2)'], {}), '(x ** 2 + y ** 2)\n', (3354, 3371), True, 'import numpy as nm\n'), ((3381, 3403), 'numpy.where', 'nm.where', (['(r < diameter)'], {}), '(r < diameter)\n', (3389, 3403), True, 'import numpy as nm\n'), ((4097, 4121), 'numpy.linspace', 'nm.linspace', (['(0.1)', '(0.6)', '(7)'], {}), '(0.1, 0.6, 7)\n', (4108, 4121), True, 'import numpy as nm\n'), ((4450, 4505), 'sfepy.base.base.output', 'output', (["('iteration %d: diameter %3.2f' % (ii, diameter))"], {}), "('iteration %d: diameter %3.2f' % (ii, diameter))\n", (4456, 4505), False, 'from sfepy.base.base import output\n'), ((4699, 4722), 'sfepy.discrete.Problem.from_conf', 'Problem.from_conf', (['conf'], {}), '(conf)\n', (4716, 4722), False, 'from sfepy.discrete import Problem\n')]
|
"""
Classes holding information on global DOFs and mapping of all DOFs -
equations (active DOFs).
Helper functions for the equation mapping.
"""
import numpy as nm
import scipy.sparse as sp
from sfepy.base.base import assert_, Struct, basestr
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value, EssentialBC, \
PeriodicBC, DGPeriodicBC, DGEssentialBC
def expand_nodes_to_dofs(nods, n_dof_per_node):
"""
Expand DOF node indices into DOFs given a constant number of DOFs
per node.
"""
dofs = nm.repeat(nods, n_dof_per_node)
dofs.shape = (nods.shape[0], n_dof_per_node)
idof = nm.arange(n_dof_per_node, dtype=nm.int32)
dofs = n_dof_per_node * dofs + idof
return dofs
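# Example: expand_nodes_to_dofs(nm.array([0, 2]), 2) gives [[0, 1], [4, 5]],
# i.e. node i owns DOFs n_dof_per_node * i + 0, 1, ...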
def expand_nodes_to_equations(nods, dof_names, all_dof_names):
"""
Expand vector of node indices to equations (DOF indices) based on
the DOF-per-node count.
DOF names must be already canonized.
Returns
-------
eq : array
The equations/DOF indices in the node-by-node order.
"""
dpn = len(all_dof_names)
nc = len(dof_names)
eq = nm.empty(len(nods) * nc, dtype=nm.int32)
for ii, dof in enumerate(dof_names):
idof = all_dof_names.index(dof)
eq[ii::nc] = dpn * nods + idof
return eq
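# Example: expand_nodes_to_equations(nm.array([3]), ['u.1'], ['u.0', 'u.1'])
# gives [7]: node 3 contributes equation dpn * 3 + 1 = 7 for DOF 'u.1'.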
def resolve_chains(master_slave, chains):
"""
Resolve EPBC chains - e.g. in corner nodes.
"""
for chain in chains:
slave = chain[-1]
master_slave[chain[:-1]] = slave + 1
master_slave[slave] = - chain[0] - 1 # Any of masters...
def group_chains(chain_list):
"""
Group EPBC chains.
"""
chains = []
while len(chain_list):
chain = set(chain_list.pop(0))
## print ':', chain
ii = 0
while ii < len(chain_list):
c1 = sorted(chain_list[ii])
## print '--', ii, c1, chain
is0 = c1[0] in chain
is1 = c1[1] in chain
if is0 and is1:
chain_list.pop(ii)
elif is0 or is1:
chain.update(c1)
chain_list.pop(ii)
ii = 0
else:
ii += 1
## print ii, chain, chain_list
## print '->', chain
## print chain_list
chains.append(list(chain))
## print 'EPBC chain groups:', chains
aux = {}
for chain in chains:
aux.setdefault(len(chain), [0])[0] += 1
## print 'EPBC chain counts:', aux
return chains
class DofInfo(Struct):
"""
Global DOF information, i.e. ordering of DOFs of the state (unknown)
variables in the global state vector.
"""
def __init__(self, name):
Struct.__init__(self, name=name)
self.n_var = 0
self.var_names = []
self.n_dof = {}
self.ptr = [0]
self.indx = {}
self.details = {}
def _update_after_append(self, name):
self.ptr.append(self.ptr[-1] + self.n_dof[name])
ii = self.n_var
self.indx[name] = slice(int(self.ptr[ii]), int(self.ptr[ii+1]))
self.n_var += 1
def append_variable(self, var, active=False):
"""
Append DOFs of the given variable.
Parameters
----------
var : Variable instance
The variable to append.
active : bool, optional
When True, only active (non-constrained) DOFs are considered.
"""
name = var.name
if name in self.var_names:
raise ValueError('variable %s already present!' % name)
self.var_names.append(name)
self.n_dof[name], self.details[name] = var.get_dof_info(active=active)
self._update_after_append(name)
def append_raw(self, name, n_dof):
"""
Append raw DOFs.
Parameters
----------
name : str
The name of variable the DOFs correspond to.
n_dof : int
The number of DOFs.
"""
if name in self.var_names:
raise ValueError('variable %s already present!' % name)
self.var_names.append(name)
self.n_dof[name], self.details[name] = n_dof, None
self._update_after_append(name)
def update(self, name, n_dof):
"""
Set the number of DOFs of the given variable.
Parameters
----------
name : str
The name of variable the DOFs correspond to.
n_dof : int
The number of DOFs.
"""
if not name in self.var_names:
raise ValueError('variable %s is not present!' % name)
ii = self.var_names.index(name)
delta = n_dof - self.n_dof[name]
self.n_dof[name] = n_dof
for iv, nn in enumerate(self.var_names[ii:]):
self.ptr[ii+iv+1] += delta
self.indx[nn] = slice(self.ptr[ii+iv], self.ptr[ii+iv+1])
def get_info(self, var_name):
"""
Return information on DOFs of the given variable.
Parameters
----------
var_name : str
The name of the variable.
"""
return Struct(name='%s_dof_info' % var_name,
var_name=var_name,
n_dof=self.n_dof[var_name],
indx=self.indx[var_name],
details=self.details[var_name])
def get_subset_info(self, var_names):
"""
Return global DOF information for selected variables
only. Silently ignores non-existing variable names.
Parameters
----------
var_names : list
The names of the selected variables.
"""
di = DofInfo(self.name + ':subset')
for var_name in var_names:
if var_name not in self.var_names:
continue
di.append_raw(var_name, self.n_dof[var_name])
return di
def get_n_dof_total(self):
"""
Return the total number of DOFs of all state variables.
"""
return self.ptr[-1]
def is_active_bc(bc, ts=None, functions=None):
"""
Check whether the given boundary condition is active in the current
time.
Returns
-------
active : bool
True if the condition `bc` is active.
"""
if (bc.times is None) or (ts is None):
active = True
elif isinstance(bc.times, list):
for tt in bc.times:
if tt[0] <= ts.time < tt[1]:
active = True
break
else:
active = False
else:
if isinstance(bc.times, basestr):
if functions is not None:
fun = functions[bc.times]
else:
raise ValueError('no functions given for bc %s!' % bc.name)
elif isinstance(bc.times, Function):
fun = bc.times
else:
raise ValueError('unknown times type! (%s)'
% type(bc.times))
active = fun(ts)
return active
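# Example: with bc.times == [(0.0, 1.0)], the condition is active for
# ts.time == 0.5 but not for ts.time == 1.5.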
class EquationMap(Struct):
"""
Map all DOFs to equations for active DOFs.
"""
def __init__(self, name, dof_names, var_di):
Struct.__init__(self, name=name, dof_names=dof_names, var_di=var_di)
self.dpn = len(self.dof_names)
self.eq = nm.arange(var_di.n_dof, dtype=nm.int32)
self.n_dg_ebc = 0
self.dg_ebc_names = {}
self.dg_ebc = {}
self.dg_ebc_val = {}
self.n_dg_epbc = 0
self.dg_epbc_names = []
self.dg_epbc = []
def _init_empty(self, field):
self.val_ebc = nm.empty((0,), dtype=field.dtype)
if field.get('unused_dofs') is None:
self.eqi = nm.arange(self.var_di.n_dof, dtype=nm.int32)
else:
self._mark_unused(field)
self.eqi = nm.compress(self.eq >= 0, self.eq)
self.eq[self.eqi] = nm.arange(self.eqi.shape[0], dtype=nm.int32)
self.eq_ebc = nm.empty((0,), dtype=nm.int32)
self.master = nm.empty((0,), dtype=nm.int32)
self.slave = nm.empty((0,), dtype=nm.int32)
self.n_eq = self.eqi.shape[0]
self.n_ebc = self.eq_ebc.shape[0]
self.n_epbc = self.master.shape[0]
def _mark_unused(self, field):
unused_dofs = field.get('unused_dofs')
if unused_dofs is not None:
unused = expand_nodes_to_equations(field.unused_dofs,
self.dof_names, self.dof_names)
self.eq[unused] = -3
def map_equations(self, bcs, field, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Parameters
----------
bcs : Conditions instance
The Dirichlet or periodic boundary conditions (single
condition instances). The dof names in the conditions must
already be canonized.
field : Field instance
The field of the variable holding the DOFs.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The registered functions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
warn : bool, optional
If True, warn about BC on non-existent nodes.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
Notes
-----
- Periodic bc: master and slave DOFs must belong to the same
field (variables can differ, though).
"""
if bcs is None:
self._init_empty(field)
return set()
eq_ebc = nm.zeros((self.var_di.n_dof,), dtype=nm.int32)
val_ebc = nm.zeros((self.var_di.n_dof,), dtype=field.dtype)
master_slave = nm.zeros((self.var_di.n_dof,), dtype=nm.int32)
chains = []
active_bcs = set()
for bc in bcs:
# Skip conditions that are not active in the current time.
if not is_active_bc(bc, ts=ts, functions=functions):
continue
active_bcs.add(bc.key)
if isinstance(bc, DGEssentialBC):
ntype = "DGEBC"
region = bc.region
elif isinstance(bc, DGPeriodicBC):
ntype = "DGEPBC"
region = bc.regions[0]
elif isinstance(bc, EssentialBC):
ntype = 'EBC'
region = bc.region
elif isinstance(bc, PeriodicBC):
ntype = 'EPBC'
region = bc.regions[0]
if warn:
clean_msg = ('warning: ignoring nonexistent %s node (%s) in '
% (ntype, self.var_di.var_name))
else:
clean_msg = None
# Get master region nodes.
master_nod_list = field.get_dofs_in_region(region)
if len(master_nod_list) == 0:
continue
if ntype == 'EBC': # EBC.
dofs, val = bc.dofs
##
# Evaluate EBC values.
fun = get_condition_value(val, functions, 'EBC', bc.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(ts, coors,
bc=bc, problem=problem)
nods, vv = field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dof_names)
# Duplicates removed here...
eq_ebc[eq] = 1
if vv is not None: val_ebc[eq] = nm.ravel(vv)
elif ntype == "DGEBC":
dofs, val = bc.dofs
##
# Evaluate EBC values.
fun = get_condition_value(val, functions, 'EBC', bc.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(ts, coors,
bc=bc, problem=problem)
values = field.get_bc_facet_values(fun, region, diff=bc.diff)
bc2bfi = field.get_bc_facet_idx(region)
self.dg_ebc_val.setdefault(bc.diff, []).append(values)
self.dg_ebc.setdefault(bc.diff, []).append(bc2bfi)
self.n_dg_ebc += 1
elif ntype == "DGEPBC":
# ensure matching boundaries?
master_bc2bfi = field.get_bc_facet_idx(region)
slave_bc2bfi = field.get_bc_facet_idx(bc.regions[1])
self.dg_epbc.append((master_bc2bfi, slave_bc2bfi))
self.n_dg_epbc += 1
else: # EPBC.
region = bc.regions[1]
slave_nod_list = field.get_dofs_in_region(region)
nmaster = nm.unique(master_nod_list)
# Treat fields not covering the whole domain.
if nmaster[0] == -1:
nmaster = nmaster[1:]
nslave = nm.unique(slave_nod_list)
# Treat fields not covering the whole domain.
if nslave[0] == -1:
nslave = nslave[1:]
## print nmaster + 1
## print nslave + 1
if nmaster.shape != nslave.shape:
msg = 'EPBC list lengths do not match!\n(%s,\n %s)' %\
(nmaster, nslave)
raise ValueError(msg)
if (nmaster.shape[0] == 0) and (nslave.shape[0] == 0):
continue
mcoor = field.get_coor(nmaster)
scoor = field.get_coor(nslave)
fun = get_condition_value(bc.match, functions, 'EPBC', bc.name)
if isinstance(fun, Function):
i1, i2 = fun(mcoor, scoor)
else:
i1, i2 = fun
## print nm.c_[mcoor[i1], scoor[i2]]
## print nm.c_[nmaster[i1], nslave[i2]] + 1
meq = expand_nodes_to_equations(nmaster[i1], bc.dofs[0],
self.dof_names)
seq = expand_nodes_to_equations(nslave[i2], bc.dofs[1],
self.dof_names)
m_assigned = nm.where(master_slave[meq] != 0)[0]
s_assigned = nm.where(master_slave[seq] != 0)[0]
if m_assigned.size or s_assigned.size: # Chain EPBC.
aux = master_slave[meq[m_assigned]]
sgn = nm.sign(aux)
om_chain = zip(meq[m_assigned], (aux - sgn) * sgn)
chains.extend(om_chain)
aux = master_slave[seq[s_assigned]]
sgn = nm.sign(aux)
os_chain = zip(seq[s_assigned], (aux - sgn) * sgn)
chains.extend(os_chain)
m_chain = zip(meq[m_assigned], seq[m_assigned])
chains.extend(m_chain)
msd = nm.setdiff1d(s_assigned, m_assigned)
s_chain = zip(meq[msd], seq[msd])
chains.extend(s_chain)
msa = nm.union1d(m_assigned, s_assigned)
ii = nm.setdiff1d(nm.arange(meq.size), msa)
master_slave[meq[ii]] = seq[ii] + 1
master_slave[seq[ii]] = - meq[ii] - 1
else:
master_slave[meq] = seq + 1
master_slave[seq] = - meq - 1
chains = group_chains(chains)
resolve_chains(master_slave, chains)
self.master = nm.nonzero(master_slave > 0)[0]
self.slave = master_slave[self.master] - 1
# Propagate EBCs via PBCs.
mask = eq_ebc[self.master] > 0
im0 = self.master[mask]
im1 = self.slave[mask]
mask = eq_ebc[self.slave] > 0
is0 = self.slave[mask]
is1 = self.master[mask]
val_ebc[im1] = val_ebc[im0]
eq_ebc[im1] = eq_ebc[im0]
val_ebc[is1] = val_ebc[is0]
eq_ebc[is1] = eq_ebc[is0]
self.eq_ebc = nm.nonzero(eq_ebc > 0)[0]
self.val_ebc = val_ebc[self.eq_ebc]
assert_((self.eq_ebc.shape == self.val_ebc.shape))
self.eq[self.eq_ebc] = -2
self.eq[self.master] = -1
self._mark_unused(field)
self.eqi = self.eq[self.eq >= 0]
self.eq[self.eqi] = nm.arange(self.eqi.shape[0], dtype=nm.int32)
self.eq[self.master] = self.eq[self.slave]
self.n_eq = self.eqi.shape[0]
self.n_ebc = self.eq_ebc.shape[0]
self.n_epbc = self.master.shape[0]
return active_bcs
def get_operator(self):
"""
Get the matrix operator :math:`R` corresponding to the equation
mapping, such that the restricted matrix :math:`A_r` can be
obtained from the full matrix :math:`A` by :math:`A_r = R^T A
R`. All the matrices are w.r.t. a single variables that uses
this mapping.
Returns
-------
mtx : coo_matrix
The matrix :math:`R`.
"""
# EBC.
rows = self.eqi
cols = nm.arange(self.n_eq, dtype=nm.int32)
# EPBC.
ic = self.eq[self.slave]
ii = ic >= 0
rows = nm.r_[rows, self.master[ii]]
cols = nm.r_[cols, ic[ii]]
ones = nm.ones(rows.shape[0], dtype=nm.float64)
mtx = sp.coo_matrix((ones, (rows, cols)),
shape=(self.eq.shape[0], self.n_eq))
return mtx
|
[
"sfepy.base.base.Struct",
"sfepy.discrete.conditions.get_condition_value",
"sfepy.base.base.Struct.__init__",
"sfepy.base.base.assert_"
] |
[((570, 601), 'numpy.repeat', 'nm.repeat', (['nods', 'n_dof_per_node'], {}), '(nods, n_dof_per_node)\n', (579, 601), True, 'import numpy as nm\n'), ((663, 704), 'numpy.arange', 'nm.arange', (['n_dof_per_node'], {'dtype': 'nm.int32'}), '(n_dof_per_node, dtype=nm.int32)\n', (672, 704), True, 'import numpy as nm\n'), ((2711, 2743), 'sfepy.base.base.Struct.__init__', 'Struct.__init__', (['self'], {'name': 'name'}), '(self, name=name)\n', (2726, 2743), False, 'from sfepy.base.base import assert_, Struct, basestr\n'), ((5124, 5271), 'sfepy.base.base.Struct', 'Struct', ([], {'name': "('%s_dof_info' % var_name)", 'var_name': 'var_name', 'n_dof': 'self.n_dof[var_name]', 'indx': 'self.indx[var_name]', 'details': 'self.details[var_name]'}), "(name='%s_dof_info' % var_name, var_name=var_name, n_dof=self.n_dof[\n var_name], indx=self.indx[var_name], details=self.details[var_name])\n", (5130, 5271), False, 'from sfepy.base.base import assert_, Struct, basestr\n'), ((7147, 7215), 'sfepy.base.base.Struct.__init__', 'Struct.__init__', (['self'], {'name': 'name', 'dof_names': 'dof_names', 'var_di': 'var_di'}), '(self, name=name, dof_names=dof_names, var_di=var_di)\n', (7162, 7215), False, 'from sfepy.base.base import assert_, Struct, basestr\n'), ((7274, 7313), 'numpy.arange', 'nm.arange', (['var_di.n_dof'], {'dtype': 'nm.int32'}), '(var_di.n_dof, dtype=nm.int32)\n', (7283, 7313), True, 'import numpy as nm\n'), ((7570, 7603), 'numpy.empty', 'nm.empty', (['(0,)'], {'dtype': 'field.dtype'}), '((0,), dtype=field.dtype)\n', (7578, 7603), True, 'import numpy as nm\n'), ((7928, 7958), 'numpy.empty', 'nm.empty', (['(0,)'], {'dtype': 'nm.int32'}), '((0,), dtype=nm.int32)\n', (7936, 7958), True, 'import numpy as nm\n'), ((7982, 8012), 'numpy.empty', 'nm.empty', (['(0,)'], {'dtype': 'nm.int32'}), '((0,), dtype=nm.int32)\n', (7990, 8012), True, 'import numpy as nm\n'), ((8034, 8064), 'numpy.empty', 'nm.empty', (['(0,)'], {'dtype': 'nm.int32'}), '((0,), dtype=nm.int32)\n', (8042, 8064), True, 'import numpy as nm\n'), ((9733, 9779), 'numpy.zeros', 'nm.zeros', (['(self.var_di.n_dof,)'], {'dtype': 'nm.int32'}), '((self.var_di.n_dof,), dtype=nm.int32)\n', (9741, 9779), True, 'import numpy as nm\n'), ((9798, 9847), 'numpy.zeros', 'nm.zeros', (['(self.var_di.n_dof,)'], {'dtype': 'field.dtype'}), '((self.var_di.n_dof,), dtype=field.dtype)\n', (9806, 9847), True, 'import numpy as nm\n'), ((9871, 9917), 'numpy.zeros', 'nm.zeros', (['(self.var_di.n_dof,)'], {'dtype': 'nm.int32'}), '((self.var_di.n_dof,), dtype=nm.int32)\n', (9879, 9917), True, 'import numpy as nm\n'), ((16298, 16346), 'sfepy.base.base.assert_', 'assert_', (['(self.eq_ebc.shape == self.val_ebc.shape)'], {}), '(self.eq_ebc.shape == self.val_ebc.shape)\n', (16305, 16346), False, 'from sfepy.base.base import assert_, Struct, basestr\n'), ((16522, 16566), 'numpy.arange', 'nm.arange', (['self.eqi.shape[0]'], {'dtype': 'nm.int32'}), '(self.eqi.shape[0], dtype=nm.int32)\n', (16531, 16566), True, 'import numpy as nm\n'), ((17268, 17304), 'numpy.arange', 'nm.arange', (['self.n_eq'], {'dtype': 'nm.int32'}), '(self.n_eq, dtype=nm.int32)\n', (17277, 17304), True, 'import numpy as nm\n'), ((17471, 17511), 'numpy.ones', 'nm.ones', (['rows.shape[0]'], {'dtype': 'nm.float64'}), '(rows.shape[0], dtype=nm.float64)\n', (17478, 17511), True, 'import numpy as nm\n'), ((17526, 17598), 'scipy.sparse.coo_matrix', 'sp.coo_matrix', (['(ones, (rows, cols))'], {'shape': '(self.eq.shape[0], self.n_eq)'}), '((ones, (rows, cols)), shape=(self.eq.shape[0], self.n_eq))\n', (17539, 17598), True, 'import scipy.sparse as sp\n'), ((7673, 7717), 'numpy.arange', 'nm.arange', (['self.var_di.n_dof'], {'dtype': 'nm.int32'}), '(self.var_di.n_dof, dtype=nm.int32)\n', (7682, 7717), True, 'import numpy as nm\n'), ((7793, 7827), 'numpy.compress', 'nm.compress', (['(self.eq >= 0)', 'self.eq'], {}), '(self.eq >= 0, self.eq)\n', (7804, 7827), True, 'import numpy as nm\n'), ((7860, 7904), 'numpy.arange', 'nm.arange', (['self.eqi.shape[0]'], {'dtype': 'nm.int32'}), '(self.eqi.shape[0], dtype=nm.int32)\n', (7869, 7904), True, 'import numpy as nm\n'), ((15735, 15763), 'numpy.nonzero', 'nm.nonzero', (['(master_slave > 0)'], {}), '(master_slave > 0)\n', (15745, 15763), True, 'import numpy as nm\n'), ((16220, 16242), 'numpy.nonzero', 'nm.nonzero', (['(eq_ebc > 0)'], {}), '(eq_ebc > 0)\n', (16230, 16242), True, 'import numpy as nm\n'), ((11182, 11233), 'sfepy.discrete.conditions.get_condition_value', 'get_condition_value', (['val', 'functions', '"""EBC"""', 'bc.name'], {}), "(val, functions, 'EBC', bc.name)\n", (11201, 11233), False, 'from sfepy.discrete.conditions import get_condition_value, EssentialBC, PeriodicBC, DGPeriodicBC, DGEssentialBC\n'), ((11712, 11724), 'numpy.ravel', 'nm.ravel', (['vv'], {}), '(vv)\n', (11720, 11724), True, 'import numpy as nm\n'), ((11877, 11928), 'sfepy.discrete.conditions.get_condition_value', 'get_condition_value', (['val', 'functions', '"""EBC"""', 'bc.name'], {}), "(val, functions, 'EBC', bc.name)\n", (11896, 11928), False, 'from sfepy.discrete.conditions import get_condition_value, EssentialBC, PeriodicBC, DGPeriodicBC, DGEssentialBC\n'), ((12915, 12941), 'numpy.unique', 'nm.unique', (['master_nod_list'], {}), '(master_nod_list)\n', (12924, 12941), True, 'import numpy as nm\n'), ((13109, 13134), 'numpy.unique', 'nm.unique', (['slave_nod_list'], {}), '(slave_nod_list)\n', (13118, 13134), True, 'import numpy as nm\n'), ((13778, 13835), 'sfepy.discrete.conditions.get_condition_value', 'get_condition_value', (['bc.match', 'functions', '"""EPBC"""', 'bc.name'], {}), "(bc.match, functions, 'EPBC', bc.name)\n", (13797, 13835), False, 'from sfepy.discrete.conditions import get_condition_value, EssentialBC, PeriodicBC, DGPeriodicBC, DGEssentialBC\n'), ((14403, 14435), 'numpy.where', 'nm.where', (['(master_slave[meq] != 0)'], {}), '(master_slave[meq] != 0)\n', (14411, 14435), True, 'import numpy as nm\n'), ((14468, 14500), 'numpy.where', 'nm.where', (['(master_slave[seq] != 0)'], {}), '(master_slave[seq] != 0)\n', (14476, 14500), True, 'import numpy as nm\n'), ((14655, 14667), 'numpy.sign', 'nm.sign', (['aux'], {}), '(aux)\n', (14662, 14667), True, 'import numpy as nm\n'), ((14866, 14878), 'numpy.sign', 'nm.sign', (['aux'], {}), '(aux)\n', (14873, 14878), True, 'import numpy as nm\n'), ((15133, 15169), 'numpy.setdiff1d', 'nm.setdiff1d', (['s_assigned', 'm_assigned'], {}), '(s_assigned, m_assigned)\n', (15145, 15169), True, 'import numpy as nm\n'), ((15294, 15328), 'numpy.union1d', 'nm.union1d', (['m_assigned', 's_assigned'], {}), '(m_assigned, s_assigned)\n', (15304, 15328), True, 'import numpy as nm\n'), ((15367, 15386), 'numpy.arange', 'nm.arange', (['meq.size'], {}), '(meq.size)\n', (15376, 15386), True, 'import numpy as nm\n')]
|
import typing as t
from sqlalchemy.orm import RelationshipProperty, selectinload
from sqlmodel import select
def get_schema_context(info):
schema_context = info.context["auto_schema"]
return schema_context
def get_model_for_type(info, type_):
schema_context = get_schema_context(info)
model = schema_context["type_to_model"][type_]
return model
def get_strawberry_fields_for_type(info, type_):
schema_context = get_schema_context(info)
strawberry_fields = schema_context["type_to_type_definition"][type_].fields
return strawberry_fields
def get_mapper_for_column(info, column):
return column.property.mapper
def get_type_for_column(info, column):
schema_context = get_schema_context(info)
return schema_context["mapper_to_type"][get_mapper_for_column(info, column)]
def get_graphql_python_name_map_for_type(info, type_):
"""Create a mapping from graphql field names to python attribute names"""
strawberry_fields = get_strawberry_fields_for_type(info, type_)
name_map = {
f.get_graphql_name(info.schema.config.auto_camel_case): f.python_name
for f in strawberry_fields
}
return name_map
def get_selected_field_columns(info, type_, selected_fields, model=None):
model = get_model_for_type(info, type_) if model is None else model
name_map = get_graphql_python_name_map_for_type(info, type_)
selected_field_columns = [
(s, getattr(model, name_map[s.name])) for s in selected_fields
]
return selected_field_columns
def get_selected_scalar_non_scalar_field_columns(
info, type_, selected_fields, model=None
):
selected_field_columns = get_selected_field_columns(
info, type_, selected_fields, model
)
scalar_field_columns = [
fc
for fc in selected_field_columns
if not isinstance(fc[1].property, RelationshipProperty)
]
non_scalar_field_columns = [
c
for c in selected_field_columns
if isinstance(c[1].property, RelationshipProperty)
]
return scalar_field_columns, non_scalar_field_columns
def do_nested_select(info, type_, query, selected_field, column, parent_model):
selected_fields = [s for s in selected_field.selections]
model = get_model_for_type(info, type_)
(
scalar_field_columns,
non_scalar_field_columns,
) = get_selected_scalar_non_scalar_field_columns(
info, type_, selected_fields, model
)
# TODO: selectinload is good for one to many relationships because it does
# not create cartesian product issues.
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#what-kind-of-loading-to-use
# however we probably want joined loading for many to one relationships
# and we can set innerjoin to true if the relationship is nonnullable
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#joined-eager-loading
# one issue with selectinload is it will not work with nested relationships
    # that have composite primary keys. This shows up on SQL Server.
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#select-in-loading
subquery = selectinload(column)
query = query.options(subquery)
    # TODO: this nested select is untested and probably doesn't work; we want to
    # use chained loading to specify further levels
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#relationship-loading-with-loader-options
if non_scalar_field_columns:
for field_column in non_scalar_field_columns:
field, column = field_column
column_type = get_type_for_column(info, column)
do_nested_select(info, column_type, subquery, field, column, model)
return query
def eq_filter(column, value):
return column == value
def neq_filter(column, value):
return column != value
def lt_filter(column, value):
return column < value
def lte_filter(column, value):
return column <= value
def gt_filter(column, value):
return column > value
def gte_filter(column, value):
return column >= value
def contains_filter(column, value):
return column.contains(value)
# TODO: write more filters
filter_map = {
"eq": eq_filter,
"neq": neq_filter,
"lt": lt_filter,
"lte": lte_filter,
"gt": gt_filter,
"gte": gte_filter,
"contains": contains_filter,
}
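# Example: filter_map["gte"](SomeModel.age, 21) builds the SQLAlchemy expression
# SomeModel.age >= 21, which do_filter() below passes to query.where(); SomeModel
# is a hypothetical mapped model used only for illustration.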
def do_filter(info, type_, query, column, filter):
from api.strawberry_sqlalchemy.schema_generation import (
NonScalarComparison,
ScalarComparison,
)
scalar = isinstance(filter, ScalarComparison)
non_scalar = isinstance(filter, NonScalarComparison)
if scalar:
for filter_key in filter.__dict__.keys():
value = getattr(filter, filter_key)
if value is not None:
filter_func = filter_map[filter_key]
query = query.where(filter_func(column, value))
elif non_scalar:
# TODO: implement non scalar filter processing
raise NotImplementedError("Non scalar filters are not yet implemented.")
return query
def do_where(info, type_, query, where_clause):
from api.strawberry_sqlalchemy.schema_generation import (
NonScalarComparison,
ScalarComparison,
)
if where_clause is None:
return query
isinstance(where_clause, ScalarComparison)
non_scalar = isinstance(where_clause, NonScalarComparison)
model = get_model_for_type(info, type_)
name_map = get_graphql_python_name_map_for_type(info, type_)
if non_scalar:
for name in where_clause.__dict__.keys():
filter_ = getattr(where_clause, name)
if filter_ is not None:
column = (
None if name not in name_map else getattr(model, name_map[name])
)
query = do_filter(info, type_, query, column, filter_)
return query
def create_all_type_resolver(type_: type):
"""create a resolver for all instances of a type. Supports various filters"""
from api.strawberry_sqlalchemy.schema_generation import (
create_non_scalar_comparison_expression,
create_non_scalar_order_by_expression,
create_non_scalar_select_columns_enum,
)
def all_type_resolver(
self,
info,
where: t.Optional[create_non_scalar_comparison_expression(type_)] = None,
limit: t.Optional[int] = None,
offset: t.Optional[int] = None,
orderBy: t.Optional[create_non_scalar_order_by_expression(type_)] = None,
distinctOn: t.Optional[
t.List[create_non_scalar_select_columns_enum(type_)]
] = None,
) -> t.List[type_]:
# handle the case where we are querying a many attribute
# in a one to many relationship
# the many attribute uses an all_type_query resolver so that the user
# can supply filters. but strawberry.field.get_result tries to
# load the nested attribute using the resolver.
# because we are using eager loading we actually just want to access
# the attribute on the parent using get_attr(model, python_name)
# we don't want to generate a nested query
# TODO: to check that we are not at the root we check that the prev
# path is not None. Not sure if this is always true!
if info.path.prev is not None:
return getattr(self, info.python_name)
model = get_model_for_type(info, type_)
db = info.context["db"]
selected_fields = [s for s in info.selected_fields[0].selections]
(
scalar_field_columns,
non_scalar_field_columns,
) = get_selected_scalar_non_scalar_field_columns(info, type_, selected_fields)
query = select(model)
query = do_where(info, type_, query, where)
if non_scalar_field_columns:
for field_column in non_scalar_field_columns:
field, column = field_column
column_type = get_type_for_column(info, column)
query = do_nested_select(info, column_type, query, field, column, model)
rows = db.exec(query).all()
return rows
return all_type_resolver
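# Hypothetical wiring sketch (added; not part of the original module): the
# generated resolver is typically attached to a strawberry field on the Query
# type, e.g.
#     all_users: t.List[UserType] = strawberry.field(
#         resolver=create_all_type_resolver(UserType),
#     )
# where ``UserType`` is an assumed SQLModel-mapped strawberry type; the exact
# wiring in the original project may differ.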
|
[
"sqlmodel.select"
] |
[((3185, 3205), 'sqlalchemy.orm.selectinload', 'selectinload', (['column'], {}), '(column)\n', (3197, 3205), False, 'from sqlalchemy.orm import RelationshipProperty, selectinload\n'), ((7813, 7826), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (7819, 7826), False, 'from sqlmodel import select\n'), ((6373, 6419), 'api.strawberry_sqlalchemy.schema_generation.create_non_scalar_comparison_expression', 'create_non_scalar_comparison_expression', (['type_'], {}), '(type_)\n', (6412, 6419), False, 'from api.strawberry_sqlalchemy.schema_generation import create_non_scalar_comparison_expression, create_non_scalar_order_by_expression, create_non_scalar_select_columns_enum\n'), ((6536, 6580), 'api.strawberry_sqlalchemy.schema_generation.create_non_scalar_order_by_expression', 'create_non_scalar_order_by_expression', (['type_'], {}), '(type_)\n', (6573, 6580), False, 'from api.strawberry_sqlalchemy.schema_generation import create_non_scalar_comparison_expression, create_non_scalar_order_by_expression, create_non_scalar_select_columns_enum\n'), ((6641, 6685), 'api.strawberry_sqlalchemy.schema_generation.create_non_scalar_select_columns_enum', 'create_non_scalar_select_columns_enum', (['type_'], {}), '(type_)\n', (6678, 6685), False, 'from api.strawberry_sqlalchemy.schema_generation import create_non_scalar_comparison_expression, create_non_scalar_order_by_expression, create_non_scalar_select_columns_enum\n')]
|
"""
Functions for building queries, from nodes or SQL.
"""
import ast
import datetime
import operator
import re
from typing import Any, Callable, Dict, List, Literal, Optional, Set, Tuple, cast
from dateutil.parser import parse
from sqlalchemy.engine.url import make_url
from sqlalchemy.schema import Column as SqlaColumn
from sqlalchemy.sql.elements import BinaryExpression
from sqlalchemy.sql.expression import ClauseElement
from sqlmodel import Session, select
from sqloxide import parse_sql
from datajunction.constants import DEFAULT_DIMENSION_COLUMN
from datajunction.errors import DJError, DJInvalidInputException, ErrorCode
from datajunction.models.node import Node, NodeType
from datajunction.models.query import QueryCreate
from datajunction.sql.dag import (
get_database_for_nodes,
get_dimensions,
get_referenced_columns_from_sql,
get_referenced_columns_from_tree,
)
from datajunction.sql.parse import (
find_nodes_by_key,
find_nodes_by_key_with_parent,
get_expression_from_projection,
)
from datajunction.sql.transpile import get_query, get_select_for_node
from datajunction.typing import (
Expression,
Identifier,
Join,
Projection,
Relation,
Select,
)
from datajunction.utils import get_session
FILTER_RE = re.compile(r"([\w\./_]+)(<=|<|>=|>|!=|=)(.+)")
FilterOperator = Literal[">", ">=", "<", "<=", "=", "!="]
COMPARISONS: Dict[FilterOperator, Callable[[Any, Any], bool]] = {
">": operator.gt,
">=": operator.ge,
"<": operator.lt,
"<=": operator.le,
"=": operator.eq,
"!=": operator.ne,
}
def parse_filter(filter_: str) -> Tuple[str, FilterOperator, str]:
"""
Parse a filter into name, op, value.
"""
match = FILTER_RE.match(filter_)
if not match:
raise DJInvalidInputException(
message=f'The filter "{filter_}" is invalid',
errors=[
DJError(
code=ErrorCode.INVALID_FILTER_PATTERN,
message=(
f'The filter "{filter_}" is not a valid filter. Filters should '
"consist of a dimension name, follow by a valid operator "
"(<=|<|>=|>|!=|=), followed by a value. If the value is a "
"string or date/time it should be enclosed in single quotes."
),
debug={"context": {"filter": filter_}},
),
],
)
name, operator_, value = match.groups()
operator_ = cast(FilterOperator, operator_)
return name, operator_, value
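# Examples (added for illustration; consistent with FILTER_RE above):
#     parse_filter("core.num_comments>=10")    -> ("core.num_comments", ">=", "10")
#     parse_filter("core.users.gender='male'") -> ("core.users.gender", "=", "'male'")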
def get_filter(columns: Dict[str, SqlaColumn], filter_: str) -> BinaryExpression:
"""
Build a SQLAlchemy filter.
"""
name, operator_, value = parse_filter(filter_)
if name not in columns:
raise Exception(f"Invalid column name: {name}")
column = columns[name]
comparison = COMPARISONS[operator_]
if column.type.python_type in [datetime.date, datetime.datetime]:
try:
value = str(parse(value))
except Exception as ex:
raise Exception(f"Invalid date or datetime value: {value}") from ex
else:
try:
value = ast.literal_eval(value)
except Exception as ex:
raise Exception(f"Invalid value: {value}") from ex
return comparison(column, value)
def get_dimensions_from_filters(filters: List[str]) -> Set[str]:
"""
Extract dimensions from filters passed to the metric API.
"""
return {parse_filter(filter_)[0] for filter_ in filters}
def get_query_for_node( # pylint: disable=too-many-locals
session: Session,
node: Node,
groupbys: List[str],
filters: List[str],
database_id: Optional[int] = None,
) -> QueryCreate:
"""
Return a DJ QueryCreate object from a given node.
"""
# check that groupbys and filters are valid dimensions
requested_dimensions = set(groupbys) | get_dimensions_from_filters(filters)
valid_dimensions = set(get_dimensions(node))
if not requested_dimensions <= valid_dimensions:
invalid = sorted(requested_dimensions - valid_dimensions)
plural = "s" if len(invalid) > 1 else ""
raise Exception(f"Invalid dimension{plural}: {', '.join(invalid)}")
# which columns are needed from the parents; this is used to determine the database
# where the query will run
referenced_columns = get_referenced_columns_from_sql(node.expression, node.parents)
# extract all referenced dimensions so we can join the node with them
dimensions: Dict[str, Node] = {}
for dimension in requested_dimensions:
name, column = dimension.rsplit(".", 1)
if (
name not in {parent.name for parent in node.parents}
and name not in dimensions
):
dimensions[name] = session.exec(select(Node).where(Node.name == name)).one()
referenced_columns[name].add(column)
# find database
nodes = [node]
nodes.extend(dimensions.values())
database = get_database_for_nodes(session, nodes, referenced_columns, database_id)
# base query
node_select = get_select_for_node(node, database)
source = node_select.froms[0]
# join with dimensions
for dimension in dimensions.values():
subquery = get_select_for_node(
dimension,
database,
referenced_columns[dimension.name],
).alias(dimension.name)
condition = find_on_clause(node, source, dimension, subquery)
node_select = node_select.select_from(source.join(subquery, condition))
columns = {
f"{column.table.name}.{column.name}": column
for from_ in node_select.froms
for column in from_.columns
}
# filter
node_select = node_select.filter(
*[get_filter(columns, filter_) for filter_ in filters]
)
# groupby
node_select = node_select.group_by(*[columns[groupby] for groupby in groupbys])
# add groupbys to projection as well
for groupby in groupbys:
node_select.append_column(columns[groupby])
dialect = make_url(database.URI).get_dialect()
sql = str(
node_select.compile(dialect=dialect(), compile_kwargs={"literal_binds": True}),
)
return QueryCreate(database_id=database.id, submitted_query=sql)
def find_on_clause(
node: Node,
node_select: Select,
dimension: Node,
subquery: Select,
) -> ClauseElement:
"""
Return the on clause for a node/dimension selects.
"""
for parent in node.parents:
for column in parent.columns:
if column.dimension == dimension:
dimension_column = column.dimension_column or DEFAULT_DIMENSION_COLUMN
return (
node_select.columns[column.name] # type: ignore
== subquery.columns[dimension_column] # type: ignore
)
raise Exception(f"Node {node.name} has no columns with dimension {dimension.name}")
# pylint: disable=too-many-branches, too-many-locals, too-many-statements
def get_query_for_sql(sql: str) -> QueryCreate:
"""
Return a query given a SQL expression querying the repo.
Eg:
SELECT
"core.users.gender", "core.num_comments"
FROM metrics
WHERE "core.comments.user_id" > 1
GROUP BY
"core.users.gender"
This works by converting metrics (``core.num_comments``) into their selection
definition (``COUNT(*)``), updating the sources to include the metrics parents
(including joining with dimensions), and updating column references in the
``WHERE``, ``GROUP BY``, etc.
"""
session = next(get_session())
tree = parse_sql(sql, dialect="ansi")
query_select = tree[0]["Query"]["body"]["Select"]
# fetch all metric and dimension nodes
nodes = {node.name: node for node in session.exec(select(Node))}
# extract metrics and dimensions from the query
identifiers = {
identifier["value"]
for identifier in find_nodes_by_key(query_select, "Identifier")
}
for compound_identifier in find_nodes_by_key(query_select, "CompoundIdentifier"):
identifiers.add(".".join(part["value"] for part in compound_identifier))
requested_metrics: Set[Node] = set()
requested_dimensions: Set[Node] = set()
for identifier in identifiers:
if identifier in nodes and nodes[identifier].type == NodeType.METRIC:
requested_metrics.add(nodes[identifier])
continue
if "." not in identifier:
raise Exception(f"Invalid dimension: {identifier}")
name, column = identifier.rsplit(".", 1)
if name not in nodes:
raise Exception(f"Invalid dimension: {identifier}")
node = nodes[name]
if node.type != NodeType.DIMENSION:
continue
column_names = {column.name for column in node.columns}
if column not in column_names:
raise Exception(f"Invalid dimension: {identifier}")
requested_dimensions.add(node)
# update ``FROM``/``JOIN`` based on requests metrics and dimensions
parents = process_metrics(query_select, requested_metrics, requested_dimensions)
# update metric references in the projection
projection = query_select["projection"]
metric_names = {metric.name for metric in requested_metrics}
for expression, parent in list(
find_nodes_by_key_with_parent(projection, "UnnamedExpr"),
):
replace_metric_identifier(expression, parent, nodes, metric_names)
for expression_with_alias, parent in list(
find_nodes_by_key_with_parent(projection, "ExprWithAlias"),
):
alias = expression_with_alias["alias"]
expression = expression_with_alias["expr"]
replace_metric_identifier(expression, parent, nodes, metric_names, alias)
# update metric references in ``ORDER BY`` and ``HAVING``
for part in (tree[0]["Query"]["order_by"], query_select["having"]):
for identifier, parent in list(
find_nodes_by_key_with_parent(part, "Identifier"),
):
name = identifier["value"]
if name not in nodes:
if "." in name and name.rsplit(".", 1)[0] in nodes:
# not a metric, but a column reference
continue
raise Exception(f"Invalid identifier: {name}")
node = nodes[name]
metric_tree = parse_sql(node.expression, dialect="ansi")
parent.pop("Identifier")
parent.update(
get_expression_from_projection(
metric_tree[0]["Query"]["body"]["Select"]["projection"][0],
),
)
# replace dimension references
parts = [
query_select[part]
for part in ("projection", "selection", "group_by", "sort_by")
]
parts.append(tree[0]["Query"]["order_by"])
for part in parts:
for identifier, parent in list(
find_nodes_by_key_with_parent(part, "Identifier"),
):
if identifier["value"] not in identifiers:
continue
name, column = identifier["value"].rsplit(".", 1)
parent.pop("Identifier")
parent["CompoundIdentifier"] = [
{"quote_style": '"', "value": name},
{"quote_style": '"', "value": column},
]
parents.extend(requested_dimensions)
referenced_columns = get_referenced_columns_from_tree(tree, parents)
database = get_database_for_nodes(session, parents, referenced_columns)
dialect = make_url(database.URI).get_dialect()
query = get_query(None, parents, tree, database, dialect.name)
sql = str(query.compile(dialect=dialect(), compile_kwargs={"literal_binds": True}))
return QueryCreate(database_id=database.id, submitted_query=sql)
def process_metrics(
query_select: Select,
requested_metrics: Set[Node],
requested_dimensions: Set[Node],
) -> List[Node]:
"""
Process metrics in the query, updating ``FROM`` and adding any joins.
    Modifies ``query_select`` in place and returns the parents.
"""
if not requested_metrics:
if not requested_dimensions:
return []
if len(requested_dimensions) > 1:
raise Exception(
"Cannot query from multiple dimensions when no metric is specified",
)
dimension = list(requested_dimensions)[0]
query_select["from"] = [
{
"joins": [],
"relation": {
"Table": {
"alias": None,
"args": [],
"name": [{"quote_style": '"', "value": dimension.name}],
"with_hints": [],
},
},
},
]
return [dimension]
# check that there is a metric with the superset of parents from all metrics
main_metric = sorted(
requested_metrics,
key=lambda metric: (len(metric.parents), metric.name),
reverse=True,
)[0]
for metric in requested_metrics:
if not set(metric.parents) <= set(main_metric.parents):
raise Exception(
f"Metrics {metric.name} and {main_metric.name} have non-shared parents",
)
# replace the ``from`` part of the parse tree with the ``from`` from the metric that
# has all the necessary parents
metric_tree = parse_sql(main_metric.expression, dialect="ansi")
query_select["from"] = metric_tree[0]["Query"]["body"]["Select"]["from"]
# join to any dimensions
for dimension in requested_dimensions:
query_select["from"][0]["joins"].append(
get_dimension_join(main_metric, dimension),
)
return main_metric.parents
def replace_metric_identifier(
expression: Expression,
parent: Projection,
nodes: Dict[str, Node],
metric_names: Set[str],
alias: Optional[Identifier] = None,
) -> None:
"""
Replace any metric reference in ``expression`` with its SQL.
"""
if "CompoundIdentifier" in expression:
expression["Identifier"] = {
"quote_style": None,
"value": ".".join(
part["value"] for part in expression.pop("CompoundIdentifier")
),
}
elif "Identifier" not in expression:
return
name = expression["Identifier"]["value"]
if name not in metric_names:
return
# if this is an unnamed expression remove the key from the parent, since it will be
# replaced with an expression with alias
parent.pop("UnnamedExpr", None)
node = nodes[name]
metric_tree = parse_sql(node.expression, dialect="ansi")
parent["ExprWithAlias"] = {
"alias": alias or {"quote_style": '"', "value": node.name},
"expr": get_expression_from_projection(
metric_tree[0]["Query"]["body"]["Select"]["projection"][0],
),
}
def get_join_columns(node: Node, dimension: Node) -> Tuple[str, str, str]:
"""
Return the columns to perform a join between a node and a dimension.
"""
for parent in node.parents:
for column in parent.columns:
if column.dimension == dimension:
return (
parent.name,
column.name,
column.dimension_column or DEFAULT_DIMENSION_COLUMN,
)
raise Exception(f"Node {node.name} has no columns with dimension {dimension.name}")
def get_dimension_join(node: Node, dimension: Node) -> Join:
"""
Return the join between a node and a dimension.
"""
parent_name, node_column, dimension_column = get_join_columns(node, dimension)
relation: Relation = {
"Table": {
"alias": None,
"args": [],
"name": [{"quote_style": None, "value": dimension.name}],
"with_hints": [],
},
}
return {
"join_operator": {
"Inner": {
"On": {
"BinaryOp": {
"left": {
"CompoundIdentifier": [
{"quote_style": None, "value": parent_name},
{"quote_style": None, "value": node_column},
],
},
"op": "Eq",
"right": {
"CompoundIdentifier": [
{"quote_style": None, "value": dimension.name},
{"quote_style": None, "value": dimension_column},
],
},
},
},
"Using": [],
},
},
"relation": relation,
}
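# Illustrative note (added, not part of the original module): for a node whose
# parent "core.comments" references the dimension "core.users" through
# node_column="user_id" and dimension_column="id", the structure returned
# above corresponds to the SQL join
#     INNER JOIN core.users ON core.comments.user_id = core.users.id
# (the table and column names here are examples only).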
|
[
"sqlmodel.select"
] |
[((1275, 1322), 're.compile', 're.compile', (['"""([\\\\w\\\\./_]+)(<=|<|>=|>|!=|=)(.+)"""'], {}), "('([\\\\w\\\\./_]+)(<=|<|>=|>|!=|=)(.+)')\n", (1285, 1322), False, 'import re\n'), ((2526, 2557), 'typing.cast', 'cast', (['FilterOperator', 'operator_'], {}), '(FilterOperator, operator_)\n', (2530, 2557), False, 'from typing import Any, Callable, Dict, List, Literal, Optional, Set, Tuple, cast\n'), ((4418, 4480), 'datajunction.sql.dag.get_referenced_columns_from_sql', 'get_referenced_columns_from_sql', (['node.expression', 'node.parents'], {}), '(node.expression, node.parents)\n', (4449, 4480), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((5043, 5114), 'datajunction.sql.dag.get_database_for_nodes', 'get_database_for_nodes', (['session', 'nodes', 'referenced_columns', 'database_id'], {}), '(session, nodes, referenced_columns, database_id)\n', (5065, 5114), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((5151, 5186), 'datajunction.sql.transpile.get_select_for_node', 'get_select_for_node', (['node', 'database'], {}), '(node, database)\n', (5170, 5186), False, 'from datajunction.sql.transpile import get_query, get_select_for_node\n'), ((6273, 6330), 'datajunction.models.query.QueryCreate', 'QueryCreate', ([], {'database_id': 'database.id', 'submitted_query': 'sql'}), '(database_id=database.id, submitted_query=sql)\n', (6284, 6330), False, 'from datajunction.models.query import QueryCreate\n'), ((7722, 7752), 'sqloxide.parse_sql', 'parse_sql', (['sql'], {'dialect': '"""ansi"""'}), "(sql, dialect='ansi')\n", (7731, 7752), False, 'from sqloxide import parse_sql\n'), ((8130, 8183), 'datajunction.sql.parse.find_nodes_by_key', 'find_nodes_by_key', (['query_select', '"""CompoundIdentifier"""'], {}), "(query_select, 'CompoundIdentifier')\n", (8147, 8183), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((11505, 11552), 'datajunction.sql.dag.get_referenced_columns_from_tree', 'get_referenced_columns_from_tree', (['tree', 'parents'], {}), '(tree, parents)\n', (11537, 11552), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((11569, 11629), 'datajunction.sql.dag.get_database_for_nodes', 'get_database_for_nodes', (['session', 'parents', 'referenced_columns'], {}), '(session, parents, referenced_columns)\n', (11591, 11629), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((11693, 11747), 'datajunction.sql.transpile.get_query', 'get_query', (['None', 'parents', 'tree', 'database', 'dialect.name'], {}), '(None, parents, tree, database, dialect.name)\n', (11702, 11747), False, 'from datajunction.sql.transpile import get_query, get_select_for_node\n'), ((11848, 11905), 'datajunction.models.query.QueryCreate', 'QueryCreate', ([], {'database_id': 'database.id', 'submitted_query': 'sql'}), '(database_id=database.id, submitted_query=sql)\n', (11859, 11905), False, 'from datajunction.models.query import QueryCreate\n'), ((13543, 13592), 'sqloxide.parse_sql', 'parse_sql', (['main_metric.expression'], {'dialect': '"""ansi"""'}), "(main_metric.expression, dialect='ansi')\n", (13552, 13592), False, 'from sqloxide import 
parse_sql\n'), ((14773, 14815), 'sqloxide.parse_sql', 'parse_sql', (['node.expression'], {'dialect': '"""ansi"""'}), "(node.expression, dialect='ansi')\n", (14782, 14815), False, 'from sqloxide import parse_sql\n'), ((4007, 4027), 'datajunction.sql.dag.get_dimensions', 'get_dimensions', (['node'], {}), '(node)\n', (4021, 4027), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((7695, 7708), 'datajunction.utils.get_session', 'get_session', ([], {}), '()\n', (7706, 7708), False, 'from datajunction.utils import get_session\n'), ((9444, 9500), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['projection', '"""UnnamedExpr"""'], {}), "(projection, 'UnnamedExpr')\n", (9473, 9500), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((9639, 9697), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['projection', '"""ExprWithAlias"""'], {}), "(projection, 'ExprWithAlias')\n", (9668, 9697), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((14932, 15027), 'datajunction.sql.parse.get_expression_from_projection', 'get_expression_from_projection', (["metric_tree[0]['Query']['body']['Select']['projection'][0]"], {}), "(metric_tree[0]['Query']['body']['Select'][\n 'projection'][0])\n", (14962, 15027), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((3203, 3226), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (3219, 3226), False, 'import ast\n'), ((6115, 6137), 'sqlalchemy.engine.url.make_url', 'make_url', (['database.URI'], {}), '(database.URI)\n', (6123, 6137), False, 'from sqlalchemy.engine.url import make_url\n'), ((8047, 8092), 'datajunction.sql.parse.find_nodes_by_key', 'find_nodes_by_key', (['query_select', '"""Identifier"""'], {}), "(query_select, 'Identifier')\n", (8064, 8092), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((10073, 10122), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['part', '"""Identifier"""'], {}), "(part, 'Identifier')\n", (10102, 10122), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((10485, 10527), 'sqloxide.parse_sql', 'parse_sql', (['node.expression'], {'dialect': '"""ansi"""'}), "(node.expression, dialect='ansi')\n", (10494, 10527), False, 'from sqloxide import parse_sql\n'), ((11029, 11078), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['part', '"""Identifier"""'], {}), "(part, 'Identifier')\n", (11058, 11078), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((11644, 11666), 'sqlalchemy.engine.url.make_url', 'make_url', (['database.URI'], {}), '(database.URI)\n', (11652, 11666), False, 'from sqlalchemy.engine.url import make_url\n'), ((3034, 3046), 'dateutil.parser.parse', 'parse', (['value'], {}), '(value)\n', (3039, 3046), False, 'from dateutil.parser import parse\n'), ((5310, 5386), 'datajunction.sql.transpile.get_select_for_node', 'get_select_for_node', (['dimension', 'database', 
'referenced_columns[dimension.name]'], {}), '(dimension, database, referenced_columns[dimension.name])\n', (5329, 5386), False, 'from datajunction.sql.transpile import get_query, get_select_for_node\n'), ((7905, 7917), 'sqlmodel.select', 'select', (['Node'], {}), '(Node)\n', (7911, 7917), False, 'from sqlmodel import Session, select\n'), ((10608, 10703), 'datajunction.sql.parse.get_expression_from_projection', 'get_expression_from_projection', (["metric_tree[0]['Query']['body']['Select']['projection'][0]"], {}), "(metric_tree[0]['Query']['body']['Select'][\n 'projection'][0])\n", (10638, 10703), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((1898, 2240), 'datajunction.errors.DJError', 'DJError', ([], {'code': 'ErrorCode.INVALID_FILTER_PATTERN', 'message': 'f"""The filter "{filter_}" is not a valid filter. Filters should consist of a dimension name, follow by a valid operator (<=|<|>=|>|!=|=), followed by a value. If the value is a string or date/time it should be enclosed in single quotes."""', 'debug': "{'context': {'filter': filter_}}"}), '(code=ErrorCode.INVALID_FILTER_PATTERN, message=\n f\'The filter "{filter_}" is not a valid filter. Filters should consist of a dimension name, follow by a valid operator (<=|<|>=|>|!=|=), followed by a value. If the value is a string or date/time it should be enclosed in single quotes.\'\n , debug={\'context\': {\'filter\': filter_}})\n', (1905, 2240), False, 'from datajunction.errors import DJError, DJInvalidInputException, ErrorCode\n'), ((4856, 4868), 'sqlmodel.select', 'select', (['Node'], {}), '(Node)\n', (4862, 4868), False, 'from sqlmodel import Session, select\n')]
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import io
from tempfile import mkstemp
import numpy as np
import pytest
import megengine.core.tensor.megbrain_graph as G
import megengine.functional as F
import megengine.optimizer as optim
import megengine.utils.comp_graph_tools as cgtools
from megengine import Parameter, tensor
from megengine.autodiff import GradManager
from megengine.core._trace_option import set_symbolic_shape
from megengine.core.ops import builtin as ops
from megengine.core.ops.builtin import Elemwise
from megengine.core.tensor.utils import isscalar
from megengine.functional import exp, log
from megengine.jit import exclude_from_trace, trace
from megengine.module import Module
from megengine.random import normal, uniform
def test_trace():
for symbolic in [False, True]:
@trace(symbolic=symbolic)
def f(x):
return -x
x = tensor([1])
y = f(x).numpy()
for i in range(3):
np.testing.assert_equal(f(x).numpy(), y)
def test_output_copy_trace():
class Simple(Module):
def __init__(self):
super().__init__()
self.a = Parameter([1.0], dtype=np.float32)
def forward(self, x):
x = x * self.a
            # will result in a copy of the output in grad
x = F.exp(x)
return x
net = Simple()
gm = GradManager().attach(net.parameters())
opt = optim.SGD(net.parameters(), 1e-3, momentum=0.9)
data = tensor(np.arange(4).reshape(2, 2), dtype="float32")
@trace(symbolic=False)
def train_f1(d):
with gm:
loss = net(d)
gm.backward(loss)
opt.step().clear_grad()
return loss
@trace(symbolic=True)
def train_f2(d):
with gm:
loss = net(d)
gm.backward(loss)
opt.step().clear_grad()
return loss
for i in range(2):
y1 = train_f1(data).numpy()
y2 = train_f2(data).numpy()
np.testing.assert_equal(y1, y2)
def test_exclude_from_trace():
for symbolic in [False, True]:
@trace(symbolic=symbolic)
def f(x):
x = -x
with exclude_from_trace():
if i % 2:
x = -x
x = -x
return x
x = tensor([1])
for i in range(3):
y = f(x).numpy()
np.testing.assert_equal(f(x).numpy(), y)
def test_print_in_trace():
for symbolic in [False]: # cannot read value in symbolic mode
@trace(symbolic=symbolic)
def f(x):
nonlocal buf
x = -x
buf = x.numpy()
x = -x
return x
buf = None
x = tensor([1])
for i in range(3):
y = f(x).numpy()
z = buf
buf = None
np.testing.assert_equal(f(x).numpy(), y)
np.testing.assert_equal(z, buf)
def test_dump():
@trace(symbolic=True, capture_as_const=True)
def f(a, b):
return a + b
a = tensor([2])
b = tensor([4])
y = f(a, b).numpy()
for i in range(3):
np.testing.assert_equal(f(a, b).numpy(), y)
file = io.BytesIO()
dump_info = f.dump(file)
assert dump_info.nr_opr == 3
np.testing.assert_equal(dump_info.inputs, ["arg_0", "arg_1"])
np.testing.assert_equal(dump_info.outputs, ["ADD(arg_0,arg_1)[4]"])
file.seek(0)
result = cgtools.load_and_inference(file, [a, b])
np.testing.assert_equal(result[0], y)
def test_capture_dump():
a = tensor([2])
@trace(symbolic=True, capture_as_const=True)
def f(x):
return x * a
x = tensor([3])
y = f(x).numpy()
for i in range(3):
np.testing.assert_equal(f(x).numpy(), y)
file = io.BytesIO()
f.dump(file)
file.seek(0)
result = cgtools.load_and_inference(file, [x])
np.testing.assert_equal(result[0], y)
def test_dump_volatile():
p = tensor([2])
@trace(symbolic=True, capture_as_const=True)
def f(x):
return x * p
x = tensor([3])
y = f(x).numpy()
for i in range(3):
np.testing.assert_equal(f(x).numpy(), y)
file = io.BytesIO()
f.dump(file, optimize_for_inference=False)
file.seek(0)
cg, _, outputs = G.load_graph(file)
(out,) = outputs
assert (
cgtools.get_owner_opr_type(cgtools.get_owner_opr_inputs(out)[1])
== "ImmutableTensor"
)
def test_trace_profiler():
for symbolic in [False, True]:
@trace(symbolic=symbolic, profiling=True)
def f(x):
return -x
x = tensor([1])
y = f(x).numpy()
f(x)
f(x) # XXX: has to run twice
out = f.get_profile()
assert out.get("profiler")
@pytest.mark.skip(reason="force opt_level=0 when building graph")
def test_goptions():
@trace(symbolic=True, opt_level=0, capture_as_const=True)
def f(x):
# directly return x / x will not trigger gopt
# since there's no way to tell the two x are the same
y = 2.0 * x
return y / y
@trace(symbolic=True, opt_level=1, capture_as_const=True)
def g(x):
y = 2.0 * x
return y / y
d = tensor(0.0)
assert not np.isfinite(f(d).numpy())
np.testing.assert_equal(g(d).numpy().item(), 1.0)
@pytest.mark.skip(reason="force opt_level=0 when building graph")
def test_goptions_log_sum_exp():
@trace(symbolic=True, opt_level=0, capture_as_const=True)
def f(x, y):
return log(exp(x) + exp(y))
@trace(symbolic=True, opt_level=1, capture_as_const=True)
def g(x, y):
return log(exp(x) + exp(y))
val = 1.0e4
d = tensor(val)
o = tensor(0.0)
assert not np.isfinite(f(d, o).numpy())
np.testing.assert_almost_equal(g(d, o), val)
@pytest.mark.skip(reason="could not use opt_level=0 with dump")
def test_goptions_log_exp():
@trace(symbolic=True, opt_level=0, capture_as_const=True)
def f(x):
return log(exp(x))
@trace(symbolic=True, opt_level=1, capture_as_const=True)
def g(x):
return log(exp(x))
f(tensor(1.0))
_, out = mkstemp()
f.dump(out, optimize_for_inference=False)
*_, outputs = G.load_graph(out)
oprs_1 = cgtools.get_oprs_seq(outputs)
g(tensor(1.0))
g.dump(out, optimize_for_inference=False)
*_, outputs = G.load_graph(out)
oprs_2 = cgtools.get_oprs_seq(outputs)
assert len(oprs_1) - len(oprs_2) == 2
def test_optimize_for_inference():
@trace(symbolic=True, capture_as_const=True)
def f(x):
return exp(x)
_, out = mkstemp()
f(tensor(5.0))
f.dump(out, enable_io16xc32=True)
res = G.load_graph(out)
computing_input = res.output_vars_list[0].owner.inputs[0]
assert computing_input.dtype == np.float16
def test_optimize_for_inference_broadcast():
a = tensor(np.ones(1, dtype=np.float32))
@trace(capture_as_const=True, symbolic_shape=True)
def f():
return a._broadcast(tensor([1, 10], dtype=np.int32))
f()
f.dump(io.BytesIO())
def test_trace_cvt_bool():
x = tensor([0], dtype=np.int32)
@trace(symbolic=True)
def f(x):
a = x.shape
b = a[0]
assert isscalar(b)
return b == 0
for i in range(3):
np.testing.assert_equal(f(x).numpy(), False)
def test_trace_reshape():
for symbolic in [False, True]:
x1 = tensor(np.random.randn(2, 10, 10))
x2 = tensor(np.random.randn(4, 10, 10))
x3 = tensor(np.random.randn(8, 10, 10))
@trace(symbolic=symbolic, capture_as_const=True)
def f(x):
y = x.reshape(x.shape[0], 100)
return y
f(x1)
f(x2)
f(x3)
def test_trace_topk():
x = tensor([5, 2, 7, 1, 0, 3, 2])
@trace(symbolic=True)
def f(x):
y = F.topk(x, 3)
np.testing.assert_equal(y[0].shape.numpy(), np.array([3,]))
return y
for i in range(3):
f(x)
def test_trace_warp_perspective():
inp_shape = (1, 1, 4, 4)
x = tensor(np.arange(16, dtype=np.float32).reshape(inp_shape))
M_shape = (1, 3, 3)
M = tensor(
np.array(
[[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [0.0, 0.0, 1.0]], dtype=np.float32
).reshape(M_shape)
)
@trace(symbolic=True)
def f(x, M):
out = F.warp_perspective(x, M, (2, 2))
np.testing.assert_equal(out.shape.numpy(), np.array([1, 1, 2, 2]))
return out
for i in range(1):
f(x, M)
def test_raise_on_trace():
step_count = 0
catch_count = 0
bad_step = 10
class CatchMe(Exception):
pass
a = tensor([1, 2, 3, 4])
b = tensor([5, 6, 7, 8])
c = tensor([9, 0, 1, 2])
@trace
def add_abc(a, b, c):
ps = a + b
result = ps + c
if step_count == bad_step:
raise CatchMe("catch me")
return result
for i in range(100):
try:
d = add_abc(a, b, c)
except CatchMe as e:
catch_count += 1
else:
np.testing.assert_equal(d.numpy(), (a + b + c).numpy())
step_count += 1
assert catch_count == 1
def test_trace_broadcast():
for symbolic in [False, True]:
x1 = tensor(np.random.randn(3, 1, 1))
x2 = tensor(np.random.randn(1, 4, 1))
x3 = tensor(np.random.randn(1, 1, 5))
@trace(symbolic=symbolic, capture_as_const=True)
def f(x):
y = F.broadcast_to(x, (3, 4, 5))
return y
f(x1)
f(x2)
f(x3)
def test_trace_nms():
def make_inputs(n):
boxes = np.zeros((n, 4))
boxes[:, :2] = np.random.rand(n, 2) * 100
boxes[:, 2:] = np.random.rand(n, 2) * 100 + 100
scores = np.random.rand(n)
return tensor(boxes), tensor(scores)
@trace(symbolic=False)
def f(boxes, scores):
# with tracing, max_output must be specified
results = F.nn.nms(boxes, scores=scores, iou_thresh=0.5, max_output=20)
# without tracing, max output can be inferred inside nms
with exclude_from_trace():
_ = F.nn.nms(boxes, scores=scores, iou_thresh=0.5)
return results
f(*make_inputs(10))
f(*make_inputs(20))
f(*make_inputs(30))
def test_trace_valid_broadcast():
x1 = tensor(np.random.randn(1, 1))
x2 = tensor(np.random.randn(1, 2))
shape = (tensor([2]), tensor([2]))
@trace(symbolic=False)
def f(x, shape):
y = F.broadcast_to(x, shape)
return y
f(x1, shape)
f(x2, shape)
def test_clip():
x = tensor(np.random.randn(10, 10))
@trace(symbolic=True)
def f(x, lower, upper):
y = F.clip(x, lower, upper)
return y
for i in range(3):
f(x, tensor([0]), tensor([1]))
# test returning noncontiguous tensor from trace
def test_slice():
@trace
def f(x):
return x[:, 1::2]
x = F.arange(8).reshape(2, 4)
f(x)
y = f(x)
np.testing.assert_array_equal(y.numpy(), x.numpy()[:, 1::2])
y + y
def test_random():
def run_test(op):
for symbolic_shape in [True, False]:
@trace(symbolic=True, symbolic_shape=symbolic_shape)
def f():
out = op(size=[10, 10])
out_shape = out.shape
assert out_shape is not None
if not isinstance(out_shape, tuple):
assert out.shape.numpy() is not None
return out
for _ in range(3):
f()
run_test(uniform)
run_test(normal)
|
[
"megengine.utils.comp_graph_tools.load_and_inference",
"megengine.functional.arange",
"megengine.autodiff.GradManager",
"megengine.jit.trace",
"megengine.functional.clip",
"megengine.core.tensor.utils.isscalar",
"megengine.utils.comp_graph_tools.get_owner_opr_inputs",
"megengine.tensor",
"megengine.functional.nn.nms",
"megengine.core.tensor.megbrain_graph.load_graph",
"megengine.functional.topk",
"megengine.functional.exp",
"megengine.functional.broadcast_to",
"megengine.jit.exclude_from_trace",
"megengine.Parameter",
"megengine.utils.comp_graph_tools.get_oprs_seq",
"megengine.functional.warp_perspective"
] |
[((5110, 5174), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""force opt_level=0 when building graph"""'}), "(reason='force opt_level=0 when building graph')\n", (5126, 5174), False, 'import pytest\n'), ((5666, 5730), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""force opt_level=0 when building graph"""'}), "(reason='force opt_level=0 when building graph')\n", (5682, 5730), False, 'import pytest\n'), ((6148, 6210), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""could not use opt_level=0 with dump"""'}), "(reason='could not use opt_level=0 with dump')\n", (6164, 6210), False, 'import pytest\n'), ((1871, 1892), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(False)'}), '(symbolic=False)\n', (1876, 1892), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((2049, 2069), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)'}), '(symbolic=True)\n', (2054, 2069), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((3305, 3348), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'capture_as_const': '(True)'}), '(symbolic=True, capture_as_const=True)\n', (3310, 3348), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((3396, 3407), 'megengine.tensor', 'tensor', (['[2]'], {}), '([2])\n', (3402, 3407), False, 'from megengine import Parameter, tensor\n'), ((3416, 3427), 'megengine.tensor', 'tensor', (['[4]'], {}), '([4])\n', (3422, 3427), False, 'from megengine import Parameter, tensor\n'), ((3540, 3552), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3550, 3552), False, 'import io\n'), ((3619, 3680), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['dump_info.inputs', "['arg_0', 'arg_1']"], {}), "(dump_info.inputs, ['arg_0', 'arg_1'])\n", (3642, 3680), True, 'import numpy as np\n'), ((3685, 3752), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['dump_info.outputs', "['ADD(arg_0,arg_1)[4]']"], {}), "(dump_info.outputs, ['ADD(arg_0,arg_1)[4]'])\n", (3708, 3752), True, 'import numpy as np\n'), ((3783, 3823), 'megengine.utils.comp_graph_tools.load_and_inference', 'cgtools.load_and_inference', (['file', '[a, b]'], {}), '(file, [a, b])\n', (3809, 3823), True, 'import megengine.utils.comp_graph_tools as cgtools\n'), ((3828, 3865), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['result[0]', 'y'], {}), '(result[0], y)\n', (3851, 3865), True, 'import numpy as np\n'), ((3901, 3912), 'megengine.tensor', 'tensor', (['[2]'], {}), '([2])\n', (3907, 3912), False, 'from megengine import Parameter, tensor\n'), ((3919, 3962), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'capture_as_const': '(True)'}), '(symbolic=True, capture_as_const=True)\n', (3924, 3962), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((4007, 4018), 'megengine.tensor', 'tensor', (['[3]'], {}), '([3])\n', (4013, 4018), False, 'from megengine import Parameter, tensor\n'), ((4125, 4137), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4135, 4137), False, 'import io\n'), ((4185, 4222), 'megengine.utils.comp_graph_tools.load_and_inference', 'cgtools.load_and_inference', (['file', '[x]'], {}), '(file, [x])\n', (4211, 4222), True, 'import megengine.utils.comp_graph_tools as cgtools\n'), ((4227, 4264), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['result[0]', 'y'], {}), '(result[0], y)\n', (4250, 4264), True, 'import numpy as np\n'), ((4301, 4312), 'megengine.tensor', 'tensor', (['[2]'], {}), '([2])\n', (4307, 4312), False, 'from megengine import Parameter, tensor\n'), ((4319, 
4362), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'capture_as_const': '(True)'}), '(symbolic=True, capture_as_const=True)\n', (4324, 4362), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((4407, 4418), 'megengine.tensor', 'tensor', (['[3]'], {}), '([3])\n', (4413, 4418), False, 'from megengine import Parameter, tensor\n'), ((4525, 4537), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4535, 4537), False, 'import io\n'), ((4623, 4641), 'megengine.core.tensor.megbrain_graph.load_graph', 'G.load_graph', (['file'], {}), '(file)\n', (4635, 4641), True, 'import megengine.core.tensor.megbrain_graph as G\n'), ((5201, 5257), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'opt_level': '(0)', 'capture_as_const': '(True)'}), '(symbolic=True, opt_level=0, capture_as_const=True)\n', (5206, 5257), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((5435, 5491), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'opt_level': '(1)', 'capture_as_const': '(True)'}), '(symbolic=True, opt_level=1, capture_as_const=True)\n', (5440, 5491), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((5556, 5567), 'megengine.tensor', 'tensor', (['(0.0)'], {}), '(0.0)\n', (5562, 5567), False, 'from megengine import Parameter, tensor\n'), ((5769, 5825), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'opt_level': '(0)', 'capture_as_const': '(True)'}), '(symbolic=True, opt_level=0, capture_as_const=True)\n', (5774, 5825), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((5885, 5941), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'opt_level': '(1)', 'capture_as_const': '(True)'}), '(symbolic=True, opt_level=1, capture_as_const=True)\n', (5890, 5941), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((6020, 6031), 'megengine.tensor', 'tensor', (['val'], {}), '(val)\n', (6026, 6031), False, 'from megengine import Parameter, tensor\n'), ((6040, 6051), 'megengine.tensor', 'tensor', (['(0.0)'], {}), '(0.0)\n', (6046, 6051), False, 'from megengine import Parameter, tensor\n'), ((6245, 6301), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'opt_level': '(0)', 'capture_as_const': '(True)'}), '(symbolic=True, opt_level=0, capture_as_const=True)\n', (6250, 6301), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((6349, 6405), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'opt_level': '(1)', 'capture_as_const': '(True)'}), '(symbolic=True, opt_level=1, capture_as_const=True)\n', (6354, 6405), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((6480, 6489), 'tempfile.mkstemp', 'mkstemp', ([], {}), '()\n', (6487, 6489), False, 'from tempfile import mkstemp\n'), ((6554, 6571), 'megengine.core.tensor.megbrain_graph.load_graph', 'G.load_graph', (['out'], {}), '(out)\n', (6566, 6571), True, 'import megengine.core.tensor.megbrain_graph as G\n'), ((6585, 6614), 'megengine.utils.comp_graph_tools.get_oprs_seq', 'cgtools.get_oprs_seq', (['outputs'], {}), '(outputs)\n', (6605, 6614), True, 'import megengine.utils.comp_graph_tools as cgtools\n'), ((6699, 6716), 'megengine.core.tensor.megbrain_graph.load_graph', 'G.load_graph', (['out'], {}), '(out)\n', (6711, 6716), True, 'import megengine.core.tensor.megbrain_graph as G\n'), ((6730, 6759), 'megengine.utils.comp_graph_tools.get_oprs_seq', 'cgtools.get_oprs_seq', (['outputs'], {}), '(outputs)\n', (6750, 6759), True, 'import megengine.utils.comp_graph_tools as cgtools\n'), ((6845, 6888), 
'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'capture_as_const': '(True)'}), '(symbolic=True, capture_as_const=True)\n', (6850, 6888), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((6939, 6948), 'tempfile.mkstemp', 'mkstemp', ([], {}), '()\n', (6946, 6948), False, 'from tempfile import mkstemp\n'), ((7017, 7034), 'megengine.core.tensor.megbrain_graph.load_graph', 'G.load_graph', (['out'], {}), '(out)\n', (7029, 7034), True, 'import megengine.core.tensor.megbrain_graph as G\n'), ((7242, 7291), 'megengine.jit.trace', 'trace', ([], {'capture_as_const': '(True)', 'symbolic_shape': '(True)'}), '(capture_as_const=True, symbolic_shape=True)\n', (7247, 7291), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((7437, 7464), 'megengine.tensor', 'tensor', (['[0]'], {'dtype': 'np.int32'}), '([0], dtype=np.int32)\n', (7443, 7464), False, 'from megengine import Parameter, tensor\n'), ((7471, 7491), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)'}), '(symbolic=True)\n', (7476, 7491), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((8092, 8121), 'megengine.tensor', 'tensor', (['[5, 2, 7, 1, 0, 3, 2]'], {}), '([5, 2, 7, 1, 0, 3, 2])\n', (8098, 8121), False, 'from megengine import Parameter, tensor\n'), ((8128, 8148), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)'}), '(symbolic=True)\n', (8133, 8148), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((8622, 8642), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)'}), '(symbolic=True)\n', (8627, 8642), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((8980, 9000), 'megengine.tensor', 'tensor', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (8986, 9000), False, 'from megengine import Parameter, tensor\n'), ((9009, 9029), 'megengine.tensor', 'tensor', (['[5, 6, 7, 8]'], {}), '([5, 6, 7, 8])\n', (9015, 9029), False, 'from megengine import Parameter, tensor\n'), ((9038, 9058), 'megengine.tensor', 'tensor', (['[9, 0, 1, 2]'], {}), '([9, 0, 1, 2])\n', (9044, 9058), False, 'from megengine import Parameter, tensor\n'), ((10163, 10184), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(False)'}), '(symbolic=False)\n', (10168, 10184), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((10762, 10783), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(False)'}), '(symbolic=False)\n', (10767, 10783), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((10959, 10979), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)'}), '(symbolic=True)\n', (10964, 10979), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((1146, 1170), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic'}), '(symbolic=symbolic)\n', (1151, 1170), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((1224, 1235), 'megengine.tensor', 'tensor', (['[1]'], {}), '([1])\n', (1230, 1235), False, 'from megengine import Parameter, tensor\n'), ((2450, 2474), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic'}), '(symbolic=symbolic)\n', (2455, 2474), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((2657, 2668), 'megengine.tensor', 'tensor', (['[1]'], {}), '([1])\n', (2663, 2668), False, 'from megengine import Parameter, tensor\n'), ((2885, 2909), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic'}), '(symbolic=symbolic)\n', (2890, 2909), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((3072, 3083), 'megengine.tensor', 'tensor', (['[1]'], {}), '([1])\n', 
(3078, 3083), False, 'from megengine import Parameter, tensor\n'), ((4858, 4898), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic', 'profiling': '(True)'}), '(symbolic=symbolic, profiling=True)\n', (4863, 4898), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((4952, 4963), 'megengine.tensor', 'tensor', (['[1]'], {}), '([1])\n', (4958, 4963), False, 'from megengine import Parameter, tensor\n'), ((6454, 6465), 'megengine.tensor', 'tensor', (['(1.0)'], {}), '(1.0)\n', (6460, 6465), False, 'from megengine import Parameter, tensor\n'), ((6622, 6633), 'megengine.tensor', 'tensor', (['(1.0)'], {}), '(1.0)\n', (6628, 6633), False, 'from megengine import Parameter, tensor\n'), ((6918, 6924), 'megengine.functional.exp', 'exp', (['x'], {}), '(x)\n', (6921, 6924), False, 'from megengine.functional import exp, log\n'), ((6955, 6966), 'megengine.tensor', 'tensor', (['(5.0)'], {}), '(5.0)\n', (6961, 6966), False, 'from megengine import Parameter, tensor\n'), ((7206, 7234), 'numpy.ones', 'np.ones', (['(1)'], {'dtype': 'np.float32'}), '(1, dtype=np.float32)\n', (7213, 7234), True, 'import numpy as np\n'), ((7386, 7398), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (7396, 7398), False, 'import io\n'), ((7558, 7569), 'megengine.core.tensor.utils.isscalar', 'isscalar', (['b'], {}), '(b)\n', (7566, 7569), False, 'from megengine.core.tensor.utils import isscalar\n'), ((7886, 7933), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic', 'capture_as_const': '(True)'}), '(symbolic=symbolic, capture_as_const=True)\n', (7891, 7933), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((8175, 8187), 'megengine.functional.topk', 'F.topk', (['x', '(3)'], {}), '(x, 3)\n', (8181, 8187), True, 'import megengine.functional as F\n'), ((8674, 8706), 'megengine.functional.warp_perspective', 'F.warp_perspective', (['x', 'M', '(2, 2)'], {}), '(x, M, (2, 2))\n', (8692, 8706), True, 'import megengine.functional as F\n'), ((9713, 9760), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic', 'capture_as_const': '(True)'}), '(symbolic=symbolic, capture_as_const=True)\n', (9718, 9760), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((9952, 9968), 'numpy.zeros', 'np.zeros', (['(n, 4)'], {}), '((n, 4))\n', (9960, 9968), True, 'import numpy as np\n'), ((10093, 10110), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', (10107, 10110), True, 'import numpy as np\n'), ((10282, 10343), 'megengine.functional.nn.nms', 'F.nn.nms', (['boxes'], {'scores': 'scores', 'iou_thresh': '(0.5)', 'max_output': '(20)'}), '(boxes, scores=scores, iou_thresh=0.5, max_output=20)\n', (10290, 10343), True, 'import megengine.functional as F\n'), ((10655, 10676), 'numpy.random.randn', 'np.random.randn', (['(1)', '(1)'], {}), '(1, 1)\n', (10670, 10676), True, 'import numpy as np\n'), ((10694, 10715), 'numpy.random.randn', 'np.random.randn', (['(1)', '(2)'], {}), '(1, 2)\n', (10709, 10715), True, 'import numpy as np\n'), ((10730, 10741), 'megengine.tensor', 'tensor', (['[2]'], {}), '([2])\n', (10736, 10741), False, 'from megengine import Parameter, tensor\n'), ((10743, 10754), 'megengine.tensor', 'tensor', (['[2]'], {}), '([2])\n', (10749, 10754), False, 'from megengine import Parameter, tensor\n'), ((10817, 10841), 'megengine.functional.broadcast_to', 'F.broadcast_to', (['x', 'shape'], {}), '(x, shape)\n', (10831, 10841), True, 'import megengine.functional as F\n'), ((10928, 10951), 'numpy.random.randn', 'np.random.randn', (['(10)', '(10)'], {}), '(10, 10)\n', (10943, 10951), 
True, 'import numpy as np\n'), ((11020, 11043), 'megengine.functional.clip', 'F.clip', (['x', 'lower', 'upper'], {}), '(x, lower, upper)\n', (11026, 11043), True, 'import megengine.functional as F\n'), ((1480, 1514), 'megengine.Parameter', 'Parameter', (['[1.0]'], {'dtype': 'np.float32'}), '([1.0], dtype=np.float32)\n', (1489, 1514), False, 'from megengine import Parameter, tensor\n'), ((1645, 1653), 'megengine.functional.exp', 'F.exp', (['x'], {}), '(x)\n', (1650, 1653), True, 'import megengine.functional as F\n'), ((1705, 1718), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (1716, 1718), False, 'from megengine.autodiff import GradManager\n'), ((2340, 2371), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['y1', 'y2'], {}), '(y1, y2)\n', (2363, 2371), True, 'import numpy as np\n'), ((3249, 3280), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['z', 'buf'], {}), '(z, buf)\n', (3272, 3280), True, 'import numpy as np\n'), ((6335, 6341), 'megengine.functional.exp', 'exp', (['x'], {}), '(x)\n', (6338, 6341), False, 'from megengine.functional import exp, log\n'), ((6439, 6445), 'megengine.functional.exp', 'exp', (['x'], {}), '(x)\n', (6442, 6445), False, 'from megengine.functional import exp, log\n'), ((7333, 7364), 'megengine.tensor', 'tensor', (['[1, 10]'], {'dtype': 'np.int32'}), '([1, 10], dtype=np.int32)\n', (7339, 7364), False, 'from megengine import Parameter, tensor\n'), ((7752, 7778), 'numpy.random.randn', 'np.random.randn', (['(2)', '(10)', '(10)'], {}), '(2, 10, 10)\n', (7767, 7778), True, 'import numpy as np\n'), ((7800, 7826), 'numpy.random.randn', 'np.random.randn', (['(4)', '(10)', '(10)'], {}), '(4, 10, 10)\n', (7815, 7826), True, 'import numpy as np\n'), ((7848, 7874), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)', '(10)'], {}), '(8, 10, 10)\n', (7863, 7874), True, 'import numpy as np\n'), ((8240, 8253), 'numpy.array', 'np.array', (['[3]'], {}), '([3])\n', (8248, 8253), True, 'import numpy as np\n'), ((8758, 8780), 'numpy.array', 'np.array', (['[1, 1, 2, 2]'], {}), '([1, 1, 2, 2])\n', (8766, 8780), True, 'import numpy as np\n'), ((9585, 9609), 'numpy.random.randn', 'np.random.randn', (['(3)', '(1)', '(1)'], {}), '(3, 1, 1)\n', (9600, 9609), True, 'import numpy as np\n'), ((9631, 9655), 'numpy.random.randn', 'np.random.randn', (['(1)', '(4)', '(1)'], {}), '(1, 4, 1)\n', (9646, 9655), True, 'import numpy as np\n'), ((9677, 9701), 'numpy.random.randn', 'np.random.randn', (['(1)', '(1)', '(5)'], {}), '(1, 1, 5)\n', (9692, 9701), True, 'import numpy as np\n'), ((9795, 9823), 'megengine.functional.broadcast_to', 'F.broadcast_to', (['x', '(3, 4, 5)'], {}), '(x, (3, 4, 5))\n', (9809, 9823), True, 'import megengine.functional as F\n'), ((9992, 10012), 'numpy.random.rand', 'np.random.rand', (['n', '(2)'], {}), '(n, 2)\n', (10006, 10012), True, 'import numpy as np\n'), ((10127, 10140), 'megengine.tensor', 'tensor', (['boxes'], {}), '(boxes)\n', (10133, 10140), False, 'from megengine import Parameter, tensor\n'), ((10142, 10156), 'megengine.tensor', 'tensor', (['scores'], {}), '(scores)\n', (10148, 10156), False, 'from megengine import Parameter, tensor\n'), ((10422, 10442), 'megengine.jit.exclude_from_trace', 'exclude_from_trace', ([], {}), '()\n', (10440, 10442), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((10460, 10506), 'megengine.functional.nn.nms', 'F.nn.nms', (['boxes'], {'scores': 'scores', 'iou_thresh': '(0.5)'}), '(boxes, scores=scores, iou_thresh=0.5)\n', (10468, 10506), True, 'import megengine.functional 
as F\n'), ((11098, 11109), 'megengine.tensor', 'tensor', (['[0]'], {}), '([0])\n', (11104, 11109), False, 'from megengine import Parameter, tensor\n'), ((11111, 11122), 'megengine.tensor', 'tensor', (['[1]'], {}), '([1])\n', (11117, 11122), False, 'from megengine import Parameter, tensor\n'), ((11253, 11264), 'megengine.functional.arange', 'F.arange', (['(8)'], {}), '(8)\n', (11261, 11264), True, 'import megengine.functional as F\n'), ((11478, 11529), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'symbolic_shape': 'symbolic_shape'}), '(symbolic=True, symbolic_shape=symbolic_shape)\n', (11483, 11529), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((1820, 1832), 'numpy.arange', 'np.arange', (['(4)'], {}), '(4)\n', (1829, 1832), True, 'import numpy as np\n'), ((2529, 2549), 'megengine.jit.exclude_from_trace', 'exclude_from_trace', ([], {}), '()\n', (2547, 2549), False, 'from megengine.jit import exclude_from_trace, trace\n'), ((4711, 4744), 'megengine.utils.comp_graph_tools.get_owner_opr_inputs', 'cgtools.get_owner_opr_inputs', (['out'], {}), '(out)\n', (4739, 4744), True, 'import megengine.utils.comp_graph_tools as cgtools\n'), ((5862, 5868), 'megengine.functional.exp', 'exp', (['x'], {}), '(x)\n', (5865, 5868), False, 'from megengine.functional import exp, log\n'), ((5871, 5877), 'megengine.functional.exp', 'exp', (['y'], {}), '(y)\n', (5874, 5877), False, 'from megengine.functional import exp, log\n'), ((5978, 5984), 'megengine.functional.exp', 'exp', (['x'], {}), '(x)\n', (5981, 5984), False, 'from megengine.functional import exp, log\n'), ((5987, 5993), 'megengine.functional.exp', 'exp', (['y'], {}), '(y)\n', (5990, 5993), False, 'from megengine.functional import exp, log\n'), ((8391, 8422), 'numpy.arange', 'np.arange', (['(16)'], {'dtype': 'np.float32'}), '(16, dtype=np.float32)\n', (8400, 8422), True, 'import numpy as np\n'), ((8491, 8570), 'numpy.array', 'np.array', (['[[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [0.0, 0.0, 1.0]]'], {'dtype': 'np.float32'}), '([[1.0, 0.0, 1.0], [0.0, 1.0, 1.0], [0.0, 0.0, 1.0]], dtype=np.float32)\n', (8499, 8570), True, 'import numpy as np\n'), ((10042, 10062), 'numpy.random.rand', 'np.random.rand', (['n', '(2)'], {}), '(n, 2)\n', (10056, 10062), True, 'import numpy as np\n')]
|
import sys
sys.path.append('.')
import time
import megengine as mge
from model.RIFE import Model
model = Model()
model.eval()
I0 = mge.random(1, 3, 480, 640)
I1 = mge.random(1, 3, 480, 640)
for i in range(100):
pred = model.inference(I0, I1)
mge._full_sync()
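# Note (added for clarity, not part of the original script): the loop above is
# a warm-up pass, and mge._full_sync() is presumably used to wait for all
# asynchronously dispatched work to finish, so the timing below measures
# completed inference rather than kernel-launch overhead.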
time_stamp = time.time()
for i in range(100):
pred = model.inference(I0, I1)
mge._full_sync()
print((time.time() - time_stamp) / 100)
|
[
"megengine.random",
"megengine._full_sync"
] |
[((11, 31), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (26, 31), False, 'import sys\n'), ((106, 113), 'model.RIFE.Model', 'Model', ([], {}), '()\n', (111, 113), False, 'from model.RIFE import Model\n'), ((133, 159), 'megengine.random', 'mge.random', (['(1)', '(3)', '(480)', '(640)'], {}), '(1, 3, 480, 640)\n', (143, 159), True, 'import megengine as mge\n'), ((165, 191), 'megengine.random', 'mge.random', (['(1)', '(3)', '(480)', '(640)'], {}), '(1, 3, 480, 640)\n', (175, 191), True, 'import megengine as mge\n'), ((248, 264), 'megengine._full_sync', 'mge._full_sync', ([], {}), '()\n', (262, 264), True, 'import megengine as mge\n'), ((278, 289), 'time.time', 'time.time', ([], {}), '()\n', (287, 289), False, 'import time\n'), ((346, 362), 'megengine._full_sync', 'mge._full_sync', ([], {}), '()\n', (360, 362), True, 'import megengine as mge\n'), ((370, 381), 'time.time', 'time.time', ([], {}), '()\n', (379, 381), False, 'import time\n')]
|
"""
Classes of variables for equations/terms.
"""
from __future__ import print_function
from __future__ import absolute_import
from collections import deque
import numpy as nm
from sfepy.base.base import (real_types, complex_types, assert_, get_default,
output, OneTypeList, Container, Struct, basestr,
iter_dict_of_lists)
from sfepy.base.timing import Timer
import sfepy.linalg as la
from sfepy.discrete.functions import Function
from sfepy.discrete.conditions import get_condition_value
from sfepy.discrete.integrals import Integral
from sfepy.discrete.common.dof_info import (DofInfo, EquationMap,
expand_nodes_to_equations,
is_active_bc)
from sfepy.discrete.fem.lcbc_operators import LCBCOperators
from sfepy.discrete.common.mappings import get_physical_qps
from sfepy.discrete.evaluate_variable import eval_real, eval_complex
import six
from six.moves import range
is_state = 0
is_virtual = 1
is_parameter = 2
is_field = 10
def create_adof_conns(conn_info, var_indx=None, active_only=True, verbose=True):
"""
Create active DOF connectivities for all variables referenced in
`conn_info`.
    If a variable does not have the equation mapping, a trivial mapping is
    assumed and connectivity with all DOFs active is created.
DOF connectivity key is a tuple ``(primary variable name, region name,
type, is_trace flag)``.
Notes
-----
If `active_only` is False, the DOF connectivities contain all DOFs, with
the E(P)BC-constrained ones stored as `-1 - <DOF number>`, so that the full
connectivities can be reconstructed for the matrix graph creation.
"""
var_indx = get_default(var_indx, {})
def _create(var, econn):
offset = var_indx.get(var.name, slice(0, 0)).start
if var.eq_map is None:
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if isinstance(var, DGFieldVariable):
eq = nm.arange(var.n_dof, dtype=nm.int32)
else:
if active_only:
eq = var.eq_map.eq
else:
eq = nm.arange(var.n_dof, dtype=nm.int32)
eq[var.eq_map.eq_ebc] = -1 - (var.eq_map.eq_ebc + offset)
eq[var.eq_map.master] = eq[var.eq_map.slave]
adc = create_adof_conn(eq, econn, var.n_components, offset)
return adc
def _assign(adof_conns, info, region, var, field, is_trace):
key = (var.name, region.name, info.dc_type.type, is_trace)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=is_trace)
if econn is None: return
adof_conns[key] = _create(var, econn)
if info.is_trace:
key = (var.name, region.name, info.dc_type.type, False)
if not key in adof_conns:
econn = field.get_econn(info.dc_type, region, is_trace=False)
adof_conns[key] = _create(var, econn)
if verbose:
output('setting up dof connectivities...')
timer = Timer(start=True)
adof_conns = {}
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
if info.primary is not None:
var = info.primary
field = var.get_field()
field.setup_extra_data(info.ps_tg, info, info.is_trace)
region = info.get_region()
_assign(adof_conns, info, region, var, field, info.is_trace)
if info.has_virtual and not info.is_trace:
var = info.virtual
field = var.get_field()
field.setup_extra_data(info.v_tg, info, False)
aux = var.get_primary()
var = aux if aux is not None else var
region = info.get_region(can_trace=False)
_assign(adof_conns, info, region, var, field, False)
if verbose:
output('...done in %.2f s' % timer.stop())
return adof_conns
def create_adof_conn(eq, conn, dpn, offset):
"""
Given a node connectivity, number of DOFs per node and equation mapping,
create the active dof connectivity.
Locally (in a connectivity row), the DOFs are stored DOF-by-DOF (u_0 in all
local nodes, u_1 in all local nodes, ...).
Globally (in a state vector), the DOFs are stored node-by-node (u_0, u_1,
..., u_X in node 0, u_0, u_1, ..., u_X in node 1, ...).
"""
if dpn == 1:
aux = nm.take(eq, conn)
adc = aux + nm.asarray(offset * (aux >= 0), dtype=nm.int32)
else:
n_el, n_ep = conn.shape
adc = nm.empty((n_el, n_ep * dpn), dtype=conn.dtype)
ii = 0
for idof in range(dpn):
aux = nm.take(eq, dpn * conn + idof)
adc[:, ii : ii + n_ep] = aux + nm.asarray(offset * (aux >= 0),
dtype=nm.int32)
ii += n_ep
return adc
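# A minimal illustration, not part of the original sfepy sources: it shows the
# DOF ordering produced by create_adof_conn() for made-up inputs (a single
# two-node element, two DOFs per node, trivial equation mapping).
def _example_create_adof_conn():
    eq = nm.arange(4, dtype=nm.int32)
    conn = nm.array([[0, 1]], dtype=nm.int32)
    adc = create_adof_conn(eq, conn, 2, 0)
    # adc == [[0, 2, 1, 3]]: locally u_0 in both nodes comes first, then u_1,
    # while the global state vector stores DOFs node-by-node (0, 1, 2, 3).
    return adc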
def expand_basis(basis, dpn):
"""
Expand basis for variables with several components (DOFs per node), in a
way compatible with :func:`create_adof_conn()`, according to `dpn`
(DOF-per-node count).
"""
n_c, n_bf = basis.shape[-2:]
ebasis = nm.zeros(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)
for ic in range(n_c):
for ir in range(dpn):
ebasis[..., n_c*ir+ic, ir*n_bf:(ir+1)*n_bf] = basis[..., ic, :]
return ebasis
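# A minimal illustration, not part of the original sfepy sources: it shows how
# expand_basis() places a scalar basis (one component, two basis functions)
# block-diagonally for a hypothetical variable with two DOFs per node.
def _example_expand_basis():
    basis = nm.array([[[[0.25, 0.75]]]])  # shape (1, 1, 1, 2)
    ebasis = expand_basis(basis, 2)    # shape (1, 1, 2, 4)
    # ebasis[0, 0] == [[0.25, 0.75, 0.  , 0.  ],
    #                  [0.  , 0.  , 0.25, 0.75]]
    return ebasis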
class Variables(Container):
"""
Container holding instances of Variable.
"""
@staticmethod
def from_conf(conf, fields):
"""
This method resets the variable counters for automatic order!
"""
Variable.reset()
obj = Variables()
for key, val in six.iteritems(conf):
var = Variable.from_conf(key, val, fields)
obj[var.name] = var
obj.setup_dtype()
obj.setup_ordering()
return obj
def __init__(self, variables=None):
Container.__init__(self, OneTypeList(Variable),
state=set(),
virtual=set(),
parameter=set(),
has_virtual_dcs=False,
has_lcbc=False,
has_lcbc_rhs=False,
has_eq_map=False,
ordered_state=[],
ordered_virtual=[])
if variables is not None:
for var in variables:
self[var.name] = var
self.setup_ordering()
self.setup_dtype()
self.adof_conns = {}
def __setitem__(self, ii, var):
Container.__setitem__(self, ii, var)
if var.is_state():
self.state.add(var.name)
elif var.is_virtual():
self.virtual.add(var.name)
elif var.is_parameter():
self.parameter.add(var.name)
var._variables = self
self.setup_ordering()
self.setup_dof_info()
def setup_dtype(self):
"""
Setup data types of state variables - all have to be of the same
data type, one of nm.float64 or nm.complex128.
"""
dtypes = {nm.complex128 : 0, nm.float64 : 0}
for var in self.iter_state(ordered=False):
dtypes[var.dtype] += 1
if dtypes[nm.float64] and dtypes[nm.complex128]:
raise ValueError("All variables must have the same dtype!")
elif dtypes[nm.float64]:
self.dtype = nm.float64
elif dtypes[nm.complex128]:
self.dtype = nm.complex128
else:
self.dtype = None
def link_duals(self):
"""
Link state variables with corresponding virtual variables,
and assign link to self to each variable instance.
Usually, when solving a PDE in the weak form, each state
variable has a corresponding virtual variable.
"""
for ii in self.state:
self[ii].dual_var_name = None
for ii in self.virtual:
vvar = self[ii]
try:
self[vvar.primary_var_name].dual_var_name = vvar.name
except IndexError:
pass
def get_dual_names(self):
"""
Get names of pairs of dual variables.
Returns
-------
duals : dict
The dual names as virtual name : state name pairs.
"""
duals = {}
for name in self.virtual:
duals[name] = self[name].primary_var_name
return duals
def setup_ordering(self):
"""
Setup ordering of variables.
"""
self.link_duals()
orders = []
for var in self:
try:
orders.append(var._order)
except:
pass
orders.sort()
self.ordered_state = [None] * len(self.state)
for var in self.iter_state(ordered=False):
ii = orders.index(var._order)
self.ordered_state[ii] = var.name
self.ordered_virtual = [None] * len(self.virtual)
ii = 0
for var in self.iter_state(ordered=False):
if var.dual_var_name is not None:
self.ordered_virtual[ii] = var.dual_var_name
ii += 1
def has_virtuals(self):
return len(self.virtual) > 0
def setup_dof_info(self, make_virtual=False):
"""
Setup global DOF information.
"""
self.di = DofInfo('state_dof_info')
for var_name in self.ordered_state:
self.di.append_variable(self[var_name])
if make_virtual:
self.vdi = DofInfo('virtual_dof_info')
for var_name in self.ordered_virtual:
self.vdi.append_variable(self[var_name])
else:
self.vdi = self.di
def setup_lcbc_operators(self, lcbcs, ts=None, functions=None):
"""
Prepare linear combination BC operator matrix and right-hand side
vector.
"""
from sfepy.discrete.common.region import are_disjoint
if lcbcs is None:
self.lcdi = self.adi
return
self.lcbcs = lcbcs
if (ts is None) or ((ts is not None) and (ts.step == 0)):
regs = []
var_names = []
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
regs.append(bc.regions[0])
var_names.append(vns[0])
if bc.regions[1] is not None:
regs.append(bc.regions[1])
var_names.append(vns[1])
for i0 in range(len(regs) - 1):
for i1 in range(i0 + 1, len(regs)):
if ((var_names[i0] == var_names[i1])
and not are_disjoint(regs[i0], regs[i1])):
raise ValueError('regions %s and %s are not disjoint!'
% (regs[i0].name, regs[i1].name))
ops = LCBCOperators('lcbcs', self, functions=functions)
for bcs in self.lcbcs:
for bc in bcs.iter_single():
vns = bc.get_var_names()
dofs = [self[vn].dofs for vn in vns if vn is not None]
bc.canonize_dof_names(*dofs)
if not is_active_bc(bc, ts=ts, functions=functions):
continue
output('lcbc:', bc.name)
ops.add_from_bc(bc, ts)
aux = ops.make_global_operator(self.adi)
self.mtx_lcbc, self.vec_lcbc, self.lcdi = aux
self.has_lcbc = self.mtx_lcbc is not None
self.has_lcbc_rhs = self.vec_lcbc is not None
def get_lcbc_operator(self):
if self.has_lcbc:
return self.mtx_lcbc
else:
raise ValueError('no LCBC defined!')
def equation_mapping(self, ebcs, epbcs, ts, functions, problem=None,
active_only=True):
"""
Create the mapping of active DOFs from/to all DOFs for all state
variables.
Parameters
----------
ebcs : Conditions instance
The essential (Dirichlet) boundary conditions.
epbcs : Conditions instance
The periodic boundary conditions.
ts : TimeStepper instance
The time stepper.
functions : Functions instance
The user functions for boundary conditions.
problem : Problem instance, optional
The problem that can be passed to user functions as a context.
active_only : bool
If True, the active DOF info ``self.adi`` uses the reduced (active
DOFs only) numbering. Otherwise it is the same as ``self.di``.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.ebcs = ebcs
self.epbcs = epbcs
##
        # Assign EBC, PBC to variables and regions.
if ebcs is not None:
self.bc_of_vars = self.ebcs.group_by_variables()
else:
self.bc_of_vars = {}
if epbcs is not None:
self.bc_of_vars = self.epbcs.group_by_variables(self.bc_of_vars)
##
# List EBC nodes/dofs for each variable.
active_bcs = set()
for var_name in self.di.var_names:
var = self[var_name]
bcs = self.bc_of_vars.get(var.name, None)
var_di = self.di.get_info(var_name)
active = var.equation_mapping(bcs, var_di, ts, functions,
problem=problem)
active_bcs.update(active)
if self.has_virtual_dcs:
vvar = self[var.dual_var_name]
vvar_di = self.vdi.get_info(var_name)
active = vvar.equation_mapping(bcs, vvar_di, ts, functions,
problem=problem)
active_bcs.update(active)
self.adi = DofInfo('active_state_dof_info')
for var_name in self.ordered_state:
self.adi.append_variable(self[var_name], active=active_only)
if self.has_virtual_dcs:
self.avdi = DofInfo('active_virtual_dof_info')
for var_name in self.ordered_virtual:
self.avdi.append_variable(self[var_name], active=active_only)
else:
self.avdi = self.adi
self.has_eq_map = True
return active_bcs
def get_matrix_shape(self):
if not self.has_eq_map:
raise ValueError('call equation_mapping() first!')
return (self.avdi.ptr[-1], self.adi.ptr[-1])
def setup_initial_conditions(self, ics, functions):
self.ics = ics
self.ic_of_vars = self.ics.group_by_variables()
for var_name in self.di.var_names:
var = self[var_name]
ics = self.ic_of_vars.get(var.name, None)
if ics is None: continue
var.setup_initial_conditions(ics, self.di, functions)
for var_name in self.parameter:
var = self[var_name]
if hasattr(var, 'special') and ('ic' in var.special):
setter, sargs, skwargs = var._get_setter('ic', functions)
var.set_data(setter(*sargs, **skwargs))
output('IC data of %s set by %s()' % (var.name, setter.name))
def set_adof_conns(self, adof_conns):
"""
Set all active DOF connectivities to `self` as well as relevant
sub-dicts to the individual variables.
"""
self.adof_conns = adof_conns
for var in self:
var.adof_conns = {}
for key, val in six.iteritems(adof_conns):
if key[0] in self.names:
var = self[key[0]]
var.adof_conns[key] = val
var = var.get_dual()
if var is not None:
var.adof_conns[key] = val
def create_state_vector(self):
vec = nm.zeros((self.di.ptr[-1],), dtype=self.dtype)
return vec
def create_stripped_state_vector(self):
vec = nm.zeros((self.adi.ptr[-1],), dtype=self.dtype)
return vec
def apply_ebc(self, vec, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions
defined for the state variables to vector `vec`.
"""
for var in self.iter_state():
var.apply_ebc(vec, self.di.indx[var.name].start, force_values)
def apply_ic(self, vec, force_values=None):
"""
Apply initial conditions defined for the state variables to
vector `vec`.
"""
for var in self.iter_state():
var.apply_ic(vec, self.di.indx[var.name].start, force_values)
def strip_state_vector(self, vec, follow_epbc=False, svec=None):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
If 'follow_epbc' is True, values of EPBC master dofs are not simply
thrown away, but added to the corresponding slave dofs, just like when
assembling. For vectors with state (unknown) variables it should be set
to False, for assembled vectors it should be set to True.
"""
if svec is None:
svec = nm.empty((self.adi.ptr[-1],), dtype=self.dtype)
for var in self.iter_state():
aindx = self.adi.indx[var.name]
svec[aindx] = var.get_reduced(vec, self.di.indx[var.name].start,
follow_epbc)
return svec
def make_full_vec(self, svec, force_value=None, vec=None):
"""
Make a full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Parameters
----------
svec : array
The reduced DOF vector.
force_value : float, optional
Passing a `force_value` overrides the EBC values.
vec : array, optional
If given, the buffer for storing the result (zeroed).
Returns
-------
vec : array
The full DOF vector.
"""
self.check_vector_size(svec, stripped=True)
if self.has_lcbc:
if self.has_lcbc_rhs:
svec = self.mtx_lcbc * svec + self.vec_lcbc
else:
svec = self.mtx_lcbc * svec
if vec is None:
vec = self.create_state_vector()
for var in self.iter_state():
indx = self.di.indx[var.name]
aindx = self.adi.indx[var.name]
var.get_full(svec, aindx.start, force_value, vec, indx.start)
return vec
def has_ebc(self, vec, force_values=None):
for var_name in self.di.var_names:
eq_map = self[var_name].eq_map
i0 = self.di.indx[var_name].start
ii = i0 + eq_map.eq_ebc
if force_values is None:
if not nm.allclose(vec[ii], eq_map.val_ebc):
return False
else:
if isinstance(force_values, dict):
if not nm.allclose(vec[ii], force_values[var_name]):
return False
else:
if not nm.allclose(vec[ii], force_values):
return False
# EPBC.
if not nm.allclose(vec[i0+eq_map.master], vec[i0+eq_map.slave]):
return False
return True
def get_indx(self, var_name, stripped=False, allow_dual=False):
var = self[var_name]
if not var.is_state():
if allow_dual and var.is_virtual():
var_name = var.primary_var_name
else:
msg = '%s is not a state part' % var_name
raise IndexError(msg)
if stripped:
return self.adi.indx[var_name]
else:
return self.di.indx[var_name]
def check_vector_size(self, vec, stripped=False):
"""
Check whether the shape of the DOF vector corresponds to the
total number of DOFs of the state variables.
Parameters
----------
vec : array
The vector of DOF values.
stripped : bool
If True, the size of the DOF vector should be reduced,
i.e. without DOFs fixed by boundary conditions.
"""
if not stripped:
n_dof = self.di.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (variables) == %d (DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
else:
if self.has_lcbc:
n_dof = self.lcdi.get_n_dof_total()
else:
n_dof = self.adi.get_n_dof_total()
if vec.size != n_dof:
msg = 'incompatible data size!' \
' (%d (active variables) == %d (reduced DOF vector))' \
% (n_dof, vec.size)
raise ValueError(msg)
def get_state_part_view(self, state, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
return state[self.get_indx(var_name, stripped)]
def set_state_part(self, state, part, var_name, stripped=False):
self.check_vector_size(state, stripped=stripped)
state[self.get_indx(var_name, stripped)] = part
def get_state_parts(self, vec=None):
"""
Return parts of a state vector corresponding to individual state
variables.
Parameters
----------
vec : array, optional
The state vector. If not given, then the data stored in the
variables are returned instead.
Returns
-------
out : dict
The dictionary of the state parts.
"""
if vec is not None:
self.check_vector_size(vec)
out = {}
for var in self.iter_state():
if vec is None:
out[var.name] = var()
else:
out[var.name] = vec[self.di.indx[var.name]]
return out
def set_data(self, data, step=0, ignore_unknown=False,
preserve_caches=False):
"""
Set data (vectors of DOF values) of variables.
Parameters
----------
data : array
The state vector or dictionary of {variable_name : data vector}.
step : int, optional
The time history step, 0 (default) = current.
ignore_unknown : bool, optional
Ignore unknown variable names if `data` is a dict.
preserve_caches : bool
If True, do not invalidate evaluate caches of variables.
"""
if data is None: return
if isinstance(data, dict):
for key, val in six.iteritems(data):
try:
var = self[key]
except (ValueError, IndexError):
if ignore_unknown:
pass
else:
raise KeyError('unknown variable! (%s)' % key)
else:
var.set_data(val, step=step,
preserve_caches=preserve_caches)
elif isinstance(data, nm.ndarray):
self.check_vector_size(data)
for ii in self.state:
var = self[ii]
var.set_data(data, self.di.indx[var.name], step=step,
preserve_caches=preserve_caches)
else:
raise ValueError('unknown data class! (%s)' % data.__class__)
def set_from_state(self, var_names, state, var_names_state):
"""
Set variables with names in `var_names` from state variables with names
in `var_names_state` using DOF values in the state vector `state`.
"""
self.check_vector_size(state)
if isinstance(var_names, basestr):
var_names = [var_names]
var_names_state = [var_names_state]
for ii, var_name in enumerate(var_names):
var_name_state = var_names_state[ii]
if self[var_name_state].is_state():
self[var_name].set_data(state, self.di.indx[var_name_state])
else:
msg = '%s is not a state part' % var_name_state
raise IndexError(msg)
def state_to_output(self, vec, fill_value=None, var_info=None,
extend=True, linearization=None):
"""
Convert a state vector to a dictionary of output data usable by
Mesh.write().
"""
di = self.di
if var_info is None:
self.check_vector_size(vec)
var_info = {}
for name in di.var_names:
var_info[name] = (False, name)
out = {}
for key, indx in six.iteritems(di.indx):
var = self[key]
if key not in list(var_info.keys()): continue
is_part, name = var_info[key]
if is_part:
aux = vec
else:
aux = vec[indx]
out.update(var.create_output(aux, key=name, extend=extend,
fill_value=fill_value,
linearization=linearization))
return out
def iter_state(self, ordered=True):
if ordered:
for ii in self.ordered_state:
yield self[ii]
else:
for ii in self.state:
yield self[ii]
def init_history(self):
for var in self.iter_state():
var.init_history()
def time_update(self, ts, functions, verbose=True):
if verbose:
output('updating variables...')
for var in self:
var.time_update(ts, functions)
if verbose:
output('...done')
def advance(self, ts):
for var in self.iter_state():
var.advance(ts)
class Variable(Struct):
_count = 0
_orders = []
_all_var_names = set()
@staticmethod
def reset():
Variable._count = 0
Variable._orders = []
Variable._all_var_names = set()
@staticmethod
def from_conf(key, conf, fields):
aux = conf.kind.split()
if len(aux) == 2:
kind, family = aux
elif len(aux) == 3:
kind, family = aux[0], '_'.join(aux[1:])
else:
raise ValueError('variable kind is 2 or 3 words! (%s)' % conf.kind)
history = conf.get('history', None)
if history is not None:
try:
history = int(history)
assert_(history >= 0)
except (ValueError, TypeError):
raise ValueError('history must be integer >= 0! (got "%s")'
% history)
order = conf.get('order', None)
if order is not None:
order = int(order)
primary_var_name = conf.get('dual', None)
if primary_var_name is None:
if hasattr(conf, 'like'):
primary_var_name = get_default(conf.like, '(set-to-None)')
else:
primary_var_name = None
special = conf.get('special', None)
if family == 'field':
try:
fld = fields[conf.field]
except IndexError:
msg = 'field "%s" does not exist!' % conf.field
raise KeyError(msg)
if "DG" in fld.family_name:
obj = DGFieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
obj = FieldVariable(conf.name, kind, fld, order, primary_var_name,
special=special, key=key, history=history)
else:
raise ValueError('unknown variable family! (%s)' % family)
return obj
def __init__(self, name, kind, order=None, primary_var_name=None,
special=None, flags=None, **kwargs):
Struct.__init__(self, name=name, **kwargs)
self.flags = set()
if flags is not None:
for flag in flags:
self.flags.add(flag)
self.indx = slice(None)
self.n_dof = None
self.step = 0
self.dt = 1.0
self.initial_condition = None
self.dual_var_name = None
self.eq_map = None
if self.is_virtual():
self.data = None
else:
self.data = deque()
self.data.append(None)
self._set_kind(kind, order, primary_var_name, special=special)
Variable._all_var_names.add(name)
def _set_kind(self, kind, order, primary_var_name, special=None):
if kind == 'unknown':
self.flags.add(is_state)
if order is not None:
if order in Variable._orders:
raise ValueError('order %d already used!' % order)
else:
self._order = order
Variable._orders.append(order)
else:
self._order = Variable._count
Variable._orders.append(self._order)
Variable._count += 1
self.dof_name = self.name
elif kind == 'test':
if primary_var_name == self.name:
raise ValueError('primary variable for %s cannot be %s!'
% (self.name, primary_var_name))
self.flags.add(is_virtual)
msg = 'test variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
self.dof_name = self.primary_var_name
elif kind == 'parameter':
self.flags.add(is_parameter)
msg = 'parameter variable %s: related unknown missing' % self.name
self.primary_var_name = get_default(primary_var_name, None, msg)
if self.primary_var_name == '(set-to-None)':
self.primary_var_name = None
self.dof_name = self.name
else:
self.dof_name = self.primary_var_name
if special is not None:
self.special = special
else:
raise NotImplementedError('unknown variable kind: %s' % kind)
self.kind = kind
def _setup_dofs(self, n_nod, n_components, val_shape):
"""
Setup number of DOFs and DOF names.
"""
self.n_nod = n_nod
self.n_components = n_components
self.val_shape = val_shape
self.n_dof = self.n_nod * self.n_components
self.dofs = [self.dof_name + ('.%d' % ii)
for ii in range(self.n_components)]
def get_primary(self):
"""
Get the corresponding primary variable.
Returns
-------
var : Variable instance
The primary variable, or `self` for state
variables or if `primary_var_name` is None, or None if no other
variables are defined.
"""
if self.is_state():
var = self
elif self.primary_var_name is not None:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
else:
var = self
return var
def get_dual(self):
"""
Get the dual variable.
Returns
-------
var : Variable instance
The primary variable for non-state variables, or the dual
variable for state variables.
"""
if self.is_state():
if ((self._variables is not None)
and (self.dual_var_name in self._variables.names)):
var = self._variables[self.dual_var_name]
else:
var = None
else:
if ((self._variables is not None)
and (self.primary_var_name in self._variables.names)):
var = self._variables[self.primary_var_name]
else:
var = None
return var
def is_state(self):
return is_state in self.flags
def is_virtual(self):
return is_virtual in self.flags
def is_parameter(self):
return is_parameter in self.flags
def is_state_or_parameter(self):
return (is_state in self.flags) or (is_parameter in self.flags)
def is_kind(self, kind):
return eval('self.is_%s()' % kind)
def is_real(self):
return self.dtype in real_types
def is_complex(self):
return self.dtype in complex_types
def is_finite(self, step=0, derivative=None, dt=None):
return nm.isfinite(self(step=step, derivative=derivative, dt=dt)).all()
def get_primary_name(self):
if self.is_state():
name = self.name
else:
name = self.primary_var_name
return name
def init_history(self):
"""Initialize data of variables with history."""
if self.history is None: return
self.data = deque((self.history + 1) * [None])
self.step = 0
def time_update(self, ts, functions):
"""Implemented in subclasses."""
pass
def advance(self, ts):
"""
Advance in time the DOF state history. A copy of the DOF vector
is made to prevent history modification.
"""
if self.history is None: return
self.step = ts.step + 1
if self.history > 0:
# Copy the current step data to the history data, shift history,
# initialize if needed. The current step data are left intact.
# Note: cannot use self.data.rotate() due to data sharing with
# State.
for ii in range(self.history, 0, -1):
if self.data[ii] is None:
self.data[ii] = nm.empty_like(self.data[0])
self.data[ii][:] = self.data[ii - 1]
# Advance evaluate cache.
for step_cache in six.itervalues(self.evaluate_cache):
steps = sorted(step_cache.keys())
for step in steps:
if step is None:
# Special caches with possible custom advance()
# function.
for key, val in six.iteritems(step_cache[step]):
if hasattr(val, '__advance__'):
val.__advance__(ts, val)
elif -step < self.history:
step_cache[step-1] = step_cache[step]
if len(steps) and (steps[0] is not None):
step_cache.pop(steps[-1])
def init_data(self, step=0):
"""
Initialize the dof vector data of time step `step` to zeros.
"""
if self.is_state_or_parameter():
data = nm.zeros((self.n_dof,), dtype=self.dtype)
self.set_data(data, step=step)
def set_constant(self, val):
"""
Set the variable to a constant value.
"""
data = nm.empty((self.n_dof,), dtype=self.dtype)
data.fill(val)
self.set_data(data)
def set_data(self, data=None, indx=None, step=0,
preserve_caches=False):
"""
Set data (vector of DOF values) of the variable.
Parameters
----------
data : array
The vector of DOF values.
indx : int, optional
If given, `data[indx]` is used.
step : int, optional
The time history step, 0 (default) = current.
preserve_caches : bool
If True, do not invalidate evaluate caches of the variable.
"""
data = data.ravel()
if indx is None:
indx = slice(0, len(data))
else:
indx = slice(int(indx.start), int(indx.stop))
n_data_dof = indx.stop - indx.start
if self.n_dof != n_data_dof:
msg = 'incompatible data shape! (%d (variable) == %d (data))' \
% (self.n_dof, n_data_dof)
raise ValueError(msg)
elif (step > 0) or (-step >= len(self.data)):
raise ValueError('step %d out of range! ([%d, 0])'
% (step, -(len(self.data) - 1)))
else:
self.data[step] = data
self.indx = indx
if not preserve_caches:
self.invalidate_evaluate_cache(step=step)
def __call__(self, step=0, derivative=None, dt=None):
"""
Return vector of degrees of freedom of the variable.
Parameters
----------
step : int, default 0
The time step (0 means current, -1 previous, ...).
derivative : None or 'dt'
If not None, return time derivative of the DOF vector,
approximated by the backward finite difference.
Returns
-------
vec : array
The DOF vector. If `derivative` is None: a view of the data vector,
otherwise: required derivative of the DOF vector
at time step given by `step`.
Notes
-----
If the previous time step is requested in step 0, the step 0
DOF vector is returned instead.
"""
if derivative is None:
if (self.step == 0) and (step == -1):
data = self.data[0]
else:
data = self.data[-step]
if data is None:
raise ValueError('data of variable are not set! (%s, step %d)' \
% (self.name, step))
return data[self.indx]
else:
if self.history is None:
msg = 'set history type of variable %s to use derivatives!'\
% self.name
raise ValueError(msg)
dt = get_default(dt, self.dt)
return (self(step=step) - self(step=step-1)) / dt
def get_initial_condition(self):
if self.initial_condition is None:
return 0.0
else:
return self.initial_condition
class FieldVariable(Variable):
"""
A finite element field variable.
field .. field description of variable (borrowed)
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
Variable.__init__(self, name, kind, order, primary_var_name,
special, flags, history=history, **kwargs)
self._set_field(field)
self.has_field = True
self.has_bc = True
self._variables = None
self.clear_evaluate_cache()
def _set_field(self, field):
"""
Set field of the variable.
Takes reference to a Field instance. Sets dtype according to
field.dtype. Sets `dim` attribute to spatial dimension.
"""
self.is_surface = field.is_surface
self.field = field
self._setup_dofs(field.n_nod, field.n_components, field.val_shape)
self.flags.add(is_field)
self.dtype = field.dtype
self.dim = field.domain.shape.dim
def _get_setter(self, kind, functions, **kwargs):
"""
        Get the setter function of the variable and its arguments depending on
        the setter kind.
"""
if not (hasattr(self, 'special') and (kind in self.special)):
return
setter_name = self.special[kind]
setter = functions[setter_name]
region = self.field.region
nod_list = self.field.get_dofs_in_region(region)
nods = nm.unique(nod_list)
coors = self.field.get_coor(nods)
if kind == 'setter':
sargs = (kwargs.get('ts'), coors)
elif kind == 'ic':
sargs = (coors, )
skwargs = {'region' : region}
return setter, sargs, skwargs
def get_field(self):
return self.field
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
Get the reference element mapping of the underlying field.
See Also
--------
sfepy.discrete.common.fields.Field.get_mapping
"""
if region is None:
region = self.field.region
out = self.field.get_mapping(region, integral, integration,
get_saved=get_saved,
return_key=return_key)
return out
def get_dof_conn(self, dc_type, is_trace=False, trace_region=None):
"""
Get active dof connectivity of a variable.
Notes
-----
The primary and dual variables must have the same Region.
"""
if self.is_virtual():
var = self.get_primary()
# No primary variable can occur in single term evaluations.
var_name = var.name if var is not None else self.name
else:
var_name = self.name
if not is_trace:
region_name = dc_type.region_name
else:
aux = self.field.domain.regions[dc_type.region_name]
region = aux.get_mirror_region(trace_region)
region_name = region.name
key = (var_name, region_name, dc_type.type, is_trace)
dc = self.adof_conns[key]
return dc
def get_dof_info(self, active=False):
details = Struct(name='field_var_dof_details',
n_nod=self.n_nod,
dpn=self.n_components)
if active:
n_dof = self.n_adof
else:
n_dof = self.n_dof
return n_dof, details
def time_update(self, ts, functions):
"""
Store time step, set variable data for variables with the setter
function.
"""
if ts is not None:
self.dt = ts.dt
if hasattr(self, 'special') and ('setter' in self.special):
setter, sargs, skwargs = self._get_setter('setter', functions,
ts=ts)
self.set_data(setter(*sargs, **skwargs))
output('data of %s set by %s()' % (self.name, setter.name))
def set_from_qp(self, data_qp, integral, step=0):
"""
Set DOFs of variable using values in quadrature points
corresponding to the given integral.
"""
data_vertex = self.field.average_qp_to_vertices(data_qp, integral)
# Field nodes values.
data = self.field.interp_v_vals_to_n_vals(data_vertex)
data = data.ravel()
self.indx = slice(0, len(data))
self.data[step] = data
def set_from_mesh_vertices(self, data):
"""
Set the variable using values at the mesh vertices.
"""
ndata = self.field.interp_v_vals_to_n_vals(data)
self.set_data(ndata)
def set_from_function(self, fun, step=0):
"""
Set the variable data (the vector of DOF values) using a function of
space coordinates.
Parameters
----------
fun : callable
The function of coordinates returning DOF values of shape
`(n_coor, n_components)`.
step : int, optional
The time history step, 0 (default) = current.
"""
_, vv = self.field.set_dofs(fun, self.field.region, self.n_components)
self.set_data(vv.ravel(), step=step)
def equation_mapping(self, bcs, var_di, ts, functions, problem=None,
warn=False):
"""
Create the mapping of active DOFs from/to all DOFs.
Sets n_adof.
Returns
-------
active_bcs : set
The set of boundary conditions active in the current time.
"""
self.eq_map = EquationMap('eq_map', self.dofs, var_di)
if bcs is not None:
bcs.canonize_dof_names(self.dofs)
bcs.sort()
active_bcs = self.eq_map.map_equations(bcs, self.field, ts, functions,
problem=problem, warn=warn)
self.n_adof = self.eq_map.n_eq
return active_bcs
def setup_initial_conditions(self, ics, di, functions, warn=False):
"""
Setup of initial conditions.
"""
ics.canonize_dof_names(self.dofs)
ics.sort()
self.initial_condition = nm.zeros((di.n_dof[self.name],),
dtype=self.dtype)
for ic in ics:
region = ic.region
dofs, val = ic.dofs
if warn:
clean_msg = ('warning: ignoring nonexistent' \
' IC node (%s) in ' % self.name)
else:
clean_msg = None
nod_list = self.field.get_dofs_in_region(region)
if len(nod_list) == 0:
continue
fun = get_condition_value(val, functions, 'IC', ic.name)
if isinstance(fun, Function):
aux = fun
fun = lambda coors: aux(coors, ic=ic)
nods, vv = self.field.set_dofs(fun, region, len(dofs), clean_msg)
eq = expand_nodes_to_equations(nods, dofs, self.dofs)
self.initial_condition[eq] = nm.ravel(vv)
def get_data_shape(self, integral, integration='volume', region_name=None):
"""
Get element data dimensions for given approximation.
Parameters
----------
integral : Integral instance
The integral describing used numerical quadrature.
integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
The term integration type.
region_name : str
The name of the region of the integral.
Returns
-------
data_shape : 5 ints
The `(n_el, n_qp, dim, n_en, n_comp)` for volume shape kind,
`(n_fa, n_qp, dim, n_fn, n_comp)` for surface shape kind and
`(n_nod, 0, 0, 1, n_comp)` for point shape kind.
Notes
-----
- `n_el`, `n_fa` = number of elements/facets
- `n_qp` = number of quadrature points per element/facet
- `dim` = spatial dimension
- `n_en`, `n_fn` = number of element/facet nodes
- `n_comp` = number of variable components in a point/node
- `n_nod` = number of element nodes
"""
aux = self.field.get_data_shape(integral, integration=integration,
region_name=region_name)
data_shape = aux + (self.n_components,)
return data_shape
def clear_evaluate_cache(self):
"""
Clear current evaluate cache.
"""
self.evaluate_cache = {}
def invalidate_evaluate_cache(self, step=0):
"""
Invalidate variable data in evaluate cache for time step given
by `step` (0 is current, -1 previous, ...).
This should be done, for example, prior to every nonlinear
solver iteration.
"""
for step_cache in six.itervalues(self.evaluate_cache):
for key in list(step_cache.keys()):
if key == step: # Given time step to clear.
step_cache.pop(key)
def evaluate(self, mode='val',
region=None, integral=None, integration=None,
step=0, time_derivative=None, is_trace=False,
trace_region=None, dt=None, bf=None):
"""
Evaluate various quantities related to the variable according to
`mode` in quadrature points defined by `integral`.
The evaluated data are cached in the variable instance in
`evaluate_cache` attribute.
Parameters
----------
mode : one of 'val', 'grad', 'div', 'cauchy_strain'
The evaluation mode.
region : Region instance, optional
The region where the evaluation occurs. If None, the
underlying field region is used.
integral : Integral instance, optional
The integral defining quadrature points in which the
evaluation occurs. If None, the first order volume integral
is created. Must not be None for surface integrations.
integration : 'volume', 'surface', 'surface_extra', or 'point'
The term integration type. If None, it is derived from
`integral`.
step : int, default 0
The time step (0 means current, -1 previous, ...).
time_derivative : None or 'dt'
If not None, return time derivative of the data,
approximated by the backward finite difference.
is_trace : bool, default False
Indicate evaluation of trace of the variable on a boundary
region.
dt : float, optional
The time step to be used if `derivative` is `'dt'`. If None,
the `dt` attribute of the variable is used.
bf : Base function, optional
The base function to be used in 'val' mode.
Returns
-------
out : array
The 4-dimensional array of shape
`(n_el, n_qp, n_row, n_col)` with the requested data,
where `n_row`, `n_col` depend on `mode`.
"""
if integration == 'custom':
msg = 'cannot use FieldVariable.evaluate() with custom integration!'
raise ValueError(msg)
step_cache = self.evaluate_cache.setdefault(mode, {})
cache = step_cache.setdefault(step, {})
field = self.field
if region is None:
region = field.region
if is_trace:
region = region.get_mirror_region(trace_region)
if (region is not field.region) and not region.is_empty:
assert_(field.region.contains(region))
if integral is None:
integral = Integral('aux_1', 1)
if integration is None:
integration = 'volume' if region.can_cells else 'surface'
geo, _, key = field.get_mapping(region, integral, integration,
return_key=True)
key += (time_derivative, is_trace)
if key in cache:
out = cache[key]
else:
vec = self(step=step, derivative=time_derivative, dt=dt)
ct = integration
if integration == 'surface_extra':
ct = 'volume'
conn = field.get_econn(ct, region, is_trace, integration)
shape = self.get_data_shape(integral, integration, region.name)
if self.dtype == nm.float64:
out = eval_real(vec, conn, geo, mode, shape, bf)
else:
out = eval_complex(vec, conn, geo, mode, shape, bf)
cache[key] = out
return out
def get_state_in_region(self, region, reshape=True, step=0):
"""
Get DOFs of the variable in the given region.
Parameters
----------
region : Region
The selected region.
reshape : bool
If True, reshape the DOF vector to a 2D array with the individual
components as columns. Otherwise a 1D DOF array of the form [all
DOFs in region node 0, all DOFs in region node 1, ...] is returned.
step : int, default 0
The time step (0 means current, -1 previous, ...).
Returns
-------
out : array
The selected DOFs.
"""
nods = self.field.get_dofs_in_region(region, merge=True)
eq = nm.empty((len(nods) * self.n_components,), dtype=nm.int32)
for idof in range(self.n_components):
eq[idof::self.n_components] = self.n_components * nods \
+ idof + self.indx.start
out = self.data[step][eq]
if reshape:
out.shape = (len(nods), self.n_components)
return out
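    # A minimal numpy-only illustration, not part of the original sfepy
    # sources: it mimics the index construction of get_state_in_region()
    # above for made-up node numbers and component count.
    @staticmethod
    def _example_region_dof_indices():
        nods = nm.array([4, 7], dtype=nm.int32)
        n_components = 2
        eq = nm.empty(len(nods) * n_components, dtype=nm.int32)
        for idof in range(n_components):
            eq[idof::n_components] = n_components * nods + idof
        # eq == [8, 9, 14, 15]: both components of node 4, then of node 7.
        return eq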
def apply_ebc(self, vec, offset=0, force_values=None):
"""
Apply essential (Dirichlet) and periodic boundary conditions to
vector `vec`, starting at `offset`.
"""
eq_map = self.eq_map
ii = offset + eq_map.eq_ebc
        # EBC.
if force_values is None:
vec[ii] = eq_map.val_ebc
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
# EPBC.
vec[offset+eq_map.master] = vec[offset+eq_map.slave]
def apply_ic(self, vec, offset=0, force_values=None):
"""
        Apply initial conditions to vector `vec`, starting at
`offset`.
"""
ii = slice(offset, offset + self.n_dof)
if force_values is None:
vec[ii] = self.get_initial_condition()
else:
if isinstance(force_values, dict):
vec[ii] = force_values[self.name]
else:
vec[ii] = force_values
def get_reduced(self, vec, offset=0, follow_epbc=False):
"""
Get the reduced DOF vector, with EBC and PBC DOFs removed.
Notes
-----
The full vector starts in `vec` at `offset`. If 'follow_epbc' is True,
values of EPBC master DOFs are not simply thrown away, but added to the
corresponding slave DOFs, just like when assembling. For vectors with
state (unknown) variables it should be set to False, for assembled
vectors it should be set to True.
"""
eq_map = self.eq_map
ii = offset + eq_map.eqi
r_vec = vec[ii]
if follow_epbc:
master = offset + eq_map.master
slave = eq_map.eq[eq_map.slave]
ii = slave >= 0
la.assemble1d(r_vec, slave[ii], vec[master[ii]])
return r_vec
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
# EBC.
vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values.
vec[eq_map.eqi] = r_vec
# EPBC.
vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
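    # A minimal numpy-only illustration, not part of the original sfepy
    # sources: it sketches the indexing pattern shared by get_reduced() and
    # get_full() above. The index arrays are hypothetical stand-ins for an
    # EquationMap: DOF 1 is fixed by an EBC to 7.0, DOFs 0, 2 and 3 are free.
    @staticmethod
    def _example_full_vs_reduced():
        eqi = nm.array([0, 2, 3], dtype=nm.int32)
        eq_ebc = nm.array([1], dtype=nm.int32)
        val_ebc = nm.array([7.0])
        vec = nm.array([1.0, 7.0, 2.0, 3.0])
        r_vec = vec[eqi]               # reduced vector: [1., 2., 3.]
        full = nm.empty(4)
        full[eq_ebc] = val_ebc         # re-apply the EBC value
        full[eqi] = r_vec              # scatter the free DOFs back
        return r_vec, full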
def create_output(self, vec=None, key=None, extend=True, fill_value=None,
linearization=None):
"""
Convert the DOF vector to a dictionary of output data usable by
Mesh.write().
Parameters
----------
vec : array, optional
An alternative DOF vector to be used instead of the variable
DOF vector.
key : str, optional
The key to be used in the output dictionary instead of the
variable name.
extend : bool
Extend the DOF values to cover the whole domain.
fill_value : float or complex
The value used to fill the missing DOF values if `extend` is True.
linearization : Struct or None
The linearization configuration for higher order approximations.
"""
linearization = get_default(linearization, Struct(kind='strip'))
if vec is None:
vec = self()
key = get_default(key, self.name)
aux = nm.reshape(vec,
(self.n_dof // self.n_components, self.n_components))
out = self.field.create_output(aux, self.name, dof_names=self.dofs,
key=key, extend=extend,
fill_value=fill_value,
linearization=linearization)
return out
def get_element_diameters(self, cells, mode, square=False):
"""Get diameters of selected elements."""
field = self.field
domain = field.domain
cells = nm.array(cells)
diameters = nm.empty((cells.shape[0],), dtype=nm.float64)
integral = Integral('i_tmp', 1)
vg, _ = field.get_mapping(field.region, integral, 'volume')
diameters = domain.get_element_diameters(cells, vg, mode, square=square)
return diameters
def save_as_mesh(self, filename):
"""
Save the field mesh and the variable values into a file for
visualization. Only the vertex values are stored.
"""
mesh = self.field.create_mesh(extra_nodes=False)
vec = self()
n_nod, n_dof, dpn = mesh.n_nod, self.n_dof, self.n_components
aux = nm.reshape(vec, (n_dof // dpn, dpn))
ext = self.field.extend_dofs(aux, 0.0)
out = {}
if self.field.approx_order != 0:
out[self.name] = Struct(name='output_data',
mode='vertex', data=ext,
var_name=self.name, dofs=self.dofs)
else:
ext.shape = (ext.shape[0], 1, ext.shape[1], 1)
out[self.name] = Struct(name='output_data',
mode='cell', data=ext,
var_name=self.name, dofs=self.dofs)
mesh.write(filename, io='auto', out=out)
def has_same_mesh(self, other):
"""
Returns
-------
flag : int
The flag can be either 'different' (different meshes), 'deformed'
(slightly deformed same mesh), or 'same' (same).
"""
f1 = self.field
f2 = other.field
c1 = f1.get_coor()
c2 = f2.get_coor()
if c1.shape != c2.shape:
flag = 'different'
else:
eps = 10.0 * nm.finfo(nm.float64).eps
if nm.allclose(c1, c2, rtol=eps, atol=0.0):
flag = 'same'
elif nm.allclose(c1, c2, rtol=0.1, atol=0.0):
flag = 'deformed'
else:
flag = 'different'
return flag
def get_interp_coors(self, strategy='interpolation', interp_term=None):
"""
Get the physical coordinates to interpolate into, based on the strategy
used.
"""
if strategy == 'interpolation':
coors = self.field.get_coor()
elif strategy == 'projection':
region = self.field.region
integral = Integral(term=interp_term)
coors = get_physical_qps(region, integral)
else:
raise ValueError('unknown interpolation strategy! (%s)' % strategy)
return coors
def evaluate_at(self, coors, mode='val', strategy='general',
close_limit=0.1, get_cells_fun=None,
cache=None, ret_cells=False,
ret_status=False, ret_ref_coors=False, verbose=False):
"""
Evaluate the variable in the given physical coordinates. Convenience
wrapper around :func:`Field.evaluate_at()
<sfepy.discrete.common.fields.Field.evaluate_at()>`, see its
docstring for more details.
"""
source_vals = self().reshape((self.n_nod, self.n_components))
out = self.field.evaluate_at(coors, source_vals,
mode=mode,
strategy=strategy,
close_limit=close_limit,
get_cells_fun=get_cells_fun,
cache=cache,
ret_cells=ret_cells,
ret_status=ret_status,
ret_ref_coors=ret_ref_coors,
verbose=verbose)
return out
def set_from_other(self, other, strategy='projection', close_limit=0.1):
"""
Set the variable using another variable. Undefined values (e.g. outside
the other mesh) are set to numpy.nan, or extrapolated.
Parameters
----------
strategy : 'projection' or 'interpolation'
The strategy to set the values: the L^2 orthogonal projection (not
implemented!), or a direct interpolation to the nodes (nodal
elements only!)
Notes
-----
If the other variable uses the same field mesh, the coefficients are
set directly.
"""
flag_same_mesh = self.has_same_mesh(other)
if flag_same_mesh == 'same':
self.set_data(other())
return
if strategy == 'interpolation':
coors = self.get_interp_coors(strategy)
elif strategy == 'projection':
## interp_term = Term() # TODO
## coors = self.get_interp_coors(strategy, interp_term)
pass
else:
raise ValueError('unknown interpolation strategy! (%s)' % strategy)
vals = other.evaluate_at(coors, strategy='general',
close_limit=close_limit)
if strategy == 'interpolation':
self.set_data(vals)
elif strategy == 'projection':
raise NotImplementedError('unsupported strategy! (%s)' % strategy)
else:
raise ValueError('unknown interpolation strategy! (%s)' % strategy)
class DGFieldVariable(FieldVariable):
"""
    Field variable specifically intended for use with DGFields; it bypasses
    application of EBC and EPBC as this is done in DGField.
    Its instance is checked in create_adof_conns.
"""
def __init__(self, name, kind, field, order=None, primary_var_name=None,
special=None, flags=None, history=None, **kwargs):
FieldVariable.__init__(self, name, kind, field, order=order,
primary_var_name=primary_var_name,
special=special, flags=flags,
history=history, **kwargs)
from sfepy.discrete.dg.fields import DGField
if isinstance(field, DGField):
pass
else:
raise ValueError("Attempted to use DGFieldVariable with non DGField!")
def apply_ebc(self, vec, offset=0, force_values=None):
pass
def get_full(self, r_vec, r_offset=0, force_value=None,
vec=None, offset=0):
"""
Get the full DOF vector satisfying E(P)BCs from a reduced DOF
vector.
Notes
-----
The reduced vector starts in `r_vec` at `r_offset`.
Passing a `force_value` overrides the EBC values. Optionally,
`vec` argument can be provided to store the full vector (in
place) starting at `offset`.
"""
if vec is None:
vec = nm.empty(self.n_dof, dtype=r_vec.dtype)
else:
vec = vec[offset:offset+self.n_dof]
eq_map = self.eq_map
r_vec = r_vec[r_offset:r_offset+eq_map.n_eq]
        # override to hotfix second application of EBCs
# # EBC.
# vec[eq_map.eq_ebc] = get_default(force_value, eq_map.val_ebc)
# Reduced vector values, for DG this is full vector as eq_map.eq
# contains all dofs, cf. create_adof_conns
vec[eq_map.eqi] = r_vec
# EPBC.
# vec[eq_map.master] = vec[eq_map.slave]
unused_dofs = self.field.get('unused_dofs')
if unused_dofs is not None:
vec[:] = self.field.restore_substituted(vec)
return vec
|
[
"sfepy.discrete.conditions.get_condition_value",
"sfepy.discrete.common.dof_info.is_active_bc",
"sfepy.base.base.assert_",
"sfepy.discrete.common.dof_info.DofInfo",
"sfepy.discrete.integrals.Integral",
"sfepy.linalg.assemble1d",
"sfepy.discrete.evaluate_variable.eval_complex",
"sfepy.discrete.common.mappings.get_physical_qps",
"sfepy.base.base.Struct",
"sfepy.base.base.iter_dict_of_lists",
"sfepy.discrete.common.dof_info.expand_nodes_to_equations",
"sfepy.discrete.fem.lcbc_operators.LCBCOperators",
"sfepy.base.base.Container.__setitem__",
"sfepy.base.base.get_default",
"sfepy.base.timing.Timer",
"sfepy.base.base.Struct.__init__",
"sfepy.base.base.OneTypeList",
"sfepy.discrete.evaluate_variable.eval_real",
"sfepy.discrete.common.dof_info.EquationMap",
"sfepy.discrete.common.region.are_disjoint",
"sfepy.base.base.output"
] |
[((1762, 1787), 'sfepy.base.base.get_default', 'get_default', (['var_indx', '{}'], {}), '(var_indx, {})\n', (1773, 1787), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((3236, 3283), 'sfepy.base.base.iter_dict_of_lists', 'iter_dict_of_lists', (['conn_info'], {'return_keys': '(True)'}), '(conn_info, return_keys=True)\n', (3254, 3283), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((5260, 5323), 'numpy.zeros', 'nm.zeros', (['(basis.shape[:2] + (dpn, n_bf * dpn))'], {'dtype': 'nm.float64'}), '(basis.shape[:2] + (dpn, n_bf * dpn), dtype=nm.float64)\n', (5268, 5323), True, 'import numpy as nm\n'), ((5338, 5348), 'six.moves.range', 'range', (['n_c'], {}), '(n_c)\n', (5343, 5348), False, 'from six.moves import range\n'), ((3112, 3154), 'sfepy.base.base.output', 'output', (['"""setting up dof connectivities..."""'], {}), "('setting up dof connectivities...')\n", (3118, 3154), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((3171, 3188), 'sfepy.base.timing.Timer', 'Timer', ([], {'start': '(True)'}), '(start=True)\n', (3176, 3188), False, 'from sfepy.base.timing import Timer\n'), ((4523, 4540), 'numpy.take', 'nm.take', (['eq', 'conn'], {}), '(eq, conn)\n', (4530, 4540), True, 'import numpy as nm\n'), ((4666, 4712), 'numpy.empty', 'nm.empty', (['(n_el, n_ep * dpn)'], {'dtype': 'conn.dtype'}), '((n_el, n_ep * dpn), dtype=conn.dtype)\n', (4674, 4712), True, 'import numpy as nm\n'), ((4748, 4758), 'six.moves.range', 'range', (['dpn'], {}), '(dpn)\n', (4753, 4758), False, 'from six.moves import range\n'), ((5368, 5378), 'six.moves.range', 'range', (['dpn'], {}), '(dpn)\n', (5373, 5378), False, 'from six.moves import range\n'), ((5787, 5806), 'six.iteritems', 'six.iteritems', (['conf'], {}), '(conf)\n', (5800, 5806), False, 'import six\n'), ((6716, 6752), 'sfepy.base.base.Container.__setitem__', 'Container.__setitem__', (['self', 'ii', 'var'], {}), '(self, ii, var)\n', (6737, 6752), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((9545, 9570), 'sfepy.discrete.common.dof_info.DofInfo', 'DofInfo', (['"""state_dof_info"""'], {}), "('state_dof_info')\n", (9552, 9570), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((11122, 11171), 'sfepy.discrete.fem.lcbc_operators.LCBCOperators', 'LCBCOperators', (['"""lcbcs"""', 'self'], {'functions': 'functions'}), "('lcbcs', self, functions=functions)\n", (11135, 11171), False, 'from sfepy.discrete.fem.lcbc_operators import LCBCOperators\n'), ((14125, 14157), 'sfepy.discrete.common.dof_info.DofInfo', 'DofInfo', (['"""active_state_dof_info"""'], {}), "('active_state_dof_info')\n", (14132, 14157), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((15812, 15837), 'six.iteritems', 'six.iteritems', (['adof_conns'], {}), '(adof_conns)\n', (15825, 15837), False, 'import six\n'), ((16123, 16169), 'numpy.zeros', 'nm.zeros', (['(self.di.ptr[-1],)'], {'dtype': 'self.dtype'}), '((self.di.ptr[-1],), dtype=self.dtype)\n', (16131, 16169), True, 'import numpy as nm\n'), ((16248, 16295), 'numpy.zeros', 'nm.zeros', 
(['(self.adi.ptr[-1],)'], {'dtype': 'self.dtype'}), '((self.adi.ptr[-1],), dtype=self.dtype)\n', (16256, 16295), True, 'import numpy as nm\n'), ((25060, 25082), 'six.iteritems', 'six.iteritems', (['di.indx'], {}), '(di.indx)\n', (25073, 25082), False, 'import six\n'), ((28314, 28356), 'sfepy.base.base.Struct.__init__', 'Struct.__init__', (['self'], {'name': 'name'}), '(self, name=name, **kwargs)\n', (28329, 28356), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((33468, 33502), 'collections.deque', 'deque', (['((self.history + 1) * [None])'], {}), '((self.history + 1) * [None])\n', (33473, 33502), False, 'from collections import deque\n'), ((35488, 35529), 'numpy.empty', 'nm.empty', (['(self.n_dof,)'], {'dtype': 'self.dtype'}), '((self.n_dof,), dtype=self.dtype)\n', (35496, 35529), True, 'import numpy as nm\n'), ((40050, 40069), 'numpy.unique', 'nm.unique', (['nod_list'], {}), '(nod_list)\n', (40059, 40069), True, 'import numpy as nm\n'), ((41864, 41941), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""field_var_dof_details"""', 'n_nod': 'self.n_nod', 'dpn': 'self.n_components'}), "(name='field_var_dof_details', n_nod=self.n_nod, dpn=self.n_components)\n", (41870, 41941), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((44260, 44300), 'sfepy.discrete.common.dof_info.EquationMap', 'EquationMap', (['"""eq_map"""', 'self.dofs', 'var_di'], {}), "('eq_map', self.dofs, var_di)\n", (44271, 44300), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((44848, 44898), 'numpy.zeros', 'nm.zeros', (['(di.n_dof[self.name],)'], {'dtype': 'self.dtype'}), '((di.n_dof[self.name],), dtype=self.dtype)\n', (44856, 44898), True, 'import numpy as nm\n'), ((47525, 47560), 'six.itervalues', 'six.itervalues', (['self.evaluate_cache'], {}), '(self.evaluate_cache)\n', (47539, 47560), False, 'import six\n'), ((52103, 52127), 'six.moves.range', 'range', (['self.n_components'], {}), '(self.n_components)\n', (52108, 52127), False, 'from six.moves import range\n'), ((55064, 55104), 'sfepy.base.base.get_default', 'get_default', (['force_value', 'eq_map.val_ebc'], {}), '(force_value, eq_map.val_ebc)\n', (55075, 55104), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((56386, 56413), 'sfepy.base.base.get_default', 'get_default', (['key', 'self.name'], {}), '(key, self.name)\n', (56397, 56413), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((56429, 56498), 'numpy.reshape', 'nm.reshape', (['vec', '(self.n_dof // self.n_components, self.n_components)'], {}), '(vec, (self.n_dof // self.n_components, self.n_components))\n', (56439, 56498), True, 'import numpy as nm\n'), ((57003, 57018), 'numpy.array', 'nm.array', (['cells'], {}), '(cells)\n', (57011, 57018), True, 'import numpy as nm\n'), ((57040, 57085), 'numpy.empty', 'nm.empty', (['(cells.shape[0],)'], {'dtype': 'nm.float64'}), '((cells.shape[0],), dtype=nm.float64)\n', (57048, 57085), True, 'import numpy as nm\n'), ((57106, 57126), 'sfepy.discrete.integrals.Integral', 'Integral', (['"""i_tmp"""', '(1)'], {}), "('i_tmp', 1)\n", (57114, 57126), False, 'from 
sfepy.discrete.integrals import Integral\n'), ((57656, 57692), 'numpy.reshape', 'nm.reshape', (['vec', '(n_dof // dpn, dpn)'], {}), '(vec, (n_dof // dpn, dpn))\n', (57666, 57692), True, 'import numpy as nm\n'), ((1925, 1961), 'numpy.arange', 'nm.arange', (['var.n_dof'], {'dtype': 'nm.int32'}), '(var.n_dof, dtype=nm.int32)\n', (1934, 1961), True, 'import numpy as nm\n'), ((4561, 4608), 'numpy.asarray', 'nm.asarray', (['(offset * (aux >= 0))'], {'dtype': 'nm.int32'}), '(offset * (aux >= 0), dtype=nm.int32)\n', (4571, 4608), True, 'import numpy as nm\n'), ((4778, 4808), 'numpy.take', 'nm.take', (['eq', '(dpn * conn + idof)'], {}), '(eq, dpn * conn + idof)\n', (4785, 4808), True, 'import numpy as nm\n'), ((6046, 6067), 'sfepy.base.base.OneTypeList', 'OneTypeList', (['Variable'], {}), '(Variable)\n', (6057, 6067), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((9716, 9743), 'sfepy.discrete.common.dof_info.DofInfo', 'DofInfo', (['"""virtual_dof_info"""'], {}), "('virtual_dof_info')\n", (9723, 9743), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((14333, 14367), 'sfepy.discrete.common.dof_info.DofInfo', 'DofInfo', (['"""active_virtual_dof_info"""'], {}), "('active_virtual_dof_info')\n", (14340, 14367), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((17438, 17485), 'numpy.empty', 'nm.empty', (['(self.adi.ptr[-1],)'], {'dtype': 'self.dtype'}), '((self.adi.ptr[-1],), dtype=self.dtype)\n', (17446, 17485), True, 'import numpy as nm\n'), ((23008, 23027), 'six.iteritems', 'six.iteritems', (['data'], {}), '(data)\n', (23021, 23027), False, 'import six\n'), ((25944, 25975), 'sfepy.base.base.output', 'output', (['"""updating variables..."""'], {}), "('updating variables...')\n", (25950, 25975), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((26078, 26095), 'sfepy.base.base.output', 'output', (['"""...done"""'], {}), "('...done')\n", (26084, 26095), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((28784, 28791), 'collections.deque', 'deque', ([], {}), '()\n', (28789, 28791), False, 'from collections import deque\n'), ((34168, 34194), 'six.moves.range', 'range', (['self.history', '(0)', '(-1)'], {}), '(self.history, 0, -1)\n', (34173, 34194), False, 'from six.moves import range\n'), ((34425, 34460), 'six.itervalues', 'six.itervalues', (['self.evaluate_cache'], {}), '(self.evaluate_cache)\n', (34439, 34460), False, 'import six\n'), ((35284, 35325), 'numpy.zeros', 'nm.zeros', (['(self.n_dof,)'], {'dtype': 'self.dtype'}), '((self.n_dof,), dtype=self.dtype)\n', (35292, 35325), True, 'import numpy as nm\n'), ((38274, 38298), 'sfepy.base.base.get_default', 'get_default', (['dt', 'self.dt'], {}), '(dt, self.dt)\n', (38285, 38298), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((42604, 42663), 'sfepy.base.base.output', 'output', (["('data of %s set by %s()' % (self.name, setter.name))"], {}), "('data of %s set by %s()' % (self.name, setter.name))\n", (42610, 42663), False, 'from sfepy.base.base import real_types, complex_types, assert_, 
get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((45366, 45416), 'sfepy.discrete.conditions.get_condition_value', 'get_condition_value', (['val', 'functions', '"""IC"""', 'ic.name'], {}), "(val, functions, 'IC', ic.name)\n", (45385, 45416), False, 'from sfepy.discrete.conditions import get_condition_value\n'), ((45635, 45683), 'sfepy.discrete.common.dof_info.expand_nodes_to_equations', 'expand_nodes_to_equations', (['nods', 'dofs', 'self.dofs'], {}), '(nods, dofs, self.dofs)\n', (45660, 45683), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((45726, 45738), 'numpy.ravel', 'nm.ravel', (['vv'], {}), '(vv)\n', (45734, 45738), True, 'import numpy as nm\n'), ((50338, 50358), 'sfepy.discrete.integrals.Integral', 'Integral', (['"""aux_1"""', '(1)'], {}), "('aux_1', 1)\n", (50346, 50358), False, 'from sfepy.discrete.integrals import Integral\n'), ((54247, 54295), 'sfepy.linalg.assemble1d', 'la.assemble1d', (['r_vec', 'slave[ii]', 'vec[master[ii]]'], {}), '(r_vec, slave[ii], vec[master[ii]])\n', (54260, 54295), True, 'import sfepy.linalg as la\n'), ((54833, 54872), 'numpy.empty', 'nm.empty', (['self.n_dof'], {'dtype': 'r_vec.dtype'}), '(self.n_dof, dtype=r_vec.dtype)\n', (54841, 54872), True, 'import numpy as nm\n'), ((56299, 56319), 'sfepy.base.base.Struct', 'Struct', ([], {'kind': '"""strip"""'}), "(kind='strip')\n", (56305, 56319), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((57829, 57920), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""vertex"""', 'data': 'ext', 'var_name': 'self.name', 'dofs': 'self.dofs'}), "(name='output_data', mode='vertex', data=ext, var_name=self.name,\n dofs=self.dofs)\n", (57835, 57920), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((58091, 58181), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'ext', 'var_name': 'self.name', 'dofs': 'self.dofs'}), "(name='output_data', mode='cell', data=ext, var_name=self.name, dofs=\n self.dofs)\n", (58097, 58181), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((58800, 58839), 'numpy.allclose', 'nm.allclose', (['c1', 'c2'], {'rtol': 'eps', 'atol': '(0.0)'}), '(c1, c2, rtol=eps, atol=0.0)\n', (58811, 58839), True, 'import numpy as nm\n'), ((63766, 63805), 'numpy.empty', 'nm.empty', (['self.n_dof'], {'dtype': 'r_vec.dtype'}), '(self.n_dof, dtype=r_vec.dtype)\n', (63774, 63805), True, 'import numpy as nm\n'), ((2047, 2083), 'numpy.arange', 'nm.arange', (['var.n_dof'], {'dtype': 'nm.int32'}), '(var.n_dof, dtype=nm.int32)\n', (2056, 2083), True, 'import numpy as nm\n'), ((4852, 4899), 'numpy.asarray', 'nm.asarray', (['(offset * (aux >= 0))'], {'dtype': 'nm.int32'}), '(offset * (aux >= 0), dtype=nm.int32)\n', (4862, 4899), True, 'import numpy as nm\n'), ((11518, 11542), 'sfepy.base.base.output', 'output', (['"""lcbc:"""', 'bc.name'], {}), "('lcbc:', bc.name)\n", (11524, 11542), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((15444, 15505), 'sfepy.base.base.output', 'output', (["('IC data of %s set by 
%s()' % (var.name, setter.name))"], {}), "('IC data of %s set by %s()' % (var.name, setter.name))\n", (15450, 15505), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((19482, 19542), 'numpy.allclose', 'nm.allclose', (['vec[i0 + eq_map.master]', 'vec[i0 + eq_map.slave]'], {}), '(vec[i0 + eq_map.master], vec[i0 + eq_map.slave])\n', (19493, 19542), True, 'import numpy as nm\n'), ((26880, 26901), 'sfepy.base.base.assert_', 'assert_', (['(history >= 0)'], {}), '(history >= 0)\n', (26887, 26901), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((27330, 27369), 'sfepy.base.base.get_default', 'get_default', (['conf.like', '"""(set-to-None)"""'], {}), "(conf.like, '(set-to-None)')\n", (27341, 27369), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((29898, 29938), 'sfepy.base.base.get_default', 'get_default', (['primary_var_name', 'None', 'msg'], {}), '(primary_var_name, None, msg)\n', (29909, 29938), False, 'from sfepy.base.base import real_types, complex_types, assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((30996, 31020), 'six.moves.range', 'range', (['self.n_components'], {}), '(self.n_components)\n', (31001, 31020), False, 'from six.moves import range\n'), ((51090, 51132), 'sfepy.discrete.evaluate_variable.eval_real', 'eval_real', (['vec', 'conn', 'geo', 'mode', 'shape', 'bf'], {}), '(vec, conn, geo, mode, shape, bf)\n', (51099, 51132), False, 'from sfepy.discrete.evaluate_variable import eval_real, eval_complex\n'), ((51174, 51219), 'sfepy.discrete.evaluate_variable.eval_complex', 'eval_complex', (['vec', 'conn', 'geo', 'mode', 'shape', 'bf'], {}), '(vec, conn, geo, mode, shape, bf)\n', (51186, 51219), False, 'from sfepy.discrete.evaluate_variable import eval_real, eval_complex\n'), ((58889, 58928), 'numpy.allclose', 'nm.allclose', (['c1', 'c2'], {'rtol': '(0.1)', 'atol': '(0.0)'}), '(c1, c2, rtol=0.1, atol=0.0)\n', (58900, 58928), True, 'import numpy as nm\n'), ((59418, 59444), 'sfepy.discrete.integrals.Integral', 'Integral', ([], {'term': 'interp_term'}), '(term=interp_term)\n', (59426, 59444), False, 'from sfepy.discrete.integrals import Integral\n'), ((59465, 59499), 'sfepy.discrete.common.mappings.get_physical_qps', 'get_physical_qps', (['region', 'integral'], {}), '(region, integral)\n', (59481, 59499), False, 'from sfepy.discrete.common.mappings import get_physical_qps\n'), ((2220, 2256), 'numpy.arange', 'nm.arange', (['var.n_dof'], {'dtype': 'nm.int32'}), '(var.n_dof, dtype=nm.int32)\n', (2229, 2256), True, 'import numpy as nm\n'), ((11426, 11470), 'sfepy.discrete.common.dof_info.is_active_bc', 'is_active_bc', (['bc'], {'ts': 'ts', 'functions': 'functions'}), '(bc, ts=ts, functions=functions)\n', (11438, 11470), False, 'from sfepy.discrete.common.dof_info import DofInfo, EquationMap, expand_nodes_to_equations, is_active_bc\n'), ((19071, 19107), 'numpy.allclose', 'nm.allclose', (['vec[ii]', 'eq_map.val_ebc'], {}), '(vec[ii], eq_map.val_ebc)\n', (19082, 19107), True, 'import numpy as nm\n'), ((30180, 30220), 'sfepy.base.base.get_default', 'get_default', (['primary_var_name', 'None', 'msg'], {}), '(primary_var_name, None, msg)\n', (30191, 30220), False, 'from sfepy.base.base import real_types, complex_types, 
assert_, get_default, output, OneTypeList, Container, Struct, basestr, iter_dict_of_lists\n'), ((34274, 34301), 'numpy.empty_like', 'nm.empty_like', (['self.data[0]'], {}), '(self.data[0])\n', (34287, 34301), True, 'import numpy as nm\n'), ((58759, 58779), 'numpy.finfo', 'nm.finfo', (['nm.float64'], {}), '(nm.float64)\n', (58767, 58779), True, 'import numpy as nm\n'), ((19238, 19282), 'numpy.allclose', 'nm.allclose', (['vec[ii]', 'force_values[var_name]'], {}), '(vec[ii], force_values[var_name])\n', (19249, 19282), True, 'import numpy as nm\n'), ((19370, 19404), 'numpy.allclose', 'nm.allclose', (['vec[ii]', 'force_values'], {}), '(vec[ii], force_values)\n', (19381, 19404), True, 'import numpy as nm\n'), ((34732, 34763), 'six.iteritems', 'six.iteritems', (['step_cache[step]'], {}), '(step_cache[step])\n', (34745, 34763), False, 'import six\n'), ((10918, 10950), 'sfepy.discrete.common.region.are_disjoint', 'are_disjoint', (['regs[i0]', 'regs[i1]'], {}), '(regs[i0], regs[i1])\n', (10930, 10950), False, 'from sfepy.discrete.common.region import are_disjoint\n')]
|
"""empty message
Revision ID: de316f0831f9
Revises: 60f151e462e9
Create Date: 2021-11-19 23:38:39.754126
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "de316f0831f9"
down_revision = "60f151e462e9"
branch_labels = None
depends_on = None
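# Summary of this autogenerated migration: add a "sale_type" enum column to
# "orders", drop the "type"/"operation" indexes and the "type" column from
# "balance", make client/order owner and contact columns non-nullable, add
# client address fields, replace order_details cost/sell_value with a single
# "value" column, drop the fiscal note tables, and make the users email index
# unique. downgrade() reverses these steps.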
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
sale_type = postgresql.ENUM(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
)
sale_type.create(op.get_bind())
op.add_column(
"orders",
sa.Column(
"sale_type",
sa.Enum(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
),
nullable=True,
),
)
op.drop_index("ix_balance_operation", table_name="balance")
op.drop_index("ix_balance_type", table_name="balance")
op.drop_column("balance", "type")
op.alter_column("clients", "email", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "phone", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.alter_column("orders", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.add_column("clients", sa.Column("zip_code", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
op.add_column("clients", sa.Column("address", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
op.add_column("order_details", sa.Column("value", sa.Float(), nullable=True))
op.drop_column("order_details", "sell_value")
op.drop_column("order_details", "cost")
op.drop_index("ix_fiscal_note_items_fiscal_note_id", table_name="fiscal_note_items")
op.drop_index("ix_fiscal_note_items_item_id", table_name="fiscal_note_items")
op.drop_index("ix_fiscal_note_items_owner_id", table_name="fiscal_note_items")
op.drop_index("ix_fiscal_note_items_sugested_sell_value", table_name="fiscal_note_items")
op.drop_table("fiscal_note_items")
op.drop_index("ix_fiscal_notes_owner_id", table_name="fiscal_notes")
op.drop_index("ix_fiscal_notes_purchase_date", table_name="fiscal_notes")
op.drop_table("fiscal_notes")
op.drop_index("ix_users_email", table_name="users")
op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column("orders", "owner_id", existing_type=postgresql.UUID(), nullable=True)
op.drop_column("orders", "sale_type")
op.alter_column("clients", "owner_id", existing_type=postgresql.UUID(), nullable=True)
op.alter_column("clients", "phone", existing_type=sa.VARCHAR(), nullable=True)
op.alter_column("clients", "email", existing_type=sa.VARCHAR(), nullable=True)
op.add_column(
"balance",
sa.Column("type", postgresql.ENUM("DEBT", "CREDIT", name="balancetype"), autoincrement=False, nullable=True),
)
op.create_index("ix_balance_type", "balance", ["type"], unique=False)
op.create_index("ix_balance_operation", "balance", ["operation"], unique=False)
op.drop_column("clients", "address")
op.drop_column("clients", "zip_code")
op.add_column(
"order_details",
sa.Column("cost", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
)
op.add_column(
"order_details",
sa.Column("sell_value", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
)
op.drop_column("order_details", "value")
op.drop_index(op.f("ix_users_email"), table_name="users")
op.create_index("ix_users_email", "users", ["email"], unique=False)
op.create_table(
"fiscal_notes",
sa.Column("id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.Column("purchase_date", sa.DATE(), autoincrement=False, nullable=False),
sa.Column("owner_id", postgresql.UUID(), autoincrement=False, nullable=True),
sa.Column("file_id", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(["file_id"], ["files.bucket_key"], name="fiscal_notes_file_id_fkey"),
sa.ForeignKeyConstraint(["owner_id"], ["users.id"], name="fiscal_notes_owner_id_fkey"),
sa.PrimaryKeyConstraint("id", name="fiscal_notes_pkey"),
postgresql_ignore_search_path=False,
)
op.create_index("ix_fiscal_notes_purchase_date", "fiscal_notes", ["purchase_date"], unique=False)
op.create_index("ix_fiscal_notes_owner_id", "fiscal_notes", ["owner_id"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("buy_value", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
sa.Column(
"sugested_sell_value", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False
),
sa.Column("owner_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("fiscal_note_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("item_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("file_id", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(["file_id"], ["files.bucket_key"], name="fiscal_note_items_file_id_fkey"),
sa.ForeignKeyConstraint(["fiscal_note_id"], ["fiscal_notes.id"], name="fiscal_note_items_fiscal_note_id_fkey"),
sa.ForeignKeyConstraint(["item_id"], ["items.id"], name="fiscal_note_items_item_id_fkey"),
sa.ForeignKeyConstraint(["owner_id"], ["users.id"], name="fiscal_note_items_owner_id_fkey"),
sa.PrimaryKeyConstraint("id", name="fiscal_note_items_pkey"),
)
op.create_index(
"ix_fiscal_note_items_sugested_sell_value", "fiscal_note_items", ["sugested_sell_value"], unique=False
)
op.create_index("ix_fiscal_note_items_owner_id", "fiscal_note_items", ["owner_id"], unique=False)
op.create_index("ix_fiscal_note_items_item_id", "fiscal_note_items", ["item_id"], unique=False)
op.create_index("ix_fiscal_note_items_fiscal_note_id", "fiscal_note_items", ["fiscal_note_id"], unique=False)
# ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((461, 620), 'sqlalchemy.dialects.postgresql.ENUM', 'postgresql.ENUM', (['"""SALE_IN_PIX"""', '"""SALE_IN_DEBT"""', '"""SALE_IN_CREDIT"""', '"""SALE_IN_MONEY"""', '"""SALE_IN_TRANSFER"""', '"""SALE_IN_BILLET"""', '"""SALE_OTHERS"""'], {'name': '"""saletype"""'}), "('SALE_IN_PIX', 'SALE_IN_DEBT', 'SALE_IN_CREDIT',\n 'SALE_IN_MONEY', 'SALE_IN_TRANSFER', 'SALE_IN_BILLET', 'SALE_OTHERS',\n name='saletype')\n", (476, 620), False, 'from sqlalchemy.dialects import postgresql\n'), ((1150, 1209), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_balance_operation"""'], {'table_name': '"""balance"""'}), "('ix_balance_operation', table_name='balance')\n", (1163, 1209), False, 'from alembic import op\n'), ((1214, 1268), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_balance_type"""'], {'table_name': '"""balance"""'}), "('ix_balance_type', table_name='balance')\n", (1227, 1268), False, 'from alembic import op\n'), ((1273, 1306), 'alembic.op.drop_column', 'op.drop_column', (['"""balance"""', '"""type"""'], {}), "('balance', 'type')\n", (1287, 1306), False, 'from alembic import op\n'), ((1950, 1995), 'alembic.op.drop_column', 'op.drop_column', (['"""order_details"""', '"""sell_value"""'], {}), "('order_details', 'sell_value')\n", (1964, 1995), False, 'from alembic import op\n'), ((2000, 2039), 'alembic.op.drop_column', 'op.drop_column', (['"""order_details"""', '"""cost"""'], {}), "('order_details', 'cost')\n", (2014, 2039), False, 'from alembic import op\n'), ((2045, 2134), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_fiscal_note_id"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_fiscal_note_id', table_name=\n 'fiscal_note_items')\n", (2058, 2134), False, 'from alembic import op\n'), ((2134, 2211), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_item_id"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_item_id', table_name='fiscal_note_items')\n", (2147, 2211), False, 'from alembic import op\n'), ((2216, 2294), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_owner_id"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_owner_id', table_name='fiscal_note_items')\n", (2229, 2294), False, 'from alembic import op\n'), ((2299, 2393), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_sugested_sell_value"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_sugested_sell_value', table_name=\n 'fiscal_note_items')\n", (2312, 2393), False, 'from alembic import op\n'), ((2393, 2427), 'alembic.op.drop_table', 'op.drop_table', (['"""fiscal_note_items"""'], {}), "('fiscal_note_items')\n", (2406, 2427), False, 'from alembic import op\n'), ((2432, 2500), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_notes_owner_id"""'], {'table_name': '"""fiscal_notes"""'}), "('ix_fiscal_notes_owner_id', table_name='fiscal_notes')\n", (2445, 2500), False, 'from alembic import op\n'), ((2505, 2578), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_notes_purchase_date"""'], {'table_name': '"""fiscal_notes"""'}), "('ix_fiscal_notes_purchase_date', table_name='fiscal_notes')\n", (2518, 2578), False, 'from alembic import op\n'), ((2583, 2612), 'alembic.op.drop_table', 'op.drop_table', (['"""fiscal_notes"""'], {}), "('fiscal_notes')\n", (2596, 2612), False, 'from alembic import op\n'), ((2617, 2668), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_users_email"""'], {'table_name': '"""users"""'}), "('ix_users_email', 
table_name='users')\n", (2630, 2668), False, 'from alembic import op\n'), ((2961, 2998), 'alembic.op.drop_column', 'op.drop_column', (['"""orders"""', '"""sale_type"""'], {}), "('orders', 'sale_type')\n", (2975, 2998), False, 'from alembic import op\n'), ((3422, 3491), 'alembic.op.create_index', 'op.create_index', (['"""ix_balance_type"""', '"""balance"""', "['type']"], {'unique': '(False)'}), "('ix_balance_type', 'balance', ['type'], unique=False)\n", (3437, 3491), False, 'from alembic import op\n'), ((3496, 3575), 'alembic.op.create_index', 'op.create_index', (['"""ix_balance_operation"""', '"""balance"""', "['operation']"], {'unique': '(False)'}), "('ix_balance_operation', 'balance', ['operation'], unique=False)\n", (3511, 3575), False, 'from alembic import op\n'), ((3580, 3616), 'alembic.op.drop_column', 'op.drop_column', (['"""clients"""', '"""address"""'], {}), "('clients', 'address')\n", (3594, 3616), False, 'from alembic import op\n'), ((3621, 3658), 'alembic.op.drop_column', 'op.drop_column', (['"""clients"""', '"""zip_code"""'], {}), "('clients', 'zip_code')\n", (3635, 3658), False, 'from alembic import op\n'), ((3983, 4023), 'alembic.op.drop_column', 'op.drop_column', (['"""order_details"""', '"""value"""'], {}), "('order_details', 'value')\n", (3997, 4023), False, 'from alembic import op\n'), ((4091, 4158), 'alembic.op.create_index', 'op.create_index', (['"""ix_users_email"""', '"""users"""', "['email']"], {'unique': '(False)'}), "('ix_users_email', 'users', ['email'], unique=False)\n", (4106, 4158), False, 'from alembic import op\n'), ((4939, 5041), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_notes_purchase_date"""', '"""fiscal_notes"""', "['purchase_date']"], {'unique': '(False)'}), "('ix_fiscal_notes_purchase_date', 'fiscal_notes', [\n 'purchase_date'], unique=False)\n", (4954, 5041), False, 'from alembic import op\n'), ((5041, 5132), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_notes_owner_id"""', '"""fiscal_notes"""', "['owner_id']"], {'unique': '(False)'}), "('ix_fiscal_notes_owner_id', 'fiscal_notes', ['owner_id'],\n unique=False)\n", (5056, 5132), False, 'from alembic import op\n'), ((6370, 6493), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_sugested_sell_value"""', '"""fiscal_note_items"""', "['sugested_sell_value']"], {'unique': '(False)'}), "('ix_fiscal_note_items_sugested_sell_value',\n 'fiscal_note_items', ['sugested_sell_value'], unique=False)\n", (6385, 6493), False, 'from alembic import op\n'), ((6508, 6610), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_owner_id"""', '"""fiscal_note_items"""', "['owner_id']"], {'unique': '(False)'}), "('ix_fiscal_note_items_owner_id', 'fiscal_note_items', [\n 'owner_id'], unique=False)\n", (6523, 6610), False, 'from alembic import op\n'), ((6610, 6710), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_item_id"""', '"""fiscal_note_items"""', "['item_id']"], {'unique': '(False)'}), "('ix_fiscal_note_items_item_id', 'fiscal_note_items', [\n 'item_id'], unique=False)\n", (6625, 6710), False, 'from alembic import op\n'), ((6710, 6823), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_fiscal_note_id"""', '"""fiscal_note_items"""', "['fiscal_note_id']"], {'unique': '(False)'}), "('ix_fiscal_note_items_fiscal_note_id', 'fiscal_note_items',\n ['fiscal_note_id'], unique=False)\n", (6725, 6823), False, 'from alembic import op\n'), ((705, 718), 'alembic.op.get_bind', 'op.get_bind', ([], {}), '()\n', 
(716, 718), False, 'from alembic import op\n'), ((2689, 2711), 'alembic.op.f', 'op.f', (['"""ix_users_email"""'], {}), "('ix_users_email')\n", (2693, 2711), False, 'from alembic import op\n'), ((4043, 4065), 'alembic.op.f', 'op.f', (['"""ix_users_email"""'], {}), "('ix_users_email')\n", (4047, 4065), False, 'from alembic import op\n'), ((4629, 4726), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['file_id']", "['files.bucket_key']"], {'name': '"""fiscal_notes_file_id_fkey"""'}), "(['file_id'], ['files.bucket_key'], name=\n 'fiscal_notes_file_id_fkey')\n", (4652, 4726), True, 'import sqlalchemy as sa\n'), ((4731, 4822), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {'name': '"""fiscal_notes_owner_id_fkey"""'}), "(['owner_id'], ['users.id'], name=\n 'fiscal_notes_owner_id_fkey')\n", (4754, 4822), True, 'import sqlalchemy as sa\n'), ((4827, 4882), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {'name': '"""fiscal_notes_pkey"""'}), "('id', name='fiscal_notes_pkey')\n", (4850, 4882), True, 'import sqlalchemy as sa\n'), ((5871, 5973), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['file_id']", "['files.bucket_key']"], {'name': '"""fiscal_note_items_file_id_fkey"""'}), "(['file_id'], ['files.bucket_key'], name=\n 'fiscal_note_items_file_id_fkey')\n", (5894, 5973), True, 'import sqlalchemy as sa\n'), ((5978, 6093), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['fiscal_note_id']", "['fiscal_notes.id']"], {'name': '"""fiscal_note_items_fiscal_note_id_fkey"""'}), "(['fiscal_note_id'], ['fiscal_notes.id'], name=\n 'fiscal_note_items_fiscal_note_id_fkey')\n", (6001, 6093), True, 'import sqlalchemy as sa\n'), ((6098, 6192), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['item_id']", "['items.id']"], {'name': '"""fiscal_note_items_item_id_fkey"""'}), "(['item_id'], ['items.id'], name=\n 'fiscal_note_items_item_id_fkey')\n", (6121, 6192), True, 'import sqlalchemy as sa\n'), ((6197, 6293), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {'name': '"""fiscal_note_items_owner_id_fkey"""'}), "(['owner_id'], ['users.id'], name=\n 'fiscal_note_items_owner_id_fkey')\n", (6220, 6293), True, 'import sqlalchemy as sa\n'), ((6298, 6358), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {'name': '"""fiscal_note_items_pkey"""'}), "('id', name='fiscal_note_items_pkey')\n", (6321, 6358), True, 'import sqlalchemy as sa\n'), ((813, 960), 'sqlalchemy.Enum', 'sa.Enum', (['"""SALE_IN_PIX"""', '"""SALE_IN_DEBT"""', '"""SALE_IN_CREDIT"""', '"""SALE_IN_MONEY"""', '"""SALE_IN_TRANSFER"""', '"""SALE_IN_BILLET"""', '"""SALE_OTHERS"""'], {'name': '"""saletype"""'}), "('SALE_IN_PIX', 'SALE_IN_DEBT', 'SALE_IN_CREDIT', 'SALE_IN_MONEY',\n 'SALE_IN_TRANSFER', 'SALE_IN_BILLET', 'SALE_OTHERS', name='saletype')\n", (820, 960), True, 'import sqlalchemy as sa\n'), ((1361, 1373), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (1371, 1373), True, 'import sqlalchemy as sa\n'), ((1445, 1457), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (1455, 1457), True, 'import sqlalchemy as sa\n'), ((1532, 1549), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (1547, 1549), False, 'from sqlalchemy.dialects import postgresql\n'), ((1624, 1641), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (1639, 1641), False, 'from sqlalchemy.dialects import 
postgresql\n'), ((1710, 1744), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1742, 1744), False, 'import sqlmodel\n'), ((1812, 1846), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1844, 1846), False, 'import sqlmodel\n'), ((1918, 1928), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (1926, 1928), True, 'import sqlalchemy as sa\n'), ((2923, 2940), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (2938, 2940), False, 'from sqlalchemy.dialects import postgresql\n'), ((3056, 3073), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (3071, 3073), False, 'from sqlalchemy.dialects import postgresql\n'), ((3144, 3156), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (3154, 3156), True, 'import sqlalchemy as sa\n'), ((3227, 3239), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (3237, 3239), True, 'import sqlalchemy as sa\n'), ((3320, 3373), 'sqlalchemy.dialects.postgresql.ENUM', 'postgresql.ENUM', (['"""DEBT"""', '"""CREDIT"""'], {'name': '"""balancetype"""'}), "('DEBT', 'CREDIT', name='balancetype')\n", (3335, 3373), False, 'from sqlalchemy.dialects import postgresql\n'), ((3729, 3770), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (3756, 3770), False, 'from sqlalchemy.dialects import postgresql\n'), ((3892, 3933), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (3919, 3933), False, 'from sqlalchemy.dialects import postgresql\n'), ((4228, 4245), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (4243, 4245), False, 'from sqlalchemy.dialects import postgresql\n'), ((4318, 4330), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (4328, 4330), True, 'import sqlalchemy as sa\n'), ((4405, 4414), 'sqlalchemy.DATE', 'sa.DATE', ([], {}), '()\n', (4412, 4414), True, 'import sqlalchemy as sa\n'), ((4484, 4501), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (4499, 4501), False, 'from sqlalchemy.dialects import postgresql\n'), ((4569, 4581), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (4579, 4581), True, 'import sqlalchemy as sa\n'), ((5203, 5220), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5218, 5220), False, 'from sqlalchemy.dialects import postgresql\n'), ((5291, 5332), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (5318, 5332), False, 'from sqlalchemy.dialects import postgresql\n'), ((5426, 5467), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (5453, 5467), False, 'from sqlalchemy.dialects import postgresql\n'), ((5546, 5563), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5561, 5563), False, 'from sqlalchemy.dialects import postgresql\n'), ((5639, 5656), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5654, 5656), False, 'from sqlalchemy.dialects import postgresql\n'), ((5725, 5742), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5740, 5742), False, 'from sqlalchemy.dialects import postgresql\n'), ((5811, 5823), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (5821, 5823), True, 'import sqlalchemy as sa\n')]
|
from __future__ import absolute_import
import numpy as nm
import numpy.linalg as nla
from sfepy.base.base import find_subclasses, assert_, Struct
from sfepy.linalg import combine, insert_strided_axis
from six.moves import range
from functools import reduce
# Requires fixed vertex numbering!
vertex_maps = {3 : [[0, 0, 0],
[1, 0, 0],
[1, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 1],
[1, 1, 1],
[0, 1, 1]],
2 : [[0, 0],
[1, 0],
[1, 1],
[0, 1]],
1 : [[0],
[1]]}
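# Each entry of vertex_maps lists, per spatial dimension, the 0/1 reference
# coordinates of the cell vertices (segment, square, cube); the maps are used
# below when building the vertex rows of the nodes arrays.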
def transform_basis(transform, bf):
"""
Transform a basis `bf` using `transform` array of matrices.
"""
if bf.ndim == 3:
nbf = nm.einsum('cij,qdj->cqdi', transform, bf)
else:
nbf = nm.einsum('cij,oqdj->cqdi', transform, bf)
return nbf
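# A minimal usage sketch of transform_basis() (the shapes are assumptions for
# illustration, not taken from a caller): `transform` holds one matrix per
# cell acting on the basis-function axis, `bf` holds per-quadrature-point
# basis values, and the result gains a leading cell axis, e.g.
#   transform: (n_cell, n_ep, n_ep), bf: (n_qp, dim, n_ep)
#   nbf = transform_basis(transform, bf)  # -> (n_cell, n_qp, dim, n_ep)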
class LagrangeNodes(Struct):
"""Helper class for defining nodes of Lagrange elements."""
@staticmethod
def append_edges(nodes, nts, iseq, nt, edges, order):
delta = 1.0 / float(order)
for ii, edge in enumerate(edges):
n1 = nodes[edge[0],:].copy()
n2 = nodes[edge[1],:].copy()
for ie in range(order - 1):
c2 = ie + 1
c1 = order - c2
nts[iseq] = [nt, ii]
aux = [int(round(tmp)) for tmp in delta * (c1 * n1 + c2 * n2)]
nodes[iseq,:] = aux
iseq += 1
return iseq
@staticmethod
def append_faces(nodes, nts, iseq, nt, faces, order):
delta = 1.0 / float(order)
for ii, face in enumerate(faces):
n1 = nodes[face[0],:].copy()
n2 = nodes[face[1],:].copy()
n3 = nodes[face[2],:].copy()
for i1 in range(order - 2):
for i2 in range(order - 2 - i1):
c3 = i1 + 1
c2 = i2 + 1
c1 = order - c3 - c2
nts[iseq] = [nt, ii]
aux = [int(round(tmp)) for tmp
in delta * (c1 * n1 + c2 * n2 + c3 * n3)]
nodes[iseq,:] = aux
iseq += 1
return iseq
@staticmethod
def append_bubbles(nodes, nts, iseq, nt, order):
delta = 1.0 / float(order)
n1 = nodes[0,:].copy()
n2 = nodes[1,:].copy()
n3 = nodes[2,:].copy()
n4 = nodes[3,:].copy()
for i1 in range(order - 3):
for i2 in range(order - 3):
for i3 in range(order - 3 - i1 - i2):
c4 = i1 + 1
c3 = i2 + 1
c2 = i3 + 1
c1 = order - c4 - c3 - c2
nts[iseq] = [nt, 0]
aux = [int(round(tmp)) for tmp
in delta * (c1 * n1 + c2 * n2 + c3 * n3 + c4 * n4)]
nodes[iseq,:] = aux
iseq += 1
return iseq
@staticmethod
def append_tp_edges(nodes, nts, iseq, nt, edges, ao):
delta = 1.0 / float(ao)
for ii, edge in enumerate(edges):
n1 = nodes[edge[0],:].copy()
n2 = nodes[edge[1],:].copy()
for ie in range(ao - 1):
c2 = ie + 1
c1 = ao - c2
nts[iseq] = [nt, ii]
aux = [int(round(tmp)) for tmp in delta * (c1 * n1 + c2 * n2)]
nodes[iseq,:] = aux
iseq += 1
return iseq
@staticmethod
def append_tp_faces(nodes, nts, iseq, nt, faces, ao):
delta = 1.0 / (float(ao) ** 2)
for ii, face in enumerate( faces ):
n1 = nodes[face[0],:].copy()
n2 = nodes[face[1],:].copy()
n3 = nodes[face[2],:].copy()
n4 = nodes[face[3],:].copy()
for i1 in range(ao - 1):
for i2 in range(ao - 1):
c4 = i1 + 1
c3 = i2 + 1
c2 = ao - c4
c1 = ao - c3
nts[iseq] = [nt, ii]
aux = [int(round(tmp)) for tmp
in delta * (c1 * c2 * n1 + c2 * c3 * n2
+ c3 * c4 * n3 + c4 * c1 * n4)]
nodes[iseq,:] = aux
iseq += 1
return iseq
@staticmethod
def append_tp_bubbles(nodes, nts, iseq, nt, ao):
delta = 1.0 / (float(ao) ** 3)
n1 = nodes[0,:].copy()
n2 = nodes[1,:].copy()
n3 = nodes[2,:].copy()
n4 = nodes[3,:].copy()
n5 = nodes[4,:].copy()
n6 = nodes[5,:].copy()
n7 = nodes[6,:].copy()
n8 = nodes[7,:].copy()
for i1 in range(ao - 1):
for i2 in range(ao - 1):
for i3 in range(ao - 1):
c6 = i1 + 1
c5 = i2 + 1
c4 = i3 + 1
c3 = ao - c6
c2 = ao - c5
c1 = ao - c4
nts[iseq] = [nt, 0]
aux = [int(round(tmp)) for tmp
in delta * (c1 * c2 * c3 * n1 + c4 * c2 * c3 * n2
+ c5 * c4 * c3 * n3 + c1 * c3 * c5 * n4
+ c1 * c2 * c6 * n5 + c4 * c2 * c6 * n6
+ c5 * c4 * c6 * n7 + c1 * c6 * c5 * n8)]
nodes[iseq,:] = aux
iseq += 1
return iseq
class NodeDescription(Struct):
"""
Describe FE nodes defined on different parts of a reference element.
"""
def _describe_facets(self, ii):
nts = self.node_types[ii]
ik = nm.where(nts[1:,1] > nts[:-1,1])[0]
if len(ik) == 0:
ifacets = None
n_dof = 0
else:
ii = ii.astype(nm.int32)
ik = nm.r_[0, ik + 1, nts.shape[0]]
ifacets = [ii[ik[ir] : ik[ir+1]] for ir in range(len(ik) - 1)]
n_dof = len(ifacets[0])
return ifacets, n_dof
def _describe_other(self, ii):
if len(ii):
return ii, len(ii)
else:
return None, 0
def _get_facet_nodes(self, ifacets, nodes):
if ifacets is None:
return None
else:
return [nodes[ii] for ii in ifacets]
def _get_nodes(self, ii, nodes):
if ii is None:
return None
else:
return nodes[ii]
def __init__(self, node_types, nodes):
self.node_types = node_types
# Vertex nodes.
ii = nm.where(node_types[:,0] == 0)[0]
self.vertex, self.n_vertex_nod = self._describe_other(ii)
self.vertex_nodes = self._get_nodes(self.vertex, nodes)
# Edge nodes.
ii = nm.where(node_types[:,0] == 1)[0]
self.edge, self.n_edge_nod = self._describe_facets(ii)
self.edge_nodes = self._get_facet_nodes(self.edge, nodes)
# Face nodes.
ii = nm.where(node_types[:,0] == 2)[0]
self.face, self.n_face_nod = self._describe_facets(ii)
self.face_nodes = self._get_facet_nodes(self.face, nodes)
# Bubble nodes.
ii = nm.where(node_types[:,0] == 3)[0]
self.bubble, self.n_bubble_nod = self._describe_other(ii)
self.bubble_nodes = self._get_nodes(self.bubble, nodes)
def has_extra_nodes(self):
"""
Return True if the element has some edge, face or bubble nodes.
"""
return (self.n_edge_nod + self.n_face_nod + self.n_bubble_nod) > 0
class PolySpace(Struct):
"""Abstract polynomial space class."""
_all = None
keys = {
(1, 2) : 'simplex',
(2, 3) : 'simplex',
(3, 4) : 'simplex',
(2, 4) : 'tensor_product',
(3, 8) : 'tensor_product',
}
@staticmethod
def any_from_args(name, geometry, order, base='lagrange',
force_bubble=False):
"""
Construct a particular polynomial space classes according to the
arguments passed in.
"""
if name is None:
name = PolySpace.suggest_name(geometry, order, base, force_bubble)
if PolySpace._all is None:
PolySpace._all = find_subclasses(globals(), [PolySpace])
table = PolySpace._all
key = '%s_%s' % (base, PolySpace.keys[(geometry.dim,
geometry.n_vertex)])
if (geometry.name == '1_2') and (key not in table):
key = '%s_%s' % (base, 'tensor_product')
if force_bubble:
key += '_bubble'
return table[key](name, geometry, order)
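    # A hypothetical usage sketch (the variable names are assumptions): for a
    # 2D geometry with 4 vertices, the key becomes 'lagrange_tensor_product',
    # so e.g.
    #   ps = PolySpace.any_from_args(None, geometry, order=2)
    # would return a LagrangeTensorProductPolySpace instance.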
@staticmethod
def suggest_name(geometry, order, base='lagrange',
force_bubble=False):
"""
Suggest the polynomial space name given its constructor parameters.
"""
aux = geometry.get_interpolation_name()[:-1]
if force_bubble:
return aux + ('%dB' % order)
else:
return aux + ('%d' % order)
def __init__(self, name, geometry, order):
self.name = name
self.geometry = geometry
self.order = order
self.bbox = nm.vstack((geometry.coors.min(0), geometry.coors.max(0)))
def eval_base(self, coors, diff=0, ori=None, force_axis=False,
transform=None, suppress_errors=False, eps=1e-15):
"""
Evaluate the basis or its first or second derivatives in points given
by coordinates. The real work is done in _eval_base() implemented in
subclasses.
Note that the second derivative code is a work-in-progress and only
`coors` and `transform` arguments are used.
Parameters
----------
coors : array_like
The coordinates of points where the basis is evaluated. See Notes.
diff : 0, 1 or 2
If nonzero, return the given derivative.
ori : array_like, optional
Optional orientation of element facets for per element basis.
force_axis : bool
If True, force the resulting array shape to have one more axis even
when `ori` is None.
transform : array_like, optional
The basis transform array.
suppress_errors : bool
If True, do not report points outside the reference domain.
eps : float
Accuracy for comparing coordinates.
Returns
-------
base : array
The basis (shape (n_coor, 1, n_base)) or its first derivative
(shape (n_coor, dim, n_base)) or its second derivative (shape
(n_coor, dim, dim, n_base)) evaluated in the given points. An
additional axis is pre-pended of length n_cell, if `ori` is given,
or of length 1, if `force_axis` is True.
Notes
-----
If coors.ndim == 3, several point sets are assumed, with equal number
of points in each of them. This is the case, for example, of the
values of the volume base functions on the element facets. The indexing
(of bf_b(g)) is then (ifa,iqp,:,n_ep), so that the facet can be set in
C using FMF_SetCell.
"""
coors = nm.asarray(coors)
if not coors.ndim in (2, 3):
raise ValueError('coordinates must have 2 or 3 dimensions! (%d)'
% coors.ndim)
if (coors.ndim == 2):
base = self._eval_base(coors, diff=diff, ori=ori,
suppress_errors=suppress_errors,
eps=eps)
if (base.ndim == 3) and force_axis:
base = base[None, ...]
if not base.flags['C_CONTIGUOUS']:
base = nm.ascontiguousarray(base)
else: # Several point sets.
if diff:
bdim = self.geometry.dim
else:
bdim = 1
base = nm.empty((coors.shape[0], coors.shape[1],
bdim, self.n_nod), dtype=nm.float64)
for ii, _coors in enumerate(coors):
base[ii] = self._eval_base(_coors, diff=diff, ori=ori,
suppress_errors=suppress_errors,
eps=eps)
if transform is not None:
base = transform_basis(transform, base)
return base
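    # A hypothetical call sketch following the docstring above (`ps` and
    # `qp_coors` are assumed names); for points of shape (n_coor, dim):
    #   ps.eval_base(qp_coors)          # -> (n_coor, 1, n_base)
    #   ps.eval_base(qp_coors, diff=1)  # -> (n_coor, dim, n_base)
    # Passing `ori` (or force_axis=True) prepends an extra cell axis.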
def get_mtx_i(self):
return self.mtx_i
def describe_nodes(self):
return NodeDescription(self.nts, self.nodes)
class LagrangePolySpace(PolySpace):
def create_context(self, cmesh, eps, check_errors, i_max, newton_eps,
tdim=None):
from sfepy.discrete.fem.extmods.bases import CLagrangeContext
ref_coors = self.geometry.coors
if cmesh is not None:
mesh_coors = cmesh.coors
conn = cmesh.get_conn(cmesh.tdim, 0)
mesh_conn = conn.indices.reshape(cmesh.n_el, -1).astype(nm.int32)
if tdim is None:
tdim = cmesh.tdim
else:
mesh_coors = mesh_conn = None
if tdim is None:
raise ValueError('supply either cmesh or tdim!')
ctx = CLagrangeContext(order=self.order,
tdim=tdim,
nodes=self.nodes,
ref_coors=ref_coors,
mesh_coors=mesh_coors,
mesh_conn=mesh_conn,
mtx_i=self.get_mtx_i(),
eps=eps,
check_errors=check_errors,
i_max=i_max,
newton_eps=newton_eps)
return ctx
def _eval_base(self, coors, diff=0, ori=None,
suppress_errors=False, eps=1e-15):
"""
See :func:`PolySpace.eval_base()`.
"""
if diff == 2:
base = self._eval_hessian(coors)
else:
base = self.eval_ctx.evaluate(coors, diff=diff,
eps=eps,
check_errors=not suppress_errors)
return base
class LagrangeSimplexPolySpace(LagrangePolySpace):
"""Lagrange polynomial space on a simplex domain."""
name = 'lagrange_simplex'
def __init__(self, name, geometry, order, init_context=True):
PolySpace.__init__(self, name, geometry, order)
n_v = geometry.n_vertex
mtx = nm.ones((n_v, n_v), nm.float64)
mtx[0:n_v-1,:] = nm.transpose(geometry.coors)
self.mtx_i = nm.ascontiguousarray(nla.inv(mtx))
self.rhs = nm.ones((n_v,), nm.float64)
self.nodes, self.nts, node_coors = self._define_nodes()
self.node_coors = nm.ascontiguousarray(node_coors)
self.n_nod = self.nodes.shape[0]
if init_context:
self.eval_ctx = self.create_context(None, 0, 1e-15, 100, 1e-8,
tdim=n_v - 1)
else:
self.eval_ctx = None
def _define_nodes(self):
# Factorial.
fac = lambda n : reduce(lambda a, b : a * (b + 1), range(n), 1)
geometry = self.geometry
n_v, dim = geometry.n_vertex, geometry.dim
order = self.order
n_nod = fac(order + dim) // (fac(order) * fac(dim))
## print n_nod, gd
nodes = nm.zeros((n_nod, n_v), nm.int32)
nts = nm.zeros((n_nod, 2), nm.int32)
if order == 0:
nts[0,:] = [3, 0]
nodes[0,:] = nm.zeros((n_v,), nm.int32)
else:
iseq = 0
# Vertex nodes.
nts[0:n_v,0] = 0
nts[0:n_v,1] = nm.arange(n_v, dtype = nm.int32)
aux = order * nm.identity(n_v, dtype = nm.int32)
nodes[iseq:iseq+n_v,:] = aux
iseq += n_v
if dim == 1:
iseq = LagrangeNodes.append_edges(nodes, nts, iseq, 3,
[[0, 1]], order)
elif dim == 2:
iseq = LagrangeNodes.append_edges(nodes, nts, iseq, 1,
geometry.edges, order)
iseq = LagrangeNodes.append_faces(nodes, nts, iseq, 3,
[[0, 1, 2]], order)
elif dim == 3:
iseq = LagrangeNodes.append_edges(nodes, nts, iseq, 1,
geometry.edges, order)
iseq = LagrangeNodes.append_faces(nodes, nts, iseq, 2,
geometry.faces, order)
iseq = LagrangeNodes.append_bubbles(nodes, nts, iseq, 3,
order)
else:
raise NotImplementedError
## print nm.concatenate((nts, nodes), 1)
# Check orders.
orders = nm.sum(nodes, 1)
if not nm.all(orders == order):
raise AssertionError('wrong orders! (%d == all of %s)'
% (order, orders))
# Coordinates of the nodes.
if order == 0:
tmp = nm.ones((n_nod, n_v), nm.int32)
node_coors = nm.dot(tmp, geometry.coors) / n_v
else:
node_coors = nm.dot(nodes, geometry.coors) / order
return nodes, nts, node_coors
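    # The count computed above is the simplex formula
    # n_nod = (order + dim)! / (order! * dim!); e.g. order=2, dim=2 gives
    # 6 nodes: a quadratic triangle with 3 vertex and 3 edge nodes.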
def _eval_hessian(self, coors):
"""
Evaluate the second derivatives of the basis.
"""
def get_bc(coor):
rhs = nm.concatenate((coor, [1]))
bc = nm.dot(self.mtx_i, rhs)
return bc
def get_val(bc, node, omit=[]):
val = nm.ones(1, nm.float64)
for i1 in range(bc.shape[0]):
if i1 in omit: continue
for i2 in range(node[i1]):
val *= (self.order * bc[i1] - i2) / (i2 + 1.0)
return val
def get_der(bc1, node1, omit=[]):
val = nm.zeros(1, nm.float64)
for i1 in range(node1):
if i1 in omit: continue
aux = nm.ones(1, nm.float64)
for i2 in range(node1):
if (i1 == i2) or (i2 in omit): continue
aux *= (self.order * bc1 - i2) / (i2 + 1.0)
val += aux * self.order / (i1 + 1.0)
return val
n_v = self.mtx_i.shape[0]
dim = n_v - 1
mi = self.mtx_i[:, :dim]
bfgg = nm.zeros((coors.shape[0], dim, dim, self.n_nod),
dtype=nm.float64)
for ic, coor in enumerate(coors):
bc = get_bc(coor)
for ii, node in enumerate(self.nodes):
for ig1, bc1 in enumerate(bc): # 1. derivative w.r.t. bc1.
for ig2, bc2 in enumerate(bc): # 2. derivative w.r.t. bc2.
if ig1 == ig2:
val = get_val(bc, node, omit=[ig1])
vv = 0.0
for i1 in range(node[ig1]):
aux = get_der(bc2, node[ig2], omit=[i1])
vv += aux * self.order / (i1 + 1.0)
val *= vv
else:
val = get_val(bc, node, omit=[ig1, ig2])
val *= get_der(bc1, node[ig1])
val *= get_der(bc2, node[ig2])
bfgg[ic, :, :, ii] += val * mi[ig1] * mi[ig2][:, None]
return bfgg
class LagrangeSimplexBPolySpace(LagrangeSimplexPolySpace):
"""Lagrange polynomial space with forced bubble function on a simplex
domain."""
name = 'lagrange_simplex_bubble'
def __init__(self, name, geometry, order, init_context=True):
LagrangeSimplexPolySpace.__init__(self, name, geometry, order,
init_context=False)
nodes, nts, node_coors = self.nodes, self.nts, self.node_coors
shape = [nts.shape[0] + 1, 2]
nts = nm.resize(nts, shape)
nts[-1,:] = [3, 0]
shape = [nodes.shape[0] + 1, nodes.shape[1]]
nodes = nm.resize(nodes, shape)
# Make a 'hypercubic' (cubic in 2D) node.
nodes[-1,:] = 1
n_v = self.geometry.n_vertex
tmp = nm.ones((n_v,), nm.int32)
node_coors = nm.vstack((node_coors,
nm.dot(tmp, self.geometry.coors) / n_v))
self.nodes, self.nts = nodes, nts
self.node_coors = nm.ascontiguousarray(node_coors)
self.bnode = nodes[-1:,:]
self.n_nod = self.nodes.shape[0]
if init_context:
self.eval_ctx = self.create_context(None, 0, 1e-15, 100, 1e-8,
tdim=n_v - 1)
else:
self.eval_ctx = None
def create_context(self, *args, **kwargs):
ctx = LagrangePolySpace.create_context(self, *args, **kwargs)
ctx.is_bubble = 1
return ctx
class LagrangeTensorProductPolySpace(LagrangePolySpace):
"""Lagrange polynomial space on a tensor product domain."""
name = 'lagrange_tensor_product'
def __init__(self, name, geometry, order, init_context=True):
PolySpace.__init__(self, name, geometry, order)
g1d = Struct(n_vertex = 2,
dim = 1,
coors = self.bbox[:,0:1].copy())
self.ps1d = LagrangeSimplexPolySpace('P_aux', g1d, order,
init_context=False)
self.nodes, self.nts, node_coors = self._define_nodes()
self.node_coors = nm.ascontiguousarray(node_coors)
self.n_nod = self.nodes.shape[0]
if init_context:
tdim = int(nm.sqrt(geometry.n_vertex))
self.eval_ctx = self.create_context(None, 0, 1e-15, 100, 1e-8,
tdim=tdim)
else:
self.eval_ctx = None
def _define_nodes(self):
geometry = self.geometry
order = self.order
n_v, dim = geometry.n_vertex, geometry.dim
vertex_map = order * nm.array(vertex_maps[dim], dtype=nm.int32)
n_nod = (order + 1) ** dim
nodes = nm.zeros((n_nod, 2 * dim), nm.int32)
nts = nm.zeros((n_nod, 2), nm.int32)
if order == 0:
nts[0,:] = [3, 0]
nodes[0,:] = nm.zeros((n_nod,), nm.int32)
else:
iseq = 0
# Vertex nodes.
nts[0:n_v,0] = 0
nts[0:n_v,1] = nm.arange( n_v, dtype = nm.int32 )
order * nm.identity( n_v, dtype = nm.int32 )
if dim == 3:
for ii in range(n_v):
i1, i2, i3 = vertex_map[ii]
nodes[iseq,:] = [order - i1, i1,
order - i2, i2,
order - i3, i3]
iseq += 1
elif dim == 2:
for ii in range(n_v):
i1, i2 = vertex_map[ii]
nodes[iseq,:] = [order - i1, i1, order - i2, i2]
iseq += 1
else:
for ii in range(n_v):
i1 = vertex_map[ii][0]
nodes[iseq,:] = [order - i1, i1]
iseq += 1
if dim == 1:
iseq = LagrangeNodes.append_tp_edges(nodes, nts, iseq, 3,
[[0, 1]], order)
elif dim == 2:
iseq = LagrangeNodes.append_tp_edges(nodes, nts, iseq, 1,
geometry.edges, order)
iseq = LagrangeNodes.append_tp_faces(nodes, nts, iseq, 3,
[[0, 1, 2, 3]], order)
elif dim == 3:
iseq = LagrangeNodes.append_tp_edges(nodes, nts, iseq, 1,
geometry.edges, order)
iseq = LagrangeNodes.append_tp_faces(nodes, nts, iseq, 2,
geometry.faces, order)
iseq = LagrangeNodes.append_tp_bubbles(nodes, nts, iseq, 3,
order)
else:
raise NotImplementedError
# Check orders.
orders = nm.sum(nodes, 1)
if not nm.all(orders == order * dim):
raise AssertionError('wrong orders! (%d == all of %s)'
% (order * dim, orders))
# Coordinates of the nodes.
if order == 0:
tmp = nm.ones((n_nod, n_v), nm.int32)
node_coors = nm.dot(tmp, geometry.coors) / n_v
else:
c_min, c_max = self.bbox[:,0]
cr = nm.arange(2 * dim)
node_coors = (nodes[:,cr[::2]] * c_min
+ nodes[:,cr[1::2]] * c_max) / order
return nodes, nts, node_coors
def _eval_base_debug(self, coors, diff=False, ori=None,
suppress_errors=False, eps=1e-15):
"""Python version of eval_base()."""
dim = self.geometry.dim
ev = self.ps1d.eval_base
if diff:
base = nm.ones((coors.shape[0], dim, self.n_nod), dtype=nm.float64)
for ii in range(dim):
self.ps1d.nodes = self.nodes[:,2*ii:2*ii+2].copy()
self.ps1d.n_nod = self.n_nod
for iv in range(dim):
if ii == iv:
base[:,iv:iv+1,:] *= ev(coors[:,ii:ii+1].copy(),
diff=True,
suppress_errors=suppress_errors,
eps=eps)
else:
base[:,iv:iv+1,:] *= ev(coors[:,ii:ii+1].copy(),
diff=False,
suppress_errors=suppress_errors,
eps=eps)
else:
base = nm.ones((coors.shape[0], 1, self.n_nod), dtype=nm.float64)
for ii in range(dim):
self.ps1d.nodes = self.nodes[:,2*ii:2*ii+2].copy()
self.ps1d.n_nod = self.n_nod
base *= ev(coors[:,ii:ii+1].copy(),
diff=diff,
suppress_errors=suppress_errors,
eps=eps)
return base
def _eval_hessian(self, coors):
"""
Evaluate the second derivatives of the basis.
"""
evh = self.ps1d.eval_base
dim = self.geometry.dim
bfgg = nm.zeros((coors.shape[0], dim, dim, self.n_nod),
dtype=nm.float64)
v0s = []
v1s = []
v2s = []
for ii in range(dim):
self.ps1d.nodes = self.nodes[:,2*ii:2*ii+2].copy()
self.ps1d.n_nod = self.n_nod
ev = self.ps1d.create_context(None, 0, 1e-15, 100, 1e-8,
tdim=1).evaluate
v0s.append(ev(coors[:, ii:ii+1].copy())[:, 0, :])
v1s.append(ev(coors[:, ii:ii+1].copy(), diff=1)[:, 0, :])
v2s.append(evh(coors[:, ii:ii+1], diff=2)[:, 0, 0, :])
for ir in range(dim):
vv = v2s[ir] # Destroys v2s!
for ik in range(dim):
if ik == ir: continue
vv *= v0s[ik]
bfgg[:, ir, ir, :] = vv
for ic in range(dim):
if ic == ir: continue
val = v1s[ir] * v1s[ic]
for ik in range(dim):
if (ik == ir) or (ik == ic): continue
val *= v0s[ik]
bfgg[:, ir, ic, :] += val
return bfgg
def get_mtx_i(self):
return self.ps1d.mtx_i
class LobattoTensorProductPolySpace(PolySpace):
"""
Hierarchical polynomial space using Lobatto functions.
Each row of the `nodes` attribute defines indices of Lobatto functions that
need to be multiplied together to evaluate the corresponding shape
function. This defines the ordering of basis functions on the reference
element.
"""
name = 'lobatto_tensor_product'
def __init__(self, name, geometry, order):
PolySpace.__init__(self, name, geometry, order)
aux = self._define_nodes()
self.nodes, self.nts, node_coors, self.face_axes, self.sfnodes = aux
self.node_coors = nm.ascontiguousarray(node_coors)
self.n_nod = self.nodes.shape[0]
aux = nm.where(self.nodes > 0, self.nodes, 1)
self.node_orders = nm.prod(aux, axis=1)
self.edge_indx = nm.where(self.nts[:, 0] == 1)[0]
self.face_indx = nm.where(self.nts[:, 0] == 2)[0]
self.face_axes_nodes = self._get_face_axes_nodes(self.face_axes)
def _get_counts(self):
order = self.order
dim = self.geometry.dim
n_nod = (order + 1) ** dim
n_per_edge = (order - 1)
n_per_face = (order - 1) ** (dim - 1)
n_bubble = (order - 1) ** dim
return n_nod, n_per_edge, n_per_face, n_bubble
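    # Worked example of the counts above: order=3, dim=3 gives
    # n_nod = 4**3 = 64, n_per_edge = 2, n_per_face = 4 and n_bubble = 8.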
def _define_nodes(self):
geometry = self.geometry
order = self.order
n_v, dim = geometry.n_vertex, geometry.dim
n_nod, n_per_edge, n_per_face, n_bubble = self._get_counts()
nodes = nm.zeros((n_nod, dim), nm.int32)
nts = nm.zeros((n_nod, 2), nm.int32)
# Vertex nodes.
nts[0:n_v, 0] = 0
nts[0:n_v, 1] = nm.arange(n_v, dtype=nm.int32)
nodes[0:n_v] = nm.array(vertex_maps[dim], dtype=nm.int32)
ii = n_v
# Edge nodes.
if (dim > 1) and (n_per_edge > 0):
ik = nm.arange(2, order + 1, dtype=nm.int32)
zo = nm.zeros((n_per_edge, 2), dtype=nm.int32)
zo[:, 1] = 1
for ie, edge in enumerate(geometry.edges):
n1, n2 = nodes[edge]
ifix = nm.where(n1 == n2)[0]
irun = nm.where(n1 != n2)[0][0]
ic = n1[ifix]
nodes[ii:ii + n_per_edge, ifix] = zo[:, ic]
nodes[ii:ii + n_per_edge, irun] = ik
nts[ii:ii + n_per_edge] = [[1, ie]]
ii += n_per_edge
# 3D face nodes.
face_axes = []
sfnodes = None
if (dim == 3) and (n_per_face > 0):
n_face = len(geometry.faces)
sfnodes = nm.zeros((n_per_face * n_face, dim), nm.int32)
ii0 = ii
ik = nm.arange(2, order + 1, dtype=nm.int32)
zo = nm.zeros((n_per_face, 2), dtype=nm.int32)
zo[:, 1] = 1
for ifa, face in enumerate(geometry.faces):
ns = nodes[face]
diff = nm.diff(ns, axis=0)
asum = nm.abs(diff).sum(axis=0)
ifix = nm.where(asum == 0)[0][0]
ic = ns[0, ifix]
irun1 = nm.where(asum == 2)[0][0]
irun2 = nm.where(asum == 1)[0][0]
iy, ix = nm.meshgrid(ik, ik)
nodes[ii:ii + n_per_face, ifix] = zo[:, ic]
nodes[ii:ii + n_per_face, irun1] = ix.ravel()
nodes[ii:ii + n_per_face, irun2] = iy.ravel()
nts[ii:ii + n_per_face] = [[2, ifa]]
ij = ii - ii0
sfnodes[ij:ij + n_per_face, ifix] = zo[:, ic]
sfnodes[ij:ij + n_per_face, irun1] = iy.ravel()
sfnodes[ij:ij + n_per_face, irun2] = ix.ravel()
face_axes.append([irun1, irun2])
ii += n_per_face
face_axes = nm.array(face_axes)
# Bubble nodes.
if n_bubble > 0:
ik = nm.arange(2, order + 1, dtype=nm.int32)
nodes[ii:] = nm.array([aux for aux in combine([ik] * dim)])
nts[ii:ii + n_bubble] = [[3, 0]]
ii += n_bubble
assert_(ii == n_nod)
# Coordinates of the "nodes". All nodes on a facet have the same
# coordinates - the centre of the facet.
c_min, c_max = self.bbox[:, 0]
node_coors = nm.zeros(nodes.shape, dtype=nm.float64)
node_coors[:n_v] = nodes[:n_v]
if (dim > 1) and (n_per_edge > 0):
ie = nm.where(nts[:, 0] == 1)[0]
node_coors[ie] = node_coors[geometry.edges[nts[ie, 1]]].mean(1)
if (dim == 3) and (n_per_face > 0):
ifa = nm.where(nts[:, 0] == 2)[0]
node_coors[ifa] = node_coors[geometry.faces[nts[ifa, 1]]].mean(1)
if n_bubble > 0:
ib = nm.where(nts[:, 0] == 3)[0]
node_coors[ib] = node_coors[geometry.conn].mean(0)
return nodes, nts, node_coors, face_axes, sfnodes
def _get_face_axes_nodes(self, face_axes):
if not len(face_axes): return None
nodes = self.nodes[self.face_indx]
n_per_face = self._get_counts()[2]
anodes = nm.tile(nodes[:n_per_face, face_axes[0]], (6, 1))
return anodes
def _eval_base(self, coors, diff=False, ori=None,
suppress_errors=False, eps=1e-15):
"""
See PolySpace.eval_base().
"""
from .extmods.lobatto_bases import eval_lobatto_tensor_product as ev
c_min, c_max = self.bbox[:, 0]
base = ev(coors, self.nodes, c_min, c_max, self.order, diff)
if ori is not None:
ebase = nm.tile(base, (ori.shape[0], 1, 1, 1))
if self.edge_indx.shape[0]:
# Orient edge functions.
ie, ii = nm.where(ori[:, self.edge_indx] == 1)
ii = self.edge_indx[ii]
ebase[ie, :, :, ii] *= -1.0
if self.face_indx.shape[0]:
# Orient face functions.
fori = ori[:, self.face_indx]
# ... normal axis order
ie, ii = nm.where((fori == 1) | (fori == 2))
ii = self.face_indx[ii]
ebase[ie, :, :, ii] *= -1.0
# ... swapped axis order
sbase = ev(coors, self.sfnodes, c_min, c_max, self.order, diff)
sbase = insert_strided_axis(sbase, 0, ori.shape[0])
# ...overwrite with swapped axes basis.
ie, ii = nm.where(fori >= 4)
ii2 = self.face_indx[ii]
ebase[ie, :, :, ii2] = sbase[ie, :, :, ii]
# ...deal with orientation.
ie, ii = nm.where((fori == 5) | (fori == 6))
ii = self.face_indx[ii]
ebase[ie, :, :, ii] *= -1.0
base = ebase
return base
|
[
"sfepy.linalg.combine",
"sfepy.base.base.assert_",
"sfepy.linalg.insert_strided_axis"
] |
[((859, 900), 'numpy.einsum', 'nm.einsum', (['"""cij,qdj->cqdi"""', 'transform', 'bf'], {}), "('cij,qdj->cqdi', transform, bf)\n", (868, 900), True, 'import numpy as nm\n'), ((926, 968), 'numpy.einsum', 'nm.einsum', (['"""cij,oqdj->cqdi"""', 'transform', 'bf'], {}), "('cij,oqdj->cqdi', transform, bf)\n", (935, 968), True, 'import numpy as nm\n'), ((2587, 2603), 'six.moves.range', 'range', (['(order - 3)'], {}), '(order - 3)\n', (2592, 2603), False, 'from six.moves import range\n'), ((4855, 4868), 'six.moves.range', 'range', (['(ao - 1)'], {}), '(ao - 1)\n', (4860, 4868), False, 'from six.moves import range\n'), ((11384, 11401), 'numpy.asarray', 'nm.asarray', (['coors'], {}), '(coors)\n', (11394, 11401), True, 'import numpy as nm\n'), ((14715, 14746), 'numpy.ones', 'nm.ones', (['(n_v, n_v)', 'nm.float64'], {}), '((n_v, n_v), nm.float64)\n', (14722, 14746), True, 'import numpy as nm\n'), ((14772, 14800), 'numpy.transpose', 'nm.transpose', (['geometry.coors'], {}), '(geometry.coors)\n', (14784, 14800), True, 'import numpy as nm\n'), ((14876, 14903), 'numpy.ones', 'nm.ones', (['(n_v,)', 'nm.float64'], {}), '((n_v,), nm.float64)\n', (14883, 14903), True, 'import numpy as nm\n'), ((14995, 15027), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['node_coors'], {}), '(node_coors)\n', (15015, 15027), True, 'import numpy as nm\n'), ((15619, 15651), 'numpy.zeros', 'nm.zeros', (['(n_nod, n_v)', 'nm.int32'], {}), '((n_nod, n_v), nm.int32)\n', (15627, 15651), True, 'import numpy as nm\n'), ((15666, 15696), 'numpy.zeros', 'nm.zeros', (['(n_nod, 2)', 'nm.int32'], {}), '((n_nod, 2), nm.int32)\n', (15674, 15696), True, 'import numpy as nm\n'), ((17158, 17174), 'numpy.sum', 'nm.sum', (['nodes', '(1)'], {}), '(nodes, 1)\n', (17164, 17174), True, 'import numpy as nm\n'), ((18725, 18791), 'numpy.zeros', 'nm.zeros', (['(coors.shape[0], dim, dim, self.n_nod)'], {'dtype': 'nm.float64'}), '((coors.shape[0], dim, dim, self.n_nod), dtype=nm.float64)\n', (18733, 18791), True, 'import numpy as nm\n'), ((20302, 20323), 'numpy.resize', 'nm.resize', (['nts', 'shape'], {}), '(nts, shape)\n', (20311, 20323), True, 'import numpy as nm\n'), ((20421, 20444), 'numpy.resize', 'nm.resize', (['nodes', 'shape'], {}), '(nodes, shape)\n', (20430, 20444), True, 'import numpy as nm\n'), ((20571, 20596), 'numpy.ones', 'nm.ones', (['(n_v,)', 'nm.int32'], {}), '((n_v,), nm.int32)\n', (20578, 20596), True, 'import numpy as nm\n'), ((20784, 20816), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['node_coors'], {}), '(node_coors)\n', (20804, 20816), True, 'import numpy as nm\n'), ((21893, 21925), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['node_coors'], {}), '(node_coors)\n', (21913, 21925), True, 'import numpy as nm\n'), ((22493, 22529), 'numpy.zeros', 'nm.zeros', (['(n_nod, 2 * dim)', 'nm.int32'], {}), '((n_nod, 2 * dim), nm.int32)\n', (22501, 22529), True, 'import numpy as nm\n'), ((22544, 22574), 'numpy.zeros', 'nm.zeros', (['(n_nod, 2)', 'nm.int32'], {}), '((n_nod, 2), nm.int32)\n', (22552, 22574), True, 'import numpy as nm\n'), ((24650, 24666), 'numpy.sum', 'nm.sum', (['nodes', '(1)'], {}), '(nodes, 1)\n', (24656, 24666), True, 'import numpy as nm\n'), ((27068, 27134), 'numpy.zeros', 'nm.zeros', (['(coors.shape[0], dim, dim, self.n_nod)'], {'dtype': 'nm.float64'}), '((coors.shape[0], dim, dim, self.n_nod), dtype=nm.float64)\n', (27076, 27134), True, 'import numpy as nm\n'), ((27229, 27239), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (27234, 27239), False, 'from six.moves import range\n'), ((27692, 27702), 
'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (27697, 27702), False, 'from six.moves import range\n'), ((28893, 28925), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['node_coors'], {}), '(node_coors)\n', (28913, 28925), True, 'import numpy as nm\n'), ((28982, 29021), 'numpy.where', 'nm.where', (['(self.nodes > 0)', 'self.nodes', '(1)'], {}), '(self.nodes > 0, self.nodes, 1)\n', (28990, 29021), True, 'import numpy as nm\n'), ((29049, 29069), 'numpy.prod', 'nm.prod', (['aux'], {'axis': '(1)'}), '(aux, axis=1)\n', (29056, 29069), True, 'import numpy as nm\n'), ((29785, 29817), 'numpy.zeros', 'nm.zeros', (['(n_nod, dim)', 'nm.int32'], {}), '((n_nod, dim), nm.int32)\n', (29793, 29817), True, 'import numpy as nm\n'), ((29832, 29862), 'numpy.zeros', 'nm.zeros', (['(n_nod, 2)', 'nm.int32'], {}), '((n_nod, 2), nm.int32)\n', (29840, 29862), True, 'import numpy as nm\n'), ((29938, 29968), 'numpy.arange', 'nm.arange', (['n_v'], {'dtype': 'nm.int32'}), '(n_v, dtype=nm.int32)\n', (29947, 29968), True, 'import numpy as nm\n'), ((29992, 30034), 'numpy.array', 'nm.array', (['vertex_maps[dim]'], {'dtype': 'nm.int32'}), '(vertex_maps[dim], dtype=nm.int32)\n', (30000, 30034), True, 'import numpy as nm\n'), ((32036, 32055), 'numpy.array', 'nm.array', (['face_axes'], {}), '(face_axes)\n', (32044, 32055), True, 'import numpy as nm\n'), ((32316, 32336), 'sfepy.base.base.assert_', 'assert_', (['(ii == n_nod)'], {}), '(ii == n_nod)\n', (32323, 32336), False, 'from sfepy.base.base import find_subclasses, assert_, Struct\n'), ((32521, 32560), 'numpy.zeros', 'nm.zeros', (['nodes.shape'], {'dtype': 'nm.float64'}), '(nodes.shape, dtype=nm.float64)\n', (32529, 32560), True, 'import numpy as nm\n'), ((33322, 33371), 'numpy.tile', 'nm.tile', (['nodes[:n_per_face, face_axes[0]]', '(6, 1)'], {}), '(nodes[:n_per_face, face_axes[0]], (6, 1))\n', (33329, 33371), True, 'import numpy as nm\n'), ((1338, 1354), 'six.moves.range', 'range', (['(order - 1)'], {}), '(order - 1)\n', (1343, 1354), False, 'from six.moves import range\n'), ((1914, 1930), 'six.moves.range', 'range', (['(order - 2)'], {}), '(order - 2)\n', (1919, 1930), False, 'from six.moves import range\n'), ((2627, 2643), 'six.moves.range', 'range', (['(order - 3)'], {}), '(order - 3)\n', (2632, 2643), False, 'from six.moves import range\n'), ((3356, 3369), 'six.moves.range', 'range', (['(ao - 1)'], {}), '(ao - 1)\n', (3361, 3369), False, 'from six.moves import range\n'), ((3972, 3985), 'six.moves.range', 'range', (['(ao - 1)'], {}), '(ao - 1)\n', (3977, 3985), False, 'from six.moves import range\n'), ((4892, 4905), 'six.moves.range', 'range', (['(ao - 1)'], {}), '(ao - 1)\n', (4897, 4905), False, 'from six.moves import range\n'), ((5845, 5879), 'numpy.where', 'nm.where', (['(nts[1:, 1] > nts[:-1, 1])'], {}), '(nts[1:, 1] > nts[:-1, 1])\n', (5853, 5879), True, 'import numpy as nm\n'), ((6741, 6772), 'numpy.where', 'nm.where', (['(node_types[:, 0] == 0)'], {}), '(node_types[:, 0] == 0)\n', (6749, 6772), True, 'import numpy as nm\n'), ((6941, 6972), 'numpy.where', 'nm.where', (['(node_types[:, 0] == 1)'], {}), '(node_types[:, 0] == 1)\n', (6949, 6972), True, 'import numpy as nm\n'), ((7140, 7171), 'numpy.where', 'nm.where', (['(node_types[:, 0] == 2)'], {}), '(node_types[:, 0] == 2)\n', (7148, 7171), True, 'import numpy as nm\n'), ((7341, 7372), 'numpy.where', 'nm.where', (['(node_types[:, 0] == 3)'], {}), '(node_types[:, 0] == 3)\n', (7349, 7372), True, 'import numpy as nm\n'), ((12112, 12190), 'numpy.empty', 'nm.empty', (['(coors.shape[0], coors.shape[1], 
bdim, self.n_nod)'], {'dtype': 'nm.float64'}), '((coors.shape[0], coors.shape[1], bdim, self.n_nod), dtype=nm.float64)\n', (12120, 12190), True, 'import numpy as nm\n'), ((14843, 14855), 'numpy.linalg.inv', 'nla.inv', (['mtx'], {}), '(mtx)\n', (14850, 14855), True, 'import numpy.linalg as nla\n'), ((15776, 15802), 'numpy.zeros', 'nm.zeros', (['(n_v,)', 'nm.int32'], {}), '((n_v,), nm.int32)\n', (15784, 15802), True, 'import numpy as nm\n'), ((15924, 15954), 'numpy.arange', 'nm.arange', (['n_v'], {'dtype': 'nm.int32'}), '(n_v, dtype=nm.int32)\n', (15933, 15954), True, 'import numpy as nm\n'), ((17190, 17213), 'numpy.all', 'nm.all', (['(orders == order)'], {}), '(orders == order)\n', (17196, 17213), True, 'import numpy as nm\n'), ((17412, 17443), 'numpy.ones', 'nm.ones', (['(n_nod, n_v)', 'nm.int32'], {}), '((n_nod, n_v), nm.int32)\n', (17419, 17443), True, 'import numpy as nm\n'), ((17779, 17806), 'numpy.concatenate', 'nm.concatenate', (['(coor, [1])'], {}), '((coor, [1]))\n', (17793, 17806), True, 'import numpy as nm\n'), ((17824, 17847), 'numpy.dot', 'nm.dot', (['self.mtx_i', 'rhs'], {}), '(self.mtx_i, rhs)\n', (17830, 17847), True, 'import numpy as nm\n'), ((17930, 17952), 'numpy.ones', 'nm.ones', (['(1)', 'nm.float64'], {}), '(1, nm.float64)\n', (17937, 17952), True, 'import numpy as nm\n'), ((17975, 17993), 'six.moves.range', 'range', (['bc.shape[0]'], {}), '(bc.shape[0])\n', (17980, 17993), False, 'from six.moves import range\n'), ((18231, 18254), 'numpy.zeros', 'nm.zeros', (['(1)', 'nm.float64'], {}), '(1, nm.float64)\n', (18239, 18254), True, 'import numpy as nm\n'), ((18277, 18289), 'six.moves.range', 'range', (['node1'], {}), '(node1)\n', (18282, 18289), False, 'from six.moves import range\n'), ((22398, 22440), 'numpy.array', 'nm.array', (['vertex_maps[dim]'], {'dtype': 'nm.int32'}), '(vertex_maps[dim], dtype=nm.int32)\n', (22406, 22440), True, 'import numpy as nm\n'), ((22654, 22682), 'numpy.zeros', 'nm.zeros', (['(n_nod,)', 'nm.int32'], {}), '((n_nod,), nm.int32)\n', (22662, 22682), True, 'import numpy as nm\n'), ((22804, 22834), 'numpy.arange', 'nm.arange', (['n_v'], {'dtype': 'nm.int32'}), '(n_v, dtype=nm.int32)\n', (22813, 22834), True, 'import numpy as nm\n'), ((24682, 24711), 'numpy.all', 'nm.all', (['(orders == order * dim)'], {}), '(orders == order * dim)\n', (24688, 24711), True, 'import numpy as nm\n'), ((24916, 24947), 'numpy.ones', 'nm.ones', (['(n_nod, n_v)', 'nm.int32'], {}), '((n_nod, n_v), nm.int32)\n', (24923, 24947), True, 'import numpy as nm\n'), ((25094, 25112), 'numpy.arange', 'nm.arange', (['(2 * dim)'], {}), '(2 * dim)\n', (25103, 25112), True, 'import numpy as nm\n'), ((25543, 25603), 'numpy.ones', 'nm.ones', (['(coors.shape[0], dim, self.n_nod)'], {'dtype': 'nm.float64'}), '((coors.shape[0], dim, self.n_nod), dtype=nm.float64)\n', (25550, 25603), True, 'import numpy as nm\n'), ((25627, 25637), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (25632, 25637), False, 'from six.moves import range\n'), ((26441, 26499), 'numpy.ones', 'nm.ones', (['(coors.shape[0], 1, self.n_nod)'], {'dtype': 'nm.float64'}), '((coors.shape[0], 1, self.n_nod), dtype=nm.float64)\n', (26448, 26499), True, 'import numpy as nm\n'), ((26523, 26533), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (26528, 26533), False, 'from six.moves import range\n'), ((27767, 27777), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (27772, 27777), False, 'from six.moves import range\n'), ((27907, 27917), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (27912, 27917), False, 
'from six.moves import range\n'), ((29095, 29124), 'numpy.where', 'nm.where', (['(self.nts[:, 0] == 1)'], {}), '(self.nts[:, 0] == 1)\n', (29103, 29124), True, 'import numpy as nm\n'), ((29153, 29182), 'numpy.where', 'nm.where', (['(self.nts[:, 0] == 2)'], {}), '(self.nts[:, 0] == 2)\n', (29161, 29182), True, 'import numpy as nm\n'), ((30135, 30174), 'numpy.arange', 'nm.arange', (['(2)', '(order + 1)'], {'dtype': 'nm.int32'}), '(2, order + 1, dtype=nm.int32)\n', (30144, 30174), True, 'import numpy as nm\n'), ((30192, 30233), 'numpy.zeros', 'nm.zeros', (['(n_per_edge, 2)'], {'dtype': 'nm.int32'}), '((n_per_edge, 2), dtype=nm.int32)\n', (30200, 30233), True, 'import numpy as nm\n'), ((30852, 30898), 'numpy.zeros', 'nm.zeros', (['(n_per_face * n_face, dim)', 'nm.int32'], {}), '((n_per_face * n_face, dim), nm.int32)\n', (30860, 30898), True, 'import numpy as nm\n'), ((30938, 30977), 'numpy.arange', 'nm.arange', (['(2)', '(order + 1)'], {'dtype': 'nm.int32'}), '(2, order + 1, dtype=nm.int32)\n', (30947, 30977), True, 'import numpy as nm\n'), ((30995, 31036), 'numpy.zeros', 'nm.zeros', (['(n_per_face, 2)'], {'dtype': 'nm.int32'}), '((n_per_face, 2), dtype=nm.int32)\n', (31003, 31036), True, 'import numpy as nm\n'), ((32123, 32162), 'numpy.arange', 'nm.arange', (['(2)', '(order + 1)'], {'dtype': 'nm.int32'}), '(2, order + 1, dtype=nm.int32)\n', (32132, 32162), True, 'import numpy as nm\n'), ((33798, 33836), 'numpy.tile', 'nm.tile', (['base', '(ori.shape[0], 1, 1, 1)'], {}), '(base, (ori.shape[0], 1, 1, 1))\n', (33805, 33836), True, 'import numpy as nm\n'), ((1958, 1979), 'six.moves.range', 'range', (['(order - 2 - i1)'], {}), '(order - 2 - i1)\n', (1963, 1979), False, 'from six.moves import range\n'), ((2671, 2697), 'six.moves.range', 'range', (['(order - 3 - i1 - i2)'], {}), '(order - 3 - i1 - i2)\n', (2676, 2697), False, 'from six.moves import range\n'), ((4013, 4026), 'six.moves.range', 'range', (['(ao - 1)'], {}), '(ao - 1)\n', (4018, 4026), False, 'from six.moves import range\n'), ((4933, 4946), 'six.moves.range', 'range', (['(ao - 1)'], {}), '(ao - 1)\n', (4938, 4946), False, 'from six.moves import range\n'), ((11923, 11949), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['base'], {}), '(base)\n', (11943, 11949), True, 'import numpy as nm\n'), ((15390, 15398), 'six.moves.range', 'range', (['n'], {}), '(n)\n', (15395, 15398), False, 'from six.moves import range\n'), ((15983, 16015), 'numpy.identity', 'nm.identity', (['n_v'], {'dtype': 'nm.int32'}), '(n_v, dtype=nm.int32)\n', (15994, 16015), True, 'import numpy as nm\n'), ((17469, 17496), 'numpy.dot', 'nm.dot', (['tmp', 'geometry.coors'], {}), '(tmp, geometry.coors)\n', (17475, 17496), True, 'import numpy as nm\n'), ((17543, 17572), 'numpy.dot', 'nm.dot', (['nodes', 'geometry.coors'], {}), '(nodes, geometry.coors)\n', (17549, 17572), True, 'import numpy as nm\n'), ((18062, 18077), 'six.moves.range', 'range', (['node[i1]'], {}), '(node[i1])\n', (18067, 18077), False, 'from six.moves import range\n'), ((18354, 18376), 'numpy.ones', 'nm.ones', (['(1)', 'nm.float64'], {}), '(1, nm.float64)\n', (18361, 18376), True, 'import numpy as nm\n'), ((18403, 18415), 'six.moves.range', 'range', (['node1'], {}), '(node1)\n', (18408, 18415), False, 'from six.moves import range\n'), ((22016, 22042), 'numpy.sqrt', 'nm.sqrt', (['geometry.n_vertex'], {}), '(geometry.n_vertex)\n', (22023, 22042), True, 'import numpy as nm\n'), ((22859, 22891), 'numpy.identity', 'nm.identity', (['n_v'], {'dtype': 'nm.int32'}), '(n_v, dtype=nm.int32)\n', (22870, 22891), True, 
'import numpy as nm\n'), ((22947, 22957), 'six.moves.range', 'range', (['n_v'], {}), '(n_v)\n', (22952, 22957), False, 'from six.moves import range\n'), ((24973, 25000), 'numpy.dot', 'nm.dot', (['tmp', 'geometry.coors'], {}), '(tmp, geometry.coors)\n', (24979, 25000), True, 'import numpy as nm\n'), ((25778, 25788), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (25783, 25788), False, 'from six.moves import range\n'), ((28023, 28033), 'six.moves.range', 'range', (['dim'], {}), '(dim)\n', (28028, 28033), False, 'from six.moves import range\n'), ((31176, 31195), 'numpy.diff', 'nm.diff', (['ns'], {'axis': '(0)'}), '(ns, axis=0)\n', (31183, 31195), True, 'import numpy as nm\n'), ((31452, 31471), 'numpy.meshgrid', 'nm.meshgrid', (['ik', 'ik'], {}), '(ik, ik)\n', (31463, 31471), True, 'import numpy as nm\n'), ((32661, 32685), 'numpy.where', 'nm.where', (['(nts[:, 0] == 1)'], {}), '(nts[:, 0] == 1)\n', (32669, 32685), True, 'import numpy as nm\n'), ((32828, 32852), 'numpy.where', 'nm.where', (['(nts[:, 0] == 2)'], {}), '(nts[:, 0] == 2)\n', (32836, 32852), True, 'import numpy as nm\n'), ((32977, 33001), 'numpy.where', 'nm.where', (['(nts[:, 0] == 3)'], {}), '(nts[:, 0] == 3)\n', (32985, 33001), True, 'import numpy as nm\n'), ((33944, 33981), 'numpy.where', 'nm.where', (['(ori[:, self.edge_indx] == 1)'], {}), '(ori[:, self.edge_indx] == 1)\n', (33952, 33981), True, 'import numpy as nm\n'), ((34260, 34295), 'numpy.where', 'nm.where', (['((fori == 1) | (fori == 2))'], {}), '((fori == 1) | (fori == 2))\n', (34268, 34295), True, 'import numpy as nm\n'), ((34526, 34569), 'sfepy.linalg.insert_strided_axis', 'insert_strided_axis', (['sbase', '(0)', 'ori.shape[0]'], {}), '(sbase, 0, ori.shape[0])\n', (34545, 34569), False, 'from sfepy.linalg import combine, insert_strided_axis\n'), ((34652, 34671), 'numpy.where', 'nm.where', (['(fori >= 4)'], {}), '(fori >= 4)\n', (34660, 34671), True, 'import numpy as nm\n'), ((34842, 34877), 'numpy.where', 'nm.where', (['((fori == 5) | (fori == 6))'], {}), '((fori == 5) | (fori == 6))\n', (34850, 34877), True, 'import numpy as nm\n'), ((20674, 20706), 'numpy.dot', 'nm.dot', (['tmp', 'self.geometry.coors'], {}), '(tmp, self.geometry.coors)\n', (20680, 20706), True, 'import numpy as nm\n'), ((23249, 23259), 'six.moves.range', 'range', (['n_v'], {}), '(n_v)\n', (23254, 23259), False, 'from six.moves import range\n'), ((23448, 23458), 'six.moves.range', 'range', (['n_v'], {}), '(n_v)\n', (23453, 23458), False, 'from six.moves import range\n'), ((30374, 30392), 'numpy.where', 'nm.where', (['(n1 == n2)'], {}), '(n1 == n2)\n', (30382, 30392), True, 'import numpy as nm\n'), ((30419, 30437), 'numpy.where', 'nm.where', (['(n1 != n2)'], {}), '(n1 != n2)\n', (30427, 30437), True, 'import numpy as nm\n'), ((31219, 31231), 'numpy.abs', 'nm.abs', (['diff'], {}), '(diff)\n', (31225, 31231), True, 'import numpy as nm\n'), ((31267, 31286), 'numpy.where', 'nm.where', (['(asum == 0)'], {}), '(asum == 0)\n', (31275, 31286), True, 'import numpy as nm\n'), ((31350, 31369), 'numpy.where', 'nm.where', (['(asum == 2)'], {}), '(asum == 2)\n', (31358, 31369), True, 'import numpy as nm\n'), ((31400, 31419), 'numpy.where', 'nm.where', (['(asum == 1)'], {}), '(asum == 1)\n', (31408, 31419), True, 'import numpy as nm\n'), ((32213, 32232), 'sfepy.linalg.combine', 'combine', (['([ik] * dim)'], {}), '([ik] * dim)\n', (32220, 32232), False, 'from sfepy.linalg import combine, insert_strided_axis\n'), ((19274, 19290), 'six.moves.range', 'range', (['node[ig1]'], {}), '(node[ig1])\n', (19279, 19290), 
False, 'from six.moves import range\n')]
|
from __future__ import absolute_import
import numpy as nm
import sfepy.linalg as la
from sfepy.discrete.integrals import Integral
from sfepy.discrete import PolySpace
from six.moves import range
def prepare_remap(indices, n_full):
"""
Prepare vector for remapping range `[0, n_full]` to its subset given
by `indices`.
"""
remap = nm.empty((n_full,), dtype=nm.int32)
remap.fill(-1)
remap[indices] = nm.arange(indices.shape[0], dtype=nm.int32)
return remap
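# Illustrative sketch (not part of the original module): prepare_remap() builds an
# inverse index into a subset, e.g.
#   prepare_remap(nm.array([2, 5]), 7) -> array([-1, -1, 0, -1, -1, 1, -1]),
# so remap[5] == 1 gives the position of entry 5 within the subset `indices`.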
def invert_remap(remap):
"""
    Return the inverse of `remap`, i.e. a mapping from sub-range
    indices to the full range, see :func:`prepare_remap()`.
"""
if remap is not None:
inverse = nm.where(remap >= 0)[0].astype(nm.int32)
else:
inverse = None
return inverse
def prepare_translate(old_indices, new_indices):
"""
Prepare vector for translating `old_indices` to `new_indices`.
Returns
-------
translate : array
The translation vector. Then `new_ar = translate[old_ar]`.
"""
old_indices = nm.asarray(old_indices)
new_indices = nm.asarray(new_indices)
translate = nm.zeros(old_indices.max() + 1, dtype=new_indices.dtype)
translate[old_indices] = new_indices
return translate
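# Illustrative sketch (not part of the original module): prepare_translate() supports
# element-wise relabelling, e.g.
#   translate = prepare_translate([2, 4], [10, 20])  # -> array([0, 0, 10, 0, 20])
#   translate[nm.array([4, 2, 2])]                   # -> array([20, 10, 10])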
def compute_nodal_normals(nodes, region, field, return_imap=False):
"""
    Nodal normals are computed by simple averaging of the element normals of
    the elements each node is contained in.
"""
dim = region.dim
field.domain.create_surface_group(region)
field.setup_surface_data(region)
# Custom integral with quadrature points in nodes.
ps = PolySpace.any_from_args('', field.gel.surface_facet,
field.approx_order)
qp_coors = ps.node_coors
# Unit normals -> weights = ones.
qp_weights = nm.ones(qp_coors.shape[0], dtype=nm.float64)
integral = Integral('aux', coors=qp_coors, weights=qp_weights)
normals = nm.zeros((nodes.shape[0], dim), dtype=nm.float64)
mask = nm.zeros((nodes.max() + 1,), dtype=nm.int32)
imap = nm.empty_like(mask)
imap.fill(nodes.shape[0]) # out-of-range index for normals.
imap[nodes] = nm.arange(nodes.shape[0], dtype=nm.int32)
cmap, _ = field.get_mapping(region, integral, 'surface')
e_normals = cmap.normal[..., 0]
sd = field.surface_data[region.name]
econn = sd.get_connectivity()
mask[econn] += 1
# normals[imap[econn]] += e_normals
im = imap[econn]
for ii, en in enumerate(e_normals):
normals[im[ii]] += en
# All nodes must have a normal.
if not nm.all(mask[nodes] > 0):
        raise ValueError('region %s has incomplete faces!' % region.name)
norm = la.norm_l2_along_axis(normals)[:, nm.newaxis]
if (norm < 1e-15).any():
raise ValueError('zero nodal normal! (a node in volume?)')
normals /= norm
if return_imap:
return normals, imap
else:
return normals
def _get_edge_path(graph, seed, mask, cycle=False):
"""
Get a path in an edge graph starting with seed. The mask is incremented by
one at positions of the path vertices.
"""
if mask[seed]:
return []
path = [seed]
mask[seed] = 1
row = graph[seed].indices
nv = len(row)
while nv:
if nv == 2:
if mask[row[0]]:
if mask[row[1]]:
if cycle:
path.append(seed)
break
else:
vert = row[1]
else:
vert = row[0]
elif mask[row[0]]:
break
else:
vert = row[0]
path.append(vert)
mask[vert] = 1
row = graph[vert].indices
nv = len(row)
path = nm.array(path, dtype=nm.int32)
return path
def get_edge_paths(graph, mask):
"""
Get all edge paths in a graph with non-masked vertices. The mask is
updated.
"""
nodes = nm.unique(graph.indices)
npv = nm.diff(graph.indptr)
if npv.max() > 2:
raise ValueError('more than 2 edges sharing a vertex!')
seeds = nm.where(npv == 1)[0]
# 1. get paths.
paths = []
for seed in seeds:
path = _get_edge_path(graph, seed, mask)
if len(path):
paths.append(path)
    # 2. get possible remaining cycles.
while 1:
ii = nm.where(mask[nodes] == 0)[0]
if not len(ii):
break
path = _get_edge_path(graph, nodes[ii[0]], mask, cycle=True)
if len(path):
paths.append(path)
return paths
def compute_nodal_edge_dirs(nodes, region, field, return_imap=False):
"""
Nodal edge directions are computed by simple averaging of direction vectors
of edges a node is contained in. Edges are assumed to be straight and a
node must be on a single edge (a border node) or shared by exactly two
edges.
"""
coors = region.domain.mesh.coors
dim = coors.shape[1]
graph = region.get_edge_graph()
imap = prepare_remap(nodes, nodes.max() + 1)
mask = nm.zeros_like(imap)
try:
paths = get_edge_paths(graph, mask)
except ValueError:
raise ValueError('more than 2 edges sharing a vertex in region %s!'
% region.name)
# All nodes must have an edge direction.
if not nm.all(mask[nodes]):
        raise ValueError('region %s has incomplete edges!' % region.name)
edge_dirs = nm.zeros((nodes.shape[0], dim), dtype=nm.float64)
for path in paths:
pcoors = coors[path]
edirs = nm.diff(pcoors, axis=0)
la.normalize_vectors(edirs, eps=1e-12)
im = imap[nm.c_[path[:-1], path[1:]]]
for ii, edir in enumerate(edirs):
edge_dirs[im[ii]] += edir
la.normalize_vectors(edge_dirs, eps=1e-12)
if return_imap:
return edge_dirs, imap
else:
return edge_dirs
def get_min_value(dofs):
"""
Get a reasonable minimal value of DOFs suitable for extending over a
whole domain.
"""
if dofs.shape[1] > 1: # Vector.
val = 0.0
else: # Scalar.
val = dofs.min()
return val
def extend_cell_data(data, domain, rname, val=None, is_surface=False,
average_surface=True):
"""
Extend cell data defined in a region to the whole domain.
Parameters
----------
data : array
The data defined in the region.
domain : FEDomain instance
The FE domain.
rname : str
The region name.
val : float, optional
The value for filling cells not covered by the region. If not given,
the smallest value in data is used.
is_surface : bool
If True, the data are defined on a surface region. In that case the
values are averaged or summed into the cells containing the region
surface faces (a cell can have several faces of the surface), see
`average_surface`.
average_surface : bool
If True, the data defined on a surface region are averaged, otherwise
the data are summed.
Returns
-------
edata : array
The data extended to all domain elements.
"""
n_el = domain.shape.n_el
if data.shape[0] == n_el: return data
if val is None:
if data.shape[2] > 1: # Vector.
val = nm.amin(nm.abs(data))
else: # Scalar.
val = nm.amin(data)
edata = nm.empty((n_el,) + data.shape[1:], dtype=data.dtype)
edata.fill(val)
region = domain.regions[rname]
if not is_surface:
edata[region.get_cells()] = data
else:
cells = region.get_cells(true_cells_only=False)
ucells = nm.unique(cells)
if len(cells) != len(region.facets):
raise ValueError('region %s has an inner face!'
% region.name)
if average_surface:
avg = nm.bincount(cells, minlength=n_el)[ucells]
else:
avg = 1.0
for ic in range(data.shape[2]):
if nm.isrealobj(data):
evals = nm.bincount(cells, weights=data[:, 0, ic, 0],
minlength=n_el)[ucells]
else:
evals = (nm.bincount(cells, weights=data[:, 0, ic, 0].real,
minlength=n_el)[ucells]
+ 1j *
nm.bincount(cells, weights=data[:, 0, ic, 0].imag,
minlength=n_el)[ucells])
edata[ucells, 0, ic, 0] = evals / avg
return edata
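# Illustrative usage sketch (hypothetical region name, not part of the original module):
#
#   # `cell_data` has shape (n_cells_in_region, 1, n_components, 1); cells outside the
#   # region 'Omega_L' are filled with a default value derived from `cell_data` (see `val`):
#   # full_data = extend_cell_data(cell_data, domain, 'Omega_L')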
def refine_mesh(filename, level):
"""
Uniformly refine `level`-times a mesh given by `filename`.
    The refined mesh is saved to a file whose name is constructed from the base
    name of `filename` with the `'_r'` suffix appended `level` times.
Parameters
----------
filename : str
The mesh file name.
level : int
The refinement level.
"""
import os
from sfepy.base.base import output
from sfepy.discrete.fem import Mesh, FEDomain
if level > 0:
mesh = Mesh.from_file(filename)
domain = FEDomain(mesh.name, mesh)
for ii in range(level):
output('refine %d...' % ii)
domain = domain.refine()
output('... %d nodes %d elements'
% (domain.shape.n_nod, domain.shape.n_el))
suffix = os.path.splitext(filename)[1]
filename = domain.name + suffix
domain.mesh.write(filename, io='auto')
return filename
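# Illustrative sketch (hypothetical file name, not part of the original module): as the
# docstring above states, the output name appends '_r' once per refinement level, so
#   refine_mesh('cube.mesh', 2)
# should write (and return) 'cube_r_r.mesh'.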
|
[
"sfepy.discrete.integrals.Integral",
"sfepy.linalg.normalize_vectors",
"sfepy.discrete.PolySpace.any_from_args",
"sfepy.linalg.norm_l2_along_axis",
"sfepy.discrete.fem.Mesh.from_file",
"sfepy.discrete.fem.FEDomain",
"sfepy.base.base.output"
] |
[((352, 387), 'numpy.empty', 'nm.empty', (['(n_full,)'], {'dtype': 'nm.int32'}), '((n_full,), dtype=nm.int32)\n', (360, 387), True, 'import numpy as nm\n'), ((428, 471), 'numpy.arange', 'nm.arange', (['indices.shape[0]'], {'dtype': 'nm.int32'}), '(indices.shape[0], dtype=nm.int32)\n', (437, 471), True, 'import numpy as nm\n'), ((1061, 1084), 'numpy.asarray', 'nm.asarray', (['old_indices'], {}), '(old_indices)\n', (1071, 1084), True, 'import numpy as nm\n'), ((1103, 1126), 'numpy.asarray', 'nm.asarray', (['new_indices'], {}), '(new_indices)\n', (1113, 1126), True, 'import numpy as nm\n'), ((1633, 1705), 'sfepy.discrete.PolySpace.any_from_args', 'PolySpace.any_from_args', (['""""""', 'field.gel.surface_facet', 'field.approx_order'], {}), "('', field.gel.surface_facet, field.approx_order)\n", (1656, 1705), False, 'from sfepy.discrete import PolySpace\n'), ((1823, 1867), 'numpy.ones', 'nm.ones', (['qp_coors.shape[0]'], {'dtype': 'nm.float64'}), '(qp_coors.shape[0], dtype=nm.float64)\n', (1830, 1867), True, 'import numpy as nm\n'), ((1884, 1935), 'sfepy.discrete.integrals.Integral', 'Integral', (['"""aux"""'], {'coors': 'qp_coors', 'weights': 'qp_weights'}), "('aux', coors=qp_coors, weights=qp_weights)\n", (1892, 1935), False, 'from sfepy.discrete.integrals import Integral\n'), ((1951, 2000), 'numpy.zeros', 'nm.zeros', (['(nodes.shape[0], dim)'], {'dtype': 'nm.float64'}), '((nodes.shape[0], dim), dtype=nm.float64)\n', (1959, 2000), True, 'import numpy as nm\n'), ((2068, 2087), 'numpy.empty_like', 'nm.empty_like', (['mask'], {}), '(mask)\n', (2081, 2087), True, 'import numpy as nm\n'), ((2170, 2211), 'numpy.arange', 'nm.arange', (['nodes.shape[0]'], {'dtype': 'nm.int32'}), '(nodes.shape[0], dtype=nm.int32)\n', (2179, 2211), True, 'import numpy as nm\n'), ((3767, 3797), 'numpy.array', 'nm.array', (['path'], {'dtype': 'nm.int32'}), '(path, dtype=nm.int32)\n', (3775, 3797), True, 'import numpy as nm\n'), ((3962, 3986), 'numpy.unique', 'nm.unique', (['graph.indices'], {}), '(graph.indices)\n', (3971, 3986), True, 'import numpy as nm\n'), ((3997, 4018), 'numpy.diff', 'nm.diff', (['graph.indptr'], {}), '(graph.indptr)\n', (4004, 4018), True, 'import numpy as nm\n'), ((5069, 5088), 'numpy.zeros_like', 'nm.zeros_like', (['imap'], {}), '(imap)\n', (5082, 5088), True, 'import numpy as nm\n'), ((5454, 5503), 'numpy.zeros', 'nm.zeros', (['(nodes.shape[0], dim)'], {'dtype': 'nm.float64'}), '((nodes.shape[0], dim), dtype=nm.float64)\n', (5462, 5503), True, 'import numpy as nm\n'), ((5776, 5818), 'sfepy.linalg.normalize_vectors', 'la.normalize_vectors', (['edge_dirs'], {'eps': '(1e-12)'}), '(edge_dirs, eps=1e-12)\n', (5796, 5818), True, 'import sfepy.linalg as la\n'), ((7421, 7473), 'numpy.empty', 'nm.empty', (['((n_el,) + data.shape[1:])'], {'dtype': 'data.dtype'}), '((n_el,) + data.shape[1:], dtype=data.dtype)\n', (7429, 7473), True, 'import numpy as nm\n'), ((2587, 2610), 'numpy.all', 'nm.all', (['(mask[nodes] > 0)'], {}), '(mask[nodes] > 0)\n', (2593, 2610), True, 'import numpy as nm\n'), ((2700, 2730), 'sfepy.linalg.norm_l2_along_axis', 'la.norm_l2_along_axis', (['normals'], {}), '(normals)\n', (2721, 2730), True, 'import sfepy.linalg as la\n'), ((4119, 4137), 'numpy.where', 'nm.where', (['(npv == 1)'], {}), '(npv == 1)\n', (4127, 4137), True, 'import numpy as nm\n'), ((5340, 5359), 'numpy.all', 'nm.all', (['mask[nodes]'], {}), '(mask[nodes])\n', (5346, 5359), True, 'import numpy as nm\n'), ((5573, 5596), 'numpy.diff', 'nm.diff', (['pcoors'], {'axis': '(0)'}), '(pcoors, axis=0)\n', (5580, 5596), True, 
'import numpy as nm\n'), ((5605, 5643), 'sfepy.linalg.normalize_vectors', 'la.normalize_vectors', (['edirs'], {'eps': '(1e-12)'}), '(edirs, eps=1e-12)\n', (5625, 5643), True, 'import sfepy.linalg as la\n'), ((7679, 7695), 'numpy.unique', 'nm.unique', (['cells'], {}), '(cells)\n', (7688, 7695), True, 'import numpy as nm\n'), ((7992, 8012), 'six.moves.range', 'range', (['data.shape[2]'], {}), '(data.shape[2])\n', (7997, 8012), False, 'from six.moves import range\n'), ((9087, 9111), 'sfepy.discrete.fem.Mesh.from_file', 'Mesh.from_file', (['filename'], {}), '(filename)\n', (9101, 9111), False, 'from sfepy.discrete.fem import Mesh, FEDomain\n'), ((9129, 9154), 'sfepy.discrete.fem.FEDomain', 'FEDomain', (['mesh.name', 'mesh'], {}), '(mesh.name, mesh)\n', (9137, 9154), False, 'from sfepy.discrete.fem import Mesh, FEDomain\n'), ((9173, 9185), 'six.moves.range', 'range', (['level'], {}), '(level)\n', (9178, 9185), False, 'from six.moves import range\n'), ((4367, 4393), 'numpy.where', 'nm.where', (['(mask[nodes] == 0)'], {}), '(mask[nodes] == 0)\n', (4375, 4393), True, 'import numpy as nm\n'), ((7394, 7407), 'numpy.amin', 'nm.amin', (['data'], {}), '(data)\n', (7401, 7407), True, 'import numpy as nm\n'), ((8029, 8047), 'numpy.isrealobj', 'nm.isrealobj', (['data'], {}), '(data)\n', (8041, 8047), True, 'import numpy as nm\n'), ((9199, 9226), 'sfepy.base.base.output', 'output', (["('refine %d...' % ii)"], {}), "('refine %d...' % ii)\n", (9205, 9226), False, 'from sfepy.base.base import output\n'), ((9276, 9352), 'sfepy.base.base.output', 'output', (["('... %d nodes %d elements' % (domain.shape.n_nod, domain.shape.n_el))"], {}), "('... %d nodes %d elements' % (domain.shape.n_nod, domain.shape.n_el))\n", (9282, 9352), False, 'from sfepy.base.base import output\n'), ((9390, 9416), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (9406, 9416), False, 'import os\n'), ((7338, 7350), 'numpy.abs', 'nm.abs', (['data'], {}), '(data)\n', (7344, 7350), True, 'import numpy as nm\n'), ((7893, 7927), 'numpy.bincount', 'nm.bincount', (['cells'], {'minlength': 'n_el'}), '(cells, minlength=n_el)\n', (7904, 7927), True, 'import numpy as nm\n'), ((701, 721), 'numpy.where', 'nm.where', (['(remap >= 0)'], {}), '(remap >= 0)\n', (709, 721), True, 'import numpy as nm\n'), ((8073, 8134), 'numpy.bincount', 'nm.bincount', (['cells'], {'weights': 'data[:, 0, ic, 0]', 'minlength': 'n_el'}), '(cells, weights=data[:, 0, ic, 0], minlength=n_el)\n', (8084, 8134), True, 'import numpy as nm\n'), ((8223, 8289), 'numpy.bincount', 'nm.bincount', (['cells'], {'weights': 'data[:, 0, ic, 0].real', 'minlength': 'n_el'}), '(cells, weights=data[:, 0, ic, 0].real, minlength=n_el)\n', (8234, 8289), True, 'import numpy as nm\n'), ((8392, 8458), 'numpy.bincount', 'nm.bincount', (['cells'], {'weights': 'data[:, 0, ic, 0].imag', 'minlength': 'n_el'}), '(cells, weights=data[:, 0, ic, 0].imag, minlength=n_el)\n', (8403, 8458), True, 'import numpy as nm\n')]
|
"""
dayong.operations
~~~~~~~~~~~~~~~~~
Data model operations which include retrieval and update commands.
"""
from typing import Any
import tanjun
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel, select
from sqlmodel.engine.result import ScalarResult
from sqlmodel.ext.asyncio.session import AsyncSession
from dayong.abc import Database
from dayong.core.configs import DayongConfig, DayongDynamicLoader
class DatabaseImpl(Database):
"""Implementaion of a database connection for transacting and interacting with
database tables —those that derive from SQLModel.
"""
_conn: AsyncEngine
@staticmethod
async def update(instance: Any, update: Any) -> Any:
"""Overwrite value of class attribute.
Args:
instance (Any): A Class instance.
update (Any): A dictionary containing the attributes to be overwritten.
Returns:
Any: A class instance with updated attribute values.
"""
for key, value in update.items():
setattr(instance, key, value)
return instance
async def connect(
self, config: DayongConfig = tanjun.injected(type=DayongConfig)
) -> None:
self._conn = create_async_engine(
config.database_uri
if config.database_uri
else DayongDynamicLoader().load().database_uri
)
async def create_table(self) -> None:
async with self._conn.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
async def add_row(self, table_model: SQLModel) -> None:
async with AsyncSession(self._conn) as session:
session.add(table_model)
await session.commit()
async def remove_row(self, table_model: SQLModel, attribute: str) -> None:
model = type(table_model)
async with AsyncSession(self._conn) as session:
# Temp ignore incompatible type passed to `exec()`. See:
# https://github.com/tiangolo/sqlmodel/issues/54
# https://github.com/tiangolo/sqlmodel/pull/58
row: ScalarResult[Any] = await session.exec(
select(model).where(
getattr(model, attribute) == getattr(table_model, attribute)
) # type: ignore
)
await session.delete(row.one())
await session.commit()
async def get_row(self, table_model: SQLModel, attribute: str) -> ScalarResult[Any]:
model = type(table_model)
async with AsyncSession(self._conn) as session:
# Temp ignore incompatible type passed to `exec()`. See:
# https://github.com/tiangolo/sqlmodel/issues/54
# https://github.com/tiangolo/sqlmodel/pull/58
row: ScalarResult[Any] = await session.exec(
select(model).where(
getattr(model, attribute) == getattr(table_model, attribute)
) # type: ignore
)
return row
async def get_all_row(self, table_model: type[SQLModel]) -> ScalarResult[Any]:
async with AsyncSession(self._conn) as session:
return await session.exec(select(table_model)) # type: ignore
async def update_row(self, table_model: SQLModel, attribute: str) -> None:
model = type(table_model)
table = table_model.__dict__
async with AsyncSession(self._conn) as session:
row: ScalarResult[Any] = await session.exec(
select(model).where(
getattr(model, attribute) == getattr(table_model, attribute)
) # type: ignore
)
task = row.one()
task = await self.update(task, table)
session.add(task)
await session.commit()
await session.refresh(task)
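# Illustrative usage sketch (assumes an injected DayongConfig and a SQLModel table class
# named `SomeTable`; not part of the original module):
#
#   db = DatabaseImpl()
#   await db.connect(config)            # build the async engine from the database URI
#   await db.create_table()             # create all tables derived from SQLModel
#   await db.add_row(SomeTable(...))    # insert a row within an AsyncSession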
|
[
"sqlmodel.ext.asyncio.session.AsyncSession",
"sqlmodel.select"
] |
[((1190, 1224), 'tanjun.injected', 'tanjun.injected', ([], {'type': 'DayongConfig'}), '(type=DayongConfig)\n', (1205, 1224), False, 'import tanjun\n'), ((1650, 1674), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (1662, 1674), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((1892, 1916), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (1904, 1916), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((2563, 2587), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (2575, 2587), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((3134, 3158), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (3146, 3158), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((3417, 3441), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (3429, 3441), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((3209, 3228), 'sqlmodel.select', 'select', (['table_model'], {}), '(table_model)\n', (3215, 3228), False, 'from sqlmodel import SQLModel, select\n'), ((1366, 1387), 'dayong.core.configs.DayongDynamicLoader', 'DayongDynamicLoader', ([], {}), '()\n', (1385, 1387), False, 'from dayong.core.configs import DayongConfig, DayongDynamicLoader\n'), ((2191, 2204), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (2197, 2204), False, 'from sqlmodel import SQLModel, select\n'), ((2862, 2875), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (2868, 2875), False, 'from sqlmodel import SQLModel, select\n'), ((3527, 3540), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (3533, 3540), False, 'from sqlmodel import SQLModel, select\n')]
|
from typing import TYPE_CHECKING, List, Optional
from uuid import UUID
from sqlalchemy import event
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import DomainURLORMModel, url_pre_save
from joj.horse.models.link_tables import ProblemProblemSetLink
from joj.horse.schemas.base import Operation
from joj.horse.schemas.problem_set import ProblemSetDetail
from joj.horse.utils.errors import BizError, ErrorCode
if TYPE_CHECKING:
from joj.horse.models import Domain, Problem, Record, User
class ProblemSet(DomainURLORMModel, ProblemSetDetail, table=True): # type: ignore[call-arg]
__tablename__ = "problem_sets"
__table_args__ = (UniqueConstraint("domain_id", "url"),)
domain_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
)
)
domain: "Domain" = Relationship(back_populates="problem_sets")
owner_id: Optional[UUID] = Field(
sa_column=Column(
GUID,
ForeignKey("users.id", ondelete="SET NULL"),
nullable=True,
)
)
owner: Optional["User"] = Relationship(back_populates="owned_problem_sets")
# problems_link: List["Problem"] = Relationship(
# back_populates="problem_problem_set_links",
# # link_model=ProblemProblemSetLink,
# sa_relationship_kwargs={
# "secondary": ProblemProblemSetLink,
# "order_by": "ProblemProblemSetLink.position",
# "collection_class": ordering_list("position"),
# },
# )
# maintain the order of many to many relationship
problem_problem_set_links: List[ProblemProblemSetLink] = Relationship(
back_populates="problem_set",
sa_relationship_kwargs={
"order_by": "ProblemProblemSetLink.position",
"collection_class": ordering_list("position"),
},
)
problems: List["Problem"] = Relationship(
back_populates="problem_sets",
link_model=ProblemProblemSetLink,
sa_relationship_kwargs={
"order_by": "ProblemProblemSetLink.position",
},
)
records: List["Record"] = Relationship(back_populates="problem_set")
async def operate_problem(
self, problem: "Problem", operation: Operation, position: Optional[int] = None
) -> None:
assert problem.domain_id == self.domain_id
link = await ProblemProblemSetLink.get_or_none(
problem_set_id=self.id, problem_id=problem.id
)
if operation == Operation.Create:
if link is not None:
raise BizError(ErrorCode.IntegrityError, "problem already added")
link = ProblemProblemSetLink(problem_set_id=self.id, problem_id=problem.id)
else:
if link is None:
raise BizError(ErrorCode.IntegrityError, "problem not added")
if operation == Operation.Read:
return
if operation in (Operation.Update, Operation.Delete):
self.problem_problem_set_links.remove(link)
if operation in (Operation.Create, Operation.Update):
if position is None:
self.problem_problem_set_links.append(link)
else:
self.problem_problem_set_links.insert(position, link)
if operation == Operation.Delete:
await link.delete_model(commit=False)
await self.save_model()
event.listen(ProblemSet, "before_insert", url_pre_save)
event.listen(ProblemSet, "before_update", url_pre_save)
|
[
"sqlmodel.Relationship"
] |
[((3592, 3647), 'sqlalchemy.event.listen', 'event.listen', (['ProblemSet', '"""before_insert"""', 'url_pre_save'], {}), "(ProblemSet, 'before_insert', url_pre_save)\n", (3604, 3647), False, 'from sqlalchemy import event\n'), ((3648, 3703), 'sqlalchemy.event.listen', 'event.listen', (['ProblemSet', '"""before_update"""', 'url_pre_save'], {}), "(ProblemSet, 'before_update', url_pre_save)\n", (3660, 3703), False, 'from sqlalchemy import event\n'), ((1038, 1081), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_sets"""'}), "(back_populates='problem_sets')\n", (1050, 1081), False, 'from sqlmodel import Field, Relationship\n'), ((1295, 1344), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""owned_problem_sets"""'}), "(back_populates='owned_problem_sets')\n", (1307, 1344), False, 'from sqlmodel import Field, Relationship\n'), ((2094, 2251), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_sets"""', 'link_model': 'ProblemProblemSetLink', 'sa_relationship_kwargs': "{'order_by': 'ProblemProblemSetLink.position'}"}), "(back_populates='problem_sets', link_model=\n ProblemProblemSetLink, sa_relationship_kwargs={'order_by':\n 'ProblemProblemSetLink.position'})\n", (2106, 2251), False, 'from sqlmodel import Field, Relationship\n'), ((2327, 2369), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_set"""'}), "(back_populates='problem_set')\n", (2339, 2369), False, 'from sqlmodel import Field, Relationship\n'), ((825, 861), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""url"""'], {}), "('domain_id', 'url')\n", (841, 861), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((2576, 2661), 'joj.horse.models.link_tables.ProblemProblemSetLink.get_or_none', 'ProblemProblemSetLink.get_or_none', ([], {'problem_set_id': 'self.id', 'problem_id': 'problem.id'}), '(problem_set_id=self.id, problem_id=problem.id\n )\n', (2609, 2661), False, 'from joj.horse.models.link_tables import ProblemProblemSetLink\n'), ((2855, 2923), 'joj.horse.models.link_tables.ProblemProblemSetLink', 'ProblemProblemSetLink', ([], {'problem_set_id': 'self.id', 'problem_id': 'problem.id'}), '(problem_set_id=self.id, problem_id=problem.id)\n', (2876, 2923), False, 'from joj.horse.models.link_tables import ProblemProblemSetLink\n'), ((938, 982), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""domains.id"""'], {'ondelete': '"""CASCADE"""'}), "('domains.id', ondelete='CASCADE')\n", (948, 982), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((1177, 1220), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""users.id"""'], {'ondelete': '"""SET NULL"""'}), "('users.id', ondelete='SET NULL')\n", (1187, 1220), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((2017, 2042), 'sqlalchemy.ext.orderinglist.ordering_list', 'ordering_list', (['"""position"""'], {}), "('position')\n", (2030, 2042), False, 'from sqlalchemy.ext.orderinglist import ordering_list\n'), ((2776, 2835), 'joj.horse.utils.errors.BizError', 'BizError', (['ErrorCode.IntegrityError', '"""problem already added"""'], {}), "(ErrorCode.IntegrityError, 'problem already added')\n", (2784, 2835), False, 'from joj.horse.utils.errors import BizError, ErrorCode\n'), ((2989, 3044), 'joj.horse.utils.errors.BizError', 'BizError', (['ErrorCode.IntegrityError', '"""problem not added"""'], {}), "(ErrorCode.IntegrityError, 'problem not added')\n", (2997, 3044), False, 'from 
joj.horse.utils.errors import BizError, ErrorCode\n')]
|
"""Criação dos bancos de dados"""
from sqlmodel import SQLModel
from mitmirror.infra.entities import *
from .database_config import engine
def create_db():
"""Criando bancos de dados"""
base = SQLModel.metadata.create_all(engine)
return base
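# Illustrative usage sketch (not part of the original module): calling create_db() emits
# CREATE TABLE statements on `engine` for every SQLModel table imported from
# mitmirror.infra.entities.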
|
[
"sqlmodel.SQLModel.metadata.create_all"
] |
[((204, 240), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (232, 240), False, 'from sqlmodel import SQLModel\n')]
|
#!/usr/bin/env python
# -*-coding=utf-8-*-
from megengine.logger import get_logger
logger = get_logger(__name__)
try:
from tensorboardX import SummaryWriter
from tensorboardX.proto.attr_value_pb2 import AttrValue
from tensorboardX.proto.graph_pb2 import GraphDef
from tensorboardX.proto.node_def_pb2 import NodeDef
from tensorboardX.proto.plugin_text_pb2 import TextPluginData
from tensorboardX.proto.step_stats_pb2 import (
DeviceStepStats,
RunMetadata,
StepStats,
)
from tensorboardX.proto.summary_pb2 import Summary, SummaryMetadata
from tensorboardX.proto.tensor_pb2 import TensorProto
from tensorboardX.proto.tensor_shape_pb2 import TensorShapeProto
from tensorboardX.proto.versions_pb2 import VersionDef
except ImportError:
logger.error(
"TensorBoard and TensorboardX are required for visualize.", exc_info=True,
)
def tensor_shape_proto(shape):
"""Creates an object matching
https://github.com/tensorflow/tensorboard/blob/master/tensorboard/compat/proto/tensor_shape.proto
"""
return TensorShapeProto(dim=[TensorShapeProto.Dim(size=d) for d in shape])
def attr_value_proto(shape, dtype, attr):
"""Creates a dict of objects matching
https://github.com/tensorflow/tensorboard/blob/master/tensorboard/compat/proto/attr_value.proto
specifically designed for a NodeDef. The values have been
reverse engineered from standard TensorBoard logged data.
"""
attr_proto = {}
if shape is not None:
shapeproto = tensor_shape_proto(shape)
attr_proto["_output_shapes"] = AttrValue(
list=AttrValue.ListValue(shape=[shapeproto])
)
if dtype is not None:
attr_proto["dtype"] = AttrValue(s=dtype.encode(encoding="utf-8"))
if attr is not None:
for key in attr.keys():
attr_proto[key] = AttrValue(s=attr[key].encode(encoding="utf-8"))
return attr_proto
def node_proto(
name, op="UnSpecified", input=None, outputshape=None, dtype=None, attributes={}
):
"""Creates an object matching
https://github.com/tensorflow/tensorboard/blob/master/tensorboard/compat/proto/node_def.proto
"""
if input is None:
input = []
if not isinstance(input, list):
input = [input]
return NodeDef(
name=name.encode(encoding="utf_8"),
op=op,
input=input,
attr=attr_value_proto(outputshape, dtype, attributes),
)
def node(
name, op="UnSpecified", input=None, outputshape=None, dtype=None, attributes={}
):
return node_proto(name, op, input, outputshape, dtype, attributes)
def graph(node_list):
graph_def = GraphDef(node=node_list, versions=VersionDef(producer=22))
stepstats = RunMetadata(
step_stats=StepStats(dev_stats=[DeviceStepStats(device="/device:CPU:0")])
)
return graph_def, stepstats
def text(tag, text):
plugin_data = SummaryMetadata.PluginData(
plugin_name="text", content=TextPluginData(version=0).SerializeToString()
)
smd = SummaryMetadata(plugin_data=plugin_data)
string_val = []
for item in text:
string_val.append(item.encode(encoding="utf_8"))
tensor = TensorProto(
dtype="DT_STRING",
string_val=string_val,
tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=len(text))]),
)
return Summary(value=[Summary.Value(tag=tag, metadata=smd, tensor=tensor)])
class NodeRaw:
def __init__(self, name, op, input, outputshape, dtype, attributes):
self.name = name
self.op = op
self.input = input
self.outputshape = outputshape
self.dtype = dtype
self.attributes = attributes
class SummaryWriterExtend(SummaryWriter):
def __init__(
self,
logdir=None,
comment="",
purge_step=None,
max_queue=10,
flush_secs=120,
filename_suffix="",
write_to_disk=True,
log_dir=None,
**kwargs
):
self.node_raw_dict = {}
super().__init__(
logdir,
comment,
purge_step,
max_queue,
flush_secs,
filename_suffix,
write_to_disk,
log_dir,
**kwargs,
)
def add_text(self, tag, text_string_list, global_step=None, walltime=None):
"""Add text data to summary.
Args:
tag (string): Data identifier
text_string_list (string list): String to save
global_step (int): Global step value to record
walltime (float): Optional override default walltime (time.time())
seconds after epoch of event
Examples::
# text can be divided into three levels by tag and global_step
from writer import SummaryWriterExtend
writer = SummaryWriterExtend()
writer.add_text('level1.0/level2.0', ['text0'], 0)
writer.add_text('level1.0/level2.0', ['text1'], 1)
writer.add_text('level1.0/level2.1', ['text2'])
writer.add_text('level1.1', ['text3'])
"""
self._get_file_writer().add_summary(
text(tag, text_string_list), global_step, walltime
)
def add_node_raw(
self,
name,
op="UnSpecified",
input=[],
outputshape=None,
dtype=None,
attributes={},
):
"""Add node raw datas that can help build graph.After add all nodes, call
add_graph_by_node_raw_list() to build graph and add graph data to summary.
Args:
name (string): opr name.
op (string): opr class name.
input (string list): input opr name.
outputshape (list): output shape.
dtype (string): output data dtype.
attributes (dict): attributes info.
Examples::
from writer import SummaryWriterExtend
writer = SummaryWriterExtend()
writer.add_node_raw('node1', 'opr1', outputshape=[6, 2, 3], dtype="float32", attributes={
"peak_size": "12MB", "mmory_alloc": "2MB, percent: 16.7%"})
writer.add_node_raw('node2', 'opr2', outputshape=[6, 2, 3], dtype="float32", input="node1", attributes={
"peak_size": "12MB", "mmory_alloc": "2MB, percent: 16.7%"})
writer.add_graph_by_node_raw_list()
"""
# self.node_raw_list.append(
# node(name, op, input, outputshape, dtype, attributes))
self.node_raw_dict[name] = NodeRaw(
name, op, input, outputshape, dtype, dict(attributes)
)
def add_node_raw_name_suffix(self, name, suffix):
"""Give node name suffix in order to finding this node by 'search nodes'
Args:
name (string): opr name.
            suffix (string): name suffix.
"""
old_name = self.node_raw_dict[name].name
new_name = old_name + suffix
# self.node_raw_dict[new_name] = self.node_raw_dict.pop(name)
self.node_raw_dict[name].name = new_name
for node_name, node in self.node_raw_dict.items():
node.input = [new_name if x == old_name else x for x in node.input]
def add_node_raw_attributes(self, name, attributes):
"""
Args:
name (string): opr name.
attributes (dict): attributes info that need to be added.
"""
for key, value in attributes.items():
self.node_raw_dict[name].attributes[key] = value
def add_graph_by_node_raw_list(self):
"""Build graph and add graph data to summary."""
node_raw_list = []
for key, value in self.node_raw_dict.items():
node_raw_list.append(
node(
value.name,
value.op,
value.input,
value.outputshape,
value.dtype,
value.attributes,
)
)
self._get_file_writer().add_graph(graph(node_raw_list))
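# Illustrative usage sketch (not part of the original module), mirroring the docstring
# examples above:
#
#   writer = SummaryWriterExtend()
#   writer.add_node_raw('node1', 'opr1', outputshape=[6, 2, 3], dtype="float32")
#   writer.add_node_raw('node2', 'opr2', input='node1', outputshape=[6, 2, 3],
#                       dtype="float32")
#   writer.add_graph_by_node_raw_list()   # builds the GraphDef and writes it to the log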
|
[
"megengine.logger.get_logger"
] |
[((94, 114), 'megengine.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (104, 114), False, 'from megengine.logger import get_logger\n'), ((3050, 3090), 'tensorboardX.proto.summary_pb2.SummaryMetadata', 'SummaryMetadata', ([], {'plugin_data': 'plugin_data'}), '(plugin_data=plugin_data)\n', (3065, 3090), False, 'from tensorboardX.proto.summary_pb2 import Summary, SummaryMetadata\n'), ((2709, 2732), 'tensorboardX.proto.versions_pb2.VersionDef', 'VersionDef', ([], {'producer': '(22)'}), '(producer=22)\n', (2719, 2732), False, 'from tensorboardX.proto.versions_pb2 import VersionDef\n'), ((1118, 1146), 'tensorboardX.proto.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': 'd'}), '(size=d)\n', (1138, 1146), False, 'from tensorboardX.proto.tensor_shape_pb2 import TensorShapeProto\n'), ((1642, 1681), 'tensorboardX.proto.attr_value_pb2.AttrValue.ListValue', 'AttrValue.ListValue', ([], {'shape': '[shapeproto]'}), '(shape=[shapeproto])\n', (1661, 1681), False, 'from tensorboardX.proto.attr_value_pb2 import AttrValue\n'), ((3390, 3441), 'tensorboardX.proto.summary_pb2.Summary.Value', 'Summary.Value', ([], {'tag': 'tag', 'metadata': 'smd', 'tensor': 'tensor'}), '(tag=tag, metadata=smd, tensor=tensor)\n', (3403, 3441), False, 'from tensorboardX.proto.summary_pb2 import Summary, SummaryMetadata\n'), ((2988, 3013), 'tensorboardX.proto.plugin_text_pb2.TextPluginData', 'TextPluginData', ([], {'version': '(0)'}), '(version=0)\n', (3002, 3013), False, 'from tensorboardX.proto.plugin_text_pb2 import TextPluginData\n'), ((2803, 2842), 'tensorboardX.proto.step_stats_pb2.DeviceStepStats', 'DeviceStepStats', ([], {'device': '"""/device:CPU:0"""'}), "(device='/device:CPU:0')\n", (2818, 2842), False, 'from tensorboardX.proto.step_stats_pb2 import DeviceStepStats, RunMetadata, StepStats\n')]
|
from sqlite3.dbapi2 import Timestamp, adapt
from typing import Optional
from sqlmodel import Field, SQLModel
from pydantic import validator
from datetime import datetime, date
class Rate(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="user.id")
client_id: int = Field(foreign_key="client.id")
valid_from: date
valid_to: date
amount: float # currency: EUR
created_at: datetime
updated_at: datetime
is_active: bool
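# Illustrative sketch (not part of the original module): a Rate row could be constructed as
#   Rate(user_id=1, client_id=1, valid_from=date(2022, 1, 1), valid_to=date(2022, 12, 31),
#        amount=80.0, created_at=datetime.utcnow(), updated_at=datetime.utcnow(),
#        is_active=True)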
|
[
"sqlmodel.Field"
] |
[((236, 273), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (241, 273), False, 'from sqlmodel import Field, SQLModel\n'), ((293, 321), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""user.id"""'}), "(foreign_key='user.id')\n", (298, 321), False, 'from sqlmodel import Field, SQLModel\n'), ((343, 373), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""client.id"""'}), "(foreign_key='client.id')\n", (348, 373), False, 'from sqlmodel import Field, SQLModel\n')]
|
# -*- coding: utf-8 -*-
from typing import List
from database import engine
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from models import Device, Project, Task, TaskWithProject
from sqlmodel import Session, SQLModel, select
# init fastapi
app: FastAPI = FastAPI()
# init CORS
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"])
# mount api endpoint
api: FastAPI = FastAPI(title="Scitizen")
app.mount("/api", api)
@app.on_event("startup")
def on_startup() -> None:
"""Setup event on app.
It is used to create the database and the tables.
"""
SQLModel.metadata.create_all(engine)
@api.post("/devices/{device_uuid}", response_model=Device)
def upsert_device(device_uuid: str, device: Device) -> Device:
"""Upsert a device.
    It is used to create a device in the database if it does not already exist,
else it is used to update the existing one.
Args:
device_uuid:
The uuid of the device to upsert.
device:
The device data.
Returns:
The upserted device.
"""
with Session(engine) as session:
# check if the device exists
statement = select(Device).where(Device.uuid == device_uuid)
result = session.exec(statement).first()
# if not, create it
if result is None:
result = device
# sync the data
for key, value in device.dict(exclude_unset=True).items():
setattr(result, key, value)
# persist the data to the database
session.add(result)
session.commit()
session.refresh(result)
return result
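# Illustrative request sketch (not part of the original app): with the sub-application
# mounted under /api, the upsert above is reachable roughly as
#   POST /api/devices/<device_uuid>  with a Device JSON body,
# creating the row on the first call and updating it on subsequent calls.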
@api.get("/devices/{device_uuid}", response_model=Device)
def select_device(device_uuid: str):
"""Select a device.
It is used to get a device data from the database.
Args:
device_uuid:
The uuid of the device to get the data from.
Returns:
The device data.
"""
with Session(engine) as session:
statement = select(Device).where(Device.uuid == device_uuid)
result = session.exec(statement).first()
return result
@api.get("/devices", response_model=List[Device])
def select_devices():
"""Select all devices.
It is used to get all devices data from the database.
Returns:
All devices data.
"""
with Session(engine) as session:
statement = select(Device)
results = session.exec(statement).all()
return results
@api.post("/projects/{project_uuid}", response_model=Project)
def upsert_project(project_uuid: str, project: Project) -> Project:
"""Upsert a project.
    It is used to create a project in the database if it does not already exist,
else it is used to update the existing one.
Args:
project_uuid:
The uuid of the project to upsert.
project:
The project data.
Returns:
The upserted project.
"""
with Session(engine) as session:
# check if the project exists
statement = select(Project).where(Project.uuid == project_uuid)
result = session.exec(statement).first()
# if not, create it
if result is None:
result = project
# sync the data
for key, value in project.dict(exclude_unset=True).items():
setattr(result, key, value)
# persist the data to the database
session.add(result)
session.commit()
session.refresh(result)
return result
@api.get("/projects/{project_uuid}", response_model=Project)
def select_project(project_uuid: str):
"""Select a project.
It is used to get a project data from the database.
Args:
project_uuid:
The uuid of the project to get the data from.
Returns:
The project data.
"""
with Session(engine) as session:
statement = select(Project).where(Project.uuid == project_uuid)
result = session.exec(statement).first()
return result
@api.get("/projects", response_model=List[Project])
def select_projects():
"""Select all projects.
It is used to get all projects data from the database.
Returns:
All projects data.
"""
with Session(engine) as session:
statement = select(Project)
results = session.exec(statement).all()
return results
@api.post("/tasks/{task_uuid}", response_model=Task)
def upsert_task(task_uuid: str, task: Task) -> Task:
"""Upsert a task.
    It is used to create a task in the database if it does not already exist,
else it is used to update the existing one.
Args:
task_uuid:
The uuid of the task to upsert.
task:
The task data.
Returns:
The upserted task.
"""
with Session(engine) as session:
# check if the task exists
statement = select(Task).where(Task.uuid == task_uuid)
result = session.exec(statement).first()
# if not, create it
if result is None:
result = task
# sync the data
for key, value in task.dict(exclude_unset=True).items():
setattr(result, key, value)
# persist the data to the database
session.add(result)
session.commit()
session.refresh(result)
return result
@api.get("/tasks/{task_uuid}", response_model=TaskWithProject)
def select_task(task_uuid: str):
"""Select a task.
It is used to get a task data from the database.
Args:
task_uuid:
The uuid of the task to get the data from.
Returns:
The task data.
"""
with Session(engine) as session:
statement = select(Task, Project).join(Project).where(Task.uuid == task_uuid)
task, project = session.exec(statement).first() # type: ignore
result = TaskWithProject()
for key, value in task.dict().items():
setattr(result, key, value)
result.project = project
return result
@api.get("/tasks", response_model=List[TaskWithProject])
def select_tasks():
"""Select all tasks.
It is used to get all tasks data from the database.
Returns:
All tasks data.
"""
with Session(engine) as session:
statement = select(Task, Project).join(Project)
results = session.exec(statement).all()
tasks = []
for task, project in results:
result = TaskWithProject()
for key, value in task.dict().items():
setattr(result, key, value)
result.project = project
tasks.append(result)
return tasks
@api.put("/tasks/clean")
def clean_tasks():
"""Clean all tasks.
It is used to run maintenance queries on the database in order to keep
consistent data on tasks.
"""
with Session(engine) as session:
with open("./data/clean_failed_tasks.sql", "r", encoding="utf-8") as stream:
statement = stream.read()
session.exec(statement)
with open("./data/clean_succeeded_tasks.sql", "r", encoding="utf-8") as stream:
statement = stream.read()
session.exec(statement)
session.commit()
|
[
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.select"
] |
[((293, 302), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (300, 302), False, 'from fastapi import FastAPI\n'), ((430, 455), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Scitizen"""'}), "(title='Scitizen')\n", (437, 455), False, 'from fastapi import FastAPI\n'), ((627, 663), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (655, 663), False, 'from sqlmodel import Session, SQLModel, select\n'), ((1112, 1127), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1119, 1127), False, 'from sqlmodel import Session, SQLModel, select\n'), ((1978, 1993), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1985, 1993), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2362, 2377), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2369, 2377), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2410, 2424), 'sqlmodel.select', 'select', (['Device'], {}), '(Device)\n', (2416, 2424), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2959, 2974), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2966, 2974), False, 'from sqlmodel import Session, SQLModel, select\n'), ((3841, 3856), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3848, 3856), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4234, 4249), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4241, 4249), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4282, 4297), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (4288, 4297), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4787, 4802), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4794, 4802), False, 'from sqlmodel import Session, SQLModel, select\n'), ((5632, 5647), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (5639, 5647), False, 'from sqlmodel import Session, SQLModel, select\n'), ((5835, 5852), 'models.TaskWithProject', 'TaskWithProject', ([], {}), '()\n', (5850, 5852), False, 'from models import Device, Project, Task, TaskWithProject\n'), ((6210, 6225), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6217, 6225), False, 'from sqlmodel import Session, SQLModel, select\n'), ((6818, 6833), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6825, 6833), False, 'from sqlmodel import Session, SQLModel, select\n'), ((6420, 6437), 'models.TaskWithProject', 'TaskWithProject', ([], {}), '()\n', (6435, 6437), False, 'from models import Device, Project, Task, TaskWithProject\n'), ((1197, 1211), 'sqlmodel.select', 'select', (['Device'], {}), '(Device)\n', (1203, 1211), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2026, 2040), 'sqlmodel.select', 'select', (['Device'], {}), '(Device)\n', (2032, 2040), False, 'from sqlmodel import Session, SQLModel, select\n'), ((3045, 3060), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (3051, 3060), False, 'from sqlmodel import Session, SQLModel, select\n'), ((3889, 3904), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (3895, 3904), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4870, 4882), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (4876, 4882), False, 'from sqlmodel import Session, SQLModel, select\n'), ((6258, 6279), 'sqlmodel.select', 'select', (['Task', 'Project'], {}), '(Task, Project)\n', (6264, 6279), False, 'from sqlmodel import Session, SQLModel, select\n'), ((5680, 5701), 
'sqlmodel.select', 'select', (['Task', 'Project'], {}), '(Task, Project)\n', (5686, 5701), False, 'from sqlmodel import Session, SQLModel, select\n')]
|
#! /usr/bin/env python3
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import argparse
import logging
import re
from collections import namedtuple
import numpy as np
from tqdm import tqdm
import megengine as mge
from megengine.core.tensor.dtype import is_quantize
from megengine.logger import _imperative_rt_logger, get_logger, set_mgb_log_level
from megengine.utils.module_stats import (
enable_receptive_field,
get_activation_stats,
get_op_stats,
get_param_stats,
print_activations_stats,
print_op_stats,
print_param_stats,
print_summary,
sizeof_fmt,
sum_activations_stats,
sum_op_stats,
sum_param_stats,
)
from megengine.utils.network import Network
logger = get_logger(__name__)
def visualize(
model_path: str,
log_path: str,
input: np.ndarray = None,
inp_dict: dict = None,
cal_params: bool = True,
cal_flops: bool = True,
cal_activations: bool = True,
logging_to_stdout: bool = True,
bar_length_max: int = 20,
):
r"""Load megengine dumped model and visualize graph structure with tensorboard log files.
Can also record and print model's statistics like :func:`~.module_stats`
Args:
model_path: dir path for megengine dumped model.
log_path: dir path for tensorboard graph log.
input: user defined input data for running model and calculating stats, alternative with inp_dict, used when the model has only one input.
inp_dict: input dict for running model and calculating stats, alternative with input, used when the model has more than one input. When both input and inp_dict are None, a random input will be used.
cal_params: whether calculate and record params size.
cal_flops: whether calculate and record op flops.
cal_activations: whether calculate and record op activations.
logging_to_stdout: whether print all calculated statistic details.
bar_length_max: size of bar indicating max flops or parameter size in net stats.
"""
if log_path:
try:
from tensorboard.compat.proto.attr_value_pb2 import AttrValue
from tensorboard.compat.proto.config_pb2 import RunMetadata
from tensorboard.compat.proto.graph_pb2 import GraphDef
from tensorboard.compat.proto.node_def_pb2 import NodeDef
from tensorboard.compat.proto.step_stats_pb2 import (
AllocatorMemoryUsed,
DeviceStepStats,
NodeExecStats,
StepStats,
)
from tensorboard.compat.proto.tensor_shape_pb2 import TensorShapeProto
from tensorboard.compat.proto.versions_pb2 import VersionDef
from tensorboardX import SummaryWriter
except ImportError:
logger.error(
"TensorBoard and TensorboardX are required for visualize.",
exc_info=True,
)
return
enable_receptive_field()
graph = Network.load(model_path)
graph.reset_batch_size(1)
has_input = False
if input is not None or inp_dict is not None:
has_input = True
repl_dict = {}
inp_vars = graph.input_vars
if inp_dict is not None:
assert len(inp_dict) == len(
inp_vars
), "Inputs are not sufficient for calculation."
for v in inp_vars:
new_input = graph.make_const(inp_dict[v.name], name=v.name)
repl_dict[v] = new_input
else:
            assert len(inp_vars) == 1, "The graph has more than one input; please provide inp_dict."
inp_var = inp_vars[0]
repl_dict[inp_var] = graph.make_const(input, name=inp_var.name)
graph.replace_vars(repl_dict=repl_dict)
graph._compile()
def process_name(name):
# nodes that start with point or contain float const will lead to display bug
if not re.match(r"^[+-]?\d*\.\d*", name):
name = name.replace(".", "/")
return name.encode(encoding="utf-8")
summary = [["item", "value"]]
node_list = []
flops_list = []
params_list = []
activations_list = []
total_stats = namedtuple(
"total_stats", ["param_size", "param_dims", "flops", "act_size", "act_dims"]
)
stats_details = namedtuple("module_stats", ["params", "flops", "activations"])
for node in tqdm(graph.all_oprs):
if hasattr(node, "output_idx"):
node_oup = node.outputs[node.output_idx]
else:
if len(node.outputs) != 1:
logger.warning(
"OpNode {} has more than one output and not has 'output_idx' attr.".format(
node
)
)
node_oup = node.outputs[0]
inp_list = [process_name(var.owner.name) for var in node.inputs]
if log_path:
# detail format see tensorboard/compat/proto/attr_value.proto
attr = {
"_output_shapes": AttrValue(
list=AttrValue.ListValue(
shape=[
TensorShapeProto(
dim=[
TensorShapeProto.Dim(size=d) for d in node_oup.shape
]
)
]
)
),
"params": AttrValue(s=str(node.params).encode(encoding="utf-8")),
"dtype": AttrValue(s=str(node_oup.dtype).encode(encoding="utf-8")),
}
if cal_flops:
flops_stats = get_op_stats(node, node.inputs, node.outputs)
if flops_stats is not None:
# add op flops attr
if log_path and hasattr(flops_stats, "flops_num"):
attr["flops"] = AttrValue(
s=sizeof_fmt(flops_stats["flops"]).encode(encoding="utf-8")
)
flops_stats["name"] = node.name
flops_stats["class_name"] = node.type
flops_list.append(flops_stats)
if cal_activations:
acts = get_activation_stats(node_oup, has_input=has_input)
acts["name"] = node.name
acts["class_name"] = node.type
activations_list.append(acts)
if cal_params:
if node.type == "ImmutableTensor":
param_stats = get_param_stats(node_oup)
# add tensor size attr
if log_path:
attr["size"] = AttrValue(
s=sizeof_fmt(param_stats["size"]).encode(encoding="utf-8")
)
param_stats["name"] = node.name
params_list.append(param_stats)
if log_path:
node_list.append(
NodeDef(
name=process_name(node.name),
op=node.type,
input=inp_list,
attr=attr,
)
)
# summary
extra_info = {
"#ops": len(graph.all_oprs),
"#params": len(params_list),
}
(
total_flops,
total_param_dims,
total_param_size,
total_act_dims,
total_act_size,
) = (0, 0, 0, 0, 0)
if cal_params:
total_param_dims, total_param_size, params_list = sum_param_stats(
params_list, bar_length_max
)
extra_info["total_param_dims"] = sizeof_fmt(total_param_dims, suffix="")
extra_info["total_param_size"] = sizeof_fmt(total_param_size)
if logging_to_stdout:
print_param_stats(params_list)
if cal_flops:
total_flops, flops_list = sum_op_stats(flops_list, bar_length_max)
extra_info["total_flops"] = sizeof_fmt(total_flops, suffix="OPs")
if logging_to_stdout:
print_op_stats(flops_list)
if cal_activations:
total_act_dims, total_act_size, activations_list = sum_activations_stats(
activations_list, bar_length_max
)
extra_info["total_act_dims"] = sizeof_fmt(total_act_dims, suffix="")
extra_info["total_act_size"] = sizeof_fmt(total_act_size)
if logging_to_stdout:
print_activations_stats(activations_list, has_input=has_input)
if cal_flops and cal_params:
extra_info["flops/param_size"] = "{:3.3f}".format(
total_flops / total_param_size
)
if log_path:
graph_def = GraphDef(node=node_list, versions=VersionDef(producer=22))
device = "/device:CPU:0"
stepstats = RunMetadata(
step_stats=StepStats(dev_stats=[DeviceStepStats(device=device)])
)
writer = SummaryWriter(log_path)
writer._get_file_writer().add_graph((graph_def, stepstats))
print_summary(**extra_info)
return (
total_stats(
param_size=total_param_size,
param_dims=total_param_dims,
flops=total_flops,
act_size=total_act_size,
act_dims=total_act_dims,
),
stats_details(
params=params_list, flops=flops_list, activations=activations_list
),
)
def main():
parser = argparse.ArgumentParser(
description="load a megengine dumped model and export log file for tensorboard visualization.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("model_path", help="dumped model path.")
parser.add_argument("--log_path", help="tensorboard log path.")
parser.add_argument(
"--load_input_data",
help="load input data from pickle file; it should be a numpy array or a dict of numpy array",
)
parser.add_argument(
"--bar_length_max",
type=int,
default=20,
help="size of bar indicating max flops or parameter size in net stats.",
)
parser.add_argument(
"--cal_params",
action="store_true",
help="whether calculate and record params size.",
)
parser.add_argument(
"--cal_flops",
action="store_true",
help="whether calculate and record op flops.",
)
parser.add_argument(
"--cal_activations",
action="store_true",
help="whether calculate and record op activations.",
)
parser.add_argument(
"--logging_to_stdout",
action="store_true",
help="whether print all calculated statistic details.",
)
parser.add_argument(
"--all",
action="store_true",
help="whether print all stats. Tensorboard logs will be placed in './log' if not specified.",
)
args = parser.parse_args()
if args.load_input_data:
logger.info("load data from {}".format(args.load_input_data))
data = mge.load(args.load_input_data)
if isinstance(data, dict):
for v in data.values():
assert isinstance(
v, np.ndarray
), "data should provide ndarray; got {} instead".format(v)
args.inp_dict = data
elif isinstance(data, np.ndarray):
args.input = data
else:
logger.error("input data should be a numpy array or a dict of numpy array")
if args.all:
args.cal_params = True
args.cal_flops = True
args.cal_activations = True
args.logging_to_stdout = True
if not args.log_path:
args.log_path = "./log"
kwargs = vars(args)
kwargs.pop("all")
kwargs.pop("load_input_data")
visualize(**kwargs)
if __name__ == "__main__":
main()
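# Hypothetical invocation, assuming this script is saved as network_visualize.py and a
# model graph was previously dumped to model.mge (both file names are placeholders):
#   python network_visualize.py model.mge --all
# --all turns on every statistic and, since --log_path is not given, writes the
# TensorBoard log to ./log, which can then be browsed with `tensorboard --logdir ./log`.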
|
[
"megengine.utils.module_stats.sum_op_stats",
"megengine.logger.get_logger",
"megengine.utils.module_stats.print_param_stats",
"megengine.utils.module_stats.sum_param_stats",
"megengine.utils.module_stats.get_param_stats",
"megengine.utils.module_stats.print_op_stats",
"megengine.utils.network.Network.load",
"megengine.utils.module_stats.get_op_stats",
"megengine.utils.module_stats.sum_activations_stats",
"megengine.utils.module_stats.sizeof_fmt",
"megengine.utils.module_stats.get_activation_stats",
"megengine.utils.module_stats.print_summary",
"megengine.load",
"megengine.utils.module_stats.print_activations_stats",
"megengine.utils.module_stats.enable_receptive_field"
] |
[((1019, 1039), 'megengine.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (1029, 1039), False, 'from megengine.logger import _imperative_rt_logger, get_logger, set_mgb_log_level\n'), ((3458, 3482), 'megengine.utils.module_stats.enable_receptive_field', 'enable_receptive_field', ([], {}), '()\n', (3480, 3482), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((3496, 3520), 'megengine.utils.network.Network.load', 'Network.load', (['model_path'], {}), '(model_path)\n', (3508, 3520), False, 'from megengine.utils.network import Network\n'), ((4678, 4770), 'collections.namedtuple', 'namedtuple', (['"""total_stats"""', "['param_size', 'param_dims', 'flops', 'act_size', 'act_dims']"], {}), "('total_stats', ['param_size', 'param_dims', 'flops', 'act_size',\n 'act_dims'])\n", (4688, 4770), False, 'from collections import namedtuple\n'), ((4801, 4863), 'collections.namedtuple', 'namedtuple', (['"""module_stats"""', "['params', 'flops', 'activations']"], {}), "('module_stats', ['params', 'flops', 'activations'])\n", (4811, 4863), False, 'from collections import namedtuple\n'), ((4881, 4901), 'tqdm.tqdm', 'tqdm', (['graph.all_oprs'], {}), '(graph.all_oprs)\n', (4885, 4901), False, 'from tqdm import tqdm\n'), ((9338, 9365), 'megengine.utils.module_stats.print_summary', 'print_summary', ([], {}), '(**extra_info)\n', (9351, 9365), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((9745, 9930), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""load a megengine dumped model and export log file for tensorboard visualization."""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description=\n 'load a megengine dumped model and export log file for tensorboard visualization.'\n , formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (9768, 9930), False, 'import argparse\n'), ((7889, 7933), 'megengine.utils.module_stats.sum_param_stats', 'sum_param_stats', (['params_list', 'bar_length_max'], {}), '(params_list, bar_length_max)\n', (7904, 7933), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((7997, 8036), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (['total_param_dims'], {'suffix': '""""""'}), "(total_param_dims, suffix='')\n", (8007, 8036), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8078, 8106), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (['total_param_size'], {}), '(total_param_size)\n', (8088, 8106), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, 
sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8233, 8273), 'megengine.utils.module_stats.sum_op_stats', 'sum_op_stats', (['flops_list', 'bar_length_max'], {}), '(flops_list, bar_length_max)\n', (8245, 8273), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8310, 8347), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (['total_flops'], {'suffix': '"""OPs"""'}), "(total_flops, suffix='OPs')\n", (8320, 8347), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8501, 8556), 'megengine.utils.module_stats.sum_activations_stats', 'sum_activations_stats', (['activations_list', 'bar_length_max'], {}), '(activations_list, bar_length_max)\n', (8522, 8556), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8618, 8655), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (['total_act_dims'], {'suffix': '""""""'}), "(total_act_dims, suffix='')\n", (8628, 8655), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8695, 8721), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (['total_act_size'], {}), '(total_act_size)\n', (8705, 8721), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((9241, 9264), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['log_path'], {}), '(log_path)\n', (9254, 9264), False, 'from tensorboardX import SummaryWriter\n'), ((11326, 11356), 'megengine.load', 'mge.load', (['args.load_input_data'], {}), '(args.load_input_data)\n', (11334, 11356), True, 'import megengine as mge\n'), ((4417, 4452), 're.match', 're.match', (['"""^[+-]?\\\\d*\\\\.\\\\d*"""', 'name'], {}), "('^[+-]?\\\\d*\\\\.\\\\d*', name)\n", (4425, 4452), False, 'import re\n'), ((6131, 6176), 'megengine.utils.module_stats.get_op_stats', 'get_op_stats', (['node', 'node.inputs', 'node.outputs'], {}), '(node, node.inputs, node.outputs)\n', (6143, 6176), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((6670, 6721), 'megengine.utils.module_stats.get_activation_stats', 'get_activation_stats', (['node_oup'], {'has_input': 'has_input'}), '(node_oup, has_input=has_input)\n', (6690, 6721), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, 
sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8149, 8179), 'megengine.utils.module_stats.print_param_stats', 'print_param_stats', (['params_list'], {}), '(params_list)\n', (8166, 8179), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8390, 8416), 'megengine.utils.module_stats.print_op_stats', 'print_op_stats', (['flops_list'], {}), '(flops_list)\n', (8404, 8416), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((8764, 8826), 'megengine.utils.module_stats.print_activations_stats', 'print_activations_stats', (['activations_list'], {'has_input': 'has_input'}), '(activations_list, has_input=has_input)\n', (8787, 8826), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((6945, 6970), 'megengine.utils.module_stats.get_param_stats', 'get_param_stats', (['node_oup'], {}), '(node_oup)\n', (6960, 6970), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((9045, 9068), 'tensorboard.compat.proto.versions_pb2.VersionDef', 'VersionDef', ([], {'producer': '(22)'}), '(producer=22)\n', (9055, 9068), False, 'from tensorboard.compat.proto.versions_pb2 import VersionDef\n'), ((9181, 9211), 'tensorboard.compat.proto.step_stats_pb2.DeviceStepStats', 'DeviceStepStats', ([], {'device': 'device'}), '(device=device)\n', (9196, 9211), False, 'from tensorboard.compat.proto.step_stats_pb2 import AllocatorMemoryUsed, DeviceStepStats, NodeExecStats, StepStats\n'), ((6393, 6425), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (["flops_stats['flops']"], {}), "(flops_stats['flops'])\n", (6403, 6425), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((7111, 7142), 'megengine.utils.module_stats.sizeof_fmt', 'sizeof_fmt', (["param_stats['size']"], {}), "(param_stats['size'])\n", (7121, 7142), False, 'from megengine.utils.module_stats import enable_receptive_field, get_activation_stats, get_op_stats, get_param_stats, print_activations_stats, print_op_stats, print_param_stats, print_summary, sizeof_fmt, sum_activations_stats, sum_op_stats, sum_param_stats\n'), ((5718, 5746), 'tensorboard.compat.proto.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': 'd'}), '(size=d)\n', (5738, 5746), False, 'from tensorboard.compat.proto.tensor_shape_pb2 import TensorShapeProto\n')]
|
"""
Global interpolation functions.
"""
import time
import numpy as nm
from sfepy.base.base import output, get_default_attr
from sfepy.discrete.fem.mesh import make_inverse_connectivity
from sfepy.discrete.fem.extmods.bases import find_ref_coors
def get_ref_coors(field, coors, strategy='kdtree', close_limit=0.1, cache=None,
verbose=True):
"""
Get reference element coordinates and elements corresponding to given
physical coordinates.
Parameters
----------
field : Field instance
The field defining the approximation.
coors : array
The physical coordinates.
strategy : str, optional
The strategy for finding the elements that contain the
coordinates. Only 'kdtree' is supported for the moment.
close_limit : float, optional
The maximum limit distance of a point from the closest
element allowed for extrapolation.
cache : Struct, optional
To speed up a sequence of evaluations, the field mesh, the inverse
connectivity of the field mesh and the KDTree instance can be cached as
`cache.mesh`, `cache.offsets`, `cache.iconn` and
`cache.kdtree`. Optionally, the cache can also contain the reference
element coordinates as `cache.ref_coors`, `cache.cells` and
`cache.status`, if the evaluation occurs in the same coordinates
repeatedly. In that case the KDTree related data are ignored.
verbose : bool
If False, reduce verbosity.
Returns
-------
ref_coors : array
The reference coordinates.
cells : array
The cell indices corresponding to the reference coordinates.
status : array
The status: 0 is success, 1 is extrapolation within `close_limit`, 2 is
extrapolation outside `close_limit`, 3 is failure.
"""
ref_coors = get_default_attr(cache, 'ref_coors', None)
if ref_coors is None:
mesh = get_default_attr(cache, 'mesh', None)
if mesh is None:
mesh = field.create_mesh(extra_nodes=False)
scoors = mesh.coors
output('reference field: %d vertices' % scoors.shape[0],
verbose=verbose)
iconn = get_default_attr(cache, 'iconn', None)
if iconn is None:
offsets, iconn = make_inverse_connectivity(mesh.conns,
mesh.n_nod,
ret_offsets=True)
ii = nm.where(offsets[1:] == offsets[:-1])[0]
if len(ii):
raise ValueError('some vertices not in any element! (%s)'
% ii)
else:
offsets = cache.offsets
if strategy == 'kdtree':
kdtree = get_default_attr(cache, 'kdtree', None)
if kdtree is None:
from scipy.spatial import cKDTree as KDTree
tt = time.clock()
kdtree = KDTree(scoors)
output('kdtree: %f s' % (time.clock()-tt), verbose=verbose)
tt = time.clock()
ics = kdtree.query(coors)[1]
output('kdtree query: %f s' % (time.clock()-tt), verbose=verbose)
tt = time.clock()
ics = nm.asarray(ics, dtype=nm.int32)
vertex_coorss, nodess, mtx_is = [], [], []
conns = []
for ig, ap in field.aps.iteritems():
ps = ap.interp.gel.interp.poly_spaces['v']
vertex_coorss.append(ps.geometry.coors)
nodess.append(ps.nodes)
mtx_is.append(ps.get_mtx_i())
conns.append(mesh.conns[ig].copy())
# Get reference element coordinates corresponding to
# destination coordinates.
ref_coors = nm.empty_like(coors)
cells = nm.empty((coors.shape[0], 2), dtype=nm.int32)
status = nm.empty((coors.shape[0],), dtype=nm.int32)
find_ref_coors(ref_coors, cells, status, coors,
ics, offsets, iconn,
scoors, conns,
vertex_coorss, nodess, mtx_is,
1, close_limit, 1e-15, 100, 1e-8)
output('ref. coordinates: %f s' % (time.clock()-tt),
verbose=verbose)
elif strategy == 'crawl':
raise NotImplementedError
else:
raise ValueError('unknown search strategy! (%s)' % strategy)
else:
ref_coors = cache.ref_coors
cells = cache.cells
status = cache.status
return ref_coors, cells, status
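# Minimal caching sketch (illustrative only; `field` and `coors` are assumed to come
# from an existing sfepy problem, and the attribute names follow the docstring above):
#
#   from sfepy.base.base import Struct
#   cache = Struct(mesh=None, offsets=None, iconn=None, kdtree=None,
#                  ref_coors=None, cells=None, status=None)
#   ref_coors, cells, status = get_ref_coors(field, coors, cache=cache)
#
# Attributes left as None are simply recomputed, so a caller can fill in only the
# pieces it already has (e.g. a prebuilt cache.kdtree) to speed up repeated calls.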
|
[
"sfepy.base.base.output",
"sfepy.discrete.fem.extmods.bases.find_ref_coors",
"sfepy.base.base.get_default_attr",
"sfepy.discrete.fem.mesh.make_inverse_connectivity"
] |
[((1856, 1898), 'sfepy.base.base.get_default_attr', 'get_default_attr', (['cache', '"""ref_coors"""', 'None'], {}), "(cache, 'ref_coors', None)\n", (1872, 1898), False, 'from sfepy.base.base import output, get_default_attr\n'), ((1940, 1977), 'sfepy.base.base.get_default_attr', 'get_default_attr', (['cache', '"""mesh"""', 'None'], {}), "(cache, 'mesh', None)\n", (1956, 1977), False, 'from sfepy.base.base import output, get_default_attr\n'), ((2096, 2169), 'sfepy.base.base.output', 'output', (["('reference field: %d vertices' % scoors.shape[0])"], {'verbose': 'verbose'}), "('reference field: %d vertices' % scoors.shape[0], verbose=verbose)\n", (2102, 2169), False, 'from sfepy.base.base import output, get_default_attr\n'), ((2202, 2240), 'sfepy.base.base.get_default_attr', 'get_default_attr', (['cache', '"""iconn"""', 'None'], {}), "(cache, 'iconn', None)\n", (2218, 2240), False, 'from sfepy.base.base import output, get_default_attr\n'), ((2296, 2363), 'sfepy.discrete.fem.mesh.make_inverse_connectivity', 'make_inverse_connectivity', (['mesh.conns', 'mesh.n_nod'], {'ret_offsets': '(True)'}), '(mesh.conns, mesh.n_nod, ret_offsets=True)\n', (2321, 2363), False, 'from sfepy.discrete.fem.mesh import make_inverse_connectivity\n'), ((2776, 2815), 'sfepy.base.base.get_default_attr', 'get_default_attr', (['cache', '"""kdtree"""', 'None'], {}), "(cache, 'kdtree', None)\n", (2792, 2815), False, 'from sfepy.base.base import output, get_default_attr\n'), ((3076, 3088), 'time.clock', 'time.clock', ([], {}), '()\n', (3086, 3088), False, 'import time\n'), ((3226, 3238), 'time.clock', 'time.clock', ([], {}), '()\n', (3236, 3238), False, 'import time\n'), ((3257, 3288), 'numpy.asarray', 'nm.asarray', (['ics'], {'dtype': 'nm.int32'}), '(ics, dtype=nm.int32)\n', (3267, 3288), True, 'import numpy as nm\n'), ((3801, 3821), 'numpy.empty_like', 'nm.empty_like', (['coors'], {}), '(coors)\n', (3814, 3821), True, 'import numpy as nm\n'), ((3842, 3887), 'numpy.empty', 'nm.empty', (['(coors.shape[0], 2)'], {'dtype': 'nm.int32'}), '((coors.shape[0], 2), dtype=nm.int32)\n', (3850, 3887), True, 'import numpy as nm\n'), ((3909, 3952), 'numpy.empty', 'nm.empty', (['(coors.shape[0],)'], {'dtype': 'nm.int32'}), '((coors.shape[0],), dtype=nm.int32)\n', (3917, 3952), True, 'import numpy as nm\n'), ((3966, 4119), 'sfepy.discrete.fem.extmods.bases.find_ref_coors', 'find_ref_coors', (['ref_coors', 'cells', 'status', 'coors', 'ics', 'offsets', 'iconn', 'scoors', 'conns', 'vertex_coorss', 'nodess', 'mtx_is', '(1)', 'close_limit', '(1e-15)', '(100)', '(1e-08)'], {}), '(ref_coors, cells, status, coors, ics, offsets, iconn, scoors,\n conns, vertex_coorss, nodess, mtx_is, 1, close_limit, 1e-15, 100, 1e-08)\n', (3980, 4119), False, 'from sfepy.discrete.fem.extmods.bases import find_ref_coors\n'), ((2492, 2529), 'numpy.where', 'nm.where', (['(offsets[1:] == offsets[:-1])'], {}), '(offsets[1:] == offsets[:-1])\n', (2500, 2529), True, 'import numpy as nm\n'), ((2929, 2941), 'time.clock', 'time.clock', ([], {}), '()\n', (2939, 2941), False, 'import time\n'), ((2967, 2981), 'scipy.spatial.cKDTree', 'KDTree', (['scoors'], {}), '(scoors)\n', (2973, 2981), True, 'from scipy.spatial import cKDTree as KDTree\n'), ((3173, 3185), 'time.clock', 'time.clock', ([], {}), '()\n', (3183, 3185), False, 'import time\n'), ((4258, 4270), 'time.clock', 'time.clock', ([], {}), '()\n', (4268, 4270), False, 'import time\n'), ((3023, 3035), 'time.clock', 'time.clock', ([], {}), '()\n', (3033, 3035), False, 'import time\n')]
|
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Response
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import func, select
from icon_governance.db import get_session
from icon_governance.models.delegations import Delegation
router = APIRouter()
@router.get("/governance/delegations/{address}")
async def get_delegations(
response: Response,
address: str,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of delegations."""
query = (
select(Delegation)
.where(Delegation.address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([func.count(Delegation.address)]).where(Delegation.address == address)
result_count = await session.execute(query_count)
total_count = str(result_count.scalars().all()[0])
response.headers["x-total-count"] = total_count
return delegations
@router.get("/governance/votes/{address}")
async def get_votes(
address: str,
response: Response,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of votes."""
query = (
select(Delegation)
.where(Delegation.prep_address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([func.count(Delegation.address)]).where(Delegation.prep_address == address)
result_count = await session.execute(query_count)
total_count = str(result_count.scalars().all()[0])
response.headers["x-total-count"] = total_count
return delegations
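# Illustrative requests (the address values are placeholders):
#   GET /governance/delegations/<address>?skip=0&limit=100
#   GET /governance/votes/<prep_address>?skip=0&limit=100
# Both endpoints answer 204 No Content when nothing matches; otherwise the body is the
# JSON list of delegations and the total row count is exposed in the "x-total-count"
# response header set above.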
|
[
"sqlmodel.func.count",
"sqlmodel.select"
] |
[((340, 351), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (349, 351), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((488, 496), 'fastapi.Query', 'Query', (['(0)'], {}), '(0)\n', (493, 496), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((515, 539), 'fastapi.Query', 'Query', (['(100)'], {'gt': '(0)', 'lt': '(101)'}), '(100, gt=0, lt=101)\n', (520, 539), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((569, 589), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (576, 589), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1479, 1487), 'fastapi.Query', 'Query', (['(0)'], {}), '(0)\n', (1484, 1487), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1506, 1530), 'fastapi.Query', 'Query', (['(100)'], {'gt': '(0)', 'lt': '(101)'}), '(100, gt=0, lt=101)\n', (1511, 1530), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1560, 1580), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1567, 1580), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((801, 824), 'icon_governance.models.delegations.Delegation.value.desc', 'Delegation.value.desc', ([], {}), '()\n', (822, 824), False, 'from icon_governance.models.delegations import Delegation\n'), ((984, 1033), 'fastapi.Response', 'Response', ([], {'status_code': 'HTTPStatus.NO_CONTENT.value'}), '(status_code=HTTPStatus.NO_CONTENT.value)\n', (992, 1033), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1791, 1814), 'icon_governance.models.delegations.Delegation.value.desc', 'Delegation.value.desc', ([], {}), '()\n', (1812, 1814), False, 'from icon_governance.models.delegations import Delegation\n'), ((1974, 2023), 'fastapi.Response', 'Response', ([], {'status_code': 'HTTPStatus.NO_CONTENT.value'}), '(status_code=HTTPStatus.NO_CONTENT.value)\n', (1982, 2023), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1094, 1124), 'sqlmodel.func.count', 'func.count', (['Delegation.address'], {}), '(Delegation.address)\n', (1104, 1124), False, 'from sqlmodel import func, select\n'), ((2084, 2114), 'sqlmodel.func.count', 'func.count', (['Delegation.address'], {}), '(Delegation.address)\n', (2094, 2114), False, 'from sqlmodel import func, select\n'), ((674, 692), 'sqlmodel.select', 'select', (['Delegation'], {}), '(Delegation)\n', (680, 692), False, 'from sqlmodel import func, select\n'), ((1659, 1677), 'sqlmodel.select', 'select', (['Delegation'], {}), '(Delegation)\n', (1665, 1677), False, 'from sqlmodel import func, select\n')]
|
#!/usr/bin/env python
"""
Plot quadrature points for the given geometry and integration order.
"""
from __future__ import absolute_import, print_function
import sys
sys.path.append('.')
from argparse import ArgumentParser
import sfepy.postprocess.plot_quadrature as pq
helps = {
'geometry' :
'reference element geometry, one of "2_3", "2_4", "3_4", "3_8"'
' [default: %(default)s]',
'order' :
'quadrature order [default: %(default)s]',
'boundary' :
'plot boundary quadrature points',
'min_radius' :
'min. radius of points corresponding to the min. weight'
' [default: %(default)s]',
'max_radius' :
'max. radius of points corresponding to the max. weight'
' [default: %(default)s]',
'show_colorbar' :
'show colorbar for quadrature weights',
'show_labels' :
'label quadrature points',
'print_qp' :
'print quadrature points and weights',
}
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('--version', action='version', version='%(prog)s')
parser.add_argument('-g', '--geometry', metavar='name',
action='store', dest='geometry',
default='2_4', help=helps['geometry'])
parser.add_argument('-n', '--order', metavar='order', type=int,
action='store', dest='order',
default=2, help=helps['order'])
parser.add_argument('-b', '--boundary',
action='store_true', dest='boundary',
default=False, help=helps['boundary'])
parser.add_argument('-r', '--min-radius', metavar='float', type=float,
action='store', dest='min_radius',
default=10, help=helps['min_radius'])
parser.add_argument('-R', '--max-radius', metavar='float', type=float,
action='store', dest='max_radius',
default=50, help=helps['max_radius'])
parser.add_argument('-c', '--show-colorbar',
action='store_true', dest='show_colorbar',
default=False, help=helps['show_colorbar'])
    parser.add_argument('-l', '--show-labels',
action='store_true', dest='show_labels',
default=False, help=helps['show_labels'])
parser.add_argument('-p', '--print-qp',
action='store_true', dest='print_qp',
default=False, help=helps['print_qp'])
options = parser.parse_args()
aux = pq.plot_quadrature(None, options.geometry, options.order,
boundary=options.boundary,
min_radius=options.min_radius,
max_radius=options.max_radius,
show_colorbar=options.show_colorbar,
show_labels=options.show_labels)
if options.print_qp:
ax, coors, weights = aux
for ic, coor in enumerate(coors):
print(ic, coor, weights[ic])
pq.plt.show()
if __name__ == '__main__':
main()
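# Hypothetical invocation (the file name is a placeholder), plotting the order-3 rule
# on the "2_4" reference quadrilateral with a weight colorbar, point labels and a
# printout of the points and weights:
#   python plot_quadratures.py -g 2_4 -n 3 -c -l -p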
|
[
"sfepy.postprocess.plot_quadrature.plt.show",
"sfepy.postprocess.plot_quadrature.plot_quadrature"
] |
[((165, 185), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (180, 185), False, 'import sys\n'), ((943, 978), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (957, 978), False, 'from argparse import ArgumentParser\n'), ((2550, 2775), 'sfepy.postprocess.plot_quadrature.plot_quadrature', 'pq.plot_quadrature', (['None', 'options.geometry', 'options.order'], {'boundary': 'options.boundary', 'min_radius': 'options.min_radius', 'max_radius': 'options.max_radius', 'show_colorbar': 'options.show_colorbar', 'show_labels': 'options.show_labels'}), '(None, options.geometry, options.order, boundary=options.\n boundary, min_radius=options.min_radius, max_radius=options.max_radius,\n show_colorbar=options.show_colorbar, show_labels=options.show_labels)\n', (2568, 2775), True, 'import sfepy.postprocess.plot_quadrature as pq\n'), ((3059, 3072), 'sfepy.postprocess.plot_quadrature.plt.show', 'pq.plt.show', ([], {}), '()\n', (3070, 3072), True, 'import sfepy.postprocess.plot_quadrature as pq\n')]
|
import megengine as mge
import megengine.module as M
import pytest
from basecls.models.snet import SNV2Block, SNV2XceptionBlock
@pytest.mark.parametrize("w_in", [32, 48])
@pytest.mark.parametrize("w_out", [64])
@pytest.mark.parametrize("w_mid", [32, 24])
@pytest.mark.parametrize("stride", [1, 2])
@pytest.mark.parametrize("kernel", [3, 5])
@pytest.mark.parametrize("se_r", [0.0, 0.25])
@pytest.mark.parametrize("drop_path_prob", [0.0, 0.1])
@pytest.mark.parametrize("norm_name", ["BN"])
@pytest.mark.parametrize("act_name", ["relu"])
def test_block(
w_in: int,
w_out: int,
w_mid: int,
*,
kernel: int,
stride: int,
norm_name: str,
act_name: str,
se_r: float,
drop_path_prob: float,
):
m = SNV2Block(
w_in,
w_out,
w_mid,
kernel=kernel,
stride=stride,
norm_name=norm_name,
act_name=act_name,
se_r=se_r,
drop_path_prob=drop_path_prob,
)
assert isinstance(m, M.Module)
m(mge.random.normal(size=(2, w_in * 2 // stride, 8, 8)))
@pytest.mark.parametrize("w_in", [32])
@pytest.mark.parametrize("w_out", [64])
@pytest.mark.parametrize("w_mid", [32])
@pytest.mark.parametrize("stride", [1, 2])
@pytest.mark.parametrize("kernel", [7, "x"])
@pytest.mark.parametrize("se_r", [0.25])
@pytest.mark.parametrize("drop_path_prob", [0.1])
@pytest.mark.parametrize("norm_name", ["BN"])
@pytest.mark.parametrize("act_name", ["relu"])
def test_x_block(
w_in: int,
w_out: int,
w_mid: int,
*,
kernel: int,
stride: int,
norm_name: str,
act_name: str,
se_r: float,
drop_path_prob: float,
):
m = SNV2XceptionBlock(
w_in,
w_out,
w_mid,
kernel=kernel,
stride=stride,
norm_name=norm_name,
act_name=act_name,
se_r=se_r,
drop_path_prob=drop_path_prob,
)
assert isinstance(m, M.Module)
m(mge.random.normal(size=(2, w_in * 2 // stride, 8, 8)))
|
[
"megengine.random.normal"
] |
[((132, 173), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_in"""', '[32, 48]'], {}), "('w_in', [32, 48])\n", (155, 173), False, 'import pytest\n'), ((175, 213), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_out"""', '[64]'], {}), "('w_out', [64])\n", (198, 213), False, 'import pytest\n'), ((215, 257), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_mid"""', '[32, 24]'], {}), "('w_mid', [32, 24])\n", (238, 257), False, 'import pytest\n'), ((259, 300), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""stride"""', '[1, 2]'], {}), "('stride', [1, 2])\n", (282, 300), False, 'import pytest\n'), ((302, 343), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""kernel"""', '[3, 5]'], {}), "('kernel', [3, 5])\n", (325, 343), False, 'import pytest\n'), ((345, 389), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""se_r"""', '[0.0, 0.25]'], {}), "('se_r', [0.0, 0.25])\n", (368, 389), False, 'import pytest\n'), ((391, 444), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""drop_path_prob"""', '[0.0, 0.1]'], {}), "('drop_path_prob', [0.0, 0.1])\n", (414, 444), False, 'import pytest\n'), ((446, 490), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""norm_name"""', "['BN']"], {}), "('norm_name', ['BN'])\n", (469, 490), False, 'import pytest\n'), ((492, 537), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""act_name"""', "['relu']"], {}), "('act_name', ['relu'])\n", (515, 537), False, 'import pytest\n'), ((1057, 1094), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_in"""', '[32]'], {}), "('w_in', [32])\n", (1080, 1094), False, 'import pytest\n'), ((1096, 1134), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_out"""', '[64]'], {}), "('w_out', [64])\n", (1119, 1134), False, 'import pytest\n'), ((1136, 1174), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_mid"""', '[32]'], {}), "('w_mid', [32])\n", (1159, 1174), False, 'import pytest\n'), ((1176, 1217), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""stride"""', '[1, 2]'], {}), "('stride', [1, 2])\n", (1199, 1217), False, 'import pytest\n'), ((1219, 1262), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""kernel"""', "[7, 'x']"], {}), "('kernel', [7, 'x'])\n", (1242, 1262), False, 'import pytest\n'), ((1264, 1303), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""se_r"""', '[0.25]'], {}), "('se_r', [0.25])\n", (1287, 1303), False, 'import pytest\n'), ((1305, 1353), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""drop_path_prob"""', '[0.1]'], {}), "('drop_path_prob', [0.1])\n", (1328, 1353), False, 'import pytest\n'), ((1355, 1399), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""norm_name"""', "['BN']"], {}), "('norm_name', ['BN'])\n", (1378, 1399), False, 'import pytest\n'), ((1401, 1446), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""act_name"""', "['relu']"], {}), "('act_name', ['relu'])\n", (1424, 1446), False, 'import pytest\n'), ((736, 882), 'basecls.models.snet.SNV2Block', 'SNV2Block', (['w_in', 'w_out', 'w_mid'], {'kernel': 'kernel', 'stride': 'stride', 'norm_name': 'norm_name', 'act_name': 'act_name', 'se_r': 'se_r', 'drop_path_prob': 'drop_path_prob'}), '(w_in, w_out, w_mid, kernel=kernel, stride=stride, norm_name=\n norm_name, act_name=act_name, se_r=se_r, drop_path_prob=drop_path_prob)\n', (745, 882), False, 'from basecls.models.snet import SNV2Block, SNV2XceptionBlock\n'), ((1647, 1805), 'basecls.models.snet.SNV2XceptionBlock', 
'SNV2XceptionBlock', (['w_in', 'w_out', 'w_mid'], {'kernel': 'kernel', 'stride': 'stride', 'norm_name': 'norm_name', 'act_name': 'act_name', 'se_r': 'se_r', 'drop_path_prob': 'drop_path_prob'}), '(w_in, w_out, w_mid, kernel=kernel, stride=stride,\n norm_name=norm_name, act_name=act_name, se_r=se_r, drop_path_prob=\n drop_path_prob)\n', (1664, 1805), False, 'from basecls.models.snet import SNV2Block, SNV2XceptionBlock\n'), ((999, 1052), 'megengine.random.normal', 'mge.random.normal', ([], {'size': '(2, w_in * 2 // stride, 8, 8)'}), '(size=(2, w_in * 2 // stride, 8, 8))\n', (1016, 1052), True, 'import megengine as mge\n'), ((1918, 1971), 'megengine.random.normal', 'mge.random.normal', ([], {'size': '(2, w_in * 2 // stride, 8, 8)'}), '(size=(2, w_in * 2 // stride, 8, 8))\n', (1935, 1971), True, 'import megengine as mge\n')]
|
"""Implementing balance and fiscal_note_items tables
Revision ID: 6099ed2a58e0
Revises: <KEY>
Create Date: 2021-10-11 14:52:44.126077
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6099ed2a58e0"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"balance",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("value", sa.Float(), nullable=False),
sa.Column("type", sa.Enum("DEBT", "CREDIT", name="balancetype"), nullable=False),
sa.Column(
"operation",
sa.Enum(
"PAYMENT_OF_EMPLOYEES",
"PAYMENT_OF_SUPPLIERS",
"ANOTHER_PAYMENTS",
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
name="operationtype",
),
nullable=False,
),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_balance_created_at"), "balance", ["created_at"], unique=False)
op.create_index(op.f("ix_balance_operation"), "balance", ["operation"], unique=False)
op.create_index(op.f("ix_balance_owner_id"), "balance", ["owner_id"], unique=False)
op.create_index(op.f("ix_balance_type"), "balance", ["type"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("buy_value", sa.Float(), nullable=False),
sa.Column("sugested_sell_value", sa.Float(), nullable=False),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("fiscal_note_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("item_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("file_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.ForeignKeyConstraint(
["file_id"],
["files.bucket_key"],
),
sa.ForeignKeyConstraint(
["fiscal_note_id"],
["fiscal_notes.id"],
),
sa.ForeignKeyConstraint(
["item_id"],
["items.id"],
),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_fiscal_note_items_fiscal_note_id"), "fiscal_note_items", ["fiscal_note_id"], unique=False)
op.create_index(op.f("ix_fiscal_note_items_item_id"), "fiscal_note_items", ["item_id"], unique=False)
op.create_index(op.f("ix_fiscal_note_items_owner_id"), "fiscal_note_items", ["owner_id"], unique=False)
op.create_index(
op.f("ix_fiscal_note_items_sugested_sell_value"), "fiscal_note_items", ["sugested_sell_value"], unique=False
)
op.create_index(op.f("ix_clients_owner_id"), "clients", ["owner_id"], unique=False)
op.create_index(op.f("ix_fiscal_notes_owner_id"), "fiscal_notes", ["owner_id"], unique=False)
op.alter_column("items", "cost", existing_type=postgresql.DOUBLE_PRECISION(precision=53), nullable=True)
op.create_index(op.f("ix_items_amount"), "items", ["amount"], unique=False)
op.alter_column("order_details", "order_id", existing_type=postgresql.UUID(), nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f("ix_users_password_hash"), table_name="users")
op.drop_index(op.f("ix_users_name"), table_name="users")
op.drop_index(op.f("ix_orders_description"), table_name="orders")
op.drop_index(op.f("ix_order_details_sell_value"), table_name="order_details")
op.drop_index(op.f("ix_order_details_item_name"), table_name="order_details")
op.drop_index(op.f("ix_order_details_cost"), table_name="order_details")
op.alter_column("order_details", "order_id", existing_type=postgresql.UUID(), nullable=False)
op.drop_index(op.f("ix_items_value"), table_name="items")
op.drop_index(op.f("ix_items_cost"), table_name="items")
op.drop_index(op.f("ix_items_amount"), table_name="items")
op.alter_column("items", "cost", existing_type=postgresql.DOUBLE_PRECISION(precision=53), nullable=False)
op.drop_index(op.f("ix_fiscal_notes_owner_id"), table_name="fiscal_notes")
op.drop_index(op.f("ix_fiscal_notes_file_id"), table_name="fiscal_notes")
op.drop_index(op.f("ix_fiscal_notes_description"), table_name="fiscal_notes")
op.drop_index(op.f("ix_files_uploaded_at"), table_name="files")
op.drop_index(op.f("ix_clients_phone"), table_name="clients")
op.drop_index(op.f("ix_clients_owner_id"), table_name="clients")
op.drop_index(op.f("ix_clients_created_at"), table_name="clients")
op.drop_index(op.f("ix_fiscal_note_items_sugested_sell_value"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_owner_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_item_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_fiscal_note_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_file_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_buy_value"), table_name="fiscal_note_items")
op.drop_table("fiscal_note_items")
op.drop_index(op.f("ix_balance_value"), table_name="balance")
op.drop_index(op.f("ix_balance_type"), table_name="balance")
op.drop_index(op.f("ix_balance_owner_id"), table_name="balance")
op.drop_index(op.f("ix_balance_operation"), table_name="balance")
op.drop_index(op.f("ix_balance_description"), table_name="balance")
op.drop_index(op.f("ix_balance_created_at"), table_name="balance")
op.drop_table("balance")
# ### end Alembic commands ###
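# Usage note (standard Alembic workflow, not specific to this project): the revision is
# applied or reverted from the directory containing alembic.ini with
#   alembic upgrade 6099ed2a58e0    (or: alembic upgrade head)
#   alembic downgrade -1
# which invoke upgrade()/downgrade() above.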
|
[
"sqlmodel.sql.sqltypes.AutoString",
"sqlmodel.sql.sqltypes.GUID"
] |
[((5828, 5862), 'alembic.op.drop_table', 'op.drop_table', (['"""fiscal_note_items"""'], {}), "('fiscal_note_items')\n", (5841, 5862), False, 'from alembic import op\n'), ((6280, 6304), 'alembic.op.drop_table', 'op.drop_table', (['"""balance"""'], {}), "('balance')\n", (6293, 6304), False, 'from alembic import op\n'), ((1354, 1405), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {}), "(['owner_id'], ['users.id'])\n", (1377, 1405), True, 'import sqlalchemy as sa\n'), ((1450, 1479), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1473, 1479), True, 'import sqlalchemy as sa\n'), ((1507, 1536), 'alembic.op.f', 'op.f', (['"""ix_balance_created_at"""'], {}), "('ix_balance_created_at')\n", (1511, 1536), False, 'from alembic import op\n'), ((1599, 1627), 'alembic.op.f', 'op.f', (['"""ix_balance_operation"""'], {}), "('ix_balance_operation')\n", (1603, 1627), False, 'from alembic import op\n'), ((1689, 1716), 'alembic.op.f', 'op.f', (['"""ix_balance_owner_id"""'], {}), "('ix_balance_owner_id')\n", (1693, 1716), False, 'from alembic import op\n'), ((1777, 1800), 'alembic.op.f', 'op.f', (['"""ix_balance_type"""'], {}), "('ix_balance_type')\n", (1781, 1800), False, 'from alembic import op\n'), ((2414, 2472), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['file_id']", "['files.bucket_key']"], {}), "(['file_id'], ['files.bucket_key'])\n", (2437, 2472), True, 'import sqlalchemy as sa\n'), ((2517, 2581), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['fiscal_note_id']", "['fiscal_notes.id']"], {}), "(['fiscal_note_id'], ['fiscal_notes.id'])\n", (2540, 2581), True, 'import sqlalchemy as sa\n'), ((2626, 2676), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['item_id']", "['items.id']"], {}), "(['item_id'], ['items.id'])\n", (2649, 2676), True, 'import sqlalchemy as sa\n'), ((2721, 2772), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {}), "(['owner_id'], ['users.id'])\n", (2744, 2772), True, 'import sqlalchemy as sa\n'), ((2817, 2846), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2840, 2846), True, 'import sqlalchemy as sa\n'), ((2874, 2917), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_fiscal_note_id"""'], {}), "('ix_fiscal_note_items_fiscal_note_id')\n", (2878, 2917), False, 'from alembic import op\n'), ((2994, 3030), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_item_id"""'], {}), "('ix_fiscal_note_items_item_id')\n", (2998, 3030), False, 'from alembic import op\n'), ((3100, 3137), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_owner_id"""'], {}), "('ix_fiscal_note_items_owner_id')\n", (3104, 3137), False, 'from alembic import op\n'), ((3217, 3265), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_sugested_sell_value"""'], {}), "('ix_fiscal_note_items_sugested_sell_value')\n", (3221, 3265), False, 'from alembic import op\n'), ((3352, 3379), 'alembic.op.f', 'op.f', (['"""ix_clients_owner_id"""'], {}), "('ix_clients_owner_id')\n", (3356, 3379), False, 'from alembic import op\n'), ((3440, 3472), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_owner_id"""'], {}), "('ix_fiscal_notes_owner_id')\n", (3444, 3472), False, 'from alembic import op\n'), ((3647, 3670), 'alembic.op.f', 'op.f', (['"""ix_items_amount"""'], {}), "('ix_items_amount')\n", (3651, 3670), False, 'from alembic import op\n'), ((3942, 3972), 'alembic.op.f', 'op.f', 
(['"""ix_users_password_hash"""'], {}), "('ix_users_password_hash')\n", (3946, 3972), False, 'from alembic import op\n'), ((4012, 4033), 'alembic.op.f', 'op.f', (['"""ix_users_name"""'], {}), "('ix_users_name')\n", (4016, 4033), False, 'from alembic import op\n'), ((4073, 4102), 'alembic.op.f', 'op.f', (['"""ix_orders_description"""'], {}), "('ix_orders_description')\n", (4077, 4102), False, 'from alembic import op\n'), ((4143, 4178), 'alembic.op.f', 'op.f', (['"""ix_order_details_sell_value"""'], {}), "('ix_order_details_sell_value')\n", (4147, 4178), False, 'from alembic import op\n'), ((4226, 4260), 'alembic.op.f', 'op.f', (['"""ix_order_details_item_name"""'], {}), "('ix_order_details_item_name')\n", (4230, 4260), False, 'from alembic import op\n'), ((4308, 4337), 'alembic.op.f', 'op.f', (['"""ix_order_details_cost"""'], {}), "('ix_order_details_cost')\n", (4312, 4337), False, 'from alembic import op\n'), ((4483, 4505), 'alembic.op.f', 'op.f', (['"""ix_items_value"""'], {}), "('ix_items_value')\n", (4487, 4505), False, 'from alembic import op\n'), ((4545, 4566), 'alembic.op.f', 'op.f', (['"""ix_items_cost"""'], {}), "('ix_items_cost')\n", (4549, 4566), False, 'from alembic import op\n'), ((4606, 4629), 'alembic.op.f', 'op.f', (['"""ix_items_amount"""'], {}), "('ix_items_amount')\n", (4610, 4629), False, 'from alembic import op\n'), ((4779, 4811), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_owner_id"""'], {}), "('ix_fiscal_notes_owner_id')\n", (4783, 4811), False, 'from alembic import op\n'), ((4858, 4889), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_file_id"""'], {}), "('ix_fiscal_notes_file_id')\n", (4862, 4889), False, 'from alembic import op\n'), ((4936, 4971), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_description"""'], {}), "('ix_fiscal_notes_description')\n", (4940, 4971), False, 'from alembic import op\n'), ((5018, 5046), 'alembic.op.f', 'op.f', (['"""ix_files_uploaded_at"""'], {}), "('ix_files_uploaded_at')\n", (5022, 5046), False, 'from alembic import op\n'), ((5086, 5110), 'alembic.op.f', 'op.f', (['"""ix_clients_phone"""'], {}), "('ix_clients_phone')\n", (5090, 5110), False, 'from alembic import op\n'), ((5152, 5179), 'alembic.op.f', 'op.f', (['"""ix_clients_owner_id"""'], {}), "('ix_clients_owner_id')\n", (5156, 5179), False, 'from alembic import op\n'), ((5221, 5250), 'alembic.op.f', 'op.f', (['"""ix_clients_created_at"""'], {}), "('ix_clients_created_at')\n", (5225, 5250), False, 'from alembic import op\n'), ((5292, 5340), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_sugested_sell_value"""'], {}), "('ix_fiscal_note_items_sugested_sell_value')\n", (5296, 5340), False, 'from alembic import op\n'), ((5392, 5429), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_owner_id"""'], {}), "('ix_fiscal_note_items_owner_id')\n", (5396, 5429), False, 'from alembic import op\n'), ((5481, 5517), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_item_id"""'], {}), "('ix_fiscal_note_items_item_id')\n", (5485, 5517), False, 'from alembic import op\n'), ((5569, 5612), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_fiscal_note_id"""'], {}), "('ix_fiscal_note_items_fiscal_note_id')\n", (5573, 5612), False, 'from alembic import op\n'), ((5664, 5700), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_file_id"""'], {}), "('ix_fiscal_note_items_file_id')\n", (5668, 5700), False, 'from alembic import op\n'), ((5752, 5790), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_buy_value"""'], {}), "('ix_fiscal_note_items_buy_value')\n", (5756, 5790), False, 'from alembic import 
op\n'), ((5881, 5905), 'alembic.op.f', 'op.f', (['"""ix_balance_value"""'], {}), "('ix_balance_value')\n", (5885, 5905), False, 'from alembic import op\n'), ((5947, 5970), 'alembic.op.f', 'op.f', (['"""ix_balance_type"""'], {}), "('ix_balance_type')\n", (5951, 5970), False, 'from alembic import op\n'), ((6012, 6039), 'alembic.op.f', 'op.f', (['"""ix_balance_owner_id"""'], {}), "('ix_balance_owner_id')\n", (6016, 6039), False, 'from alembic import op\n'), ((6081, 6109), 'alembic.op.f', 'op.f', (['"""ix_balance_operation"""'], {}), "('ix_balance_operation')\n", (6085, 6109), False, 'from alembic import op\n'), ((6151, 6181), 'alembic.op.f', 'op.f', (['"""ix_balance_description"""'], {}), "('ix_balance_description')\n", (6155, 6181), False, 'from alembic import op\n'), ((6223, 6252), 'alembic.op.f', 'op.f', (['"""ix_balance_created_at"""'], {}), "('ix_balance_created_at')\n", (6227, 6252), False, 'from alembic import op\n'), ((524, 552), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (550, 552), False, 'import sqlmodel\n'), ((598, 608), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (606, 608), True, 'import sqlalchemy as sa\n'), ((653, 698), 'sqlalchemy.Enum', 'sa.Enum', (['"""DEBT"""', '"""CREDIT"""'], {'name': '"""balancetype"""'}), "('DEBT', 'CREDIT', name='balancetype')\n", (660, 698), True, 'import sqlalchemy as sa\n'), ((773, 945), 'sqlalchemy.Enum', 'sa.Enum', (['"""PAYMENT_OF_EMPLOYEES"""', '"""PAYMENT_OF_SUPPLIERS"""', '"""ANOTHER_PAYMENTS"""', '"""SALE_IN_PIX"""', '"""SALE_IN_DEBT"""', '"""SALE_IN_CREDIT"""', '"""SALE_IN_MONEY"""'], {'name': '"""operationtype"""'}), "('PAYMENT_OF_EMPLOYEES', 'PAYMENT_OF_SUPPLIERS', 'ANOTHER_PAYMENTS',\n 'SALE_IN_PIX', 'SALE_IN_DEBT', 'SALE_IN_CREDIT', 'SALE_IN_MONEY', name=\n 'operationtype')\n", (780, 945), True, 'import sqlalchemy as sa\n'), ((1153, 1187), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1185, 1187), False, 'import sqlmodel\n'), ((1238, 1251), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1249, 1251), True, 'import sqlalchemy as sa\n'), ((1299, 1327), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1325, 1327), False, 'import sqlmodel\n'), ((1911, 1939), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1937, 1939), False, 'import sqlmodel\n'), ((1989, 1999), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (1997, 1999), True, 'import sqlalchemy as sa\n'), ((2059, 2069), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (2067, 2069), True, 'import sqlalchemy as sa\n'), ((2118, 2146), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2144, 2146), False, 'import sqlmodel\n'), ((2201, 2229), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2227, 2229), False, 'import sqlmodel\n'), ((2277, 2305), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2303, 2305), False, 'import sqlmodel\n'), ((2353, 2387), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2385, 2387), False, 'import sqlmodel\n'), ((3569, 3610), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (3596, 3610), False, 'from sqlalchemy.dialects import postgresql\n'), ((3770, 3787), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (3785, 3787), False, 'from sqlalchemy.dialects import postgresql\n'), 
((4430, 4447), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (4445, 4447), False, 'from sqlalchemy.dialects import postgresql\n'), ((4702, 4743), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (4729, 4743), False, 'from sqlalchemy.dialects import postgresql\n')]
|
# This example implements 2nd-level homogenization of Biot-Darcy-Brinkman model of flow in deformable
# double porous media.
# The mathematical model is described in:
#
#<NAME>., <NAME>., <NAME>.
#The Biot-Darcy-Brinkman model of flow in deformable double porous media; homogenization and numerical modelling.
# Computers and Mathematics with applications, 78(9):3044-3066, 2019,
# https://doi.org/10.1016/j.camwa.2019.04.004
#
# Run calculation of homogenized coefficients:
#
# ./homogen.py example_perfusion_BDB/perf_BDB_mes.py
#
# The results are stored in `example_perfusion_BDB/results/meso` directory.
#
import numpy as nm
from sfepy import data_dir
import os.path as osp
from sfepy.discrete.fem.mesh import Mesh
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.tensors import dim2sym
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy.homogenization.micmac import get_homog_coefs_linear
data_dir = 'example_perfusion_BDB'
def coefs2qp(coefs, nqp):
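    """Expand homogenized coefficients to `nqp` quadrature points: arrays that
    already carry a quadrature-point axis are kept, scalars and matrices are
    tiled `nqp` times."""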
out = {}
for k, v in coefs.items():
if type(v) not in [nm.ndarray, float]:
continue
        if type(v) is nm.ndarray:
            if len(v.shape) >= 3:
                out[k] = v
                continue  # already has the quadrature-point axis, keep as is
        out[k] = nm.tile(v, (nqp, 1, 1))
return out
def get_periodic_bc(var_tab, dim=3, dim_tab=None):
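    """For each (variable, region) pair in `var_tab`, generate the matching-plane
    periodic boundary conditions in the x/y/z directions (up to `dim`) and
    collect their names per variable."""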
if dim_tab is None:
dim_tab = {'x': ['left', 'right'],
'z': ['bottom', 'top'],
'y': ['near', 'far']}
periodic = {}
epbcs = {}
for ivar, reg in var_tab:
periodic['per_%s' % ivar] = pers = []
for idim in 'xyz'[0:dim]:
key = 'per_%s_%s' % (ivar, idim)
regs = ['%s_%s' % (reg, ii) for ii in dim_tab[idim]]
epbcs[key] = (regs, {'%s.all' % ivar: '%s.all' % ivar},
'match_%s_plane' % idim)
pers.append(key)
return epbcs, periodic
# get homogenized coefficients, recalculate them if necessary
def get_homog(coors, mode, pb,micro_filename, **kwargs):
if not (mode == 'qp'):
return
nqp = coors.shape[0]
coefs_filename = 'coefs_micro'
coefs_filename = osp.join(pb.conf.options.get('output_dir', '.'),
coefs_filename) + '.h5'
    coefs = get_homog_coefs_linear(
        0, 0, None,
        micro_filename=micro_filename, coefs_filename=coefs_filename)
coefs['B'] = coefs['B'][:, nm.newaxis]
for k in coefs.keys():
v = coefs[k]
if type(v) is nm.ndarray:
if len(v.shape) == 0:
coefs[k] = v.reshape((1, 1))
elif len(v.shape) == 1:
coefs[k] = v[:, nm.newaxis]
elif isinstance(v, float):
coefs[k] = nm.array([[v]])
out = coefs2qp(coefs, nqp)
return out
def define(filename_mesh=None):
eta = 3.6e-3
if filename_mesh is None:
filename_mesh = osp.join(data_dir, 'meso_perf_puc.vtk')
mesh = Mesh.from_file(filename_mesh)
poroela_micro_file = osp.join(data_dir, 'perf_BDB_mic.py')
dim = 3
sym = (dim + 1) * dim // 2
sym_eye = 'nm.array([1,1,0])' if dim == 2 else 'nm.array([1,1,1,0,0,0])'
bbox = mesh.get_bounding_box()
regions = define_box_regions(mesh.dim, bbox[0], bbox[1], eps=1e-3)
regions.update({
'Z': 'all',
'Gamma_Z': ('vertices of surface', 'facet'),
# matrix
'Zm': 'cells of group 1',
'Zm_left': ('r.Zm *v r.Left', 'vertex'),
'Zm_right': ('r.Zm *v r.Right', 'vertex'),
'Zm_bottom': ('r.Zm *v r.Bottom', 'vertex'),
'Zm_top': ('r.Zm *v r.Top', 'vertex'),
'Gamma_Zm': ('r.Zm *v r.Zc', 'facet', 'Zm'),
# canal
'Zc': 'cells of group 2',
'Zc0': ('r.Zc -v r.Gamma_Zc', 'vertex'),
'Zc_left': ('r.Zc0 *v r.Left', 'vertex'),
'Zc_right': ('r.Zc0 *v r.Right', 'vertex'),
'Zc_bottom': ('r.Zc0 *v r.Bottom', 'vertex'),
'Zc_top': ('r.Zc0 *v r.Top', 'vertex'),
'Gamma_Zc': ('r.Zm *v r.Zc', 'facet', 'Zc'),
"Surface": ("vertices of surface", "facet"),
'Center_c': ('vertex 5346', 'vertex'), # canal center
})
if dim == 3:
regions.update({
'Zm_far': ('r.Zm *v r.Far', 'vertex'),
'Zm_near': ('r.Zm *v r.Near', 'vertex'),
'Zc_far': ('r.Zc0 *v r.Far', 'vertex'),
'Zc_near': ('r.Zc0 *v r.Near', 'vertex'),
})
fields = {
'one': ('real', 'scalar', 'Z', 1),
'displacement': ('real', 'vector', 'Zm', 1),
'pressure_m': ('real', 'scalar', 'Zm', 1),
'pressure_c': ('real', 'scalar', 'Zc', 1),
'displacement_c': ('real', 'vector', 'Zc', 1),
'velocity': ('real', 'vector', 'Zc', 2),
}
variables = {
# displacement
'u': ('unknown field', 'displacement', 0),
'v': ('test field', 'displacement', 'u'),
'Pi_u': ('parameter field', 'displacement', 'u'),
'U1': ('parameter field', 'displacement', '(set-to-None)'),
'U2': ('parameter field', 'displacement', '(set-to-None)'),
'uc': ('unknown field', 'displacement_c', 4),
'vc': ('test field', 'displacement_c', 'uc'),
# velocity
'w': ('unknown field', 'velocity', 1),
'z': ('test field', 'velocity', 'w'),
'Pi_w': ('parameter field', 'velocity', 'w'),
'W1': ('parameter field', 'velocity', '(set-to-None)'),
'W2': ('parameter field', 'velocity', '(set-to-None)'),
# pressure
'pm': ('unknown field', 'pressure_m', 2),
'qm': ('test field', 'pressure_m', 'pm'),
'Pm1': ('parameter field', 'pressure_m', '(set-to-None)'),
'Pm2': ('parameter field', 'pressure_m', '(set-to-None)'),
'Pi_pm': ('parameter field', 'pressure_m', 'pm'),
'pc': ('unknown field', 'pressure_c', 3),
'qc': ('test field', 'pressure_c', 'pc'),
'Pc1': ('parameter field', 'pressure_c', '(set-to-None)'),
'Pc2': ('parameter field', 'pressure_c', '(set-to-None)'),
# one
'one': ('parameter field', 'one', '(set-to-None)'),
}
functions = {
'match_x_plane': (per.match_x_plane,),
'match_y_plane': (per.match_y_plane,),
'match_z_plane': (per.match_z_plane,),
'get_homog': (lambda ts, coors, mode=None, problem=None, **kwargs:\
get_homog(coors, mode, problem, poroela_micro_file, **kwargs),),
}
materials = {
'hmatrix': 'get_homog',
'fluid': ({
'eta_c': eta* nm.eye(dim2sym(dim)),
},),
'mat': ({
'k1': nm.array([[1, 0, 0]]).T,
'k2': nm.array([[0, 1, 0]]).T,
'k3': nm.array([[0, 0, 1]]).T,
},),
}
ebcs = {
        'fixed_u': ('Corners', {'u.all': 0.0}),
        'fixed_pm': ('Corners', {'pm.0': 0.0}),
'fixed_w': ('Center_c', {'w.all': 0.0}),
}
epbcs, periodic = get_periodic_bc([('u', 'Zm'), ('pm', 'Zm'),
('pc', 'Zc'), ('w', 'Zc')])
all_periodic = periodic['per_w'] + periodic['per_pc']
integrals = {
'i': 4,
}
options = {
'coefs': 'coefs',
'coefs_filename': 'coefs_meso',
'requirements': 'requirements',
'volume': {
'variables': ['u', 'pc'],
'expression': 'd_volume.i.Zm(u) + d_volume.i.Zc(pc)',
},
'output_dir': data_dir + '/results/meso',
'file_per_var': True,
'save_format': 'vtk', # Global setting.
'dump_format': 'h5', # Global setting.
'absolute_mesh_path': True,
'multiprocessing': False,
'ls': 'ls',
        'nls': 'ns_em15',
'output_prefix': 'meso:',
}
solvers = {
'ls': ('ls.mumps', {}),
'ls_s': ('ls.schur_mumps',
{'schur_variables': ['pc'],
'fallback': 'ls'}),
'ns': ('nls.newton', {
'i_max': 1,
'eps_a': 1e-14,
'eps_r': 1e-3,
'problem': 'nonlinear'}),
'ns_em15': ('nls.newton', {
'i_max': 1,
'eps_a': 1e-15,
'eps_r': 1e-3,
'problem': 'nonlinear'}),
'ns_em12': ('nls.newton', {
'i_max': 1,
'eps_a': 1e-12,
'eps_r': 1e-3,
'problem': 'nonlinear'}),
'ns_em9': ('nls.newton', {
'i_max': 1,
'eps_a': 1e-9,
'eps_r': 1e-3,
'problem': 'nonlinear'}),
'ns_em6': ('nls.newton', {
'i_max': 1,
'eps_a': 1e-4,
'eps_r': 1e-3,
'problem': 'nonlinear'}),
}
#Definition of homogenized coefficients, see (33)-(35)
coefs = {
'A': {
'requires': ['pis_u', 'corrs_omega_ij'],
'expression': 'dw_lin_elastic.i.Zm(hmatrix.A, U1, U2)',
'set_variables': [('U1', ('corrs_omega_ij', 'pis_u'), 'u'),
('U2', ('corrs_omega_ij', 'pis_u'), 'u')],
'class': cb.CoefSymSym,
},
'B_aux1': {
'status': 'auxiliary',
'requires': ['corrs_omega_ij'],
'expression': '- dw_surface_ltr.i.Gamma_Zm(U1)', # !!! -
'set_variables': [('U1', 'corrs_omega_ij', 'u')],
'class': cb.CoefSym,
},
'B_aux2': {
'status': 'auxiliary',
'requires': ['corrs_omega_ij', 'pis_u', 'corr_one'],
'expression': 'dw_biot.i.Zm(hmatrix.B, U1, one)',
'set_variables': [('U1', ('corrs_omega_ij', 'pis_u'), 'u'),
('one', 'corr_one', 'one')],
'class': cb.CoefSym,
},
'B': {
'requires': ['c.B_aux1', 'c.B_aux2', 'c.vol_c'],
'expression': 'c.B_aux1 + c.B_aux2 + c.vol_c* %s' % sym_eye,
'class': cb.CoefEval,
},
'H': {
'requires': ['corrs_phi_k'],
'expression': 'dw_diffusion.i.Zm(hmatrix.K, Pm1, Pm2)',
'set_variables': [('Pm1', 'corrs_phi_k', 'pm'),
('Pm2', 'corrs_phi_k', 'pm')],
'class': cb.CoefDimDim,
},
'K': {
'requires': ['corrs_pi_k', 'pis_pm'],
'expression': 'dw_diffusion.i.Zm(hmatrix.K, Pm1, Pm2)',
'set_variables': [('Pm1', ('corrs_pi_k', 'pis_pm'), 'pm'),
('Pm2', ('corrs_pi_k', 'pis_pm'), 'pm')],
'class': cb.CoefDimDim,
},
'Q': {
'requires': ['corrs_phi_k', 'pis_pm'],
'expression': 'dw_diffusion.i.Zm(hmatrix.K, Pm1, Pm2)',
'set_variables': [('Pm1', 'pis_pm', 'pm'),
('Pm2', 'corrs_phi_k', 'pm')],
'class': cb.CoefDimDim,
},
'P': {
'requires': ['c.Q', 'c.vol'],
'expression': 'c.vol["fraction_Zc"] * nm.eye(%d) - c.Q' % dim,
'class': cb.CoefEval,
},
'PT': {
'requires': ['c.P'],
'expression': 'c.P.T',
'class': cb.CoefEval,
},
'M_aux1': {
'status': 'auxiliary',
'requires': [],
'expression': 'ev_volume_integrate_mat.i.Zm(hmatrix.M, one)',
'set_variables': [],
'class': cb.CoefOne,
},
'M_aux2': {
'status': 'auxiliary',
'requires': ['corrs_omega_p', 'corr_one'],
'expression': 'dw_biot.i.Zm(hmatrix.B, U1, one)',
'set_variables': [('U1', 'corrs_omega_p', 'u'),
('one', 'corr_one', 'one')],
'class': cb.CoefOne,
},
'M_aux3': {
'status': 'auxiliary',
'requires': ['corrs_omega_p'],
'expression': ' dw_surface_ltr.i.Gamma_Zm(U1)',
'set_variables': [('U1', 'corrs_omega_p', 'u')],
'class': cb.CoefOne,
},
'M': {
'requires': ['c.M_aux1', 'c.M_aux2', 'c.M_aux3'],
'expression': 'c.M_aux1 + c.M_aux2 -c.M_aux3 ',
'class': cb.CoefEval,
},
'S': {
'requires': ['corrs_psi_ij', 'pis_w'],
'expression': 'dw_lin_elastic.i.Zc(fluid.eta_c, W1, W2)',
'set_variables': [('W1', ('corrs_psi_ij', 'pis_w'), 'w'),
('W2', ('corrs_psi_ij', 'pis_w'), 'w')],
'class': cb.CoefSymSym,
},
'vol': {
'regions': ['Zm', 'Zc'],
'expression': 'd_volume.i.%s(one)',
'class': cb.VolumeFractions,
},
'surf_vol': {
'regions': ['Zm', 'Zc'],
'expression': 'd_surface.i.%s(one)',
'class': cb.VolumeFractions,
},
'surf_c': {
'requires': ['c.surf_vol'],
'expression': 'c.surf_vol["fraction_Zc"]',
'class': cb.CoefEval,
},
'vol_c': {
'requires': ['c.vol'],
'expression': 'c.vol["fraction_Zc"]',
'class': cb.CoefEval,
},
'filenames': {},
}
#Definition of mesoscopic corrector problems
requirements = {
'corr_one': {
'variable': 'one',
'expression':
"nm.ones((problem.fields['one'].n_vertex_dof, 1), dtype=nm.float64)",
'class': cb.CorrEval,
},
'pis_u': {
'variables': ['u'],
'class': cb.ShapeDimDim,
'save_name': 'corrs_pis_u',
'dump_variables': ['u'],
},
'pis_pm': {
'variables': ['pm'],
'class': cb.ShapeDim,
},
'pis_w': {
'variables': ['w'],
'class': cb.ShapeDimDim,
},
# Corrector problem, see (31)_1
'corrs_omega_ij': {
'requires': ['pis_u'],
'ebcs': ['fixed_u'],
'epbcs': periodic['per_u'],
'is_linear': True,
'equations': {
'balance_of_forces':
"""dw_lin_elastic.i.Zm(hmatrix.A, v, u)
= - dw_lin_elastic.i.Zm(hmatrix.A, v, Pi_u)"""
},
'set_variables': [('Pi_u', 'pis_u', 'u')],
'class': cb.CorrDimDim,
'save_name': 'corrs_omega_ij',
'dump_variables': ['u'],
'solvers': {'ls': 'ls', 'nls': 'ns_em6'},
'is_linear': True,
},
# Corrector problem, see (31)_2
'corrs_omega_p': {
'requires': ['corr_one'],
'ebcs': ['fixed_u'],
'epbcs': periodic['per_u'],
'equations': {
'balance_of_forces':
"""dw_lin_elastic.i.Zm(hmatrix.A, v, u)
= dw_biot.i.Zm(hmatrix.B, v, one)
- dw_surface_ltr.i.Gamma_Zm(v)""",
},
'set_variables': [('one', 'corr_one', 'one')],
'class': cb.CorrOne,
'save_name': 'corrs_omega_p',
'dump_variables': ['u'],
'solvers': {'ls': 'ls', 'nls': 'ns_em9'},
},
# Corrector problem, see (31)_3
'corrs_pi_k': {
'requires': ['pis_pm'],
'ebcs': [], # ['fixed_pm'],
'epbcs': periodic['per_pm'],
'is_linear': True,
'equations': {
'eq':
"""dw_diffusion.i.Zm(hmatrix.K, qm, pm)
= - dw_diffusion.i.Zm(hmatrix.K, qm, Pi_pm)""",
},
'set_variables': [('Pi_pm', 'pis_pm', 'pm')],
'class': cb.CorrDim,
'save_name': 'corrs_pi_k',
'dump_variables': ['pm'],
'solvers': {'ls': 'ls', 'nls': 'ns_em12'},
},
# Corrector problem, see (31)_4
'corrs_phi_k': {
'requires': [],
'ebcs': [],
'epbcs': periodic['per_pm'],
'equations': {
'eq':
"""dw_diffusion.i.Zm(hmatrix.K, qm, pm)
= - dw_surface_ndot.i.Gamma_Zm(mat.k%d, qm)""",
},
'class': cb.CorrEqPar,
'eq_pars': [(ii + 1) for ii in range(dim)],
'save_name': 'corrs_phi_k',
'dump_variables': ['pm'],
'solvers': {'ls': 'ls', 'nls': 'ns_em9'},
},
# Corrector problem, see (32)
'corrs_psi_ij': {
'requires': ['pis_w'],
'ebcs': ['fixed_w'],
'epbcs': periodic['per_w'] + periodic['per_pc'],
'equations': {
'eq1':
"""2*dw_lin_elastic.i.Zc(fluid.eta_c, z, w)
- dw_stokes.i.Zc(z, pc)
= - 2*dw_lin_elastic.i.Zc(fluid.eta_c, z, Pi_w)""",
'eq2':
"""dw_stokes.i.Zc(w, qc)
= - dw_stokes.i.Zc(Pi_w, qc)"""
},
'set_variables': [('Pi_w', 'pis_w', 'w')],
'class': cb.CorrDimDim,
'save_name': 'corrs_psi_ij',
'dump_variables': ['w', 'pc'],
'solvers': {'ls': 'ls', 'nls': 'ns_em15'},
# 'solvers': {'ls': 'ls_s', 'nls': 'ns_em15'},
'is_linear': True,
},
}
return locals()
|
[
"sfepy.mechanics.tensors.dim2sym",
"sfepy.homogenization.utils.define_box_regions",
"sfepy.homogenization.micmac.get_homog_coefs_linear",
"sfepy.discrete.fem.mesh.Mesh.from_file"
] |
[((2373, 2473), 'sfepy.homogenization.micmac.get_homog_coefs_linear', 'get_homog_coefs_linear', (['(0)', '(0)', 'None'], {'micro_filename': 'micro_filename', 'coefs_filename': 'coefs_filename'}), '(0, 0, None, micro_filename=micro_filename,\n coefs_filename=coefs_filename)\n', (2395, 2473), False, 'from sfepy.homogenization.micmac import get_homog_coefs_linear\n'), ((3096, 3125), 'sfepy.discrete.fem.mesh.Mesh.from_file', 'Mesh.from_file', (['filename_mesh'], {}), '(filename_mesh)\n', (3110, 3125), False, 'from sfepy.discrete.fem.mesh import Mesh\n'), ((3154, 3191), 'os.path.join', 'osp.join', (['data_dir', '"""perf_BDB_mic.py"""'], {}), "(data_dir, 'perf_BDB_mic.py')\n", (3162, 3191), True, 'import os.path as osp\n'), ((3368, 3425), 'sfepy.homogenization.utils.define_box_regions', 'define_box_regions', (['mesh.dim', 'bbox[0]', 'bbox[1]'], {'eps': '(0.001)'}), '(mesh.dim, bbox[0], bbox[1], eps=0.001)\n', (3386, 3425), False, 'from sfepy.homogenization.utils import define_box_regions\n'), ((1305, 1328), 'numpy.tile', 'nm.tile', (['v', '(nqp, 1, 1)'], {}), '(v, (nqp, 1, 1))\n', (1312, 1328), True, 'import numpy as nm\n'), ((3042, 3081), 'os.path.join', 'osp.join', (['data_dir', '"""meso_perf_puc.vtk"""'], {}), "(data_dir, 'meso_perf_puc.vtk')\n", (3050, 3081), True, 'import os.path as osp\n'), ((2863, 2878), 'numpy.array', 'nm.array', (['[[v]]'], {}), '([[v]])\n', (2871, 2878), True, 'import numpy as nm\n'), ((6856, 6877), 'numpy.array', 'nm.array', (['[[1, 0, 0]]'], {}), '([[1, 0, 0]])\n', (6864, 6877), True, 'import numpy as nm\n'), ((6908, 6929), 'numpy.array', 'nm.array', (['[[0, 1, 0]]'], {}), '([[0, 1, 0]])\n', (6916, 6929), True, 'import numpy as nm\n'), ((6960, 6981), 'numpy.array', 'nm.array', (['[[0, 0, 1]]'], {}), '([[0, 0, 1]])\n', (6968, 6981), True, 'import numpy as nm\n'), ((6771, 6783), 'sfepy.mechanics.tensors.dim2sym', 'dim2sym', (['dim'], {}), '(dim)\n', (6778, 6783), False, 'from sfepy.mechanics.tensors import dim2sym\n')]
|
import os
import sys
import time
from collections import OrderedDict
from time import strftime, gmtime
from tensorboardX import SummaryWriter
from dataset import AsrDataset, DataLoader, AsrCollator
from models.transformer import Model
import hparams as hp
import argparse
import megengine as mge
import megengine.module as M
import megengine.functional as F
from megengine.functional import clip, concat, minimum, norm
from megengine.core._imperative_rt.core2 import pop_scope, push_scope
from typing import Iterable, Union
from megengine.tensor import Tensor
import megengine.distributed as dist
from megengine.data import SequentialSampler, RandomSampler, DataLoader
from criterions.label_smoothing_loss import LabelSmoothingLoss
from megengine.utils.network import Network as Net
import megengine.autodiff as autodiff
import megengine.data as data
import megengine
import multiprocessing
logging = megengine.logger.get_logger()
def clip_grad_norm(
tensors: Union[Tensor, Iterable[Tensor]],
max_norm: float,
ord: float = 2.0,
):
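    """Scale the gradients of `tensors` in place so that their global `ord`-norm
    does not exceed `max_norm`; return the norm computed before clipping."""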
push_scope("clip_grad_norm")
if isinstance(tensors, Tensor):
tensors = [tensors]
tensors = [t for t in tensors if t.grad is not None]
norm_ = [norm(t.grad.flatten(), ord=ord) for t in tensors]
if len(norm_) > 1:
norm_ = norm(concat(norm_), ord=ord)
else:
norm_ = norm_[0]
scale = max_norm / (norm_ + 1e-6)
scale = minimum(scale, 1)
for tensor in tensors:
tensor.grad._reset(tensor.grad * scale)
pop_scope("clip_grad_norm")
return norm_
class exponential_ma:
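    """Exponential moving average with bias correction: `get_value` divides the
    running value by the running weight, so early estimates are not biased
    towards zero."""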
def __init__(self, ratio):
self.value = 0
self.weight = 0
self.ratio = ratio
def update(self, x):
self.value = self.value * self.ratio + (1 - self.ratio) * x
self.weight = self.weight * self.ratio + (1 - self.ratio)
def get_value(self):
if self.weight < 1e-8:
return 0
return self.value / self.weight
def update_train_log(monitor_vars_name, ma_dict, losses, ttrain, tdata):
for n in monitor_vars_name:
for ma in ma_dict["losses"]:
ma[n].update(losses[n])
for ma in ma_dict["ttrain"]:
ma.update(ttrain)
for ma in ma_dict["tdata"]:
ma.update(tdata)
def print_train_log(sess, epoch, minibatch, ma_dict, minibatch_per_epoch):
ma_output = "[{}] e:{}, {}/{} ".format(
strftime("%Y-%m-%d %H:%M:%S", gmtime()), epoch, minibatch, minibatch_per_epoch
)
print(ma_output, file=sys.stderr)
line = " {:31}:".format("speed")
for ma in ma_dict["ttrain"]:
line += "{:10.2g}".format(1 / ma.get_value())
print(line, file=sys.stderr)
line = " {:31}".format("dp/tot")
for ma1, ma2 in zip(ma_dict["ttrain"], ma_dict["tdata"]):
line += "{:10.2g}".format(ma2.get_value() / ma1.get_value())
print(line, file=sys.stderr)
for k in sess.loss_names:
line = " {:31}".format(k)
for ma in ma_dict["losses"]:
line += "{:10.2E}".format(ma[k].get_value())
print(line, file=sys.stderr)
line = " {:31}: {}".format("lr", sess.get_learning_rate())
print(line, file=sys.stderr)
sys.stderr.flush()
def adjust_learning_rate(optimizer, step_num, warmup_step=4000):
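    """Noam-style schedule: linear warm-up for `warmup_step` steps followed by
    inverse-square-root decay, scaled by the base rate `hp.lr`."""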
lr = (
hp.lr
* warmup_step ** 0.5
* min(step_num * warmup_step ** -1.5, step_num ** -0.5)
)
for param_group in optimizer.param_groups:
param_group["lr"] = lr
def set_grad(net, min, max):
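    """Debug helper: fill every parameter gradient with values drawn uniformly
    from [min, max) and keep a copy in `param.grad_backup`."""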
for param in net.parameters():
param.grad = mge.random.uniform(min, max, param.shape)
param.grad_backup = F.copy(param.grad)
class Session:
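    """Training session: builds the transformer ASR model, the (optionally
    distributed) gradient manager and the Adam optimizer, optionally restores a
    checkpoint, and performs single optimization steps."""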
def __init__(self, args):
with open(os.path.join(hp.dataset_root, "vocab.txt")) as f:
self.vocab = [w.strip() for w in f.readlines()]
self.vocab = ["<pad>"] + self.vocab
print(f"Vocab Size: {len(self.vocab)}")
self.model = Model(hp.num_mels, len(self.vocab))
world_size = args.world_size * args.ngpus
if world_size > 1:
dist.bcast_list_(self.model.parameters(), dist.WORLD)
# Autodiff gradient manager
self.gm = autodiff.GradManager().attach(
self.model.parameters(),
callbacks=dist.make_allreduce_cb("SUM") if world_size > 1 else None,
)
self.global_step = 0
self.optimizer = mge.optimizer.Adam(self.model.parameters(), lr=hp.lr)
# load pretrain model
if args.continue_path:
ckpt = mge.load(args.continue_path)
if "model" in ckpt:
state_dict = ckpt["model"]
self.model.load_state_dict(state_dict, strict=False)
self.loss_names = ["total"]
self.criterion = LabelSmoothingLoss(len(self.vocab), 0, hp.lsm_weight)
def get_learning_rate(self):
lr = self.optimizer.param_groups[0]["lr"]
return lr
def get_current_losses(self):
losses = OrderedDict()
for name in self.loss_names:
losses[name] = float(getattr(self, "loss_" + name))
return losses
def optimize_parameters(self, data):
"""Calculate losses, gradients, and update network weights; called in every training iteration"""
text_input, text_output, mel, pos_text, pos_mel, text_length, mel_length = data
with self.gm:
hs_pad, hs_mask, pred_pad, pred_mask = self.model.forward(
mel, mel_length, text_input, text_length
)
self.loss_total = self.criterion(pred_pad, text_output)
self.gm.backward(self.loss_total)
clip_grad_norm(self.model.parameters(), 1.0)
self.optimizer.step().clear_grad()
def main():
os.makedirs(hp.checkpoint_path, exist_ok=True)
parser = argparse.ArgumentParser()
parser.add_argument("--continue_path")
parser.add_argument(
"-n",
"--ngpus",
default=None,
type=int,
help="number of GPUs per node (default: None, use all available GPUs)",
)
parser.add_argument(
"--save",
metavar="DIR",
default="output",
help="path to save checkpoint and log",
)
parser.add_argument(
"--epochs",
default=90,
type=int,
help="number of total epochs to run (default: 90)",
)
parser.add_argument("-j", "--workers", default=2, type=int)
parser.add_argument(
"-p",
"--print-freq",
default=20,
type=int,
metavar="N",
help="print frequency (default: 10)",
)
parser.add_argument("--dist-addr", default="localhost")
parser.add_argument("--dist-port", default=23456, type=int)
parser.add_argument("--world-size", default=1, type=int)
parser.add_argument("--rank", default=0, type=int)
args = parser.parse_args()
# create server if is master
if args.rank <= 0:
server = dist.Server(
port=args.dist_port
) # pylint: disable=unused-variable # noqa: F841
# get device count
with multiprocessing.Pool(1) as pool:
ngpus_per_node, _ = pool.map(megengine.get_device_count, ["gpu", "cpu"])
if args.ngpus:
ngpus_per_node = args.ngpus
# launch processes
procs = []
for local_rank in range(ngpus_per_node):
p = multiprocessing.Process(
target=worker,
kwargs=dict(
rank=args.rank * ngpus_per_node + local_rank,
world_size=args.world_size * ngpus_per_node,
ngpus_per_node=ngpus_per_node,
args=args,
),
)
p.start()
procs.append(p)
# join processes
for p in procs:
p.join()
def worker(rank, world_size, ngpus_per_node, args):
# pylint: disable=too-many-statements
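    """Per-process training worker: set up logging and the NCCL process group,
    build the infinite data loader and the Session, then run the training loop
    with periodic logging and checkpointing."""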
if rank == 0:
os.makedirs(os.path.join(args.save, "asr"), exist_ok=True)
megengine.logger.set_log_file(os.path.join(args.save, "asr", "log.txt"))
# init process group
if world_size > 1:
dist.init_process_group(
master_ip=args.dist_addr,
port=args.dist_port,
world_size=world_size,
rank=rank,
device=rank % ngpus_per_node,
backend="nccl",
)
logging.info(
"init process group rank %d / %d", dist.get_rank(), dist.get_world_size()
)
# build dataset
train_dataloader = build_dataset(args)
train_queue = iter(train_dataloader)
steps_per_epoch = 164905 // (world_size * hp.batch_size)
sess = Session(args)
ma_rates = [1 - 0.01 ** x for x in range(3)]
ma_dict = {
"losses": [
{k: exponential_ma(rate) for k in sess.loss_names} for rate in ma_rates
],
"ttrain": [exponential_ma(rate) for rate in ma_rates],
"tdata": [exponential_ma(rate) for rate in ma_rates],
}
for epoch in range(1, (hp.epochs + 1) * steps_per_epoch):
t_minibatch_start = time.time()
sess.global_step += 1
if sess.global_step < 400000:
adjust_learning_rate(sess.optimizer, sess.global_step)
tdata = time.time() - t_minibatch_start
data = next(train_queue)
sess.optimize_parameters(data)
losses = sess.get_current_losses()
ttrain = time.time() - t_minibatch_start
# print(ttrain, tdata)
update_train_log(sess.loss_names, ma_dict, losses, ttrain, tdata)
if sess.global_step % hp.log_interval == 0 and rank == 0:
print_train_log(sess, epoch, epoch, ma_dict, hp.epochs * steps_per_epoch)
if sess.global_step % hp.save_interval == 0 and rank == 0:
print("*******************************************")
mge.save(
{"model": sess.model.state_dict(), "global_step": sess.global_step},
os.path.join(
hp.checkpoint_path, "checkpoint_%d.pkl" % sess.global_step
),
)
print("*******************************************")
if sess.global_step > hp.max_steps:
exit(1)
def build_dataset(args):
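    """Build an infinite, randomly shuffled DataLoader over AsrDataset, batched
    with AsrCollator."""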
dataset = AsrDataset()
train_sampler = data.Infinite(
RandomSampler(dataset=dataset, batch_size=hp.batch_size)
)
dataloader = DataLoader(
dataset=dataset, sampler=train_sampler, collator=AsrCollator()
)
return dataloader
if __name__ == "__main__":
main()
|
[
"megengine.core._imperative_rt.core2.push_scope",
"megengine.autodiff.GradManager",
"megengine.logger.get_logger",
"megengine.functional.minimum",
"megengine.random.uniform",
"megengine.distributed.init_process_group",
"megengine.distributed.make_allreduce_cb",
"megengine.functional.concat",
"megengine.distributed.Server",
"megengine.functional.copy",
"megengine.load",
"megengine.distributed.get_rank",
"megengine.data.RandomSampler",
"megengine.distributed.get_world_size",
"megengine.core._imperative_rt.core2.pop_scope"
] |
[((902, 931), 'megengine.logger.get_logger', 'megengine.logger.get_logger', ([], {}), '()\n', (929, 931), False, 'import megengine\n'), ((1050, 1078), 'megengine.core._imperative_rt.core2.push_scope', 'push_scope', (['"""clip_grad_norm"""'], {}), "('clip_grad_norm')\n", (1060, 1078), False, 'from megengine.core._imperative_rt.core2 import pop_scope, push_scope\n'), ((1416, 1433), 'megengine.functional.minimum', 'minimum', (['scale', '(1)'], {}), '(scale, 1)\n', (1423, 1433), False, 'from megengine.functional import clip, concat, minimum, norm\n'), ((1513, 1540), 'megengine.core._imperative_rt.core2.pop_scope', 'pop_scope', (['"""clip_grad_norm"""'], {}), "('clip_grad_norm')\n", (1522, 1540), False, 'from megengine.core._imperative_rt.core2 import pop_scope, push_scope\n'), ((3181, 3199), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (3197, 3199), False, 'import sys\n'), ((5731, 5777), 'os.makedirs', 'os.makedirs', (['hp.checkpoint_path'], {'exist_ok': '(True)'}), '(hp.checkpoint_path, exist_ok=True)\n', (5742, 5777), False, 'import os\n'), ((5791, 5816), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5814, 5816), False, 'import argparse\n'), ((10157, 10169), 'dataset.AsrDataset', 'AsrDataset', ([], {}), '()\n', (10167, 10169), False, 'from dataset import AsrDataset, DataLoader, AsrCollator\n'), ((3556, 3597), 'megengine.random.uniform', 'mge.random.uniform', (['min', 'max', 'param.shape'], {}), '(min, max, param.shape)\n', (3574, 3597), True, 'import megengine as mge\n'), ((3626, 3644), 'megengine.functional.copy', 'F.copy', (['param.grad'], {}), '(param.grad)\n', (3632, 3644), True, 'import megengine.functional as F\n'), ((4966, 4979), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4977, 4979), False, 'from collections import OrderedDict\n'), ((6925, 6957), 'megengine.distributed.Server', 'dist.Server', ([], {'port': 'args.dist_port'}), '(port=args.dist_port)\n', (6936, 6957), True, 'import megengine.distributed as dist\n'), ((7062, 7085), 'multiprocessing.Pool', 'multiprocessing.Pool', (['(1)'], {}), '(1)\n', (7082, 7085), False, 'import multiprocessing\n'), ((8045, 8204), 'megengine.distributed.init_process_group', 'dist.init_process_group', ([], {'master_ip': 'args.dist_addr', 'port': 'args.dist_port', 'world_size': 'world_size', 'rank': 'rank', 'device': '(rank % ngpus_per_node)', 'backend': '"""nccl"""'}), "(master_ip=args.dist_addr, port=args.dist_port,\n world_size=world_size, rank=rank, device=rank % ngpus_per_node, backend\n ='nccl')\n", (8068, 8204), True, 'import megengine.distributed as dist\n'), ((8989, 9000), 'time.time', 'time.time', ([], {}), '()\n', (8998, 9000), False, 'import time\n'), ((10213, 10269), 'megengine.data.RandomSampler', 'RandomSampler', ([], {'dataset': 'dataset', 'batch_size': 'hp.batch_size'}), '(dataset=dataset, batch_size=hp.batch_size)\n', (10226, 10269), False, 'from megengine.data import SequentialSampler, RandomSampler, DataLoader\n'), ((1307, 1320), 'megengine.functional.concat', 'concat', (['norm_'], {}), '(norm_)\n', (1313, 1320), False, 'from megengine.functional import clip, concat, minimum, norm\n'), ((2420, 2428), 'time.gmtime', 'gmtime', ([], {}), '()\n', (2426, 2428), False, 'from time import strftime, gmtime\n'), ((4523, 4551), 'megengine.load', 'mge.load', (['args.continue_path'], {}), '(args.continue_path)\n', (4531, 4551), True, 'import megengine as mge\n'), ((7861, 7891), 'os.path.join', 'os.path.join', (['args.save', '"""asr"""'], {}), "(args.save, 'asr')\n", (7873, 7891), False, 
'import os\n'), ((7946, 7987), 'os.path.join', 'os.path.join', (['args.save', '"""asr"""', '"""log.txt"""'], {}), "(args.save, 'asr', 'log.txt')\n", (7958, 7987), False, 'import os\n'), ((8348, 8363), 'megengine.distributed.get_rank', 'dist.get_rank', ([], {}), '()\n', (8361, 8363), True, 'import megengine.distributed as dist\n'), ((8365, 8386), 'megengine.distributed.get_world_size', 'dist.get_world_size', ([], {}), '()\n', (8384, 8386), True, 'import megengine.distributed as dist\n'), ((9152, 9163), 'time.time', 'time.time', ([], {}), '()\n', (9161, 9163), False, 'import time\n'), ((9316, 9327), 'time.time', 'time.time', ([], {}), '()\n', (9325, 9327), False, 'import time\n'), ((10362, 10375), 'dataset.AsrCollator', 'AsrCollator', ([], {}), '()\n', (10373, 10375), False, 'from dataset import AsrDataset, DataLoader, AsrCollator\n'), ((3710, 3752), 'os.path.join', 'os.path.join', (['hp.dataset_root', '"""vocab.txt"""'], {}), "(hp.dataset_root, 'vocab.txt')\n", (3722, 3752), False, 'import os\n'), ((4175, 4197), 'megengine.autodiff.GradManager', 'autodiff.GradManager', ([], {}), '()\n', (4195, 4197), True, 'import megengine.autodiff as autodiff\n'), ((9860, 9932), 'os.path.join', 'os.path.join', (['hp.checkpoint_path', "('checkpoint_%d.pkl' % sess.global_step)"], {}), "(hp.checkpoint_path, 'checkpoint_%d.pkl' % sess.global_step)\n", (9872, 9932), False, 'import os\n'), ((4265, 4294), 'megengine.distributed.make_allreduce_cb', 'dist.make_allreduce_cb', (['"""SUM"""'], {}), "('SUM')\n", (4287, 4294), True, 'import megengine.distributed as dist\n')]
|
# -*- coding: utf-8
r"""
Homogenization of the Darcy flow in a thin porous layer.
The reference cell is composed of the matrix representing the dual porosity
and of two disconnected channels representing the primary porosity,
see paper [1].
[1] <NAME>, <NAME>: Modeling Tissue Perfusion Using a Homogenized
Model with Layer-wise Decomposition. IFAC Proceedings Volumes 45(2), 2012,
pages 1029-1034.
https://doi.org/10.3182/20120215-3-AT-3016.00182
"""
from __future__ import absolute_import
from sfepy.discrete.fem.periodic import match_x_plane, match_y_plane
import sfepy.homogenization.coefs_base as cb
import numpy as nm
from sfepy import data_dir
import six
from six.moves import range
def get_mats(pk, ph, pe, dim):
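    """Return two isotropic permeability tensors `pk * I` whose last (transverse)
    diagonal entry is rescaled to `pk / ph` and `pk / ph**2`, respectively, as
    used by the layer-wise (thin layer) model."""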
m1 = nm.eye(dim, dtype=nm.float64) * pk
m1[-1, -1] = pk / ph
m2 = nm.eye(dim, dtype=nm.float64) * pk
m2[-1, -1] = pk / ph ** 2
return m1, m2
def recovery_perf(pb, corrs, macro):
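    """Recovery hook: reconstruct the microscopic pressures and Darcy velocities
    in the matrix (YM) and in the channels from the macroscopic data `macro`
    and the correctors `corrs`."""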
from sfepy.homogenization.recovery import compute_p_from_macro
from sfepy.base.base import Struct
slev = ''
    micro_nnod = pb.domain.mesh.n_nod
    micro_coors = pb.domain.mesh.coors  # needed below by compute_p_from_macro()
    centre_Y = nm.sum(micro_coors, axis=0) / micro_nnod
nodes_Y = {}
channels = {}
for k in six.iterkeys(macro):
if 'press' in k:
channels[k[-1]] = 1
channels = list(channels.keys())
varnames = ['pM']
for ch in channels:
nodes_Y[ch] = pb.domain.regions['Y' + ch].vertices
varnames.append('p' + ch)
pvars = pb.create_variables(varnames)
press = {}
# matrix
press['M'] = \
corrs['corrs_%s_gamma_p' % pb_def['name']]['pM'] * macro['g_p'] + \
corrs['corrs_%s_gamma_m' % pb_def['name']]['pM'] * macro['g_m']
out = {}
# channels
for ch in channels:
press_mac = macro['press' + ch][0, 0]
press_mac_grad = macro['pressg' + ch]
nnod = corrs['corrs_%s_pi%s' % (pb_def['name'], ch)]\
['p%s_0' % ch].shape[0]
press_mic = nm.zeros((nnod, 1))
for key, val in \
six.iteritems(corrs['corrs_%s_pi%s' % (pb_def['name'], ch)]):
kk = int(key[-1])
press_mic += val * press_mac_grad[kk, 0]
for key in six.iterkeys(corrs):
if ('_gamma_' + ch in key):
kk = int(key[-1]) - 1
press_mic += corrs[key]['p' + ch] * macro['g' + ch][kk]
press_mic += \
compute_p_from_macro(press_mac_grad[nm.newaxis,nm.newaxis, :, :],
micro_coors[nodes_Y[ch]], 0,
centre=centre_Y, extdim=-1).reshape((nnod, 1))
press[ch] = press_mac + eps0 * press_mic
out[slev + 'p' + ch] = Struct(name='output_data',
mode='vertex',
data=press[ch],
var_name='p' + ch,
dofs=None)
pvars['p' + ch].set_data(press_mic)
dvel = pb.evaluate('ev_diffusion_velocity.iV.Y%s(mat1%s.k, p%s)'
% (ch, ch, ch),
var_dict={'p' + ch: pvars['p' + ch]},
mode='el_avg')
out[slev + 'w' + ch] = Struct(name='output_data',
mode='cell',
data=dvel,
var_name='w' + ch,
dofs=None)
press['M'] += corrs['corrs_%s_eta%s' % (pb_def['name'], ch)]['pM']\
* press_mac
pvars['pM'].set_data(press['M'])
dvel = pb.evaluate('%e * ev_diffusion_velocity.iV.YM(mat1M.k, pM)' % eps0,
var_dict={'pM': pvars['pM']}, mode='el_avg')
out[slev + 'pM'] = Struct(name='output_data',
mode='vertex',
                              data=press['M'],
var_name='pM',
dofs=None)
out[slev + 'wM'] = Struct(name='output_data',
mode='cell',
data=dvel,
var_name='wM',
dofs=None)
return out
geoms = {
'2_4': ['2_4_Q1', '2', 5],
'3_8': ['3_8_Q1', '4', 5],
'3_4': ['3_4_P1', '3', 3],
}
pb_def = {
'name': '3d_2ch',
'mesh_filename': data_dir + '/meshes/3d/perfusion_micro3d.mesh',
'dim': 3,
'geom': geoms['3_4'],
'eps0': 1.0e-2,
'param_h': 1.0,
'param_kappa_m': 0.1,
'matrix_mat_el_grp': 3,
'channels': {
'A': {
'mat_el_grp': 1,
'fix_nd_grp': (4, 1),
'io_nd_grp': [1, 2, 3],
'param_kappa_ch': 1.0,
},
'B': {
'mat_el_grp': 2,
'fix_nd_grp': (14, 11),
'io_nd_grp': [11, 12, 13],
'param_kappa_ch': 2.0,
},
},
}
filename_mesh = pb_def['mesh_filename']
eps0 = pb_def['eps0']
param_h = pb_def['param_h']
# integrals
integrals = {
'iV': 2,
'iS': 2,
}
functions = {
'match_x_plane': (match_x_plane,),
'match_y_plane': (match_y_plane,),
}
aux = []
for ch, val in six.iteritems(pb_def['channels']):
aux.append('r.bYM' + ch)
# basic regions
regions = {
'Y': 'all',
'YM': 'cells of group %d' % pb_def['matrix_mat_el_grp'],
# periodic boundaries
'Pl': ('vertices in (x < 0.001)', 'facet'),
'Pr': ('vertices in (x > 0.999)', 'facet'),
'PlYM': ('r.Pl *v r.YM', 'facet'),
'PrYM': ('r.Pr *v r.YM', 'facet'),
'bYMp': ('r.bYp *v r.YM', 'facet', 'YM'),
'bYMm': ('r.bYm *v r.YM', 'facet', 'YM'),
'bYMpm': ('r.bYMp +v r.bYMm', 'facet', 'YM'),
}
# matrix/channel boundaries
regions.update({
'bYMchs': (' +v '.join(aux), 'facet', 'YM'),
'YMmchs': 'r.YM -v r.bYMchs',
})
# boundary conditions Gamma+/-
ebcs = {
'gamma_pm_bYMchs': ('bYMchs', {'pM.0': 0.0}),
'gamma_pm_YMmchs': ('YMmchs', {'pM.0': 1.0}),
}
# periodic boundary conditions - matrix, X-direction
epbcs = {'periodic_xYM': (['PlYM', 'PrYM'], {'pM.0': 'pM.0'}, 'match_x_plane')}
lcbcs = {}
all_periodicYM = ['periodic_%sYM' % ii for ii in ['x', 'y'][:pb_def['dim']-1]]
all_periodicY = {}
if pb_def['dim'] == 2:
regions.update({
'bYm': ('vertices in (y < 0.001)', 'facet'),
'bYp': ('vertices in (y > 0.999)', 'facet'),
})
if pb_def['dim'] == 3:
regions.update({
'Pn': ('vertices in (y < 0.001)', 'facet'),
'Pf': ('vertices in (y > 0.999)', 'facet'),
'PnYM': ('r.Pn *v r.YM', 'facet'),
'PfYM': ('r.Pf *v r.YM', 'facet'),
'bYm': ('vertices in (z < 0.001)', 'facet'),
'bYp': ('vertices in (z > 0.999)', 'facet'),
})
# periodic boundary conditions - matrix, Y-direction
epbcs.update({
'periodic_yYM': (['PnYM', 'PfYM'], {'pM.0': 'pM.0'}, 'match_y_plane'),
})
reg_io = {}
ebcs_eta = {}
ebcs_gamma = {}
# generate regions, ebcs, epbcs
for ch, val in six.iteritems(pb_def['channels']):
all_periodicY[ch] = ['periodic_%sY%s' % (ii, ch)
for ii in ['x', 'y'][:pb_def['dim']-1]]
# channels: YA, fixedYA, bYMA (matrix/channel boundaries)
regions.update({
'Y' + ch: 'cells of group %d' % val['mat_el_grp'],
'bYM' + ch: ('r.YM *v r.Y' + ch, 'facet', 'YM'),
'PlY' + ch: ('r.Pl *v r.Y' + ch, 'facet'),
'PrY' + ch: ('r.Pr *v r.Y' + ch, 'facet'),
})
if 'fix_nd_grp' in val:
regions.update({
'fixedY' + ch: ('vertices of group %d' % val['fix_nd_grp'][0],
'vertex'),
})
ebcs_eta[ch] = []
for ch2, val2 in six.iteritems(pb_def['channels']):
aux = 'eta%s_bYM%s' % (ch, ch2)
if ch2 == ch:
ebcs.update({aux: ('bYM' + ch2, {'pM.0': 1.0})})
else:
ebcs.update({aux: ('bYM' + ch2, {'pM.0': 0.0})})
ebcs_eta[ch].append(aux)
# boundary conditions
# periodic boundary conditions - channels, X-direction
epbcs.update({
'periodic_xY' + ch: (['PlY' + ch, 'PrY' + ch],
{'p%s.0' % ch: 'p%s.0' % ch},
'match_x_plane'),
})
if pb_def['dim'] == 3:
regions.update({
'PnY' + ch: ('r.Pn *v r.Y' + ch, 'facet'),
'PfY' + ch: ('r.Pf *v r.Y' + ch, 'facet'),
})
# periodic boundary conditions - channels, Y-direction
epbcs.update({
'periodic_yY' + ch: (['PnY' + ch, 'PfY' + ch],
{'p%s.0' % ch: 'p%s.0' % ch},
'match_y_plane'),
})
reg_io[ch] = []
aux_bY = []
# channel: inputs/outputs
for i_io in range(len(val['io_nd_grp'])):
io = '%s_%d' % (ch, i_io+1)
# regions
aux = val['io_nd_grp'][i_io]
if 'fix_nd_grp' in val and val['fix_nd_grp'][1] == aux:
regions.update({
'bY%s' % io: ('vertices of group %d +v r.fixedY%s' % (aux, ch),
'facet', 'Y%s' % ch),
})
else:
regions.update({
'bY%s' % io: ('vertices of group %d' % aux,
'facet', 'Y%s' % ch),
})
aux_bY.append('r.bY%s' % io)
reg_io[ch].append('bY%s' % io)
regions.update({
'bY' + ch: (' +v '.join(aux_bY), 'facet', 'Y' + ch),
})
# channel: inputs/outputs
for i_io in range(len(val['io_nd_grp'])):
io = '%s_%d' % (ch, i_io + 1)
ion = '%s_n%d' % (ch, i_io + 1)
regions.update({
'bY%s' % ion: ('r.bY%s -v r.bY%s' % (ch, io), 'facet', 'Y%s' % ch),
})
# boundary conditions
aux = 'fix_p%s_bY%s' % (ch, ion)
ebcs.update({
aux: ('bY%s' % ion, {'p%s.0' % ch: 0.0}),
})
lcbcs.update({
'imv' + ch: ('Y' + ch, {'ls%s.all' % ch: None}, None,
'integral_mean_value')
})
matk1, matk2 = get_mats(pb_def['param_kappa_m'], param_h, eps0, pb_def['dim'])
materials = {
'mat1M': ({'k': matk1},),
'mat2M': ({'k': matk2},),
}
fields = {
'corrector_M': ('real', 'scalar', 'YM', 1),
'vel_M': ('real', 'vector', 'YM', 1),
'vol_all': ('real', 'scalar', 'Y', 1),
}
variables = {
'pM': ('unknown field', 'corrector_M'),
'qM': ('test field', 'corrector_M', 'pM'),
'Pi_M': ('parameter field', 'corrector_M', '(set-to-None)'),
'corr_M': ('parameter field', 'corrector_M', '(set-to-None)'),
'corr1_M': ('parameter field', 'corrector_M', '(set-to-None)'),
'corr2_M': ('parameter field', 'corrector_M', '(set-to-None)'),
'wM': ('parameter field', 'vel_M', '(set-to-None)'),
'vol_all': ('parameter field', 'vol_all', '(set-to-None)'),
}
# generate regions for channel inputs/outputs
for ch, val in six.iteritems(pb_def['channels']):
matk1, matk2 = get_mats(val['param_kappa_ch'], param_h,
eps0, pb_def['dim'])
materials.update({
'mat1' + ch: ({'k': matk1},),
'mat2' + ch: ({'k': matk2},),
})
fields.update({
'corrector_' + ch: ('real', 'scalar', 'Y' + ch, 1),
'vel_' + ch: ('real', 'vector', 'Y' + ch, 1),
})
variables.update({
'p' + ch: ('unknown field', 'corrector_' + ch),
'q' + ch: ('test field', 'corrector_' + ch, 'p' + ch),
'Pi_' + ch: ('parameter field', 'corrector_' + ch, '(set-to-None)'),
'corr1_' + ch: ('parameter field', 'corrector_' + ch, '(set-to-None)'),
'corr2_' + ch: ('parameter field', 'corrector_' + ch, '(set-to-None)'),
'w' + ch: ('unknown field', 'vel_' + ch),
# lagrange mutltipliers - integral mean value
'ls' + ch: ('unknown field', 'corrector_' + ch),
'lv' + ch: ('test field', 'corrector_' + ch, 'ls' + ch),
})
options = {
'coefs': 'coefs',
'requirements': 'requirements',
'ls': 'ls', # linear solver to use
'volumes': {
'total': {
'variables': ['vol_all'],
'expression': """ev_volume.iV.Y(vol_all)""",
},
'one': {
'value': 1.0,
}
},
'output_dir': './output',
'file_per_var': True,
'coefs_filename': 'coefs_perf_' + pb_def['name'],
'coefs_info': {'eps0': eps0},
'recovery_hook': 'recovery_perf',
'multiprocessing': False,
}
for ipm in ['p', 'm']:
options['volumes'].update({
'bYM' + ipm: {
'variables': ['pM'],
'expression': "ev_volume.iS.bYM%s(pM)" % ipm,
},
'bY' + ipm: {
'variables': ['vol_all'],
'expression': "ev_volume.iS.bY%s(vol_all)" % ipm,
}
})
for ch in six.iterkeys(reg_io):
for ireg in reg_io[ch]:
options['volumes'].update({
ireg: {
'variables': ['p' + ch],
'expression': "ev_volume.iS.%s(p%s)" % (ireg, ch),
}
})
coefs = {
'vol_bYMpm': {
'regions': ['bYMp', 'bYMm'],
'expression': 'ev_volume.iS.%s(pM)',
'class': cb.VolumeFractions,
},
'filenames': {},
}
requirements = {
'corrs_one_YM': {
'variable': ['pM'],
'ebcs': ['gamma_pm_YMmchs', 'gamma_pm_bYMchs'],
'epbcs': [],
'save_name': 'corrs_one_YM',
'class': cb.CorrSetBCS,
},
}
for ipm in ['p', 'm']:
requirements.update({
'corrs_gamma_' + ipm: {
'requires': [],
'ebcs': ['gamma_pm_bYMchs'],
'epbcs': all_periodicYM,
'equations': {
'eq_gamma_pm': """dw_diffusion.iV.YM(mat2M.k, qM, pM) =
%e * dw_integrate.iS.bYM%s(qM)"""
% (1.0/param_h, ipm),
},
'class': cb.CorrOne,
'save_name': 'corrs_%s_gamma_%s' % (pb_def['name'], ipm),
},
})
for ipm2 in ['p', 'm']:
coefs.update({
'H' + ipm + ipm2: { # test+
'requires': ['corrs_gamma_' + ipm],
'set_variables': [('corr_M', 'corrs_gamma_' + ipm, 'pM')],
'expression': 'ev_integrate.iS.bYM%s(corr_M)' % ipm2,
'set_volume': 'bYp',
'class': cb.CoefOne,
},
})
def get_channel(keys, bn):
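    """Return the channel suffix that follows the base name `bn` in the first
    matching key, or None if no key contains `bn`."""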
for ii in keys:
if bn in ii:
return ii[(ii.rfind(bn) + len(bn)):]
return None
def set_corrpis(variables, ir, ic, mode, **kwargs):
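    """Set the row/column corrector variables of the K coefficient to the channel
    shape function (pi) plus the corresponding pi corrector."""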
ch = get_channel(list(kwargs.keys()), 'pis_')
pis = kwargs['pis_' + ch]
corrs_pi = kwargs['corrs_pi' + ch]
if mode == 'row':
val = pis.states[ir]['p' + ch] + corrs_pi.states[ir]['p' + ch]
variables['corr1_' + ch].set_data(val)
elif mode == 'col':
val = pis.states[ic]['p' + ch] + corrs_pi.states[ic]['p' + ch]
variables['corr2_' + ch].set_data(val)
def set_corr_S(variables, ir, *args, **kwargs):
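    """Set the corrector variables of the S coefficients: the row from the channel
    shape functions, the column from the gamma corrector of the given in/outlet."""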
ch = get_channel(list(kwargs.keys()), 'pis_')
io = get_channel(list(kwargs.keys()), 'corrs_gamma_')
pis = kwargs['pis_' + ch]
corrs_gamma = kwargs['corrs_gamma_' + io]
pi = pis.states[ir]['p' + ch]
val = corrs_gamma.state['p' + ch]
variables['corr1_' + ch].set_data(pi)
variables['corr2_' + ch].set_data(val)
def set_corr_cc(variables, ir, *args, **kwargs):
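    """Set the corrector variable of the P coefficients to the zero-mean channel
    shape function plus the corresponding pi corrector."""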
ch = get_channel(list(kwargs.keys()), 'pis_')
pis = kwargs['pis_' + ch]
corrs_pi = kwargs['corrs_pi' + ch]
pi = pis.states[ir]['p' + ch]
pi = pi - nm.mean(pi)
val = pi + corrs_pi.states[ir]['p' + ch]
variables['corr1_' + ch].set_data(val)
for ch, val in six.iteritems(pb_def['channels']):
coefs.update({
'G' + ch: { # test+
'requires': ['corrs_one' + ch, 'corrs_eta' + ch],
'set_variables': [('corr1_M', 'corrs_one' + ch, 'pM'),
('corr2_M', 'corrs_eta' + ch, 'pM')],
'expression': 'dw_diffusion.iV.YM(mat2M.k, corr1_M, corr2_M)',
'class': cb.CoefOne,
},
'K' + ch: { # test+
'requires': ['pis_' + ch, 'corrs_pi' + ch],
'set_variables': set_corrpis,
'expression': 'dw_diffusion.iV.Y%s(mat2%s.k, corr1_%s, corr2_%s)'\
% ((ch,) * 4),
'dim': pb_def['dim'] - 1,
'class': cb.CoefDimDim,
},
})
requirements.update({
'pis_' + ch: {
'variables': ['p' + ch],
'class': cb.ShapeDim,
},
'corrs_one' + ch: {
'variable': ['pM'],
'ebcs': ebcs_eta[ch],
'epbcs': [],
'save_name': 'corrs_%s_one%s' % (pb_def['name'], ch),
'class': cb.CorrSetBCS,
},
'corrs_eta' + ch: {
'ebcs': ebcs_eta[ch],
'epbcs': all_periodicYM,
'equations': {
'eq_eta': 'dw_diffusion.iV.YM(mat2M.k, qM, pM) = 0',
},
'class': cb.CorrOne,
'save_name': 'corrs_%s_eta%s' % (pb_def['name'], ch),
},
'corrs_pi' + ch: {
'requires': ['pis_' + ch],
'set_variables': [('Pi_' + ch, 'pis_' + ch, 'p' + ch)],
'ebcs': [],
'epbcs': all_periodicY[ch],
'lcbcs': ['imv' + ch],
'equations': {
'eq_pi': """dw_diffusion.iV.Y%s(mat2%s.k, q%s, p%s)
+ dw_dot.iV.Y%s(q%s, ls%s)
= - dw_diffusion.iV.Y%s(mat2%s.k, q%s, Pi_%s)"""
% ((ch,) * 11),
'eq_imv': 'dw_dot.iV.Y%s(lv%s, p%s) = 0' % ((ch,) * 3),
},
'dim': pb_def['dim'] - 1,
'class': cb.CorrDim,
'save_name': 'corrs_%s_pi%s' % (pb_def['name'], ch),
},
})
for ipm in ['p', 'm']:
coefs.update({
'E' + ipm + ch: { # test+
'requires': ['corrs_eta' + ch],
'set_variables': [('corr_M', 'corrs_eta' + ch, 'pM')],
'expression': 'ev_integrate.iS.bYM%s(corr_M)' % ipm,
'set_volume': 'bYp',
'class': cb.CoefOne,
},
'F' + ipm + ch: { # test+
'requires': ['corrs_one' + ch, 'corrs_gamma_' + ipm],
'set_variables': [('corr1_M', 'corrs_one' + ch, 'pM'),
('corr2_M', 'corrs_gamma_' + ipm, 'pM')],
'expression': """dw_diffusion.iV.YM(mat2M.k, corr1_M, corr2_M)
- %e * ev_integrate.iS.bYM%s(corr1_M)"""\
% (1.0/param_h, ipm),
'class': cb.CoefOne,
},
})
for i_io in range(len(val['io_nd_grp'])):
io = '%s_%d' % (ch, i_io + 1)
coefs.update({
'S' + io: { # [Rohan1] (4.28), test+
'requires': ['corrs_gamma_' + io, 'pis_' + ch],
'set_variables': set_corr_S,
'expression': 'dw_diffusion.iV.Y%s(mat2%s.k,corr1_%s,corr2_%s)'
% ((ch,) * 4),
'dim': pb_def['dim'] - 1,
'class': cb.CoefDim,
},
'P' + io: { # test+
'requires': ['pis_' + ch, 'corrs_pi' + ch],
'set_variables': set_corr_cc,
'expression': 'ev_integrate.iS.bY%s(corr1_%s)'\
% (io, ch),
'set_volume': 'bYp',
'dim': pb_def['dim'] - 1,
'class': cb.CoefDim,
},
'S_test' + io: {
'requires': ['corrs_pi' + ch],
'set_variables': [('corr1_' + ch, 'corrs_pi' + ch, 'p' + ch)],
'expression': '%e * ev_integrate.iS.bY%s(corr1_%s)'\
% (1.0 / param_h, io, ch),
'dim': pb_def['dim'] - 1,
'class': cb.CoefDim,
},
})
requirements.update({
'corrs_gamma_' + io: {
'requires': [],
'variables': ['p' + ch, 'q' + ch],
'ebcs': [],
'epbcs': all_periodicY[ch],
'lcbcs': ['imv' + ch],
'equations': {
'eq_gamma': """dw_diffusion.iV.Y%s(mat2%s.k, q%s, p%s)
+ dw_dot.iV.Y%s(q%s, ls%s)
= %e * dw_integrate.iS.bY%s(q%s)"""
% ((ch,) * 7 + (1.0/param_h, io, ch)),
'eq_imv': 'dw_dot.iV.Y%s(lv%s, p%s) = 0'
% ((ch,) * 3),
},
'class': cb.CorrOne,
'save_name': 'corrs_%s_gamma_%s' % (pb_def['name'], io),
},
})
for i_io2 in range(len(val['io_nd_grp'])):
io2 = '%s_%d' % (ch, i_io2 + 1)
io12 = '%s_%d' % (io, i_io2 + 1)
coefs.update({
'R' + io12: { # test+
'requires': ['corrs_gamma_' + io2],
'set_variables': [('corr1_' + ch, 'corrs_gamma_' + io2,
'p' + ch)],
'expression': 'ev_integrate.iS.bY%s(corr1_%s)'\
% (io, ch),
'set_volume': 'bYp',
'class': cb.CoefOne,
},
})
solvers = {
'ls': ('ls.scipy_direct', {}),
'newton': ('nls.newton', {
'i_max': 1,
})
}
|
[
"sfepy.homogenization.recovery.compute_p_from_macro",
"sfepy.base.base.Struct"
] |
[((5156, 5189), 'six.iteritems', 'six.iteritems', (["pb_def['channels']"], {}), "(pb_def['channels'])\n", (5169, 5189), False, 'import six\n'), ((6949, 6982), 'six.iteritems', 'six.iteritems', (["pb_def['channels']"], {}), "(pb_def['channels'])\n", (6962, 6982), False, 'import six\n'), ((10860, 10893), 'six.iteritems', 'six.iteritems', (["pb_def['channels']"], {}), "(pb_def['channels'])\n", (10873, 10893), False, 'import six\n'), ((12725, 12745), 'six.iterkeys', 'six.iterkeys', (['reg_io'], {}), '(reg_io)\n', (12737, 12745), False, 'import six\n'), ((15620, 15653), 'six.iteritems', 'six.iteritems', (["pb_def['channels']"], {}), "(pb_def['channels'])\n", (15633, 15653), False, 'import six\n'), ((1200, 1219), 'six.iterkeys', 'six.iterkeys', (['macro'], {}), '(macro)\n', (1212, 1219), False, 'import six\n'), ((3753, 3840), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""vertex"""', 'dat': "press['M']", 'var_name': '"""pM"""', 'dofs': 'None'}), "(name='output_data', mode='vertex', dat=press['M'], var_name='pM',\n dofs=None)\n", (3759, 3840), False, 'from sfepy.base.base import Struct\n'), ((3981, 4057), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'dvel', 'var_name': '"""wM"""', 'dofs': 'None'}), "(name='output_data', mode='cell', data=dvel, var_name='wM', dofs=None)\n", (3987, 4057), False, 'from sfepy.base.base import Struct\n'), ((7635, 7668), 'six.iteritems', 'six.iteritems', (["pb_def['channels']"], {}), "(pb_def['channels'])\n", (7648, 7668), False, 'import six\n'), ((733, 762), 'numpy.eye', 'nm.eye', (['dim'], {'dtype': 'nm.float64'}), '(dim, dtype=nm.float64)\n', (739, 762), True, 'import numpy as nm\n'), ((802, 831), 'numpy.eye', 'nm.eye', (['dim'], {'dtype': 'nm.float64'}), '(dim, dtype=nm.float64)\n', (808, 831), True, 'import numpy as nm\n'), ((1101, 1137), 'numpy.sum', 'nm.sum', (['pb.domain.mesh.coors'], {'axis': '(0)'}), '(pb.domain.mesh.coors, axis=0)\n', (1107, 1137), True, 'import numpy as nm\n'), ((1958, 1977), 'numpy.zeros', 'nm.zeros', (['(nnod, 1)'], {}), '((nnod, 1))\n', (1966, 1977), True, 'import numpy as nm\n'), ((2014, 2074), 'six.iteritems', 'six.iteritems', (["corrs['corrs_%s_pi%s' % (pb_def['name'], ch)]"], {}), "(corrs['corrs_%s_pi%s' % (pb_def['name'], ch)])\n", (2027, 2074), False, 'import six\n'), ((2179, 2198), 'six.iterkeys', 'six.iterkeys', (['corrs'], {}), '(corrs)\n', (2191, 2198), False, 'import six\n'), ((2670, 2761), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""vertex"""', 'data': 'press[ch]', 'var_name': "('p' + ch)", 'dofs': 'None'}), "(name='output_data', mode='vertex', data=press[ch], var_name='p' + ch,\n dofs=None)\n", (2676, 2761), False, 'from sfepy.base.base import Struct\n'), ((3210, 3295), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'dvel', 'var_name': "('w' + ch)", 'dofs': 'None'}), "(name='output_data', mode='cell', data=dvel, var_name='w' + ch, dofs=None\n )\n", (3216, 3295), False, 'from sfepy.base.base import Struct\n'), ((15503, 15514), 'numpy.mean', 'nm.mean', (['pi'], {}), '(pi)\n', (15510, 15514), True, 'import numpy as nm\n'), ((2384, 2511), 'sfepy.homogenization.recovery.compute_p_from_macro', 'compute_p_from_macro', (['press_mac_grad[nm.newaxis, nm.newaxis, :, :]', 'micro_coors[nodes_Y[ch]]', '(0)'], {'centre': 'centre_Y', 'extdim': '(-1)'}), '(press_mac_grad[nm.newaxis, nm.newaxis, :, :],\n micro_coors[nodes_Y[ch]], 0, centre=centre_Y, 
extdim=-1)\n', (2404, 2511), False, 'from sfepy.homogenization.recovery import compute_p_from_macro\n')]
|
"""empty message
Revision ID: 2d614148ea4b
Revises:
Create Date: 2022-03-17 22:29:00.613962
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import sqlmodel # added
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '2d614148ea4b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('role',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('user',
sa.Column('first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('last_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_superuser', sa.Boolean(), nullable=True),
sa.Column('birthdate', sa.DateTime(), nullable=True),
sa.Column('phone', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('state', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('country', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('address', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('hashed_password', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
op.create_index(op.f('ix_user_hashed_password'), 'user', ['hashed_password'], unique=False)
op.create_table('textinference',
sa.Column('result', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('text', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('created_by_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_textinference_text'), 'textinference', ['text'], unique=False)
op.create_table('zeroshotinference',
sa.Column('candidate_labels', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('result', postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column('text', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('created_by_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_zeroshotinference_text'), 'zeroshotinference', ['text'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_zeroshotinference_text'), table_name='zeroshotinference')
op.drop_table('zeroshotinference')
op.drop_index(op.f('ix_textinference_text'), table_name='textinference')
op.drop_table('textinference')
op.drop_index(op.f('ix_user_hashed_password'), table_name='user')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user')
op.drop_table('role')
# ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((3708, 3742), 'alembic.op.drop_table', 'op.drop_table', (['"""zeroshotinference"""'], {}), "('zeroshotinference')\n", (3721, 3742), False, 'from alembic import op\n'), ((3824, 3854), 'alembic.op.drop_table', 'op.drop_table', (['"""textinference"""'], {}), "('textinference')\n", (3837, 3854), False, 'from alembic import op\n'), ((3989, 4010), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (4002, 4010), False, 'from alembic import op\n'), ((4015, 4036), 'alembic.op.drop_table', 'op.drop_table', (['"""role"""'], {}), "('role')\n", (4028, 4036), False, 'from alembic import op\n'), ((806, 835), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (829, 835), True, 'import sqlalchemy as sa\n'), ((1899, 1948), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['role_id']", "['role.id']"], {}), "(['role_id'], ['role.id'])\n", (1922, 1948), True, 'import sqlalchemy as sa\n'), ((1956, 1985), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1979, 1985), True, 'import sqlalchemy as sa\n'), ((2012, 2033), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (2016, 2033), False, 'from alembic import op\n'), ((2087, 2118), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (2091, 2118), False, 'from alembic import op\n'), ((2589, 2644), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['created_by_id']", "['user.id']"], {}), "(['created_by_id'], ['user.id'])\n", (2612, 2644), True, 'import sqlalchemy as sa\n'), ((2652, 2681), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2675, 2681), True, 'import sqlalchemy as sa\n'), ((2708, 2737), 'alembic.op.f', 'op.f', (['"""ix_textinference_text"""'], {}), "('ix_textinference_text')\n", (2712, 2737), False, 'from alembic import op\n'), ((3300, 3355), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['created_by_id']", "['user.id']"], {}), "(['created_by_id'], ['user.id'])\n", (3323, 3355), True, 'import sqlalchemy as sa\n'), ((3363, 3392), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3386, 3392), True, 'import sqlalchemy as sa\n'), ((3419, 3452), 'alembic.op.f', 'op.f', (['"""ix_zeroshotinference_text"""'], {}), "('ix_zeroshotinference_text')\n", (3423, 3452), False, 'from alembic import op\n'), ((3637, 3670), 'alembic.op.f', 'op.f', (['"""ix_zeroshotinference_text"""'], {}), "('ix_zeroshotinference_text')\n", (3641, 3670), False, 'from alembic import op\n'), ((3761, 3790), 'alembic.op.f', 'op.f', (['"""ix_textinference_text"""'], {}), "('ix_textinference_text')\n", (3765, 3790), False, 'from alembic import op\n'), ((3873, 3904), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (3877, 3904), False, 'from alembic import op\n'), ((3943, 3964), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (3947, 3964), False, 'from alembic import op\n'), ((498, 532), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (530, 532), False, 'import sqlmodel\n'), ((580, 614), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (612, 614), False, 'import sqlmodel\n'), ((653, 665), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (663, 665), True, 'import sqlalchemy as sa\n'), ((712, 725), 
'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (723, 725), True, 'import sqlalchemy as sa\n'), ((771, 784), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (782, 784), True, 'import sqlalchemy as sa\n'), ((898, 932), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (930, 932), False, 'import sqlmodel\n'), ((978, 1012), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1010, 1012), False, 'import sqlmodel\n'), ((1054, 1088), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1086, 1088), False, 'import sqlmodel\n'), ((1133, 1145), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1143, 1145), True, 'import sqlalchemy as sa\n'), ((1193, 1205), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1203, 1205), True, 'import sqlalchemy as sa\n'), ((1250, 1263), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1261, 1263), True, 'import sqlalchemy as sa\n'), ((1304, 1338), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1336, 1338), False, 'import sqlmodel\n'), ((1379, 1413), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1411, 1413), False, 'import sqlmodel\n'), ((1456, 1490), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1488, 1490), False, 'import sqlmodel\n'), ((1533, 1567), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1565, 1567), False, 'import sqlmodel\n'), ((1613, 1626), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1624, 1626), True, 'import sqlalchemy as sa\n'), ((1672, 1685), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1683, 1685), True, 'import sqlalchemy as sa\n'), ((1723, 1735), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1733, 1735), True, 'import sqlalchemy as sa\n'), ((1787, 1821), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1819, 1821), False, 'import sqlmodel\n'), ((1865, 1877), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1875, 1877), True, 'import sqlalchemy as sa\n'), ((2302, 2336), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2334, 2336), False, 'import sqlmodel\n'), ((2375, 2387), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2385, 2387), True, 'import sqlalchemy as sa\n'), ((2434, 2447), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (2445, 2447), True, 'import sqlalchemy as sa\n'), ((2493, 2506), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (2504, 2506), True, 'import sqlalchemy as sa\n'), ((2555, 2567), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2565, 2567), True, 'import sqlalchemy as sa\n'), ((3013, 3047), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3045, 3047), False, 'import sqlmodel\n'), ((3086, 3098), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3096, 3098), True, 'import sqlalchemy as sa\n'), ((3145, 3158), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3156, 3158), True, 'import sqlalchemy as sa\n'), ((3204, 3217), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3215, 3217), True, 'import sqlalchemy as sa\n'), ((3266, 3278), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3276, 3278), True, 'import sqlalchemy as sa\n'), ((2252, 2261), 'sqlalchemy.Text', 'sa.Text', ([], {}), 
'()\n', (2259, 2261), True, 'import sqlalchemy as sa\n'), ((2883, 2892), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2890, 2892), True, 'import sqlalchemy as sa\n'), ((2963, 2972), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2970, 2972), True, 'import sqlalchemy as sa\n')]
|
"""
Computational domain for isogeometric analysis.
"""
import os.path as op
import numpy as nm
from sfepy.base.base import Struct
from sfepy.discrete.common.domain import Domain
import sfepy.discrete.iga as iga
import sfepy.discrete.iga.io as io
from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors
class NurbsPatch(Struct):
"""
Single NURBS patch data.
"""
def __init__(self, knots, degrees, cps,
weights, cs, conn):
Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
cps=cps, weights=weights, cs=cs, conn=conn)
self.n_els = [len(ii) for ii in cs]
self.dim = len(self.n_els)
def _get_ref_coors_1d(self, pars, axis):
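        # Find the knot span of each parameter value and its local reference
        # coordinate in [0, 1] within that span.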
uk = nm.unique(self.knots[axis])
indices = nm.searchsorted(uk[1:], pars)
ref_coors = nm.empty_like(pars)
        for ii in range(len(uk) - 1):
ispan = nm.where(indices == ii)[0]
pp = pars[ispan]
ref_coors[ispan] = (pp - uk[ii]) / (uk[ii+1] - uk[ii])
return uk, indices, ref_coors
def __call__(self, u=None, v=None, w=None, field=None):
"""
Igakit-like interface for NURBS evaluation.
"""
pars = [u]
if v is not None: pars += [v]
if w is not None: pars += [w]
indices = []
rcs = []
for ia, par in enumerate(pars):
uk, indx, rc = self._get_ref_coors_1d(par, ia)
indices.append(indx.astype(nm.uint32))
rcs.append(rc)
out = eval_in_tp_coors(field, indices,
rcs, self.cps, self.weights,
self.degrees,
self.cs, self.conn)
return out
def evaluate(self, field, u=None, v=None, w=None):
"""
Igakit-like interface for NURBS evaluation.
"""
return self(u, v, w, field)
class IGDomain(Domain):
"""
Bezier extraction based NURBS domain for isogeometric analysis.
"""
@staticmethod
def from_file(filename):
"""
filename : str
The name of the IGA domain file.
"""
(knots, degrees, cps, weights, cs, conn,
bcps, bweights, bconn, regions) = io.read_iga_data(filename)
nurbs = NurbsPatch(knots, degrees, cps, weights, cs, conn)
bmesh = Struct(name='bmesh', cps=bcps, weights=bweights, conn=bconn)
name = op.splitext(filename)[0]
domain = IGDomain(name, nurbs=nurbs, bmesh=bmesh, regions=regions)
return domain
def __init__(self, name, nurbs, bmesh, regions=None, **kwargs):
"""
Create an IGA domain.
Parameters
----------
        name : str
            The domain name.
        nurbs : NurbsPatch instance
            The NURBS patch data.
        bmesh : Struct
            The Bezier mesh control points, weights and connectivity.
        regions : dict, optional
            The domain regions given as vertex sets of the Bezier mesh.
        """
Domain.__init__(self, name, nurbs=nurbs, bmesh=bmesh, regions=regions,
**kwargs)
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.extmods.cmesh import CMesh
from sfepy.discrete.fem.utils import prepare_remap
ac = nm.ascontiguousarray
self.nurbs.cs = [ac(nm.array(cc, dtype=nm.float64)[:, None, ...])
for cc in self.nurbs.cs]
self.nurbs.degrees = self.nurbs.degrees.astype(nm.int32)
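        # Build an auxiliary FE mesh from the Bezier control points: collect the
        # Bezier facet entities and the tensor-product topology, then renumber
        # the used control points to a contiguous range.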
self.facets = iga.get_bezier_element_entities(nurbs.degrees)
tconn = iga.get_bezier_topology(bmesh.conn, nurbs.degrees)
itc = nm.unique(tconn)
remap = prepare_remap(itc, bmesh.conn.max() + 1)
ltcoors = bmesh.cps[itc]
ltconn = remap[tconn]
n_nod, dim = ltcoors.shape
n_el = ltconn.shape[0]
self.shape = Struct(n_nod=n_nod, dim=dim, tdim=0, n_el=n_el, n_gr=1)
desc = '%d_%d' % (dim, 2**dim)
mat_id = nm.zeros(ltconn.shape[0], dtype=nm.int32)
self.mesh = Mesh.from_data(self.name + '_topo', ltcoors, None, [ltconn],
[mat_id], [desc])
self.cmesh = CMesh.from_mesh(self.mesh)
gels = create_geometry_elements()
self.cmesh.set_local_entities(gels)
self.cmesh.setup_entities()
self.shape.tdim = self.cmesh.tdim
self.gel = gels[desc]
if regions is not None:
self.vertex_set_bcs = {}
            for key, val in self.regions.items():
self.vertex_set_bcs[key] = remap[val]
self.cell_offsets = {0 : 0}
self.reset_regions()
|
[
"sfepy.discrete.iga.io.read_iga_data",
"sfepy.discrete.fem.Mesh.from_data",
"sfepy.base.base.Struct",
"sfepy.discrete.common.domain.Domain.__init__",
"sfepy.discrete.iga.get_bezier_element_entities",
"sfepy.discrete.fem.geometry_element.create_geometry_elements",
"sfepy.discrete.fem.extmods.cmesh.CMesh.from_mesh",
"sfepy.discrete.iga.extmods.igac.eval_in_tp_coors",
"sfepy.base.base.Struct.__init__",
"sfepy.discrete.iga.get_bezier_topology"
] |
[((472, 585), 'sfepy.base.base.Struct.__init__', 'Struct.__init__', (['self'], {'name': '"""nurbs"""', 'knots': 'knots', 'degrees': 'degrees', 'cps': 'cps', 'weights': 'weights', 'cs': 'cs', 'conn': 'conn'}), "(self, name='nurbs', knots=knots, degrees=degrees, cps=cps,\n weights=weights, cs=cs, conn=conn)\n", (487, 585), False, 'from sfepy.base.base import Struct\n'), ((744, 771), 'numpy.unique', 'nm.unique', (['self.knots[axis]'], {}), '(self.knots[axis])\n', (753, 771), True, 'import numpy as nm\n'), ((790, 819), 'numpy.searchsorted', 'nm.searchsorted', (['uk[1:]', 'pars'], {}), '(uk[1:], pars)\n', (805, 819), True, 'import numpy as nm\n'), ((840, 859), 'numpy.empty_like', 'nm.empty_like', (['pars'], {}), '(pars)\n', (853, 859), True, 'import numpy as nm\n'), ((1544, 1643), 'sfepy.discrete.iga.extmods.igac.eval_in_tp_coors', 'eval_in_tp_coors', (['field', 'indices', 'rcs', 'self.cps', 'self.weights', 'self.degrees', 'self.cs', 'self.conn'], {}), '(field, indices, rcs, self.cps, self.weights, self.degrees,\n self.cs, self.conn)\n', (1560, 1643), False, 'from sfepy.discrete.iga.extmods.igac import eval_in_tp_coors\n'), ((2262, 2288), 'sfepy.discrete.iga.io.read_iga_data', 'io.read_iga_data', (['filename'], {}), '(filename)\n', (2278, 2288), True, 'import sfepy.discrete.iga.io as io\n'), ((2373, 2433), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""bmesh"""', 'cps': 'bcps', 'weights': 'bweights', 'conn': 'bconn'}), "(name='bmesh', cps=bcps, weights=bweights, conn=bconn)\n", (2379, 2433), False, 'from sfepy.base.base import Struct\n'), ((2790, 2875), 'sfepy.discrete.common.domain.Domain.__init__', 'Domain.__init__', (['self', 'name'], {'nurbs': 'nurbs', 'bmesh': 'bmesh', 'regions': 'regions'}), '(self, name, nurbs=nurbs, bmesh=bmesh, regions=regions, **kwargs\n )\n', (2805, 2875), False, 'from sfepy.discrete.common.domain import Domain\n'), ((3386, 3432), 'sfepy.discrete.iga.get_bezier_element_entities', 'iga.get_bezier_element_entities', (['nurbs.degrees'], {}), '(nurbs.degrees)\n', (3417, 3432), True, 'import sfepy.discrete.iga as iga\n'), ((3450, 3500), 'sfepy.discrete.iga.get_bezier_topology', 'iga.get_bezier_topology', (['bmesh.conn', 'nurbs.degrees'], {}), '(bmesh.conn, nurbs.degrees)\n', (3473, 3500), True, 'import sfepy.discrete.iga as iga\n'), ((3515, 3531), 'numpy.unique', 'nm.unique', (['tconn'], {}), '(tconn)\n', (3524, 3531), True, 'import numpy as nm\n'), ((3742, 3797), 'sfepy.base.base.Struct', 'Struct', ([], {'n_nod': 'n_nod', 'dim': 'dim', 'tdim': '(0)', 'n_el': 'n_el', 'n_gr': '(1)'}), '(n_nod=n_nod, dim=dim, tdim=0, n_el=n_el, n_gr=1)\n', (3748, 3797), False, 'from sfepy.base.base import Struct\n'), ((3855, 3896), 'numpy.zeros', 'nm.zeros', (['ltconn.shape[0]'], {'dtype': 'nm.int32'}), '(ltconn.shape[0], dtype=nm.int32)\n', (3863, 3896), True, 'import numpy as nm\n'), ((3917, 3995), 'sfepy.discrete.fem.Mesh.from_data', 'Mesh.from_data', (["(self.name + '_topo')", 'ltcoors', 'None', '[ltconn]', '[mat_id]', '[desc]'], {}), "(self.name + '_topo', ltcoors, None, [ltconn], [mat_id], [desc])\n", (3931, 3995), False, 'from sfepy.discrete.fem import Mesh\n'), ((4053, 4079), 'sfepy.discrete.fem.extmods.cmesh.CMesh.from_mesh', 'CMesh.from_mesh', (['self.mesh'], {}), '(self.mesh)\n', (4068, 4079), False, 'from sfepy.discrete.fem.extmods.cmesh import CMesh\n'), ((4095, 4121), 'sfepy.discrete.fem.geometry_element.create_geometry_elements', 'create_geometry_elements', ([], {}), '()\n', (4119, 4121), False, 'from sfepy.discrete.fem.geometry_element import 
create_geometry_elements\n'), ((2450, 2471), 'os.path.splitext', 'op.splitext', (['filename'], {}), '(filename)\n', (2461, 2471), True, 'import os.path as op\n'), ((919, 942), 'numpy.where', 'nm.where', (['(indices == ii)'], {}), '(indices == ii)\n', (927, 942), True, 'import numpy as nm\n'), ((3201, 3231), 'numpy.array', 'nm.array', (['cc'], {'dtype': 'nm.float64'}), '(cc, dtype=nm.float64)\n', (3209, 3231), True, 'import numpy as nm\n')]
|
#!/usr/bin/env python3
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
import megengine as mge
import megengine.module as M
import numpy as np
import pytest
import torch
import torch.nn as nn
from basecls.configs import BaseConfig
from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss
@pytest.mark.parametrize("name", [CrossEntropy, "BinaryCrossEntropy", "CrossEntropy"])
def test_build_loss(name):
cfg = BaseConfig(loss=dict(name=name))
m = build_loss(cfg)
assert isinstance(m, M.Module)
def test_bce():
x = np.random.rand(2, 8, 4).astype("float32")
y = np.random.rand(2, 8, 4).astype("float32")
ml = BinaryCrossEntropy()(mge.Tensor(x), mge.Tensor(y)).numpy()
tl = nn.BCEWithLogitsLoss()(torch.tensor(x), torch.tensor(y)).numpy()
np.testing.assert_allclose(ml, tl, rtol=1e-4, atol=1e-6)
def test_ce():
K = 4
x = np.random.rand(2, 8, K).astype("float32")
y = np.random.randint(K, size=(2, 8)).astype("int32")
oy = np.eye(K, dtype="int32")[y]
ml = CrossEntropy(axis=2)(mge.Tensor(x), mge.Tensor(y)).numpy()
tl = nn.CrossEntropyLoss()(
torch.tensor(x).reshape(-1, K), torch.tensor(y).flatten().long()
).numpy()
np.testing.assert_allclose(ml, tl, rtol=1e-4, atol=1e-6)
# one hot
ol = CrossEntropy(axis=2)(mge.Tensor(x), mge.Tensor(oy)).numpy()
np.testing.assert_allclose(ml, ol, rtol=1e-4, atol=1e-6)
# label smoothing
ml = CrossEntropy(axis=2, label_smooth=0.1)(mge.Tensor(x), mge.Tensor(y)).numpy()
ol = CrossEntropy(axis=2, label_smooth=0.1)(mge.Tensor(x), mge.Tensor(oy)).numpy()
np.testing.assert_allclose(ml, ol, rtol=1e-4, atol=1e-6)
|
[
"megengine.Tensor"
] |
[((318, 407), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""name"""', "[CrossEntropy, 'BinaryCrossEntropy', 'CrossEntropy']"], {}), "('name', [CrossEntropy, 'BinaryCrossEntropy',\n 'CrossEntropy'])\n", (341, 407), False, 'import pytest\n'), ((483, 498), 'basecls.layers.build_loss', 'build_loss', (['cfg'], {}), '(cfg)\n', (493, 498), False, 'from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss\n'), ((799, 858), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['ml', 'tl'], {'rtol': '(0.0001)', 'atol': '(1e-06)'}), '(ml, tl, rtol=0.0001, atol=1e-06)\n', (825, 858), True, 'import numpy as np\n'), ((1221, 1280), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['ml', 'tl'], {'rtol': '(0.0001)', 'atol': '(1e-06)'}), '(ml, tl, rtol=0.0001, atol=1e-06)\n', (1247, 1280), True, 'import numpy as np\n'), ((1366, 1425), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['ml', 'ol'], {'rtol': '(0.0001)', 'atol': '(1e-06)'}), '(ml, ol, rtol=0.0001, atol=1e-06)\n', (1392, 1425), True, 'import numpy as np\n'), ((1623, 1682), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['ml', 'ol'], {'rtol': '(0.0001)', 'atol': '(1e-06)'}), '(ml, ol, rtol=0.0001, atol=1e-06)\n', (1649, 1682), True, 'import numpy as np\n'), ((1001, 1025), 'numpy.eye', 'np.eye', (['K'], {'dtype': '"""int32"""'}), "(K, dtype='int32')\n", (1007, 1025), True, 'import numpy as np\n'), ((560, 583), 'numpy.random.rand', 'np.random.rand', (['(2)', '(8)', '(4)'], {}), '(2, 8, 4)\n', (574, 583), True, 'import numpy as np\n'), ((610, 633), 'numpy.random.rand', 'np.random.rand', (['(2)', '(8)', '(4)'], {}), '(2, 8, 4)\n', (624, 633), True, 'import numpy as np\n'), ((892, 915), 'numpy.random.rand', 'np.random.rand', (['(2)', '(8)', 'K'], {}), '(2, 8, K)\n', (906, 915), True, 'import numpy as np\n'), ((942, 975), 'numpy.random.randint', 'np.random.randint', (['K'], {'size': '(2, 8)'}), '(K, size=(2, 8))\n', (959, 975), True, 'import numpy as np\n'), ((662, 682), 'basecls.layers.BinaryCrossEntropy', 'BinaryCrossEntropy', ([], {}), '()\n', (680, 682), False, 'from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss\n'), ((683, 696), 'megengine.Tensor', 'mge.Tensor', (['x'], {}), '(x)\n', (693, 696), True, 'import megengine as mge\n'), ((698, 711), 'megengine.Tensor', 'mge.Tensor', (['y'], {}), '(y)\n', (708, 711), True, 'import megengine as mge\n'), ((730, 752), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (750, 752), True, 'import torch.nn as nn\n'), ((753, 768), 'torch.tensor', 'torch.tensor', (['x'], {}), '(x)\n', (765, 768), False, 'import torch\n'), ((770, 785), 'torch.tensor', 'torch.tensor', (['y'], {}), '(y)\n', (782, 785), False, 'import torch\n'), ((1039, 1059), 'basecls.layers.CrossEntropy', 'CrossEntropy', ([], {'axis': '(2)'}), '(axis=2)\n', (1051, 1059), False, 'from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss\n'), ((1060, 1073), 'megengine.Tensor', 'mge.Tensor', (['x'], {}), '(x)\n', (1070, 1073), True, 'import megengine as mge\n'), ((1075, 1088), 'megengine.Tensor', 'mge.Tensor', (['y'], {}), '(y)\n', (1085, 1088), True, 'import megengine as mge\n'), ((1107, 1128), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1126, 1128), True, 'import torch.nn as nn\n'), ((1302, 1322), 'basecls.layers.CrossEntropy', 'CrossEntropy', ([], {'axis': '(2)'}), '(axis=2)\n', (1314, 1322), False, 'from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss\n'), 
((1323, 1336), 'megengine.Tensor', 'mge.Tensor', (['x'], {}), '(x)\n', (1333, 1336), True, 'import megengine as mge\n'), ((1338, 1352), 'megengine.Tensor', 'mge.Tensor', (['oy'], {}), '(oy)\n', (1348, 1352), True, 'import megengine as mge\n'), ((1455, 1493), 'basecls.layers.CrossEntropy', 'CrossEntropy', ([], {'axis': '(2)', 'label_smooth': '(0.1)'}), '(axis=2, label_smooth=0.1)\n', (1467, 1493), False, 'from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss\n'), ((1494, 1507), 'megengine.Tensor', 'mge.Tensor', (['x'], {}), '(x)\n', (1504, 1507), True, 'import megengine as mge\n'), ((1509, 1522), 'megengine.Tensor', 'mge.Tensor', (['y'], {}), '(y)\n', (1519, 1522), True, 'import megengine as mge\n'), ((1541, 1579), 'basecls.layers.CrossEntropy', 'CrossEntropy', ([], {'axis': '(2)', 'label_smooth': '(0.1)'}), '(axis=2, label_smooth=0.1)\n', (1553, 1579), False, 'from basecls.layers import BinaryCrossEntropy, CrossEntropy, build_loss\n'), ((1580, 1593), 'megengine.Tensor', 'mge.Tensor', (['x'], {}), '(x)\n', (1590, 1593), True, 'import megengine as mge\n'), ((1595, 1609), 'megengine.Tensor', 'mge.Tensor', (['oy'], {}), '(oy)\n', (1605, 1609), True, 'import megengine as mge\n'), ((1138, 1153), 'torch.tensor', 'torch.tensor', (['x'], {}), '(x)\n', (1150, 1153), False, 'import torch\n'), ((1170, 1185), 'torch.tensor', 'torch.tensor', (['y'], {}), '(y)\n', (1182, 1185), False, 'import torch\n')]
|
#!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
from argparse import ArgumentParser
import numpy as nm
import sys
sys.path.append('.')
from sfepy.base.base import IndexedStruct
from sfepy.discrete import (FieldVariable, Material, Integral, Function,
Equation, Equations, Problem)
from sfepy.discrete.fem import Mesh, FEDomain, Field
from sfepy.terms import Term
from sfepy.discrete.conditions import Conditions, EssentialBC
from sfepy.solvers.ls import ScipyDirect
from sfepy.solvers.nls import Newton
from sfepy.postprocess.viewer import Viewer
from sfepy.mechanics.matcoefs import stiffness_from_lame
def shift_u_fun(ts, coors, bc=None, problem=None, shift=0.0):
"""
Define a displacement depending on the y coordinate.
"""
val = shift * coors[:,1]**2
return val
helps = {
'show' : 'show the results figure',
}
def main():
from sfepy import data_dir
parser = ArgumentParser()
parser.add_argument('--version', action='version', version='%(prog)s')
parser.add_argument('-s', '--show',
action="store_true", dest='show',
default=False, help=helps['show'])
options = parser.parse_args()
mesh = Mesh.from_file(data_dir + '/meshes/2d/rectangle_tri.mesh')
domain = FEDomain('domain', mesh)
min_x, max_x = domain.get_mesh_bounding_box()[:,0]
eps = 1e-8 * (max_x - min_x)
omega = domain.create_region('Omega', 'all')
gamma1 = domain.create_region('Gamma1',
'vertices in x < %.10f' % (min_x + eps),
'facet')
gamma2 = domain.create_region('Gamma2',
'vertices in x > %.10f' % (max_x - eps),
'facet')
field = Field.from_args('fu', nm.float64, 'vector', omega,
approx_order=2)
u = FieldVariable('u', 'unknown', field)
v = FieldVariable('v', 'test', field, primary_var_name='u')
m = Material('m', D=stiffness_from_lame(dim=2, lam=1.0, mu=1.0))
f = Material('f', val=[[0.02], [0.01]])
integral = Integral('i', order=3)
t1 = Term.new('dw_lin_elastic(m.D, v, u)',
integral, omega, m=m, v=v, u=u)
t2 = Term.new('dw_volume_lvf(f.val, v)', integral, omega, f=f, v=v)
eq = Equation('balance', t1 + t2)
eqs = Equations([eq])
fix_u = EssentialBC('fix_u', gamma1, {'u.all' : 0.0})
bc_fun = Function('shift_u_fun', shift_u_fun,
extra_args={'shift' : 0.01})
shift_u = EssentialBC('shift_u', gamma2, {'u.0' : bc_fun})
ls = ScipyDirect({})
nls_status = IndexedStruct()
nls = Newton({}, lin_solver=ls, status=nls_status)
pb = Problem('elasticity', equations=eqs, nls=nls, ls=ls)
pb.save_regions_as_groups('regions')
pb.time_update(ebcs=Conditions([fix_u, shift_u]))
vec = pb.solve()
print(nls_status)
pb.save_state('linear_elasticity.vtk', vec)
if options.show:
view = Viewer('linear_elasticity.vtk')
view(vector_mode='warp_norm', rel_scaling=2,
is_scalar_bar=True, is_wireframe=True)
if __name__ == '__main__':
main()
|
[
"sfepy.discrete.conditions.EssentialBC",
"sfepy.discrete.Integral",
"sfepy.postprocess.viewer.Viewer",
"sfepy.solvers.ls.ScipyDirect",
"sfepy.discrete.Equations",
"sfepy.discrete.fem.Field.from_args",
"sfepy.discrete.Equation",
"sfepy.discrete.fem.Mesh.from_file",
"sfepy.discrete.fem.FEDomain",
"sfepy.discrete.Function",
"sfepy.mechanics.matcoefs.stiffness_from_lame",
"sfepy.terms.Term.new",
"sfepy.discrete.conditions.Conditions",
"sfepy.discrete.FieldVariable",
"sfepy.discrete.Material",
"sfepy.discrete.Problem",
"sfepy.base.base.IndexedStruct",
"sfepy.solvers.nls.Newton"
] |
[((166, 186), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (181, 186), False, 'import sys\n'), ((980, 996), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (994, 996), False, 'from argparse import ArgumentParser\n'), ((1275, 1333), 'sfepy.discrete.fem.Mesh.from_file', 'Mesh.from_file', (["(data_dir + '/meshes/2d/rectangle_tri.mesh')"], {}), "(data_dir + '/meshes/2d/rectangle_tri.mesh')\n", (1289, 1333), False, 'from sfepy.discrete.fem import Mesh, FEDomain, Field\n'), ((1347, 1371), 'sfepy.discrete.fem.FEDomain', 'FEDomain', (['"""domain"""', 'mesh'], {}), "('domain', mesh)\n", (1355, 1371), False, 'from sfepy.discrete.fem import Mesh, FEDomain, Field\n'), ((1847, 1913), 'sfepy.discrete.fem.Field.from_args', 'Field.from_args', (['"""fu"""', 'nm.float64', '"""vector"""', 'omega'], {'approx_order': '(2)'}), "('fu', nm.float64, 'vector', omega, approx_order=2)\n", (1862, 1913), False, 'from sfepy.discrete.fem import Mesh, FEDomain, Field\n'), ((1951, 1987), 'sfepy.discrete.FieldVariable', 'FieldVariable', (['"""u"""', '"""unknown"""', 'field'], {}), "('u', 'unknown', field)\n", (1964, 1987), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((1996, 2051), 'sfepy.discrete.FieldVariable', 'FieldVariable', (['"""v"""', '"""test"""', 'field'], {'primary_var_name': '"""u"""'}), "('v', 'test', field, primary_var_name='u')\n", (2009, 2051), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((2130, 2165), 'sfepy.discrete.Material', 'Material', (['"""f"""'], {'val': '[[0.02], [0.01]]'}), "('f', val=[[0.02], [0.01]])\n", (2138, 2165), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((2182, 2204), 'sfepy.discrete.Integral', 'Integral', (['"""i"""'], {'order': '(3)'}), "('i', order=3)\n", (2190, 2204), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((2215, 2284), 'sfepy.terms.Term.new', 'Term.new', (['"""dw_lin_elastic(m.D, v, u)"""', 'integral', 'omega'], {'m': 'm', 'v': 'v', 'u': 'u'}), "('dw_lin_elastic(m.D, v, u)', integral, omega, m=m, v=v, u=u)\n", (2223, 2284), False, 'from sfepy.terms import Term\n'), ((2312, 2374), 'sfepy.terms.Term.new', 'Term.new', (['"""dw_volume_lvf(f.val, v)"""', 'integral', 'omega'], {'f': 'f', 'v': 'v'}), "('dw_volume_lvf(f.val, v)', integral, omega, f=f, v=v)\n", (2320, 2374), False, 'from sfepy.terms import Term\n'), ((2384, 2412), 'sfepy.discrete.Equation', 'Equation', (['"""balance"""', '(t1 + t2)'], {}), "('balance', t1 + t2)\n", (2392, 2412), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((2423, 2438), 'sfepy.discrete.Equations', 'Equations', (['[eq]'], {}), '([eq])\n', (2432, 2438), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((2452, 2496), 'sfepy.discrete.conditions.EssentialBC', 'EssentialBC', (['"""fix_u"""', 'gamma1', "{'u.all': 0.0}"], {}), "('fix_u', gamma1, {'u.all': 0.0})\n", (2463, 2496), False, 'from sfepy.discrete.conditions import Conditions, EssentialBC\n'), ((2512, 2576), 'sfepy.discrete.Function', 'Function', (['"""shift_u_fun"""', 'shift_u_fun'], {'extra_args': "{'shift': 0.01}"}), "('shift_u_fun', shift_u_fun, extra_args={'shift': 0.01})\n", (2520, 2576), False, 'from sfepy.discrete import FieldVariable, Material, 
Integral, Function, Equation, Equations, Problem\n'), ((2614, 2661), 'sfepy.discrete.conditions.EssentialBC', 'EssentialBC', (['"""shift_u"""', 'gamma2', "{'u.0': bc_fun}"], {}), "('shift_u', gamma2, {'u.0': bc_fun})\n", (2625, 2661), False, 'from sfepy.discrete.conditions import Conditions, EssentialBC\n'), ((2673, 2688), 'sfepy.solvers.ls.ScipyDirect', 'ScipyDirect', (['{}'], {}), '({})\n', (2684, 2688), False, 'from sfepy.solvers.ls import ScipyDirect\n'), ((2707, 2722), 'sfepy.base.base.IndexedStruct', 'IndexedStruct', ([], {}), '()\n', (2720, 2722), False, 'from sfepy.base.base import IndexedStruct\n'), ((2733, 2777), 'sfepy.solvers.nls.Newton', 'Newton', (['{}'], {'lin_solver': 'ls', 'status': 'nls_status'}), '({}, lin_solver=ls, status=nls_status)\n', (2739, 2777), False, 'from sfepy.solvers.nls import Newton\n'), ((2788, 2840), 'sfepy.discrete.Problem', 'Problem', (['"""elasticity"""'], {'equations': 'eqs', 'nls': 'nls', 'ls': 'ls'}), "('elasticity', equations=eqs, nls=nls, ls=ls)\n", (2795, 2840), False, 'from sfepy.discrete import FieldVariable, Material, Integral, Function, Equation, Equations, Problem\n'), ((3067, 3098), 'sfepy.postprocess.viewer.Viewer', 'Viewer', (['"""linear_elasticity.vtk"""'], {}), "('linear_elasticity.vtk')\n", (3073, 3098), False, 'from sfepy.postprocess.viewer import Viewer\n'), ((2077, 2120), 'sfepy.mechanics.matcoefs.stiffness_from_lame', 'stiffness_from_lame', ([], {'dim': '(2)', 'lam': '(1.0)', 'mu': '(1.0)'}), '(dim=2, lam=1.0, mu=1.0)\n', (2096, 2120), False, 'from sfepy.mechanics.matcoefs import stiffness_from_lame\n'), ((2907, 2935), 'sfepy.discrete.conditions.Conditions', 'Conditions', (['[fix_u, shift_u]'], {}), '([fix_u, shift_u])\n', (2917, 2935), False, 'from sfepy.discrete.conditions import Conditions, EssentialBC\n')]
|
from typing import Optional
from sqlmodel import Field, SQLModel, create_engine
class Student(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
first_name: str
last_name: str
email: str
# dob:
sqlite_url = "sqlite:///school.db"
engine = create_engine(sqlite_url)
SQLModel.metadata.create_all(engine)
|
[
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field",
"sqlmodel.create_engine"
] |
[((294, 319), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {}), '(sqlite_url)\n', (307, 319), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((320, 356), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (348, 356), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((144, 181), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (149, 181), False, 'from sqlmodel import Field, SQLModel, create_engine\n')]
|
from sqlmodel import Session, func, text, SQLModel, select
from typing import Any, List, Literal, Optional, Type, TypeVar, Generic
ReadType = TypeVar("ReadType", bound=SQLModel)
CreateType = TypeVar("CreateType", bound=SQLModel)
UpdateType = TypeVar("UpdateType", bound=SQLModel)
class BaseRepository(Generic[ReadType, CreateType, UpdateType]):
entity: Type[ReadType]
def __init__(self, db: Session):
self.db = db
def get_entity(self, *args: Any, **kwargs: Any) -> Optional[ReadType]:
result = self.db.exec(
select(self.entity)
.filter(*args)
.filter_by(**kwargs)
)
return result.first()
def get_entities(self, *args: Any, offset: int = 0, limit: int = 100, order_by: str = 'id', order: Literal['desc', 'asc'] = 'asc', **kwargs: Any) -> List[ReadType]:
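        # Ordering is applied as a raw SQL text() fragment built from order_by/order.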
result = self.db.exec(
select(self.entity)
.filter(*args)
.filter_by(**kwargs)
.offset(offset)
.limit(limit)
.order_by(text(f"{order_by} {order}"))
)
return result.all()
def count_entities(self) -> int:
return self.db.query(func.count(self.entity.id)).scalar()
def create_entity(self, entity: CreateType) -> ReadType:
entity = self.entity.from_orm(entity)
self.db.add(entity)
self.db.commit()
self.db.refresh(entity)
return entity
    def create_entities(self, entities: List[CreateType]) -> List[ReadType]:
entities_input = []
for entity in entities:
entity = self.entity.from_orm(entity)
entities_input.append(entity)
self.db.add_all(entities_input)
self.db.commit()
entities_output = []
        # Refresh the persisted instances (not the raw inputs) to load DB-generated fields.
        for entity in entities_input:
self.db.refresh(entity)
entities_output.append(entity)
return entities_output
def delete_entity(self, *args, **kwargs) -> bool:
try:
entity = self.get_entity(*args, **kwargs)
if entity is None:
return False
self.db.delete(entity)
self.db.commit()
return True
except Exception:
return False
def delete_entities(self, *args, **kwargs) -> bool:
try:
entities = self.get_entities(*args, **kwargs)
if entities is None:
return False
for entity in entities:
self.db.delete(entity)
self.db.commit()
return True
except Exception:
return False
    def update_entity(self, data: UpdateType, *args, **kwargs) -> Optional[ReadType]:
entity = self.get_entity(*args, **kwargs)
if entity is None:
return None
data = data.dict(exclude_unset=True)
for key, value in data.items():
setattr(entity, key, value)
self.db.add(entity)
self.db.commit()
self.db.refresh(entity)
return entity
    def update_entities(self, data: UpdateType, *args, **kwargs) -> Optional[List[ReadType]]:
entities = self.get_entities(*args, **kwargs)
if entities is None:
return None
data = data.dict(exclude_unset=True)
for entity in entities:
for key, value in data.items():
setattr(entity, key, value)
self.db.add(entity)
self.db.commit()
for entity in entities:
self.db.refresh(entity)
return entities
def update_entity_changes(self, entity: ReadType) -> ReadType:
self.db.add(entity)
self.db.commit()
self.db.refresh(entity)
return entity
|
[
"sqlmodel.func.count",
"sqlmodel.text",
"sqlmodel.select"
] |
[((143, 178), 'typing.TypeVar', 'TypeVar', (['"""ReadType"""'], {'bound': 'SQLModel'}), "('ReadType', bound=SQLModel)\n", (150, 178), False, 'from typing import Any, List, Literal, Optional, Type, TypeVar, Generic\n'), ((192, 229), 'typing.TypeVar', 'TypeVar', (['"""CreateType"""'], {'bound': 'SQLModel'}), "('CreateType', bound=SQLModel)\n", (199, 229), False, 'from typing import Any, List, Literal, Optional, Type, TypeVar, Generic\n'), ((243, 280), 'typing.TypeVar', 'TypeVar', (['"""UpdateType"""'], {'bound': 'SQLModel'}), "('UpdateType', bound=SQLModel)\n", (250, 280), False, 'from typing import Any, List, Literal, Optional, Type, TypeVar, Generic\n'), ((1043, 1070), 'sqlmodel.text', 'text', (['f"""{order_by} {order}"""'], {}), "(f'{order_by} {order}')\n", (1047, 1070), False, 'from sqlmodel import Session, func, text, SQLModel, select\n'), ((1178, 1204), 'sqlmodel.func.count', 'func.count', (['self.entity.id'], {}), '(self.entity.id)\n', (1188, 1204), False, 'from sqlmodel import Session, func, text, SQLModel, select\n'), ((553, 572), 'sqlmodel.select', 'select', (['self.entity'], {}), '(self.entity)\n', (559, 572), False, 'from sqlmodel import Session, func, text, SQLModel, select\n'), ((887, 906), 'sqlmodel.select', 'select', (['self.entity'], {}), '(self.entity)\n', (893, 906), False, 'from sqlmodel import Session, func, text, SQLModel, select\n')]
|
from sqlmodel import Session, select
from database import UserRead, PostCreate, UserCreate, User, Post
from typing import Union
from datetime import datetime
def create_object(
session: Session,
model: Union[User, Post],
request_data: Union[UserCreate, PostCreate],
user: UserRead = None,
isPost: bool = False,
) -> Union[User, Post]:
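    # For posts, stamp the author name and the creation/update timestamps before persisting.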
if isPost:
setattr(request_data, "author_name", user.name)
db_object = model.from_orm(request_data)
if isPost:
setattr(db_object, "updated_at", datetime.utcnow())
setattr(db_object, "created_at", datetime.utcnow())
session.add(db_object)
session.commit()
session.refresh(db_object)
return db_object
def get_objects(
session: Session, model: Union[User, Post], offset: int, limit: int
) -> list:
objects = session.exec(select(model).offset(offset).limit(limit)).all()
return objects
def get_object(
session: Session,
model: Union[User, Post],
criteria: Union[int, str],
isUser: bool = False,
) -> Union[User, Post]:
if isUser:
statement = select(model).where(model.email == criteria)
results = session.exec(statement)
user = results.first()
if not user:
raise Exception("User not found")
return user
    return session.get(model, criteria)
def patch_object(
session: Session,
old_object: Union[User, Post],
request_data: dict,
isPost: bool = False,
) -> Union[User, Post]:
for key, value in request_data.items():
setattr(old_object, key, value)
if isPost:
setattr(old_object, "updated_at", datetime.utcnow())
session.add(old_object)
session.commit()
session.refresh(old_object)
return old_object
def delete_object(session: Session, object_: Union[User, Post]) -> dict:
session.delete(object_)
session.commit()
return {"ok": True}
|
[
"sqlmodel.select"
] |
[((574, 591), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (589, 591), False, 'from datetime import datetime\n'), ((518, 535), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (533, 535), False, 'from datetime import datetime\n'), ((1628, 1645), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1643, 1645), False, 'from datetime import datetime\n'), ((1076, 1089), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (1082, 1089), False, 'from sqlmodel import Session, select\n'), ((822, 835), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (828, 835), False, 'from sqlmodel import Session, select\n')]
|
import hypothesis.strategies as st
from hypothesis import given
from hypothesis.strategies import DataObject
from sqlmodel import Session, select
from fastapi_server.models.user import User
from fastapi_server.test.base_test import BaseTest
class TestDatabase(BaseTest):
def test_user_add_single(self, method_session_fixture: Session):
session = method_session_fixture
assert isinstance(session, Session)
username = 'asd'
email = '<EMAIL>'
password = '<PASSWORD>'
assert session.exec(select(User)).all() == []
session.add(
User(
username=username,
email=email,
password_hashed=password,
is_admin=False,
is_disabled=False,
is_verified=False,
)
)
session.commit()
assert session.exec(select(User)).all() != []
@given(data=st.data())
def test_user_add_multiple(self, data: DataObject):
username = data.draw(st.from_regex('[a-zA-Z0-9]{1,20}', fullmatch=True))
email = data.draw(st.from_regex('[a-zA-Z]{1,20}@gmailcom', fullmatch=True))
password = data.draw(st.from_regex('[a-zA-Z0-9]{1,20}', fullmatch=True))
with self.example_session_context() as session:
assert session.exec(select(User)).all() == []
session.add(
User(
username=username,
email=email,
password_hashed=password,
is_admin=False,
is_disabled=False,
is_verified=False,
)
)
session.commit()
assert session.exec(select(User)).all() != []
|
[
"sqlmodel.select"
] |
[((599, 720), 'fastapi_server.models.user.User', 'User', ([], {'username': 'username', 'email': 'email', 'password_hashed': 'password', 'is_admin': '(False)', 'is_disabled': '(False)', 'is_verified': '(False)'}), '(username=username, email=email, password_hashed=password, is_admin=\n False, is_disabled=False, is_verified=False)\n', (603, 720), False, 'from fastapi_server.models.user import User\n'), ((1029, 1079), 'hypothesis.strategies.from_regex', 'st.from_regex', (['"""[a-zA-Z0-9]{1,20}"""'], {'fullmatch': '(True)'}), "('[a-zA-Z0-9]{1,20}', fullmatch=True)\n", (1042, 1079), True, 'import hypothesis.strategies as st\n'), ((1107, 1163), 'hypothesis.strategies.from_regex', 'st.from_regex', (['"""[a-zA-Z]{1,20}@gmailcom"""'], {'fullmatch': '(True)'}), "('[a-zA-Z]{1,20}@gmailcom', fullmatch=True)\n", (1120, 1163), True, 'import hypothesis.strategies as st\n'), ((1194, 1244), 'hypothesis.strategies.from_regex', 'st.from_regex', (['"""[a-zA-Z0-9]{1,20}"""'], {'fullmatch': '(True)'}), "('[a-zA-Z0-9]{1,20}', fullmatch=True)\n", (1207, 1244), True, 'import hypothesis.strategies as st\n'), ((933, 942), 'hypothesis.strategies.data', 'st.data', ([], {}), '()\n', (940, 942), True, 'import hypothesis.strategies as st\n'), ((1401, 1522), 'fastapi_server.models.user.User', 'User', ([], {'username': 'username', 'email': 'email', 'password_hashed': 'password', 'is_admin': '(False)', 'is_disabled': '(False)', 'is_verified': '(False)'}), '(username=username, email=email, password_hashed=password, is_admin=\n False, is_disabled=False, is_verified=False)\n', (1405, 1522), False, 'from fastapi_server.models.user import User\n'), ((540, 552), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (546, 552), False, 'from sqlmodel import Session, select\n'), ((890, 902), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (896, 902), False, 'from sqlmodel import Session, select\n'), ((1334, 1346), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1340, 1346), False, 'from sqlmodel import Session, select\n'), ((1732, 1744), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1738, 1744), False, 'from sqlmodel import Session, select\n')]
|
import os
import asyncio
import subprocess
import contextlib
import contextvars
from typing import *
import wait4it
import sqlmodel
from sqlmodel import Session
from aioify import aioify
from twitterscraper.models.domain import TwitterProfile, TwitterTweet, JobHistoric
from twitterscraper.utils import Singleton
class Repository(Singleton):
# https://sqlmodel.tiangolo.com/
get: Callable[..., "Repository"]
_session_contextvar: contextvars.ContextVar[Optional[Session]]
def __init__(self, uri: str):
self._engine = sqlmodel.create_engine(uri)
self._session_contextvar = contextvars.ContextVar("session", default=None)
# TODO implement pool_pre_ping & pool_recycle_time
# Fix for https://github.com/tiangolo/sqlmodel/issues/189#issuecomment-1025190094
from sqlmodel.sql.expression import Select, SelectOfScalar
SelectOfScalar.inherit_cache = True # type: ignore
Select.inherit_cache = True # type: ignore
def _new_session(self) -> Session:
session = Session(bind=self._engine)
self._session_contextvar.set(session)
return session
def _get_context_session(self) -> Optional[Session]:
return self._session_contextvar.get()
def _clear_context_session(self):
self._session_contextvar.set(None)
@contextlib.contextmanager
def session(self) -> Session:
"""Contextmanager that wraps code behind a database transaction (session).
Any error during the execution rolls back the transaction, so any data saved is not persisted."""
session = self._get_context_session()
if session is not None:
# Another session() contextmanager is already running; let it handle the commit/rollback
yield session
return
session = self._new_session()
print("New session")
try:
yield session
print("Session Commit")
session.commit()
except Exception as ex:
print("Session Rollback")
session.rollback()
raise ex
finally:
print("Session Close")
session.close()
self._clear_context_session()
@contextlib.asynccontextmanager
async def session_async(self) -> Session:
"""Contextmanager that wraps code behind a database transaction (session).
Any error during the execution rolls back the transaction, so any data saved is not persisted."""
session = self._get_context_session()
if session is not None:
# Another session() contextmanager is already running; let it handle the commit/rollback
yield session
return
session = self._new_session()
print("New session")
try:
yield session
print("Session Commit")
await aioify(session.commit)()
except Exception as ex:
print("Session Rollback")
await aioify(session.rollback)()
raise ex
finally:
print("Session Close")
await aioify(session.close)()
self._clear_context_session()
def save_object(self, obj: sqlmodel.SQLModel, flush: bool = False):
"""Save or update any SQLModel object instance"""
with self.session() as session:
session.add(obj)
if flush:
session.flush([obj])
async def save_object_async(self, *obj: sqlmodel.SQLModel, flush: bool = False):
# return await aioify(self.save_object)(obj, flush)
async with self.session_async() as session:
await asyncio.gather(*[aioify(session.add)(_obj) for _obj in obj])
if flush:
await aioify(session.flush)(obj)
def delete_object(self, obj: sqlmodel.SQLModel, flush: bool = False):
with self.session() as session:
session.delete(obj)
if flush:
session.flush([obj])
async def delete_object_async(self, obj: sqlmodel.SQLModel, flush: bool = False):
async with self.session_async() as session:
await aioify(session.delete)(obj)
if flush:
await aioify(session.flush)([obj])
def list_profiles(self) -> List[TwitterProfile]:
with self.session() as session:
query = sqlmodel.select(TwitterProfile)
return session.exec(query).all()
async def list_profiles_async(self, filter_active_profiles: Optional[bool] = None) -> List[TwitterProfile]:
async with self.session_async() as session:
query = sqlmodel.select(TwitterProfile)
if filter_active_profiles is not None:
query = query.where(TwitterProfile.active == filter_active_profiles)
result = await aioify(session.exec)(query)
return await aioify(result.all)()
def get_profile_by_userid(self, userid: str) -> TwitterProfile:
# TODO Deprecate
with self.session() as session:
query = sqlmodel.select(TwitterProfile).where(TwitterProfile.userid == userid)
return session.exec(query).one()
async def get_profile_by_userid_async(self, userid: str) -> TwitterProfile:
# TODO Deprecate
async with self.session_async() as session:
query = sqlmodel.select(TwitterProfile).where(TwitterProfile.userid == userid)
result = await aioify(session.exec)(query)
return await aioify(result.one)()
async def get_profile_by(self, userid: Optional[str] = None, username: Optional[str] = None) -> TwitterProfile:
async with self.session_async() as session:
query = sqlmodel.select(TwitterProfile)
if userid:
query = query.where(TwitterProfile.userid == userid)
if username:
query = query.where(TwitterProfile.username == username)
result = await aioify(session.exec)(query)
return await aioify(result.one)()
# noinspection PyComparisonWithNone
async def tweets_iterator(
self,
batch_size: int,
userid: Optional[str] = None,
username: Optional[str] = None,
from_ts: Optional[int] = None,
to_ts: Optional[int] = None,
filter_active_profiles: Optional[bool] = None,
filter_active_tweets: Optional[bool] = None,
tweets_ids: Optional[List[str]] = None,
) -> AsyncIterable[List[TwitterTweet]]:
# TODO Deprecate, iterator seems like no longer needed
# TODO Any way for making the generator async?
async with self.session_async() as session:
query = sqlmodel.select(TwitterTweet).join(TwitterProfile)
if filter_active_profiles is not None:
query = query.where(TwitterProfile.active == filter_active_profiles)
if filter_active_tweets is True:
query = query.where(TwitterTweet.deletion_detected_timestamp == None)
elif filter_active_tweets is False:
query = query.where(TwitterTweet.deletion_detected_timestamp != None)
if userid is not None:
query = query.where(TwitterProfile.userid == userid)
if username is not None:
query = query.where(TwitterProfile.username == username)
if from_ts is not None:
query = query.where(TwitterTweet.timestamp >= from_ts)
if to_ts is not None:
query = query.where(TwitterTweet.timestamp < to_ts)
if tweets_ids is not None:
# noinspection PyUnresolvedReferences
query = query.filter(TwitterTweet.tweet_id.in_(tweets_ids))
query = query.execution_options(stream_results=True)
result = session.exec(query)
for partition in result.partitions(batch_size):
yield partition
async def get_tweets(
self,
**kwargs
) -> List[TwitterTweet]:
tweets = list()
async for tweets_batch in self.tweets_iterator(batch_size=50, **kwargs):
tweets.extend(tweets_batch)
return tweets
async def get_job_historic(self, job_id: str) -> Optional[JobHistoric]:
async with self.session_async() as session:
query = sqlmodel.select(JobHistoric).where(JobHistoric.job_id == job_id)
return session.exec(query).one_or_none()
# TODO remove "with self.session..." from everything, since we're returning ORM models, it's always needed on the outside
async def close(self):
# TODO implement
print("Closing Repository...")
print("Closed Repository")
@staticmethod
def run_migrations():
subprocess.check_output(("alembic", "upgrade", "head"), cwd=os.getcwd())
@staticmethod
def generate_migration(name: str):
subprocess.check_output(("alembic", "revision", "--autogenerate", "-m", name))
def tcp_wait(self):
# TODO configurable timeout
url = self._engine.url
print("Waiting for TCP port", url)
wait4it.wait_for(host=url.host, port=url.port, timeout=5)
print("TCP port ready", url)
async def tcp_wait_async(self):
await aioify(self.tcp_wait)()
|
[
"sqlmodel.Session",
"sqlmodel.create_engine",
"sqlmodel.select"
] |
[((545, 572), 'sqlmodel.create_engine', 'sqlmodel.create_engine', (['uri'], {}), '(uri)\n', (567, 572), False, 'import sqlmodel\n'), ((608, 655), 'contextvars.ContextVar', 'contextvars.ContextVar', (['"""session"""'], {'default': 'None'}), "('session', default=None)\n", (630, 655), False, 'import contextvars\n'), ((1043, 1069), 'sqlmodel.Session', 'Session', ([], {'bind': 'self._engine'}), '(bind=self._engine)\n', (1050, 1069), False, 'from sqlmodel import Session\n'), ((8932, 9010), 'subprocess.check_output', 'subprocess.check_output', (["('alembic', 'revision', '--autogenerate', '-m', name)"], {}), "(('alembic', 'revision', '--autogenerate', '-m', name))\n", (8955, 9010), False, 'import subprocess\n'), ((9154, 9211), 'wait4it.wait_for', 'wait4it.wait_for', ([], {'host': 'url.host', 'port': 'url.port', 'timeout': '(5)'}), '(host=url.host, port=url.port, timeout=5)\n', (9170, 9211), False, 'import wait4it\n'), ((4359, 4390), 'sqlmodel.select', 'sqlmodel.select', (['TwitterProfile'], {}), '(TwitterProfile)\n', (4374, 4390), False, 'import sqlmodel\n'), ((4621, 4652), 'sqlmodel.select', 'sqlmodel.select', (['TwitterProfile'], {}), '(TwitterProfile)\n', (4636, 4652), False, 'import sqlmodel\n'), ((5700, 5731), 'sqlmodel.select', 'sqlmodel.select', (['TwitterProfile'], {}), '(TwitterProfile)\n', (5715, 5731), False, 'import sqlmodel\n'), ((8853, 8864), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8862, 8864), False, 'import os\n'), ((9300, 9321), 'aioify.aioify', 'aioify', (['self.tcp_wait'], {}), '(self.tcp_wait)\n', (9306, 9321), False, 'from aioify import aioify\n'), ((2877, 2899), 'aioify.aioify', 'aioify', (['session.commit'], {}), '(session.commit)\n', (2883, 2899), False, 'from aioify import aioify\n'), ((3108, 3129), 'aioify.aioify', 'aioify', (['session.close'], {}), '(session.close)\n', (3114, 3129), False, 'from aioify import aioify\n'), ((4144, 4166), 'aioify.aioify', 'aioify', (['session.delete'], {}), '(session.delete)\n', (4150, 4166), False, 'from aioify import aioify\n'), ((4817, 4837), 'aioify.aioify', 'aioify', (['session.exec'], {}), '(session.exec)\n', (4823, 4837), False, 'from aioify import aioify\n'), ((4870, 4888), 'aioify.aioify', 'aioify', (['result.all'], {}), '(result.all)\n', (4876, 4888), False, 'from aioify import aioify\n'), ((5045, 5076), 'sqlmodel.select', 'sqlmodel.select', (['TwitterProfile'], {}), '(TwitterProfile)\n', (5060, 5076), False, 'import sqlmodel\n'), ((5339, 5370), 'sqlmodel.select', 'sqlmodel.select', (['TwitterProfile'], {}), '(TwitterProfile)\n', (5354, 5370), False, 'import sqlmodel\n'), ((5437, 5457), 'aioify.aioify', 'aioify', (['session.exec'], {}), '(session.exec)\n', (5443, 5457), False, 'from aioify import aioify\n'), ((5490, 5508), 'aioify.aioify', 'aioify', (['result.one'], {}), '(result.one)\n', (5496, 5508), False, 'from aioify import aioify\n'), ((5949, 5969), 'aioify.aioify', 'aioify', (['session.exec'], {}), '(session.exec)\n', (5955, 5969), False, 'from aioify import aioify\n'), ((6002, 6020), 'aioify.aioify', 'aioify', (['result.one'], {}), '(result.one)\n', (6008, 6020), False, 'from aioify import aioify\n'), ((6714, 6743), 'sqlmodel.select', 'sqlmodel.select', (['TwitterTweet'], {}), '(TwitterTweet)\n', (6729, 6743), False, 'import sqlmodel\n'), ((7719, 7756), 'twitterscraper.models.domain.TwitterTweet.tweet_id.in_', 'TwitterTweet.tweet_id.in_', (['tweets_ids'], {}), '(tweets_ids)\n', (7744, 7756), False, 'from twitterscraper.models.domain import TwitterProfile, TwitterTweet, JobHistoric\n'), ((8368, 8396), 
'sqlmodel.select', 'sqlmodel.select', (['JobHistoric'], {}), '(JobHistoric)\n', (8383, 8396), False, 'import sqlmodel\n'), ((2990, 3014), 'aioify.aioify', 'aioify', (['session.rollback'], {}), '(session.rollback)\n', (2996, 3014), False, 'from aioify import aioify\n'), ((3754, 3775), 'aioify.aioify', 'aioify', (['session.flush'], {}), '(session.flush)\n', (3760, 3775), False, 'from aioify import aioify\n'), ((4216, 4237), 'aioify.aioify', 'aioify', (['session.flush'], {}), '(session.flush)\n', (4222, 4237), False, 'from aioify import aioify\n'), ((3666, 3685), 'aioify.aioify', 'aioify', (['session.add'], {}), '(session.add)\n', (3672, 3685), False, 'from aioify import aioify\n')]
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import numpy as np
import pytest
import megengine as mge
import megengine.functional as F  # used by Round and fake_quant_tensor_gt below
from megengine import tensor
from megengine.core.autodiff.grad import Function, Grad
from megengine.core.tensor.utils import make_shape_tuple
from megengine.quantization.internal_fake_quant import *
from megengine.quantization.utils import QuantMode, fake_quant_tensor, tqt_forward
class TQT_numpy:
def __init__(self, lowerbound, upperbound):
super().__init__()
self.lowerbound = lowerbound
self.upperbound = upperbound
def forward(self, inp, scale):
t = 2 ** scale
# t = F.maximum(t, 1e-4)
inp_scaled = inp / t
inp_clipped = np.maximum(
np.minimum(inp_scaled, self.upperbound), self.lowerbound
)
inp_rounded = np.round(inp_clipped)
inp_flq = inp_rounded * t
self.saved_tensors = (inp_scaled, inp_rounded, t)
return inp_flq
def backward(self, grad_inp_flq):
(inp_scaled, inp_rounded, t) = self.saved_tensors
mask_clip = (inp_scaled < -0.5 + self.lowerbound) + (
inp_scaled > self.upperbound + 0.5
) # mask for accumulating the gradients of |data_scaled|>L
mask_quant = np.abs(
mask_clip - 1
) # mask for accumulating the gradients with |data_scaled|<=L
grad_quant = (
grad_inp_flq * mask_quant * (inp_rounded - inp_scaled)
) # gradient within |data_scaled|<=L
grad_clip = (
grad_inp_flq * mask_clip * inp_rounded
) # gradient with | data_scaled|>L
grad_s = grad_clip.sum() + grad_quant.sum()
# dL/ds = dL/dt * t * ln(2)
grad_s = grad_s * t * np.log(2)
grad_inp = grad_inp_flq * mask_quant
return grad_inp, grad_s
def test_tqt():
g = []
def cb(grad):
g.append(grad)
x = np.random.normal(size=(1, 2, 3, 4))
s = np.random.rand(1) + 1
g_y = np.ones(shape=(1, 2, 3, 4), dtype="float32")
n = TQT_numpy(-127, 127)
y_np = n.forward(x, s)
g_x_np, g_s_np = n.backward(g_y)
x = mge.tensor(x, dtype="float32")
s = mge.tensor(s, dtype="float32")
g_y = mge.tensor(g_y, dtype="float32")
grad = Grad().wrt(x, s, callback=cb)
y = tqt_forward(-127, 127, x, s)
grad(y, g_y)
g_x, g_s = g
np.testing.assert_allclose(y.numpy(), y_np, atol=1e-6)
np.testing.assert_allclose(g_x.numpy(), g_x_np, atol=1e-6)
np.testing.assert_allclose(g_s.numpy(), g_s_np, atol=1e-6)
def _save_to(self, name="grad"):
def callback(grad):
setattr(self, name, grad)
return callback
class Round(Function):
def forward(self, x):
return F.round(x)
def backward(self, output_grads):
return output_grads
def fake_quant_tensor_gt(inp, scale, zero_point, qmin, qmax):
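    # Reference implementation: quantize with scale/zero point, clamp to [qmin, qmax], then dequantize.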
oup = Round()(inp / scale) + zero_point
oup = F.minimum(F.maximum(oup, qmin), qmax)
oup = (oup - zero_point) * scale
return oup
def test_fakequant():
qmin = -126
qmax = 129
def run(zero_point, scale):
q_dict = {}
q_dict["mode"] = QuantMode.ASYMMERTIC
q_dict["scale"] = scale
q_dict["zero_point"] = zero_point
inp_data = np.random.uniform(low=-512.0, high=512.0, size=(1, 32, 32, 32))
inp = tensor(inp_data, dtype=np.float32)
# test forward
oup = fake_quant_tensor(inp, qmin, qmax, q_dict).numpy()
oup_gt = fake_quant_tensor_gt(inp, scale, zero_point, qmin, qmax).numpy()
assert np.allclose(oup, oup_gt)
assert oup.shape == oup_gt.shape
# test backward
x = tensor(inp_data, dtype=np.float32)
grad = Grad().wrt(x, callback=_save_to(x))
y = fake_quant_tensor(x, qmin, qmax, q_dict)
grad(y, tensor(F.ones_like(x)))
x1 = tensor(inp_data, dtype=np.float32)
grad = Grad().wrt(x1, callback=_save_to(x1))
y1 = fake_quant_tensor_gt(x1, scale, zero_point, qmin, qmax)
grad(y1, tensor(F.ones_like(x1)))
assert np.allclose(x.grad.numpy(), x1.grad.numpy())
assert make_shape_tuple(x.grad.shape) == make_shape_tuple(x1.grad.shape)
zero_point = tensor([1.0], dtype=np.float32)
scale = tensor([4.0], dtype=np.float32)
run(zero_point, scale)
zero_point = tensor(1.0 * np.ones((1, 32, 1, 1)), dtype=np.float32)
scale = tensor(4.0 * np.ones((1, 32, 1, 1)), dtype=np.float32)
run(zero_point, scale)
|
[
"megengine.tensor",
"megengine.quantization.utils.tqt_forward",
"megengine.quantization.utils.fake_quant_tensor",
"megengine.core.tensor.utils.make_shape_tuple",
"megengine.core.autodiff.grad.Grad"
] |
[((2219, 2254), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(1, 2, 3, 4)'}), '(size=(1, 2, 3, 4))\n', (2235, 2254), True, 'import numpy as np\n'), ((2295, 2339), 'numpy.ones', 'np.ones', ([], {'shape': '(1, 2, 3, 4)', 'dtype': '"""float32"""'}), "(shape=(1, 2, 3, 4), dtype='float32')\n", (2302, 2339), True, 'import numpy as np\n'), ((2443, 2473), 'megengine.tensor', 'mge.tensor', (['x'], {'dtype': '"""float32"""'}), "(x, dtype='float32')\n", (2453, 2473), True, 'import megengine as mge\n'), ((2482, 2512), 'megengine.tensor', 'mge.tensor', (['s'], {'dtype': '"""float32"""'}), "(s, dtype='float32')\n", (2492, 2512), True, 'import megengine as mge\n'), ((2523, 2555), 'megengine.tensor', 'mge.tensor', (['g_y'], {'dtype': '"""float32"""'}), "(g_y, dtype='float32')\n", (2533, 2555), True, 'import megengine as mge\n'), ((2605, 2633), 'megengine.quantization.utils.tqt_forward', 'tqt_forward', (['(-127)', '(127)', 'x', 's'], {}), '(-127, 127, x, s)\n', (2616, 2633), False, 'from megengine.quantization.utils import QuantMode, fake_quant_tensor, tqt_forward\n'), ((4522, 4553), 'megengine.tensor', 'tensor', (['[1.0]'], {'dtype': 'np.float32'}), '([1.0], dtype=np.float32)\n', (4528, 4553), False, 'from megengine import tensor\n'), ((4566, 4597), 'megengine.tensor', 'tensor', (['[4.0]'], {'dtype': 'np.float32'}), '([4.0], dtype=np.float32)\n', (4572, 4597), False, 'from megengine import tensor\n'), ((1141, 1162), 'numpy.round', 'np.round', (['inp_clipped'], {}), '(inp_clipped)\n', (1149, 1162), True, 'import numpy as np\n'), ((1573, 1594), 'numpy.abs', 'np.abs', (['(mask_clip - 1)'], {}), '(mask_clip - 1)\n', (1579, 1594), True, 'import numpy as np\n'), ((2263, 2280), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (2277, 2280), True, 'import numpy as np\n'), ((3569, 3632), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-512.0)', 'high': '(512.0)', 'size': '(1, 32, 32, 32)'}), '(low=-512.0, high=512.0, size=(1, 32, 32, 32))\n', (3586, 3632), True, 'import numpy as np\n'), ((3647, 3681), 'megengine.tensor', 'tensor', (['inp_data'], {'dtype': 'np.float32'}), '(inp_data, dtype=np.float32)\n', (3653, 3681), False, 'from megengine import tensor\n'), ((3867, 3891), 'numpy.allclose', 'np.allclose', (['oup', 'oup_gt'], {}), '(oup, oup_gt)\n', (3878, 3891), True, 'import numpy as np\n'), ((3970, 4004), 'megengine.tensor', 'tensor', (['inp_data'], {'dtype': 'np.float32'}), '(inp_data, dtype=np.float32)\n', (3976, 4004), False, 'from megengine import tensor\n'), ((4068, 4108), 'megengine.quantization.utils.fake_quant_tensor', 'fake_quant_tensor', (['x', 'qmin', 'qmax', 'q_dict'], {}), '(x, qmin, qmax, q_dict)\n', (4085, 4108), False, 'from megengine.quantization.utils import QuantMode, fake_quant_tensor, tqt_forward\n'), ((4163, 4197), 'megengine.tensor', 'tensor', (['inp_data'], {'dtype': 'np.float32'}), '(inp_data, dtype=np.float32)\n', (4169, 4197), False, 'from megengine import tensor\n'), ((1052, 1091), 'numpy.minimum', 'np.minimum', (['inp_scaled', 'self.upperbound'], {}), '(inp_scaled, self.upperbound)\n', (1062, 1091), True, 'import numpy as np\n'), ((2051, 2060), 'numpy.log', 'np.log', (['(2)'], {}), '(2)\n', (2057, 2060), True, 'import numpy as np\n'), ((2567, 2573), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (2571, 2573), False, 'from megengine.core.autodiff.grad import Function, Grad\n'), ((4438, 4468), 'megengine.core.tensor.utils.make_shape_tuple', 'make_shape_tuple', (['x.grad.shape'], {}), '(x.grad.shape)\n', (4454, 4468), False, 'from 
megengine.core.tensor.utils import make_shape_tuple\n'), ((4472, 4503), 'megengine.core.tensor.utils.make_shape_tuple', 'make_shape_tuple', (['x1.grad.shape'], {}), '(x1.grad.shape)\n', (4488, 4503), False, 'from megengine.core.tensor.utils import make_shape_tuple\n'), ((4656, 4678), 'numpy.ones', 'np.ones', (['(1, 32, 1, 1)'], {}), '((1, 32, 1, 1))\n', (4663, 4678), True, 'import numpy as np\n'), ((4723, 4745), 'numpy.ones', 'np.ones', (['(1, 32, 1, 1)'], {}), '((1, 32, 1, 1))\n', (4730, 4745), True, 'import numpy as np\n'), ((3719, 3761), 'megengine.quantization.utils.fake_quant_tensor', 'fake_quant_tensor', (['inp', 'qmin', 'qmax', 'q_dict'], {}), '(inp, qmin, qmax, q_dict)\n', (3736, 3761), False, 'from megengine.quantization.utils import QuantMode, fake_quant_tensor, tqt_forward\n'), ((4020, 4026), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (4024, 4026), False, 'from megengine.core.autodiff.grad import Function, Grad\n'), ((4213, 4219), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (4217, 4219), False, 'from megengine.core.autodiff.grad import Function, Grad\n')]
|
from datetime import datetime
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Disease(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
class DiseaseGroup(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
class DiseaseGroupMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
disease_group_id: int
disease_id: int
class PatientDisease(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
patient_id: int
doctor_id: Optional[int] = None
cleft: bool
craniofacial: bool
syndronic: bool
non: bool
comorbidity: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class PatientDiseaseList(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
patient_disease_id: int
disease_id: int
detail: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class PatientDiseaseCleft(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
patient_disease_id: int
cleft_type: str
cleft_lateral: str
cleft_side: str
cleft_complete: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class PatientDiseaseCraniofacial(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
patient_disease_id: int
micrognathia_detail: str
craniofacial_cleft_right: int
craniofacial_cleft_medial: int
craniofacial_cleft_left: int
feem_nasofrontal: bool
feem_nasofrontal_side: str
feem_nasoethmoidal: bool
feem_nasoethmoidal_side: str
feem_mix: bool
feem_mix_side: str
feem_mix_detail: str
craniofacial_microsomia_side: str
craniofacial_microsomia_detail: str
microtia_side: str
microtia_detail: str
craniosynostosis_detail: str
frontonasal_dysplasia_detail: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class PatientDiseaseOther(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
patient_disease_id: int
disease_group_id: int
name: str
detail: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
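# --- Editor's illustrative sketch (not part of the original module) ---
# Assuming an engine configured elsewhere, the disease tables declared above
# could be created through SQLModel's shared metadata; the sqlite URL is only
# an example value.
#
#   from sqlmodel import SQLModel, create_engine
#   engine = create_engine("sqlite://")
#   SQLModel.metadata.create_all(engine)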
|
[
"sqlmodel.Field"
] |
[((136, 147), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (145, 147), False, 'from fastapi import APIRouter\n'), ((211, 248), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (216, 248), False, 'from sqlmodel import Field, SQLModel\n'), ((331, 368), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (336, 368), False, 'from sqlmodel import Field, SQLModel\n'), ((454, 491), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (459, 491), False, 'from sqlmodel import Field, SQLModel\n'), ((608, 645), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (613, 645), False, 'from sqlmodel import Field, SQLModel\n'), ((978, 1015), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (983, 1015), False, 'from sqlmodel import Field, SQLModel\n'), ((1262, 1299), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1267, 1299), False, 'from sqlmodel import Field, SQLModel\n'), ((1604, 1641), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1609, 1641), False, 'from sqlmodel import Field, SQLModel\n'), ((2367, 2404), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2372, 2404), False, 'from sqlmodel import Field, SQLModel\n')]
|
import os
import pathlib
from datetime import datetime, date, time
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile
from fastapi.encoders import jsonable_encoder
from sqlmodel import Field, SQLModel
from ..db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
cert_path = '/uploads/user/{user_id}/cert/'
avatar_path = '/uploads/user/{user_id}/avatar/'
class User(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
    state: str  # Status: pending, active, inactive
    id_type_id: int
    id_number: Optional[str] = None
    email: str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
hospital_id: Optional[int] = None
hospital_node_id: Optional[int] = None
discipline_id: int
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
nickname: str
birth_date: date
gender: str
academic_degree: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
potential: str
avatar_path: str
document_path: str
policy_accept: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserPhone(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
number: str
detail: str
receive_sms: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserFeedback(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
feedback_type_id: Optional[int] = None
detail: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserNotification(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
name: str
detail: str
is_read: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserRole(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
role_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserSource(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
created_at: datetime
created_by: int
class UserLog(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
class Doctor(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
hospital_id: int
discipline_id: int
prefix: str
first_name: str
last_name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class DoctorProcedureMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
doctor_id: int
procedure_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class Discipline(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
discipline_group_id: int
name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class DisciplineGroup(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class Role(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class RoleModuleFunctionMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
role_id: int
module_function_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
@router.post("/user", response_model=User)
async def create_user(user: User, session: AsyncSession = Depends(get_session)):
# Add user
session.add(user)
await session.commit()
await session.refresh(user)
return user
@router.get("/user", response_model=List[User])
async def get_users(session: AsyncSession = Depends(get_session)):
# Select all users
statement = select(User)
result = await session.execute(statement)
users = result.scalars().all()
return users
@router.post("/user/check_id/{id_type_id}/{id_number}", response_model=bool)
async def check_id_available(id_type_id: int, id_number: str, session: AsyncSession = Depends(get_session)):
# Check id existence
    result = await session.execute(select(User).where(User.id_type_id == id_type_id).where(User.id_number == id_number))
    # execute() never returns None; the id is available only when no matching row exists
    return result.scalars().first() is None
@router.get("/user/pending_num", response_model=int)
async def get_pending_user_num(session: AsyncSession = Depends(get_session)):
# Get user with pending status
users = await session.execute(select(User).where(User.state == "pending"))
return len(users.scalars().all())
@router.get("/user/{user_id}", response_model=User)
async def get_user(user_id: int, session: AsyncSession = Depends(get_session)):
# Get user with id
users = await session.execute(select(User).where(User.id == user_id))
user = users.scalars().first()
return user
@router.put("/user/{user_id}", response_model=User)
async def update_user(user_id: int, user: User, session: AsyncSession = Depends(get_session)):
# Get user with id
statement = select(User).where(User.id == user_id)
users = await session.execute(statement)
# Update detail
    user_old = users.scalars().one()
    model = User(**user_old.dict())
update_data = user.dict(exclude_unset=True)
updated_user = model.copy(update=update_data)
user_old = jsonable_encoder(updated_user)
# Commit to database
await session.commit()
return user_old
@router.delete("/user/{user_id}")
async def delete_user(user_id: int, session: AsyncSession = Depends(get_session)):
# Check user existence
statement = select(User).where(User.id == user_id)
    result = await session.execute(statement)
    user = result.scalars().first()
    # Not found error
    if not user:
        raise HTTPException(status_code=404, detail='User not found')
# Delete
await session.delete(user)
await session.commit()
return status.HTTP_200_OK
@router.post("/user/{user_id}/document")
async def upload_document(user_id: int, document: UploadFile = File(...), session: AsyncSession = Depends(get_session)):
# File name
file_dir = os.getcwd() + cert_path.format(user_id=user_id)
file_path = file_dir + document.filename
# Make directory if not exist
try:
if not os.path.exists(file_dir):
pathlib.Path(file_dir).mkdir(parents=True, exist_ok=True)
except Exception as e:
print(e)
# Write file
with open(file_path, 'wb') as f:
f.write(document.file.read())
f.close()
# Update user document path
statement = select(User).where(User.id == user_id)
users = await session.execute(statement)
user_old = users.scalars().one()
user_old.document_path = file_path
await session.commit()
return {'document_path': file_path}
@router.post("/user/{user_id}/accept")
async def accept_policy(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/{user_id}/reset")
async def reset_password(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/{user_id}/avatar")
async def upload_avatar(session: AsyncSession = Depends(get_session)):
return None
@router.delete("/user/{user_id}/avatar")
async def delete_avatar(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/{user_id}/role")
async def set_role(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/feedback")
async def create_user_feedback(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/feedback")
async def get_user_feedbacks(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/feedback/{feedback_id}")
async def get_user_feedback(session: AsyncSession = Depends(get_session)):
return None
@router.put("/user/feedback/{feedback_id}")
async def update_user_feedback(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/doctor")
async def create_doctor(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/doctor")
async def get_doctors(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/doctor/{doctor_id}")
async def get_doctor(session: AsyncSession = Depends(get_session)):
return None
@router.put("/user/doctor/{doctor_id}")
async def update_doctor(session: AsyncSession = Depends(get_session)):
return None
@router.delete("/user/doctor/{doctor_id}")
async def delete_doctor(session: AsyncSession = Depends(get_session)):
return None
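# --- Editor's illustrative sketch (not part of the original module) ---
# The router above is typically mounted on the application in a separate entry
# point; the FastAPI instance and tag below are assumptions, not defined here.
#
#   from fastapi import FastAPI
#   app = FastAPI()
#   app.include_router(router, tags=["user"])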
|
[
"sqlmodel.Field"
] |
[((412, 423), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (421, 423), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((577, 614), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (582, 614), False, 'from sqlmodel import Field, SQLModel\n'), ((1689, 1726), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1694, 1726), False, 'from sqlmodel import Field, SQLModel\n'), ((1973, 2010), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1978, 2010), False, 'from sqlmodel import Field, SQLModel\n'), ((2266, 2303), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2271, 2303), False, 'from sqlmodel import Field, SQLModel\n'), ((2540, 2577), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2545, 2577), False, 'from sqlmodel import Field, SQLModel\n'), ((2785, 2822), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2790, 2822), False, 'from sqlmodel import Field, SQLModel\n'), ((2945, 2982), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2950, 2982), False, 'from sqlmodel import Field, SQLModel\n'), ((3062, 3099), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3067, 3099), False, 'from sqlmodel import Field, SQLModel\n'), ((3397, 3434), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3402, 3434), False, 'from sqlmodel import Field, SQLModel\n'), ((3649, 3686), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3654, 3686), False, 'from sqlmodel import Field, SQLModel\n'), ((3908, 3945), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3913, 3945), False, 'from sqlmodel import Field, SQLModel\n'), ((4127, 4164), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (4132, 4164), False, 'from sqlmodel import Field, SQLModel\n'), ((4363, 4400), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (4368, 4400), False, 'from sqlmodel import Field, SQLModel\n'), ((4656, 4676), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (4663, 4676), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((4886, 4906), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (4893, 4906), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((4948, 4960), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (4954, 4960), False, 'from sqlalchemy import select\n'), ((5224, 5244), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5231, 5244), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((5561, 5581), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5568, 5581), False, 'from fastapi 
import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((5847, 5867), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5854, 5867), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((6144, 6164), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6151, 6164), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((6480, 6510), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['updated_user'], {}), '(updated_user)\n', (6496, 6510), False, 'from fastapi.encoders import jsonable_encoder\n'), ((6681, 6701), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6688, 6701), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((7182, 7191), 'fastapi.File', 'File', (['...'], {}), '(...)\n', (7186, 7191), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((7217, 7237), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (7224, 7237), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8042, 8062), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8049, 8062), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8170, 8190), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8177, 8190), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8298, 8318), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8305, 8318), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8428, 8448), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8435, 8448), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8549, 8569), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8556, 8569), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8676, 8696), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8683, 8696), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8800, 8820), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8807, 8820), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8937, 8957), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8944, 8957), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9077, 9097), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9084, 9097), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9195, 9215), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9202, 9215), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9310, 9330), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9317, 9330), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9436, 9456), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9443, 9456), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9565, 9585), 
'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9572, 9585), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9697, 9717), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9704, 9717), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((6884, 6939), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""User not found"""'}), "(status_code=404, detail='User not found')\n", (6897, 6939), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((7272, 7283), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7281, 7283), False, 'import os\n'), ((6206, 6218), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (6212, 6218), False, 'from sqlalchemy import select\n'), ((6747, 6759), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (6753, 6759), False, 'from sqlalchemy import select\n'), ((7424, 7448), 'os.path.exists', 'os.path.exists', (['file_dir'], {}), '(file_dir)\n', (7438, 7448), False, 'import os\n'), ((7724, 7736), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (7730, 7736), False, 'from sqlalchemy import select\n'), ((5653, 5665), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (5659, 5665), False, 'from sqlalchemy import select\n'), ((5927, 5939), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (5933, 5939), False, 'from sqlalchemy import select\n'), ((7462, 7484), 'pathlib.Path', 'pathlib.Path', (['file_dir'], {}), '(file_dir)\n', (7474, 7484), False, 'import pathlib\n'), ((5306, 5318), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (5312, 5318), False, 'from sqlalchemy import select\n')]
|
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import platform
import weakref
import numpy as np
import pytest
import megengine as mge
import megengine.distributed as dist
import megengine.functional as F
import megengine.module as M
import megengine.optimizer as optim
from megengine.autodiff import GradManager
from megengine.distributed.helper import get_device_count_by_fork
from megengine.jit import trace
def test_basic():
x = mge.tensor([1.0, 3.0, 5.0]).reshape(1, 3)
w = mge.tensor([2.0, 4.0, 6.0]).reshape(3, 1)
b = mge.tensor(-1.0)
gm = GradManager().attach([w, b])
gm.record()
p = F.matmul(x, w)
y = p + b
gm.backward(y)
gm.release() # is not necessary
np.testing.assert_equal(w.grad.numpy(), [[1], [3], [5]])
np.testing.assert_equal(b.grad.numpy(), [1])
w.grad = None
b.grad = None
with gm:
p = F.matmul(x, w)
y = p + b
gm.backward(y)
np.testing.assert_equal(w.grad.numpy(), [[1], [3], [5]])
np.testing.assert_equal(b.grad.numpy(), [1])
def test_attach_in_with_block():
a = mge.Parameter([1.0])
gm = GradManager()
with gm:
b = a * 3
gm.attach(b)
c = b + 1
gm.backward(c)
assert int(b.grad.numpy()) == 1
def test_attach_temporary():
w = mge.Parameter(2.0)
gm = GradManager()
gm.attach(w)
def cb(x, g):
assert x is ref()
cb.called = True
for i in range(3):
with gm:
cb.called = False
x = mge.Tensor(i, dtype="float32")
gm.attach(x, callbacks=cb)
ref = weakref.ref(x)
y = x * w
gm.backward(y)
assert cb.called
del x
assert ref() is None
# NOTE: does not guarantee timely release when recording
# for i in range(3):
# with gm:
# x = mge.Tensor(i, dtype='float32')
# gm.attach(x)
# ref = weakref.ref(x)
# y = x * w
# del x
# assert ref() is None
# gm.backward(y)
def test_no_dependency():
x = mge.tensor(3)
w = mge.Parameter(1.0)
w_no_dep = mge.Parameter(1.0)
gm = GradManager()
gm.attach(w)
gm.attach(w_no_dep)
with gm:
out1 = x * w
out2 = w_no_dep * out1
gm.backward(out1.sum())
assert w.grad is not None
assert w_no_dep.grad is None
def test_regression_1762():
x = F.ones((10, 10, 3, 3))
conv = M.Conv2d(10, 10, kernel_size=3, padding=1)
t_shape = (1, 10, 1, 1)
weight = mge.Parameter(np.ones(t_shape, dtype=np.float32))
bias = mge.Parameter(np.zeros(t_shape, dtype=np.float32))
gm = GradManager()
gm.attach(list(conv.parameters()) + [weight, bias])
with gm:
out1 = conv(x)
out2 = F.batch_norm(out1, None, None, weight, bias, training=True,)
# Weird error only occur when this action is placed after BN
# Op type is not relevant
loss = out1 + 1
gm.backward(loss)
@pytest.mark.skipif(
platform.system() == "Darwin", reason="do not imp GPU mode at macos now"
)
@pytest.mark.skipif(
platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
@pytest.mark.skipif(get_device_count_by_fork("gpu") < 2, reason="need more gpu device")
@pytest.mark.isolated_distributed
def test_remote_grad():
@dist.launcher
def worker():
rank = dist.get_rank()
size = dist.get_world_size()
x = mge.tensor(np.random.randn(1, rank * 2 + 2), dtype=np.float32)
m = M.Linear(rank * 2 + 2, rank * 2 + 4)
gm = GradManager().attach(m.parameters())
opt = optim.SGD(m.parameters(), 1e-3, momentum=0.9)
def train_func(x):
with gm:
if rank != 0:
x = dist.functional.remote_recv(
rank - 1, shape=(1, rank * 2 + 2), dtype=np.float32
)
y = m(x)
if rank != size - 1:
dist.functional.remote_send(y, dest_rank=rank + 1)
gm.backward()
else:
y = y.mean()
gm.backward(y)
opt.step().clear_grad()
train_funcs = [
train_func,
trace(symbolic=False)(train_func),
trace(symbolic=True)(train_func),
]
for func in train_funcs:
for i in range(3):
func(x)
worker()
|
[
"megengine.distributed.functional.remote_recv",
"megengine.functional.ones",
"megengine.module.Conv2d",
"megengine.distributed.get_rank",
"megengine.functional.batch_norm",
"megengine.distributed.get_world_size",
"megengine.Parameter",
"megengine.jit.trace",
"megengine.functional.matmul",
"megengine.tensor",
"megengine.distributed.functional.remote_send",
"megengine.distributed.helper.get_device_count_by_fork",
"megengine.module.Linear",
"megengine.autodiff.GradManager",
"megengine.Tensor"
] |
[((847, 863), 'megengine.tensor', 'mge.tensor', (['(-1.0)'], {}), '(-1.0)\n', (857, 863), True, 'import megengine as mge\n'), ((928, 942), 'megengine.functional.matmul', 'F.matmul', (['x', 'w'], {}), '(x, w)\n', (936, 942), True, 'import megengine.functional as F\n'), ((1396, 1416), 'megengine.Parameter', 'mge.Parameter', (['[1.0]'], {}), '([1.0])\n', (1409, 1416), True, 'import megengine as mge\n'), ((1426, 1439), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (1437, 1439), False, 'from megengine.autodiff import GradManager\n'), ((1608, 1626), 'megengine.Parameter', 'mge.Parameter', (['(2.0)'], {}), '(2.0)\n', (1621, 1626), True, 'import megengine as mge\n'), ((1636, 1649), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (1647, 1649), False, 'from megengine.autodiff import GradManager\n'), ((2409, 2422), 'megengine.tensor', 'mge.tensor', (['(3)'], {}), '(3)\n', (2419, 2422), True, 'import megengine as mge\n'), ((2432, 2450), 'megengine.Parameter', 'mge.Parameter', (['(1.0)'], {}), '(1.0)\n', (2445, 2450), True, 'import megengine as mge\n'), ((2466, 2484), 'megengine.Parameter', 'mge.Parameter', (['(1.0)'], {}), '(1.0)\n', (2479, 2484), True, 'import megengine as mge\n'), ((2494, 2507), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (2505, 2507), False, 'from megengine.autodiff import GradManager\n'), ((2749, 2771), 'megengine.functional.ones', 'F.ones', (['(10, 10, 3, 3)'], {}), '((10, 10, 3, 3))\n', (2755, 2771), True, 'import megengine.functional as F\n'), ((2784, 2826), 'megengine.module.Conv2d', 'M.Conv2d', (['(10)', '(10)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(10, 10, kernel_size=3, padding=1)\n', (2792, 2826), True, 'import megengine.module as M\n'), ((2991, 3004), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (3002, 3004), False, 'from megengine.autodiff import GradManager\n'), ((1186, 1200), 'megengine.functional.matmul', 'F.matmul', (['x', 'w'], {}), '(x, w)\n', (1194, 1200), True, 'import megengine.functional as F\n'), ((2883, 2917), 'numpy.ones', 'np.ones', (['t_shape'], {'dtype': 'np.float32'}), '(t_shape, dtype=np.float32)\n', (2890, 2917), True, 'import numpy as np\n'), ((2944, 2979), 'numpy.zeros', 'np.zeros', (['t_shape'], {'dtype': 'np.float32'}), '(t_shape, dtype=np.float32)\n', (2952, 2979), True, 'import numpy as np\n'), ((3114, 3173), 'megengine.functional.batch_norm', 'F.batch_norm', (['out1', 'None', 'None', 'weight', 'bias'], {'training': '(True)'}), '(out1, None, None, weight, bias, training=True)\n', (3126, 3173), True, 'import megengine.functional as F\n'), ((3731, 3746), 'megengine.distributed.get_rank', 'dist.get_rank', ([], {}), '()\n', (3744, 3746), True, 'import megengine.distributed as dist\n'), ((3762, 3783), 'megengine.distributed.get_world_size', 'dist.get_world_size', ([], {}), '()\n', (3781, 3783), True, 'import megengine.distributed as dist\n'), ((3871, 3907), 'megengine.module.Linear', 'M.Linear', (['(rank * 2 + 2)', '(rank * 2 + 4)'], {}), '(rank * 2 + 2, rank * 2 + 4)\n', (3879, 3907), True, 'import megengine.module as M\n'), ((3356, 3373), 'platform.system', 'platform.system', ([], {}), '()\n', (3371, 3373), False, 'import platform\n'), ((3456, 3473), 'platform.system', 'platform.system', ([], {}), '()\n', (3471, 3473), False, 'import platform\n'), ((3553, 3584), 'megengine.distributed.helper.get_device_count_by_fork', 'get_device_count_by_fork', (['"""gpu"""'], {}), "('gpu')\n", (3577, 3584), False, 'from megengine.distributed.helper import 
get_device_count_by_fork\n'), ((747, 774), 'megengine.tensor', 'mge.tensor', (['[1.0, 3.0, 5.0]'], {}), '([1.0, 3.0, 5.0])\n', (757, 774), True, 'import megengine as mge\n'), ((797, 824), 'megengine.tensor', 'mge.tensor', (['[2.0, 4.0, 6.0]'], {}), '([2.0, 4.0, 6.0])\n', (807, 824), True, 'import megengine as mge\n'), ((874, 887), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (885, 887), False, 'from megengine.autodiff import GradManager\n'), ((1824, 1854), 'megengine.Tensor', 'mge.Tensor', (['i'], {'dtype': '"""float32"""'}), "(i, dtype='float32')\n", (1834, 1854), True, 'import megengine as mge\n'), ((1912, 1926), 'weakref.ref', 'weakref.ref', (['x'], {}), '(x)\n', (1923, 1926), False, 'import weakref\n'), ((3807, 3839), 'numpy.random.randn', 'np.random.randn', (['(1)', '(rank * 2 + 2)'], {}), '(1, rank * 2 + 2)\n', (3822, 3839), True, 'import numpy as np\n'), ((3921, 3934), 'megengine.autodiff.GradManager', 'GradManager', ([], {}), '()\n', (3932, 3934), False, 'from megengine.autodiff import GradManager\n'), ((4606, 4627), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(False)'}), '(symbolic=False)\n', (4611, 4627), False, 'from megengine.jit import trace\n'), ((4653, 4673), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)'}), '(symbolic=True)\n', (4658, 4673), False, 'from megengine.jit import trace\n'), ((4121, 4206), 'megengine.distributed.functional.remote_recv', 'dist.functional.remote_recv', (['(rank - 1)'], {'shape': '(1, rank * 2 + 2)', 'dtype': 'np.float32'}), '(rank - 1, shape=(1, rank * 2 + 2), dtype=np.float32\n )\n', (4148, 4206), True, 'import megengine.distributed as dist\n'), ((4330, 4380), 'megengine.distributed.functional.remote_send', 'dist.functional.remote_send', (['y'], {'dest_rank': '(rank + 1)'}), '(y, dest_rank=rank + 1)\n', (4357, 4380), True, 'import megengine.distributed as dist\n')]
|
# Copyright (c) 2020 <NAME>
# This code is licensed under MIT license
# (https://github.com/kwotsin/mimicry/blob/master/LICENSE)
# ------------------------------------------------------------------------------
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# This file has been modified by Megvii ("Megvii Modifications").
# All Megvii Modifications are Copyright (C) 2014-2019 Megvii Inc. All rights reserved.
# ------------------------------------------------------------------------------
"""
Implementation of Base GAN models.
"""
import megengine
import megengine.functional as F
import megengine.module as M
import megengine.random as R
import numpy as np
from . import losses
from .basemodel import BaseModel
class BaseGenerator(BaseModel):
r"""
Base class for a generic unconditional generator model.
Attributes:
nz (int): Noise dimension for upsampling.
ngf (int): Variable controlling generator feature map sizes.
bottom_width (int): Starting width for upsampling generator output to an image.
loss_type (str): Name of loss to use for GAN loss.
"""
def __init__(self, nz, ngf, bottom_width, loss_type, **kwargs):
super().__init__(**kwargs)
self.nz = nz
self.ngf = ngf
self.bottom_width = bottom_width
self.loss_type = loss_type
def _train_step_implementation(
self,
real_batch,
netD=None,
optG=None):
# Produce fake images
fake_images = self._infer_step_implementation(real_batch)
# Compute output logit of D thinking image real
output = netD(fake_images)
# Compute loss
errG = self.compute_gan_loss(output=output)
optG.zero_grad()
optG.backward(errG)
optG.step()
return errG
def _infer_step_implementation(self, batch):
# Get only batch size from real batch
batch_size = batch.shape[0]
noise = R.gaussian(shape=[batch_size, self.nz])
fake_images = self.forward(noise)
return fake_images
def compute_gan_loss(self, output):
if self.loss_type == "ns":
errG = losses.ns_loss_gen(output)
elif self.loss_type == "wasserstein":
errG = losses.wasserstein_loss_gen(output)
else:
raise ValueError("Invalid loss_type {} selected.".format(
self.loss_type))
return errG
def generate_images(self, num_images):
"""Generate images of shape [`num_images`, C, H, W].
        Depending on the final activation function, pixel values are NOT guaranteed
to be within [0, 1].
"""
return self.infer_step(np.empty(num_images, dtype="float32"))
class BaseDiscriminator(BaseModel):
r"""
Base class for a generic unconditional discriminator model.
Attributes:
ndf (int): Variable controlling discriminator feature map sizes.
loss_type (str): Name of loss to use for GAN loss.
"""
def __init__(self, ndf, loss_type, **kwargs):
super().__init__(**kwargs)
self.ndf = ndf
self.loss_type = loss_type
def _train_step_implementation(
self,
real_batch,
netG=None,
optD=None):
# Produce logits for real images
output_real = self._infer_step_implementation(real_batch)
# Produce fake images
fake_images = netG._infer_step_implementation(real_batch)
fake_images = F.zero_grad(fake_images)
# Produce logits for fake images
output_fake = self._infer_step_implementation(fake_images)
# Compute loss for D
errD = self.compute_gan_loss(output_real=output_real,
output_fake=output_fake)
D_x, D_Gz = self.compute_probs(output_real=output_real,
output_fake=output_fake)
# Backprop and update gradients
optD.zero_grad()
optD.backward(errD)
optD.step()
return errD, D_x, D_Gz
def _infer_step_implementation(self, batch):
return self.forward(batch)
def compute_gan_loss(self, output_real, output_fake):
r"""
Computes GAN loss for discriminator.
Args:
output_real (Tensor): A batch of output logits of shape (N, 1) from real images.
output_fake (Tensor): A batch of output logits of shape (N, 1) from fake images.
Returns:
errD (Tensor): A batch of GAN losses for the discriminator.
"""
# Compute loss for D
if self.loss_type == "gan" or self.loss_type == "ns":
errD = losses.minimax_loss_dis(output_fake=output_fake,
output_real=output_real)
elif self.loss_type == "wasserstein":
errD = losses.wasserstein_loss_dis(output_fake=output_fake,
output_real=output_real)
else:
raise ValueError("Invalid loss_type selected.")
return errD
def compute_probs(self, output_real, output_fake):
r"""
Computes probabilities from real/fake images logits.
Args:
output_real (Tensor): A batch of output logits of shape (N, 1) from real images.
output_fake (Tensor): A batch of output logits of shape (N, 1) from fake images.
Returns:
tuple: Average probabilities of real/fake image considered as real for the batch.
"""
D_x = F.sigmoid(output_real).mean()
D_Gz = F.sigmoid(output_fake).mean()
return D_x, D_Gz
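# --- Editor's illustrative sketch (not part of the original module) ---
# A concrete generator/discriminator pair built on the base classes above is
# typically driven by alternating the two train steps per batch. The public
# train_step wrapper is assumed to be provided by BaseModel, and netG, netD,
# optG, optD and the dataloader are assumptions not defined in this file.
#
#   for real_batch in dataloader:
#       errD, D_x, D_Gz = netD.train_step(real_batch, netG=netG, optD=optD)
#       errG = netG.train_step(real_batch, netD=netD, optG=optG)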
|
[
"megengine.random.gaussian",
"megengine.functional.zero_grad",
"megengine.functional.sigmoid"
] |
[((2259, 2298), 'megengine.random.gaussian', 'R.gaussian', ([], {'shape': '[batch_size, self.nz]'}), '(shape=[batch_size, self.nz])\n', (2269, 2298), True, 'import megengine.random as R\n'), ((3780, 3804), 'megengine.functional.zero_grad', 'F.zero_grad', (['fake_images'], {}), '(fake_images)\n', (3791, 3804), True, 'import megengine.functional as F\n'), ((2994, 3031), 'numpy.empty', 'np.empty', (['num_images'], {'dtype': '"""float32"""'}), "(num_images, dtype='float32')\n", (3002, 3031), True, 'import numpy as np\n'), ((5829, 5851), 'megengine.functional.sigmoid', 'F.sigmoid', (['output_real'], {}), '(output_real)\n', (5838, 5851), True, 'import megengine.functional as F\n'), ((5874, 5896), 'megengine.functional.sigmoid', 'F.sigmoid', (['output_fake'], {}), '(output_fake)\n', (5883, 5896), True, 'import megengine.functional as F\n')]
|
"""add application resume
Revision ID: 07061a7c250f
Revises: 378a9b9a491b
Create Date: 2022-04-26 08:00:47.428979+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "07061a7c250f"
down_revision = "378a9b9a491b"
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
"applications",
sa.Column("resume", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
)
def downgrade():
op.drop_column("applications", "resume")
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((496, 536), 'alembic.op.drop_column', 'op.drop_column', (['"""applications"""', '"""resume"""'], {}), "('applications', 'resume')\n", (510, 536), False, 'from alembic import op\n'), ((415, 449), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (447, 449), False, 'import sqlmodel\n')]
|
from sqlalchemy import inspect
from sqlalchemy.engine.reflection import Inspector
from sqlmodel import create_engine
def test_tutorial001(clear_sqlmodel):
from docs_src.tutorial.connect.create_tables import tutorial001 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
mod.main()
insp: Inspector = inspect(mod.engine)
assert insp.has_table(str(mod.Hero.__tablename__))
assert insp.has_table(str(mod.Team.__tablename__))
|
[
"sqlmodel.create_engine"
] |
[((283, 312), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (296, 312), False, 'from sqlmodel import create_engine\n'), ((317, 327), 'docs_src.tutorial.connect.create_tables.tutorial001.main', 'mod.main', ([], {}), '()\n', (325, 327), True, 'from docs_src.tutorial.connect.create_tables import tutorial001 as mod\n'), ((350, 369), 'sqlalchemy.inspect', 'inspect', (['mod.engine'], {}), '(mod.engine)\n', (357, 369), False, 'from sqlalchemy import inspect\n')]
|
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime, date
class Rate(SQLModel, table=True):
"""Create an SQLModel for rates"""
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="app_db.appuser.id")
client_id: int = Field(foreign_key="app_db.client.id")
valid_from: date
valid_to: date
amount: float # currency: EUR
created_at: datetime
updated_at: datetime
is_active: bool
__table_args__ = {"schema": "app_db"}
|
[
"sqlmodel.Field"
] |
[((201, 238), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (206, 238), False, 'from sqlmodel import Field, SQLModel\n'), ((258, 296), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.appuser.id"""'}), "(foreign_key='app_db.appuser.id')\n", (263, 296), False, 'from sqlmodel import Field, SQLModel\n'), ((318, 355), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.client.id"""'}), "(foreign_key='app_db.client.id')\n", (323, 355), False, 'from sqlmodel import Field, SQLModel\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2018-2019 Open-MMLab.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ---------------------------------------------------------------------
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# This file has been modified by Megvii ("Megvii Modifications").
# All Megvii Modifications are Copyright (C) 2014-2019 Megvii Inc. All rights reserved.
# ---------------------------------------------------------------------
from abc import ABCMeta, abstractmethod
import megengine.functional as F
import numpy as np
from megengine.core import tensor, Tensor
class BaseAnchorGenerator(metaclass=ABCMeta):
"""base class for anchor generator.
"""
def __init__(self):
pass
@abstractmethod
def get_anchors_by_feature(self) -> Tensor:
pass
class DefaultAnchorGenerator(BaseAnchorGenerator):
"""default retinanet anchor generator.
This class generate anchors by feature map in level.
Args:
base_size (int): anchor base size.
anchor_scales (np.ndarray): anchor scales based on stride.
The practical anchor scale is anchor_scale * stride
anchor_ratios(np.ndarray): anchor aspect ratios.
        offset (float): center point offset. Default is 0.
"""
def __init__(
self,
base_size=8,
anchor_scales: np.ndarray = np.array([2, 3, 4]),
anchor_ratios: np.ndarray = np.array([0.5, 1, 2]),
offset: float = 0,
):
super().__init__()
self.base_size = base_size
self.anchor_scales = anchor_scales
self.anchor_ratios = anchor_ratios
self.offset = offset
def _whctrs(self, anchor):
"""convert anchor box into (w, h, ctr_x, ctr_y)
"""
w = anchor[:, 2] - anchor[:, 0] + 1
h = anchor[:, 3] - anchor[:, 1] + 1
x_ctr = anchor[:, 0] + 0.5 * (w - 1)
y_ctr = anchor[:, 1] + 0.5 * (h - 1)
return w, h, x_ctr, y_ctr
def get_plane_anchors(self, anchor_scales: np.ndarray):
"""get anchors per location on feature map.
The anchor number is anchor_scales x anchor_ratios
"""
base_anchor = tensor([0, 0, self.base_size - 1, self.base_size - 1])
base_anchor = F.add_axis(base_anchor, 0)
w, h, x_ctr, y_ctr = self._whctrs(base_anchor)
# ratio enumerate
size = w * h
size_ratios = size / self.anchor_ratios
ws = size_ratios.sqrt().round()
hs = (ws * self.anchor_ratios).round()
# scale enumerate
anchor_scales = anchor_scales[None, ...]
ws = F.add_axis(ws, 1)
hs = F.add_axis(hs, 1)
ws = (ws * anchor_scales).reshape(-1, 1)
hs = (hs * anchor_scales).reshape(-1, 1)
anchors = F.concat(
[
x_ctr - 0.5 * (ws - 1),
y_ctr - 0.5 * (hs - 1),
x_ctr + 0.5 * (ws - 1),
y_ctr + 0.5 * (hs - 1),
],
axis=1,
)
return anchors.astype(np.float32)
def get_center_offsets(self, featmap, stride):
f_shp = featmap.shape
fm_height, fm_width = f_shp[-2], f_shp[-1]
shift_x = F.linspace(0, fm_width - 1, fm_width) * stride
shift_y = F.linspace(0, fm_height - 1, fm_height) * stride
# make the mesh grid of shift_x and shift_y
mesh_shape = (fm_height, fm_width)
broad_shift_x = shift_x.reshape(-1, shift_x.shape[0]).broadcast(*mesh_shape)
broad_shift_y = shift_y.reshape(shift_y.shape[0], -1).broadcast(*mesh_shape)
flatten_shift_x = F.add_axis(broad_shift_x.reshape(-1), 1)
flatten_shift_y = F.add_axis(broad_shift_y.reshape(-1), 1)
centers = F.concat(
[flatten_shift_x, flatten_shift_y, flatten_shift_x, flatten_shift_y,],
axis=1,
)
if self.offset > 0:
centers = centers + self.offset * stride
return centers
def get_anchors_by_feature(self, featmap, stride):
# shifts shape: [A, 4]
shifts = self.get_center_offsets(featmap, stride)
# plane_anchors shape: [B, 4], e.g. B=9
plane_anchors = self.get_plane_anchors(self.anchor_scales * stride)
all_anchors = F.add_axis(plane_anchors, 0) + F.add_axis(shifts, 1)
all_anchors = all_anchors.reshape(-1, 4)
return all_anchors
def __call__(self, featmap, stride):
return self.get_anchors_by_feature(featmap, stride)
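# --- Editor's illustrative sketch (not part of the original module) ---
# For a feature map with spatial size H x W, get_anchors_by_feature returns
# (H * W * num_anchors, 4) boxes, where num_anchors equals
# len(anchor_scales) * len(anchor_ratios) (9 with the defaults). The dummy
# shapes below are assumptions chosen only for illustration.
#
#   dummy_featmap = tensor(np.zeros((1, 256, 2, 2), dtype=np.float32))
#   anchors = DefaultAnchorGenerator(base_size=8)(dummy_featmap, stride=8)
#   # expected shape: (2 * 2 * 9, 4) == (36, 4)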
|
[
"megengine.functional.linspace",
"megengine.core.tensor",
"megengine.functional.add_axis",
"megengine.functional.concat"
] |
[((2161, 2180), 'numpy.array', 'np.array', (['[2, 3, 4]'], {}), '([2, 3, 4])\n', (2169, 2180), True, 'import numpy as np\n'), ((2218, 2239), 'numpy.array', 'np.array', (['[0.5, 1, 2]'], {}), '([0.5, 1, 2])\n', (2226, 2239), True, 'import numpy as np\n'), ((2971, 3025), 'megengine.core.tensor', 'tensor', (['[0, 0, self.base_size - 1, self.base_size - 1]'], {}), '([0, 0, self.base_size - 1, self.base_size - 1])\n', (2977, 3025), False, 'from megengine.core import tensor, Tensor\n'), ((3048, 3074), 'megengine.functional.add_axis', 'F.add_axis', (['base_anchor', '(0)'], {}), '(base_anchor, 0)\n', (3058, 3074), True, 'import megengine.functional as F\n'), ((3402, 3419), 'megengine.functional.add_axis', 'F.add_axis', (['ws', '(1)'], {}), '(ws, 1)\n', (3412, 3419), True, 'import megengine.functional as F\n'), ((3433, 3450), 'megengine.functional.add_axis', 'F.add_axis', (['hs', '(1)'], {}), '(hs, 1)\n', (3443, 3450), True, 'import megengine.functional as F\n'), ((3568, 3686), 'megengine.functional.concat', 'F.concat', (['[x_ctr - 0.5 * (ws - 1), y_ctr - 0.5 * (hs - 1), x_ctr + 0.5 * (ws - 1), \n y_ctr + 0.5 * (hs - 1)]'], {'axis': '(1)'}), '([x_ctr - 0.5 * (ws - 1), y_ctr - 0.5 * (hs - 1), x_ctr + 0.5 * (ws -\n 1), y_ctr + 0.5 * (hs - 1)], axis=1)\n', (3576, 3686), True, 'import megengine.functional as F\n'), ((4526, 4616), 'megengine.functional.concat', 'F.concat', (['[flatten_shift_x, flatten_shift_y, flatten_shift_x, flatten_shift_y]'], {'axis': '(1)'}), '([flatten_shift_x, flatten_shift_y, flatten_shift_x,\n flatten_shift_y], axis=1)\n', (4534, 4616), True, 'import megengine.functional as F\n'), ((3992, 4029), 'megengine.functional.linspace', 'F.linspace', (['(0)', '(fm_width - 1)', 'fm_width'], {}), '(0, fm_width - 1, fm_width)\n', (4002, 4029), True, 'import megengine.functional as F\n'), ((4057, 4096), 'megengine.functional.linspace', 'F.linspace', (['(0)', '(fm_height - 1)', 'fm_height'], {}), '(0, fm_height - 1, fm_height)\n', (4067, 4096), True, 'import megengine.functional as F\n'), ((5045, 5073), 'megengine.functional.add_axis', 'F.add_axis', (['plane_anchors', '(0)'], {}), '(plane_anchors, 0)\n', (5055, 5073), True, 'import megengine.functional as F\n'), ((5076, 5097), 'megengine.functional.add_axis', 'F.add_axis', (['shifts', '(1)'], {}), '(shifts, 1)\n', (5086, 5097), True, 'import megengine.functional as F\n')]
|
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("traction_issue_enabled", sa.Boolean(), nullable=False),
sa.Column("public_did", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("cred_def_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("wallet_key", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"student",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("age", sa.Integer(), nullable=True),
sa.Column("student_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(op.f("ix_student_name"), "student", ["name"], unique=False)
op.create_table(
"out_of_band",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("msg", postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column("msg_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sender_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("recipient_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("action", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["recipient_id"],
["line_of_business.id"],
),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.ForeignKeyConstraint(
["sender_id"],
["line_of_business.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("out_of_band")
op.drop_index(op.f("ix_student_name"), table_name="student")
op.drop_table("student")
op.drop_table("line_of_business")
op.drop_index(op.f("ix_job_applicant_name"), table_name="job_applicant")
op.drop_table("job_applicant")
op.drop_table("sandbox")
# ### end Alembic commands ###
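# --- Editor's illustrative note (not part of the original migration) ---
# Assuming a standard Alembic setup, this revision is applied and rolled back
# from the command line; the exact invocation depends on the project layout.
#
#   alembic upgrade head      # runs upgrade() above
#   alembic downgrade -1      # runs downgrade() above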
|
[
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.sql.sqltypes.AutoString"
] |
[((7044, 7072), 'alembic.op.drop_table', 'op.drop_table', (['"""out_of_band"""'], {}), "('out_of_band')\n", (7057, 7072), False, 'from alembic import op\n'), ((7142, 7166), 'alembic.op.drop_table', 'op.drop_table', (['"""student"""'], {}), "('student')\n", (7155, 7166), False, 'from alembic import op\n'), ((7171, 7204), 'alembic.op.drop_table', 'op.drop_table', (['"""line_of_business"""'], {}), "('line_of_business')\n", (7184, 7204), False, 'from alembic import op\n'), ((7286, 7316), 'alembic.op.drop_table', 'op.drop_table', (['"""job_applicant"""'], {}), "('job_applicant')\n", (7299, 7316), False, 'from alembic import op\n'), ((7321, 7345), 'alembic.op.drop_table', 'op.drop_table', (['"""sandbox"""'], {}), "('sandbox')\n", (7334, 7345), False, 'from alembic import op\n'), ((1040, 1069), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1063, 1069), True, 'import sqlalchemy as sa\n'), ((2281, 2336), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (2304, 2336), True, 'import sqlalchemy as sa\n'), ((2381, 2410), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2404, 2410), True, 'import sqlalchemy as sa\n'), ((2420, 2461), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (2439, 2461), True, 'import sqlalchemy as sa\n'), ((2498, 2527), 'alembic.op.f', 'op.f', (['"""ix_job_applicant_name"""'], {}), "('ix_job_applicant_name')\n", (2502, 2527), False, 'from alembic import op\n'), ((3782, 3837), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (3805, 3837), True, 'import sqlalchemy as sa\n'), ((3882, 3911), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3905, 3911), True, 'import sqlalchemy as sa\n'), ((5256, 5311), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (5279, 5311), True, 'import sqlalchemy as sa\n'), ((5356, 5385), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (5379, 5385), True, 'import sqlalchemy as sa\n'), ((5395, 5436), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (5414, 5436), True, 'import sqlalchemy as sa\n'), ((5464, 5487), 'alembic.op.f', 'op.f', (['"""ix_student_name"""'], {}), "('ix_student_name')\n", (5468, 5487), False, 'from alembic import op\n'), ((6564, 6630), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['recipient_id']", "['line_of_business.id']"], {}), "(['recipient_id'], ['line_of_business.id'])\n", (6587, 6630), True, 'import sqlalchemy as sa\n'), ((6675, 6730), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (6698, 6730), True, 'import sqlalchemy as sa\n'), ((6775, 6838), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sender_id']", "['line_of_business.id']"], {}), "(['sender_id'], ['line_of_business.id'])\n", (6798, 6838), True, 'import sqlalchemy as sa\n'), ((6883, 6912), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (6906, 6912), True, 'import sqlalchemy as sa\n'), 
((7091, 7114), 'alembic.op.f', 'op.f', (['"""ix_student_name"""'], {}), "('ix_student_name')\n", (7095, 7114), False, 'from alembic import op\n'), ((7223, 7252), 'alembic.op.f', 'op.f', (['"""ix_job_applicant_name"""'], {}), "('ix_job_applicant_name')\n", (7227, 7252), False, 'from alembic import op\n'), ((498, 527), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (513, 527), False, 'from sqlalchemy.dialects import postgresql\n'), ((682, 704), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (702, 704), False, 'from sqlalchemy.dialects import postgresql\n'), ((847, 869), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (867, 869), False, 'from sqlalchemy.dialects import postgresql\n'), ((980, 1014), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1012, 1014), False, 'import sqlmodel\n'), ((1172, 1201), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (1187, 1201), False, 'from sqlalchemy.dialects import postgresql\n'), ((1356, 1378), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (1376, 1378), False, 'from sqlalchemy.dialects import postgresql\n'), ((1521, 1543), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (1541, 1543), False, 'from sqlalchemy.dialects import postgresql\n'), ((1655, 1689), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1687, 1689), False, 'import sqlmodel\n'), ((1740, 1768), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1766, 1768), False, 'import sqlmodel\n'), ((1815, 1849), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1847, 1849), False, 'import sqlmodel\n'), ((1893, 1906), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1904, 1906), True, 'import sqlalchemy as sa\n'), ((1975, 2009), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2007, 2009), False, 'import sqlmodel\n'), ((2071, 2099), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2097, 2099), False, 'import sqlmodel\n'), ((2148, 2176), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2174, 2176), False, 'import sqlmodel\n'), ((2221, 2255), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2253, 2255), False, 'import sqlmodel\n'), ((2673, 2702), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (2688, 2702), False, 'from sqlalchemy.dialects import postgresql\n'), ((2857, 2879), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2877, 2879), False, 'from sqlalchemy.dialects import postgresql\n'), ((3022, 3044), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (3042, 3044), False, 'from sqlalchemy.dialects import postgresql\n'), ((3156, 3190), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3188, 3190), False, 'import sqlmodel\n'), ((3242, 3276), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3274, 3276), False, 'import sqlmodel\n'), ((3326, 3354), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', 
(3352, 3354), False, 'import sqlmodel\n'), ((3417, 3429), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3427, 3429), True, 'import sqlalchemy as sa\n'), ((3480, 3514), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3512, 3514), False, 'import sqlmodel\n'), ((3565, 3599), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3597, 3599), False, 'import sqlmodel\n'), ((3648, 3676), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3674, 3676), False, 'import sqlmodel\n'), ((3727, 3755), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3753, 3755), False, 'import sqlmodel\n'), ((4008, 4037), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (4023, 4037), False, 'from sqlalchemy.dialects import postgresql\n'), ((4192, 4214), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (4212, 4214), False, 'from sqlalchemy.dialects import postgresql\n'), ((4357, 4379), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (4377, 4379), False, 'from sqlalchemy.dialects import postgresql\n'), ((4491, 4525), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4523, 4525), False, 'import sqlmodel\n'), ((4576, 4604), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (4602, 4604), False, 'import sqlmodel\n'), ((4651, 4685), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4683, 4685), False, 'import sqlmodel\n'), ((4728, 4740), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4738, 4740), True, 'import sqlalchemy as sa\n'), ((4790, 4824), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4822, 4824), False, 'import sqlmodel\n'), ((4868, 4881), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (4879, 4881), True, 'import sqlalchemy as sa\n'), ((4950, 4984), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4982, 4984), False, 'import sqlmodel\n'), ((5046, 5074), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (5072, 5074), False, 'import sqlmodel\n'), ((5123, 5151), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (5149, 5151), False, 'import sqlmodel\n'), ((5196, 5230), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5228, 5230), False, 'import sqlmodel\n'), ((5617, 5646), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (5632, 5646), False, 'from sqlalchemy.dialects import postgresql\n'), ((5801, 5823), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (5821, 5823), False, 'from sqlalchemy.dialects import postgresql\n'), ((5966, 5988), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (5986, 5988), False, 'from sqlalchemy.dialects import postgresql\n'), ((6185, 6219), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6217, 6219), False, 'import sqlmodel\n'), ((6269, 6297), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6295, 6297), False, 'import sqlmodel\n'), ((6350, 6378), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), 
'()\n', (6376, 6378), False, 'import sqlmodel\n'), ((6429, 6457), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6455, 6457), False, 'import sqlmodel\n'), ((6504, 6538), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6536, 6538), False, 'import sqlmodel\n'), ((556, 584), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (563, 584), True, 'import sqlalchemy as sa\n'), ((733, 749), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (740, 749), True, 'import sqlalchemy as sa\n'), ((898, 914), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (905, 914), True, 'import sqlalchemy as sa\n'), ((1230, 1258), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (1237, 1258), True, 'import sqlalchemy as sa\n'), ((1407, 1423), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1414, 1423), True, 'import sqlalchemy as sa\n'), ((1572, 1588), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1579, 1588), True, 'import sqlalchemy as sa\n'), ((2731, 2759), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (2738, 2759), True, 'import sqlalchemy as sa\n'), ((2908, 2924), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2915, 2924), True, 'import sqlalchemy as sa\n'), ((3073, 3089), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (3080, 3089), True, 'import sqlalchemy as sa\n'), ((4066, 4094), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (4073, 4094), True, 'import sqlalchemy as sa\n'), ((4243, 4259), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (4250, 4259), True, 'import sqlalchemy as sa\n'), ((4408, 4424), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (4415, 4424), True, 'import sqlalchemy as sa\n'), ((5675, 5703), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (5682, 5703), True, 'import sqlalchemy as sa\n'), ((5852, 5868), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (5859, 5868), True, 'import sqlalchemy as sa\n'), ((6017, 6033), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (6024, 6033), True, 'import sqlalchemy as sa\n'), ((6127, 6136), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6134, 6136), True, 'import sqlalchemy as sa\n')]
|
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel
from pydantic.networks import HttpUrl
from sqlmodel import Field, SQLModel
PageType = str
class Ecoindex(SQLModel):
grade: Optional[str] = Field(
default=None,
title="Ecoindex grade",
description="Is the corresponding ecoindex grade of the page (from A to G)",
)
score: Optional[float] = Field(
default=None,
title="Ecoindex score",
description="Is the corresponding ecoindex score of the page (0 to 100)",
ge=0,
le=100,
)
ges: Optional[float] = Field(
default=None,
title="Ecoindex GES equivalent",
description="Is the equivalent of greenhouse gases emission (in `gCO2e`) of the page",
ge=0,
)
water: Optional[float] = Field(
default=None,
title="Ecoindex Water equivalent",
description="Is the equivalent water consumption (in `cl`) of the page",
ge=0,
)
class Page(BaseModel):
logs: List
outer_html: str
nodes: List
class PageMetrics(SQLModel):
size: float = Field(
default=...,
title="Page size",
description="Is the size of the page and of the downloaded elements of the page in KB",
ge=0,
)
nodes: int = Field(
default=...,
title="Page nodes",
description="Is the number of the DOM elements in the page",
ge=0,
)
requests: int = Field(
default=...,
title="Page requests",
description="Is the number of external requests made by the page",
ge=0,
)
class WindowSize(BaseModel):
height: int = Field(
default=...,
title="Window height",
description="Height of the simulated window in pixel",
)
width: int = Field(
default=...,
title="Window width",
description="Width of the simulated window in pixel",
)
def __str__(self) -> str:
return f"{self.width},{self.height}"
class WebPage(SQLModel):
width: Optional[int] = Field(
default=None,
title="Page Width",
description="Width of the simulated window in pixel",
)
height: Optional[int] = Field(
default=None,
title="Page Height",
description="Height of the simulated window in pixel",
)
url: Optional[HttpUrl] = Field(
default=None, title="Page url", description="Url of the analysed page"
)
class Result(Ecoindex, PageMetrics, WebPage):
date: Optional[datetime] = Field(
default=None, title="Analysis datetime", description="Date of the analysis"
)
page_type: Optional[PageType] = Field(
default=None,
title="Page type",
description="Is the type of the page, based ton the [opengraph type tag](https://ogp.me/#types)",
)
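# Minimal usage sketch (invented values, not taken from any analysis run): it shows
# how the combined Result model is instantiated and how the ge/le bounds on
# score, ges, water, size, nodes and requests validate the input.
if __name__ == "__main__":
    example = Result(
        url="https://www.example.com",
        width=1920,
        height=1080,
        size=1024.5,
        nodes=350,
        requests=42,
        grade="B",
        score=72.0,
        ges=1.8,
        water=2.7,
        page_type="website",
    )
    print(example.json())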
|
[
"sqlmodel.Field"
] |
[((242, 367), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex grade"""', 'description': '"""Is the corresponding ecoindex grade of the page (from A to G)"""'}), "(default=None, title='Ecoindex grade', description=\n 'Is the corresponding ecoindex grade of the page (from A to G)')\n", (247, 367), False, 'from sqlmodel import Field, SQLModel\n'), ((423, 559), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex score"""', 'description': '"""Is the corresponding ecoindex score of the page (0 to 100)"""', 'ge': '(0)', 'le': '(100)'}), "(default=None, title='Ecoindex score', description=\n 'Is the corresponding ecoindex score of the page (0 to 100)', ge=0, le=100)\n", (428, 559), False, 'from sqlmodel import Field, SQLModel\n'), ((629, 783), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex GES equivalent"""', 'description': '"""Is the equivalent of greenhouse gases emission (in `gCO2e`) of the page"""', 'ge': '(0)'}), "(default=None, title='Ecoindex GES equivalent', description=\n 'Is the equivalent of greenhouse gases emission (in `gCO2e`) of the page',\n ge=0)\n", (634, 783), False, 'from sqlmodel import Field, SQLModel\n'), ((843, 981), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex Water equivalent"""', 'description': '"""Is the equivalent water consumption (in `cl`) of the page"""', 'ge': '(0)'}), "(default=None, title='Ecoindex Water equivalent', description=\n 'Is the equivalent water consumption (in `cl`) of the page', ge=0)\n", (848, 981), False, 'from sqlmodel import Field, SQLModel\n'), ((1141, 1281), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Page size"""', 'description': '"""Is the size of the page and of the downloaded elements of the page in KB"""', 'ge': '(0)'}), "(default=..., title='Page size', description=\n 'Is the size of the page and of the downloaded elements of the page in KB',\n ge=0)\n", (1146, 1281), False, 'from sqlmodel import Field, SQLModel\n'), ((1329, 1439), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Page nodes"""', 'description': '"""Is the number of the DOM elements in the page"""', 'ge': '(0)'}), "(default=..., title='Page nodes', description=\n 'Is the number of the DOM elements in the page', ge=0)\n", (1334, 1439), False, 'from sqlmodel import Field, SQLModel\n'), ((1494, 1613), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Page requests"""', 'description': '"""Is the number of external requests made by the page"""', 'ge': '(0)'}), "(default=..., title='Page requests', description=\n 'Is the number of external requests made by the page', ge=0)\n", (1499, 1613), False, 'from sqlmodel import Field, SQLModel\n'), ((1697, 1798), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Window height"""', 'description': '"""Height of the simulated window in pixel"""'}), "(default=..., title='Window height', description=\n 'Height of the simulated window in pixel')\n", (1702, 1798), False, 'from sqlmodel import Field, SQLModel\n'), ((1842, 1941), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Window width"""', 'description': '"""Width of the simulated window in pixel"""'}), "(default=..., title='Window width', description=\n 'Width of the simulated window in pixel')\n", (1847, 1941), False, 'from sqlmodel import Field, SQLModel\n'), ((2098, 2196), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page Width"""', 'description': '"""Width of the simulated window in 
pixel"""'}), "(default=None, title='Page Width', description=\n 'Width of the simulated window in pixel')\n", (2103, 2196), False, 'from sqlmodel import Field, SQLModel\n'), ((2251, 2351), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page Height"""', 'description': '"""Height of the simulated window in pixel"""'}), "(default=None, title='Page Height', description=\n 'Height of the simulated window in pixel')\n", (2256, 2351), False, 'from sqlmodel import Field, SQLModel\n'), ((2407, 2484), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page url"""', 'description': '"""Url of the analysed page"""'}), "(default=None, title='Page url', description='Url of the analysed page')\n", (2412, 2484), False, 'from sqlmodel import Field, SQLModel\n'), ((2578, 2665), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Analysis datetime"""', 'description': '"""Date of the analysis"""'}), "(default=None, title='Analysis datetime', description=\n 'Date of the analysis')\n", (2583, 2665), False, 'from sqlmodel import Field, SQLModel\n'), ((2711, 2857), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page type"""', 'description': '"""Is the type of the page, based ton the [opengraph type tag](https://ogp.me/#types)"""'}), "(default=None, title='Page type', description=\n 'Is the type of the page, based ton the [opengraph type tag](https://ogp.me/#types)'\n )\n", (2716, 2857), False, 'from sqlmodel import Field, SQLModel\n')]
|
from typing import Optional, List
from sqlmodel import (
SQLModel,
Field,
create_engine,
Relationship
)
engine = create_engine('sqlite:///database.db')
class Person(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
nome: str
idade: int
    livros: List['Livro'] = Relationship(back_populates='pessoa')
class Livro(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
titulo: str
pessoa_id: Optional[int] = Field(default=None, foreign_key='person.id')
pessoa: Optional[Person] = Relationship(back_populates='livros')
SQLModel.metadata.create_all(engine)
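# Minimal usage sketch, assuming the engine and tables created above; the sample
# rows are invented purely to show how the Person <-> Livro relationship is
# persisted and traversed.
from sqlmodel import Session
with Session(engine) as session:
    pessoa = Person(nome="Ana", idade=30)
    pessoa.livros = [Livro(titulo="Livro A"), Livro(titulo="Livro B")]
    session.add(pessoa)
    session.commit()
    session.refresh(pessoa)
    for livro in pessoa.livros:
        print(pessoa.nome, "->", livro.titulo)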
|
[
"sqlmodel.Relationship",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] |
[((131, 169), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {}), "('sqlite:///database.db')\n", (144, 169), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((629, 665), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (657, 665), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((232, 269), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (237, 269), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((328, 365), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""person"""'}), "(back_populates='person')\n", (340, 365), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((427, 464), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (432, 464), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((513, 557), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""person.id"""'}), "(default=None, foreign_key='person.id')\n", (518, 557), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((589, 626), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""livros"""'}), "(back_populates='livros')\n", (601, 626), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n')]
|
from __future__ import annotations
import inspect
from functools import wraps
from typing import Any, Callable, Dict, List, Literal, Type, TypeVar
from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Query, noload, raiseload, selectinload, subqueryload
from sqlalchemy.sql.elements import BinaryExpression
from sqlmodel import SQLModel, select
from sqlmodel.ext.asyncio.session import AsyncSession
Self = TypeVar("Self", bound="Base")
LoadStrategy = Literal["subquery", "selectin", "raise", "raise_on_sql", "noload"]
load_strategy_map: Dict[LoadStrategy, Callable[..., Any]] = {
"subquery": subqueryload,
"selectin": selectinload,
"raise": raiseload,
"raise_on_sql": raiseload,
"noload": noload,
}
class InvalidTable(RuntimeError):
"""Raised when calling a method coupled to SQLAlchemy operations.
It should be called only by SQLModel objects that are tables.
"""
def is_table(cls: Type[Self]) -> bool:
base_is_table = False
for base in cls.__bases__:
config = getattr(base, "__config__")
if config and getattr(config, "table", False):
base_is_table = True
break
return getattr(cls.__config__, "table", False) and not base_is_table
def validate_table(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
cls = self if inspect.isclass(self) else self.__class__
if not is_table(cls):
raise InvalidTable(
f'"{cls.__name__}" is not a table. '
"Add the class parameter `table=True` or don't use with this object."
)
return func(self, *args, **kwargs)
return wrapper
def _prepare_query(
cls: Type[Self], load_strategy: Dict[str, LoadStrategy] | None
) -> Query:
load_strategy = load_strategy or {}
query = select(cls)
for key, strategy in load_strategy.items():
query = query.options(load_strategy_map[strategy](key))
return query
class Base(SQLModel):
@classmethod
@validate_table
async def get(
cls: Type[Self],
session: AsyncSession,
*args: BinaryExpression,
load_strategy: Dict[str, LoadStrategy] = None,
**kwargs: Any,
) -> Self:
query = _prepare_query(cls, load_strategy)
result = await session.execute(query.filter(*args).filter_by(**kwargs))
return result.scalars().first()
@classmethod
@validate_table
async def get_multi(
cls: Type[Self],
session: AsyncSession,
*args: BinaryExpression,
load_strategy: Dict[str, LoadStrategy] = None,
offset: int = 0,
limit: int = 100,
**kwargs: Any,
) -> List[Self]:
query = _prepare_query(cls, load_strategy)
result = await session.execute(
query.filter(*args).filter_by(**kwargs).offset(offset).limit(limit)
)
return result.scalars().all()
@classmethod
@validate_table
async def create(cls: Type[Self], session: AsyncSession, **kwargs: Any) -> Self:
db_obj = cls(**kwargs)
session.add(db_obj)
await session.commit()
return db_obj
@validate_table
async def update(self: Self, session: AsyncSession, **kwargs: Any) -> Self:
obj_data = jsonable_encoder(self)
for field in obj_data:
if field in kwargs:
setattr(self, field, kwargs[field])
session.add(self)
await session.commit()
await session.refresh(self)
return self
@classmethod
@validate_table
async def delete(
cls: Type[Self], session: AsyncSession, *args: BinaryExpression, **kwargs: Any
) -> Self:
db_obj = await cls.get(session, *args, **kwargs)
await session.delete(db_obj)
await session.commit()
return db_obj
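# Minimal usage sketch: `Hero` is a hypothetical table and the AsyncSession is
# assumed to be created elsewhere; this only illustrates the intended call
# pattern of the CRUD helpers defined above.
from typing import Optional
from sqlmodel import Field
class Hero(Base, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
async def crud_demo(session: AsyncSession) -> None:
    hero = await Hero.create(session, name="Alice")
    fetched = await Hero.get(session, Hero.name == "Alice")
    heroes = await Hero.get_multi(session, limit=10)
    await fetched.update(session, name="Alicia")
    await Hero.delete(session, Hero.id == hero.id)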
|
[
"sqlmodel.select"
] |
[((428, 457), 'typing.TypeVar', 'TypeVar', (['"""Self"""'], {'bound': '"""Base"""'}), "('Self', bound='Base')\n", (435, 457), False, 'from typing import Any, Callable, Dict, List, Literal, Type, TypeVar\n'), ((1277, 1288), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (1282, 1288), False, 'from functools import wraps\n'), ((1824, 1835), 'sqlmodel.select', 'select', (['cls'], {}), '(cls)\n', (1830, 1835), False, 'from sqlmodel import SQLModel, select\n'), ((3274, 3296), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['self'], {}), '(self)\n', (3290, 3296), False, 'from fastapi.encoders import jsonable_encoder\n'), ((1351, 1372), 'inspect.isclass', 'inspect.isclass', (['self'], {}), '(self)\n', (1366, 1372), False, 'import inspect\n')]
|
"""initial3
Revision ID: 01b6c8ce3965
Revises: <PASSWORD>
Create Date: 2021-11-01 04:29:57.210756
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '01b6c8ce3965'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('size_x', sa.Float(), nullable=False))
op.add_column('images', sa.Column('size_y', sa.Float(), nullable=False))
op.create_index(op.f('ix_images_size_x'), 'images', ['size_x'], unique=False)
op.create_index(op.f('ix_images_size_y'), 'images', ['size_y'], unique=False)
op.add_column('listings', sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False))
op.create_index(op.f('ix_listings_url'), 'listings', ['url'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_listings_url'), table_name='listings')
op.drop_column('listings', 'url')
op.drop_index(op.f('ix_images_size_y'), table_name='images')
op.drop_index(op.f('ix_images_size_x'), table_name='images')
op.drop_column('images', 'size_y')
op.drop_column('images', 'size_x')
# ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((1075, 1108), 'alembic.op.drop_column', 'op.drop_column', (['"""listings"""', '"""url"""'], {}), "('listings', 'url')\n", (1089, 1108), False, 'from alembic import op\n'), ((1243, 1277), 'alembic.op.drop_column', 'op.drop_column', (['"""images"""', '"""size_y"""'], {}), "('images', 'size_y')\n", (1257, 1277), False, 'from alembic import op\n'), ((1282, 1316), 'alembic.op.drop_column', 'op.drop_column', (['"""images"""', '"""size_x"""'], {}), "('images', 'size_x')\n", (1296, 1316), False, 'from alembic import op\n'), ((561, 585), 'alembic.op.f', 'op.f', (['"""ix_images_size_x"""'], {}), "('ix_images_size_x')\n", (565, 585), False, 'from alembic import op\n'), ((643, 667), 'alembic.op.f', 'op.f', (['"""ix_images_size_y"""'], {}), "('ix_images_size_y')\n", (647, 667), False, 'from alembic import op\n'), ((825, 848), 'alembic.op.f', 'op.f', (['"""ix_listings_url"""'], {}), "('ix_listings_url')\n", (829, 848), False, 'from alembic import op\n'), ((1023, 1046), 'alembic.op.f', 'op.f', (['"""ix_listings_url"""'], {}), "('ix_listings_url')\n", (1027, 1046), False, 'from alembic import op\n'), ((1127, 1151), 'alembic.op.f', 'op.f', (['"""ix_images_size_y"""'], {}), "('ix_images_size_y')\n", (1131, 1151), False, 'from alembic import op\n'), ((1192, 1216), 'alembic.op.f', 'op.f', (['"""ix_images_size_x"""'], {}), "('ix_images_size_x')\n", (1196, 1216), False, 'from alembic import op\n'), ((435, 445), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (443, 445), True, 'import sqlalchemy as sa\n'), ((512, 522), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (520, 522), True, 'import sqlalchemy as sa\n'), ((752, 786), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (784, 786), False, 'import sqlmodel\n')]
|
import megengine as mge
import megengine.module as nn
import megengine.functional as F
from model.module import Encoder, Fusion, Decoder, Regression
from common import se3, quaternion
import math
class OMNet(nn.Module):
def __init__(self, params):
super(OMNet, self).__init__()
self.num_iter = params.titer
self.encoder = [Encoder() for _ in range(self.num_iter)]
self.fusion = [Fusion() for _ in range(self.num_iter)]
self.decoder = [Decoder() for _ in range(self.num_iter)]
self.regression = [Regression() for _ in range(self.num_iter)]
self.overlap_dist = params.overlap_dist
for m in self.modules():
if isinstance(m, nn.Conv1d) or isinstance(m, nn.Linear):
nn.init.msra_normal_(m.weight, a=math.sqrt(5))
if m.bias is not None:
fan_in, _ = nn.init.calculate_fan_in_and_fan_out(m.weight)
bound = 1 / math.sqrt(fan_in)
nn.init.uniform_(m.bias, -bound, bound)
# elif isinstance(m, nn.BatchNorm1d):
# nn.init.ones_(m.weight)
# nn.init.zeros_(m.bias)
def generate_overlap_mask(self, points_src, points_ref, mask_src, mask_ref, transform_gt):
points_src[F.logical_not(mask_src.astype("bool")), :] = 50.0
points_ref[F.logical_not(mask_ref.astype("bool")), :] = 100.0
points_src = se3.mge_transform(transform_gt, points_src)
points_src = F.expand_dims(points_src, axis=2)
points_ref = F.expand_dims(points_ref, axis=1)
dist_matrix = F.sqrt(F.sum(F.square(points_src - points_ref), axis=-1)) # (B, N, N)
dist_s2r = F.min(dist_matrix, axis=2)
dist_r2s = F.min(dist_matrix, axis=1)
overlap_src_mask = dist_s2r < self.overlap_dist # (B, N)
overlap_ref_mask = dist_r2s < self.overlap_dist # (B, N)
return overlap_src_mask, overlap_ref_mask
def forward(self, data_batch):
endpoints = {}
xyz_src = data_batch["points_src"]
xyz_ref = data_batch["points_ref"]
transform_gt = data_batch["transform_gt"]
pose_gt = data_batch["pose_gt"]
# init endpoints
all_src_cls_pair = []
all_ref_cls_pair = []
all_transform_pair = []
all_pose_pair = []
all_xyz_src_t = [xyz_src]
# init params
B, src_N, _ = xyz_src.shape
_, ref_N, _ = xyz_ref.shape
init_quat = F.tile(mge.tensor([1, 0, 0, 0], dtype="float32"), (B, 1)) # (B, 4)
init_translate = F.tile(mge.tensor([0, 0, 0], dtype="float32"), (B, 1)) # (B, 3)
pose_pred = F.concat((init_quat, init_translate), axis=1) # (B, 7)
# rename xyz_src
xyz_src_iter = F.copy(xyz_src, device=xyz_src.device)
for i in range(self.num_iter):
# deley mask
if i < 2:
src_pred_mask = F.ones((B, src_N), dtype=xyz_src.dtype)
ref_pred_mask = F.ones((B, ref_N), dtype=xyz_ref.dtype)
# encoder
src_encoder_feats, src_glob_feat = self.encoder[i](xyz_src_iter.transpose(0, 2, 1).detach(), F.expand_dims(src_pred_mask,
axis=1))
ref_encoder_feats, ref_glob_feat = self.encoder[i](xyz_ref.transpose(0, 2, 1).detach(), F.expand_dims(ref_pred_mask, axis=1))
# fusion
src_concat_feat = F.concat(
(src_encoder_feats[0], F.repeat(src_glob_feat, src_N, axis=2), F.repeat(ref_glob_feat, src_N, axis=2)), axis=1)
ref_concat_feat = F.concat(
(ref_encoder_feats[0], F.repeat(ref_glob_feat, ref_N, axis=2), F.repeat(src_glob_feat, ref_N, axis=2)), axis=1)
_, src_fused_feat = self.fusion[i](src_concat_feat, F.expand_dims(src_pred_mask, axis=1))
_, ref_fused_feat = self.fusion[i](ref_concat_feat, F.expand_dims(ref_pred_mask, axis=1))
# decoder
src_decoder_feats, src_cls_pred = self.decoder[i](src_fused_feat)
ref_decoder_feats, ref_cls_pred = self.decoder[i](ref_fused_feat)
# regression
src_feat = F.concat(src_decoder_feats, axis=1) * F.expand_dims(src_pred_mask, axis=1)
ref_feat = F.concat(ref_decoder_feats, axis=1) * F.expand_dims(ref_pred_mask, axis=1)
concat_feat = F.concat((src_fused_feat, src_feat, ref_fused_feat, ref_feat), axis=1)
concat_feat = F.max(concat_feat, axis=-1)
pose_pred_iter = self.regression[i](concat_feat) # (B, 7)
xyz_src_iter = quaternion.mge_quat_transform(pose_pred_iter, xyz_src_iter.detach())
pose_pred = quaternion.mge_transform_pose(pose_pred.detach(), pose_pred_iter)
transform_pred = quaternion.mge_quat2mat(pose_pred)
# compute overlap and cls gt
overlap_src_mask, overlap_ref_mask = self.generate_overlap_mask(F.copy(xyz_src, device=xyz_src.device),
F.copy(xyz_ref, device=xyz_ref.device), src_pred_mask,
ref_pred_mask, transform_gt)
# overlap_src_mask, overlap_ref_mask = self.generate_overlap_mask(xyz_src, xyz_ref, src_pred_mask, ref_pred_mask, transform_gt)
src_cls_gt = F.ones((B, src_N)) * overlap_src_mask
ref_cls_gt = F.ones((B, ref_N)) * overlap_ref_mask
src_pred_mask = F.argmax(src_cls_pred, axis=1)
ref_pred_mask = F.argmax(ref_cls_pred, axis=1)
# add endpoints
all_src_cls_pair.append([src_cls_gt, src_cls_pred])
all_ref_cls_pair.append([ref_cls_gt, ref_cls_pred])
all_transform_pair.append([transform_gt, transform_pred])
all_pose_pair.append([pose_gt, pose_pred])
all_xyz_src_t.append(xyz_src_iter)
endpoints["all_src_cls_pair"] = all_src_cls_pair
endpoints["all_ref_cls_pair"] = all_ref_cls_pair
endpoints["all_transform_pair"] = all_transform_pair
endpoints["all_pose_pair"] = all_pose_pair
endpoints["transform_pair"] = [transform_gt, transform_pred]
endpoints["pose_pair"] = [pose_gt, pose_pred]
endpoints["all_xyz_src_t"] = all_xyz_src_t
return endpoints
def fetch_net(params):
if params.net_type == "omnet":
net = OMNet(params)
else:
raise NotImplementedError
return net
|
[
"megengine.functional.repeat",
"megengine.module.init.calculate_fan_in_and_fan_out",
"megengine.functional.ones",
"megengine.functional.expand_dims",
"megengine.functional.concat",
"megengine.functional.max",
"megengine.functional.min",
"megengine.functional.argmax",
"megengine.tensor",
"megengine.functional.copy",
"megengine.module.init.uniform_",
"megengine.functional.square"
] |
[((1424, 1467), 'common.se3.mge_transform', 'se3.mge_transform', (['transform_gt', 'points_src'], {}), '(transform_gt, points_src)\n', (1441, 1467), False, 'from common import se3, quaternion\n'), ((1489, 1522), 'megengine.functional.expand_dims', 'F.expand_dims', (['points_src'], {'axis': '(2)'}), '(points_src, axis=2)\n', (1502, 1522), True, 'import megengine.functional as F\n'), ((1544, 1577), 'megengine.functional.expand_dims', 'F.expand_dims', (['points_ref'], {'axis': '(1)'}), '(points_ref, axis=1)\n', (1557, 1577), True, 'import megengine.functional as F\n'), ((1690, 1716), 'megengine.functional.min', 'F.min', (['dist_matrix'], {'axis': '(2)'}), '(dist_matrix, axis=2)\n', (1695, 1716), True, 'import megengine.functional as F\n'), ((1736, 1762), 'megengine.functional.min', 'F.min', (['dist_matrix'], {'axis': '(1)'}), '(dist_matrix, axis=1)\n', (1741, 1762), True, 'import megengine.functional as F\n'), ((2653, 2698), 'megengine.functional.concat', 'F.concat', (['(init_quat, init_translate)'], {'axis': '(1)'}), '((init_quat, init_translate), axis=1)\n', (2661, 2698), True, 'import megengine.functional as F\n'), ((2758, 2796), 'megengine.functional.copy', 'F.copy', (['xyz_src'], {'device': 'xyz_src.device'}), '(xyz_src, device=xyz_src.device)\n', (2764, 2796), True, 'import megengine.functional as F\n'), ((353, 362), 'model.module.Encoder', 'Encoder', ([], {}), '()\n', (360, 362), False, 'from model.module import Encoder, Fusion, Decoder, Regression\n'), ((417, 425), 'model.module.Fusion', 'Fusion', ([], {}), '()\n', (423, 425), False, 'from model.module import Encoder, Fusion, Decoder, Regression\n'), ((481, 490), 'model.module.Decoder', 'Decoder', ([], {}), '()\n', (488, 490), False, 'from model.module import Encoder, Fusion, Decoder, Regression\n'), ((549, 561), 'model.module.Regression', 'Regression', ([], {}), '()\n', (559, 561), False, 'from model.module import Encoder, Fusion, Decoder, Regression\n'), ((2482, 2523), 'megengine.tensor', 'mge.tensor', (['[1, 0, 0, 0]'], {'dtype': '"""float32"""'}), "([1, 0, 0, 0], dtype='float32')\n", (2492, 2523), True, 'import megengine as mge\n'), ((2575, 2613), 'megengine.tensor', 'mge.tensor', (['[0, 0, 0]'], {'dtype': '"""float32"""'}), "([0, 0, 0], dtype='float32')\n", (2585, 2613), True, 'import megengine as mge\n'), ((4439, 4509), 'megengine.functional.concat', 'F.concat', (['(src_fused_feat, src_feat, ref_fused_feat, ref_feat)'], {'axis': '(1)'}), '((src_fused_feat, src_feat, ref_fused_feat, ref_feat), axis=1)\n', (4447, 4509), True, 'import megengine.functional as F\n'), ((4536, 4563), 'megengine.functional.max', 'F.max', (['concat_feat'], {'axis': '(-1)'}), '(concat_feat, axis=-1)\n', (4541, 4563), True, 'import megengine.functional as F\n'), ((4850, 4884), 'common.quaternion.mge_quat2mat', 'quaternion.mge_quat2mat', (['pose_pred'], {}), '(pose_pred)\n', (4873, 4884), False, 'from common import se3, quaternion\n'), ((5573, 5603), 'megengine.functional.argmax', 'F.argmax', (['src_cls_pred'], {'axis': '(1)'}), '(src_cls_pred, axis=1)\n', (5581, 5603), True, 'import megengine.functional as F\n'), ((5632, 5662), 'megengine.functional.argmax', 'F.argmax', (['ref_cls_pred'], {'axis': '(1)'}), '(ref_cls_pred, axis=1)\n', (5640, 5662), True, 'import megengine.functional as F\n'), ((1613, 1646), 'megengine.functional.square', 'F.square', (['(points_src - points_ref)'], {}), '(points_src - points_ref)\n', (1621, 1646), True, 'import megengine.functional as F\n'), ((2916, 2955), 'megengine.functional.ones', 'F.ones', (['(B, src_N)'], {'dtype': 
'xyz_src.dtype'}), '((B, src_N), dtype=xyz_src.dtype)\n', (2922, 2955), True, 'import megengine.functional as F\n'), ((2988, 3027), 'megengine.functional.ones', 'F.ones', (['(B, ref_N)'], {'dtype': 'xyz_ref.dtype'}), '((B, ref_N), dtype=xyz_ref.dtype)\n', (2994, 3027), True, 'import megengine.functional as F\n'), ((3156, 3192), 'megengine.functional.expand_dims', 'F.expand_dims', (['src_pred_mask'], {'axis': '(1)'}), '(src_pred_mask, axis=1)\n', (3169, 3192), True, 'import megengine.functional as F\n'), ((3413, 3449), 'megengine.functional.expand_dims', 'F.expand_dims', (['ref_pred_mask'], {'axis': '(1)'}), '(ref_pred_mask, axis=1)\n', (3426, 3449), True, 'import megengine.functional as F\n'), ((3872, 3908), 'megengine.functional.expand_dims', 'F.expand_dims', (['src_pred_mask'], {'axis': '(1)'}), '(src_pred_mask, axis=1)\n', (3885, 3908), True, 'import megengine.functional as F\n'), ((3974, 4010), 'megengine.functional.expand_dims', 'F.expand_dims', (['ref_pred_mask'], {'axis': '(1)'}), '(ref_pred_mask, axis=1)\n', (3987, 4010), True, 'import megengine.functional as F\n'), ((4240, 4275), 'megengine.functional.concat', 'F.concat', (['src_decoder_feats'], {'axis': '(1)'}), '(src_decoder_feats, axis=1)\n', (4248, 4275), True, 'import megengine.functional as F\n'), ((4278, 4314), 'megengine.functional.expand_dims', 'F.expand_dims', (['src_pred_mask'], {'axis': '(1)'}), '(src_pred_mask, axis=1)\n', (4291, 4314), True, 'import megengine.functional as F\n'), ((4338, 4373), 'megengine.functional.concat', 'F.concat', (['ref_decoder_feats'], {'axis': '(1)'}), '(ref_decoder_feats, axis=1)\n', (4346, 4373), True, 'import megengine.functional as F\n'), ((4376, 4412), 'megengine.functional.expand_dims', 'F.expand_dims', (['ref_pred_mask'], {'axis': '(1)'}), '(ref_pred_mask, axis=1)\n', (4389, 4412), True, 'import megengine.functional as F\n'), ((5003, 5041), 'megengine.functional.copy', 'F.copy', (['xyz_src'], {'device': 'xyz_src.device'}), '(xyz_src, device=xyz_src.device)\n', (5009, 5041), True, 'import megengine.functional as F\n'), ((5119, 5157), 'megengine.functional.copy', 'F.copy', (['xyz_ref'], {'device': 'xyz_ref.device'}), '(xyz_ref, device=xyz_ref.device)\n', (5125, 5157), True, 'import megengine.functional as F\n'), ((5444, 5462), 'megengine.functional.ones', 'F.ones', (['(B, src_N)'], {}), '((B, src_N))\n', (5450, 5462), True, 'import megengine.functional as F\n'), ((5507, 5525), 'megengine.functional.ones', 'F.ones', (['(B, ref_N)'], {}), '((B, ref_N))\n', (5513, 5525), True, 'import megengine.functional as F\n'), ((878, 924), 'megengine.module.init.calculate_fan_in_and_fan_out', 'nn.init.calculate_fan_in_and_fan_out', (['m.weight'], {}), '(m.weight)\n', (914, 924), True, 'import megengine.module as nn\n'), ((995, 1034), 'megengine.module.init.uniform_', 'nn.init.uniform_', (['m.bias', '(-bound)', 'bound'], {}), '(m.bias, -bound, bound)\n', (1011, 1034), True, 'import megengine.module as nn\n'), ((3551, 3589), 'megengine.functional.repeat', 'F.repeat', (['src_glob_feat', 'src_N'], {'axis': '(2)'}), '(src_glob_feat, src_N, axis=2)\n', (3559, 3589), True, 'import megengine.functional as F\n'), ((3591, 3629), 'megengine.functional.repeat', 'F.repeat', (['ref_glob_feat', 'src_N'], {'axis': '(2)'}), '(ref_glob_feat, src_N, axis=2)\n', (3599, 3629), True, 'import megengine.functional as F\n'), ((3719, 3757), 'megengine.functional.repeat', 'F.repeat', (['ref_glob_feat', 'ref_N'], {'axis': '(2)'}), '(ref_glob_feat, ref_N, axis=2)\n', (3727, 3757), True, 'import megengine.functional as F\n'), 
((3759, 3797), 'megengine.functional.repeat', 'F.repeat', (['src_glob_feat', 'ref_N'], {'axis': '(2)'}), '(src_glob_feat, ref_N, axis=2)\n', (3767, 3797), True, 'import megengine.functional as F\n'), ((793, 805), 'math.sqrt', 'math.sqrt', (['(5)'], {}), '(5)\n', (802, 805), False, 'import math\n'), ((957, 974), 'math.sqrt', 'math.sqrt', (['fan_in'], {}), '(fan_in)\n', (966, 974), False, 'import math\n')]
|
import os
from sqlmodel import SQLModel, create_engine
from .base_variables import APPNAME,DEBUG
from utils import print_warning
import sys
if DEBUG:
PG_HOST: str = os.getenv("PGHOST", "localhost")
PG_USER: str = os.getenv("PGUSER", "postgres")
PG_PASSWORD: str = os.getenv("PGPASSWORD", "<PASSWORD>")
PG_PORT: str = os.getenv("PGPORT", "5432")
PG_DATABASE:str = os.getenv("PGDATABASE", APPNAME)
DATABASE_URL:str = os.getenv("DATABASE_URL", f"postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}")
else:
DATABASE_URL = os.getenv("DATABASE_URL", "")
if not DATABASE_URL:
print_warning(
(
"You are not setting the DATABASE_URL in your environment!",
)
)
sys.exit("[ERROR] Default DATABASE_URL is not set\n")
print(DATABASE_URL)
dbengine = create_engine(DATABASE_URL)
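# Minimal sketch (assumes the table models have been imported before this runs):
# create any declared SQLModel tables against the configured engine.
def init_db() -> None:
    SQLModel.metadata.create_all(dbengine)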
|
[
"sqlmodel.create_engine"
] |
[((847, 874), 'sqlmodel.create_engine', 'create_engine', (['DATABASE_URL'], {}), '(DATABASE_URL)\n', (860, 874), False, 'from sqlmodel import SQLModel, create_engine\n'), ((193, 225), 'os.getenv', 'os.getenv', (['"""PGHOST"""', '"""localhost"""'], {}), "('PGHOST', 'localhost')\n", (202, 225), False, 'import os\n'), ((245, 276), 'os.getenv', 'os.getenv', (['"""PGUSER"""', '"""postgres"""'], {}), "('PGUSER', 'postgres')\n", (254, 276), False, 'import os\n'), ((300, 337), 'os.getenv', 'os.getenv', (['"""PGPASSWORD"""', '"""<PASSWORD>"""'], {}), "('PGPASSWORD', '<PASSWORD>')\n", (309, 337), False, 'import os\n'), ((357, 384), 'os.getenv', 'os.getenv', (['"""PGPORT"""', '"""5432"""'], {}), "('PGPORT', '5432')\n", (366, 384), False, 'import os\n'), ((407, 439), 'os.getenv', 'os.getenv', (['"""PGDATABASE"""', 'APPNAME'], {}), "('PGDATABASE', APPNAME)\n", (416, 439), False, 'import os\n'), ((463, 567), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""', 'f"""postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}"""'], {}), "('DATABASE_URL',\n f'postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}')\n", (472, 567), False, 'import os\n'), ((589, 618), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""', '""""""'], {}), "('DATABASE_URL', '')\n", (598, 618), False, 'import os\n'), ((644, 721), 'utils.print_warning', 'print_warning', (["('You are not setting the DATABASE_URL in your environment!',)"], {}), "(('You are not setting the DATABASE_URL in your environment!',))\n", (657, 721), False, 'from utils import print_warning\n'), ((762, 815), 'sys.exit', 'sys.exit', (['"""[ERROR] Default DATABASE_URL is not set\n"""'], {}), "('[ERROR] Default DATABASE_URL is not set\\n')\n", (770, 815), False, 'import sys\n')]
|
import os
import time
import numpy as np
import megengine.distributed as dist
import megengine as mge
import megengine.functional as F
from megengine.autodiff import GradManager
from edit.core.hook.evaluation import psnr, ssim
from edit.utils import imwrite, tensor2img, bgr2ycbcr, img_multi_padding, img_de_multi_padding, ensemble_forward, ensemble_back
from edit.utils import img_multi_padding, img_de_multi_padding, flow_to_image
from ..base import BaseModel
from ..builder import build_backbone, build_loss
from ..registry import MODELS
from tqdm import tqdm
def get_bilinear(image):
B,T,C,h,w = image.shape
image = image.reshape(-1, C,h,w)
return F.nn.interpolate(image, scale_factor=4).reshape(B,T,C,4*h, 4*w)
def train_generator_batch(image, label, *, gm, netG, netloss):
B,T,_,h,w = image.shape
biup = get_bilinear(image)
netG.train()
with gm:
forward_hiddens = []
backward_hiddens = []
res = []
hidden = F.zeros((2*B, netG.hidden_channels, h, w))
for i in range(T):
now_frame = F.concat([image[:, i, ...], image[:, T-i-1, ...]], axis=0)
if i==0:
flow = netG.flownet(now_frame, now_frame)
else:
ref = F.concat([image[:, i-1, ...], image[:, T-i, ...]], axis=0)
flow = netG.flownet(now_frame, ref)
hidden = netG(hidden, flow, now_frame)
forward_hiddens.append(hidden[0:B, ...])
backward_hiddens.append(hidden[B:2*B, ...])
for i in range(T):
res.append(netG.do_upsample(forward_hiddens[i], backward_hiddens[T-i-1]))
res = F.stack(res, axis = 1) # [B,T,3,H,W]
loss = netloss(res+biup, label)
gm.backward(loss)
if dist.is_distributed():
loss = dist.functional.all_reduce_sum(loss) / dist.get_world_size()
return loss
def test_generator_batch(image, *, netG):
# image: [1,100,3,180,320]
B,T,_,h,w = image.shape
biup = get_bilinear(image)
netG.eval()
forward_hiddens = []
backward_hiddens = []
res = []
hidden = F.zeros((2*B, netG.hidden_channels, h, w))
for i in range(T):
now_frame = F.concat([image[:, i, ...], image[:, T-i-1, ...]], axis=0)
if i==0:
flow = netG.flownet(now_frame, now_frame)
else:
ref = F.concat([image[:, i-1, ...], image[:, T-i, ...]], axis=0)
flow = netG.flownet(now_frame, ref)
hidden = netG(hidden, flow, now_frame)
forward_hiddens.append(hidden[0:B, ...])
backward_hiddens.append(hidden[B:2*B, ...])
for i in range(T):
res.append(netG.do_upsample(forward_hiddens[i], backward_hiddens[T-i-1]))
res = F.stack(res, axis = 1) # [B,T,3,H,W]
return res + biup
epoch_dict = {}
def adjust_learning_rate(optimizer, epoch):
if epoch>=8 and epoch % 1 == 0 and epoch_dict.get(epoch, None) is None:
epoch_dict[epoch] = True
for param_group in optimizer.param_groups:
param_group["lr"] = param_group["lr"] * 0.8
print("adjust lr! , now lr: {}".format(param_group["lr"]))
# TODO: factor the shared logic into a parent class; with more than one model the code (e.g. get_img_id and test_step) gets duplicated
@MODELS.register_module()
class BidirectionalRestorer(BaseModel):
allowed_metrics = {'PSNR': psnr, 'SSIM': ssim}
def __init__(self, generator, pixel_loss, train_cfg=None, eval_cfg=None, pretrained=None, Fidelity_loss=None):
super(BidirectionalRestorer, self).__init__()
self.train_cfg = train_cfg
self.eval_cfg = eval_cfg
# generator
self.generator = build_backbone(generator)
# loss
self.pixel_loss = build_loss(pixel_loss)
if Fidelity_loss:
self.Fidelity_loss = build_loss(Fidelity_loss)
else:
self.Fidelity_loss = None
# load pretrained
self.init_weights(pretrained)
def init_weights(self, pretrained=None):
self.generator.init_weights(pretrained)
def train_step(self, batchdata, now_epoch, now_iter):
LR_tensor = mge.tensor(batchdata['lq'], dtype="float32")
HR_tensor = mge.tensor(batchdata['gt'], dtype="float32")
loss = train_generator_batch(LR_tensor, HR_tensor, gm=self.gms['generator'], netG=self.generator, netloss=self.pixel_loss)
adjust_learning_rate(self.optimizers['generator'], now_epoch)
self.optimizers['generator'].step()
self.optimizers['generator'].clear_grad()
return loss
def get_img_id(self, key):
shift = self.eval_cfg.get('save_shift', 0)
assert isinstance(key, str)
L = key.split("/")
return int(L[-1][:-4]), str(int(L[-2]) - shift).zfill(3) # id, clip
def test_step(self, batchdata, **kwargs):
"""
possible kwargs:
save_image
save_path
ensemble
"""
lq = batchdata['lq'] # [B,3,h,w]
gt = batchdata.get('gt', None) # if not None: [B,3,4*h,4*w]
        assert len(batchdata['lq_path']) == 1  # each sample carries an lq_path list of length 1, i.e. itself
        lq_paths = batchdata['lq_path'][0]  # its length equals the batch size
now_start_id, clip = self.get_img_id(lq_paths[0])
now_end_id, _ = self.get_img_id(lq_paths[-1])
assert clip == _
if now_start_id==0:
print("first frame: {}".format(lq_paths[0]))
self.LR_list = []
self.HR_list = []
# pad lq
B ,_ ,origin_H, origin_W = lq.shape
lq = img_multi_padding(lq, padding_multi=self.eval_cfg.multi_pad, pad_method = "edge") # edge constant
self.LR_list.append(lq) # [1,3,h,w]
if gt is not None:
for i in range(B):
self.HR_list.append(gt[i:i+1, ...])
if now_end_id == 99:
print("start to forward all frames....")
if self.eval_cfg.gap == 1:
# do ensemble (8 times)
ensemble_res = []
self.LR_list = np.concatenate(self.LR_list, axis=0) # [100, 3,h,w]
for item in tqdm(range(8)): # do not have flip
inp = mge.tensor(ensemble_forward(self.LR_list, Type=item), dtype="float32")
oup = test_generator_batch(F.expand_dims(inp, axis=0), netG=self.generator)
ensemble_res.append(ensemble_back(oup.numpy(), Type=item))
                self.HR_G = sum(ensemble_res) / len(ensemble_res)  # average the ensemble results
elif self.eval_cfg.gap == 2:
raise NotImplementedError("not implement gap != 1 now")
# self.HR_G_1 = test_generator_batch(F.stack(self.LR_list[::2], axis=1), netG=self.generator)
# self.HR_G_2 = test_generator_batch(F.stack(self.LR_list[1::2], axis=1), netG=self.generator) # [B,T,C,H,W]
                # # interleave to form HR_G
# res = []
# _,T1,_,_,_ = self.HR_G_1.shape
# _,T2,_,_,_ = self.HR_G_2.shape
# assert T1 == T2
# for i in range(T1):
# res.append(self.HR_G_1[:, i, ...])
# res.append(self.HR_G_2[:, i, ...])
# self.HR_G = F.stack(res, axis=1) # [B,T,C,H,W]
else:
raise NotImplementedError("do not support eval&test gap value")
scale = self.generator.upscale_factor
# get numpy
self.HR_G = img_de_multi_padding(self.HR_G, origin_H=origin_H * scale, origin_W=origin_W * scale) # depad for HR_G [B,T,C,H,W]
if kwargs.get('save_image', False):
print("saving images to disk ...")
save_path = kwargs.get('save_path')
B,T,_,_,_ = self.HR_G.shape
assert B == 1
assert T == 100
for i in range(T):
img = tensor2img(self.HR_G[0, i, ...], min_max=(0, 1))
if (i+1)%10 == 0:
imwrite(img, file_path=os.path.join(save_path, "partframes", f"{clip}_{str(i).zfill(8)}.png"))
imwrite(img, file_path=os.path.join(save_path, "allframes", f"{clip}_{str(i).zfill(8)}.png"))
return now_end_id == 99
def cal_for_eval(self, gathered_outputs, gathered_batchdata):
if gathered_outputs:
crop_border = self.eval_cfg.crop_border
assert len(self.HR_list) == 100
res = []
for i in range(len(self.HR_list)):
G = tensor2img(self.HR_G[0, i, ...], min_max=(0, 1))
gt = tensor2img(self.HR_list[i][0], min_max=(0, 1))
eval_result = dict()
for metric in self.eval_cfg.metrics:
eval_result[metric+"_RGB"] = self.allowed_metrics[metric](G, gt, crop_border)
# eval_result[metric+"_Y"] = self.allowed_metrics[metric](G_key_y, gt_y, crop_border)
res.append(eval_result)
return res
else:
return []
|
[
"megengine.tensor",
"megengine.functional.nn.interpolate",
"megengine.distributed.functional.all_reduce_sum",
"megengine.functional.zeros",
"megengine.functional.stack",
"megengine.functional.expand_dims",
"megengine.distributed.is_distributed",
"megengine.distributed.get_world_size",
"megengine.functional.concat"
] |
[((2104, 2148), 'megengine.functional.zeros', 'F.zeros', (['(2 * B, netG.hidden_channels, h, w)'], {}), '((2 * B, netG.hidden_channels, h, w))\n', (2111, 2148), True, 'import megengine.functional as F\n'), ((2722, 2742), 'megengine.functional.stack', 'F.stack', (['res'], {'axis': '(1)'}), '(res, axis=1)\n', (2729, 2742), True, 'import megengine.functional as F\n'), ((975, 1019), 'megengine.functional.zeros', 'F.zeros', (['(2 * B, netG.hidden_channels, h, w)'], {}), '((2 * B, netG.hidden_channels, h, w))\n', (982, 1019), True, 'import megengine.functional as F\n'), ((1645, 1665), 'megengine.functional.stack', 'F.stack', (['res'], {'axis': '(1)'}), '(res, axis=1)\n', (1652, 1665), True, 'import megengine.functional as F\n'), ((1759, 1780), 'megengine.distributed.is_distributed', 'dist.is_distributed', ([], {}), '()\n', (1778, 1780), True, 'import megengine.distributed as dist\n'), ((2190, 2252), 'megengine.functional.concat', 'F.concat', (['[image[:, i, ...], image[:, T - i - 1, ...]]'], {'axis': '(0)'}), '([image[:, i, ...], image[:, T - i - 1, ...]], axis=0)\n', (2198, 2252), True, 'import megengine.functional as F\n'), ((4055, 4099), 'megengine.tensor', 'mge.tensor', (["batchdata['lq']"], {'dtype': '"""float32"""'}), "(batchdata['lq'], dtype='float32')\n", (4065, 4099), True, 'import megengine as mge\n'), ((4120, 4164), 'megengine.tensor', 'mge.tensor', (["batchdata['gt']"], {'dtype': '"""float32"""'}), "(batchdata['gt'], dtype='float32')\n", (4130, 4164), True, 'import megengine as mge\n'), ((5499, 5578), 'edit.utils.img_multi_padding', 'img_multi_padding', (['lq'], {'padding_multi': 'self.eval_cfg.multi_pad', 'pad_method': '"""edge"""'}), "(lq, padding_multi=self.eval_cfg.multi_pad, pad_method='edge')\n", (5516, 5578), False, 'from edit.utils import img_multi_padding, img_de_multi_padding, flow_to_image\n'), ((665, 704), 'megengine.functional.nn.interpolate', 'F.nn.interpolate', (['image'], {'scale_factor': '(4)'}), '(image, scale_factor=4)\n', (681, 704), True, 'import megengine.functional as F\n'), ((1069, 1131), 'megengine.functional.concat', 'F.concat', (['[image[:, i, ...], image[:, T - i - 1, ...]]'], {'axis': '(0)'}), '([image[:, i, ...], image[:, T - i - 1, ...]], axis=0)\n', (1077, 1131), True, 'import megengine.functional as F\n'), ((2352, 2414), 'megengine.functional.concat', 'F.concat', (['[image[:, i - 1, ...], image[:, T - i, ...]]'], {'axis': '(0)'}), '([image[:, i - 1, ...], image[:, T - i, ...]], axis=0)\n', (2360, 2414), True, 'import megengine.functional as F\n'), ((7418, 7508), 'edit.utils.img_de_multi_padding', 'img_de_multi_padding', (['self.HR_G'], {'origin_H': '(origin_H * scale)', 'origin_W': '(origin_W * scale)'}), '(self.HR_G, origin_H=origin_H * scale, origin_W=\n origin_W * scale)\n', (7438, 7508), False, 'from edit.utils import img_multi_padding, img_de_multi_padding, flow_to_image\n'), ((1247, 1309), 'megengine.functional.concat', 'F.concat', (['[image[:, i - 1, ...], image[:, T - i, ...]]'], {'axis': '(0)'}), '([image[:, i - 1, ...], image[:, T - i, ...]], axis=0)\n', (1255, 1309), True, 'import megengine.functional as F\n'), ((1801, 1837), 'megengine.distributed.functional.all_reduce_sum', 'dist.functional.all_reduce_sum', (['loss'], {}), '(loss)\n', (1831, 1837), True, 'import megengine.distributed as dist\n'), ((1840, 1861), 'megengine.distributed.get_world_size', 'dist.get_world_size', ([], {}), '()\n', (1859, 1861), True, 'import megengine.distributed as dist\n'), ((5982, 6018), 'numpy.concatenate', 'np.concatenate', (['self.LR_list'], {'axis': 
'(0)'}), '(self.LR_list, axis=0)\n', (5996, 6018), True, 'import numpy as np\n'), ((8507, 8555), 'edit.utils.tensor2img', 'tensor2img', (['self.HR_G[0, i, ...]'], {'min_max': '(0, 1)'}), '(self.HR_G[0, i, ...], min_max=(0, 1))\n', (8517, 8555), False, 'from edit.utils import imwrite, tensor2img, bgr2ycbcr, img_multi_padding, img_de_multi_padding, ensemble_forward, ensemble_back\n'), ((8577, 8623), 'edit.utils.tensor2img', 'tensor2img', (['self.HR_list[i][0]'], {'min_max': '(0, 1)'}), '(self.HR_list[i][0], min_max=(0, 1))\n', (8587, 8623), False, 'from edit.utils import imwrite, tensor2img, bgr2ycbcr, img_multi_padding, img_de_multi_padding, ensemble_forward, ensemble_back\n'), ((7854, 7902), 'edit.utils.tensor2img', 'tensor2img', (['self.HR_G[0, i, ...]'], {'min_max': '(0, 1)'}), '(self.HR_G[0, i, ...], min_max=(0, 1))\n', (7864, 7902), False, 'from edit.utils import imwrite, tensor2img, bgr2ycbcr, img_multi_padding, img_de_multi_padding, ensemble_forward, ensemble_back\n'), ((6134, 6175), 'edit.utils.ensemble_forward', 'ensemble_forward', (['self.LR_list'], {'Type': 'item'}), '(self.LR_list, Type=item)\n', (6150, 6175), False, 'from edit.utils import imwrite, tensor2img, bgr2ycbcr, img_multi_padding, img_de_multi_padding, ensemble_forward, ensemble_back\n'), ((6241, 6267), 'megengine.functional.expand_dims', 'F.expand_dims', (['inp'], {'axis': '(0)'}), '(inp, axis=0)\n', (6254, 6267), True, 'import megengine.functional as F\n')]
|
from sqlmodel import create_engine
from pyflarum.database.session import FlarumDatabase
from pyflarum.database.flarum.core.users import DB_User
ENGINE = create_engine('sqlite:///tests/database/database.db')
DATABASE = FlarumDatabase(engine=ENGINE)
if __name__ == "__main__":
with DATABASE:
for user in DATABASE.generic_filter(DB_User, id=1).all():
if user.discussions:
print(user.username, ':', sep='')
for discussion in user.discussions:
print('•', discussion.title)
else:
print(user.username, '(no discussions)')
|
[
"sqlmodel.create_engine"
] |
[((156, 209), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///tests/database/database.db"""'], {}), "('sqlite:///tests/database/database.db')\n", (169, 209), False, 'from sqlmodel import create_engine\n'), ((221, 250), 'pyflarum.database.session.FlarumDatabase', 'FlarumDatabase', ([], {'engine': 'ENGINE'}), '(engine=ENGINE)\n', (235, 250), False, 'from pyflarum.database.session import FlarumDatabase\n')]
|
"""
dayong.impls
~~~~~~~~~~~~
Implementation of interfaces and the logic for injecting them.
"""
import asyncio
from typing import Any
import tanjun
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel, select
from sqlmodel.engine.result import ScalarResult
from sqlmodel.ext.asyncio.session import AsyncSession
from dayong.configs import DayongConfig, DayongConfigLoader
from dayong.models import Message
class MessageDBImpl:
"""Implementaion of a database connection for transacting and interacting with
message tables, those of which derive from message table models.
The data to be selected, retrieved, and modified is determined by the table model
object and its type. The type, in this case, is `dayong.models.Message`.
"""
def __init__(self) -> None:
self._conn: AsyncEngine
async def connect(self, config: DayongConfig = tanjun.injected(type=DayongConfig)):
"""Create a database connection.
If the `database_uri` is Falsy, the function will reattempt to get the url from
the environment variables.
Args:
config (DayongConfig, optional): [description]. Defaults to
tanjun.injected(type=DayongConfig).
"""
loop = asyncio.get_running_loop()
self._conn = await loop.run_in_executor(
None,
create_async_engine,
config.database_uri if config.database_uri else DayongConfigLoader().load(),
)
async def create_table(self) -> None:
"""Create physical message tables for all the message table models stored in
`SQLModel.metadata`.
"""
async with self._conn.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
async def add_row(self, tabe_model_object: Message) -> None:
"""Insert a row in the message table.
Args:
table_model_object (Message): An instance of `dayong.models.Message` or one
of its subclasses.
"""
async with AsyncSession(self._conn) as session:
loop = asyncio.get_running_loop()
await loop.run_in_executor(None, session.add, tabe_model_object)
await session.commit()
async def remove_row(self, tabe_model_object: Message) -> None:
"""Delete a row in the message table.
Args:
table_model_object (Message): An instance of `dayong.models.Message` or one
of its subclasses.
"""
table_model = type(tabe_model_object)
async with AsyncSession(self._conn) as session:
# Temp ignore incompatible type passed to `exec()`. See:
# https://github.com/tiangolo/sqlmodel/issues/54
# https://github.com/tiangolo/sqlmodel/pull/58
row: ScalarResult[Any] = await session.exec(
select(table_model).where( # type: ignore
table_model.message_id == tabe_model_object.message_id
)
)
await session.delete(row)
await session.commit()
async def get_row(self, tabe_model_object: Message) -> ScalarResult[Any]:
"""Fetch a row from the message table.
Args:
tabe_model_object (Message): An instance of `dayong.models.Message` or one
of its subclasses.
Returns:
ScalarResult: An `ScalarResult` object which contains a scalar value or
sequence of scalar values.
"""
table_model = type(tabe_model_object)
async with AsyncSession(self._conn) as session:
# Temp ignore incompatible type passed to `exec()`. See:
# https://github.com/tiangolo/sqlmodel/issues/54
# https://github.com/tiangolo/sqlmodel/pull/58
row: ScalarResult[Any] = await session.exec(
select(table_model).where( # type: ignore
table_model.message_id == tabe_model_object.message_id
)
)
return row
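# Minimal usage sketch (assumes a loaded DayongConfig and that a Message row can
# be built from just its message_id; the real field set lives in dayong.models).
async def message_db_demo(config: DayongConfig) -> None:
    db = MessageDBImpl()
    await db.connect(config)
    await db.create_table()
    message = Message(message_id=1234)
    await db.add_row(message)
    row = await db.get_row(message)
    print(row.first())
    await db.remove_row(message)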
|
[
"sqlmodel.ext.asyncio.session.AsyncSession",
"sqlmodel.select"
] |
[((917, 951), 'tanjun.injected', 'tanjun.injected', ([], {'type': 'DayongConfig'}), '(type=DayongConfig)\n', (932, 951), False, 'import tanjun\n'), ((1285, 1311), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (1309, 1311), False, 'import asyncio\n'), ((2066, 2090), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (2078, 2090), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((2122, 2148), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (2146, 2148), False, 'import asyncio\n'), ((2587, 2611), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (2599, 2611), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((3590, 3614), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (3602, 3614), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((1472, 1492), 'dayong.configs.DayongConfigLoader', 'DayongConfigLoader', ([], {}), '()\n', (1490, 1492), False, 'from dayong.configs import DayongConfig, DayongConfigLoader\n'), ((2886, 2905), 'sqlmodel.select', 'select', (['table_model'], {}), '(table_model)\n', (2892, 2905), False, 'from sqlmodel import SQLModel, select\n'), ((3889, 3908), 'sqlmodel.select', 'select', (['table_model'], {}), '(table_model)\n', (3895, 3908), False, 'from sqlmodel import SQLModel, select\n')]
|
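The file above wires SQLModel's AsyncSession into a small asyncio CRUD layer. As a point of comparison, here is a minimal, self-contained sketch of the same pattern (async engine, table creation from `SQLModel.metadata`, and a select round trip); the `DemoMessage` model, the SQLite/aiosqlite URL and the field names are illustrative assumptions, not taken from dayong.
import asyncio
from typing import Optional

from sqlalchemy.ext.asyncio import create_async_engine
from sqlmodel import Field, SQLModel, select
from sqlmodel.ext.asyncio.session import AsyncSession


class DemoMessage(SQLModel, table=True):
    # illustrative table, not the project's Message model
    id: Optional[int] = Field(default=None, primary_key=True)
    message_id: str


async def main() -> None:
    # in-memory SQLite through the aiosqlite driver (assumed installed)
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
    async with AsyncSession(engine) as session:
        session.add(DemoMessage(message_id="123"))
        await session.commit()
        result = await session.exec(
            select(DemoMessage).where(DemoMessage.message_id == "123")
        )
        print(result.one_or_none())


asyncio.run(main())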
from sqlalchemy.engine import Engine
from sqlmodel import create_engine, Session, SQLModel
from sqlmodel.engine.create import _FutureEngine
from typing import Union, Optional
from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, \
raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains
from typing import List
class DBConnectionBase:
"""
References
----------
for users who want to know the differences between Engine, Connection, Session
https://stackoverflow.com/questions/34322471/sqlalchemy-engine-connection-and-session-difference
"""
def __init__(self, db_engine: Union[Engine, _FutureEngine]):
self._db_engine = db_engine
SQLModel.metadata.create_all(self._db_engine)
@classmethod
def from_db_user(cls, db_type, db_driver, host, user, password, port, db, charset='utf8', echo=True):
engine = create_engine(f"{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}", echo=echo)
return cls(engine)
@classmethod
def from_full_db_path(cls, full_db_path, echo=True):
engine = create_engine(f"{full_db_path}", echo=echo)
return cls(engine)
def get_db_engine(self):
return self._db_engine
def execute(self, sql, always_commit=False, fetch: Optional[Union[int, str]] = None):
with Session(self._db_engine) as session:
q = session.execute(sql)
if always_commit:
session.commit()
if fetch is not None:
raise_if_incorrect_type(fetch, (int, str))
if isinstance(fetch, int):
if fetch == 1:
return q.fetchone()
elif fetch > 1:
return q.fetchmany(fetch)
elif isinstance(fetch, str):
if fetch == 'all':
return q.fetchall()
raise ValueError
return q
def get_db_inspector(self):
from sqlalchemy import inspect
inspector = inspect(self._db_engine)
return inspector
def get_schemas(self, schemas='all', tables='all'):
inspector = self.get_db_inspector()
from collections import defaultdict
schema_containers = defaultdict(dict)
schemas = _validate_schema_names(inspector, schemas)
return _get_schemas(inspector, schema_containers, schemas, tables)
def get_tables_names(self):
inspector = self.get_db_inspector()
return inspector.get_table_names()
def _get_schemas(inspector, schema_containers, schemas: Union[str, List[str]], tables: Union[str, List[List[str]], List[str]]):
schema_list = _validate_schema_names(inspector, schemas)
if check_all_element_type_uniform(tables, list):
for schema, table in zip(schema_list, tables):
table_names = _validate_table_names(inspector, schema, table)
for sub_table_names in table_names:
schema_containers[schema][sub_table_names] = inspector.get_columns(sub_table_names, schema=schema)
return schema_containers
elif check_all_element_type_uniform(tables, str) or tables == 'all':
for schema in schema_list:
table_names = _validate_table_names(inspector, schema, tables)
for table_name in table_names:
schema_containers[schema][table_name] = inspector.get_columns(table_name, schema=schema)
return schema_containers
raise ValueError
def _validate_schema_names(inspector, schemas: List[str]):
if schemas == 'all':
return inspector.get_schema_names()
if isinstance(schemas, list):
raise_if_not_all_value_contains(schemas, inspector.get_schema_names())
return schemas
raise ValueError('schemas must be "all" or a list of string')
def _validate_table_names(inspector, schema: str, tables: List[str]):
if tables == 'all':
return inspector.get_table_names(schema=schema)
if isinstance(tables, list):
if check_all_element_type_uniform(tables, str):
raise_if_value_not_contains(tables, inspector.get_table_names(schema=schema))
return tables
elif check_all_element_type_uniform(tables, list):
for sub_tab in tables:
print(sub_tab, inspector.get_table_names(schema=schema))
raise_if_value_not_contains(sub_tab, inspector.get_table_names(schema=schema))
return tables
    raise ValueError('table names do not exist in the database, please verify')
|
[
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] |
[((2815, 2859), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'list'], {}), '(tables, list)\n', (2845, 2859), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((767, 812), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['self._db_engine'], {}), '(self._db_engine)\n', (795, 812), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((954, 1047), 'sqlmodel.create_engine', 'create_engine', (['f"""{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}"""'], {'echo': 'echo'}), "(f'{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}',\n echo=echo)\n", (967, 1047), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((1163, 1206), 'sqlmodel.create_engine', 'create_engine', (['f"""{full_db_path}"""'], {'echo': 'echo'}), "(f'{full_db_path}', echo=echo)\n", (1176, 1206), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((2117, 2141), 'sqlalchemy.inspect', 'inspect', (['self._db_engine'], {}), '(self._db_engine)\n', (2124, 2141), False, 'from sqlalchemy import inspect\n'), ((2341, 2358), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (2352, 2358), False, 'from collections import defaultdict\n'), ((4104, 4147), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'str'], {}), '(tables, str)\n', (4134, 4147), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((1400, 1424), 'sqlmodel.Session', 'Session', (['self._db_engine'], {}), '(self._db_engine)\n', (1407, 1424), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((3196, 3239), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'str'], {}), '(tables, str)\n', (3226, 3239), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((4278, 4322), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'list'], {}), '(tables, list)\n', (4308, 4322), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((1588, 1630), 'pyemits.common.validation.raise_if_incorrect_type', 'raise_if_incorrect_type', (['fetch', '(int, str)'], {}), '(fetch, (int, str))\n', (1611, 1630), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n')]
|
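A hedged usage sketch for the `DBConnectionBase` wrapper above, run against an in-memory SQLite database; the `metrics` table and the SQL strings are illustrative. Recent SQLAlchemy expects textual SQL to be wrapped in `text()` before it reaches `Session.execute`, hence the wrapping below.
from sqlalchemy import text

conn = DBConnectionBase.from_full_db_path("sqlite://", echo=False)
conn.execute(text("CREATE TABLE metrics (id INTEGER PRIMARY KEY, value REAL)"), always_commit=True)
conn.execute(text("INSERT INTO metrics (value) VALUES (1.5), (2.5)"), always_commit=True)

one_row = conn.execute(text("SELECT * FROM metrics"), fetch=1)       # fetchone()
all_rows = conn.execute(text("SELECT * FROM metrics"), fetch="all")   # fetchall()
print(one_row, all_rows)
print(conn.get_tables_names())  # ['metrics']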
from collections import deque
from time import sleep
import pytest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import WebDriverException
from sqlmodel import Session, select
from youtube.db import engine
from youtube.models import YouTube
@pytest.fixture(scope="session")
def driver():
driver = webdriver.Chrome()
try:
driver.get("http://localhost:8000/")
yield driver
except WebDriverException:
raise RuntimeError("Cannot get to localhost:8000, did you start FastAPI?")
finally:
driver.quit()
@pytest.fixture(scope="session")
def scroll_to_end(driver):
cache_size = 5
num_rows = deque(maxlen=cache_size)
i = 0
while True:
last_element = driver.find_elements_by_class_name("mui--text-subhead")[-1]
actions = webdriver.ActionChains(driver)
actions.move_to_element(last_element).perform()
i += 1
num_rows.append(len(driver.find_elements_by_tag_name("tr")))
if i > cache_size and num_rows.count(num_rows[-1]) == len(num_rows):
print("num rows stable, seems I hit the end of infinite scroll")
break
def test_number_of_rows_on_page(session, driver, scroll_to_end):
with Session(engine) as session:
num_row_in_db_table = len(session.exec(select(YouTube)).all())
num_rows_on_page = len(driver.find_elements_by_tag_name("tbody tr"))
assert num_rows_on_page == num_row_in_db_table
|
[
"sqlmodel.Session",
"sqlmodel.select"
] |
[((311, 342), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (325, 342), False, 'import pytest\n'), ((616, 647), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (630, 647), False, 'import pytest\n'), ((370, 388), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (386, 388), False, 'from selenium import webdriver\n'), ((709, 733), 'collections.deque', 'deque', ([], {'maxlen': 'cache_size'}), '(maxlen=cache_size)\n', (714, 733), False, 'from collections import deque\n'), ((862, 892), 'selenium.webdriver.ActionChains', 'webdriver.ActionChains', (['driver'], {}), '(driver)\n', (884, 892), False, 'from selenium import webdriver\n'), ((1283, 1298), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1290, 1298), False, 'from sqlmodel import Session, select\n'), ((1358, 1373), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (1364, 1373), False, 'from sqlmodel import Session, select\n')]
|
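The `scroll_to_end` fixture above stops scrolling once the row count has been stable over a small sliding window. Below is a self-contained sketch of just that stop criterion, decoupled from Selenium; the function name and the toy observation sequence are illustrative.
from collections import deque


def scroll_until_stable(observe, cache_size: int = 5, max_steps: int = 1000) -> int:
    """`observe` is any zero-argument callable returning the current row count."""
    window = deque(maxlen=cache_size)
    for step in range(max_steps):
        window.append(observe())
        # stop once the latest count fills the whole window
        if step >= cache_size and window.count(window[-1]) == len(window):
            return window[-1]
    raise RuntimeError("row count never stabilised")


# toy observation sequence standing in for len(driver.find_elements_by_tag_name("tr"))
counts = iter([10, 20, 30, 40, 40, 40, 40, 40, 40])
print(scroll_until_stable(lambda: next(counts)))  # -> 40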
import logging
from datetime import datetime
from typing import List
from sqlmodel import Session, select
from db import engine
from models import Social, User
def get_last_social() -> Social:
with Session(engine) as session:
statement = select(Social).order_by(Social.id.desc()).limit(1)
result = session.exec(statement).one_or_none()
logging.info(f"SELECT social row: {result}")
return result
def get_previous_social() -> Social:
with Session(engine) as session:
statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1)
result = session.exec(statement).one_or_none()
logging.info(f"SELECT previous social row: {result}")
return result
def create_social(fb: int, ig: int, tw: int, sp: int, yt: int):
dt_now = datetime.now().strftime("%Y%m%d_%H%M%S")
logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})")
social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt)
with Session(engine) as session:
session.add(social_row)
session.commit()
def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None):
logging.info(f"INSERT user: {first_name}")
user_row = User(telegram_id=telegram_id, username=username, first_name=first_name, last_name=last_name)
with Session(engine) as session:
session.add(user_row)
session.commit()
def get_user(telegram_id: int) -> User:
with Session(engine) as session:
statement = select(User).where(User.telegram_id == telegram_id)
result = session.exec(statement).one_or_none()
logging.info(f"SELECT user: {result}")
return result
def get_all_users() -> List[User]:
with Session(engine) as session:
statement = select(User)
result = session.exec(statement).fetchall()
logging.info(f"SELECT all users: {result}")
return result
|
[
"sqlmodel.select",
"sqlmodel.Session"
] |
[((364, 408), 'logging.info', 'logging.info', (['f"""SELECT social row: {result}"""'], {}), "(f'SELECT social row: {result}')\n", (376, 408), False, 'import logging\n'), ((643, 696), 'logging.info', 'logging.info', (['f"""SELECT previous social row: {result}"""'], {}), "(f'SELECT previous social row: {result}')\n", (655, 696), False, 'import logging\n'), ((839, 909), 'logging.info', 'logging.info', (['f"""INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})"""'], {}), "(f'INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})')\n", (851, 909), False, 'import logging\n'), ((927, 979), 'models.Social', 'Social', ([], {'dt': 'dt_now', 'fb': 'fb', 'ig': 'ig', 'tw': 'tw', 'sp': 'sp', 'yt': 'yt'}), '(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt)\n', (933, 979), False, 'from models import Social, User\n'), ((1184, 1226), 'logging.info', 'logging.info', (['f"""INSERT user: {first_name}"""'], {}), "(f'INSERT user: {first_name}')\n", (1196, 1226), False, 'import logging\n'), ((1242, 1338), 'models.User', 'User', ([], {'telegram_id': 'telegram_id', 'username': 'username', 'first_name': 'first_name', 'last_name': 'last_name'}), '(telegram_id=telegram_id, username=username, first_name=first_name,\n last_name=last_name)\n', (1246, 1338), False, 'from models import Social, User\n'), ((1637, 1675), 'logging.info', 'logging.info', (['f"""SELECT user: {result}"""'], {}), "(f'SELECT user: {result}')\n", (1649, 1675), False, 'import logging\n'), ((1857, 1900), 'logging.info', 'logging.info', (['f"""SELECT all users: {result}"""'], {}), "(f'SELECT all users: {result}')\n", (1869, 1900), False, 'import logging\n'), ((206, 221), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (213, 221), False, 'from sqlmodel import Session, select\n'), ((475, 490), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (482, 490), False, 'from sqlmodel import Session, select\n'), ((989, 1004), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (996, 1004), False, 'from sqlmodel import Session, select\n'), ((1344, 1359), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1351, 1359), False, 'from sqlmodel import Session, select\n'), ((1478, 1493), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1485, 1493), False, 'from sqlmodel import Session, select\n'), ((1740, 1755), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1747, 1755), False, 'from sqlmodel import Session, select\n'), ((1788, 1800), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1794, 1800), False, 'from sqlmodel import Session, select\n'), ((794, 808), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (806, 808), False, 'from datetime import datetime\n'), ((1526, 1538), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1532, 1538), False, 'from sqlmodel import Session, select\n'), ((278, 294), 'models.Social.id.desc', 'Social.id.desc', ([], {}), '()\n', (292, 294), False, 'from models import Social, User\n'), ((254, 268), 'sqlmodel.select', 'select', (['Social'], {}), '(Social)\n', (260, 268), False, 'from sqlmodel import Session, select\n'), ((547, 563), 'models.Social.id.desc', 'Social.id.desc', ([], {}), '()\n', (561, 563), False, 'from models import Social, User\n'), ((523, 537), 'sqlmodel.select', 'select', (['Social'], {}), '(Social)\n', (529, 537), False, 'from sqlmodel import Session, select\n')]
|
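The helpers above fetch the newest and the second-newest row by ordering on the autoincrement id and shifting with `offset(1)`. A self-contained sketch of that query pattern against a throwaway table follows; the `Sample` model and its single `fb` column are illustrative, not the project's `Social` model.
from typing import Optional

from sqlmodel import Field, SQLModel, Session, create_engine, select


class Sample(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    fb: int


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    for fb in (100, 110, 125):
        session.add(Sample(fb=fb))
    session.commit()

    latest = session.exec(
        select(Sample).order_by(Sample.id.desc()).limit(1)
    ).one_or_none()
    previous = session.exec(
        select(Sample).order_by(Sample.id.desc()).offset(1).limit(1)
    ).one_or_none()
    print(latest.fb, previous.fb)  # 125 110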
input_names = {'TL': '../examples/large_deformation/hyperelastic.py',
'UL': '../examples/large_deformation/hyperelastic_ul.py',
'ULM': '../examples/large_deformation/hyperelastic_ul_up.py'}
output_name_trunk = 'test_hyperelastic_'
from sfepy.base.testing import TestCommon
from tests_basic import NLSStatus
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
return Test(conf = conf, options = options)
def test_solution(self):
from sfepy.base.base import Struct
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.applications import solve_pde, assign_standard_hooks
import numpy as nm
import os.path as op
solutions = {}
ok = True
        for hp, pb_filename in input_names.items():
required, other = get_standard_keywords()
input_name = op.join(op.dirname(__file__), pb_filename)
test_conf = ProblemConf.from_file(input_name, required, other)
name = output_name_trunk + hp
solver_options = Struct(output_filename_trunk=name,
output_format='vtk',
save_ebc=False, save_ebc_nodes=False,
save_regions=False,
save_regions_as_groups=False,
save_field_meshes=False,
solve_not=False)
assign_standard_hooks(self, test_conf.options.get, test_conf)
self.report( 'hyperelastic formulation: %s' % (hp, ) )
status = NLSStatus(conditions=[])
pb, state = solve_pde(test_conf,
solver_options,
nls_status=status,
output_dir=self.options.out_dir,
step_hook=self.step_hook,
post_process_hook=self.post_process_hook,
post_process_hook_final=self.post_process_hook_final)
converged = status.condition == 0
ok = ok and converged
solutions[hp] = state.get_parts()['u']
self.report('%s solved' % input_name)
rerr = 1.0e-3
aerr = nm.linalg.norm(solutions['TL'], ord=None) * rerr
self.report('allowed error: rel = %e, abs = %e' % (rerr, aerr))
ok = ok and self.compare_vectors(solutions['TL'], solutions['UL'],
label1='TLF',
label2='ULF',
allowed_error=rerr)
ok = ok and self.compare_vectors(solutions['UL'], solutions['ULM'],
label1='ULF',
label2='ULF_mixed',
allowed_error=rerr)
return ok
|
[
"sfepy.base.conf.get_standard_keywords",
"sfepy.base.base.Struct",
"sfepy.base.conf.ProblemConf.from_file",
"sfepy.applications.solve_pde",
"sfepy.applications.assign_standard_hooks"
] |
[((871, 894), 'sfepy.base.conf.get_standard_keywords', 'get_standard_keywords', ([], {}), '()\n', (892, 894), False, 'from sfepy.base.conf import ProblemConf, get_standard_keywords\n'), ((987, 1037), 'sfepy.base.conf.ProblemConf.from_file', 'ProblemConf.from_file', (['input_name', 'required', 'other'], {}), '(input_name, required, other)\n', (1008, 1037), False, 'from sfepy.base.conf import ProblemConf, get_standard_keywords\n'), ((1110, 1303), 'sfepy.base.base.Struct', 'Struct', ([], {'output_filename_trunk': 'name', 'output_format': '"""vtk"""', 'save_ebc': '(False)', 'save_ebc_nodes': '(False)', 'save_regions': '(False)', 'save_regions_as_groups': '(False)', 'save_field_meshes': '(False)', 'solve_not': '(False)'}), "(output_filename_trunk=name, output_format='vtk', save_ebc=False,\n save_ebc_nodes=False, save_regions=False, save_regions_as_groups=False,\n save_field_meshes=False, solve_not=False)\n", (1116, 1303), False, 'from sfepy.base.base import Struct\n'), ((1524, 1585), 'sfepy.applications.assign_standard_hooks', 'assign_standard_hooks', (['self', 'test_conf.options.get', 'test_conf'], {}), '(self, test_conf.options.get, test_conf)\n', (1545, 1585), False, 'from sfepy.applications import solve_pde, assign_standard_hooks\n'), ((1676, 1700), 'tests_basic.NLSStatus', 'NLSStatus', ([], {'conditions': '[]'}), '(conditions=[])\n', (1685, 1700), False, 'from tests_basic import NLSStatus\n'), ((1726, 1946), 'sfepy.applications.solve_pde', 'solve_pde', (['test_conf', 'solver_options'], {'nls_status': 'status', 'output_dir': 'self.options.out_dir', 'step_hook': 'self.step_hook', 'post_process_hook': 'self.post_process_hook', 'post_process_hook_final': 'self.post_process_hook_final'}), '(test_conf, solver_options, nls_status=status, output_dir=self.\n options.out_dir, step_hook=self.step_hook, post_process_hook=self.\n post_process_hook, post_process_hook_final=self.post_process_hook_final)\n', (1735, 1946), False, 'from sfepy.applications import solve_pde, assign_standard_hooks\n'), ((2362, 2403), 'numpy.linalg.norm', 'nm.linalg.norm', (["solutions['TL']"], {'ord': 'None'}), "(solutions['TL'], ord=None)\n", (2376, 2403), True, 'import numpy as nm\n'), ((928, 948), 'os.path.dirname', 'op.dirname', (['__file__'], {}), '(__file__)\n', (938, 948), True, 'import os.path as op\n')]
|
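The test above reports an allowed absolute error derived from the reference solution, `aerr = rerr * ||u_TL||`, before comparing the TL, UL and mixed UL solutions pairwise. The snippet below is only a sketch of that relative-tolerance idea with toy vectors; it is not the actual `TestCommon.compare_vectors` implementation.
import numpy as nm

u_ref = nm.array([1.0, 2.0, 3.0])                 # stands in for solutions['TL']
rerr = 1.0e-3
aerr = nm.linalg.norm(u_ref, ord=None) * rerr     # allowed absolute error, as in the test

u_close = u_ref + 1.0e-5
u_far = u_ref + 1.0e-1
print(nm.linalg.norm(u_ref - u_close) < aerr)    # True
print(nm.linalg.norm(u_ref - u_far) < aerr)      # False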
from typing import Optional
from uuid import UUID
from ecoindex.models import Result
from sqlmodel import Field
class ApiEcoindex(Result, table=True):
id: Optional[UUID] = Field(
default=None, description="Analysis ID of type `UUID`", primary_key=True
)
host: str = Field(
default=..., title="Web page host", description="Host name of the web page"
)
version: int = Field(
default=1,
title="API version",
description="Version number of the API used to run the test",
)
|
[
"sqlmodel.Field"
] |
[((179, 258), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'description': '"""Analysis ID of type `UUID`"""', 'primary_key': '(True)'}), "(default=None, description='Analysis ID of type `UUID`', primary_key=True)\n", (184, 258), False, 'from sqlmodel import Field\n'), ((289, 376), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Web page host"""', 'description': '"""Host name of the web page"""'}), "(default=..., title='Web page host', description=\n 'Host name of the web page')\n", (294, 376), False, 'from sqlmodel import Field\n'), ((405, 509), 'sqlmodel.Field', 'Field', ([], {'default': '(1)', 'title': '"""API version"""', 'description': '"""Version number of the API used to run the test"""'}), "(default=1, title='API version', description=\n 'Version number of the API used to run the test')\n", (410, 509), False, 'from sqlmodel import Field\n')]
|
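A hedged sketch of what the `Field(...)` metadata above buys: SQLModel builds on pydantic, so the titles, descriptions and defaults surface in the exported JSON schema (and, through FastAPI, in the OpenAPI docs). The `schema()` call is the pydantic-v1-style export SQLModel currently exposes; treat the exact output keys as an assumption.
import json

schema = ApiEcoindex.schema()  # assumes the model above is importable
print(json.dumps(schema["properties"]["version"], indent=2))
# expected to include "title": "API version", "default": 1 and the description above
print(schema["properties"]["host"]["description"])  # "Host name of the web page"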
"""add swag tiers
Revision ID: <KEY>
Revises: 02338256c6aa
Create Date: 2022-06-01 05:58:25.373228+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "02338256c6aa"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"swag_tiers",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("required_attendance", sa.Integer(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.add_column(
"participants", sa.Column("swag_tier_id", sa.Integer(), nullable=True)
)
op.create_foreign_key(None, "participants", "swag_tiers", ["swag_tier_id"], ["id"])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "participants", type_="foreignkey")
op.drop_column("participants", "swag_tier_id")
op.drop_table("swag_tiers")
# ### end Alembic commands ###
|
[
"sqlmodel.sql.sqltypes.AutoString"
] |
[((876, 963), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""participants"""', '"""swag_tiers"""', "['swag_tier_id']", "['id']"], {}), "(None, 'participants', 'swag_tiers', ['swag_tier_id'],\n ['id'])\n", (897, 963), False, 'from alembic import op\n'), ((1084, 1144), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""participants"""'], {'type_': '"""foreignkey"""'}), "(None, 'participants', type_='foreignkey')\n", (1102, 1144), False, 'from alembic import op\n'), ((1149, 1195), 'alembic.op.drop_column', 'op.drop_column', (['"""participants"""', '"""swag_tier_id"""'], {}), "('participants', 'swag_tier_id')\n", (1163, 1195), False, 'from alembic import op\n'), ((1200, 1227), 'alembic.op.drop_table', 'op.drop_table', (['"""swag_tiers"""'], {}), "('swag_tiers')\n", (1213, 1227), False, 'from alembic import op\n'), ((731, 760), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (754, 760), True, 'import sqlalchemy as sa\n'), ((457, 491), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (489, 491), False, 'import sqlmodel\n'), ((543, 577), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (575, 577), False, 'import sqlmodel\n'), ((637, 649), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (647, 649), True, 'import sqlalchemy as sa\n'), ((692, 704), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (702, 704), True, 'import sqlalchemy as sa\n'), ((837, 849), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (847, 849), True, 'import sqlalchemy as sa\n')]
|
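For orientation, this is roughly the SQLModel table definition the migration above implies. The class and attribute names are assumptions reconstructed from the column list and the foreign key, not code taken from the project.
from typing import Optional

from sqlmodel import Field, SQLModel


class SwagTier(SQLModel, table=True):
    __tablename__ = "swag_tiers"

    id: Optional[int] = Field(default=None, primary_key=True)
    name: str                      # AutoString -> VARCHAR
    description: str
    required_attendance: int


class Participant(SQLModel, table=True):
    __tablename__ = "participants"
    # the real participants table has more columns; only the new FK is sketched here

    id: Optional[int] = Field(default=None, primary_key=True)
    swag_tier_id: Optional[int] = Field(default=None, foreign_key="swag_tiers.id")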
r"""
Compute homogenized elastic coefficients for a given heterogeneous linear
elastic microstructure, see [1] for details or [2] and [3] for a quick
explanation.
[1] <NAME>, <NAME>: Homogenization in open sets with holes.
Journal of Mathematical Analysis and Applications 71(2), 1979, pages 590-607.
https://doi.org/10.1016/0022-247X(79)90211-7
[2] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part I: Mathematical formulation and finite element modelling.
Computational Materials Science 45(4), 2009, pages 1073-1080.
http://dx.doi.org/10.1016/j.commatsci.2009.02.025
[3] <NAME>, <NAME>, <NAME>:
Asymptotic homogenisation in linear elasticity.
Part II: Finite element procedures and multiscale applications.
Computational Materials Science 45(4), 2009, pages 1081-1096.
http://dx.doi.org/10.1016/j.commatsci.2009.01.027
"""
from __future__ import absolute_import
import sfepy.discrete.fem.periodic as per
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson
from sfepy.homogenization.utils import define_box_regions
import sfepy.homogenization.coefs_base as cb
from sfepy import data_dir
from sfepy.base.base import Struct
from sfepy.homogenization.recovery import compute_micro_u,\
compute_stress_strain_u, compute_mac_stress_part
def recovery_le(pb, corrs, macro):
out = {}
dim = corrs['corrs_le']['u_00'].shape[1]
mic_u = - compute_micro_u(corrs['corrs_le'], macro['strain'], 'u', dim)
out['u_mic'] = Struct(name='output_data',
mode='vertex', data=mic_u,
var_name='u', dofs=None)
stress_Y, strain_Y = \
compute_stress_strain_u(pb, 'i', 'Y', 'mat.D', 'u', mic_u)
stress_Y += \
compute_mac_stress_part(pb, 'i', 'Y', 'mat.D', 'u', macro['strain'])
strain = macro['strain'] + strain_Y
out['cauchy_strain'] = Struct(name='output_data',
mode='cell', data=strain,
dofs=None)
out['cauchy_stress'] = Struct(name='output_data',
mode='cell', data=stress_Y,
dofs=None)
return out
filename_mesh = data_dir + '/meshes/3d/matrix_fiber.mesh'
dim = 3
region_lbn = (0, 0, 0)
region_rtf = (1, 1, 1)
regions = {
'Y': 'all',
'Ym': 'cells of group 1',
'Yc': 'cells of group 2',
}
regions.update(define_box_regions(dim, region_lbn, region_rtf))
materials = {
'mat': ({'D': {'Ym': stiffness_from_youngpoisson(dim, 7.0e9, 0.4),
'Yc': stiffness_from_youngpoisson(dim, 70.0e9, 0.2)}},),
}
fields = {
'corrector': ('real', dim, 'Y', 1),
}
variables = {
'u': ('unknown field', 'corrector', 0),
'v': ('test field', 'corrector', 'u'),
'Pi': ('parameter field', 'corrector', 'u'),
'Pi1': ('parameter field', 'corrector', '(set-to-None)'),
'Pi2': ('parameter field', 'corrector', '(set-to-None)'),
}
functions = {
'match_x_plane': (per.match_x_plane,),
'match_y_plane': (per.match_y_plane,),
'match_z_plane': (per.match_z_plane,),
}
ebcs = {
'fixed_u': ('Corners', {'u.all': 0.0}),
}
if dim == 3:
epbcs = {
'periodic_x': (['Left', 'Right'], {'u.all': 'u.all'},
'match_x_plane'),
'periodic_y': (['Near', 'Far'], {'u.all': 'u.all'},
'match_y_plane'),
'periodic_z': (['Top', 'Bottom'], {'u.all': 'u.all'},
'match_z_plane'),
}
else:
epbcs = {
'periodic_x': (['Left', 'Right'], {'u.all': 'u.all'},
'match_x_plane'),
'periodic_y': (['Bottom', 'Top'], {'u.all': 'u.all'},
'match_y_plane'),
}
all_periodic = ['periodic_%s' % ii for ii in ['x', 'y', 'z'][:dim]]
integrals = {
'i': 2,
}
options = {
'coefs': 'coefs',
'requirements': 'requirements',
'ls': 'ls', # linear solver to use
'volume': {'expression': 'd_volume.i.Y(u)'},
'output_dir': 'output',
'coefs_filename': 'coefs_le',
'recovery_hook': 'recovery_le',
}
equation_corrs = {
'balance_of_forces':
"""dw_lin_elastic.i.Y(mat.D, v, u) =
- dw_lin_elastic.i.Y(mat.D, v, Pi)"""
}
expr_coefs = """dw_lin_elastic.i.Y(mat.D, Pi1, Pi2)"""
coefs = {
'D': {
'requires': ['pis', 'corrs_rs'],
'expression': expr_coefs,
'set_variables': [('Pi1', ('pis', 'corrs_rs'), 'u'),
('Pi2', ('pis', 'corrs_rs'), 'u')],
'class': cb.CoefSymSym,
},
'filenames': {},
}
requirements = {
'pis': {
'variables': ['u'],
'class': cb.ShapeDimDim,
'save_name': 'corrs_pis',
'dump_variables': ['u'],
},
'corrs_rs': {
'requires': ['pis'],
'ebcs': ['fixed_u'],
'epbcs': all_periodic,
'equations': equation_corrs,
'set_variables': [('Pi', 'pis', 'u')],
'class': cb.CorrDimDim,
'save_name': 'corrs_le',
'dump_variables': ['u'],
},
}
solvers = {
'ls': ('ls.scipy_direct', {}),
'newton': ('nls.newton', {
'i_max': 1,
'eps_a': 1e-4,
})
}
|
[
"sfepy.base.base.Struct",
"sfepy.homogenization.recovery.compute_micro_u",
"sfepy.homogenization.utils.define_box_regions",
"sfepy.homogenization.recovery.compute_mac_stress_part",
"sfepy.homogenization.recovery.compute_stress_strain_u",
"sfepy.mechanics.matcoefs.stiffness_from_youngpoisson"
] |
[((1476, 1554), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""vertex"""', 'data': 'mic_u', 'var_name': '"""u"""', 'dofs': 'None'}), "(name='output_data', mode='vertex', data=mic_u, var_name='u', dofs=None)\n", (1482, 1554), False, 'from sfepy.base.base import Struct\n'), ((1643, 1701), 'sfepy.homogenization.recovery.compute_stress_strain_u', 'compute_stress_strain_u', (['pb', '"""i"""', '"""Y"""', '"""mat.D"""', '"""u"""', 'mic_u'], {}), "(pb, 'i', 'Y', 'mat.D', 'u', mic_u)\n", (1666, 1701), False, 'from sfepy.homogenization.recovery import compute_micro_u, compute_stress_strain_u, compute_mac_stress_part\n'), ((1728, 1796), 'sfepy.homogenization.recovery.compute_mac_stress_part', 'compute_mac_stress_part', (['pb', '"""i"""', '"""Y"""', '"""mat.D"""', '"""u"""', "macro['strain']"], {}), "(pb, 'i', 'Y', 'mat.D', 'u', macro['strain'])\n", (1751, 1796), False, 'from sfepy.homogenization.recovery import compute_micro_u, compute_stress_strain_u, compute_mac_stress_part\n'), ((1866, 1929), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'strain', 'dofs': 'None'}), "(name='output_data', mode='cell', data=strain, dofs=None)\n", (1872, 1929), False, 'from sfepy.base.base import Struct\n'), ((2025, 2090), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'stress_Y', 'dofs': 'None'}), "(name='output_data', mode='cell', data=stress_Y, dofs=None)\n", (2031, 2090), False, 'from sfepy.base.base import Struct\n'), ((2394, 2441), 'sfepy.homogenization.utils.define_box_regions', 'define_box_regions', (['dim', 'region_lbn', 'region_rtf'], {}), '(dim, region_lbn, region_rtf)\n', (2412, 2441), False, 'from sfepy.homogenization.utils import define_box_regions\n'), ((1394, 1455), 'sfepy.homogenization.recovery.compute_micro_u', 'compute_micro_u', (["corrs['corrs_le']", "macro['strain']", '"""u"""', 'dim'], {}), "(corrs['corrs_le'], macro['strain'], 'u', dim)\n", (1409, 1455), False, 'from sfepy.homogenization.recovery import compute_micro_u, compute_stress_strain_u, compute_mac_stress_part\n'), ((2483, 2534), 'sfepy.mechanics.matcoefs.stiffness_from_youngpoisson', 'stiffness_from_youngpoisson', (['dim', '(7000000000.0)', '(0.4)'], {}), '(dim, 7000000000.0, 0.4)\n', (2510, 2534), False, 'from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson\n'), ((2554, 2606), 'sfepy.mechanics.matcoefs.stiffness_from_youngpoisson', 'stiffness_from_youngpoisson', (['dim', '(70000000000.0)', '(0.2)'], {}), '(dim, 70000000000.0, 0.2)\n', (2581, 2606), False, 'from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson\n')]
|
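For reference, the 'D' coefficient assembled above (class `cb.CoefSymSym`, with `Pi1`/`Pi2` set from 'pis' and 'corrs_rs' and normalized by the `volume` expression) is the homogenized elasticity tensor of [1]-[3]. Writing \Pi^{ij} for the unit macroscopic strain modes ('pis') and \chi^{ij} for the periodic correctors ('corrs_rs'), it reads, in the usual notation,

D^{H}_{ijkl} = \frac{1}{|Y|} \int_Y D_{pqrs}\, e_{pq}\big(\Pi^{ij} + \chi^{ij}\big)\, e_{rs}\big(\Pi^{kl} + \chi^{kl}\big)\, \mathrm{d}Y ,

where e(\cdot) is the small-strain operator and |Y| the volume of the periodic cell supplied by the 'volume' option.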
from __future__ import absolute_import
import numpy as nm
from sfepy.base.base import output, iter_dict_of_lists, Struct, basestr
from sfepy.base.timing import Timer
import six
def parse_approx_order(approx_order):
"""
Parse the uniform approximation order value (str or int).
"""
ao_msg = 'unsupported approximation order! (%s)'
force_bubble = False
discontinuous = False
if approx_order is None:
return 'iga', force_bubble, discontinuous
elif isinstance(approx_order, basestr):
if approx_order.startswith('iga'):
return approx_order, force_bubble, discontinuous
try:
ao = int(approx_order)
except ValueError:
mode = approx_order[-1].lower()
if mode == 'b':
ao = int(approx_order[:-1])
force_bubble = True
elif mode == 'd':
ao = int(approx_order[:-1])
discontinuous = True
else:
raise ValueError(ao_msg % approx_order)
if ao < 0:
raise ValueError(ao_msg % approx_order)
elif ao == 0:
discontinuous = True
return ao, force_bubble, discontinuous
def parse_shape(shape, dim):
if isinstance(shape, basestr):
try:
shape = {'scalar' : (1,),
'vector' : (dim,)}[shape]
except KeyError:
            raise ValueError('unsupported field shape! (%s)' % shape)
elif isinstance(shape, six.integer_types):
shape = (int(shape),)
return shape
def setup_extra_data(conn_info):
"""
Setup extra data required for non-volume integration.
"""
for key, ii, info in iter_dict_of_lists(conn_info, return_keys=True):
for var in info.all_vars:
field = var.get_field()
if var == info.primary:
field.setup_extra_data(info.ps_tg, info, info.is_trace)
def fields_from_conf(conf, regions):
fields = {}
for key, val in six.iteritems(conf):
field = Field.from_conf(val, regions)
fields[field.name] = field
return fields
class Field(Struct):
"""
Base class for fields.
"""
_all = None
@staticmethod
def from_args(name, dtype, shape, region, approx_order=1,
space='H1', poly_space_base='lagrange'):
"""
Create a Field subclass instance corresponding to a given space.
Parameters
----------
name : str
The field name.
dtype : numpy.dtype
The field data type: float64 or complex128.
shape : int/tuple/str
The field shape: 1 or (1,) or 'scalar', space dimension (2, or (2,)
or 3 or (3,)) or 'vector', or a tuple. The field shape determines
the shape of the FE base functions and is related to the number of
components of variables and to the DOF per node count, depending
on the field kind.
region : Region
The region where the field is defined.
approx_order : int/str
The FE approximation order, e.g. 0, 1, 2, '1B' (1 with bubble).
space : str
The function space name.
poly_space_base : str
The name of polynomial space base.
Notes
-----
Assumes one cell type for the whole region!
"""
conf = Struct(name=name, dtype=dtype, shape=shape, region=region.name,
approx_order=approx_order, space=space,
poly_space_base=poly_space_base)
return Field.from_conf(conf, {region.name : region})
@staticmethod
def from_conf(conf, regions):
"""
Create a Field subclass instance based on the configuration.
"""
if Field._all is None:
from sfepy import get_paths
from sfepy.base.base import load_classes
field_files = [ii for ii
in get_paths('sfepy/discrete/fem/fields*.py')
if 'fields_base.py' not in ii]
field_files += get_paths('sfepy/discrete/iga/fields*.py')
field_files += get_paths('sfepy/discrete/structural/fields*.py')
Field._all = load_classes(field_files, [Field], ignore_errors=True,
name_attr='family_name')
table = Field._all
space = conf.get('space', 'H1')
poly_space_base = conf.get('poly_space_base', 'lagrange')
key = space + '_' + poly_space_base
approx_order = parse_approx_order(conf.approx_order)
ao, force_bubble, discontinuous = approx_order
region = regions[conf.region]
if region.kind == 'cell':
# Volume fields.
kind = 'volume'
if discontinuous:
cls = table[kind + '_' + key + '_discontinuous']
else:
cls = table[kind + '_' + key]
obj = cls(conf.name, conf.dtype, conf.shape, region,
approx_order=approx_order[:2])
else:
# Surface fields.
kind = 'surface'
cls = table[kind + '_' + key]
obj = cls(conf.name, conf.dtype, conf.shape, region,
approx_order=approx_order[:2])
return obj
def _setup_kind(self):
name = self.get('family_name', None,
'An abstract Field method called!')
aux = name.split('_')
self.space = aux[1]
self.poly_space_base = aux[2]
def clear_mappings(self, clear_all=False):
"""
Clear current reference mappings.
"""
self.mappings = {}
if clear_all:
if hasattr(self, 'mappings0'):
self.mappings0.clear()
else:
self.mappings0 = {}
def save_mappings(self):
"""
Save current reference mappings to `mappings0` attribute.
"""
import sfepy.base.multiproc as multi
if multi.is_remote_dict(self.mappings0):
for k, v in six.iteritems(self.mappings):
m, _ = self.mappings[k]
nv = (m.bf, m.bfg, m.det, m.volume, m.normal)
self.mappings0[k] = nv
else:
self.mappings0 = self.mappings.copy()
def get_mapping(self, region, integral, integration,
get_saved=False, return_key=False):
"""
For given region, integral and integration type, get a reference
mapping, i.e. jacobians, element volumes and base function
derivatives for Volume-type geometries, and jacobians, normals
and base function derivatives for Surface-type geometries
corresponding to the field approximation.
The mappings are cached in the field instance in `mappings`
attribute. The mappings can be saved to `mappings0` using
`Field.save_mappings`. The saved mapping can be retrieved by
passing `get_saved=True`. If the required (saved) mapping
is not in cache, a new one is created.
Returns
-------
geo : CMapping instance
The reference mapping.
mapping : VolumeMapping or SurfaceMapping instance
The mapping.
key : tuple
The key of the mapping in `mappings` or `mappings0`.
"""
import sfepy.base.multiproc as multi
key = (region.name, integral.order, integration)
if get_saved:
out = self.mappings0.get(key, None)
if multi.is_remote_dict(self.mappings0) and out is not None:
m, i = self.create_mapping(region, integral, integration)
m.bf[:], m.bfg[:], m.det[:], m.volume[:] = out[0:4]
if m.normal is not None:
                    m.normal[:] = out[4]
out = m, i
else:
out = self.mappings.get(key, None)
if out is None:
out = self.create_mapping(region, integral, integration)
self.mappings[key] = out
if return_key:
out = out + (key,)
return out
def create_eval_mesh(self):
"""
Create a mesh for evaluating the field. The default implementation
returns None, because this mesh is for most fields the same as the one
created by `Field.create_mesh()`.
"""
def evaluate_at(self, coors, source_vals, mode='val', strategy='general',
close_limit=0.1, get_cells_fun=None, cache=None,
ret_cells=False, ret_status=False, ret_ref_coors=False,
verbose=False):
"""
Evaluate source DOF values corresponding to the field in the given
coordinates using the field interpolation.
Parameters
----------
coors : array, shape ``(n_coor, dim)``
The coordinates the source values should be interpolated into.
source_vals : array, shape ``(n_nod, n_components)``
The source DOF values corresponding to the field.
mode : {'val', 'grad'}, optional
The evaluation mode: the field value (default) or the field value
gradient.
strategy : {'general', 'convex'}, optional
The strategy for finding the elements that contain the
coordinates. For convex meshes, the 'convex' strategy might be
faster than the 'general' one.
close_limit : float, optional
The maximum limit distance of a point from the closest
element allowed for extrapolation.
get_cells_fun : callable, optional
If given, a function with signature ``get_cells_fun(coors, cmesh,
**kwargs)`` returning cells and offsets that potentially contain
points with the coordinates `coors`. Applicable only when
`strategy` is 'general'. When not given,
:func:`get_potential_cells()
<sfepy.discrete.common.global_interp.get_potential_cells>` is used.
cache : Struct, optional
To speed up a sequence of evaluations, the field mesh and other
data can be cached. Optionally, the cache can also contain the
reference element coordinates as `cache.ref_coors`, `cache.cells`
and `cache.status`, if the evaluation occurs in the same
coordinates repeatedly. In that case the mesh related data are
ignored. See :func:`Field.get_evaluate_cache()
<sfepy.discrete.fem.fields_base.FEField.get_evaluate_cache()>`.
ret_ref_coors : bool, optional
If True, return also the found reference element coordinates.
ret_status : bool, optional
If True, return also the enclosing cell status for each point.
ret_cells : bool, optional
If True, return also the cell indices the coordinates are in.
verbose : bool
If False, reduce verbosity.
Returns
-------
vals : array
The interpolated values with shape ``(n_coor, n_components)`` or
gradients with shape ``(n_coor, n_components, dim)`` according to
the `mode`. If `ret_status` is False, the values where the status
is greater than one are set to ``numpy.nan``.
ref_coors : array
The found reference element coordinates, if `ret_ref_coors` is True.
cells : array
The cell indices, if `ret_ref_coors` or `ret_cells` or `ret_status`
are True.
status : array
The status, if `ret_ref_coors` or `ret_status` are True, with the
following meaning: 0 is success, 1 is extrapolation within
`close_limit`, 2 is extrapolation outside `close_limit`, 3 is
failure, 4 is failure due to non-convergence of the Newton
iteration in tensor product cells. If close_limit is 0, then for
the 'general' strategy the status 5 indicates points outside of the
field domain that had no potential cells.
"""
from sfepy.discrete.common.global_interp import get_ref_coors
from sfepy.discrete.common.extmods.crefcoors import evaluate_in_rc
from sfepy.base.base import complex_types
output('evaluating in %d points...' % coors.shape[0], verbose=verbose)
ref_coors, cells, status = get_ref_coors(self, coors,
strategy=strategy,
close_limit=close_limit,
get_cells_fun=get_cells_fun,
cache=cache,
verbose=verbose)
timer = Timer(start=True)
# Interpolate to the reference coordinates.
source_dtype = nm.float64 if source_vals.dtype in complex_types\
else source_vals.dtype
if mode == 'val':
vals = nm.empty((coors.shape[0], source_vals.shape[1], 1),
dtype=source_dtype)
cmode = 0
elif mode == 'grad':
vals = nm.empty((coors.shape[0], source_vals.shape[1],
coors.shape[1]),
dtype=source_dtype)
cmode = 1
ctx = self.create_basis_context()
if source_vals.dtype in complex_types:
valsi = vals.copy()
evaluate_in_rc(vals, ref_coors, cells, status,
nm.ascontiguousarray(source_vals.real),
self.get_econn('volume', self.region), cmode, ctx)
evaluate_in_rc(valsi, ref_coors, cells, status,
nm.ascontiguousarray(source_vals.imag),
self.get_econn('volume', self.region), cmode, ctx)
vals = vals + valsi * 1j
else:
evaluate_in_rc(vals, ref_coors, cells, status, source_vals,
self.get_econn('volume', self.region), cmode, ctx)
output('interpolation: %f s' % timer.stop(),verbose=verbose)
output('...done',verbose=verbose)
if mode == 'val':
vals.shape = (coors.shape[0], source_vals.shape[1])
if not ret_status:
ii = nm.where(status > 1)[0]
vals[ii] = nm.nan
if ret_ref_coors:
return vals, ref_coors, cells, status
elif ret_status:
return vals, cells, status
elif ret_cells:
return vals, cells
else:
return vals
|
[
"sfepy.get_paths",
"sfepy.base.base.Struct",
"sfepy.discrete.common.global_interp.get_ref_coors",
"sfepy.base.multiproc.is_remote_dict",
"sfepy.base.timing.Timer",
"sfepy.base.base.iter_dict_of_lists",
"sfepy.base.base.load_classes",
"sfepy.base.base.output"
] |
[((1640, 1687), 'sfepy.base.base.iter_dict_of_lists', 'iter_dict_of_lists', (['conn_info'], {'return_keys': '(True)'}), '(conn_info, return_keys=True)\n', (1658, 1687), False, 'from sfepy.base.base import output, iter_dict_of_lists, Struct, basestr\n'), ((1941, 1960), 'six.iteritems', 'six.iteritems', (['conf'], {}), '(conf)\n', (1954, 1960), False, 'import six\n'), ((3337, 3477), 'sfepy.base.base.Struct', 'Struct', ([], {'name': 'name', 'dtype': 'dtype', 'shape': 'shape', 'region': 'region.name', 'approx_order': 'approx_order', 'space': 'space', 'poly_space_base': 'poly_space_base'}), '(name=name, dtype=dtype, shape=shape, region=region.name,\n approx_order=approx_order, space=space, poly_space_base=poly_space_base)\n', (3343, 3477), False, 'from sfepy.base.base import output, iter_dict_of_lists, Struct, basestr\n'), ((5974, 6010), 'sfepy.base.multiproc.is_remote_dict', 'multi.is_remote_dict', (['self.mappings0'], {}), '(self.mappings0)\n', (5994, 6010), True, 'import sfepy.base.multiproc as multi\n'), ((12248, 12318), 'sfepy.base.base.output', 'output', (["('evaluating in %d points...' % coors.shape[0])"], {'verbose': 'verbose'}), "('evaluating in %d points...' % coors.shape[0], verbose=verbose)\n", (12254, 12318), False, 'from sfepy.base.base import output, iter_dict_of_lists, Struct, basestr\n'), ((12355, 12488), 'sfepy.discrete.common.global_interp.get_ref_coors', 'get_ref_coors', (['self', 'coors'], {'strategy': 'strategy', 'close_limit': 'close_limit', 'get_cells_fun': 'get_cells_fun', 'cache': 'cache', 'verbose': 'verbose'}), '(self, coors, strategy=strategy, close_limit=close_limit,\n get_cells_fun=get_cells_fun, cache=cache, verbose=verbose)\n', (12368, 12488), False, 'from sfepy.discrete.common.global_interp import get_ref_coors\n'), ((12747, 12764), 'sfepy.base.timing.Timer', 'Timer', ([], {'start': '(True)'}), '(start=True)\n', (12752, 12764), False, 'from sfepy.base.timing import Timer\n'), ((14118, 14152), 'sfepy.base.base.output', 'output', (['"""...done"""'], {'verbose': 'verbose'}), "('...done', verbose=verbose)\n", (14124, 14152), False, 'from sfepy.base.base import output, iter_dict_of_lists, Struct, basestr\n'), ((4045, 4087), 'sfepy.get_paths', 'get_paths', (['"""sfepy/discrete/iga/fields*.py"""'], {}), "('sfepy/discrete/iga/fields*.py')\n", (4054, 4087), False, 'from sfepy import get_paths\n'), ((4115, 4164), 'sfepy.get_paths', 'get_paths', (['"""sfepy/discrete/structural/fields*.py"""'], {}), "('sfepy/discrete/structural/fields*.py')\n", (4124, 4164), False, 'from sfepy import get_paths\n'), ((4190, 4269), 'sfepy.base.base.load_classes', 'load_classes', (['field_files', '[Field]'], {'ignore_errors': '(True)', 'name_attr': '"""family_name"""'}), "(field_files, [Field], ignore_errors=True, name_attr='family_name')\n", (4202, 4269), False, 'from sfepy.base.base import load_classes\n'), ((6036, 6064), 'six.iteritems', 'six.iteritems', (['self.mappings'], {}), '(self.mappings)\n', (6049, 6064), False, 'import six\n'), ((12971, 13042), 'numpy.empty', 'nm.empty', (['(coors.shape[0], source_vals.shape[1], 1)'], {'dtype': 'source_dtype'}), '((coors.shape[0], source_vals.shape[1], 1), dtype=source_dtype)\n', (12979, 13042), True, 'import numpy as nm\n'), ((7511, 7547), 'sfepy.base.multiproc.is_remote_dict', 'multi.is_remote_dict', (['self.mappings0'], {}), '(self.mappings0)\n', (7531, 7547), True, 'import sfepy.base.multiproc as multi\n'), ((13142, 13231), 'numpy.empty', 'nm.empty', (['(coors.shape[0], source_vals.shape[1], coors.shape[1])'], {'dtype': 'source_dtype'}), 
'((coors.shape[0], source_vals.shape[1], coors.shape[1]), dtype=\n source_dtype)\n', (13150, 13231), True, 'import numpy as nm\n'), ((13515, 13553), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['source_vals.real'], {}), '(source_vals.real)\n', (13535, 13553), True, 'import numpy as nm\n'), ((13720, 13758), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['source_vals.imag'], {}), '(source_vals.imag)\n', (13740, 13758), True, 'import numpy as nm\n'), ((14288, 14308), 'numpy.where', 'nm.where', (['(status > 1)'], {}), '(status > 1)\n', (14296, 14308), True, 'import numpy as nm\n'), ((3917, 3959), 'sfepy.get_paths', 'get_paths', (['"""sfepy/discrete/fem/fields*.py"""'], {}), "('sfepy/discrete/fem/fields*.py')\n", (3926, 3959), False, 'from sfepy import get_paths\n')]
|
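A small, hedged usage sketch of the two parsing helpers defined near the top of the file above, assuming the module is on the import path; the expected outputs in the comments follow directly from the branches of `parse_approx_order` and `parse_shape`.
print(parse_approx_order(None))      # ('iga', False, False)
print(parse_approx_order('2'))       # (2, False, False)
print(parse_approx_order('1B'))      # (1, True, False)  -- force_bubble
print(parse_approx_order('2d'))      # (2, False, True)  -- discontinuous
print(parse_shape('vector', dim=3))  # (3,)
print(parse_shape('scalar', dim=3))  # (1,)
print(parse_shape(2, dim=3))         # (2,)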
#!/usr/bin/env python3
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
import copy
import time
from typing import Iterable
import megengine as mge
import megengine.amp as amp
import megengine.distributed as dist
import megengine.functional as F
import megengine.module as M
import megengine.optimizer as optim
from basecore.config import ConfigDict
from basecore.engine import BaseHook, BaseTrainer
from basecore.utils import MeterBuffer
from megengine import jit
from basecls.data import DataLoaderType
from basecls.layers import Preprocess, build_loss
from basecls.solver import Solver
from basecls.utils import registers
__all__ = ["ClsTrainer"]
@registers.trainers.register()
class ClsTrainer(BaseTrainer):
"""Classification trainer.
Args:
cfg: config for training.
model: model for training.
dataloader: dataloader for training.
solver: solver for training.
hooks: hooks for training.
Attributes:
cfg: config for training.
model: model for training.
ema: model exponential moving average.
dataloader: dataloader for training.
solver: solver for training.
progress: object for recording training process.
loss: loss function for training.
meter : object for recording metrics.
"""
def __init__(
self,
cfg: ConfigDict,
model: M.Module,
dataloader: DataLoaderType,
solver: Solver,
hooks: Iterable[BaseHook] = None,
):
super().__init__(model, dataloader, solver, hooks)
self.cfg = cfg
self.ema = copy.deepcopy(model) if cfg.model_ema.enabled else None
self.preprocess = Preprocess(cfg.preprocess.img_mean, cfg.preprocess.img_std)
self.loss = build_loss(cfg)
self.meter = MeterBuffer(cfg.log_every_n_iter)
if cfg.trace:
# FIXME: tracing makes the training slower than before, why?
self.model_step = jit.trace(self.model_step, symbolic=True)
def train(self):
start_training_info = (1, 1)
max_iter = len(self.dataloader)
max_training_info = (self.cfg.solver.max_epoch, max_iter)
super().train(start_training_info, max_training_info)
def before_train(self):
super().before_train()
def before_epoch(self):
super().before_epoch()
self.dataloader_iter = iter(self.dataloader)
def after_epoch(self):
del self.dataloader_iter
super().after_epoch()
def train_one_iter(self):
"""Basic logic of training one iteration."""
data_tik = time.perf_counter()
data = next(self.dataloader_iter)
samples, targets = self.preprocess(data)
mge._full_sync() # use full_sync func to sync launch queue for dynamic execution
data_tok = time.perf_counter()
train_tik = time.perf_counter()
losses, accs = self.model_step(samples, targets)
mge._full_sync() # use full_sync func to sync launch queue for dynamic execution
train_tok = time.perf_counter()
# TODO: stats and accs
loss_meters = {"loss": losses.item()}
stat_meters = {"stat_acc@1": accs[0].item() * 100, "stat_acc@5": accs[1].item() * 100}
time_meters = {"train_time": train_tok - train_tik, "data_time": data_tok - data_tik}
self.meter.update(**loss_meters, **stat_meters, **time_meters)
def model_step(self, samples, targets):
optimizer = self.solver.optimizer
grad_manager = self.solver.grad_manager
grad_scaler = self.solver.grad_scaler
with grad_manager:
with amp.autocast(enabled=self.cfg.amp.enabled):
outputs = self.model(samples)
losses = self.loss(outputs, targets)
if isinstance(losses, mge.Tensor):
total_loss = losses
elif isinstance(losses, dict):
if "total_loss" in losses:
total_loss = losses["total_loss"]
else:
# only key contains "loss" will be calculated.
total_loss = sum([v for k, v in losses.items() if "loss" in k])
losses["total_loss"] = total_loss
else:
# list or tuple
total_loss = sum(losses)
total_loss = total_loss / self.cfg.solver.accumulation_steps
# this is made compatible with one hot labels
if targets.ndim == 2:
targets = F.argmax(targets, axis=1)
accs = F.metric.topk_accuracy(outputs, targets, (1, 5))
if self.cfg.amp.enabled:
grad_scaler.backward(grad_manager, total_loss)
else:
grad_manager.backward(total_loss)
if self.progress.iter % self.cfg.solver.accumulation_steps == 0:
self.modify_grad()
optimizer.step().clear_grad()
self.model_ema_step()
return losses, accs
def modify_grad(self):
grad_cfg = self.cfg.solver.grad_clip
# TODO: support advanced params for grad clip in the future
params = self.model.parameters()
if grad_cfg.name is None:
return
elif grad_cfg.name == "norm":
optim.clip_grad_norm(params, grad_cfg.max_norm)
elif grad_cfg.name == "value":
optim.clip_grad_value(params, grad_cfg.lower, grad_cfg.upper)
else:
raise ValueError(f"Grad clip type '{grad_cfg.name}' not supported")
def model_ema_step(self):
"""Implement momentum based Exponential Moving Average (EMA) for model states
https://github.com/rwightman/pytorch-image-models/blob/master/timm/utils/model_ema.py
Also inspired by Pycls https://github.com/facebookresearch/pycls/pull/138/, which is more
flexible and efficient
Heuristically, one can use a momentum of 0.9999 as used by Tensorflow and 0.9998 as used
by timm, which updates model ema every iter. To be more efficient, one can set
``update_period`` to e.g. 8 or 32 to speed up your training, and decrease your momentum
at scale: set ``momentum=0.9978`` from 0.9999 (32 times) when you ``update_period=32``.
Also, to make model EMA really work (improve generalization), one should carefully tune
the momentum based on various factors, e.g. the learning rate scheduler,
        the total batch size, the training epochs, etc.
To initialize a momentum in Pycls style, one set ``model_ema.alpha = 1e-5`` instead.
Momentum will be calculated through ``_calculate_pycls_momentum``.
"""
if self.ema is None:
return
ema_cfg = self.cfg.model_ema
cur_iter, cur_epoch = self.progress.iter, self.progress.epoch
if cur_iter % ema_cfg.update_period == 0:
if cur_epoch > (ema_cfg.start_epoch or self.cfg.solver.warmup_epochs):
momentum = (
ema_cfg.momentum
if ema_cfg.alpha is None
else _calculate_pycls_momentum(
alpha=ema_cfg.alpha,
total_batch_size=self.cfg.batch_size * dist.get_world_size(),
max_epoch=self.cfg.solver.max_epoch,
update_period=ema_cfg.update_period,
)
)
else:
# copy model to ema
momentum = 0.0
if not hasattr(self, "_ema_states"):
self._ema_states = (
list(self.ema.parameters()) + list(self.ema.buffers()),
list(self.model.parameters()) + list(self.model.buffers()),
)
for e, p in zip(*self._ema_states):
# _inplace_add_(e, p, alpha=mge.tensor(momentum), beta=mge.tensor(1 - momentum))
e._reset(e * momentum + p * (1 - momentum))
def _calculate_pycls_momentum(
alpha: float, total_batch_size: int, max_epoch: int, update_period: int
):
"""pycls style momentum calculation which uses a relative model_ema to decouple momentum with
other training hyper-parameters e.g.
* training epochs
* interval to update ema
* batch sizes
Usually the alpha is a tiny positive floating number, e.g. 1e-4 or 1e-5,
with ``max_epoch=100``, ``total_batch_size=1024`` and ``update_period=32``, the ema
momentum should be 0.996723175, which has roughly same behavior to the default setting.
i.e. ``momentum=0.9999`` together with ``update_period=1``
"""
return max(0, 1 - alpha * (total_batch_size / max_epoch * update_period))
|
[
"megengine.jit.trace",
"megengine.distributed.get_world_size",
"megengine.optimizer.clip_grad_value",
"megengine.amp.autocast",
"megengine.optimizer.clip_grad_norm",
"megengine.functional.argmax",
"megengine._full_sync",
"megengine.functional.metric.topk_accuracy"
] |
[((666, 695), 'basecls.utils.registers.trainers.register', 'registers.trainers.register', ([], {}), '()\n', (693, 695), False, 'from basecls.utils import registers\n'), ((1698, 1757), 'basecls.layers.Preprocess', 'Preprocess', (['cfg.preprocess.img_mean', 'cfg.preprocess.img_std'], {}), '(cfg.preprocess.img_mean, cfg.preprocess.img_std)\n', (1708, 1757), False, 'from basecls.layers import Preprocess, build_loss\n'), ((1778, 1793), 'basecls.layers.build_loss', 'build_loss', (['cfg'], {}), '(cfg)\n', (1788, 1793), False, 'from basecls.layers import Preprocess, build_loss\n'), ((1815, 1848), 'basecore.utils.MeterBuffer', 'MeterBuffer', (['cfg.log_every_n_iter'], {}), '(cfg.log_every_n_iter)\n', (1826, 1848), False, 'from basecore.utils import MeterBuffer\n'), ((2610, 2629), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2627, 2629), False, 'import time\n'), ((2729, 2745), 'megengine._full_sync', 'mge._full_sync', ([], {}), '()\n', (2743, 2745), True, 'import megengine as mge\n'), ((2830, 2849), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2847, 2849), False, 'import time\n'), ((2871, 2890), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2888, 2890), False, 'import time\n'), ((2956, 2972), 'megengine._full_sync', 'mge._full_sync', ([], {}), '()\n', (2970, 2972), True, 'import megengine as mge\n'), ((3058, 3077), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (3075, 3077), False, 'import time\n'), ((1616, 1636), 'copy.deepcopy', 'copy.deepcopy', (['model'], {}), '(model)\n', (1629, 1636), False, 'import copy\n'), ((1974, 2015), 'megengine.jit.trace', 'jit.trace', (['self.model_step'], {'symbolic': '(True)'}), '(self.model_step, symbolic=True)\n', (1983, 2015), False, 'from megengine import jit\n'), ((4565, 4613), 'megengine.functional.metric.topk_accuracy', 'F.metric.topk_accuracy', (['outputs', 'targets', '(1, 5)'], {}), '(outputs, targets, (1, 5))\n', (4587, 4613), True, 'import megengine.functional as F\n'), ((3642, 3684), 'megengine.amp.autocast', 'amp.autocast', ([], {'enabled': 'self.cfg.amp.enabled'}), '(enabled=self.cfg.amp.enabled)\n', (3654, 3684), True, 'import megengine.amp as amp\n'), ((4520, 4545), 'megengine.functional.argmax', 'F.argmax', (['targets'], {'axis': '(1)'}), '(targets, axis=1)\n', (4528, 4545), True, 'import megengine.functional as F\n'), ((5280, 5327), 'megengine.optimizer.clip_grad_norm', 'optim.clip_grad_norm', (['params', 'grad_cfg.max_norm'], {}), '(params, grad_cfg.max_norm)\n', (5300, 5327), True, 'import megengine.optimizer as optim\n'), ((5379, 5440), 'megengine.optimizer.clip_grad_value', 'optim.clip_grad_value', (['params', 'grad_cfg.lower', 'grad_cfg.upper'], {}), '(params, grad_cfg.lower, grad_cfg.upper)\n', (5400, 5440), True, 'import megengine.optimizer as optim\n'), ((7230, 7251), 'megengine.distributed.get_world_size', 'dist.get_world_size', ([], {}), '()\n', (7249, 7251), True, 'import megengine.distributed as dist\n')]
|
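A worked check of the numbers quoted in the `_calculate_pycls_momentum` docstring above (alpha 1e-5, total batch size 1024, 100 epochs, update period 32), reproducing the function's arithmetic directly.
alpha, total_batch_size, max_epoch, update_period = 1e-5, 1024, 100, 32
momentum = max(0, 1 - alpha * (total_batch_size / max_epoch * update_period))
print(momentum)  # 0.9967232, i.e. the ~0.9967 momentum quoted in the docstring
# with update_period=1 the same alpha gives 1 - 1e-5 * 10.24 = 0.9998976,
# close to the timm-style momentum mentioned above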
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import gc
import platform
import weakref
import numpy as np
import pytest
import megengine as mge
import megengine.distributed as dist
import megengine.functional as F
from megengine.core._imperative_rt import TensorAttr, imperative
from megengine.core._imperative_rt.imperative import sync
from megengine.core.autodiff.grad import Grad
from megengine.core.ops.builtin import Elemwise
from megengine.core.tensor.raw_tensor import as_raw_tensor
from megengine.core.tensor.tensor import Tensor, apply
from megengine.core.tensor.tensor_wrapper import TensorWrapper
from megengine.functional.distributed import remote_recv, remote_send
def _elwise(mode):
op = Elemwise(mode)
def f(*args):
(result,) = apply(op, *args)
return result
return f
add = _elwise(Elemwise.Mode.ADD)
mul = _elwise(Elemwise.Mode.MUL)
cos = _elwise(Elemwise.Mode.COS)
relu = _elwise(Elemwise.Mode.RELU)
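# The helpers above wrap raw Elemwise ops (ADD/MUL/COS/RELU) through the low-level
# `apply` interface instead of the functional API, so autodiff is exercised directly.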
def as_tensor(x):
return Tensor(as_raw_tensor(x, device=mge.device.get_default_device()))
def save_to(self, name="grad"):
def callback(tensor, grad):
setattr(self, name, grad)
return callback
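# save_to() returns a Grad callback that stores the computed gradient as an
# attribute (default "grad") on the given holder object.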
@pytest.mark.isolated_distributed
@pytest.mark.skipif(
platform.system() == "Windows", reason="windows disable MGB_ENABLE_OPR_MM"
)
def test_dist_grad():
world_size = 2
x_np = np.random.rand(10).astype("float32")
port = dist.get_free_ports(1)[0]
server = dist.Server(port)
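    # worker0 sends x to worker1, which echoes it back; the gradient of
    # y = recv_x * recv_x must flow through both remote transfers back to x.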
def worker0():
dist.init_process_group("localhost", port, world_size, 0, 0)
mge.device.set_default_device("gpu0")
grad = Grad()
x = as_tensor(x_np)
grad.wrt(x, callback=save_to(x))
# need a placeholder to trace operator
send_x = remote_send(x, 1)
recv_x = remote_recv(1, x_np.shape, x_np.dtype, "gpu0")
y = recv_x * recv_x
grad([y], [as_tensor(np.ones_like(x_np))])
np.testing.assert_almost_equal(x.grad.numpy(), x.numpy() * 2)
def worker1():
dist.init_process_group("localhost", port, world_size, 1, 1)
mge.device.set_default_device("gpu1")
grad = Grad()
recv_x = remote_recv(0, x_np.shape, x_np.dtype, "gpu1")
send_x = remote_send(recv_x, 0)
grad([], [])
# sync because grad has a send operator
sync()
send_x.device._cn._sync_all()
import multiprocessing as mp
p0 = mp.Process(target=worker0)
p1 = mp.Process(target=worker1)
p0.start()
p1.start()
p0.join(10)
p1.join(10)
assert p0.exitcode == 0 and p1.exitcode == 0
def test_grad():
x_np = np.random.rand(10).astype("float32")
x = as_tensor(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = cos(x)
grad(y, as_tensor(np.ones_like(x_np)))
np.testing.assert_almost_equal(x.grad.numpy(), -np.sin(x_np))
def test_grad_2():
x_np = np.random.rand(10).astype("float32")
x = as_tensor(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = mul(x, x)
y = mul(y, y)
grad(y, as_tensor(np.ones_like(x_np)))
np.testing.assert_almost_equal(x.grad.numpy(), 4 * x_np ** 3, decimal=6)
@pytest.mark.skip(reason="high order gradient was not implemented yet")
def test_2nd_grad():
x_np = np.random.rand(10).astype("float32")
x = as_tensor(x_np)
ones = as_tensor(np.ones_like(x_np))
grad = Grad().wrt(x, callback=save_to(x))
grad2 = Grad().wrt(x, callback=save_to(x))
y = cos(x)
grad(y, ones)
np.testing.assert_almost_equal(x.grad.numpy(), -np.sin(x_np), decimal=5)
grad2(x.grad, ones)
np.testing.assert_almost_equal(x.grad.numpy(), -np.cos(x_np))
def test_grad_with_tensor_wrapper():
x_np = np.random.rand(10).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = mul(x, x)
y = mul(y, y)
grad(y, TensorWrapper(np.ones_like(x_np)))
np.testing.assert_almost_equal(x.grad.numpy(), 4 * x_np ** 3, decimal=6)
def test_release():
def check(f):
n = 0
d = None
gc.disable()
try:
for i in range(3):
f()
m = len(gc.get_objects())
d = m - n
n = m
assert d == 0
finally:
gc.enable()
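    # check() runs f three times and asserts the count of GC-tracked objects stops
    # growing, so building and dropping the graph must not leak references.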
x = TensorWrapper([0.0])
dy = TensorWrapper(np.ones_like(x.numpy()))
@check
def _():
g = Grad().wrt(x)
y = x * x
g(y, dy)
@check
def _():
with Grad().wrt(x) as g:
pass
@check
def _():
with Grad().wrt(x) as g:
y = x * x
def test_grad_inplace():
x_np = np.random.rand(10).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = mul(x, x)
y *= y
grad(y, TensorWrapper(np.ones_like(x_np)))
np.testing.assert_almost_equal(x.grad.numpy(), 4 * x_np ** 3, decimal=6)
def test_elemwise_add():
x_np = np.random.rand(10).astype("float32")
y_np = np.random.rand(10, 10).astype("float32")
dz_np = np.random.rand(10, 10).astype("float32")
x = TensorWrapper(x_np)
y = TensorWrapper(y_np)
dz = TensorWrapper(dz_np)
refs = {}
def f(x, y):
x = x * 2
refs["x"] = weakref.ref(x.__wrapped__)
refs["y"] = weakref.ref(y.__wrapped__)
return x + y
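    # The weakrefs track the tensors created inside f(); the asserts below check
    # they are already released after `del y`, i.e. nothing kept them alive.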
grad = Grad().wrt(x, callback=save_to(x))
z = f(x, y)
del y
for k, r in refs.items():
assert r() is None
grad(z, dz)
np.testing.assert_almost_equal(x.grad.numpy(), dz_np.sum(0) * 2, decimal=5)
def test_elemwise_relu():
x_np = [1.0, -1.0]
dz_np = [1.0]
x = TensorWrapper(x_np)
dz = TensorWrapper(dz_np)
refs = {}
def f(x):
x = x * 2
refs["x"] = weakref.ref(x.__wrapped__)
return relu(x)
grad = Grad().wrt(x, callback=save_to(x))
z = f(x)
assert refs["x"]() is None
grad(z, dz)
np.testing.assert_almost_equal(x.grad.numpy(), [2.0, 0])
def test_elemwise_relu_backward_fn():
op = Elemwise(Elemwise.Mode.RELU)
attr = TensorAttr()
attr.dtype = "float32"
attr.comp_node = "xpux"
result = imperative.make_backward_graph(op, [attr], [True], [True])
backward_graph, save_for_backward_mask, input_has_grad = result
assert save_for_backward_mask == [False, True, True], save_for_backward_mask
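# RELU's backward graph only needs the forward output and the incoming gradient,
# not the original input, which is what the [False, True, True] mask encodes.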
def test_reshape():
x_np = np.random.rand(2, 5).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = x.reshape(5, 2)
grad(y, F.ones_like(y))
np.testing.assert_equal(np.ones((2, 5), dtype=np.float32), x.grad.numpy())
def test_subtensor():
x_np = np.random.rand(3, 3).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = x[1:-1, :2]
grad(y, F.ones_like(y))
np.testing.assert_equal(
np.array([[0, 0, 0], [1, 1, 0], [0, 0, 0]], dtype=np.float32), x.grad.numpy()
)
def test_IndexingMultiAxisVec():
x_np = np.random.rand(3, 3).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = x[[0, 2], [0, 2]]
grad(y, F.ones_like(y))
np.testing.assert_equal(
np.array([[1, 0, 0], [0, 0, 0], [0, 0, 1]], dtype=np.float32), x.grad.numpy()
)
def test_AxisAddRemove():
x_np = np.random.rand(1, 5).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = F.squeeze(F.expand_dims(x, 2), 0)
grad(y, F.ones_like(y))
np.testing.assert_equal(
np.array([[1, 1, 1, 1, 1]], dtype=np.float32), x.grad.numpy()
)
def test_Broadcast():
x_np = np.random.rand(3, 3, 1).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = F.broadcast_to(x, (3, 3, 10))
grad(y, F.ones_like(y))
np.testing.assert_equal(np.ones((3, 3, 1), dtype=np.float32) * 10, x.grad.numpy())
def test_Reduce_sum():
x_np = np.random.rand(3, 3).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = x.sum(axis=0)
grad(y, F.ones_like(y))
np.testing.assert_equal(np.ones((3, 3), dtype=np.float32), x.grad.numpy())
def test_Reduce_mean():
x_np = np.random.rand(3, 3).astype("float32")
x = TensorWrapper(x_np)
grad = Grad().wrt(x, callback=save_to(x))
y = x.mean(axis=0)
grad(y, F.ones_like(y))
np.testing.assert_equal(np.ones((3, 3), dtype=np.float32) / 3, x.grad.numpy())
|
[
"megengine.core._imperative_rt.imperative.make_backward_graph",
"megengine.functional.distributed.remote_send",
"megengine.functional.expand_dims",
"megengine.functional.distributed.remote_recv",
"megengine.core._imperative_rt.imperative.sync",
"megengine.distributed.init_process_group",
"megengine.functional.ones_like",
"megengine.core.tensor.tensor.apply",
"megengine.core.autodiff.grad.Grad",
"megengine.distributed.Server",
"megengine.core.tensor.tensor_wrapper.TensorWrapper",
"megengine.core._imperative_rt.TensorAttr",
"megengine.device.set_default_device",
"megengine.functional.broadcast_to",
"megengine.distributed.get_free_ports",
"megengine.device.get_default_device",
"megengine.core.ops.builtin.Elemwise"
] |
[((3488, 3558), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""high order gradient was not implemented yet"""'}), "(reason='high order gradient was not implemented yet')\n", (3504, 3558), False, 'import pytest\n'), ((1041, 1055), 'megengine.core.ops.builtin.Elemwise', 'Elemwise', (['mode'], {}), '(mode)\n', (1049, 1055), False, 'from megengine.core.ops.builtin import Elemwise\n'), ((1778, 1795), 'megengine.distributed.Server', 'dist.Server', (['port'], {}), '(port)\n', (1789, 1795), True, 'import megengine.distributed as dist\n'), ((2749, 2775), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'worker0'}), '(target=worker0)\n', (2759, 2775), True, 'import multiprocessing as mp\n'), ((2785, 2811), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'worker1'}), '(target=worker1)\n', (2795, 2811), True, 'import multiprocessing as mp\n'), ((4085, 4104), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (4098, 4104), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((4636, 4656), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['[0.0]'], {}), '([0.0])\n', (4649, 4656), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((5029, 5048), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (5042, 5048), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((5439, 5458), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (5452, 5458), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((5467, 5486), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['y_np'], {}), '(y_np)\n', (5480, 5486), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((5496, 5516), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['dz_np'], {}), '(dz_np)\n', (5509, 5516), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((5989, 6008), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (6002, 6008), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((6018, 6038), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['dz_np'], {}), '(dz_np)\n', (6031, 6038), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((6377, 6405), 'megengine.core.ops.builtin.Elemwise', 'Elemwise', (['Elemwise.Mode.RELU'], {}), '(Elemwise.Mode.RELU)\n', (6385, 6405), False, 'from megengine.core.ops.builtin import Elemwise\n'), ((6417, 6429), 'megengine.core._imperative_rt.TensorAttr', 'TensorAttr', ([], {}), '()\n', (6427, 6429), False, 'from megengine.core._imperative_rt import TensorAttr, imperative\n'), ((6498, 6556), 'megengine.core._imperative_rt.imperative.make_backward_graph', 'imperative.make_backward_graph', (['op', '[attr]', '[True]', '[True]'], {}), '(op, [attr], [True], [True])\n', (6528, 6556), False, 'from megengine.core._imperative_rt import TensorAttr, imperative\n'), ((6786, 6805), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (6799, 6805), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((7067, 7086), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (7080, 7086), False, 'from 
megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((7397, 7416), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (7410, 7416), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((7726, 7745), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (7739, 7745), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((8054, 8073), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (8067, 8073), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((8129, 8158), 'megengine.functional.broadcast_to', 'F.broadcast_to', (['x', '(3, 3, 10)'], {}), '(x, (3, 3, 10))\n', (8143, 8158), True, 'import megengine.functional as F\n'), ((8358, 8377), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (8371, 8377), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((8639, 8658), 'megengine.core.tensor.tensor_wrapper.TensorWrapper', 'TensorWrapper', (['x_np'], {}), '(x_np)\n', (8652, 8658), False, 'from megengine.core.tensor.tensor_wrapper import TensorWrapper\n'), ((1095, 1111), 'megengine.core.tensor.tensor.apply', 'apply', (['op', '*args'], {}), '(op, *args)\n', (1100, 1111), False, 'from megengine.core.tensor.tensor import Tensor, apply\n'), ((1739, 1761), 'megengine.distributed.get_free_ports', 'dist.get_free_ports', (['(1)'], {}), '(1)\n', (1758, 1761), True, 'import megengine.distributed as dist\n'), ((1824, 1884), 'megengine.distributed.init_process_group', 'dist.init_process_group', (['"""localhost"""', 'port', 'world_size', '(0)', '(0)'], {}), "('localhost', port, world_size, 0, 0)\n", (1847, 1884), True, 'import megengine.distributed as dist\n'), ((1893, 1930), 'megengine.device.set_default_device', 'mge.device.set_default_device', (['"""gpu0"""'], {}), "('gpu0')\n", (1922, 1930), True, 'import megengine as mge\n'), ((1946, 1952), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (1950, 1952), False, 'from megengine.core.autodiff.grad import Grad\n'), ((2087, 2104), 'megengine.functional.distributed.remote_send', 'remote_send', (['x', '(1)'], {}), '(x, 1)\n', (2098, 2104), False, 'from megengine.functional.distributed import remote_recv, remote_send\n'), ((2122, 2168), 'megengine.functional.distributed.remote_recv', 'remote_recv', (['(1)', 'x_np.shape', 'x_np.dtype', '"""gpu0"""'], {}), "(1, x_np.shape, x_np.dtype, 'gpu0')\n", (2133, 2168), False, 'from megengine.functional.distributed import remote_recv, remote_send\n'), ((2347, 2407), 'megengine.distributed.init_process_group', 'dist.init_process_group', (['"""localhost"""', 'port', 'world_size', '(1)', '(1)'], {}), "('localhost', port, world_size, 1, 1)\n", (2370, 2407), True, 'import megengine.distributed as dist\n'), ((2416, 2453), 'megengine.device.set_default_device', 'mge.device.set_default_device', (['"""gpu1"""'], {}), "('gpu1')\n", (2445, 2453), True, 'import megengine as mge\n'), ((2469, 2475), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (2473, 2475), False, 'from megengine.core.autodiff.grad import Grad\n'), ((2494, 2540), 'megengine.functional.distributed.remote_recv', 'remote_recv', (['(0)', 'x_np.shape', 'x_np.dtype', '"""gpu1"""'], {}), "(0, x_np.shape, x_np.dtype, 'gpu1')\n", (2505, 2540), False, 'from megengine.functional.distributed import remote_recv, remote_send\n'), ((2558, 2580), 
'megengine.functional.distributed.remote_send', 'remote_send', (['recv_x', '(0)'], {}), '(recv_x, 0)\n', (2569, 2580), False, 'from megengine.functional.distributed import remote_recv, remote_send\n'), ((2660, 2666), 'megengine.core._imperative_rt.imperative.sync', 'sync', ([], {}), '()\n', (2664, 2666), False, 'from megengine.core._imperative_rt.imperative import sync\n'), ((1562, 1579), 'platform.system', 'platform.system', ([], {}), '()\n', (1577, 1579), False, 'import platform\n'), ((3673, 3691), 'numpy.ones_like', 'np.ones_like', (['x_np'], {}), '(x_np)\n', (3685, 3691), True, 'import numpy as np\n'), ((4393, 4405), 'gc.disable', 'gc.disable', ([], {}), '()\n', (4403, 4405), False, 'import gc\n'), ((5588, 5614), 'weakref.ref', 'weakref.ref', (['x.__wrapped__'], {}), '(x.__wrapped__)\n', (5599, 5614), False, 'import weakref\n'), ((5635, 5661), 'weakref.ref', 'weakref.ref', (['y.__wrapped__'], {}), '(y.__wrapped__)\n', (5646, 5661), False, 'import weakref\n'), ((6107, 6133), 'weakref.ref', 'weakref.ref', (['x.__wrapped__'], {}), '(x.__wrapped__)\n', (6118, 6133), False, 'import weakref\n'), ((6890, 6904), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (6901, 6904), True, 'import megengine.functional as F\n'), ((6934, 6967), 'numpy.ones', 'np.ones', (['(2, 5)'], {'dtype': 'np.float32'}), '((2, 5), dtype=np.float32)\n', (6941, 6967), True, 'import numpy as np\n'), ((7167, 7181), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (7178, 7181), True, 'import megengine.functional as F\n'), ((7220, 7281), 'numpy.array', 'np.array', (['[[0, 0, 0], [1, 1, 0], [0, 0, 0]]'], {'dtype': 'np.float32'}), '([[0, 0, 0], [1, 1, 0], [0, 0, 0]], dtype=np.float32)\n', (7228, 7281), True, 'import numpy as np\n'), ((7503, 7517), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (7514, 7517), True, 'import megengine.functional as F\n'), ((7556, 7617), 'numpy.array', 'np.array', (['[[1, 0, 0], [0, 0, 0], [0, 0, 1]]'], {'dtype': 'np.float32'}), '([[1, 0, 0], [0, 0, 0], [0, 0, 1]], dtype=np.float32)\n', (7564, 7617), True, 'import numpy as np\n'), ((7811, 7830), 'megengine.functional.expand_dims', 'F.expand_dims', (['x', '(2)'], {}), '(x, 2)\n', (7824, 7830), True, 'import megengine.functional as F\n'), ((7848, 7862), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (7859, 7862), True, 'import megengine.functional as F\n'), ((7901, 7946), 'numpy.array', 'np.array', (['[[1, 1, 1, 1, 1]]'], {'dtype': 'np.float32'}), '([[1, 1, 1, 1, 1]], dtype=np.float32)\n', (7909, 7946), True, 'import numpy as np\n'), ((8172, 8186), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (8183, 8186), True, 'import megengine.functional as F\n'), ((8460, 8474), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (8471, 8474), True, 'import megengine.functional as F\n'), ((8504, 8537), 'numpy.ones', 'np.ones', (['(3, 3)'], {'dtype': 'np.float32'}), '((3, 3), dtype=np.float32)\n', (8511, 8537), True, 'import numpy as np\n'), ((8742, 8756), 'megengine.functional.ones_like', 'F.ones_like', (['y'], {}), '(y)\n', (8753, 8756), True, 'import megengine.functional as F\n'), ((1691, 1709), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (1705, 1709), True, 'import numpy as np\n'), ((2953, 2971), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (2967, 2971), True, 'import numpy as np\n'), ((3026, 3032), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (3030, 3032), False, 
'from megengine.core.autodiff.grad import Grad\n'), ((3100, 3118), 'numpy.ones_like', 'np.ones_like', (['x_np'], {}), '(x_np)\n', (3112, 3118), True, 'import numpy as np\n'), ((3173, 3185), 'numpy.sin', 'np.sin', (['x_np'], {}), '(x_np)\n', (3179, 3185), True, 'import numpy as np\n'), ((3219, 3237), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (3233, 3237), True, 'import numpy as np\n'), ((3292, 3298), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (3296, 3298), False, 'from megengine.core.autodiff.grad import Grad\n'), ((3387, 3405), 'numpy.ones_like', 'np.ones_like', (['x_np'], {}), '(x_np)\n', (3399, 3405), True, 'import numpy as np\n'), ((3591, 3609), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (3605, 3609), True, 'import numpy as np\n'), ((3705, 3711), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (3709, 3711), False, 'from megengine.core.autodiff.grad import Grad\n'), ((3752, 3758), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (3756, 3758), False, 'from megengine.core.autodiff.grad import Grad\n'), ((3874, 3886), 'numpy.sin', 'np.sin', (['x_np'], {}), '(x_np)\n', (3880, 3886), True, 'import numpy as np\n'), ((3976, 3988), 'numpy.cos', 'np.cos', (['x_np'], {}), '(x_np)\n', (3982, 3988), True, 'import numpy as np\n'), ((4040, 4058), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (4054, 4058), True, 'import numpy as np\n'), ((4117, 4123), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (4121, 4123), False, 'from megengine.core.autodiff.grad import Grad\n'), ((4216, 4234), 'numpy.ones_like', 'np.ones_like', (['x_np'], {}), '(x_np)\n', (4228, 4234), True, 'import numpy as np\n'), ((4615, 4626), 'gc.enable', 'gc.enable', ([], {}), '()\n', (4624, 4626), False, 'import gc\n'), ((4984, 5002), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (4998, 5002), True, 'import numpy as np\n'), ((5061, 5067), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (5065, 5067), False, 'from megengine.core.autodiff.grad import Grad\n'), ((5153, 5171), 'numpy.ones_like', 'np.ones_like', (['x_np'], {}), '(x_np)\n', (5165, 5171), True, 'import numpy as np\n'), ((5289, 5307), 'numpy.random.rand', 'np.random.rand', (['(10)'], {}), '(10)\n', (5303, 5307), True, 'import numpy as np\n'), ((5337, 5359), 'numpy.random.rand', 'np.random.rand', (['(10)', '(10)'], {}), '(10, 10)\n', (5351, 5359), True, 'import numpy as np\n'), ((5390, 5412), 'numpy.random.rand', 'np.random.rand', (['(10)', '(10)'], {}), '(10, 10)\n', (5404, 5412), True, 'import numpy as np\n'), ((5695, 5701), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (5699, 5701), False, 'from megengine.core.autodiff.grad import Grad\n'), ((6169, 6175), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (6173, 6175), False, 'from megengine.core.autodiff.grad import Grad\n'), ((6739, 6759), 'numpy.random.rand', 'np.random.rand', (['(2)', '(5)'], {}), '(2, 5)\n', (6753, 6759), True, 'import numpy as np\n'), ((6818, 6824), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (6822, 6824), False, 'from megengine.core.autodiff.grad import Grad\n'), ((7020, 7040), 'numpy.random.rand', 'np.random.rand', (['(3)', '(3)'], {}), '(3, 3)\n', (7034, 7040), True, 'import numpy as np\n'), ((7099, 7105), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (7103, 7105), False, 'from megengine.core.autodiff.grad import Grad\n'), ((7350, 7370), 'numpy.random.rand', 'np.random.rand', 
(['(3)', '(3)'], {}), '(3, 3)\n', (7364, 7370), True, 'import numpy as np\n'), ((7429, 7435), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (7433, 7435), False, 'from megengine.core.autodiff.grad import Grad\n'), ((7679, 7699), 'numpy.random.rand', 'np.random.rand', (['(1)', '(5)'], {}), '(1, 5)\n', (7693, 7699), True, 'import numpy as np\n'), ((7758, 7764), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (7762, 7764), False, 'from megengine.core.autodiff.grad import Grad\n'), ((8004, 8027), 'numpy.random.rand', 'np.random.rand', (['(3)', '(3)', '(1)'], {}), '(3, 3, 1)\n', (8018, 8027), True, 'import numpy as np\n'), ((8086, 8092), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (8090, 8092), False, 'from megengine.core.autodiff.grad import Grad\n'), ((8216, 8252), 'numpy.ones', 'np.ones', (['(3, 3, 1)'], {'dtype': 'np.float32'}), '((3, 3, 1), dtype=np.float32)\n', (8223, 8252), True, 'import numpy as np\n'), ((8311, 8331), 'numpy.random.rand', 'np.random.rand', (['(3)', '(3)'], {}), '(3, 3)\n', (8325, 8331), True, 'import numpy as np\n'), ((8390, 8396), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (8394, 8396), False, 'from megengine.core.autodiff.grad import Grad\n'), ((8592, 8612), 'numpy.random.rand', 'np.random.rand', (['(3)', '(3)'], {}), '(3, 3)\n', (8606, 8612), True, 'import numpy as np\n'), ((8671, 8677), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (8675, 8677), False, 'from megengine.core.autodiff.grad import Grad\n'), ((8786, 8819), 'numpy.ones', 'np.ones', (['(3, 3)'], {'dtype': 'np.float32'}), '((3, 3), dtype=np.float32)\n', (8793, 8819), True, 'import numpy as np\n'), ((1346, 1377), 'megengine.device.get_default_device', 'mge.device.get_default_device', ([], {}), '()\n', (1375, 1377), True, 'import megengine as mge\n'), ((4742, 4748), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (4746, 4748), False, 'from megengine.core.autodiff.grad import Grad\n'), ((2227, 2245), 'numpy.ones_like', 'np.ones_like', (['x_np'], {}), '(x_np)\n', (2239, 2245), True, 'import numpy as np\n'), ((4494, 4510), 'gc.get_objects', 'gc.get_objects', ([], {}), '()\n', (4508, 4510), False, 'import gc\n'), ((4829, 4835), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (4833, 4835), False, 'from megengine.core.autodiff.grad import Grad\n'), ((4904, 4910), 'megengine.core.autodiff.grad.Grad', 'Grad', ([], {}), '()\n', (4908, 4910), False, 'from megengine.core.autodiff.grad import Grad\n')]
|
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_active_roles(session: Session = Depends(get_session)):
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
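# Activate role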
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_activate = session.exec(statement).one()
role_to_activate.is_active = True
role_to_activate.updated_at = datetime.now()
session.add(role_to_activate)
session.commit()
session.refresh(role_to_activate)
return role_to_activate
# Deactivate role
@router.put("/{role_id}/deactivate")
async def deactivate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_deactivate = session.exec(statement).one()
role_to_deactivate.is_active = False
role_to_deactivate.updated_at = datetime.now()
session.add(role_to_deactivate)
session.commit()
session.refresh(role_to_deactivate)
return role_to_deactivate
# Update role
@router.put("/")
async def update_role(
id: str = None,
new_name: str = None,
new_short_name: str = None,
is_active: bool = None,
session: Session = Depends(get_session),
):
statement = select(Role.is_active).where(Role.id == id)
result = session.exec(statement).first()
if result == True:
statement = select(Role).where(Role.id == id)
role_to_update = session.exec(statement).one()
if new_name != None:
role_to_update.name = new_name
if new_short_name != None:
role_to_update.short_name = new_short_name
if is_active != None:
role_to_update.is_active = is_active
session.add(role_to_update)
role_to_update.updated_at = datetime.now()
session.commit()
session.refresh(role_to_update)
return role_to_update
else:
return False
|
[
"sqlmodel.Session",
"sqlmodel.select"
] |
[((243, 288), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/roles"""', 'tags': "['role']"}), "(prefix='/api/roles', tags=['role'])\n", (252, 288), False, 'from fastapi import APIRouter, Depends\n'), ((299, 314), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (306, 314), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((404, 424), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (411, 424), False, 'from fastapi import APIRouter, Depends\n'), ((769, 789), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (776, 789), False, 'from fastapi import APIRouter, Depends\n'), ((808, 820), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (814, 820), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((976, 996), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (983, 996), False, 'from fastapi import APIRouter, Depends\n'), ((1231, 1251), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1238, 1251), False, 'from fastapi import APIRouter, Depends\n'), ((1436, 1450), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1448, 1450), False, 'from datetime import datetime\n'), ((1704, 1724), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1711, 1724), False, 'from fastapi import APIRouter, Depends\n'), ((1916, 1930), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1928, 1930), False, 'from datetime import datetime\n'), ((2243, 2263), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2250, 2263), False, 'from fastapi import APIRouter, Depends\n'), ((2818, 2832), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2830, 2832), False, 'from datetime import datetime\n'), ((443, 455), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (449, 455), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1015, 1027), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1021, 1027), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1272, 1284), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1278, 1284), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1745, 1757), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1751, 1757), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2284, 2306), 'sqlmodel.select', 'select', (['Role.is_active'], {}), '(Role.is_active)\n', (2290, 2306), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2416, 2428), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (2422, 2428), False, 'from sqlmodel import Session, select, SQLModel, or_\n')]
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import numpy as np
import pytest
from megengine import tensor
from megengine.test import assertTensorClose
def test_reshape_tuple():
inp = tensor(np.arange(1, 17, dtype=np.int32).reshape(4, 4))
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
out = out.reshape(inp.shape)
assertTensorClose(out.numpy(), np.arange(100, 116, dtype=np.int32).reshape(4, 4))
def test_reshape_asterisk():
inp = tensor(np.arange(1, 17, dtype=np.int32).reshape(4, 4))
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
out = out.reshape(*inp.shape)
assertTensorClose(out.numpy(), np.arange(100, 116, dtype=np.int32).reshape(4, 4))
def test_reshape_shapeof():
inp = tensor(np.arange(1, 17, dtype=np.int32).reshape(4, 4))
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
out = out.reshape(inp.shapeof())
assertTensorClose(out.numpy(), np.arange(100, 116, dtype=np.int32).reshape(4, 4))
def test_reshape_tensor():
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
out = out.reshape(tensor([4, 4]))
assertTensorClose(out.numpy(), np.arange(100, 116, dtype=np.int32).reshape(4, 4))
def test_reshape_tensor_fused():
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
out = out.reshape(tensor([4, 4]), 1)
assertTensorClose(out.numpy(), np.arange(100, 116, dtype=np.int32).reshape(4, 4, 1))
def test_reshape_fused():
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
out = out.reshape(tensor(2), 2, tensor(4), 1)
assertTensorClose(
out.numpy(), np.arange(100, 116, dtype=np.int32).reshape(2, 2, 4, 1)
)
def test_reshape_wrong_tuple():
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
with pytest.raises(ValueError):
out = out.reshape((2, 2), 4)
def test_reshape_wrong_tuple2():
out = tensor(np.arange(100, 116, dtype=np.int32).reshape(1, 16))
with pytest.raises(AssertionError):
out = out.reshape(4, (2, 2))
def test_broadcast_tuple():
inp = tensor(np.arange(1, 17, dtype=np.int32).reshape(4, 4))
out = tensor(np.arange(100, 104, dtype=np.int32).reshape(1, 4))
out = out.broadcast(inp.shape)
tmp = np.array([[100, 101, 102, 103]], dtype=np.int32)
out2 = np.repeat(tmp, 4, axis=0)
assertTensorClose(out.numpy(), out2)
def test_broadcast_asterisk():
inp = tensor(np.arange(1, 17, dtype=np.int32).reshape(4, 4))
out = tensor(np.arange(100, 104, dtype=np.int32).reshape(1, 4))
out = out.broadcast(*inp.shape)
tmp = np.array([[100, 101, 102, 103]], dtype=np.int32)
out2 = np.repeat(tmp, 4, axis=0)
assertTensorClose(out.numpy(), out2)
def test_broadcast_shapeof():
inp = tensor(np.arange(1, 17, dtype=np.int32).reshape(4, 4))
out = tensor(np.arange(100, 104, dtype=np.int32).reshape(1, 4))
out = out.broadcast(inp.shapeof())
tmp = np.array([[100, 101, 102, 103]], dtype=np.int32)
out2 = np.repeat(tmp, 4, axis=0)
assertTensorClose(out.numpy(), out2)
|
[
"megengine.tensor"
] |
[((2620, 2668), 'numpy.array', 'np.array', (['[[100, 101, 102, 103]]'], {'dtype': 'np.int32'}), '([[100, 101, 102, 103]], dtype=np.int32)\n', (2628, 2668), True, 'import numpy as np\n'), ((2680, 2705), 'numpy.repeat', 'np.repeat', (['tmp', '(4)'], {'axis': '(0)'}), '(tmp, 4, axis=0)\n', (2689, 2705), True, 'import numpy as np\n'), ((2962, 3010), 'numpy.array', 'np.array', (['[[100, 101, 102, 103]]'], {'dtype': 'np.int32'}), '([[100, 101, 102, 103]], dtype=np.int32)\n', (2970, 3010), True, 'import numpy as np\n'), ((3022, 3047), 'numpy.repeat', 'np.repeat', (['tmp', '(4)'], {'axis': '(0)'}), '(tmp, 4, axis=0)\n', (3031, 3047), True, 'import numpy as np\n'), ((3306, 3354), 'numpy.array', 'np.array', (['[[100, 101, 102, 103]]'], {'dtype': 'np.int32'}), '([[100, 101, 102, 103]], dtype=np.int32)\n', (3314, 3354), True, 'import numpy as np\n'), ((3366, 3391), 'numpy.repeat', 'np.repeat', (['tmp', '(4)'], {'axis': '(0)'}), '(tmp, 4, axis=0)\n', (3375, 3391), True, 'import numpy as np\n'), ((1461, 1475), 'megengine.tensor', 'tensor', (['[4, 4]'], {}), '([4, 4])\n', (1467, 1475), False, 'from megengine import tensor\n'), ((1690, 1704), 'megengine.tensor', 'tensor', (['[4, 4]'], {}), '([4, 4])\n', (1696, 1704), False, 'from megengine import tensor\n'), ((1918, 1927), 'megengine.tensor', 'tensor', (['(2)'], {}), '(2)\n', (1924, 1927), False, 'from megengine import tensor\n'), ((1932, 1941), 'megengine.tensor', 'tensor', (['(4)'], {}), '(4)\n', (1938, 1941), False, 'from megengine import tensor\n'), ((2165, 2190), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2178, 2190), False, 'import pytest\n'), ((2342, 2371), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (2355, 2371), False, 'import pytest\n'), ((530, 562), 'numpy.arange', 'np.arange', (['(1)', '(17)'], {'dtype': 'np.int32'}), '(1, 17, dtype=np.int32)\n', (539, 562), True, 'import numpy as np\n'), ((595, 630), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (604, 630), True, 'import numpy as np\n'), ((716, 751), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (725, 751), True, 'import numpy as np\n'), ((815, 847), 'numpy.arange', 'np.arange', (['(1)', '(17)'], {'dtype': 'np.int32'}), '(1, 17, dtype=np.int32)\n', (824, 847), True, 'import numpy as np\n'), ((880, 915), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (889, 915), True, 'import numpy as np\n'), ((1002, 1037), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1011, 1037), True, 'import numpy as np\n'), ((1100, 1132), 'numpy.arange', 'np.arange', (['(1)', '(17)'], {'dtype': 'np.int32'}), '(1, 17, dtype=np.int32)\n', (1109, 1132), True, 'import numpy as np\n'), ((1165, 1200), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1174, 1200), True, 'import numpy as np\n'), ((1290, 1325), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1299, 1325), True, 'import numpy as np\n'), ((1387, 1422), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1396, 1422), True, 'import numpy as np\n'), ((1513, 1548), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1522, 1548), True, 'import 
numpy as np\n'), ((1616, 1651), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1625, 1651), True, 'import numpy as np\n'), ((1745, 1780), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1754, 1780), True, 'import numpy as np\n'), ((1844, 1879), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (1853, 1879), True, 'import numpy as np\n'), ((1991, 2026), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (2000, 2026), True, 'import numpy as np\n'), ((2104, 2139), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (2113, 2139), True, 'import numpy as np\n'), ((2281, 2316), 'numpy.arange', 'np.arange', (['(100)', '(116)'], {'dtype': 'np.int32'}), '(100, 116, dtype=np.int32)\n', (2290, 2316), True, 'import numpy as np\n'), ((2457, 2489), 'numpy.arange', 'np.arange', (['(1)', '(17)'], {'dtype': 'np.int32'}), '(1, 17, dtype=np.int32)\n', (2466, 2489), True, 'import numpy as np\n'), ((2522, 2557), 'numpy.arange', 'np.arange', (['(100)', '(104)'], {'dtype': 'np.int32'}), '(100, 104, dtype=np.int32)\n', (2531, 2557), True, 'import numpy as np\n'), ((2798, 2830), 'numpy.arange', 'np.arange', (['(1)', '(17)'], {'dtype': 'np.int32'}), '(1, 17, dtype=np.int32)\n', (2807, 2830), True, 'import numpy as np\n'), ((2863, 2898), 'numpy.arange', 'np.arange', (['(100)', '(104)'], {'dtype': 'np.int32'}), '(100, 104, dtype=np.int32)\n', (2872, 2898), True, 'import numpy as np\n'), ((3139, 3171), 'numpy.arange', 'np.arange', (['(1)', '(17)'], {'dtype': 'np.int32'}), '(1, 17, dtype=np.int32)\n', (3148, 3171), True, 'import numpy as np\n'), ((3204, 3239), 'numpy.arange', 'np.arange', (['(100)', '(104)'], {'dtype': 'np.int32'}), '(100, 104, dtype=np.int32)\n', (3213, 3239), True, 'import numpy as np\n')]
|
from sqlmodel import SQLModel
from sqlmodel import Field, Relationship
from sqlalchemy import String
from sqlalchemy.sql.schema import Column
from typing import TYPE_CHECKING, Optional, List
if TYPE_CHECKING:
from app.src.models.product import ProductRead
from app.src.models.product import Product
class ProductTypeBase(SQLModel):
name: str
description: str
class ProductType(ProductTypeBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str = Field(sa_column=Column("name", String, unique=True))
description: Optional[str] = Field(default=None)
products: List["Product"] = Relationship(back_populates="product_type")
class ProductTypeReadwithProduct(ProductTypeBase):
product_type: Optional["ProductRead"] = None
class ProductTypeCreate(ProductTypeBase):
pass
class ProductTypeRead(ProductTypeBase):
id: int
# Nel modello update tutti gli attributi devono essere opzionali
class ProductTypeUpdate(SQLModel):
name: Optional[str] = None
description: Optional[str] = None
|
[
"sqlmodel.Field",
"sqlmodel.Relationship"
] |
[((452, 489), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (457, 489), False, 'from sqlmodel import Field, Relationship\n'), ((592, 611), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (597, 611), False, 'from sqlmodel import Field, Relationship\n'), ((644, 687), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""product_type"""'}), "(back_populates='product_type')\n", (656, 687), False, 'from sqlmodel import Field, Relationship\n'), ((522, 557), 'sqlalchemy.sql.schema.Column', 'Column', (['"""name"""', 'String'], {'unique': '(True)'}), "('name', String, unique=True)\n", (528, 557), False, 'from sqlalchemy.sql.schema import Column\n')]
|
"""All Models for database are defined here"""
from typing import Optional, List
from sqlalchemy import Column, ForeignKey, Integer
from sqlmodel import Field, SQLModel, Relationship
class VrfLink(SQLModel, table=True):
"""Used for vrf to device link"""
vrf_id: Optional[int] = Field(default=None, foreign_key="vrf.id", primary_key=True)
device_id: Optional[int] = Field(
default=None, foreign_key="device.id", primary_key=True
)
class Platform(SQLModel, table=True):
"""Used to define platforms"""
id: Optional[int] = Field(default=None, primary_key=True)
platform_name: str = Field(index=True)
class Vrf(SQLModel, table=True):
"""used to define VRFs"""
id: Optional[int] = Field(default=None, primary_key=True)
vrf_name: str = Field(index=True)
devices: List["Device"] = Relationship(back_populates="vrfs", link_model=VrfLink)
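# Vrf.devices and Device.vrfs form a many-to-many relationship through the VrfLink table.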
class Device(SQLModel, table=True):
"""Used to define a simple device"""
id: Optional[int] = Field(default=None, primary_key=True)
name: str = Field(index=True)
mgmt: str
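    # platform_id uses an explicit SQLAlchemy Column so the foreign key can declare
    # ON DELETE SET NULL / ON UPDATE CASCADE behaviour.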
platform_id: Optional[int] = Field(
sa_column=Column(
Integer, ForeignKey("platform.id", ondelete="SET NULL", onupdate="CASCADE")
)
)
vrfs: List["Vrf"] = Relationship(back_populates="devices", link_model=VrfLink)
|
[
"sqlmodel.Field",
"sqlmodel.Relationship"
] |
[((288, 347), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""vrf.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='vrf.id', primary_key=True)\n", (293, 347), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((379, 441), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""device.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='device.id', primary_key=True)\n", (384, 441), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((555, 592), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (560, 592), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((618, 635), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (623, 635), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((725, 762), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (730, 762), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((783, 800), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (788, 800), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((831, 886), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""vrfs"""', 'link_model': 'VrfLink'}), "(back_populates='vrfs', link_model=VrfLink)\n", (843, 886), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((990, 1027), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (995, 1027), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((1044, 1061), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (1049, 1061), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((1270, 1328), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""devices"""', 'link_model': 'VrfLink'}), "(back_populates='devices', link_model=VrfLink)\n", (1282, 1328), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((1163, 1229), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""platform.id"""'], {'ondelete': '"""SET NULL"""', 'onupdate': '"""CASCADE"""'}), "('platform.id', ondelete='SET NULL', onupdate='CASCADE')\n", (1173, 1229), False, 'from sqlalchemy import Column, ForeignKey, Integer\n')]
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# pylint: skip-file
import argparse
import sys
# pylint: disable=import-error
import resnet.model as resnet_model
# pylint: disable=import-error
import shufflenet.model as snet_model
import numpy as np
import megengine as mge
import megengine.functional as F
from megengine import jit
def dump_static_graph(model, graph_name, shape):
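    # Trace the model together with its preprocessing (warp to 224x224, mean/std
    # normalisation, NHWC -> NCHW) and dump an inference-optimized .mge graph.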
model.eval()
data = mge.Tensor(np.ones(shape, dtype=np.uint8))
@jit.trace(capture_as_const=True)
def pred_func(data):
out = data.astype(np.float32)
output_h, output_w = 224, 224
# resize
print(shape)
M = mge.tensor(np.array([[1,0,0], [0,1,0], [0,0,1]], dtype=np.float32))
M_shape = F.concat([data.shape[0],M.shape])
M = F.broadcast_to(M, M_shape)
out = F.vision.warp_perspective(out, M, (output_h, output_w), format='NHWC')
# mean
_mean = mge.Tensor(np.array([103.530, 116.280, 123.675], dtype=np.float32))
out = F.sub(out, _mean)
# div
_div = mge.Tensor(np.array([57.375, 57.120, 58.395], dtype=np.float32))
out = F.div(out, _div)
        # dimshuffle: NHWC -> NCHW
out = F.transpose(out, (0,3,1,2))
outputs = model(out)
return outputs
pred_func(data)
pred_func.dump(
graph_name,
arg_names=["data"],
optimize_for_inference=True,
enable_fuse_conv_bias_nonlinearity=True,
)
def main():
parser = argparse.ArgumentParser(description="MegEngine Classification Dump .mge")
parser.add_argument(
"-a",
"--arch",
default="resnet18",
help="model architecture (default: resnet18)",
)
parser.add_argument(
"-s",
"--shape",
type=int,
nargs='+',
default="1 3 224 224",
help="input shape (default: 1 3 224 224)"
)
parser.add_argument(
"-o",
"--output",
type=str,
default="model.mge",
help="output filename"
)
args = parser.parse_args()
if 'resnet' in args.arch:
model = getattr(resnet_model, args.arch)(pretrained=True)
elif 'shufflenet' in args.arch:
model = getattr(snet_model, args.arch)(pretrained=True)
else:
print('unavailable arch {}'.format(args.arch))
sys.exit()
print(model)
dump_static_graph(model, args.output, tuple(args.shape))
if __name__ == "__main__":
main()
|
[
"megengine.functional.div",
"megengine.functional.broadcast_to",
"megengine.jit.trace",
"megengine.functional.sub",
"megengine.functional.vision.warp_perspective",
"megengine.functional.concat",
"megengine.functional.transpose"
] |
[((795, 827), 'megengine.jit.trace', 'jit.trace', ([], {'capture_as_const': '(True)'}), '(capture_as_const=True)\n', (804, 827), False, 'from megengine import jit\n'), ((1815, 1888), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""MegEngine Classification Dump .mge"""'}), "(description='MegEngine Classification Dump .mge')\n", (1838, 1888), False, 'import argparse\n'), ((757, 787), 'numpy.ones', 'np.ones', (['shape'], {'dtype': 'np.uint8'}), '(shape, dtype=np.uint8)\n', (764, 787), True, 'import numpy as np\n'), ((1074, 1108), 'megengine.functional.concat', 'F.concat', (['[data.shape[0], M.shape]'], {}), '([data.shape[0], M.shape])\n', (1082, 1108), True, 'import megengine.functional as F\n'), ((1120, 1146), 'megengine.functional.broadcast_to', 'F.broadcast_to', (['M', 'M_shape'], {}), '(M, M_shape)\n', (1134, 1146), True, 'import megengine.functional as F\n'), ((1161, 1231), 'megengine.functional.vision.warp_perspective', 'F.vision.warp_perspective', (['out', 'M', '(output_h, output_w)'], {'format': '"""NHWC"""'}), "(out, M, (output_h, output_w), format='NHWC')\n", (1186, 1231), True, 'import megengine.functional as F\n'), ((1345, 1362), 'megengine.functional.sub', 'F.sub', (['out', '_mean'], {}), '(out, _mean)\n', (1350, 1362), True, 'import megengine.functional as F\n'), ((1472, 1488), 'megengine.functional.div', 'F.div', (['out', '_div'], {}), '(out, _div)\n', (1477, 1488), True, 'import megengine.functional as F\n'), ((1526, 1556), 'megengine.functional.transpose', 'F.transpose', (['out', '(0, 3, 1, 2)'], {}), '(out, (0, 3, 1, 2))\n', (1537, 1556), True, 'import megengine.functional as F\n'), ((991, 1052), 'numpy.array', 'np.array', (['[[1, 0, 0], [0, 1, 0], [0, 0, 1]]'], {'dtype': 'np.float32'}), '([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float32)\n', (999, 1052), True, 'import numpy as np\n'), ((1274, 1327), 'numpy.array', 'np.array', (['[103.53, 116.28, 123.675]'], {'dtype': 'np.float32'}), '([103.53, 116.28, 123.675], dtype=np.float32)\n', (1282, 1327), True, 'import numpy as np\n'), ((1404, 1455), 'numpy.array', 'np.array', (['[57.375, 57.12, 58.395]'], {'dtype': 'np.float32'}), '([57.375, 57.12, 58.395], dtype=np.float32)\n', (1412, 1455), True, 'import numpy as np\n'), ((2661, 2671), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2669, 2671), False, 'import sys\n')]
|
from typing import Optional
import pytest
from sqlmodel import Field, SQLModel, create_engine
def test_missing_sql_type():
class CustomType:
@classmethod
def __get_validators__(cls):
yield cls.validate
@classmethod
def validate(cls, v):
return v
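    # CustomType is a valid pydantic field type but has no SQLAlchemy column type,
    # so declaring it on a table model is expected to raise ValueError.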
with pytest.raises(ValueError):
class Item(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
item: CustomType
|
[
"sqlmodel.Field"
] |
[((332, 357), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (345, 357), False, 'import pytest\n'), ((433, 470), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (438, 470), False, 'from sqlmodel import Field, SQLModel, create_engine\n')]
|
import textwrap
from typing import List, Optional
import pytest
from sqlmodel import Field, Relationship, SQLModel
import strawberry
class City(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field()
population: int = Field()
class Manager(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field()
managed_team: "Team" = Relationship(back_populates="manager")
class Team(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field(index=True)
headquarters: Optional[str] = Field(default=None)
manager_id: int = Field(nullable=False, foreign_key="manager.id")
manager: Manager = Relationship(back_populates="managed_team")
heroes: List["Hero"] = Relationship(back_populates="team")
class Hero(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field(index=True)
secret_name: str
age: Optional[int] = Field(default=None, index=True)
team_id: Optional[int] = Field(default=None, foreign_key="team.id")
team: Optional[Team] = Relationship(back_populates="heroes")
@pytest.fixture
def clear_types():
for model in (Team, Hero, Manager, City):
if hasattr(model, "_strawberry_type"):
delattr(model, "_strawberry_type")
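# The fixture above removes the cached `_strawberry_type` attribute so every test
# regenerates its GraphQL types from the SQLModel classes.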
def test_all_fields(clear_types):
@strawberry.experimental.pydantic.type(City, all_fields=True)
class CityType:
pass
@strawberry.type
class Query:
@strawberry.field
def city(self) -> CityType:
return CityType(id=1, name="Gotham", population=100000)
schema = strawberry.Schema(query=Query)
expected_schema = """
type CityType {
name: String!
population: Int!
id: Int
}
type Query {
city: CityType!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ city { name } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["city"]["name"] == "Gotham"
def test_basic_type_field_list(clear_types):
@strawberry.experimental.pydantic.type(Team, fields=["name", "headquarters"])
class TeamType:
pass
@strawberry.type
class Query:
@strawberry.field
def team(self) -> TeamType:
return TeamType(name="hobbits", headquarters="The Shire")
schema = strawberry.Schema(query=Query)
expected_schema = """
type Query {
team: TeamType!
}
type TeamType {
name: String!
headquarters: String
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ team { name } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["team"]["name"] == "hobbits"
def test_one_to_one_optional(clear_types):
@strawberry.experimental.pydantic.type(Team, fields=["name"])
class TeamType:
pass
@strawberry.experimental.pydantic.type(Hero, fields=["team"])
class HeroType:
pass
@strawberry.type
class Query:
@strawberry.field
def hero(self) -> HeroType:
return HeroType(team=TeamType(name="Skii"))
schema = strawberry.Schema(query=Query)
expected_schema = """
type HeroType {
team: TeamType
}
type Query {
hero: HeroType!
}
type TeamType {
name: String!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ hero { team { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["hero"]["team"]["name"] == "Skii"
def test_one_to_one_required(clear_types):
@strawberry.experimental.pydantic.type(Manager, fields=["name"])
class ManagerType:
pass
@strawberry.experimental.pydantic.type(Team, fields=["manager"])
class TeamType:
pass
@strawberry.type
class Query:
@strawberry.field
def team(self) -> TeamType:
return TeamType(manager=ManagerType(name="Skii"))
schema = strawberry.Schema(query=Query)
expected_schema = """
type ManagerType {
name: String!
}
type Query {
team: TeamType!
}
type TeamType {
manager: ManagerType!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ team { manager { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["team"]["manager"]["name"] == "Skii"
def test_nested_type_unordered(clear_types):
@strawberry.experimental.pydantic.type(Hero, fields=["team"])
class HeroType:
pass
@strawberry.experimental.pydantic.type(Team, fields=["name"])
class TeamType:
pass
@strawberry.type
class Query:
@strawberry.field
def hero(self) -> HeroType:
return HeroType(team=TeamType(name="Skii"))
schema = strawberry.Schema(query=Query)
expected_schema = """
type HeroType {
team: TeamType
}
type Query {
hero: HeroType!
}
type TeamType {
name: String!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ hero { team { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["hero"]["team"]["name"] == "Skii"
def test_one_to_many(clear_types):
@strawberry.experimental.pydantic.type(Team, fields=["heroes"])
class TeamType:
pass
@strawberry.experimental.pydantic.type(Hero, fields=["name"])
class HeroType:
pass
@strawberry.type
class Query:
@strawberry.field
def team(self) -> TeamType:
return TeamType(heroes=[HeroType(name="Skii"), HeroType(name="Chris")])
schema = strawberry.Schema(query=Query)
expected_schema = """
type HeroType {
name: String!
}
type Query {
team: TeamType!
}
type TeamType {
heroes: [HeroType!]!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ team { heroes { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["team"]["heroes"][0]["name"] == "Skii"
assert result.data["team"]["heroes"][1]["name"] == "Chris"
|
[
"sqlmodel.Relationship",
"sqlmodel.Field"
] |
[((196, 233), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (201, 233), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((250, 257), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (255, 257), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((280, 287), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (285, 287), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((351, 388), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (356, 388), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((405, 412), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (410, 412), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((440, 478), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""manager"""'}), "(back_populates='manager')\n", (452, 478), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((539, 576), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (544, 576), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((593, 610), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (598, 610), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((645, 664), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (650, 664), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((687, 734), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)', 'foreign_key': '"""manager.id"""'}), "(nullable=False, foreign_key='manager.id')\n", (692, 734), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((758, 801), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""managed_team"""'}), "(back_populates='managed_team')\n", (770, 801), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((829, 864), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""team"""'}), "(back_populates='team')\n", (841, 864), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((925, 962), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (930, 962), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((979, 996), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (984, 996), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1043, 1074), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'index': '(True)'}), '(default=None, index=True)\n', (1048, 1074), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1104, 1146), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (1109, 1146), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1174, 1211), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""heroes"""'}), "(back_populates='heroes')\n", (1186, 1211), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1430, 1490), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['City'], {'all_fields': '(True)'}), '(City, all_fields=True)\n', (1467, 1490), False, 'import strawberry\n'), ((1707, 1737), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (1724, 1737), False, 'import strawberry\n'), ((2177, 
2253), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['name', 'headquarters']"}), "(Team, fields=['name', 'headquarters'])\n", (2214, 2253), False, 'import strawberry\n'), ((2472, 2502), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (2489, 2502), False, 'import strawberry\n'), ((2931, 2991), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['name']"}), "(Team, fields=['name'])\n", (2968, 2991), False, 'import strawberry\n'), ((3031, 3091), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Hero'], {'fields': "['team']"}), "(Hero, fields=['team'])\n", (3068, 3091), False, 'import strawberry\n'), ((3296, 3326), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (3313, 3326), False, 'import strawberry\n'), ((3790, 3853), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Manager'], {'fields': "['name']"}), "(Manager, fields=['name'])\n", (3827, 3853), False, 'import strawberry\n'), ((3896, 3959), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['manager']"}), "(Team, fields=['manager'])\n", (3933, 3959), False, 'import strawberry\n'), ((4170, 4200), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (4187, 4200), False, 'import strawberry\n'), ((4682, 4742), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Hero'], {'fields': "['team']"}), "(Hero, fields=['team'])\n", (4719, 4742), False, 'import strawberry\n'), ((4782, 4842), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['name']"}), "(Team, fields=['name'])\n", (4819, 4842), False, 'import strawberry\n'), ((5047, 5077), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (5064, 5077), False, 'import strawberry\n'), ((5533, 5595), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['heroes']"}), "(Team, fields=['heroes'])\n", (5570, 5595), False, 'import strawberry\n'), ((5635, 5695), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Hero'], {'fields': "['name']"}), "(Hero, fields=['name'])\n", (5672, 5695), False, 'import strawberry\n'), ((5928, 5958), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (5945, 5958), False, 'import strawberry\n'), ((1929, 1961), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (1944, 1961), False, 'import textwrap\n'), ((2684, 2716), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (2699, 2716), False, 'import textwrap\n'), ((3529, 3561), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (3544, 3561), False, 'import textwrap\n'), ((4413, 4445), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (4428, 4445), False, 'import textwrap\n'), ((5280, 5312), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (5295, 5312), False, 'import textwrap\n'), ((6167, 6199), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (6182, 6199), False, 'import textwrap\n')]
|
import io
import numpy as np
import megengine.core.tensor.megbrain_graph as G
import megengine.utils.comp_graph_tools as cgtools
from megengine import tensor
from megengine.jit import trace
from megengine.utils.network_node import VarNode
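

# Default comparison: allclose on numpy arrays; megengine tensors are converted via .numpy() first.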
def _default_compare_fn(x, y):
if isinstance(x, np.ndarray):
np.testing.assert_allclose(x, y, rtol=1e-6)
else:
np.testing.assert_allclose(x.numpy(), y, rtol=1e-6)
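

# Build an input value: an eager Tensor by default, or a graph constant (VarNode)
# when a Network is given.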
def make_tensor(x, network=None, device=None):
if network is not None:
if isinstance(x, VarNode):
return VarNode(x.var)
return network.make_const(x, device=device)
else:
return tensor(x, device=device)


def opr_test(
cases,
func,
compare_fn=_default_compare_fn,
ref_fn=None,
test_trace=True,
network=None,
**kwargs
):
"""
:param cases: the list which have dict element, the list length should be 2 for dynamic shape test.
and the dict should have input,
and should have output if ref_fn is None.
should use list for multiple inputs and outputs for each case.
:param func: the function to run opr.
:param compare_fn: the function to compare the result and expected, use
``np.testing.assert_allclose`` if None.
:param ref_fn: the function to generate expected data, should assign output if None.
Examples:
.. code-block::
dtype = np.float32
cases = [{"input": [10, 20]}, {"input": [20, 30]}]
opr_test(cases,
F.eye,
ref_fn=lambda n, m: np.eye(n, m).astype(dtype),
dtype=dtype)
"""
def check_results(results, expected):
if not isinstance(results, (tuple, list)):
results = (results,)
for r, e in zip(results, expected):
if not isinstance(r, (tensor, VarNode)):
r = tensor(r)
compare_fn(r, e)

    def get_param(cases, idx):
case = cases[idx]
inp = case.get("input", None)
outp = case.get("output", None)
if inp is None:
raise ValueError("the test case should have input")
if not isinstance(inp, (tuple, list)):
inp = (inp,)
if ref_fn is not None and callable(ref_fn):
outp = ref_fn(*inp)
if outp is None:
raise ValueError("the test case should have output or reference function")
if not isinstance(outp, (tuple, list)):
outp = (outp,)
return inp, outp

    if len(cases) == 0:
raise ValueError("should give one case at least")
if not callable(func):
raise ValueError("the input func should be callable")
inp, outp = get_param(cases, 0)
inp_tensor = [make_tensor(inpi, network) for inpi in inp]
if test_trace and not network:
copied_inp = inp_tensor.copy()
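        # run the traced function in both symbolic=False and symbolic=True modes,
        # calling it several times to make sure repeated traced runs stay correct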
for symbolic in [False, True]:
traced_func = trace(symbolic=symbolic)(func)
for _ in range(3):
traced_results = traced_func(*copied_inp, **kwargs)
check_results(traced_results, outp)
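        # also trace with capture_as_const=True so the resulting graph can be dumped and reloaded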
dumped_func = trace(symbolic=True, capture_as_const=True)(func)
dumped_results = dumped_func(*copied_inp, **kwargs)
check_results(dumped_results, outp)
file = io.BytesIO()
dump_info = dumped_func.dump(file)
file.seek(0)
        # argument names follow the pattern arg_xxx, where xxx is an integer index
def take_number(arg_name):
return int(arg_name.split("_")[-1])
input_names = dump_info[4]
inps_np = [i.numpy() for i in copied_inp]
input_names.sort(key=take_number)
inp_dict = dict(zip(input_names, inps_np))
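        # execute the dumped graph through GraphInference and compare against the expected outputs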
infer_cg = cgtools.GraphInference(file)
# assume #outputs == 1
loaded_results = list(infer_cg.run(inp_dict=inp_dict).values())[0]
check_results(loaded_results, outp)
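
    # finally, run ``func`` eagerly on the original inputs and verify the results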
results = func(*inp_tensor, **kwargs)
check_results(results, outp)
|
[
"megengine.jit.trace",
"megengine.tensor",
"megengine.utils.comp_graph_tools.GraphInference",
"megengine.utils.network_node.VarNode"
] |
[((316, 360), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['x', 'y'], {'rtol': '(1e-06)'}), '(x, y, rtol=1e-06)\n', (342, 360), True, 'import numpy as np\n'), ((653, 677), 'megengine.tensor', 'tensor', (['x'], {'device': 'device'}), '(x, device=device)\n', (659, 677), False, 'from megengine import tensor\n'), ((3295, 3307), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3305, 3307), False, 'import io\n'), ((3711, 3739), 'megengine.utils.comp_graph_tools.GraphInference', 'cgtools.GraphInference', (['file'], {}), '(file)\n', (3733, 3739), True, 'import megengine.utils.comp_graph_tools as cgtools\n'), ((561, 575), 'megengine.utils.network_node.VarNode', 'VarNode', (['x.var'], {}), '(x.var)\n', (568, 575), False, 'from megengine.utils.network_node import VarNode\n'), ((3125, 3168), 'megengine.jit.trace', 'trace', ([], {'symbolic': '(True)', 'capture_as_const': '(True)'}), '(symbolic=True, capture_as_const=True)\n', (3130, 3168), False, 'from megengine.jit import trace\n'), ((1879, 1888), 'megengine.tensor', 'tensor', (['r'], {}), '(r)\n', (1885, 1888), False, 'from megengine import tensor\n'), ((2923, 2947), 'megengine.jit.trace', 'trace', ([], {'symbolic': 'symbolic'}), '(symbolic=symbolic)\n', (2928, 2947), False, 'from megengine.jit import trace\n')]
|