Dataset Viewer
Auto-converted to Parquet

Columns:
- code: string, 110 to 64.5k characters
- apis: sequence
- extract_api: string, 123 to 69.9k characters
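Each row below pairs a "code" sample with the fully qualified "apis" it uses and an "extract_api" string recording each call's character span in the original source. A minimal sketch of reading the auto-converted Parquet split with the datasets library follows; the repository id is a placeholder, not the dataset's real name:

from datasets import load_dataset

ds = load_dataset("org/python-api-extraction", split="train")  # hypothetical repo id
row = ds[0]
print(row["apis"])        # e.g. ["sqlmodel.Field"]
print(row["code"][:120])  # first characters of the source sample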
from datetime import datetime
from typing import Optional

from fastapi import APIRouter
from sqlmodel import Field, SQLModel

router = APIRouter()

class Right(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Province(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Amphoe(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    province_id: int
    name: str

class Tambon(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    amphoe_id: int
    name: str

class Religion(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class National(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Occupation(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class MaritalStatus(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class AcademicDegree(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Allergy(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Vehicle(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Language(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Relationship(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class IdType(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class FeedbackType(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class VisibilityLevel(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str

class Module(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    detail: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None

class ModuleFunction(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    detail: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
[ "sqlmodel.Field" ]
[((136, 147), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (145, 147), False, 'from fastapi import APIRouter\n'), ((209, 246), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (214, 246), False, 'from sqlmodel import Field, SQLModel\n'), ((325, 362), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (330, 362), False, 'from sqlmodel import Field, SQLModel\n'), ((439, 476), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (444, 476), False, 'from sqlmodel import Field, SQLModel\n'), ((574, 611), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (579, 611), False, 'from sqlmodel import Field, SQLModel\n'), ((709, 746), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (714, 746), False, 'from sqlmodel import Field, SQLModel\n'), ((825, 862), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (830, 862), False, 'from sqlmodel import Field, SQLModel\n'), ((943, 980), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (948, 980), False, 'from sqlmodel import Field, SQLModel\n'), ((1064, 1101), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1069, 1101), False, 'from sqlmodel import Field, SQLModel\n'), ((1186, 1223), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1191, 1223), False, 'from sqlmodel import Field, SQLModel\n'), ((1301, 1338), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1306, 1338), False, 'from sqlmodel import Field, SQLModel\n'), ((1416, 1453), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1421, 1453), False, 'from sqlmodel import Field, SQLModel\n'), ((1532, 1569), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1537, 1569), False, 'from sqlmodel import Field, SQLModel\n'), ((1652, 1689), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1657, 1689), False, 'from sqlmodel import Field, SQLModel\n'), ((1766, 1803), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1771, 1803), False, 'from sqlmodel import Field, SQLModel\n'), ((1886, 1923), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1891, 1923), False, 'from sqlmodel import Field, SQLModel\n'), ((2009, 2046), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2014, 2046), False, 'from sqlmodel import Field, SQLModel\n'), ((2123, 2160), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2128, 2160), False, 'from sqlmodel import Field, SQLModel\n'), ((2368, 2405), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, 
primary_key=True)\n', (2373, 2405), False, 'from sqlmodel import Field, SQLModel\n')]
from typing import Optional

from sqlmodel import Field, SQLModel, create_engine

class Team(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    headquarters: str

class Hero(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    secret_name: str
    age: Optional[int] = None
    team_id: Optional[int] = Field(default=None, foreign_key="team.id")

sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"

engine = create_engine(sqlite_url, echo=True)

def create_db_and_tables():
    SQLModel.metadata.create_all(engine)

def main():
    create_db_and_tables()

if __name__ == "__main__":
    main()
[ "sqlmodel.SQLModel.metadata.create_all", "sqlmodel.create_engine", "sqlmodel.Field" ]
[((541, 577), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (554, 577), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((141, 178), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (146, 178), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((275, 312), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (280, 312), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((408, 450), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (413, 450), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((612, 648), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (640, 648), False, 'from sqlmodel import Field, SQLModel, create_engine\n')]
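The sample above only creates the engine and tables. As a rough sketch (not part of the sample), inserting and querying the same Team/Hero models with a Session could look like this; the values are purely illustrative:

from sqlmodel import Session, select

def demo(engine):
    with Session(engine) as session:
        team = Team(name="Preventers", headquarters="Sharp Tower")  # illustrative values
        session.add(team)
        session.commit()
        session.refresh(team)  # populate team.id after the insert
        hero = Hero(name="Deadpond", secret_name="Dive Wilson", team_id=team.id)
        session.add(hero)
        session.commit()
        return session.exec(select(Hero).where(Hero.team_id == team.id)).all()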
"""initial2 Revision ID: 9d9a<PASSWORD>dbfd7 Revises: <PASSWORD> Create Date: 2021-11-01 04:28:38.426261 """ from alembic import op import sqlalchemy as sa import sqlmodel # revision identifiers, used by Alembic. revision = '9d9a746db<PASSWORD>' down_revision = 'a<PASSWORD>' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('images', sa.Column('id', sa.Integer(), nullable=False), sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_images_id'), 'images', ['id'], unique=False) op.create_index(op.f('ix_images_url'), 'images', ['url'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_index(op.f('ix_images_url'), table_name='images') op.drop_index(op.f('ix_images_id'), table_name='images') op.drop_table('images') # ### end Alembic commands ###
[ "sqlmodel.sql.sqltypes.AutoString" ]
[((994, 1017), 'alembic.op.drop_table', 'op.drop_table', (['"""images"""'], {}), "('images')\n", (1007, 1017), False, 'from alembic import op\n'), ((561, 590), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (584, 590), True, 'import sqlalchemy as sa\n'), ((617, 637), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (621, 637), False, 'from alembic import op\n'), ((691, 712), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (695, 712), False, 'from alembic import op\n'), ((885, 906), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (889, 906), False, 'from alembic import op\n'), ((947, 967), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (951, 967), False, 'from alembic import op\n'), ((452, 464), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (462, 464), True, 'import sqlalchemy as sa\n'), ((504, 538), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (536, 538), False, 'import sqlmodel\n')]
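The migration above is normally driven by the Alembic environment rather than imported directly. A hedged sketch of applying it programmatically, assuming an alembic.ini sits next to the migrations folder:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")        # path is an assumption
command.upgrade(cfg, "head")     # runs upgrade() above
# command.downgrade(cfg, "-1")   # would run downgrade()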
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import AbstractSet, Any, Dict, Mapping, Optional, Sequence, Union

from pydantic.fields import Undefined, UndefinedType
from sqlalchemy import Column
from sqlmodel import Field

from dbgen.utils.typing import NoArgAnyCallable

def Attribute(
    default: Any = Undefined,
    *,
    default_factory: Optional[NoArgAnyCallable] = None,
    alias: str = None,
    title: str = None,
    description: str = None,
    exclude: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any] = None,
    include: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any] = None,
    const: bool = None,
    gt: float = None,
    ge: float = None,
    lt: float = None,
    le: float = None,
    multiple_of: float = None,
    min_items: int = None,
    max_items: int = None,
    min_length: int = None,
    max_length: int = None,
    allow_mutation: bool = True,
    regex: str = None,
    primary_key: bool = False,
    foreign_key: Optional[Any] = None,
    nullable: Union[bool, UndefinedType] = Undefined,
    index: Union[bool, UndefinedType] = Undefined,
    sa_column: Union[Column, UndefinedType] = Undefined,
    sa_column_args: Union[Sequence[Any], UndefinedType] = Undefined,
    sa_column_kwargs: Union[Mapping[str, Any], UndefinedType] = Undefined,
    schema_extra: Optional[Dict[str, Any]] = None,
) -> Any:
    field = Field(
        default,
        default_factory=default_factory, alias=alias, title=title,
        description=description, exclude=exclude, include=include, const=const,
        gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of,
        min_items=min_items, max_items=max_items,
        min_length=min_length, max_length=max_length,
        allow_mutation=allow_mutation, regex=regex,
        primary_key=primary_key, foreign_key=foreign_key,
        nullable=nullable, index=index,
        sa_column=sa_column, sa_column_args=sa_column_args,
        sa_column_kwargs=sa_column_kwargs, schema_extra=schema_extra,
    )
    return field
[ "sqlmodel.Field" ]
[((1965, 2523), 'sqlmodel.Field', 'Field', (['default'], {'default_factory': 'default_factory', 'alias': 'alias', 'title': 'title', 'description': 'description', 'exclude': 'exclude', 'include': 'include', 'const': 'const', 'gt': 'gt', 'ge': 'ge', 'lt': 'lt', 'le': 'le', 'multiple_of': 'multiple_of', 'min_items': 'min_items', 'max_items': 'max_items', 'min_length': 'min_length', 'max_length': 'max_length', 'allow_mutation': 'allow_mutation', 'regex': 'regex', 'primary_key': 'primary_key', 'foreign_key': 'foreign_key', 'nullable': 'nullable', 'index': 'index', 'sa_column': 'sa_column', 'sa_column_args': 'sa_column_args', 'sa_column_kwargs': 'sa_column_kwargs', 'schema_extra': 'schema_extra'}), '(default, default_factory=default_factory, alias=alias, title=title,\n description=description, exclude=exclude, include=include, const=const,\n gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, min_items=\n min_items, max_items=max_items, min_length=min_length, max_length=\n max_length, allow_mutation=allow_mutation, regex=regex, primary_key=\n primary_key, foreign_key=foreign_key, nullable=nullable, index=index,\n sa_column=sa_column, sa_column_args=sa_column_args, sa_column_kwargs=\n sa_column_kwargs, schema_extra=schema_extra)\n', (1970, 2523), False, 'from sqlmodel import Field\n')]
import numpy as nm
import six

from sfepy import data_dir
from sfepy.base.base import Struct, output
from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData
from sfepy.homogenization.micmac import get_homog_coefs_nonlinear
import sfepy.linalg as la

hyperelastic_data = {}

def post_process(out, pb, state, extend=False):
    if isinstance(state, dict):
        pass
    else:
        pb.update_materials_flag = 2

        stress = pb.evaluate('ev_integrate_mat.1.Omega(solid.S, u)',
                             mode='el_avg')
        out['cauchy_stress'] = Struct(name='output_data', mode='cell',
                                      data=stress, dofs=None)

        strain = pb.evaluate('ev_integrate_mat.1.Omega(solid.E, u)',
                             mode='el_avg')
        out['green_strain'] = Struct(name='output_data', mode='cell',
                                     data=strain, dofs=None)

        pb.update_materials_flag = 0

        if pb.conf.options.get('recover_micro', False):
            happ = pb.homogen_app
            if pb.ts.step == 0:
                rname = pb.conf.options.recovery_region
                rcells = pb.domain.regions[rname].get_cells()
                sh = hyperelastic_data['homog_mat_shape']
                happ.app_options.store_micro_idxs = sh[1] * rcells
            else:
                hpb = happ.problem
                recovery_hook = hpb.conf.options.get('recovery_hook', None)
                if recovery_hook is not None:
                    recovery_hook = hpb.conf.get_function(recovery_hook)
                    rname = pb.conf.options.recovery_region
                    rcoors = []
                    for ii in happ.app_options.store_micro_idxs:
                        key = happ.get_micro_cache_key('coors', ii, pb.ts.step)
                        if key in happ.micro_state_cache:
                            rcoors.append(happ.micro_state_cache[key])

                    recovery_hook(hpb, rcoors, pb.domain.regions[rname], pb.ts)

    return out

def get_homog_mat(ts, coors, mode, term=None, problem=None, **kwargs):
    if problem.update_materials_flag == 2 and mode == 'qp':
        out = hyperelastic_data['homog_mat']
        return {k: nm.array(v) for k, v in six.iteritems(out)}
    elif problem.update_materials_flag == 0 or not mode == 'qp':
        return

    output('get_homog_mat')

    dim = problem.domain.mesh.dim
    update_var = problem.conf.options.mesh_update_variables[0]
    state_u = problem.equations.variables[update_var]
    state_u.field.clear_mappings()
    family_data = problem.family_data(state_u, term.region,
                                      term.integral, term.integration)

    mtx_f = family_data.mtx_f.reshape((coors.shape[0],)
                                      + family_data.mtx_f.shape[-2:])

    out = get_homog_coefs_nonlinear(ts, coors, mode, mtx_f,
                                    term=term, problem=problem,
                                    iteration=problem.iiter, **kwargs)

    out['E'] = 0.5 * (la.dot_sequences(mtx_f, mtx_f, 'ATB') - nm.eye(dim))

    hyperelastic_data['time'] = ts.step
    hyperelastic_data['homog_mat_shape'] = family_data.det_f.shape[:2]
    hyperelastic_data['homog_mat'] = \
        {k: nm.array(v) for k, v in six.iteritems(out)}

    return out

def ulf_iteration_hook(pb, nls, vec, it, err, err0):
    vec = pb.equations.make_full_vec(vec)
    pb.equations.set_variables_from_state(vec)

    update_var = pb.conf.options.mesh_update_variables[0]
    state_u = pb.equations.variables[update_var]
    nods = state_u.field.get_dofs_in_region(state_u.field.region, merge=True)
    coors = pb.domain.get_mesh_coors().copy()
    coors[nods, :] += state_u().reshape(len(nods), state_u.n_components)

    if len(state_u.field.mappings0) == 0:
        state_u.field.save_mappings()

    state_u.field.clear_mappings()
    pb.set_mesh_coors(coors, update_fields=False, actual=True,
                      clear_all=False)

    pb.iiter = it
    pb.update_materials_flag = True
    pb.update_materials()
    pb.update_materials_flag = False

class MyEvalResidual(object):
    def __init__(self, problem, matrix_hook=None):
        self.problem = problem
        self.matrix_hook = problem.matrix_hook

    def eval_residual(self, vec, is_full=False):
        if not is_full:
            vec = self.problem.equations.make_full_vec(vec)

        vec_r = self.problem.equations.eval_residuals(vec * 0)

        return vec_r

def ulf_init(pb):
    pb.family_data = HyperElasticULFamilyData()
    pb.init_solvers()
    pb.nls.fun = MyEvalResidual(pb).eval_residual
    pb.nls_iter_hook = ulf_iteration_hook
    pb.domain.mesh.coors_act = pb.domain.mesh.coors.copy()
    pb_vars = pb.get_variables()
    pb_vars['u'].init_data()

    pb.update_materials_flag = True
    pb.iiter = 0

options = {
    'output_dir': 'output',
    'mesh_update_variables': ['u'],
    'nls_iter_hook': ulf_iteration_hook,
    'pre_process_hook': ulf_init,
    'micro_filename': 'examples/homogenization/nonlinear_homogenization.py',
    'recover_micro': True,
    'recovery_region': 'Recovery',
    'post_process_hook': post_process,
}

materials = {
    'solid': 'get_homog',
}

fields = {
    'displacement': ('real', 'vector', 'Omega', 1),
}

variables = {
    'u': ('unknown field', 'displacement'),
    'v': ('test field', 'displacement', 'u'),
}

filename_mesh = data_dir + '/meshes/2d/its2D.mesh'

regions = {
    'Omega': 'all',
    'Left': ('vertices in (x < 0.001)', 'facet'),
    'Bottom': ('vertices in (y < 0.001 )', 'facet'),
    'Recovery': ('cell 49, 81', 'cell'),
}

ebcs = {
    'l': ('Left', {'u.all': 0.0}),
    'b': ('Bottom', {'u.all': 'move_bottom'}),
}

centre = nm.array([0, 0], dtype=nm.float64)

def move_bottom(ts, coor, **kwargs):
    from sfepy.linalg import rotation_matrix2d

    vec = coor[:, 0:2] - centre
    angle = 3 * ts.step
    print('angle:', angle)
    mtx = rotation_matrix2d(angle)
    out = nm.dot(vec, mtx) - vec

    return out

functions = {
    'move_bottom': (move_bottom,),
    'get_homog': (get_homog_mat,),
}

equations = {
    'balance_of_forces':
    """dw_nonsym_elastic.1.Omega(solid.A, v, u)
     = - dw_lin_prestress.1.Omega(solid.S, v)""",
}

solvers = {
    'ls': ('ls.scipy_direct', {}),
    'newton': ('nls.newton', {
        'eps_a': 1e-3,
        'eps_r': 1e-3,
        'i_max': 20,
    }),
    'ts': ('ts.simple', {
        't0': 0,
        't1': 1,
        'n_step': 3 + 1,
    })
}
[ "sfepy.base.base.Struct", "sfepy.linalg.rotation_matrix2d", "sfepy.homogenization.micmac.get_homog_coefs_nonlinear", "sfepy.linalg.dot_sequences", "sfepy.base.base.output", "sfepy.terms.terms_hyperelastic_ul.HyperElasticULFamilyData" ]
[((5887, 5921), 'numpy.array', 'nm.array', (['[0, 0]'], {'dtype': 'nm.float64'}), '([0, 0], dtype=nm.float64)\n', (5895, 5921), True, 'import numpy as nm\n'), ((2515, 2538), 'sfepy.base.base.output', 'output', (['"""get_homog_mat"""'], {}), "('get_homog_mat')\n", (2521, 2538), False, 'from sfepy.base.base import Struct, output\n'), ((2994, 3111), 'sfepy.homogenization.micmac.get_homog_coefs_nonlinear', 'get_homog_coefs_nonlinear', (['ts', 'coors', 'mode', 'mtx_f'], {'term': 'term', 'problem': 'problem', 'iteration': 'problem.iiter'}), '(ts, coors, mode, mtx_f, term=term, problem=\n problem, iteration=problem.iiter, **kwargs)\n', (3019, 3111), False, 'from sfepy.homogenization.micmac import get_homog_coefs_nonlinear\n'), ((4686, 4712), 'sfepy.terms.terms_hyperelastic_ul.HyperElasticULFamilyData', 'HyperElasticULFamilyData', ([], {}), '()\n', (4710, 4712), False, 'from sfepy.terms.terms_hyperelastic_ul import HyperElasticULFamilyData\n'), ((6102, 6126), 'sfepy.linalg.rotation_matrix2d', 'rotation_matrix2d', (['angle'], {}), '(angle)\n', (6119, 6126), False, 'from sfepy.linalg import rotation_matrix2d\n'), ((575, 638), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'stress', 'dofs': 'None'}), "(name='output_data', mode='cell', data=stress, dofs=None)\n", (581, 638), False, 'from sfepy.base.base import Struct, output\n'), ((898, 961), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'mode': '"""cell"""', 'data': 'strain', 'dofs': 'None'}), "(name='output_data', mode='cell', data=strain, dofs=None)\n", (904, 961), False, 'from sfepy.base.base import Struct, output\n'), ((3418, 3429), 'numpy.array', 'nm.array', (['v'], {}), '(v)\n', (3426, 3429), True, 'import numpy as nm\n'), ((6137, 6153), 'numpy.dot', 'nm.dot', (['vec', 'mtx'], {}), '(vec, mtx)\n', (6143, 6153), True, 'import numpy as nm\n'), ((2386, 2397), 'numpy.array', 'nm.array', (['v'], {}), '(v)\n', (2394, 2397), True, 'import numpy as nm\n'), ((3202, 3239), 'sfepy.linalg.dot_sequences', 'la.dot_sequences', (['mtx_f', 'mtx_f', '"""ATB"""'], {}), "(mtx_f, mtx_f, 'ATB')\n", (3218, 3239), True, 'import sfepy.linalg as la\n'), ((3242, 3253), 'numpy.eye', 'nm.eye', (['dim'], {}), '(dim)\n', (3248, 3253), True, 'import numpy as nm\n'), ((3442, 3460), 'six.iteritems', 'six.iteritems', (['out'], {}), '(out)\n', (3455, 3460), False, 'import six\n'), ((2410, 2428), 'six.iteritems', 'six.iteritems', (['out'], {}), '(out)\n', (2423, 2428), False, 'import six\n')]
import uuid
from logging import getLogger
from typing import Optional

from fastapi import UploadFile
from sqlmodel import select, Session

from .models import User

logger = getLogger("uvicorn.error")

def get_user(username: str, session: Session) -> Optional[User]:
    statement = select(User).where(User.username == username)
    user = session.exec(statement).first()
    if user:
        return user
    return None

def save_file(file: UploadFile) -> str:
    filename = uuid.uuid4()
    path = f"static/{filename}"
    with open(path, "wb") as f:
        content = file.file.read()
        f.write(content)
    return path
[ "sqlmodel.select" ]
[((175, 201), 'logging.getLogger', 'getLogger', (['"""uvicorn.error"""'], {}), "('uvicorn.error')\n", (184, 201), False, 'from logging import getLogger\n'), ((480, 492), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (490, 492), False, 'import uuid\n'), ((285, 297), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (291, 297), False, 'from sqlmodel import select, Session\n')]
from unittest.mock import patch

from sqlmodel import create_engine

from ...conftest import get_testing_print_function

def test_tutorial(clear_sqlmodel):
    from docs_src.tutorial.where import tutorial010 as mod

    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    calls = []

    new_print = get_testing_print_function(calls)

    with patch("builtins.print", new=new_print):
        mod.main()
    assert calls == [
        [{"name": "Tarantula", "secret_name": "<NAME>", "age": 32, "id": 4}],
        [{"name": "<NAME>", "secret_name": "<NAME>", "age": 35, "id": 5}],
        [
            {
                "name": "<NAME>",
                "secret_name": "<NAME>",
                "age": 93,
                "id": 7,
            }
        ],
    ]
[ "sqlmodel.create_engine" ]
[((267, 296), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (280, 296), False, 'from sqlmodel import create_engine\n'), ((373, 411), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (378, 411), False, 'from unittest.mock import patch\n'), ((421, 431), 'docs_src.tutorial.where.tutorial010.main', 'mod.main', ([], {}), '()\n', (429, 431), True, 'from docs_src.tutorial.where import tutorial010 as mod\n')]
#!/usr/bin/env python
"""
Plot quadrature points for the given geometry and integration order.
"""
from optparse import OptionParser

import sfepy.postprocess.plot_quadrature as pq

usage = '%prog [options]\n' + __doc__.rstrip()

helps = {
    'geometry' :
    'reference element geometry, one of "2_3", "2_4", "3_4", "3_8"'
    ' [default: %default]',
    'order' :
    'quadrature order [default: %default]',
    'min_radius' :
    'min. radius of points corresponding to the min. weight'
    ' [default: %default]',
    'max_radius' :
    'max. radius of points corresponding to the max. weight'
    ' [default: %default]',
    'show_colorbar' :
    'show colorbar for quadrature weights'
}

def main():
    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('-g', '--geometry', metavar='name',
                      action='store', dest='geometry',
                      default='2_4', help=helps['geometry'])
    parser.add_option('-n', '--order', metavar='order', type=int,
                      action='store', dest='order',
                      default=2, help=helps['order'])
    parser.add_option('-r', '--min-radius', metavar='float', type=float,
                      action='store', dest='min_radius',
                      default=10, help=helps['min_radius'])
    parser.add_option('-R', '--max-radius', metavar='float', type=float,
                      action='store', dest='max_radius',
                      default=50, help=helps['max_radius'])
    parser.add_option('-c', '--show-colorbar',
                      action='store_true', dest='show_colorbar',
                      default=False, help=helps['show_colorbar'])
    options, args = parser.parse_args()

    if len(args) != 0:
        parser.print_help(),
        return

    pq.plot_quadrature(None, options.geometry, options.order,
                       options.min_radius, options.max_radius,
                       options.show_colorbar)
    pq.plt.show()

if __name__ == '__main__':
    main()
[ "sfepy.postprocess.plot_quadrature.plt.show", "sfepy.postprocess.plot_quadrature.plot_quadrature" ]
[((722, 764), 'optparse.OptionParser', 'OptionParser', ([], {'usage': 'usage', 'version': '"""%prog"""'}), "(usage=usage, version='%prog')\n", (734, 764), False, 'from optparse import OptionParser\n'), ((1782, 1907), 'sfepy.postprocess.plot_quadrature.plot_quadrature', 'pq.plot_quadrature', (['None', 'options.geometry', 'options.order', 'options.min_radius', 'options.max_radius', 'options.show_colorbar'], {}), '(None, options.geometry, options.order, options.\n min_radius, options.max_radius, options.show_colorbar)\n', (1800, 1907), True, 'import sfepy.postprocess.plot_quadrature as pq\n'), ((1953, 1966), 'sfepy.postprocess.plot_quadrature.plt.show', 'pq.plt.show', ([], {}), '()\n', (1964, 1966), True, 'import sfepy.postprocess.plot_quadrature as pq\n')]
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import pickle
from tempfile import TemporaryFile

import numpy as np

from megengine.core import Buffer, Parameter, tensor
from megengine.test import assertTensorClose

def test_tensor_serialization():
    def tensor_eq(a, b):
        assert a.dtype == b.dtype
        assert a.device == b.device
        assert a.requires_grad == b.requires_grad
        assertTensorClose(a, b)

    with TemporaryFile() as f:
        data = np.random.randint(low=0, high=7, size=[233])
        a = tensor(data, device="xpux", dtype=np.int32)
        pickle.dump(a, f)
        f.seek(0)
        b = pickle.load(f)
        tensor_eq(a, b)

    with TemporaryFile() as f:
        a = Parameter(np.random.random(size=(233, 2)).astype(np.float32))
        pickle.dump(a, f)
        f.seek(0)
        b = pickle.load(f)
        assert isinstance(b, Parameter)
        tensor_eq(a, b)

    with TemporaryFile() as f:
        a = Buffer(np.random.random(size=(2, 233)).astype(np.float32))
        pickle.dump(a, f)
        f.seek(0)
        b = pickle.load(f)
        assert isinstance(b, Buffer)
        tensor_eq(a, b)
[ "megengine.core.tensor", "megengine.test.assertTensorClose" ]
[((733, 756), 'megengine.test.assertTensorClose', 'assertTensorClose', (['a', 'b'], {}), '(a, b)\n', (750, 756), False, 'from megengine.test import assertTensorClose\n'), ((767, 782), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (780, 782), False, 'from tempfile import TemporaryFile\n'), ((804, 848), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(7)', 'size': '[233]'}), '(low=0, high=7, size=[233])\n', (821, 848), True, 'import numpy as np\n'), ((861, 904), 'megengine.core.tensor', 'tensor', (['data'], {'device': '"""xpux"""', 'dtype': 'np.int32'}), "(data, device='xpux', dtype=np.int32)\n", (867, 904), False, 'from megengine.core import Buffer, Parameter, tensor\n'), ((913, 930), 'pickle.dump', 'pickle.dump', (['a', 'f'], {}), '(a, f)\n', (924, 930), False, 'import pickle\n'), ((961, 975), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (972, 975), False, 'import pickle\n'), ((1010, 1025), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (1023, 1025), False, 'from tempfile import TemporaryFile\n'), ((1114, 1131), 'pickle.dump', 'pickle.dump', (['a', 'f'], {}), '(a, f)\n', (1125, 1131), False, 'import pickle\n'), ((1162, 1176), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1173, 1176), False, 'import pickle\n'), ((1251, 1266), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {}), '()\n', (1264, 1266), False, 'from tempfile import TemporaryFile\n'), ((1352, 1369), 'pickle.dump', 'pickle.dump', (['a', 'f'], {}), '(a, f)\n', (1363, 1369), False, 'import pickle\n'), ((1400, 1414), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1411, 1414), False, 'import pickle\n'), ((1054, 1085), 'numpy.random.random', 'np.random.random', ([], {'size': '(233, 2)'}), '(size=(233, 2))\n', (1070, 1085), True, 'import numpy as np\n'), ((1292, 1323), 'numpy.random.random', 'np.random.random', ([], {'size': '(2, 233)'}), '(size=(2, 233))\n', (1308, 1323), True, 'import numpy as np\n')]
import typing as t

from sqlmodel import SQLModel, Field, Relationship

from datetime import datetime

from .discussions import DB_Discussion

class DB_Post(SQLModel, table=True):
    __tablename__ = 'posts'

    id: t.Optional[int] = Field(default=None, primary_key=True)
    """The ID of the post. This is handled by the database."""

    discussion_id: int = Field(foreign_key='discussions.id')
    discussion: t.Optional[DB_Discussion] = Relationship(back_populates='posts')
    """Discussion that this post belongs to."""
    number: int = Field(default=1)
    """The number/order of the post in the discussion."""

    created_at: datetime = Field(default=datetime.utcnow())
    """When was this post created. Default is now."""

    type: str = Field(max_length=100, default='comment')
    """The type of the post. Can be `'comment'` for standard post."""
    content: t.Text
    """The post's content, in HTML."""

    edited_at: t.Optional[datetime]
    """When was the post edited at?"""
    hidden_at: t.Optional[datetime]
    """When was the post hidden at?"""

    ip_address: t.Optional[str] = Field(max_length=45)
    """The IP address of the user that created the post."""
    is_private: bool = Field(default=False)
    """Whether or not the post is private."""
    is_approved: bool = Field(default=True)
    """Whether or not the post is approved."""
[ "sqlmodel.Relationship", "sqlmodel.Field" ]
[((235, 272), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (240, 272), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((362, 397), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""discussions.id"""'}), "(foreign_key='discussions.id')\n", (367, 397), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((442, 478), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""posts"""'}), "(back_populates='posts')\n", (454, 478), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((546, 562), 'sqlmodel.Field', 'Field', ([], {'default': '(1)'}), '(default=1)\n', (551, 562), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((751, 791), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)', 'default': '"""comment"""'}), "(max_length=100, default='comment')\n", (756, 791), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1108, 1128), 'sqlmodel.Field', 'Field', ([], {'max_length': '(45)'}), '(max_length=45)\n', (1113, 1128), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1213, 1233), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (1218, 1233), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1304, 1323), 'sqlmodel.Field', 'Field', ([], {'default': '(True)'}), '(default=True)\n', (1309, 1323), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((662, 679), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (677, 679), False, 'from datetime import datetime\n')]
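One detail worth flagging in the sample above: Field(default=datetime.utcnow()) evaluates once at import time, so every row would share that timestamp. A sketch of the usual per-row alternative follows; the class name is hypothetical and this is an assumption about the intent, not a change to the dataset row:

from datetime import datetime
from sqlmodel import Field, SQLModel

class DB_PostSketch(SQLModel):
    # default_factory is called on each instantiation instead of once at import
    created_at: datetime = Field(default_factory=datetime.utcnow)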
import megengine as mge
import megengine.functional as F
import numpy as np

def bilinear_sampler(img, coords, mode="bilinear", mask=False):
    """Wrapper for grid_sample, uses pixel coordinates"""
    H, W = img.shape[-2:]
    img = F.remap(img, coords, border_mode="constant")

    if mask:
        mask = (
            (coords[:, :, :, 0:1] < 0)
            | (coords[:, :, :, 0:1] > W - 1)
            | (coords[:, :, :, 1:2] < 0)
            | (coords[:, :, :, 1:2] > H - 1)
        )
        mask = F.logical_not(mask)
        return img, mask.astype("float32")

    return img

def coords_grid(batch, ht, wd):
    x_grid, y_grid = np.meshgrid(np.arange(wd), np.arange(ht))
    y_grid, x_grid = mge.tensor(y_grid, dtype="float32"), mge.tensor(
        x_grid, dtype="float32"
    )
    coords = F.stack([x_grid, y_grid], axis=0)
    coords = F.repeat(F.expand_dims(coords, axis=0), batch, axis=0)
    return coords

def manual_pad(x, pady, padx):
    if pady > 0:
        u = F.repeat(x[:, :, 0:1, :], pady, axis=2)
        d = F.repeat(x[:, :, -1:, :], pady, axis=2)
        x = F.concat([u, x, d], axis=2)
    if padx > 0:
        l = F.repeat(x[:, :, :, 0:1], padx, axis=3)
        r = F.repeat(x[:, :, :, -1:], padx, axis=3)
        x = F.concat([l, x, r], axis=3)
    return x
[ "megengine.functional.remap", "megengine.functional.stack", "megengine.tensor", "megengine.functional.expand_dims", "megengine.functional.concat", "megengine.functional.repeat", "megengine.functional.logical_not" ]
[((237, 281), 'megengine.functional.remap', 'F.remap', (['img', 'coords'], {'border_mode': '"""constant"""'}), "(img, coords, border_mode='constant')\n", (244, 281), True, 'import megengine.functional as F\n'), ((805, 838), 'megengine.functional.stack', 'F.stack', (['[x_grid, y_grid]'], {'axis': '(0)'}), '([x_grid, y_grid], axis=0)\n', (812, 838), True, 'import megengine.functional as F\n'), ((508, 527), 'megengine.functional.logical_not', 'F.logical_not', (['mask'], {}), '(mask)\n', (521, 527), True, 'import megengine.functional as F\n'), ((654, 667), 'numpy.arange', 'np.arange', (['wd'], {}), '(wd)\n', (663, 667), True, 'import numpy as np\n'), ((669, 682), 'numpy.arange', 'np.arange', (['ht'], {}), '(ht)\n', (678, 682), True, 'import numpy as np\n'), ((705, 740), 'megengine.tensor', 'mge.tensor', (['y_grid'], {'dtype': '"""float32"""'}), "(y_grid, dtype='float32')\n", (715, 740), True, 'import megengine as mge\n'), ((742, 777), 'megengine.tensor', 'mge.tensor', (['x_grid'], {'dtype': '"""float32"""'}), "(x_grid, dtype='float32')\n", (752, 777), True, 'import megengine as mge\n'), ((861, 890), 'megengine.functional.expand_dims', 'F.expand_dims', (['coords'], {'axis': '(0)'}), '(coords, axis=0)\n', (874, 890), True, 'import megengine.functional as F\n'), ((987, 1026), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, 0:1, :]', 'pady'], {'axis': '(2)'}), '(x[:, :, 0:1, :], pady, axis=2)\n', (995, 1026), True, 'import megengine.functional as F\n'), ((1039, 1078), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, -1:, :]', 'pady'], {'axis': '(2)'}), '(x[:, :, -1:, :], pady, axis=2)\n', (1047, 1078), True, 'import megengine.functional as F\n'), ((1091, 1118), 'megengine.functional.concat', 'F.concat', (['[u, x, d]'], {'axis': '(2)'}), '([u, x, d], axis=2)\n', (1099, 1118), True, 'import megengine.functional as F\n'), ((1148, 1187), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, :, 0:1]', 'padx'], {'axis': '(3)'}), '(x[:, :, :, 0:1], padx, axis=3)\n', (1156, 1187), True, 'import megengine.functional as F\n'), ((1200, 1239), 'megengine.functional.repeat', 'F.repeat', (['x[:, :, :, -1:]', 'padx'], {'axis': '(3)'}), '(x[:, :, :, -1:], padx, axis=3)\n', (1208, 1239), True, 'import megengine.functional as F\n'), ((1252, 1279), 'megengine.functional.concat', 'F.concat', (['[l, x, r]'], {'axis': '(3)'}), '([l, x, r], axis=3)\n', (1260, 1279), True, 'import megengine.functional as F\n')]
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import os
import sys

import pytest
from psycopg import connect as pg3_connect
from sqlalchemy import MetaData
from sqlmodel import Session, create_engine, text

from dbgen.configuration import config
from dbgen.core.entity import BaseEntity
from dbgen.core.metadata import meta_registry

@pytest.fixture()
def clear_registry():
    # Clear the tables in the metadata for the default base model
    BaseEntity.metadata.clear()
    # Clear the Models associated with the registry, to avoid warnings
    BaseEntity._sa_registry.dispose()
    yield
    BaseEntity.metadata.clear()
    BaseEntity._sa_registry.dispose()

@pytest.fixture(scope="module")
def sql_engine():
    dsn = os.environ.get('TEST_DSN', config.main_dsn)
    engine = create_engine(dsn)
    return engine

@pytest.fixture(scope="function")
def connection(sql_engine):
    """sql_engine connection"""
    metadata = MetaData()
    metadata.reflect(sql_engine)
    metadata.drop_all(sql_engine)
    connection = sql_engine.connect()
    yield connection
    connection.close()

@pytest.fixture(scope="function")
def session(connection):
    transaction = connection.begin()
    session = Session(bind=connection, autocommit=False, autoflush=True)
    yield session
    transaction.rollback()
    transaction.close()
    session.close()

@pytest.fixture(scope="function")
def seed_db(connection):
    connection.execute(text("CREATE table users (id serial primary key, name text);"))
    for user in range(100):
        connection.execute(text(f"INSERT into users(name) values ('user_{user}');"))
    connection.commit()
    yield
    connection.execute(text("drop table users;"))
    connection.commit()

@pytest.fixture(scope="function")
def make_db(connection):
    pass
    metadata = MetaData()
    metadata.reflect(connection)
    metadata.drop_all(connection)
    BaseEntity.metadata.create_all(connection)
    connection.commit()
    yield
    BaseEntity.metadata.drop_all(connection)
    connection.commit()

@pytest.fixture(scope="function")
def raw_connection(make_db, sql_engine):
    raw = sql_engine.raw_connection()
    yield raw
    raw.close()

@pytest.fixture(scope="function")
def raw_pg3_connection(make_db, sql_engine):
    connection = pg3_connect(str(sql_engine.url))
    yield connection
    connection.close()

@pytest.fixture
def debug_logger():
    custom_logger = logging.getLogger("dbgen")
    custom_logger.propagate = True
    custom_logger.setLevel(logging.DEBUG)
    log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s Test"
    formatter = logging.Formatter(log_format)
    console_handler = logging.StreamHandler(stream=sys.stdout)
    console_handler.setFormatter(formatter)
    custom_logger.addHandler(console_handler)
    return custom_logger

@pytest.fixture(scope='function')
def recreate_meta(connection):
    connection.execute(text(f'create schema if not exists {config.meta_schema}'))
    meta_registry.metadata.drop_all(connection)
    meta_registry.metadata.create_all(connection)
    yield
[ "sqlmodel.create_engine", "sqlmodel.Session", "sqlmodel.text" ]
[((903, 919), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (917, 919), False, 'import pytest\n'), ((1232, 1262), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1246, 1262), False, 'import pytest\n'), ((1388, 1420), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1402, 1420), False, 'import pytest\n'), ((1659, 1691), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1673, 1691), False, 'import pytest\n'), ((1919, 1951), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1933, 1951), False, 'import pytest\n'), ((2288, 2320), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2302, 2320), False, 'import pytest\n'), ((2602, 2634), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2616, 2634), False, 'import pytest\n'), ((2747, 2779), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2761, 2779), False, 'import pytest\n'), ((3386, 3418), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (3400, 3418), False, 'import pytest\n'), ((1012, 1039), 'dbgen.core.entity.BaseEntity.metadata.clear', 'BaseEntity.metadata.clear', ([], {}), '()\n', (1037, 1039), False, 'from dbgen.core.entity import BaseEntity\n'), ((1115, 1148), 'dbgen.core.entity.BaseEntity._sa_registry.dispose', 'BaseEntity._sa_registry.dispose', ([], {}), '()\n', (1146, 1148), False, 'from dbgen.core.entity import BaseEntity\n'), ((1163, 1190), 'dbgen.core.entity.BaseEntity.metadata.clear', 'BaseEntity.metadata.clear', ([], {}), '()\n', (1188, 1190), False, 'from dbgen.core.entity import BaseEntity\n'), ((1195, 1228), 'dbgen.core.entity.BaseEntity._sa_registry.dispose', 'BaseEntity._sa_registry.dispose', ([], {}), '()\n', (1226, 1228), False, 'from dbgen.core.entity import BaseEntity\n'), ((1291, 1334), 'os.environ.get', 'os.environ.get', (['"""TEST_DSN"""', 'config.main_dsn'], {}), "('TEST_DSN', config.main_dsn)\n", (1305, 1334), False, 'import os\n'), ((1348, 1366), 'sqlmodel.create_engine', 'create_engine', (['dsn'], {}), '(dsn)\n', (1361, 1366), False, 'from sqlmodel import Session, create_engine, text\n'), ((1496, 1506), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (1504, 1506), False, 'from sqlalchemy import MetaData\n'), ((1768, 1826), 'sqlmodel.Session', 'Session', ([], {'bind': 'connection', 'autocommit': '(False)', 'autoflush': '(True)'}), '(bind=connection, autocommit=False, autoflush=True)\n', (1775, 1826), False, 'from sqlmodel import Session, create_engine, text\n'), ((2371, 2381), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (2379, 2381), False, 'from sqlalchemy import MetaData\n'), ((2453, 2495), 'dbgen.core.entity.BaseEntity.metadata.create_all', 'BaseEntity.metadata.create_all', (['connection'], {}), '(connection)\n', (2483, 2495), False, 'from dbgen.core.entity import BaseEntity\n'), ((2534, 2574), 'dbgen.core.entity.BaseEntity.metadata.drop_all', 'BaseEntity.metadata.drop_all', (['connection'], {}), '(connection)\n', (2562, 2574), False, 'from dbgen.core.entity import BaseEntity\n'), ((2977, 3003), 'logging.getLogger', 'logging.getLogger', (['"""dbgen"""'], {}), "('dbgen')\n", (2994, 3003), False, 'import logging\n'), ((3175, 3204), 'logging.Formatter', 'logging.Formatter', (['log_format'], {}), '(log_format)\n', (3192, 3204), False, 
'import logging\n'), ((3227, 3267), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stdout'}), '(stream=sys.stdout)\n', (3248, 3267), False, 'import logging\n'), ((3536, 3579), 'dbgen.core.metadata.meta_registry.metadata.drop_all', 'meta_registry.metadata.drop_all', (['connection'], {}), '(connection)\n', (3567, 3579), False, 'from dbgen.core.metadata import meta_registry\n'), ((3584, 3629), 'dbgen.core.metadata.meta_registry.metadata.create_all', 'meta_registry.metadata.create_all', (['connection'], {}), '(connection)\n', (3617, 3629), False, 'from dbgen.core.metadata import meta_registry\n'), ((2000, 2062), 'sqlmodel.text', 'text', (['"""CREATE table users (id serial primary key, name text);"""'], {}), "('CREATE table users (id serial primary key, name text);')\n", (2004, 2062), False, 'from sqlmodel import Session, create_engine, text\n'), ((2234, 2259), 'sqlmodel.text', 'text', (['"""drop table users;"""'], {}), "('drop table users;')\n", (2238, 2259), False, 'from sqlmodel import Session, create_engine, text\n'), ((3473, 3530), 'sqlmodel.text', 'text', (['f"""create schema if not exists {config.meta_schema}"""'], {}), "(f'create schema if not exists {config.meta_schema}')\n", (3477, 3530), False, 'from sqlmodel import Session, create_engine, text\n'), ((2119, 2175), 'sqlmodel.text', 'text', (['f"""INSERT into users(name) values (\'user_{user}\');"""'], {}), '(f"INSERT into users(name) values (\'user_{user}\');")\n', (2123, 2175), False, 'from sqlmodel import Session, create_engine, text\n')]
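A minimal sketch of a test that would consume the seed_db and session fixtures defined above; the test itself is hypothetical and not part of the sample:

from sqlmodel import text

def test_users_seeded(seed_db, session):
    # seed_db creates and populates the users table; session runs inside a transaction
    count = session.execute(text("select count(*) from users")).scalar()
    assert count == 100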
from unittest.mock import patch

from sqlmodel import create_engine

from ...conftest import get_testing_print_function

expected_calls = [
    [
        [
            {
                "id": 7,
                "name": "Captain North America",
                "secret_name": "<NAME>",
                "age": 93,
            }
        ]
    ]
]

def test_tutorial(clear_sqlmodel):
    from docs_src.tutorial.offset_and_limit import tutorial003 as mod

    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    calls = []

    new_print = get_testing_print_function(calls)

    with patch("builtins.print", new=new_print):
        mod.main()
    assert calls == expected_calls
[ "sqlmodel.create_engine" ]
[((502, 531), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (515, 531), False, 'from sqlmodel import create_engine\n'), ((608, 646), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (613, 646), False, 'from unittest.mock import patch\n'), ((656, 666), 'docs_src.tutorial.offset_and_limit.tutorial003.main', 'mod.main', ([], {}), '()\n', (664, 666), True, 'from docs_src.tutorial.offset_and_limit import tutorial003 as mod\n')]
from time import sleep

from sqlmodel import select

from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.metrics import prom_metrics
from icon_governance.models.preps import Prep
from icon_governance.utils.rpc import convert_hex_int, getStake, post_rpc_json

def get_prep_stake(session):
    result = session.execute(select(Prep))
    preps = result.scalars().all()

    for prep in preps:
        prep.stake = convert_hex_int(post_rpc_json(getStake(prep.address))["stake"]) / 1e18
        session.merge(prep)
    session.commit()

def prep_stake_cron(session):
    while True:
        logger.info("Starting stake cron")
        get_prep_stake(session)
        logger.info("Prep stake ran.")
        prom_metrics.preps_stake_cron_ran.inc()
        sleep(settings.CRON_SLEEP_SEC)

if __name__ == "__main__":
    from icon_governance.db import session_factory

    get_prep_stake(session_factory())
[ "sqlmodel.select" ]
[((370, 382), 'sqlmodel.select', 'select', (['Prep'], {}), '(Prep)\n', (376, 382), False, 'from sqlmodel import select\n'), ((645, 679), 'icon_governance.log.logger.info', 'logger.info', (['"""Starting stake cron"""'], {}), "('Starting stake cron')\n", (656, 679), False, 'from icon_governance.log import logger\n'), ((720, 750), 'icon_governance.log.logger.info', 'logger.info', (['"""Prep stake ran."""'], {}), "('Prep stake ran.')\n", (731, 750), False, 'from icon_governance.log import logger\n'), ((759, 798), 'icon_governance.metrics.prom_metrics.preps_stake_cron_ran.inc', 'prom_metrics.preps_stake_cron_ran.inc', ([], {}), '()\n', (796, 798), False, 'from icon_governance.metrics import prom_metrics\n'), ((807, 837), 'time.sleep', 'sleep', (['settings.CRON_SLEEP_SEC'], {}), '(settings.CRON_SLEEP_SEC)\n', (812, 837), False, 'from time import sleep\n'), ((938, 955), 'icon_governance.db.session_factory', 'session_factory', ([], {}), '()\n', (953, 955), False, 'from icon_governance.db import session_factory\n'), ((494, 516), 'icon_governance.utils.rpc.getStake', 'getStake', (['prep.address'], {}), '(prep.address)\n', (502, 516), False, 'from icon_governance.utils.rpc import convert_hex_int, getStake, post_rpc_json\n')]
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import validator
from datetime import datetime, date
from fastapi import HTTPException
import re

class AppUser(SQLModel, table=True):
    """Create an SQLModel for users"""

    id: Optional[int] = Field(default=None, primary_key=True)
    username: str
    first_name: str
    last_name: str
    email: str
    role_id: int
    team_id: Optional[int] = None
    start_date: date
    created_at: datetime
    updated_at: datetime
    is_active: bool
    __table_args__ = {"schema": "app_db"}

    @validator("first_name", always=True)
    def valid_first_name(cls, first_name):
        assert first_name.replace(
            " ", ""
        ).isalpha(), "only alphabet letters allowed in first name"
        if first_name[0].isupper() == False:
            raise HTTPException(
                status_code=400, detail="first name should start with a capital letter"
            )
        return first_name

    @validator("last_name", always=True)
    def valid_last_name(cls, ln_input):
        assert ln_input.replace(
            " ", ""
        ).isalpha(), "only alphabet letters allowed in last name"
        return ln_input

    @validator("email", always=True)
    def valid_email(cls, email_input):
        regex = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
        assert re.fullmatch(regex, email_input), "email format incorrect"
        return email_input
[ "sqlmodel.Field" ]
[((286, 323), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (291, 323), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((587, 623), 'pydantic.validator', 'validator', (['"""first_name"""'], {'always': '(True)'}), "('first_name', always=True)\n", (596, 623), False, 'from pydantic import validator\n'), ((1001, 1036), 'pydantic.validator', 'validator', (['"""last_name"""'], {'always': '(True)'}), "('last_name', always=True)\n", (1010, 1036), False, 'from pydantic import validator\n'), ((1226, 1257), 'pydantic.validator', 'validator', (['"""email"""'], {'always': '(True)'}), "('email', always=True)\n", (1235, 1257), False, 'from pydantic import validator\n'), ((1383, 1415), 're.fullmatch', 're.fullmatch', (['regex', 'email_input'], {}), '(regex, email_input)\n', (1395, 1415), False, 'import re\n'), ((852, 943), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""first name should start with a capital letter"""'}), "(status_code=400, detail=\n 'first name should start with a capital letter')\n", (865, 943), False, 'from fastapi import HTTPException\n')]
from select import select

from app.schemas.common import (
    IGetResponseBase,
    IPostResponseBase,
    IDeleteResponseBase,
)
from app.utils.text_nlp import analyze_text
from app.schemas.text_inference import (
    TextInferenceCreate,
    TextInferenceRead,
)
from fastapi_pagination import Page, Params
from sqlmodel.ext.asyncio.session import AsyncSession
from fastapi import APIRouter, Depends, HTTPException, Query
from app.api import deps
from app import crud
from app.models import TextInference
from app.models import TextInferenceBase
from app.models.user import User
from sqlmodel import select

router = APIRouter()

@router.get(
    "/text-classification-inferences/",
    response_model=IGetResponseBase[Page[TextInferenceRead]],
)
async def get_text_classification_inferences(
    params: Params = Depends(),
    db_session: AsyncSession = Depends(deps.get_db),
    current_user: User = Depends(deps.get_current_active_user),
):
    inferences = await crud.text_inference.get_multi_paginated(
        db_session, params=params
    )
    return IGetResponseBase[Page[TextInferenceRead]](data=inferences)

@router.get(
    "/text-classification-inferences/order_by_created_at/",
    response_model=IGetResponseBase[Page[TextInferenceRead]],
)
async def text_classification_inferences_order_by_created_at(
    params: Params = Depends(),
    db_session: AsyncSession = Depends(deps.get_db),
    current_user: User = Depends(deps.get_current_active_user),
):
    query = select(TextInference).order_by(TextInference.created_at)
    inferences = await crud.text_inference.get_multi_paginated(
        db_session, query=query, params=params
    )
    return IGetResponseBase[Page[TextInferenceRead]](data=inferences)

@router.post(
    "/text-classification-predict/", response_model=IPostResponseBase[TextInferenceRead]
)
async def predict(
    request: TextInferenceBase,
    db_session: AsyncSession = Depends(deps.get_db),
    current_user: User = Depends(deps.get_current_active_user),
):
    text = request.text
    result = await analyze_text(text)
    text = result[0]
    res = result[1]
    inference = TextInferenceCreate(text=text, result=res)
    my_inference = await crud.text_inference.create_inference(
        db_session, obj_in=inference, user_id=current_user.id
    )
    return IPostResponseBase(data=TextInferenceRead.from_orm(my_inference))
[ "sqlmodel.select" ]
[((620, 631), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (629, 631), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((818, 827), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (825, 827), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((860, 880), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (867, 880), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((907, 944), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (914, 944), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1345, 1354), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (1352, 1354), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1387, 1407), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1394, 1407), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1434, 1471), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1441, 1471), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1921, 1941), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1928, 1941), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1968, 2005), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1975, 2005), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((2131, 2173), 'app.schemas.text_inference.TextInferenceCreate', 'TextInferenceCreate', ([], {'text': 'text', 'result': 'res'}), '(text=text, result=res)\n', (2150, 2173), False, 'from app.schemas.text_inference import TextInferenceCreate, TextInferenceRead\n'), ((972, 1038), 'app.crud.text_inference.get_multi_paginated', 'crud.text_inference.get_multi_paginated', (['db_session'], {'params': 'params'}), '(db_session, params=params)\n', (1011, 1038), False, 'from app import crud\n'), ((1568, 1647), 'app.crud.text_inference.get_multi_paginated', 'crud.text_inference.get_multi_paginated', (['db_session'], {'query': 'query', 'params': 'params'}), '(db_session, query=query, params=params)\n', (1607, 1647), False, 'from app import crud\n'), ((2054, 2072), 'app.utils.text_nlp.analyze_text', 'analyze_text', (['text'], {}), '(text)\n', (2066, 2072), False, 'from app.utils.text_nlp import analyze_text\n'), ((2200, 2296), 'app.crud.text_inference.create_inference', 'crud.text_inference.create_inference', (['db_session'], {'obj_in': 'inference', 'user_id': 'current_user.id'}), '(db_session, obj_in=inference, user_id=\n current_user.id)\n', (2236, 2296), False, 'from app import crud\n'), ((1488, 1509), 'sqlmodel.select', 'select', (['TextInference'], {}), '(TextInference)\n', (1494, 1509), False, 'from sqlmodel import select\n'), ((2341, 2381), 'app.schemas.text_inference.TextInferenceRead.from_orm', 'TextInferenceRead.from_orm', (['my_inference'], {}), '(my_inference)\n', (2367, 2381), False, 'from app.schemas.text_inference import TextInferenceCreate, TextInferenceRead\n')]
import os

from fastapi import FastAPI
from sqlmodel import create_engine, SQLModel

from .configurations import env
from .models import *  # init models package

class AppFactory(object):
    def __init__(self):
        self._app = None

    @staticmethod
    def _get_all_router():
        from pigeon.blog.services.routers import __all_routers__

        return __all_routers__

    def _apply_router(self):
        if not isinstance(self._app, FastAPI):
            raise RuntimeError("self._app isn't initialized.")
        routers = AppFactory._get_all_router()
        for r in routers:
            self._app.include_router(r)

    def _ensure_sql(self):
        if not isinstance(self._app, FastAPI):
            return

        @self._app.on_event("startup")
        def sql_startup():
            engine = get_engine()
            SQLModel.metadata.create_all(engine)

        @self._app.on_event("shutdown")
        def sql_shutdown():
            pass

    def __call__(self, *args, **kwargs):
        self._app = FastAPI(
            title="Pigeon Blog",
        )
        self._apply_router()
        self._ensure_sql()
        return self._app
[ "sqlmodel.SQLModel.metadata.create_all" ]
[((1026, 1054), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Pigeon Blog"""'}), "(title='Pigeon Blog')\n", (1033, 1054), False, 'from fastapi import FastAPI\n'), ((841, 877), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (869, 877), False, 'from sqlmodel import create_engine, SQLModel\n')]
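A minimal usage sketch for the AppFactory row above, not part of the dataset row itself; it only shows how the factory defined there would be called to obtain the ASGI app, and the commented uvicorn invocation is an assumption.

# Illustrative sketch (not from the dataset): obtain the FastAPI app via the factory.
factory = AppFactory()
app = factory()  # __call__ builds FastAPI(title="Pigeon Blog"), includes routers, registers the SQLModel startup hook

# Serving it locally is an assumption, e.g. with uvicorn:
# import uvicorn; uvicorn.run(app, host="127.0.0.1", port=8000)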
#!/usr/bin/env python3 # Copyright (c) 2014-2021 Megvii Inc. All rights reserved. from typing import Callable, Union import megengine as mge import megengine.functional as F import megengine.module as M from .activations import activation __all__ = ["conv2d", "norm2d", "pool2d", "gap2d", "linear", "SE", "DropPath"] def conv2d( w_in: int, w_out: int, k: int, *, stride: int = 1, dilation: int = 1, groups: int = 1, bias: bool = False, ) -> M.Conv2d: """Helper for building a conv2d layer. It will calculate padding automatically. Args: w_in: input width. w_out: output width. k: kernel size. stride: stride. Default: ``1`` dilation: dilation. Default: ``1`` groups: groups. Default: ``1`` bias: enable bias or not. Default: ``False`` Returns: A conv2d module. """ assert k % 2 == 1, "Only odd size kernels supported to avoid padding issues." s, p, d, g, b = stride, (k - 1) * dilation // 2, dilation, groups, bias return M.Conv2d(w_in, w_out, k, stride=s, padding=p, dilation=d, groups=g, bias=b) def norm2d(name: Union[str, Callable], w_in: int, **kwargs) -> M.Module: """Helper for building a norm2d layer. Args: norm_name: normalization name, supports ``None``, ``"BN"``, ``"GN"``, ``"IN"``, ``"LN"`` and ``"SyncBN"``. w_in: input width. Returns: A norm2d module. """ if name is None: return M.Identity() if callable(name): return name(w_in, **kwargs) if isinstance(name, str): norm_funcs = { "BN": M.BatchNorm2d, "GN": M.GroupNorm, "IN": M.InstanceNorm, "LN": M.LayerNorm, "SyncBN": M.SyncBatchNorm, } if name in norm_funcs.keys(): return norm_funcs[name](w_in, **kwargs) raise ValueError(f"Norm name '{name}' not supported") def pool2d(k: int, *, stride: int = 1, name: str = "max") -> M.Module: """Helper for building a pool2d layer. Args: k: kernel size. stride: stride. Default: ``1`` name: pooling name, supports ``"avg"`` and ``"max"``. Returns: A pool2d module. """ assert k % 2 == 1, "Only odd size kernels supported to avoid padding issues." pool_funcs = { "avg": M.AvgPool2d, "max": M.MaxPool2d, } if name not in pool_funcs.keys(): raise ValueError(f"Pool name '{name}' not supported") return pool_funcs[name](k, stride=stride, padding=(k - 1) // 2) def gap2d(shape=1) -> M.AdaptiveAvgPool2d: """Helper for building a gap2d layer. Args: shape: output shape. Default: ``1`` Returns: A gap2d module. """ return M.AdaptiveAvgPool2d(shape) def linear(w_in: int, w_out: int, *, bias: bool = False) -> M.Linear: """Helper for building a linear layer. Args: w_in: input width. w_out: output width. bias: enable bias or not. Default: ``False`` Returns: A linear module. """ return M.Linear(w_in, w_out, bias=bias) class SE(M.Module): """Squeeze-and-Excitation (SE) block: AvgPool, FC, Act, FC, Sigmoid. Args: w_in: input width. w_se: se width. act_name: activation name. approx_sigmoid: approximated sigmoid function. Attributes: avg_pool: gad2d layer. f_ex: sequantial which conbines conv2d -> act -> conv2d -> sigmoid. """ def __init__(self, w_in: int, w_se: int, act_name: str, approx_sigmoid: bool = False): super().__init__() self.avg_pool = gap2d() self.f_ex = M.Sequential( conv2d(w_in, w_se, 1, bias=True), activation(act_name), conv2d(w_se, w_in, 1, bias=True), activation("hsigmoid") if approx_sigmoid else M.Sigmoid(), ) def forward(self, x: mge.Tensor) -> mge.Tensor: return x * self.f_ex(self.avg_pool(x)) class DropPath(M.Dropout): """DropPath block. Args: drop_prob: the probability to drop (set to zero) each path. 
""" def forward(self, x: mge.Tensor): if not self.training or self.drop_prob == 0.0: return x shape = (x.shape[0],) + (1,) * (x.ndim - 1) mask = F.ones(shape) mask = F.dropout(mask, self.drop_prob, training=self.training) return x * mask
[ "megengine.module.AdaptiveAvgPool2d", "megengine.module.Identity", "megengine.module.Sigmoid", "megengine.module.Linear", "megengine.functional.dropout", "megengine.functional.ones", "megengine.module.Conv2d" ]
[((1058, 1133), 'megengine.module.Conv2d', 'M.Conv2d', (['w_in', 'w_out', 'k'], {'stride': 's', 'padding': 'p', 'dilation': 'd', 'groups': 'g', 'bias': 'b'}), '(w_in, w_out, k, stride=s, padding=p, dilation=d, groups=g, bias=b)\n', (1066, 1133), True, 'import megengine.module as M\n'), ((2780, 2806), 'megengine.module.AdaptiveAvgPool2d', 'M.AdaptiveAvgPool2d', (['shape'], {}), '(shape)\n', (2799, 2806), True, 'import megengine.module as M\n'), ((3100, 3132), 'megengine.module.Linear', 'M.Linear', (['w_in', 'w_out'], {'bias': 'bias'}), '(w_in, w_out, bias=bias)\n', (3108, 3132), True, 'import megengine.module as M\n'), ((1500, 1512), 'megengine.module.Identity', 'M.Identity', ([], {}), '()\n', (1510, 1512), True, 'import megengine.module as M\n'), ((4325, 4338), 'megengine.functional.ones', 'F.ones', (['shape'], {}), '(shape)\n', (4331, 4338), True, 'import megengine.functional as F\n'), ((4354, 4409), 'megengine.functional.dropout', 'F.dropout', (['mask', 'self.drop_prob'], {'training': 'self.training'}), '(mask, self.drop_prob, training=self.training)\n', (4363, 4409), True, 'import megengine.functional as F\n'), ((3881, 3892), 'megengine.module.Sigmoid', 'M.Sigmoid', ([], {}), '()\n', (3890, 3892), True, 'import megengine.module as M\n')]
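A small sketch composing the layer helpers from the row above into a forward pass; it assumes conv2d, norm2d, pool2d, gap2d and linear from that module are in scope, and the shapes and widths are illustrative.

import megengine.functional as F
import megengine.module as M

# Illustrative sketch (not from the dataset); assumes the helpers above are importable.
stem = M.Sequential(
    conv2d(3, 8, 3, stride=2),   # 3x3 conv, padding derived automatically
    norm2d("BN", 8),             # resolved to M.BatchNorm2d by name
    pool2d(3, stride=2),         # "max" pooling by default
)
head = linear(8, 10, bias=True)

x = F.ones((1, 3, 32, 32))
feat = gap2d()(stem(x))              # -> (1, 8, 1, 1)
logits = head(feat.reshape(1, -1))   # -> (1, 10)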
import uuid from datetime import datetime from typing import Optional from sqlalchemy import Column from sqlalchemy.dialects.postgresql import JSON from sqlmodel import Field, Relationship from api.db.models.base import BaseModel, BaseTable class OutOfBandBase(BaseModel): msg_type: str = Field(nullable=False) msg: dict = Field(default={}, sa_column=Column(JSON)) sender_id: uuid.UUID = None recipient_id: uuid.UUID = None sandbox_id: uuid.UUID = None action: Optional[str] = Field(nullable=True) class OutOfBand(OutOfBandBase, BaseTable, table=True): __tablename__ = "out_of_band" # optional else, required on save sender_id: uuid.UUID = Field(foreign_key="line_of_business.id") recipient_id: uuid.UUID = Field(foreign_key="line_of_business.id") sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id") # relationships sender: Optional["Lob"] = Relationship( # noqa: F821 sa_relationship_kwargs={ "primaryjoin": "OutOfBand.sender_id==Lob.id", "lazy": "joined", } ) recipient: Optional["Lob"] = Relationship( # noqa: F821 sa_relationship_kwargs={ "primaryjoin": "OutOfBand.recipient_id==Lob.id", "lazy": "joined", } ) class Config: arbitrary_types_allowed = True class OutOfBandCreate(OutOfBandBase): pass class OutOfBandRead(OutOfBandBase): id: uuid.UUID created_at: datetime updated_at: datetime class OutOfBandUpdate(BaseModel): id: uuid.UUID name: Optional[str] = None action: Optional[str] = None
[ "sqlmodel.Relationship", "sqlmodel.Field" ]
[((297, 318), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (302, 318), False, 'from sqlmodel import Field, Relationship\n'), ((505, 525), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)'}), '(nullable=True)\n', (510, 525), False, 'from sqlmodel import Field, Relationship\n'), ((683, 723), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""line_of_business.id"""'}), "(foreign_key='line_of_business.id')\n", (688, 723), False, 'from sqlmodel import Field, Relationship\n'), ((754, 794), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""line_of_business.id"""'}), "(foreign_key='line_of_business.id')\n", (759, 794), False, 'from sqlmodel import Field, Relationship\n'), ((823, 854), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""sandbox.id"""'}), "(foreign_key='sandbox.id')\n", (828, 854), False, 'from sqlmodel import Field, Relationship\n'), ((906, 1011), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'primaryjoin': 'OutOfBand.sender_id==Lob.id', 'lazy': 'joined'}"}), "(sa_relationship_kwargs={'primaryjoin':\n 'OutOfBand.sender_id==Lob.id', 'lazy': 'joined'})\n", (918, 1011), False, 'from sqlmodel import Field, Relationship\n'), ((1104, 1212), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'primaryjoin': 'OutOfBand.recipient_id==Lob.id', 'lazy': 'joined'}"}), "(sa_relationship_kwargs={'primaryjoin':\n 'OutOfBand.recipient_id==Lob.id', 'lazy': 'joined'})\n", (1116, 1212), False, 'from sqlmodel import Field, Relationship\n'), ((363, 375), 'sqlalchemy.Column', 'Column', (['JSON'], {}), '(JSON)\n', (369, 375), False, 'from sqlalchemy import Column\n')]
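A hedged sketch of building the create-schema defined in the row above; the UUIDs, message type and action are made-up values, not taken from the dataset.

# Illustrative sketch (not from the dataset row).
import uuid

oob = OutOfBandCreate(
    msg_type="invitation",        # assumed example value
    msg={"label": "demo"},
    sender_id=uuid.uuid4(),
    recipient_id=uuid.uuid4(),
    sandbox_id=uuid.uuid4(),
    action="connect",
)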
from typing import Optional from sqlmodel import Field, Session, SQLModel, create_engine, select class Hero(SQLModel, table=True): id: Optional[int] = Field(default=None, primary_key=True) name: str secret_name: str age: Optional[int] = None sqlite_file_name = "database.db" sqlite_url = f"sqlite:///{sqlite_file_name}" engine = create_engine(sqlite_url, echo=True) def create_db_and_tables(): SQLModel.metadata.create_all(engine) def create_heroes(): hero_1 = Hero(name="Deadpond", secret_name="<NAME>") hero_2 = Hero(name="Spider-Boy", secret_name="<NAME>") hero_3 = Hero(name="Rusty-Man", secret_name="<NAME>", age=48) hero_4 = Hero(name="Tarantula", secret_name="<NAME>", age=32) hero_5 = Hero(name="<NAME>", secret_name="<NAME>", age=35) hero_6 = Hero(name="<NAME>", secret_name="<NAME>", age=36) hero_7 = Hero(name="Captain North America", secret_name="<NAME>", age=93) with Session(engine) as session: session.add(hero_1) session.add(hero_2) session.add(hero_3) session.add(hero_4) session.add(hero_5) session.add(hero_6) session.add(hero_7) session.commit() def select_heroes(): with Session(engine) as session: statement = select(Hero).where(Hero.id == 1) results = session.exec(statement) hero = results.first() print("Hero:", hero) def main(): create_db_and_tables() create_heroes() select_heroes() if __name__ == "__main__": main()
[ "sqlmodel.SQLModel.metadata.create_all", "sqlmodel.Session", "sqlmodel.Field", "sqlmodel.select", "sqlmodel.create_engine" ]
[((351, 387), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (364, 387), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((158, 195), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (163, 195), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((422, 458), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (450, 458), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((944, 959), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (951, 959), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1226, 1241), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1233, 1241), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1274, 1286), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1280, 1286), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')]
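The tutorial row above stops at create and select; a hedged continuation sketch showing the matching update and delete steps with the same Session/select API. The function name is hypothetical and not part of the row.

def update_and_delete_heroes():
    # Illustrative continuation of the row above (not from the dataset).
    with Session(engine) as session:
        hero = session.exec(select(Hero).where(Hero.name == "Spider-Boy")).first()
        if hero is not None:
            hero.age = 16          # update a field
            session.add(hero)
            session.commit()
            session.refresh(hero)

        hero = session.exec(select(Hero).where(Hero.name == "Deadpond")).first()
        if hero is not None:
            session.delete(hero)   # remove the row
            session.commit()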
"""Add schools Revision ID: 423e059e8b64 Revises: 58d2280520b8 Create Date: 2022-02-12 07:44:42.189067+00:00 """ import sqlalchemy as sa import sqlmodel from alembic import op # revision identifiers, used by Alembic. revision = "423e059e8b64" down_revision = "58d2280520b8" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( "schools", sa.Column("id", sa.Integer(), nullable=False), sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.PrimaryKeyConstraint("id"), ) op.add_column("applications", sa.Column("school_id", sa.Integer(), nullable=False)) op.create_foreign_key(None, "applications", "schools", ["school_id"], ["id"]) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_constraint(None, "applications", type_="foreignkey") op.drop_column("applications", "school_id") op.drop_table("schools") # ### end Alembic commands ###
[ "sqlmodel.sql.sqltypes.AutoString" ]
[((710, 787), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""schools"""', "['school_id']", "['id']"], {}), "(None, 'applications', 'schools', ['school_id'], ['id'])\n", (731, 787), False, 'from alembic import op\n'), ((912, 972), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""applications"""'], {'type_': '"""foreignkey"""'}), "(None, 'applications', type_='foreignkey')\n", (930, 972), False, 'from alembic import op\n'), ((977, 1020), 'alembic.op.drop_column', 'op.drop_column', (['"""applications"""', '"""school_id"""'], {}), "('applications', 'school_id')\n", (991, 1020), False, 'from alembic import op\n'), ((1025, 1049), 'alembic.op.drop_table', 'op.drop_table', (['"""schools"""'], {}), "('schools')\n", (1038, 1049), False, 'from alembic import op\n'), ((581, 610), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (604, 610), True, 'import sqlalchemy as sa\n'), ((463, 475), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (473, 475), True, 'import sqlalchemy as sa\n'), ((520, 554), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (552, 554), False, 'import sqlmodel\n'), ((675, 687), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (685, 687), True, 'import sqlalchemy as sa\n')]
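A sketch of applying the migration in the row above; it is normally run from the Alembic CLI, and the programmatic equivalent below assumes an alembic.ini in the working directory.

# Illustrative sketch (not from the dataset): run the upgrade/downgrade programmatically.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")            # assumed project config file
command.upgrade(cfg, "head")         # runs upgrade(): creates "schools" and the FK on applications
# command.downgrade(cfg, "-1")       # would run downgrade(), dropping them again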
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License") # # Copyright (c) 2014-2020 Megvii Inc. All rights reserved. # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. from collections import OrderedDict from enum import Enum from functools import cmp_to_key from typing import Set # pylint: disable=unused-import from typing import Callable, Dict, Sequence import numpy as np from megengine import Tensor from megengine.functional import sqrt from ..converter_ir.ir_graph import IRGraph from .ir_op import ( AddOpr, Conv2dOpr, ConvRelu2dOpr, Deconv2dOpr, DropoutOpr, ExpOpr, FlattenOpr, FuseMulAdd3Opr, GetSubTensorOpr, HardSigmoidOpr, HardSwishOpr, IdentityOpr, LeakyReluOpr, MulOpr, OpBase, PadOpr, ReduceOpr, ReluOpr, ReshapeOpr, ResizeOpr, SoftmaxOpr, SqueezeOpr, SubOpr, TanHOpr, TransposeOpr, TrueDivOpr, _PoolOpr, ) from .ir_tensor import AxisOrder, IRTensor class TransformerRule(Enum): # general rules NOPE = 1 # for TFLite REDUCE_AXIS_AS_INPUT = 100 REMOVE_RESHAPE_INPUT = 101 # FUSE_FOR_RELU6 pass should happen before FUSE_ACTIVATION FUSE_FOR_RELU6 = 102 ## EXPAND_CONVRELU = 102.1 CONV_ADD_ZERO_BIAS = 103 FUSE_FOR_CONV_BIAS = 103.1 FUSE_CONV_BN = 104 DECONV_ADD_ZERO_BIAS = 105 # DEPTHWISE_CONV_RESHAPE_WEIGHT requirs RESHAPE_BIAS_TO_1DIM DEPTHWISE_CONV_RESHAPE_WEIGHT = 106 FUSE_SOFTMAX = 107 # RESHAPE_BIAS_TO_1DIM should happen before DECONV_SHAPE_AS_INPUT RESHAPE_BIAS_TO_1DIM = 108 DECONV_SHAPE_AS_INPUT = 109 FUSE_ASTYPE = 110 ## PADDING_FOR_CONV_AND_POOLING = 111 TRANSPOSE_PATTERN_AS_INPUT = 112 # FUSE_FOR_LEAKY_RELU should happen before EXPAND_MUL_ADD3 FUSE_FOR_LEAKY_RELU = 113 EXPAND_MUL_ADD3 = 114 EXPAND_ADD_SIGMOID = 115 ## FUSE_FOR_DECONV_BIAS = 117 FUSE_FOR_FULLY_CONNECTED = 118 ## # for TFLite Converter SLICE_PARAMS_AS_INPUTS_AND_MAKE_SQUEEZE = 119 RESIZE_PARAMS_AS_INPUT = 120 REPLACE_FLATTEN_TO_RESHAPE = 120.1 # remove reshape REMOVE_RESHAPE_REALTED_OP = 121 REMOVE_DROPOUT = 122 FUSE_ACTIVATION = 123 REMOVE_IDENTITY = 124 REMOVE_RELU = 125 REMOVE_UNRELATED_IROP = 130 ADD_FAKE_HSIGMOID_OUT = 131 RENAME_CAFFE_LAYER_TENSOR = 132 def cmp_rules(a, b): if a.value < b.value: return -1 if a.value > b.value: return 1 return 0 class IRTransform: def __init__(self, transformer_options): if not isinstance(transformer_options, Sequence): transformer_options = [ transformer_options, ] # bias of depthwise_conv must be 1 dim if TransformerRule.DEPTHWISE_CONV_RESHAPE_WEIGHT in transformer_options: if TransformerRule.RESHAPE_BIAS_TO_1DIM not in transformer_options: transformer_options.append(TransformerRule.RESHAPE_BIAS_TO_1DIM) self.trans_options = sorted(transformer_options, key=cmp_to_key(cmp_rules)) def transform(self, ir_graph): for option in self.trans_options: TRANSFORMMAP[option](ir_graph) return ir_graph TRANSFORMMAP: Dict[Enum, Callable] = {} def _register_tranformation_rule(transformer_option): def callback(impl): TRANSFORMMAP[transformer_option] = impl return callback def cal_pad_mode(tm_opr): out_shape = tm_opr.out_tensors[0].shape inp_shape = tm_opr.inp_tensors[0].shape if out_shape[2:] == inp_shape[2:]: return "SAME" else: return "VALID" @_register_tranformation_rule(TransformerRule.REMOVE_RESHAPE_INPUT) def _remove_reshape_input(net): for op in net.all_oprs: if not isinstance(op, ReshapeOpr): continue if len(op.inp_tensors) == 2: del op.inp_tensors[1] 
@_register_tranformation_rule(TransformerRule.TRANSPOSE_PATTERN_AS_INPUT) def _transpose_pattern_as_input(net): for op in net.all_oprs: if not isinstance(op, TransposeOpr): continue perm_tensor = IRTensor( name=op.inp_tensors[0].name + "_perm", shape=np.array(op.pattern).shape, dtype=np.int32, np_data=np.array(op.pattern, dtype=np.int32), owner_opr=op, q_type=np.int32, axis=None, ) op.add_inp_tensors(perm_tensor) @_register_tranformation_rule(TransformerRule.REDUCE_AXIS_AS_INPUT) def _reduce_axis_as_input(net): for op in net.all_oprs: if not isinstance(op, ReduceOpr): continue axis_tensor = IRTensor( name=op.inp_tensors[0].name + "_axis", shape=[1], dtype=np.int32, np_data=np.array(op.axis, dtype=np.int32), owner_opr=op, q_type=np.int32, axis=None, ) op.add_inp_tensors(axis_tensor) @_register_tranformation_rule(TransformerRule.PADDING_FOR_CONV_AND_POOLING) def _make_padding(net: IRGraph): def have_padding(opr): if isinstance(opr, Conv2dOpr): if cal_pad_mode(opr) == "SAME": return False if hasattr(opr, "padding") and (opr.padding[0] > 0 or opr.padding[1] > 0): return True return False insert_intended = OrderedDict() # type: OrderedDict for op in net.all_oprs: if not isinstance(op, (Conv2dOpr, _PoolOpr)): continue if have_padding(op): assert op.inp_tensors[0].ndim == 4, "ERROR: unsupported padding mode" np_data = np.array( [ 0, 0, op.padding[0], op.padding[0], op.padding[1], op.padding[1], 0, 0, ], dtype=np.int32, ) new_tensor_id = max(net._tensor_ids) + 1 pad_in_tensor = IRTensor( name=op.inp_tensors[0].name + "_paddings", shape=[4, 2], dtype=np.int32, owner_opr=None, np_data=np_data, q_type=np.int32, axis=None, ) net.add_tensor(new_tensor_id, pad_in_tensor) shape = list(op.inp_tensors[0].shape) new_tensor_id = max(net._tensor_ids) + 1 pad_out_tensor = IRTensor( name=op.inp_tensors[0].name + "_pad_out", shape=[ shape[0], shape[1], shape[2] + op.padding[0] * 2, shape[3] + op.padding[1] * 2, ], dtype=op.inp_tensors[0].dtype, ) if ( hasattr(op.inp_tensors[0], "scale") and op.inp_tensors[0].scale is not None ): pad_out_tensor.scale = op.inp_tensors[0].scale pad_out_tensor.q_dtype = op.inp_tensors[0].q_dtype if hasattr(op.inp_tensors[0], "zero_point"): pad_out_tensor.zero_point = op.inp_tensors[0].zero_point net.add_tensor(new_tensor_id, pad_out_tensor) pad_opr = PadOpr() pad_opr.inp_tensors = [op.inp_tensors[0], pad_in_tensor] index = op.inp_tensors[0].user_opr.index(op) op.inp_tensors[0].user_opr[index] = pad_opr pad_opr.out_tensors = [pad_out_tensor] pad_out_tensor.owner_opr = pad_opr op.inp_tensors = [pad_out_tensor] + op.inp_tensors[1:] pad_out_tensor.user_opr.append(op) index = net._opr_ids.index(id(op)) insert_intended[index] = (id(pad_opr), pad_opr) for index, generated_pair in list(insert_intended.items())[::-1]: net._opr_ids.insert(index, generated_pair[0]) net.all_oprs.insert(index, generated_pair[1]) @_register_tranformation_rule(TransformerRule.DECONV_SHAPE_AS_INPUT) def _deconv_shape_as_input(net: IRGraph): for op in net.all_oprs: if not isinstance(op, Deconv2dOpr): continue result_shape = op.out_tensors[0].shape np_data = np.array( [result_shape[0], result_shape[2], result_shape[3], result_shape[1],], dtype=np.int32, ) new_tensor_id = max(net._tensor_ids) + 1 shape_symvar = IRTensor( name=op.inp_tensors[0].name + "_deconv_out_shape", shape=[4], dtype=np.int32, owner_opr=op, np_data=np_data, q_type=np.int32, axis=None, ) shape_tensor = net.get_tensor(new_tensor_id, shape_symvar) if len(op.inp_tensors) == 2: op.inp_tensors = [ shape_tensor, op.inp_tensors[1], op.inp_tensors[0], ] 
else: op.inp_tensors = [ shape_tensor, op.inp_tensors[1], op.inp_tensors[0], op.inp_tensors[2], ] @_register_tranformation_rule(TransformerRule.RESIZE_PARAMS_AS_INPUT) def _resize_params_as_input(net): for op in net.all_oprs: if not isinstance(op, ResizeOpr): continue if len(op.inp_tensors) == 2: continue out_size_tensor = IRTensor( name=op.inp_tensors[0].name + "_out_size", shape=(2,), dtype=np.int32, np_data=np.array(op.out_size, dtype=np.int32), q_type=np.int32, axis=None, ) op.add_inp_tensors(out_size_tensor) @_register_tranformation_rule(TransformerRule.CONV_ADD_ZERO_BIAS) def _add_bias_for_conv(net: IRGraph): for op in net.all_oprs: if not isinstance(op, Conv2dOpr): continue if len(op.inp_tensors) == 3: continue weight_shape = op.inp_tensors[1].shape bias_shape = ( weight_shape[0] if len(weight_shape) == 4 else weight_shape[0] * weight_shape[1] ) bias_shape = (1, bias_shape, 1, 1) bias = np.zeros(bias_shape, dtype=np.float32) bias_tensor = IRTensor( name=op.inp_tensors[0].name + "_bias", shape=bias_shape, dtype=np.float32, np_data=bias, axis=AxisOrder.NCHW, ) if op.inp_tensors[0].scale and op.inp_tensors[1].scale: bias_tensor.set_qparams( op.inp_tensors[0].scale * op.inp_tensors[1].scale, 0 ) bias_tensor.q_dtype = "int32" op.inp_tensors.append(bias_tensor) @_register_tranformation_rule(TransformerRule.DECONV_ADD_ZERO_BIAS) def _add_bias_for_deconv(net: IRGraph): for op in net.all_oprs: if not isinstance(op, Deconv2dOpr): continue if len(op.inp_tensors) == 3: continue weight_shape = op.inp_tensors[1].shape bias_shape = ( weight_shape[1] if len(weight_shape) == 4 else weight_shape[0] * weight_shape[2] ) bias_shape = (1, bias_shape, 1, 1) bias = np.zeros(bias_shape, dtype=np.float32) bias_tensor = IRTensor( name=op.inp_tensors[0].name + "_bias", shape=bias_shape, dtype=np.float32, np_data=bias, axis=AxisOrder.NCHW, ) if op.inp_tensors[0].scale and op.inp_tensors[1].scale: bias_tensor.set_qparams( op.inp_tensors[0].scale * op.inp_tensors[1].scale, 0 ) bias_tensor.q_dtype = "int32" op.inp_tensors.append(bias_tensor) @_register_tranformation_rule(TransformerRule.RESHAPE_BIAS_TO_1DIM) def _reshape_bias_to_1dim(net: IRGraph): for op in net.all_oprs: if not isinstance(op, (Deconv2dOpr, Conv2dOpr)): continue if len(op.inp_tensors) == 2: continue bias = op.inp_tensors[2] if bias.ndim == 4: bias.shape = (bias.shape[1],) bias.np_data = bias.np_data.reshape(-1) @_register_tranformation_rule(TransformerRule.DEPTHWISE_CONV_RESHAPE_WEIGHT) def _depthwise_conv_reshape_weight(net: IRGraph): # general group conv is not supported for TFLite for op in net.all_oprs: if not isinstance(op, Conv2dOpr): continue if op.groups == 1: continue weight = op.inp_tensors[1] # G, oc/G, ic/G, kh, kw ic, cm = weight.shape[1] * op.groups, weight.shape[2] h, w = weight.shape[3:5] weight.shape = (ic, cm, h, w) # oc, ic/G, kh, kw weight.np_data = weight.np_data.reshape(ic, cm, h, w) @_register_tranformation_rule(TransformerRule.FUSE_ACTIVATION) def _fuse_activation(net): delete_intended = [] for op_id, op in zip(net._opr_ids, net.all_oprs): if isinstance(op, (ReluOpr, TanHOpr)): prev_ops = net.find_inp_oprs(op) if len(prev_ops) == 0: continue prev_op = prev_ops[0] if not isinstance(prev_op, OpBase): continue if prev_op.activation != "IDENTITY" or prev_op.name == "Deconv2d": continue activation = op.name.upper() prev_op.activation = activation prev_op.out_tensors = op.out_tensors for t in prev_op.out_tensors: t.owner_opr = prev_op delete_intended.append(net._opr_ids.index(op_id)) for delete_idx in delete_intended[::-1]: 
net.delete_ops(delete_idx) @_register_tranformation_rule(TransformerRule.SLICE_PARAMS_AS_INPUTS_AND_MAKE_SQUEEZE) def _make_slice_as_inputs(net: IRGraph): for op in net.all_oprs: if not isinstance(op, GetSubTensorOpr): continue ndim = op.inp_tensors[0].ndim def make_input(axis, param, init_value): # make inputs: begin, end and step. ret = [init_value] * ndim # pylint:disable=cell-var-from-loop for k, v in zip(axis, param): ret[k] = v ret = IRTensor( name=op.name + "_fake_input", # pylint:disable=cell-var-from-loop shape=[len(ret)], dtype=np.int32, np_data=np.array(ret, dtype=np.int32), owner_opr=op, # pylint:disable=cell-var-from-loop q_type=np.int32, ) return ret begins_tensor = make_input(op.axis, op.begin_params, 0) ends_tensor = make_input(op.axis, op.end_params, np.iinfo(np.int32).max) steps_tensor = make_input(op.axis, op.step_params, 1) op.inp_tensors = [op.inp_tensors[0], begins_tensor, ends_tensor, steps_tensor] # TFLite slice do not support squeeze axis, so insert a squeeze opr here. # infer actual output shape of tflite slice desired_out_shape = op.out_tensors[0].shape actual_out_shape = [1] * ndim idx = 0 for i in range(ndim): if i in op.squeeze_axis: continue actual_out_shape[i] = desired_out_shape[idx] idx += 1 slice_out_tensor = IRTensor( name=op.name + "fake_output", shape=actual_out_shape, dtype=op.out_tensors[0].dtype, q_type=op.out_tensors[0].q_dtype, owner_opr=op, ) old_out = op.out_tensors op.out_tensors = [slice_out_tensor] squeeze = SqueezeOpr(op.squeeze_axis) squeeze.inp_tensors = [slice_out_tensor] squeeze.out_tensors = old_out idx = net._opr_ids.index(id(op)) + 1 net.add_op(squeeze, idx) # caffe transormer rules class PatternNode: def __init__(self, type, is_output=False, const_value=None): self.op = None self.type = type self.inp_oprs = [] self.inp_const = [] self.inp_tensors = [] self.is_output = is_output self.const_value = const_value def check_const_value(self, op): inp_tensors = [v.np_data for v in op.inp_tensors] for const in self.const_value: idx = const[0] if idx == -1: find = False for index, v in enumerate(inp_tensors): if np.array_equal(const[1], v): find = True del inp_tensors[index] break if not find: return False elif not np.array_equal(const[1], inp_tensors[idx]): return False return True get_type = lambda op: type(op).__name__ def match(node, opr): node_queue = [node] opr_queue = [opr] matched_opr = set() matched_node = set() while len(node_queue) != 0: cur_node = node_queue.pop(0) cur_opr = opr_queue.pop(0) if cur_node.type != get_type(cur_opr) and cur_node.type != "*" or cur_opr.skip: return False if cur_node.op == None: cur_node.op = cur_opr if cur_node.const_value != None: if not cur_node.check_const_value(cur_opr): return False elif cur_node.op != cur_opr: return False matched_opr.add(cur_opr) matched_node.add(cur_node) for i, var in enumerate(cur_opr.inp_tensors): if var.np_data is not None: cur_node.inp_const.append([i, var.np_data]) else: cur_node.inp_tensors.append([i, var]) if len(cur_node.inp_oprs) == 0: continue if len(cur_node.inp_oprs) != len(cur_opr.inp_oprs): return False for i, j in zip(cur_node.inp_oprs, cur_opr.inp_oprs): node_queue.append(i) opr_queue.append(j) for n in matched_node: if n.is_output: continue for op in n.op.out_oprs: if op not in matched_opr: return False return True def get_softmax_axis(ndim: int) -> int: if ndim in (0, 1, 3): return 0 return 1 @_register_tranformation_rule(TransformerRule.FUSE_SOFTMAX) def _fuse_softmax(net: IRGraph): matches = OrderedDict() # type: OrderedDict for op in net.all_oprs: if not 
isinstance(op, TrueDivOpr): continue try: prev_op = net.find_inp_oprs(op)[1] cur_index = net._opr_ids.index(id(op)) if ( not isinstance(prev_op, ReduceOpr) or prev_op.mode != "SUM" or prev_op.axis != get_softmax_axis(prev_op.inp_tensors[0].ndim) or net._opr_ids.index(id(prev_op)) != cur_index - 1 ): continue prev_op = net.find_inp_oprs(op)[0] if ( not isinstance(prev_op, ExpOpr) or net._opr_ids.index(id(prev_op)) != cur_index - 2 ): continue prev_op = net.find_inp_oprs(prev_op)[0] if ( not isinstance(prev_op, SubOpr) or net._opr_ids.index(id(prev_op)) != cur_index - 3 ): continue prev_op = net.find_inp_oprs(prev_op)[1] if ( not isinstance(prev_op, ReduceOpr) or prev_op.mode != "MAX" or prev_op.axis != get_softmax_axis(prev_op.inp_tensors[0].ndim) or net._opr_ids.index(id(prev_op)) != cur_index - 4 ): continue except IndexError: # doesn't match continue softmax_opr = SoftmaxOpr(axis=get_softmax_axis(prev_op.inp_tensors[0].ndim)) softmax_opr.beta = 1 softmax_opr.inp_tensors = prev_op.inp_tensors[:1] for i in softmax_opr.inp_tensors: i.user_opr.append(softmax_opr) softmax_opr.out_tensors = op.out_tensors softmax_out_oprs = net.find_out_oprs(op) matches[id(prev_op)] = (id(prev_op), softmax_opr, softmax_out_oprs) for original_id, generated_pair in list(matches.items())[::-1]: index = net._opr_ids.index(original_id) for out_op in generated_pair[2]: generated_pair[1].out_tensors[0].user_opr.append(out_op) del net._opr_ids[index : index + 5] del net.all_oprs[index : index + 5] net._opr_ids.insert(index, generated_pair[0]) net.all_oprs.insert(index, generated_pair[1]) @_register_tranformation_rule(TransformerRule.FUSE_FOR_LEAKY_RELU) def _fuse_leaky_relu(net: IRGraph): """ Elemwise(ADD) + Elemwise(MUL) + Elemwise(MAX) + Elemwise(MIN) -> LeakyRelu """ for opr in net.all_oprs: if ( opr.name == "Add" and len(net.find_inp_oprs(opr)) == 2 and net.find_inp_oprs(opr)[0].name == "Max" and net.find_inp_oprs(opr)[1].name == "Mul" ): max_op = net.find_inp_oprs(opr)[0] mul_op = net.find_inp_oprs(opr)[1] if not mul_op.inp_tensors[1].shape == (1,): continue if not max_op.inp_tensors[1].shape == (1,): continue if ( len(net.find_inp_oprs(mul_op)) != 1 or net.find_inp_oprs(mul_op)[0].name != "Min" or net.find_inp_oprs(mul_op)[0].inp_tensors[1].shape != (1,) ): continue min_op = net.find_inp_oprs(mul_op)[0] if not min_op.inp_tensors[1].shape == (1,): continue if max_op.inp_tensors[0] != min_op.inp_tensors[0]: continue leaky_relu = LeakyReluOpr( negative_slope=float(mul_op.inp_tensors[1].np_data) ) leaky_relu.inp_tensors = [max_op.inp_tensors[0]] max_op.inp_tensors[0].user_opr.remove(max_op) max_op.inp_tensors[0].user_opr.remove(min_op) max_op.inp_tensors[0].user_opr.append(leaky_relu) leaky_relu.out_tensors = opr.out_tensors opr.out_tensors[0].owner_opr = leaky_relu index = net.all_oprs.index(max_op) del net.all_oprs[index : index + 4] del net._opr_ids[index : index + 4] net.add_op(leaky_relu, index) @_register_tranformation_rule(TransformerRule.FUSE_FOR_CONV_BIAS) def _fuse_for_conv_bias(net: IRGraph): """ ConvolutionForward + Elemwise(ADD) -> ConvForwardBias """ for opr in net.all_oprs: if ( opr.name == "Conv2d" and len(net.find_out_oprs(opr)) == 1 and net.find_out_oprs(opr)[0].name == "Add" ): bias_op = net.find_out_oprs(opr)[0] if not ( ( bias_op.inp_tensors[1].np_data is not None and len(bias_op.inp_tensors[1].np_data.reshape(-1)) == opr.inp_tensors[1].shape[0] ) or ( ( bias_op.inp_tensors[0].np_data is not None and len(bias_op.inp_tensors[0].np_data.reshape(-1)) == opr.inp_tensors[1].shape[0] ) ) ): continue 
bias_idx = 0 if bias_op.inp_tensors[0].np_data is not None else 1 if len(opr.inp_tensors) == 2: opr.inp_tensors.append(bias_op.inp_tensors[bias_idx]) else: bias_shape = opr.inp_tensors[2].np_data.shape add_tensor = bias_op.inp_tensors[bias_idx].np_data if add_tensor.shape != bias_shape: add_tensor = add_tensor.reshape(bias_shape) opr.inp_tensors[2].np_data += add_tensor if bias_op in opr.out_tensors[0].user_opr: opr.out_tensors[0].user_opr.remove(bias_op) bias_out_op = net.find_out_oprs(bias_op) if len(bias_out_op) > 0: for op in bias_out_op: op.inp_tensors[0] = opr.out_tensors[0] opr.out_tensors[0].user_opr.append(op) else: # last op of the graph assert bias_op.out_tensors[0] in net.graph_outputs index = net.graph_outputs.index(bias_op.out_tensors[0]) net.graph_outputs[index] = opr.out_tensors[0] opr.activation = bias_op.activation index = net.all_oprs.index(bias_op) del net.all_oprs[index] del net._opr_ids[index] @_register_tranformation_rule(TransformerRule.FUSE_FOR_DECONV_BIAS) def _fuse_for_deconv_bias(net: IRGraph): for opr in net.all_oprs: if ( opr.name == "Deconv2d" and len(net.find_out_oprs(opr)) == 1 and net.find_out_oprs(opr)[0].name == "Add" ): bias_op = net.find_out_oprs(opr)[0] if not ( ( bias_op.inp_tensors[1].np_data is not None and len(bias_op.inp_tensors[1].np_data.reshape(-1)) == opr.inp_tensors[1].shape[1] ) or ( ( bias_op.inp_tensors[0].np_data is not None and len(bias_op.inp_tensors[0].np_data.reshape(-1)) == opr.inp_tensors[1].shape[1] ) ) ): continue bias_idx = 0 if bias_op.inp_tensors[0].np_data is not None else 1 if len(opr.inp_tensors) == 3: # shape, weight, input, bias opr.inp_tensors.append(bias_op.inp_tensors[bias_idx]) else: bias_shape = opr.inp_tensors[3].np_data.shape add_tensor = bias_op.inp_tensors[bias_idx].np_data if add_tensor.shape != bias_shape: add_tensor = add_tensor.reshape(bias_shape) opr.inp_tensors[3].np_data += add_tensor if bias_op in opr.out_tensors[0].user_opr: opr.out_tensors[0].user_opr.remove(bias_op) bias_out_op = net.find_out_oprs(bias_op) if len(bias_out_op) > 0: for op in bias_out_op: op.inp_tensors[0] = opr.out_tensors[0] opr.out_tensors[0].user_opr.append(op) else: # last op of the graph assert bias_op.out_tensors[0] in net.graph_outputs index = net.graph_outputs.index(bias_op.out_tensors[0]) net.graph_outputs[index] = opr.out_tensors[0] opr.activation = bias_op.activation index = net.all_oprs.index(bias_op) del net.all_oprs[index] del net._opr_ids[index] @_register_tranformation_rule(TransformerRule.EXPAND_MUL_ADD3) def _expand_mul_add3(net: IRGraph): for op in net.all_oprs: if not isinstance(op, FuseMulAdd3Opr): continue last_op = net.find_inp_oprs(op) assert len(last_op) == 1 mul_out_tensor = IRTensor( name=op.inp_tensors[0].name + "_mul_out", shape=op.inp_tensors[0].shape, dtype=op.inp_tensors[0].dtype, ) new_tensor_id = max(net._tensor_ids) + 1 net.add_tensor(new_tensor_id, mul_out_tensor) mul_op = MulOpr() mul_out_tensor.owner_opr = mul_op mul_op.inp_tensors = op.inp_tensors[:2] for o in mul_op.inp_tensors: index = o.user_opr.index(op) o.user_opr[index] = mul_op mul_op.out_tensors = [mul_out_tensor] add_op = AddOpr() add_op.inp_tensors = [mul_out_tensor, op.inp_tensors[2]] mul_out_tensor.user_opr.append(add_op) add_op.out_tensors = op.out_tensors index = net._opr_ids.index(id(op)) net.delete_ops(index) net.add_op(mul_op, index) net.add_op(add_op, index + 1) @_register_tranformation_rule(TransformerRule.REPLACE_FLATTEN_TO_RESHAPE) def _replace_flatten_to_reshape(net: IRGraph): for opr in net.all_oprs: if isinstance(opr, 
FlattenOpr): out_shape = tuple(list(opr.inp_tensors[0].shape[: opr.start_axis]) + [-1]) reshape_op = ReshapeOpr(out_shape=out_shape) reshape_op.inp_tensors = opr.inp_tensors for t in reshape_op.inp_tensors: idx = t.user_opr.index(opr) t.user_opr[idx] = reshape_op reshape_op.out_tensors = opr.out_tensors for t in reshape_op.out_tensors: t.owner_opr = reshape_op net.replace_op(opr, reshape_op) @_register_tranformation_rule(TransformerRule.REMOVE_RESHAPE_REALTED_OP) def _remove_reshape_tensors(net: IRGraph): for opr in net.all_oprs: if isinstance(opr, ReshapeOpr) and len(opr.inp_tensors) > 1: opr.inp_tensors = opr.inp_tensors[:1] @_register_tranformation_rule(TransformerRule.REMOVE_DROPOUT) def _remove_dropout(net: IRGraph): for opr in net.all_oprs: for idx, inp in enumerate(opr.inp_tensors): owner_opr = inp.owner_opr if isinstance(owner_opr, DropoutOpr) and owner_opr.drop_prob == 0: opr.inp_tensors[idx] = owner_opr.inp_tensors[0] for idx, out in enumerate(net.graph_outputs): owner_opr = out.owner_opr if isinstance(owner_opr, DropoutOpr) and owner_opr.drop_prob == 0: net.graph_outputs[idx] = owner_opr.inp_tensors[0] @_register_tranformation_rule(TransformerRule.REMOVE_RELU) def _remove_relu(net: IRGraph): for opr in net.all_oprs: for idx, inp in enumerate(opr.inp_tensors): owner_opr = inp.owner_opr if isinstance(owner_opr, ReluOpr): opr.inp_tensors[idx] = owner_opr.inp_tensors[0] for idx, out in enumerate(net.graph_outputs): owner_opr = out.owner_opr if isinstance(owner_opr, ReluOpr): net.graph_outputs[idx] = owner_opr.inp_tensors[0] visited_tensor = set() # type: set def _dfs_recursive(op_set, tensor): owner_opr = tensor.owner_opr op_set.add(owner_opr) if tensor in visited_tensor: return visited_tensor.add(tensor) if isinstance(owner_opr, IRGraph) or owner_opr is None: return for tt in owner_opr.inp_tensors: _dfs_recursive(op_set, tt) @_register_tranformation_rule(TransformerRule.REMOVE_UNRELATED_IROP) def _remove_unrelated_op(net: IRGraph): match_sets = set() # type: Set[OpBase] for out_tensor in net.graph_outputs: _dfs_recursive(match_sets, out_tensor) remove_idx = [] for opr in net.all_oprs: if opr not in match_sets: index = net._opr_ids.index(id(opr)) remove_idx.append(index) for i in remove_idx[::-1]: net.delete_ops(i) @_register_tranformation_rule(TransformerRule.ADD_FAKE_HSIGMOID_OUT) def _add_fake_hsigmoid_tensor(net: IRGraph): for opr in net.all_oprs: if isinstance(opr, (HardSwishOpr, HardSigmoidOpr)): add_3_out_tensor = IRTensor( opr.out_tensors[0].name + "_fake_add3_out", opr.inp_tensors[0].shape, opr.inp_tensors[0].dtype, q_type=opr.inp_tensors[0].q_dtype, scale=opr.inp_tensors[0].scale, zero_point=opr.inp_tensors[0].zero_point, ) opr.add_inp_tensors(add_3_out_tensor) relu6_out_tensor = IRTensor( opr.out_tensors[0].name + "_relu6_out", opr.inp_tensors[0].shape, opr.inp_tensors[0].dtype, q_type=opr.inp_tensors[0].q_dtype, scale=opr.inp_tensors[0].scale, zero_point=opr.inp_tensors[0].zero_point, ) opr.add_inp_tensors(relu6_out_tensor) if isinstance(opr, HardSwishOpr): div6_out_tensor = IRTensor( opr.out_tensors[0].name + "_div_out", opr.inp_tensors[0].shape, opr.inp_tensors[0].dtype, q_type=opr.inp_tensors[0].q_dtype, scale=opr.inp_tensors[0].scale, zero_point=opr.inp_tensors[0].zero_point, ) opr.add_inp_tensors(div6_out_tensor) def fold_conv_bn( conv_weight, conv_bias, conv_groups, gamma, beta, bn_mean, bn_var, eps ): conv_bias = conv_bias.reshape(1, -1, 1, 1) gamma = gamma.reshape(1, -1, 1, 1) beta = beta.reshape(1, -1, 1, 1) bn_mean = bn_mean.reshape(1, -1, 1, 1) bn_var = 
bn_var.reshape(1, -1, 1, 1) # bn_istd = 1 / bn_std bn_istd = 1.0 / sqrt(bn_var + eps) # type: ignore[attr-defined] # w_fold = gamma / bn_std * W scale_factor = gamma * bn_istd if conv_groups == 1: w_fold = conv_weight * scale_factor.reshape(-1, 1, 1, 1) else: w_fold = conv_weight * scale_factor.reshape(conv_groups, -1, 1, 1, 1) # b_fold = gamma * (b - bn_mean) / bn_std + beta b_fold = beta + gamma * (conv_bias - bn_mean) * bn_istd return w_fold, b_fold @_register_tranformation_rule(TransformerRule.FUSE_CONV_BN) def _fuse_conv_bn(net: IRGraph): for opr in net.all_oprs: if ( opr.name == "BatchNormalization" and len(net.find_inp_oprs(opr)) == 1 and net.find_inp_oprs(opr)[0].name == "Conv2d" and len(net.find_out_oprs(net.find_inp_oprs(opr)[0])) == 1 and net.find_out_oprs(net.find_inp_oprs(opr)[0])[0] == opr ): gamma = ( Tensor(opr.weight) # type: ignore[attr-defined] if opr.weight is not None # type: ignore[attr-defined] else Tensor(opr.inp_tensors[1].np_data) ) beta = ( Tensor(opr.bias) # type: ignore[attr-defined] if opr.bias is not None # type: ignore[attr-defined] else Tensor(opr.inp_tensors[2].np_data) ) bn_mean = ( Tensor(opr.mean) # type: ignore[attr-defined] if opr.mean is not None # type: ignore[attr-defined] else Tensor(opr.inp_tensors[3].np_data) ) bn_var = ( Tensor(opr.var) # type: ignore[attr-defined] if opr.var is not None # type: ignore[attr-defined] else Tensor(opr.inp_tensors[4].np_data) ) conv_op = net.find_inp_oprs(opr)[0] conv_weight = conv_op.inp_tensors[1].np_data if len(conv_op.inp_tensors) == 2: # add conv bias tensor weight_shape = conv_op.inp_tensors[1].shape bias_shape = ( weight_shape[0] if len(weight_shape) == 4 else weight_shape[0] * weight_shape[1] ) bias_shape = (1, bias_shape, 1, 1) conv_bias = IRTensor( name=conv_op.inp_tensors[0].name + "_bias", shape=bias_shape, dtype=np.float32, np_data=np.zeros(bias_shape, dtype=np.float32), owner_opr=conv_op, ) if conv_op.inp_tensors[0].scale and conv_op.inp_tensors[1].scale: conv_bias.set_qparams( conv_op.inp_tensors[0].scale * conv_op.inp_tensors[1].scale, 0 ) conv_bias.q_dtype = "int32" conv_op.inp_tensors.append(conv_bias) conv_bias = conv_op.inp_tensors[2].np_data.reshape(1, -1, 1, 1) w_fold, b_fold = fold_conv_bn( conv_weight, conv_bias, conv_op.groups, gamma, beta, bn_mean, bn_var, opr.eps, # type: ignore[attr-defined] ) conv_op.inp_tensors[1].np_data = w_fold.numpy() conv_op.inp_tensors[2].np_data = b_fold.numpy() # delete bn opr conv_op.out_tensors[0] = opr.out_tensors[-1] conv_op.out_tensors[0].owner_opr = conv_op index = net._opr_ids.index(id(opr)) net.delete_ops(index) @_register_tranformation_rule(TransformerRule.REMOVE_IDENTITY) def _remove_identity(net: IRGraph): delete_intended = [] for op_id, opr in zip(net._opr_ids, net.all_oprs): if not isinstance(opr, IdentityOpr): continue user_ops = net.find_out_oprs(opr) for user in user_ops: idx = user.inp_tensors.index(opr.out_tensors[0]) user.inp_tensors[idx] = opr.inp_tensors[0] idx = opr.inp_tensors[0].user_opr.index(opr) opr.inp_tensors[0].user_opr[idx] = user delete_intended.append(net._opr_ids.index(op_id)) for delete_idx in delete_intended[::-1]: net.delete_ops(delete_idx) @_register_tranformation_rule(TransformerRule.EXPAND_CONVRELU) def _expand_conv_relu(net: IRGraph): for opr in net.all_oprs: if not isinstance(opr, ConvRelu2dOpr): continue conv_op = Conv2dOpr( stride=opr.stride, padding=opr.padding, dilation=opr.dilation, groups=opr.groups, ) conv_op.inp_tensors = opr.inp_tensors for t in conv_op.inp_tensors: idx = t.user_opr.index(opr) t.user_opr[idx] = 
conv_op conv_out_tensor = IRTensor( name=opr.inp_tensors[0].name + "_conv_out", shape=opr.out_tensors[0].shape, dtype=opr.out_tensors[0].dtype, scale=opr.out_tensors[0].scale, zero_point=opr.out_tensors[0].zero_point, q_type=opr.out_tensors[0].q_dtype, owner_opr=conv_op, ) conv_op.out_tensors = [conv_out_tensor] conv_out_tensor.owner_opr = conv_op idx = net.all_oprs.index(opr) net.add_op(conv_op, idx) relu_op = ReluOpr() relu_op.inp_tensors = conv_op.out_tensors conv_out_tensor.user_opr.append(relu_op) relu_op.out_tensors = opr.out_tensors for t in relu_op.out_tensors: t.owner_opr = relu_op net.replace_op(opr, relu_op)
[ "megengine.Tensor", "megengine.functional.sqrt" ]
[((5519, 5532), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5530, 5532), False, 'from collections import OrderedDict\n'), ((18529, 18542), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (18540, 18542), False, 'from collections import OrderedDict\n'), ((8433, 8532), 'numpy.array', 'np.array', (['[result_shape[0], result_shape[2], result_shape[3], result_shape[1]]'], {'dtype': 'np.int32'}), '([result_shape[0], result_shape[2], result_shape[3], result_shape[1\n ]], dtype=np.int32)\n', (8441, 8532), True, 'import numpy as np\n'), ((10396, 10434), 'numpy.zeros', 'np.zeros', (['bias_shape'], {'dtype': 'np.float32'}), '(bias_shape, dtype=np.float32)\n', (10404, 10434), True, 'import numpy as np\n'), ((11433, 11471), 'numpy.zeros', 'np.zeros', (['bias_shape'], {'dtype': 'np.float32'}), '(bias_shape, dtype=np.float32)\n', (11441, 11471), True, 'import numpy as np\n'), ((33030, 33048), 'megengine.functional.sqrt', 'sqrt', (['(bn_var + eps)'], {}), '(bn_var + eps)\n', (33034, 33048), False, 'from megengine.functional import sqrt\n'), ((5791, 5893), 'numpy.array', 'np.array', (['[0, 0, op.padding[0], op.padding[0], op.padding[1], op.padding[1], 0, 0]'], {'dtype': 'np.int32'}), '([0, 0, op.padding[0], op.padding[0], op.padding[1], op.padding[1],\n 0, 0], dtype=np.int32)\n', (5799, 5893), True, 'import numpy as np\n'), ((3220, 3241), 'functools.cmp_to_key', 'cmp_to_key', (['cmp_rules'], {}), '(cmp_rules)\n', (3230, 3241), False, 'from functools import cmp_to_key\n'), ((4441, 4477), 'numpy.array', 'np.array', (['op.pattern'], {'dtype': 'np.int32'}), '(op.pattern, dtype=np.int32)\n', (4449, 4477), True, 'import numpy as np\n'), ((4955, 4988), 'numpy.array', 'np.array', (['op.axis'], {'dtype': 'np.int32'}), '(op.axis, dtype=np.int32)\n', (4963, 4988), True, 'import numpy as np\n'), ((9740, 9777), 'numpy.array', 'np.array', (['op.out_size'], {'dtype': 'np.int32'}), '(op.out_size, dtype=np.int32)\n', (9748, 9777), True, 'import numpy as np\n'), ((14872, 14890), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (14880, 14890), True, 'import numpy as np\n'), ((33947, 33965), 'megengine.Tensor', 'Tensor', (['opr.weight'], {}), '(opr.weight)\n', (33953, 33965), False, 'from megengine import Tensor\n'), ((34089, 34123), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[1].np_data'], {}), '(opr.inp_tensors[1].np_data)\n', (34095, 34123), False, 'from megengine import Tensor\n'), ((34175, 34191), 'megengine.Tensor', 'Tensor', (['opr.bias'], {}), '(opr.bias)\n', (34181, 34191), False, 'from megengine import Tensor\n'), ((34313, 34347), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[2].np_data'], {}), '(opr.inp_tensors[2].np_data)\n', (34319, 34347), False, 'from megengine import Tensor\n'), ((34402, 34418), 'megengine.Tensor', 'Tensor', (['opr.mean'], {}), '(opr.mean)\n', (34408, 34418), False, 'from megengine import Tensor\n'), ((34540, 34574), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[3].np_data'], {}), '(opr.inp_tensors[3].np_data)\n', (34546, 34574), False, 'from megengine import Tensor\n'), ((34628, 34643), 'megengine.Tensor', 'Tensor', (['opr.var'], {}), '(opr.var)\n', (34634, 34643), False, 'from megengine import Tensor\n'), ((34764, 34798), 'megengine.Tensor', 'Tensor', (['opr.inp_tensors[4].np_data'], {}), '(opr.inp_tensors[4].np_data)\n', (34770, 34798), False, 'from megengine import Tensor\n'), ((4365, 4385), 'numpy.array', 'np.array', (['op.pattern'], {}), '(op.pattern)\n', (4373, 4385), True, 'import numpy as np\n'), ((14582, 14611), 'numpy.array', 
'np.array', (['ret'], {'dtype': 'np.int32'}), '(ret, dtype=np.int32)\n', (14590, 14611), True, 'import numpy as np\n'), ((16601, 16628), 'numpy.array_equal', 'np.array_equal', (['const[1]', 'v'], {}), '(const[1], v)\n', (16615, 16628), True, 'import numpy as np\n'), ((16826, 16868), 'numpy.array_equal', 'np.array_equal', (['const[1]', 'inp_tensors[idx]'], {}), '(const[1], inp_tensors[idx])\n', (16840, 16868), True, 'import numpy as np\n'), ((35512, 35550), 'numpy.zeros', 'np.zeros', (['bias_shape'], {'dtype': 'np.float32'}), '(bias_shape, dtype=np.float32)\n', (35520, 35550), True, 'import numpy as np\n')]
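A short usage sketch for the IRTransform driver defined near the top of the row above; the rule list is illustrative, and ir_graph stands for an IRGraph already produced by the converter front end.

# Illustrative sketch (not from the dataset row).
rules = [
    TransformerRule.FUSE_ACTIVATION,
    TransformerRule.FUSE_FOR_CONV_BIAS,
    TransformerRule.DEPTHWISE_CONV_RESHAPE_WEIGHT,  # __init__ also appends RESHAPE_BIAS_TO_1DIM
]
transformer = IRTransform(rules)              # rules are sorted by their enum value
ir_graph = transformer.transform(ir_graph)    # applies the registered TRANSFORMMAP passes in order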
"""Initial model generation Revision ID: a2ced875a244 Revises: Create Date: 2021-10-28 09:24:53.225445 """ from alembic import op import sqlalchemy as sa import sqlmodel # revision identifiers, used by Alembic. revision = 'a2ced875a244' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('category', sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column('playlists', sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column('id', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_category_id'), 'category', ['id'], unique=False) op.create_index(op.f('ix_category_name'), 'category', ['name'], unique=False) op.create_index(op.f('ix_category_playlists'), 'category', ['playlists'], unique=False) op.create_table('user', sa.Column('entity_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('last_login', sa.DateTime(), nullable=True), sa.Column('admin', sa.Boolean(), nullable=False), sa.Column('id', sa.Integer(), nullable=True), sa.Column('password', sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_user_admin'), 'user', ['admin'], unique=False) op.create_index(op.f('ix_user_created_at'), 'user', ['created_at'], unique=False) op.create_index(op.f('ix_user_entity_id'), 'user', ['entity_id'], unique=False) op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False) op.create_index(op.f('ix_user_last_login'), 'user', ['last_login'], unique=False) op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=False) op.create_index(op.f('ix_user_password'), 'user', ['password'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_index(op.f('ix_user_password'), table_name='user') op.drop_index(op.f('ix_user_name'), table_name='user') op.drop_index(op.f('ix_user_last_login'), table_name='user') op.drop_index(op.f('ix_user_id'), table_name='user') op.drop_index(op.f('ix_user_entity_id'), table_name='user') op.drop_index(op.f('ix_user_created_at'), table_name='user') op.drop_index(op.f('ix_user_admin'), table_name='user') op.drop_table('user') op.drop_index(op.f('ix_category_playlists'), table_name='category') op.drop_index(op.f('ix_category_name'), table_name='category') op.drop_index(op.f('ix_category_id'), table_name='category') op.drop_table('category') # ### end Alembic commands ###
[ "sqlmodel.sql.sqltypes.AutoString" ]
[((2554, 2575), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (2567, 2575), False, 'from alembic import op\n'), ((2784, 2809), 'alembic.op.drop_table', 'op.drop_table', (['"""category"""'], {}), "('category')\n", (2797, 2809), False, 'from alembic import op\n'), ((626, 655), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (649, 655), True, 'import sqlalchemy as sa\n'), ((682, 704), 'alembic.op.f', 'op.f', (['"""ix_category_id"""'], {}), "('ix_category_id')\n", (686, 704), False, 'from alembic import op\n'), ((760, 784), 'alembic.op.f', 'op.f', (['"""ix_category_name"""'], {}), "('ix_category_name')\n", (764, 784), False, 'from alembic import op\n'), ((842, 871), 'alembic.op.f', 'op.f', (['"""ix_category_playlists"""'], {}), "('ix_category_playlists')\n", (846, 871), False, 'from alembic import op\n'), ((1403, 1432), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1426, 1432), True, 'import sqlalchemy as sa\n'), ((1459, 1480), 'alembic.op.f', 'op.f', (['"""ix_user_admin"""'], {}), "('ix_user_admin')\n", (1463, 1480), False, 'from alembic import op\n'), ((1535, 1561), 'alembic.op.f', 'op.f', (['"""ix_user_created_at"""'], {}), "('ix_user_created_at')\n", (1539, 1561), False, 'from alembic import op\n'), ((1621, 1646), 'alembic.op.f', 'op.f', (['"""ix_user_entity_id"""'], {}), "('ix_user_entity_id')\n", (1625, 1646), False, 'from alembic import op\n'), ((1705, 1723), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (1709, 1723), False, 'from alembic import op\n'), ((1775, 1801), 'alembic.op.f', 'op.f', (['"""ix_user_last_login"""'], {}), "('ix_user_last_login')\n", (1779, 1801), False, 'from alembic import op\n'), ((1861, 1881), 'alembic.op.f', 'op.f', (['"""ix_user_name"""'], {}), "('ix_user_name')\n", (1865, 1881), False, 'from alembic import op\n'), ((1935, 1959), 'alembic.op.f', 'op.f', (['"""ix_user_password"""'], {}), "('ix_user_password')\n", (1939, 1959), False, 'from alembic import op\n'), ((2135, 2159), 'alembic.op.f', 'op.f', (['"""ix_user_password"""'], {}), "('ix_user_password')\n", (2139, 2159), False, 'from alembic import op\n'), ((2198, 2218), 'alembic.op.f', 'op.f', (['"""ix_user_name"""'], {}), "('ix_user_name')\n", (2202, 2218), False, 'from alembic import op\n'), ((2257, 2283), 'alembic.op.f', 'op.f', (['"""ix_user_last_login"""'], {}), "('ix_user_last_login')\n", (2261, 2283), False, 'from alembic import op\n'), ((2322, 2340), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (2326, 2340), False, 'from alembic import op\n'), ((2379, 2404), 'alembic.op.f', 'op.f', (['"""ix_user_entity_id"""'], {}), "('ix_user_entity_id')\n", (2383, 2404), False, 'from alembic import op\n'), ((2443, 2469), 'alembic.op.f', 'op.f', (['"""ix_user_created_at"""'], {}), "('ix_user_created_at')\n", (2447, 2469), False, 'from alembic import op\n'), ((2508, 2529), 'alembic.op.f', 'op.f', (['"""ix_user_admin"""'], {}), "('ix_user_admin')\n", (2512, 2529), False, 'from alembic import op\n'), ((2594, 2623), 'alembic.op.f', 'op.f', (['"""ix_category_playlists"""'], {}), "('ix_category_playlists')\n", (2598, 2623), False, 'from alembic import op\n'), ((2666, 2690), 'alembic.op.f', 'op.f', (['"""ix_category_name"""'], {}), "('ix_category_name')\n", (2670, 2690), False, 'from alembic import op\n'), ((2733, 2755), 'alembic.op.f', 'op.f', (['"""ix_category_id"""'], {}), "('ix_category_id')\n", (2737, 2755), False, 'from alembic import 
op\n'), ((439, 473), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (471, 473), False, 'import sqlmodel\n'), ((519, 553), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (551, 553), False, 'import sqlmodel\n'), ((592, 604), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (602, 604), True, 'import sqlalchemy as sa\n'), ((969, 1003), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1001, 1003), False, 'import sqlmodel\n'), ((1044, 1078), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1076, 1078), False, 'import sqlmodel\n'), ((1125, 1138), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1136, 1138), True, 'import sqlalchemy as sa\n'), ((1185, 1198), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1196, 1198), True, 'import sqlalchemy as sa\n'), ((1239, 1251), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1249, 1251), True, 'import sqlalchemy as sa\n'), ((1290, 1302), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1300, 1302), True, 'import sqlalchemy as sa\n'), ((1346, 1380), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1378, 1380), False, 'import sqlmodel\n')]
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] = Field(default=None, nullable=True) date: Optional[datetime] = Field(default=None, nullable=True) # track invitation information # this is for this LOB to track this entity in Traction invitation_state: Optional[str] = Field(default=None, nullable=True) connection_id: Optional[uuid.UUID] = Field(default=None) # for matching this student with their traction tenant # this would not be in this LOB data at all!!! # the entity/person/business that this record represents # would be tracking this in their system/data wallet_id: Optional[uuid.UUID] = None alias: Optional[str] = Field(default=None, nullable=True) class Student(StudentBase, BaseTable, table=True): __table_args__ = (UniqueConstraint("name", "sandbox_id"),) sandbox: Optional["Sandbox"] = Relationship(back_populates="students") # noqa: F821 sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id") wallet_id: uuid.UUID = Field(default=None, nullable=True) class StudentCreate(StudentBase): pass class StudentRead(StudentBase): id: uuid.UUID created_at: datetime updated_at: datetime degree: Optional[str] = None age: Optional[int] = None student_id: Optional[str] = None date: Optional[datetime] = None class StudentUpdate(StudentBase): name: Optional[str] = None # FACTORIES class StudentCreateFactory(ModelFactory): __model__ = StudentCreate name = Use(Faker().name) degree = None age = None student_id = None date = None wallet_id = None alias = None invitation_state = None connection_id = None
[ "sqlmodel.Relationship", "sqlmodel.Field" ]
[((325, 358), 'sqlmodel.Field', 'Field', ([], {'index': '(True)', 'nullable': '(False)'}), '(index=True, nullable=False)\n', (330, 358), False, 'from sqlmodel import Field, Relationship\n'), ((486, 520), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (491, 520), False, 'from sqlmodel import Field, Relationship\n'), ((546, 580), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (551, 580), False, 'from sqlmodel import Field, Relationship\n'), ((613, 647), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (618, 647), False, 'from sqlmodel import Field, Relationship\n'), ((679, 713), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (684, 713), False, 'from sqlmodel import Field, Relationship\n'), ((848, 882), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (853, 882), False, 'from sqlmodel import Field, Relationship\n'), ((924, 943), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (929, 943), False, 'from sqlmodel import Field, Relationship\n'), ((1235, 1269), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (1240, 1269), False, 'from sqlmodel import Field, Relationship\n'), ((1422, 1461), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""students"""'}), "(back_populates='students')\n", (1434, 1461), False, 'from sqlmodel import Field, Relationship\n'), ((1505, 1536), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""sandbox.id"""'}), "(foreign_key='sandbox.id')\n", (1510, 1536), False, 'from sqlmodel import Field, Relationship\n'), ((1564, 1598), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (1569, 1598), False, 'from sqlmodel import Field, Relationship\n'), ((1345, 1383), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (1361, 1383), False, 'from sqlalchemy import UniqueConstraint\n'), ((2053, 2060), 'faker.Faker', 'Faker', ([], {}), '()\n', (2058, 2060), False, 'from faker import Faker\n')]
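A sketch of exercising the pydantic_factories factory declared at the end of the row above; build() is the standard ModelFactory entry point, and the hand-built instance uses made-up values.

# Illustrative sketch (not from the dataset row).
payload = StudentCreateFactory.build()   # name comes from Faker; fields pinned to None in the factory stay None
print(payload.name, payload.degree)

import uuid
manual = StudentCreate(name="Alice Example", sandbox_id=uuid.uuid4())  # values are illustrative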
from typing import Optional, List from sqlalchemy import String from sqlalchemy.sql.schema import Column from sqlmodel import SQLModel, Field, Relationship class CustomerProductLink(SQLModel, table=True): customer_id: Optional[int] = Field( default=None, foreign_key='customer.id', primary_key=True ) product_id: Optional[int] = Field( default=None, foreign_key='product.id', primary_key=True ) class AddressBase(SQLModel): street_name: str house_number: str city: str zip_code: str class Address(AddressBase, table=True): id: int = Field(default=None, primary_key=True) customers: List['Customer'] = Relationship(back_populates='address') class AddressOut(AddressBase): pass class AddressIn(AddressBase): pass class CustomerBase(SQLModel): first_name: str last_name: str birth_date: str gender: str mobile_number: str email: str class Customer(CustomerBase, table=True): id: int = Field(default=None, primary_key=True) address_id: Optional[int] = Field(default=None, foreign_key='address.id') address: Optional[Address] = Relationship(back_populates='customers', sa_relationship_kwargs={'lazy': 'selectin'}) mobile_number: str = Field(sa_column=Column('mobile_number', String, unique=True)) email: str = Field(sa_column=Column('email', String, unique=True)) products: List['Product'] = Relationship(back_populates='customers', link_model=CustomerProductLink, sa_relationship_kwargs={'lazy': 'selectin'}) class CustomerOut(CustomerBase): id: int address: Optional[AddressOut] class CustomerIn(CustomerBase): address: Optional[AddressIn] class ProductBase(SQLModel): name: Optional[str] = None class Product(ProductBase, table=True): id: int = Field(default=None, primary_key=True) name: str = Field(sa_column=Column('name', String, unique=True)) customers: List[Customer] = Relationship(back_populates='products', link_model=CustomerProductLink) class ProductOut(ProductBase): id: int name: str class ProductIn(ProductBase): name: str class ProductUpdate(ProductBase): product_id: int
[ "sqlmodel.Relationship", "sqlmodel.Field" ]
[((241, 305), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""customer.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='customer.id', primary_key=True)\n", (246, 305), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((352, 415), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""product.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='product.id', primary_key=True)\n", (357, 415), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((592, 629), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (597, 629), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((664, 702), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""address"""'}), "(back_populates='address')\n", (676, 702), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((989, 1026), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (994, 1026), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1059, 1104), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""address.id"""'}), "(default=None, foreign_key='address.id')\n", (1064, 1104), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1138, 1227), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""customers"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='customers', sa_relationship_kwargs={'lazy':\n 'selectin'})\n", (1150, 1227), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1461, 1582), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""customers"""', 'link_model': 'CustomerProductLink', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='customers', link_model=CustomerProductLink,\n sa_relationship_kwargs={'lazy': 'selectin'})\n", (1473, 1582), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1890, 1927), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1895, 1927), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2030, 2101), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""products"""', 'link_model': 'CustomerProductLink'}), "(back_populates='products', link_model=CustomerProductLink)\n", (2042, 2101), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1311, 1355), 'sqlalchemy.sql.schema.Column', 'Column', (['"""mobile_number"""', 'String'], {'unique': '(True)'}), "('mobile_number', String, unique=True)\n", (1317, 1355), False, 'from sqlalchemy.sql.schema import Column\n'), ((1390, 1426), 'sqlalchemy.sql.schema.Column', 'Column', (['"""email"""', 'String'], {'unique': '(True)'}), "('email', String, unique=True)\n", (1396, 1426), False, 'from sqlalchemy.sql.schema import Column\n'), ((1960, 1995), 'sqlalchemy.sql.schema.Column', 'Column', (['"""name"""', 'String'], {'unique': '(True)'}), "('name', String, unique=True)\n", (1966, 1995), False, 'from sqlalchemy.sql.schema import Column\n')]
r""" Biot problem - deformable porous medium with the no-penetration boundary condition on a boundary region enforced using Lagrange multipliers. The non-penetration condition is enforced weakly using the Lagrange multiplier :math:`\lambda`. There is also a rigid body movement constraint imposed on the :math:`\Gamma_{outlet}` region using the linear combination boundary conditions. Find :math:`\ul{u}`, :math:`p` and :math:`\lambda` such that: .. math:: \int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u}) - \int_{\Omega} p\ \alpha_{ij} e_{ij}(\ul{v}) + \int_{\Gamma_{walls}} \lambda \ul{n} \cdot \ul{v} = 0 \;, \quad \forall \ul{v} \;, \int_{\Omega} q\ \alpha_{ij} e_{ij}(\ul{u}) + \int_{\Omega} K_{ij} \nabla_i q \nabla_j p = 0 \;, \quad \forall q \;, \int_{\Gamma_{walls}} \hat\lambda \ul{n} \cdot \ul{u} = 0 \;, \quad \forall \hat\lambda \;, \ul{u} \cdot \ul{n} = 0 \mbox{ on } \Gamma_{walls} \;, where .. math:: D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) + \lambda \ \delta_{ij} \delta_{kl} \;. """ from __future__ import absolute_import from examples.multi_physics.biot_npbc import (cinc_simple, define_regions, get_pars) def define(): from sfepy import data_dir filename = data_dir + '/meshes/3d/cylinder.mesh' output_dir = 'output' return define_input(filename, output_dir) def post_process(out, pb, state, extend=False): from sfepy.base.base import Struct dvel = pb.evaluate('ev_diffusion_velocity.2.Omega( m.K, p )', mode='el_avg') out['dvel'] = Struct(name='output_data', var_name='p', mode='cell', data=dvel, dofs=None) stress = pb.evaluate('ev_cauchy_stress.2.Omega( m.D, u )', mode='el_avg') out['cauchy_stress'] = Struct(name='output_data', var_name='u', mode='cell', data=stress, dofs=None) return out def define_input(filename, output_dir): filename_mesh = filename options = { 'output_dir' : output_dir, 'output_format' : 'vtk', 'post_process_hook' : 'post_process', ## 'file_per_var' : True, 'ls' : 'ls', 'nls' : 'newton', } functions = { 'cinc_simple0' : (lambda coors, domain: cinc_simple(coors, 0),), 'cinc_simple1' : (lambda coors, domain: cinc_simple(coors, 1),), 'cinc_simple2' : (lambda coors, domain: cinc_simple(coors, 2),), 'get_pars' : (lambda ts, coors, mode=None, **kwargs: get_pars(ts, coors, mode, output_dir=output_dir, **kwargs),), } regions, dim = define_regions(filename_mesh) fields = { 'displacement': ('real', 'vector', 'Omega', 1), 'pressure': ('real', 'scalar', 'Omega', 1), 'multiplier': ('real', 'scalar', 'Walls', 1), } variables = { 'u' : ('unknown field', 'displacement', 0), 'v' : ('test field', 'displacement', 'u'), 'p' : ('unknown field', 'pressure', 1), 'q' : ('test field', 'pressure', 'p'), 'ul' : ('unknown field', 'multiplier', 2), 'vl' : ('test field', 'multiplier', 'ul'), } ebcs = { 'inlet' : ('Inlet', {'p.0' : 1.0, 'u.all' : 0.0}), 'outlet' : ('Outlet', {'p.0' : -1.0}), } lcbcs = { 'rigid' : ('Outlet', {'u.all' : None}, None, 'rigid'), } materials = { 'm' : 'get_pars', } equations = { 'eq_1' : """dw_lin_elastic.2.Omega( m.D, v, u ) - dw_biot.2.Omega( m.alpha, v, p ) + dw_non_penetration.2.Walls( v, ul ) = 0""", 'eq_2' : """dw_biot.2.Omega( m.alpha, u, q ) + dw_diffusion.2.Omega( m.K, q, p ) = 0""", 'eq_3' : """dw_non_penetration.2.Walls( u, vl ) = 0""", } solvers = { 'ls' : ('ls.scipy_direct', {}), 'newton' : ('nls.newton', {}), } return locals()
[ "sfepy.base.base.Struct" ]
[((1657, 1732), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'var_name': '"""p"""', 'mode': '"""cell"""', 'data': 'dvel', 'dofs': 'None'}), "(name='output_data', var_name='p', mode='cell', data=dvel, dofs=None)\n", (1663, 1732), False, 'from sfepy.base.base import Struct\n'), ((1889, 1966), 'sfepy.base.base.Struct', 'Struct', ([], {'name': '"""output_data"""', 'var_name': '"""u"""', 'mode': '"""cell"""', 'data': 'stress', 'dofs': 'None'}), "(name='output_data', var_name='u', mode='cell', data=stress, dofs=None)\n", (1895, 1966), False, 'from sfepy.base.base import Struct\n'), ((2822, 2851), 'examples.multi_physics.biot_npbc.define_regions', 'define_regions', (['filename_mesh'], {}), '(filename_mesh)\n', (2836, 2851), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2398, 2419), 'examples.multi_physics.biot_npbc.cinc_simple', 'cinc_simple', (['coors', '(0)'], {}), '(coors, 0)\n', (2409, 2419), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2497, 2518), 'examples.multi_physics.biot_npbc.cinc_simple', 'cinc_simple', (['coors', '(1)'], {}), '(coors, 1)\n', (2508, 2518), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2596, 2617), 'examples.multi_physics.biot_npbc.cinc_simple', 'cinc_simple', (['coors', '(2)'], {}), '(coors, 2)\n', (2607, 2617), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n'), ((2704, 2762), 'examples.multi_physics.biot_npbc.get_pars', 'get_pars', (['ts', 'coors', 'mode'], {'output_dir': 'output_dir'}), '(ts, coors, mode, output_dir=output_dir, **kwargs)\n', (2712, 2762), False, 'from examples.multi_physics.biot_npbc import cinc_simple, define_regions, get_pars\n')]
from sqlmodel import Session from sfm.database import engine from sfm.config import get_settings from sfm.utils import verify_api_auth_token from fastapi import Depends, HTTPException from fastapi.security import HTTPBearer, HTTPBasicCredentials from passlib.context import CryptContext pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") app_settings = get_settings() security = HTTPBearer() def get_db(): # pragma: no cover db = Session(engine) try: yield db finally: db.close() def has_access( credentials: HTTPBasicCredentials = Depends(security), ): # pragma: no cover token = credentials.credentials verified = verify_api_auth_token(token) if verified: return True else: raise HTTPException(status_code=403, detail="Incorrect Credentials")
[ "sqlmodel.Session" ]
[((302, 353), 'passlib.context.CryptContext', 'CryptContext', ([], {'schemes': "['bcrypt']", 'deprecated': '"""auto"""'}), "(schemes=['bcrypt'], deprecated='auto')\n", (314, 353), False, 'from passlib.context import CryptContext\n'), ((369, 383), 'sfm.config.get_settings', 'get_settings', ([], {}), '()\n', (381, 383), False, 'from sfm.config import get_settings\n'), ((395, 407), 'fastapi.security.HTTPBearer', 'HTTPBearer', ([], {}), '()\n', (405, 407), False, 'from fastapi.security import HTTPBearer, HTTPBasicCredentials\n'), ((453, 468), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (460, 468), False, 'from sqlmodel import Session\n'), ((585, 602), 'fastapi.Depends', 'Depends', (['security'], {}), '(security)\n', (592, 602), False, 'from fastapi import Depends, HTTPException\n'), ((678, 706), 'sfm.utils.verify_api_auth_token', 'verify_api_auth_token', (['token'], {}), '(token)\n', (699, 706), False, 'from sfm.utils import verify_api_auth_token\n'), ((768, 830), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""Incorrect Credentials"""'}), "(status_code=403, detail='Incorrect Credentials')\n", (781, 830), False, 'from fastapi import Depends, HTTPException\n')]
import os from dotenv import load_dotenv from dateutil.parser import parse from sqlmodel import Session, select, SQLModel, create_engine import requests from youtube.models import YouTube load_dotenv() YT_CHANNEL = os.environ["YT_CHANNEL"] YOUTUBE_API_KEY = os.environ["YOUTUBE_API_KEY"] DATABASE_URL = os.environ["DATABASE_URL"] YOUTUBE_VIDEO = "youtube#video" BASE_URL = ( "https://www.googleapis.com/youtube/v3/search?key={key}" "&channelId={channel}&part=snippet,id&order=date&maxResults=20" ) engine = create_engine(DATABASE_URL, echo=False) def get_session(): with Session(engine) as session: yield session def create_db_and_tables(): SQLModel.metadata.create_all(engine) def get_videos_from_channel(channel: str = YT_CHANNEL) -> list[dict]: base_url = BASE_URL.format(key=YOUTUBE_API_KEY, channel=channel) next_page, url = None, base_url videos = [] while True: if next_page is not None: url = base_url + f"&pageToken={next_page}" response = requests.get(url).json() for vid in response["items"]: if vid["id"]["kind"] != "youtube#video": continue videos.append(vid) if "nextPageToken" not in response: break next_page = response["nextPageToken"] return videos def insert_youtube_videos(session: Session, videos: list[dict]) -> None: num_inserted = 0 for video in videos: video_id = video["id"]["videoId"] title = video["snippet"]["title"] description = video["snippet"]["description"] thumb = video["snippet"]["thumbnails"]["medium"]["url"] published = video["snippet"]["publishTime"] statement = select(YouTube).where(YouTube.video_id == video_id) results = session.exec(statement) if results.first() is not None: continue youtube = YouTube( video_id=video_id, title=title, description=description, thumb=thumb, published=parse(published), ) session.add(youtube) num_inserted += 1 session.commit() statement = select(YouTube) results = session.exec(statement) total_records = len(results.all()) print(f"Total records: {total_records} (newly inserted: {num_inserted})") if __name__ == "__main__": create_db_and_tables() videos = get_videos_from_channel() with Session(engine) as session: insert_youtube_videos(session, videos)
[ "sqlmodel.create_engine", "sqlmodel.Session", "sqlmodel.SQLModel.metadata.create_all", "sqlmodel.select" ]
[((191, 204), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (202, 204), False, 'from dotenv import load_dotenv\n'), ((522, 561), 'sqlmodel.create_engine', 'create_engine', (['DATABASE_URL'], {'echo': '(False)'}), '(DATABASE_URL, echo=False)\n', (535, 561), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((676, 712), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (704, 712), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2206, 2221), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (2212, 2221), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((592, 607), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (599, 607), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2481, 2496), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2488, 2496), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((1062, 1079), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1074, 1079), False, 'import requests\n'), ((1760, 1775), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (1766, 1775), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2083, 2099), 'dateutil.parser.parse', 'parse', (['published'], {}), '(published)\n', (2088, 2099), False, 'from dateutil.parser import parse\n')]
""" The Dirichlet, periodic and linear combination boundary condition classes, as well as the initial condition class. """ from __future__ import absolute_import import numpy as nm from sfepy.base.base import basestr, Container, Struct from sfepy.discrete.functions import Function import six def get_condition_value(val, functions, kind, name): """ Check a boundary/initial condition value type and return the value or corresponding function. """ if type(val) == str: if functions is not None: try: fun = functions[val] except IndexError: raise ValueError('unknown function %s given for %s %s!' % (val, kind, name)) else: raise ValueError('no functions given for %s %s!' % (kind, name)) elif (isinstance(val, Function) or nm.isscalar(val) or isinstance(val, nm.ndarray)): fun = val else: raise ValueError('unknown value type for %s %s!' % (kind, name)) return fun def _get_region(name, regions, bc_name): try: region = regions[name] except IndexError: msg = "no region '%s' used in condition %s!" % (name, bc_name) raise IndexError(msg) return region class Conditions(Container): """ Container for various conditions. """ @staticmethod def from_conf(conf, regions): conds = [] for key, cc in six.iteritems(conf): times = cc.get('times', None) if 'ebc' in key: region = _get_region(cc.region, regions, cc.name) cond = EssentialBC(cc.name, region, cc.dofs, key=key, times=times) elif 'epbc' in key: rs = [_get_region(ii, regions, cc.name) for ii in cc.region] cond = PeriodicBC(cc.name, rs, cc.dofs, cc.match, key=key, times=times) elif 'lcbc' in key: if isinstance(cc.region, basestr): rs = [_get_region(cc.region, regions, cc.name), None] else: rs = [_get_region(ii, regions, cc.name) for ii in cc.region] cond = LinearCombinationBC(cc.name, rs, cc.dofs, cc.dof_map_fun, cc.kind, key=key, times=times, arguments=cc.get('arguments', None)) elif 'ic' in key: region = _get_region(cc.region, regions, cc.name) cond = InitialCondition(cc.name, region, cc.dofs, key=key) else: raise ValueError('unknown condition type! (%s)' % key) conds.append(cond) obj = Conditions(conds) return obj def group_by_variables(self, groups=None): """ Group boundary conditions of each variable. Each condition is a group is a single condition. Parameters ---------- groups : dict, optional If present, update the `groups` dictionary. Returns ------- out : dict The dictionary with variable names as keys and lists of single condition instances as values. """ if groups is None: out = {} else: out = groups for cond in self: for single_cond in cond.iter_single(): vname = single_cond.dofs[0].split('.')[0] out.setdefault(vname, Conditions()).append(single_cond) return out def canonize_dof_names(self, dofs): """ Canonize the DOF names using the full list of DOFs of a variable. """ for cond in self: cond.canonize_dof_names(dofs) def sort(self): """ Sort boundary conditions by their key. """ self._objs.sort(key=lambda a: a.key) self.update() def zero_dofs(self): """ Set all boundary condition values to zero, if applicable. """ for cond in self: if isinstance(cond, EssentialBC): cond.zero_dofs() def _canonize(dofs, all_dofs): """ Helper function. """ vname, dd = dofs.split('.') if dd == 'all': cdofs = all_dofs elif dd[0] == '[': cdofs = [vname + '.' + ii.strip() for ii in dd[1:-1].split(',')] else: cdofs = [dofs] return cdofs class Condition(Struct): """ Common boundary condition methods. 
""" def __init__(self, name, **kwargs): Struct.__init__(self, name=name, **kwargs) self.is_single = False def iter_single(self): """ Create a single condition instance for each item in self.dofs and yield it. """ for dofs, val in six.iteritems(self.dofs): single_cond = self.copy(name=self.name) single_cond.is_single = True single_cond.dofs = [dofs, val] yield single_cond def canonize_dof_names(self, dofs): """ Canonize the DOF names using the full list of DOFs of a variable. Assumes single condition instance. """ self.dofs[0] = _canonize(self.dofs[0], dofs) class EssentialBC(Condition): """ Essential boundary condidion. Parameters ---------- name : str The boundary condition name. region : Region instance The region where the boundary condition is applied. dofs : dict The boundary condition specification defining the constrained DOFs and their values. key : str, optional The sorting key. times : list or str, optional The list of time intervals or a function returning True at time steps, when the condition applies. """ def __init__(self, name, region, dofs, key='', times=None): Condition.__init__(self, name=name, region=region, dofs=dofs, key=key, times=times) def zero_dofs(self): """ Set all essential boundary condition values to zero. """ if self.is_single: self.dofs[1] = 0.0 else: new_dofs = {} for key in six.iterkeys(self.dofs): new_dofs[key] = 0.0 self.dofs = new_dofs class PeriodicBC(Condition): """ Periodic boundary condidion. Parameters ---------- name : str The boundary condition name. regions : list of two Region instances The master region and the slave region where the DOFs should match. dofs : dict The boundary condition specification defining the DOFs in the master region and the corresponding DOFs in the slave region. match : str The name of function for matching corresponding nodes in the two regions. key : str, optional The sorting key. times : list or str, optional The list of time intervals or a function returning True at time steps, when the condition applies. """ def __init__(self, name, regions, dofs, match, key='', times=None): Condition.__init__(self, name=name, regions=regions, dofs=dofs, match=match, key=key, times=times) def canonize_dof_names(self, dofs): """ Canonize the DOF names using the full list of DOFs of a variable. Assumes single condition instance. """ self.dofs[0] = _canonize(self.dofs[0], dofs) self.dofs[1] = _canonize(self.dofs[1], dofs) class LinearCombinationBC(Condition): """ Linear combination boundary condidion. Parameters ---------- name : str The boundary condition name. regions : list of two Region instances The constrained (master) DOFs region and the new (slave) DOFs region. The latter can be None if new DOFs are not field variable DOFs. dofs : dict The boundary condition specification defining the constrained DOFs and the new DOFs (can be None). dof_map_fun : str The name of function for mapping the constrained DOFs to new DOFs (can be None). kind : str The linear combination condition kind. key : str, optional The sorting key. times : list or str, optional The list of time intervals or a function returning True at time steps, when the condition applies. arguments: tuple, optional Additional arguments, depending on the condition kind. 
""" def __init__(self, name, regions, dofs, dof_map_fun, kind, key='', times=None, arguments=None): Condition.__init__(self, name=name, regions=regions, dofs=dofs, dof_map_fun=dof_map_fun, kind=kind, key=key, times=times, arguments=arguments) def get_var_names(self): """ Get names of variables corresponding to the constrained and new DOFs. """ names = [self.dofs[0].split('.')[0]] if self.dofs[1] is not None: names.append(self.dofs[1].split('.')[0]) return names def canonize_dof_names(self, dofs0, dofs1=None): """ Canonize the DOF names using the full list of DOFs of a variable. Assumes single condition instance. """ self.dofs[0] = _canonize(self.dofs[0], dofs0) if self.dofs[1] is not None: self.dofs[1] = _canonize(self.dofs[1], dofs1) class InitialCondition(Condition): """ Initial condidion. Parameters ---------- name : str The initial condition name. region : Region instance The region where the initial condition is applied. dofs : dict The initial condition specification defining the constrained DOFs and their values. key : str, optional The sorting key. """ def __init__(self, name, region, dofs, key=''): Condition.__init__(self, name=name, region=region, dofs=dofs, key=key)
[ "sfepy.base.base.Struct.__init__" ]
[((1475, 1494), 'six.iteritems', 'six.iteritems', (['conf'], {}), '(conf)\n', (1488, 1494), False, 'import six\n'), ((4772, 4814), 'sfepy.base.base.Struct.__init__', 'Struct.__init__', (['self'], {'name': 'name'}), '(self, name=name, **kwargs)\n', (4787, 4814), False, 'from sfepy.base.base import basestr, Container, Struct\n'), ((5015, 5039), 'six.iteritems', 'six.iteritems', (['self.dofs'], {}), '(self.dofs)\n', (5028, 5039), False, 'import six\n'), ((869, 885), 'numpy.isscalar', 'nm.isscalar', (['val'], {}), '(val)\n', (880, 885), True, 'import numpy as nm\n'), ((6435, 6458), 'six.iterkeys', 'six.iterkeys', (['self.dofs'], {}), '(self.dofs)\n', (6447, 6458), False, 'import six\n')]
r""" Poisson equation. This example demonstrates parametric study capabilities of Application classes. In particular (written in the strong form): .. math:: c \Delta t = f \mbox{ in } \Omega, t = 2 \mbox{ on } \Gamma_1 \;, t = -2 \mbox{ on } \Gamma_2 \;, f = 1 \mbox{ in } \Omega_1 \;, f = 0 \mbox{ otherwise,} where :math:`\Omega` is a square domain, :math:`\Omega_1 \in \Omega` is a circular domain. Now let's see what happens if :math:`\Omega_1` diameter changes. Run:: $ ./simple.py <this file> and then look in 'output/r_omega1' directory, try for example:: $ ./postproc.py output/r_omega1/circles_in_square*.vtk Remark: this simple case could be achieved also by defining :math:`\Omega_1` by a time-dependent function and solve the static problem as a time-dependent problem. However, the approach below is much more general. Find :math:`t` such that: .. math:: \int_{\Omega} c \nabla s \cdot \nabla t = 0 \;, \quad \forall s \;. """ from __future__ import absolute_import import os import numpy as nm from sfepy import data_dir from sfepy.base.base import output # Mesh. filename_mesh = data_dir + '/meshes/2d/special/circles_in_square.vtk' # Options. The value of 'parametric_hook' is the function that does the # parametric study. options = { 'nls' : 'newton', # Nonlinear solver 'ls' : 'ls', # Linear solver 'parametric_hook' : 'vary_omega1_size', 'output_dir' : 'output/r_omega1', } # Domain and subdomains. default_diameter = 0.25 regions = { 'Omega' : 'all', 'Gamma_1' : ('vertices in (x < -0.999)', 'facet'), 'Gamma_2' : ('vertices in (x > 0.999)', 'facet'), 'Omega_1' : 'vertices by select_circ', } # FE field defines the FE approximation: 2_3_P1 = 2D, P1 on triangles. field_1 = { 'name' : 'temperature', 'dtype' : 'real', 'shape' : (1,), 'region' : 'Omega', 'approx_order' : 1, } # Unknown and test functions (FE sense). variables = { 't' : ('unknown field', 'temperature', 0), 's' : ('test field', 'temperature', 't'), } # Dirichlet boundary conditions. ebcs = { 't1' : ('Gamma_1', {'t.0' : 2.0}), 't2' : ('Gamma_2', {'t.0' : -2.0}), } # Material coefficient c and source term value f. material_1 = { 'name' : 'coef', 'values' : { 'val' : 1.0, } } material_2 = { 'name' : 'source', 'values' : { 'val' : 10.0, } } # Numerical quadrature and the equation. integral_1 = { 'name' : 'i', 'order' : 2, } equations = { 'Poisson' : """dw_laplace.i.Omega( coef.val, s, t ) = dw_volume_lvf.i.Omega_1( source.val, s )""" } # Solvers. solver_0 = { 'name' : 'ls', 'kind' : 'ls.scipy_direct', } solver_1 = { 'name' : 'newton', 'kind' : 'nls.newton', 'i_max' : 1, 'eps_a' : 1e-10, 'eps_r' : 1.0, 'macheps' : 1e-16, 'lin_red' : 1e-2, # Linear system error < (eps_a * lin_red). 'ls_red' : 0.1, 'ls_red_warp' : 0.001, 'ls_on' : 1.1, 'ls_min' : 1e-5, 'check' : 0, 'delta' : 1e-6, } functions = { 'select_circ': (lambda coors, domain=None: select_circ(coors[:,0], coors[:,1], 0, default_diameter),), } # Functions. def select_circ( x, y, z, diameter ): """Select circular subdomain of a given diameter.""" r = nm.sqrt( x**2 + y**2 ) out = nm.where(r < diameter)[0] n = out.shape[0] if n <= 3: raise ValueError( 'too few vertices selected! (%d)' % n ) return out def vary_omega1_size( problem ): """Vary size of \Omega1. Saves also the regions into options['output_dir']. Input: problem: Problem instance Return: a generator object: 1. creates new (modified) problem 2. yields the new (modified) problem and output container 3. use the output container for some logging 4. 
yields None (to signal next iteration to Application) """ from sfepy.discrete import Problem from sfepy.solvers.ts import get_print_info output.prefix = 'vary_omega1_size:' diameters = nm.linspace( 0.1, 0.6, 7 ) + 0.001 ofn_trunk, output_format = problem.ofn_trunk, problem.output_format output_dir = problem.output_dir join = os.path.join conf = problem.conf cf = conf.get_raw( 'functions' ) n_digit, aux, d_format = get_print_info( len( diameters ) + 1 ) for ii, diameter in enumerate( diameters ): output( 'iteration %d: diameter %3.2f' % (ii, diameter) ) cf['select_circ'] = (lambda coors, domain=None: select_circ(coors[:,0], coors[:,1], 0, diameter),) conf.edit('functions', cf) problem = Problem.from_conf(conf) problem.save_regions( join( output_dir, ('regions_' + d_format) % ii ), ['Omega_1'] ) region = problem.domain.regions['Omega_1'] if not region.has_cells(): raise ValueError('region %s has no cells!' % region.name) ofn_trunk = ofn_trunk + '_' + (d_format % ii) problem.setup_output(output_filename_trunk=ofn_trunk, output_dir=output_dir, output_format=output_format) out = [] yield problem, out out_problem, state = out[-1] filename = join( output_dir, ('log_%s.txt' % d_format) % ii ) fd = open( filename, 'w' ) log_item = '$r(\Omega_1)$: %f\n' % diameter fd.write( log_item ) fd.write( 'solution:\n' ) nm.savetxt(fd, state()) fd.close() yield None
[ "sfepy.base.base.output", "sfepy.discrete.Problem.from_conf" ]
[((3347, 3371), 'numpy.sqrt', 'nm.sqrt', (['(x ** 2 + y ** 2)'], {}), '(x ** 2 + y ** 2)\n', (3354, 3371), True, 'import numpy as nm\n'), ((3381, 3403), 'numpy.where', 'nm.where', (['(r < diameter)'], {}), '(r < diameter)\n', (3389, 3403), True, 'import numpy as nm\n'), ((4097, 4121), 'numpy.linspace', 'nm.linspace', (['(0.1)', '(0.6)', '(7)'], {}), '(0.1, 0.6, 7)\n', (4108, 4121), True, 'import numpy as nm\n'), ((4450, 4505), 'sfepy.base.base.output', 'output', (["('iteration %d: diameter %3.2f' % (ii, diameter))"], {}), "('iteration %d: diameter %3.2f' % (ii, diameter))\n", (4456, 4505), False, 'from sfepy.base.base import output\n'), ((4699, 4722), 'sfepy.discrete.Problem.from_conf', 'Problem.from_conf', (['conf'], {}), '(conf)\n', (4716, 4722), False, 'from sfepy.discrete import Problem\n')]