code | apis | extract_api
---|---|---|
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0,parentdir)
import math
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
import time
import pybullet as p
from . import kuka
import random
import pybullet_data
from pkg_resources import parse_version
maxSteps = 1000
RENDER_HEIGHT = 720
RENDER_WIDTH = 960
class KukaCamGymEnv(gym.Env):
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second' : 50
}
def __init__(self,
urdfRoot=pybullet_data.getDataPath(),
actionRepeat=1,
isEnableSelfCollision=True,
renders=False,
isDiscrete=False):
self._timeStep = 1./240.
self._urdfRoot = urdfRoot
self._actionRepeat = actionRepeat
self._isEnableSelfCollision = isEnableSelfCollision
self._observation = []
self._envStepCounter = 0
self._renders = renders
self._width = 341
self._height = 256
self._isDiscrete = isDiscrete
self.terminated = 0
self._p = p
# camera parameters used by the render method; values mirror the debug visualizer camera below
self._cam_dist = 1.3
self._cam_yaw = 180
self._cam_pitch = -41
if self._renders:
cid = p.connect(p.SHARED_MEMORY)
if (cid<0):
p.connect(p.GUI)
p.resetDebugVisualizerCamera(1.3,180,-41,[0.52,-0.2,-0.33])
else:
p.connect(p.DIRECT)
#timinglog = p.startStateLogging(p.STATE_LOGGING_PROFILE_TIMINGS, "kukaTimings.json")
self._seed()
self.reset()
observationDim = len(self.getExtendedObservation())
#print("observationDim")
#print(observationDim)
observation_high = np.array([np.finfo(np.float32).max] * observationDim)
if (self._isDiscrete):
self.action_space = spaces.Discrete(7)
else:
action_dim = 3
self._action_bound = 1
action_high = np.array([self._action_bound] * action_dim)
self.action_space = spaces.Box(-action_high, action_high)
self.observation_space = spaces.Box(low=0, high=255, shape=(self._height, self._width, 4))
self.viewer = None
def _reset(self):
self.terminated = 0
p.resetSimulation()
p.setPhysicsEngineParameter(numSolverIterations=150)
p.setTimeStep(self._timeStep)
p.loadURDF(os.path.join(self._urdfRoot,"plane.urdf"),[0,0,-1])
p.loadURDF(os.path.join(self._urdfRoot,"table/table.urdf"), 0.5000000,0.00000,-.820000,0.000000,0.000000,0.0,1.0)
xpos = 0.5 +0.2*random.random()
ypos = 0 +0.25*random.random()
ang = 3.1415925438*random.random()
orn = p.getQuaternionFromEuler([0,0,ang])
self.blockUid =p.loadURDF(os.path.join(self._urdfRoot,"block.urdf"), xpos,ypos,-0.1,orn[0],orn[1],orn[2],orn[3])
p.setGravity(0,0,-10)
self._kuka = kuka.Kuka(urdfRootPath=self._urdfRoot, timeStep=self._timeStep)
self._envStepCounter = 0
p.stepSimulation()
self._observation = self.getExtendedObservation()
return np.array(self._observation)
def __del__(self):
p.disconnect()
def _seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def getExtendedObservation(self):
#camEyePos = [0.03,0.236,0.54]
#distance = 1.06
#pitch=-56
#yaw = 258
#roll=0
#upAxisIndex = 2
#camInfo = p.getDebugVisualizerCamera()
#print("width,height")
#print(camInfo[0])
#print(camInfo[1])
#print("viewMatrix")
#print(camInfo[2])
#print("projectionMatrix")
#print(camInfo[3])
#viewMat = camInfo[2]
#viewMat = p.computeViewMatrixFromYawPitchRoll(camEyePos,distance,yaw, pitch,roll,upAxisIndex)
viewMat = [-0.5120397806167603, 0.7171027660369873, -0.47284144163131714, 0.0, -0.8589617609977722, -0.42747554183006287, 0.28186774253845215, 0.0, 0.0, 0.5504802465438843, 0.8348482847213745, 0.0, 0.1925382763147354, -0.24935829639434814, -0.4401884973049164, 1.0]
#projMatrix = camInfo[3]#[0.7499999403953552, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0000200271606445, -1.0, 0.0, 0.0, -0.02000020071864128, 0.0]
projMatrix = [0.75, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0000200271606445, -1.0, 0.0, 0.0, -0.02000020071864128, 0.0]
img_arr = p.getCameraImage(width=self._width,height=self._height,viewMatrix=viewMat,projectionMatrix=projMatrix)
rgb=img_arr[2]
np_img_arr = np.reshape(rgb, (self._height, self._width, 4))
self._observation = np_img_arr
return self._observation
def _step(self, action):
if (self._isDiscrete):
dv = 0.01
dx = [0,-dv,dv,0,0,0,0][action]
dy = [0,0,0,-dv,dv,0,0][action]
da = [0,0,0,0,0,-0.1,0.1][action]
f = 0.3
realAction = [dx,dy,-0.002,da,f]
else:
dv = 0.01
dx = action[0] * dv
dy = action[1] * dv
da = action[2] * 0.1
f = 0.3
realAction = [dx,dy,-0.002,da,f]
return self.step2( realAction)
def step2(self, action):
for i in range(self._actionRepeat):
self._kuka.applyAction(action)
p.stepSimulation()
if self._termination():
break
#self._observation = self.getExtendedObservation()
self._envStepCounter += 1
self._observation = self.getExtendedObservation()
if self._renders:
time.sleep(self._timeStep)
#print("self._envStepCounter")
#print(self._envStepCounter)
done = self._termination()
reward = self._reward()
#print("len=%r" % len(self._observation))
return np.array(self._observation), reward, done, {}
def _render(self, mode='human', close=False):
if mode != "rgb_array":
return np.array([])
base_pos, orn = self._p.getBasePositionAndOrientation(self._kuka.kukaUid)
view_matrix = self._p.computeViewMatrixFromYawPitchRoll(
cameraTargetPosition=base_pos,
distance=self._cam_dist,
yaw=self._cam_yaw,
pitch=self._cam_pitch,
roll=0,
upAxisIndex=2)
proj_matrix = self._p.computeProjectionMatrixFOV(
fov=60, aspect=float(RENDER_WIDTH)/RENDER_HEIGHT,
nearVal=0.1, farVal=100.0)
(_, _, px, _, _) = self._p.getCameraImage(
width=RENDER_WIDTH, height=RENDER_HEIGHT, viewMatrix=view_matrix,
projectionMatrix=proj_matrix, renderer=self._p.ER_BULLET_HARDWARE_OPENGL)
rgb_array = np.array(px)
rgb_array = rgb_array[:, :, :3]
return rgb_array
def _termination(self):
#print (self._kuka.endEffectorPos[2])
state = p.getLinkState(self._kuka.kukaUid,self._kuka.kukaEndEffectorIndex)
actualEndEffectorPos = state[0]
#print("self._envStepCounter")
#print(self._envStepCounter)
if (self.terminated or self._envStepCounter>maxSteps):
self._observation = self.getExtendedObservation()
return True
maxDist = 0.005
closestPoints = p.getClosestPoints(self._kuka.trayUid, self._kuka.kukaUid,maxDist)
if (len(closestPoints)):#(actualEndEffectorPos[2] <= -0.43):
self.terminated = 1
#print("closing gripper, attempting grasp")
#start grasp and terminate
fingerAngle = 0.3
for i in range (100):
graspAction = [0,0,0.0001,0,fingerAngle]
self._kuka.applyAction(graspAction)
p.stepSimulation()
fingerAngle = fingerAngle-(0.3/100.)
if (fingerAngle<0):
fingerAngle=0
for i in range (1000):
graspAction = [0,0,0.001,0,fingerAngle]
self._kuka.applyAction(graspAction)
p.stepSimulation()
blockPos,blockOrn=p.getBasePositionAndOrientation(self.blockUid)
if (blockPos[2] > 0.23):
#print("BLOCKPOS!")
#print(blockPos[2])
break
state = p.getLinkState(self._kuka.kukaUid,self._kuka.kukaEndEffectorIndex)
actualEndEffectorPos = state[0]
if (actualEndEffectorPos[2]>0.5):
break
self._observation = self.getExtendedObservation()
return True
return False
def _reward(self):
#rewards is height of target object
blockPos,blockOrn=p.getBasePositionAndOrientation(self.blockUid)
closestPoints = p.getClosestPoints(self.blockUid,self._kuka.kukaUid,1000, -1, self._kuka.kukaEndEffectorIndex)
reward = -1000
numPt = len(closestPoints)
#print(numPt)
if (numPt>0):
#print("reward:")
reward = -closestPoints[0][8]*10
if (blockPos[2] >0.2):
#print("grasped a block!!!")
#print("self._envStepCounter")
#print(self._envStepCounter)
reward = reward+1000
#print("reward")
#print(reward)
return reward
if parse_version(gym.__version__)>=parse_version('0.9.6'):
render = _render
reset = _reset
seed = _seed
step = _step
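# Minimal usage sketch (added for illustration, not part of the original file; assumes
# pybullet, gym and the pybullet_data assets are installed and the relative kuka import resolves):
demo_env = KukaCamGymEnv(renders=False, isDiscrete=True)
demo_obs = demo_env.reset()
demo_obs, demo_reward, demo_done, _ = demo_env.step(demo_env.action_space.sample())
print(demo_obs.shape, demo_reward, demo_done)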
| [
"pybullet_data.getDataPath",
"os.sys.path.insert",
"pybullet.setTimeStep",
"pybullet.setGravity",
"numpy.array",
"pybullet.setPhysicsEngineParameter",
"pybullet.disconnect",
"time.sleep",
"gym.utils.seeding.np_random",
"numpy.reshape",
"pybullet.connect",
"pybullet.getCameraImage",
"pybullet.getQuaternionFromEuler",
"pybullet.resetDebugVisualizerCamera",
"pybullet.resetSimulation",
"gym.spaces.Discrete",
"os.path.dirname",
"pkg_resources.parse_version",
"numpy.finfo",
"pybullet.getClosestPoints",
"pybullet.getLinkState",
"pybullet.getBasePositionAndOrientation",
"inspect.currentframe",
"os.path.join",
"gym.spaces.Box",
"pybullet.stepSimulation",
"random.random"
] | [((164, 196), 'os.sys.path.insert', 'os.sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (182, 196), False, 'import os, inspect\n'), ((135, 162), 'os.path.dirname', 'os.path.dirname', (['currentdir'], {}), '(currentdir)\n', (150, 162), False, 'import os, inspect\n'), ((656, 683), 'pybullet_data.getDataPath', 'pybullet_data.getDataPath', ([], {}), '()\n', (681, 683), False, 'import pybullet_data\n'), ((1987, 2052), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(self._height, self._width, 4)'}), '(low=0, high=255, shape=(self._height, self._width, 4))\n', (1997, 2052), False, 'from gym import spaces\n'), ((2125, 2144), 'pybullet.resetSimulation', 'p.resetSimulation', ([], {}), '()\n', (2142, 2144), True, 'import pybullet as p\n'), ((2149, 2201), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', ([], {'numSolverIterations': '(150)'}), '(numSolverIterations=150)\n', (2176, 2201), True, 'import pybullet as p\n'), ((2206, 2235), 'pybullet.setTimeStep', 'p.setTimeStep', (['self._timeStep'], {}), '(self._timeStep)\n', (2219, 2235), True, 'import pybullet as p\n'), ((2543, 2580), 'pybullet.getQuaternionFromEuler', 'p.getQuaternionFromEuler', (['[0, 0, ang]'], {}), '([0, 0, ang])\n', (2567, 2580), True, 'import pybullet as p\n'), ((2701, 2724), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-10)'], {}), '(0, 0, -10)\n', (2713, 2724), True, 'import pybullet as p\n'), ((2837, 2855), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (2853, 2855), True, 'import pybullet as p\n'), ((2921, 2948), 'numpy.array', 'np.array', (['self._observation'], {}), '(self._observation)\n', (2929, 2948), True, 'import numpy as np\n'), ((2975, 2989), 'pybullet.disconnect', 'p.disconnect', ([], {}), '()\n', (2987, 2989), True, 'import pybullet as p\n'), ((3048, 3071), 'gym.utils.seeding.np_random', 'seeding.np_random', (['seed'], {}), '(seed)\n', (3065, 3071), False, 'from gym.utils import seeding\n'), ((4188, 4297), 'pybullet.getCameraImage', 'p.getCameraImage', ([], {'width': 'self._width', 'height': 'self._height', 'viewMatrix': 'viewMat', 'projectionMatrix': 'projMatrix'}), '(width=self._width, height=self._height, viewMatrix=viewMat,\n projectionMatrix=projMatrix)\n', (4204, 4297), True, 'import pybullet as p\n'), ((4329, 4376), 'numpy.reshape', 'np.reshape', (['rgb', '(self._height, self._width, 4)'], {}), '(rgb, (self._height, self._width, 4))\n', (4339, 4376), True, 'import numpy as np\n'), ((6273, 6285), 'numpy.array', 'np.array', (['px'], {}), '(px)\n', (6281, 6285), True, 'import numpy as np\n'), ((6424, 6491), 'pybullet.getLinkState', 'p.getLinkState', (['self._kuka.kukaUid', 'self._kuka.kukaEndEffectorIndex'], {}), '(self._kuka.kukaUid, self._kuka.kukaEndEffectorIndex)\n', (6438, 6491), True, 'import pybullet as p\n'), ((6769, 6836), 'pybullet.getClosestPoints', 'p.getClosestPoints', (['self._kuka.trayUid', 'self._kuka.kukaUid', 'maxDist'], {}), '(self._kuka.trayUid, self._kuka.kukaUid, maxDist)\n', (6787, 6836), True, 'import pybullet as p\n'), ((7971, 8017), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['self.blockUid'], {}), '(self.blockUid)\n', (8002, 8017), True, 'import pybullet as p\n'), ((8038, 8139), 'pybullet.getClosestPoints', 'p.getClosestPoints', (['self.blockUid', 'self._kuka.kukaUid', '(1000)', '(-1)', 'self._kuka.kukaEndEffectorIndex'], {}), '(self.blockUid, self._kuka.kukaUid, 1000, -1, self._kuka.\n kukaEndEffectorIndex)\n', (8056, 8139), True, 'import pybullet 
as p\n'), ((8509, 8539), 'pkg_resources.parse_version', 'parse_version', (['gym.__version__'], {}), '(gym.__version__)\n', (8522, 8539), False, 'from pkg_resources import parse_version\n'), ((8541, 8563), 'pkg_resources.parse_version', 'parse_version', (['"""0.9.6"""'], {}), "('0.9.6')\n", (8554, 8563), False, 'from pkg_resources import parse_version\n'), ((81, 103), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (101, 103), False, 'import os, inspect\n'), ((1211, 1237), 'pybullet.connect', 'p.connect', (['p.SHARED_MEMORY'], {}), '(p.SHARED_MEMORY)\n', (1220, 1237), True, 'import pybullet as p\n'), ((1288, 1352), 'pybullet.resetDebugVisualizerCamera', 'p.resetDebugVisualizerCamera', (['(1.3)', '(180)', '(-41)', '[0.52, -0.2, -0.33]'], {}), '(1.3, 180, -41, [0.52, -0.2, -0.33])\n', (1316, 1352), True, 'import pybullet as p\n'), ((1364, 1383), 'pybullet.connect', 'p.connect', (['p.DIRECT'], {}), '(p.DIRECT)\n', (1373, 1383), True, 'import pybullet as p\n'), ((1751, 1769), 'gym.spaces.Discrete', 'spaces.Discrete', (['(7)'], {}), '(7)\n', (1766, 1769), False, 'from gym import spaces\n'), ((1850, 1893), 'numpy.array', 'np.array', (['([self._action_bound] * action_dim)'], {}), '([self._action_bound] * action_dim)\n', (1858, 1893), True, 'import numpy as np\n'), ((1920, 1957), 'gym.spaces.Box', 'spaces.Box', (['(-action_high)', 'action_high'], {}), '(-action_high, action_high)\n', (1930, 1957), False, 'from gym import spaces\n'), ((2251, 2293), 'os.path.join', 'os.path.join', (['self._urdfRoot', '"""plane.urdf"""'], {}), "(self._urdfRoot, 'plane.urdf')\n", (2263, 2293), False, 'import os, inspect\n'), ((2319, 2367), 'os.path.join', 'os.path.join', (['self._urdfRoot', '"""table/table.urdf"""'], {}), "(self._urdfRoot, 'table/table.urdf')\n", (2331, 2367), False, 'import os, inspect\n'), ((2517, 2532), 'random.random', 'random.random', ([], {}), '()\n', (2530, 2532), False, 'import random\n'), ((2609, 2651), 'os.path.join', 'os.path.join', (['self._urdfRoot', '"""block.urdf"""'], {}), "(self._urdfRoot, 'block.urdf')\n", (2621, 2651), False, 'import os, inspect\n'), ((4988, 5006), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (5004, 5006), True, 'import pybullet as p\n'), ((5225, 5251), 'time.sleep', 'time.sleep', (['self._timeStep'], {}), '(self._timeStep)\n', (5235, 5251), False, 'import time\n'), ((5439, 5466), 'numpy.array', 'np.array', (['self._observation'], {}), '(self._observation)\n', (5447, 5466), True, 'import numpy as np\n'), ((5575, 5587), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5583, 5587), True, 'import numpy as np\n'), ((1265, 1281), 'pybullet.connect', 'p.connect', (['p.GUI'], {}), '(p.GUI)\n', (1274, 1281), True, 'import pybullet as p\n'), ((2443, 2458), 'random.random', 'random.random', ([], {}), '()\n', (2456, 2458), False, 'import random\n'), ((2478, 2493), 'random.random', 'random.random', ([], {}), '()\n', (2491, 2493), False, 'import random\n'), ((7165, 7183), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (7181, 7183), True, 'import pybullet as p\n'), ((7411, 7429), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (7427, 7429), True, 'import pybullet as p\n'), ((7456, 7502), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['self.blockUid'], {}), '(self.blockUid)\n', (7487, 7502), True, 'import pybullet as p\n'), ((7628, 7695), 'pybullet.getLinkState', 'p.getLinkState', (['self._kuka.kukaUid', 'self._kuka.kukaEndEffectorIndex'], {}), '(self._kuka.kukaUid, 
self._kuka.kukaEndEffectorIndex)\n', (7642, 7695), True, 'import pybullet as p\n'), ((1654, 1674), 'numpy.finfo', 'np.finfo', (['np.float32'], {}), '(np.float32)\n', (1662, 1674), True, 'import numpy as np\n')] |
# ------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License (MIT). See LICENSE in the repo root for license information.
# ------------------------------------------------------------------------------------------
# These tiling implementations are adapted from PANDA Kaggle solutions, for example:
# https://github.com/kentaroy47/Kaggle-PANDA-1st-place-solution/blob/master/src/data_process/a00_save_tiles.py
from typing import Any, Optional, Tuple
import numpy as np
def get_1d_padding(length: int, tile_size: int) -> Tuple[int, int]:
"""Computes symmetric padding for `length` to be divisible by `tile_size`."""
pad = (tile_size - length % tile_size) % tile_size
return (pad // 2, pad - pad // 2)
def pad_for_tiling_2d(array: np.ndarray, tile_size: int, channels_first: Optional[bool] = True,
**pad_kwargs: Any) -> Tuple[np.ndarray, np.ndarray]:
"""Symmetrically pads a 2D `array` such that both dimensions are divisible by `tile_size`.
:param array: 2D image array.
:param tile_size: Width/height of each tile in pixels.
:param channels_first: Whether `array` is in CHW (`True`, default) or HWC (`False`) layout.
:param pad_kwargs: Keyword arguments to be passed to `np.pad()` (e.g. `constant_values=0`).
:return: A tuple containing:
- `padded_array`: Resulting array, in the same CHW/HWC layout as the input.
- `offset`: XY offset introduced by the padding. Add this to coordinates relative to the
original array to obtain indices for the padded array.
"""
height, width = array.shape[1:] if channels_first else array.shape[:-1]
padding_h = get_1d_padding(height, tile_size)
padding_w = get_1d_padding(width, tile_size)
padding = [padding_h, padding_w]
channels_axis = 0 if channels_first else 2
padding.insert(channels_axis, (0, 0)) # zero padding on channels axis
padded_array = np.pad(array, padding, **pad_kwargs)
offset = (padding_w[0], padding_h[0])
return padded_array, np.array(offset)
def tile_array_2d(array: np.ndarray, tile_size: int, channels_first: Optional[bool] = True,
**pad_kwargs: Any) -> Tuple[np.ndarray, np.ndarray]:
"""Split an image array into square non-overlapping tiles.
The array will be padded symmetrically if its dimensions are not exact multiples of `tile_size`.
:param array: Image array.
:param tile_size: Width/height of each tile in pixels.
:param pad_kwargs: Keyword arguments to be passed to `np.pad()` (e.g. `constant_values=0`).
:param channels_first: Whether `array` is in CHW (`True`, default) or HWC (`False`) layout.
:return: A tuple containing:
- `tiles`: A batch of tiles in NCHW layout.
- `coords`: XY coordinates of each tile, in the same order.
"""
padded_array, (offset_w, offset_h) = pad_for_tiling_2d(array, tile_size, channels_first, **pad_kwargs)
if channels_first:
channels, height, width = padded_array.shape
else:
height, width, channels = padded_array.shape
n_tiles_h = height // tile_size
n_tiles_w = width // tile_size
if channels_first:
intermediate_shape = (channels, n_tiles_h, tile_size, n_tiles_w, tile_size)
axis_order = (1, 3, 0, 2, 4) # (n_tiles_h, n_tiles_w, channels, tile_size, tile_size)
output_shape = (n_tiles_h * n_tiles_w, channels, tile_size, tile_size)
else:
intermediate_shape = (n_tiles_h, tile_size, n_tiles_w, tile_size, channels)
axis_order = (0, 2, 1, 3, 4) # (n_tiles_h, n_tiles_w, tile_size, tile_size, channels)
output_shape = (n_tiles_h * n_tiles_w, tile_size, tile_size, channels)
tiles = padded_array.reshape(intermediate_shape) # Split width and height axes
tiles = tiles.transpose(axis_order)
tiles = tiles.reshape(output_shape) # Flatten tile batch dimension
# Compute top-left coordinates of every tile, relative to the original array's origin
coords_h = tile_size * np.arange(n_tiles_h) - offset_h
coords_w = tile_size * np.arange(n_tiles_w) - offset_w
# Shape: (n_tiles_h * n_tiles_w, 2)
coords = np.stack(np.meshgrid(coords_w, coords_h), axis=-1).reshape(-1, 2)
return tiles, coords
def assemble_tiles_2d(tiles: np.ndarray, coords: np.ndarray, fill_value: Optional[float] = np.nan,
channels_first: Optional[bool] = True) -> Tuple[np.ndarray, np.ndarray]:
"""Assembles a 2D array from sequences of tiles and coordinates.
:param tiles: Stack of tiles with batch dimension first.
:param coords: XY tile coordinates, assumed to be spaced by multiples of `tile_size` (shape: [N, 2]).
:param fill_value: Value to assign to empty elements (default: `NaN`).
:param channels_first: Whether each tile is in CHW (`True`, default) or HWC (`False`) layout.
:return: A tuple containing:
- `array`: The reassembled 2D array with the smallest dimensions to contain all given tiles.
- `offset`: XY offset introduced by the assembly (the negated minimum tile XY coordinates).
Add this to tile coordinates to obtain indices for the assembled array.
"""
if coords.shape[0] != tiles.shape[0]:
raise ValueError(f"Tile coordinates and values must have the same length, "
f"got {coords.shape[0]} and {tiles.shape[0]}")
if channels_first:
n_tiles, channels, tile_size, _ = tiles.shape
else:
n_tiles, tile_size, _, channels = tiles.shape
tile_xs, tile_ys = coords.T
x_min, x_max = min(tile_xs), max(tile_xs + tile_size)
y_min, y_max = min(tile_ys), max(tile_ys + tile_size)
width = x_max - x_min
height = y_max - y_min
output_shape = (channels, height, width) if channels_first else (height, width, channels)
array = np.full(output_shape, fill_value)
offset = np.array([-x_min, -y_min])
for idx in range(n_tiles):
row = coords[idx, 1] + offset[1]
col = coords[idx, 0] + offset[0]
if channels_first:
array[:, row:row + tile_size, col:col + tile_size] = tiles[idx]
else:
array[row:row + tile_size, col:col + tile_size, :] = tiles[idx]
return array, offset
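# Usage sketch (added for illustration, not part of the original module): tile a random
# CHW image and reassemble it; the shapes below are arbitrary demo values.
demo_image = np.random.rand(3, 300, 500)
demo_tiles, demo_coords = tile_array_2d(demo_image, tile_size=224, constant_values=0)
demo_recon, demo_offset = assemble_tiles_2d(demo_tiles, demo_coords, fill_value=0.0)
print(demo_tiles.shape, demo_coords.shape, demo_recon.shape, demo_offset)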
| [
"numpy.array",
"numpy.meshgrid",
"numpy.full",
"numpy.pad",
"numpy.arange"
] | [((2038, 2074), 'numpy.pad', 'np.pad', (['array', 'padding'], {}), '(array, padding, **pad_kwargs)\n', (2044, 2074), True, 'import numpy as np\n'), ((5985, 6018), 'numpy.full', 'np.full', (['output_shape', 'fill_value'], {}), '(output_shape, fill_value)\n', (5992, 6018), True, 'import numpy as np\n'), ((6033, 6059), 'numpy.array', 'np.array', (['[-x_min, -y_min]'], {}), '([-x_min, -y_min])\n', (6041, 6059), True, 'import numpy as np\n'), ((2142, 2158), 'numpy.array', 'np.array', (['offset'], {}), '(offset)\n', (2150, 2158), True, 'import numpy as np\n'), ((4115, 4135), 'numpy.arange', 'np.arange', (['n_tiles_h'], {}), '(n_tiles_h)\n', (4124, 4135), True, 'import numpy as np\n'), ((4174, 4194), 'numpy.arange', 'np.arange', (['n_tiles_w'], {}), '(n_tiles_w)\n', (4183, 4194), True, 'import numpy as np\n'), ((4268, 4299), 'numpy.meshgrid', 'np.meshgrid', (['coords_w', 'coords_h'], {}), '(coords_w, coords_h)\n', (4279, 4299), True, 'import numpy as np\n')] |
from math import pi
from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace
from numpy.linalg import eig, norm
class HartreeFock():
zeta = array([38.474970, 5.782948, 1.242567, 0.298073])
num_aos = len(zeta)
num_mos = 0
energy_tolerance = 0.0001; density_tolerance = 0.001
prev_energy = 0
prev_density = []
def __init__(self, num_elec):
# Make sure we can pair electrons
if num_elec % 2 != 0:
raise Exception("Can't do a RHF with", num_elec, "electrons.")
else:
print("Restricted Hartree-Fock with", num_elec, "electron(s).")
# We're RHF, so pair up spins in each molecular orbital
self.num_mos = int(num_elec / 2)
if self.num_mos > self.num_aos:
raise Exception("Can't create", self.num_mos, "molecular orbital(s) from", self.num_aos, "atomic orbital(s).")
else:
print(self.num_aos, "atomic orbital(s) and", self.num_mos, "molecular orbital(s).")
print("Zeta: ", self.zeta)
self.prev_density = ndarray(shape=(self.num_aos,self.num_aos),dtype=float, order='C')
def one_electron_integrals(self):
def overlap_kernel(zeta_i, zeta_j):
return pow(pi / (zeta_i + zeta_j), 1.5)
def kinetic_kernel(zeta_i, zeta_j):
return 3 * pow(pi, 1.5) * (zeta_i * zeta_j) / pow(zeta_i + zeta_j, 2.5)
def nucattr_kernel(zeta_i, zeta_j):
return (-4 * pi) / (zeta_i + zeta_j)
# Initialise our matrices
overlap = ndarray(shape=(self.num_aos,self.num_aos), dtype=float, order='C')
kinetic = ndarray(shape=(self.num_aos,self.num_aos), dtype=float, order='C')
nucattr = ndarray(shape=(self.num_aos,self.num_aos), dtype=float, order='C')
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
overlap[i_ao,j_ao] = overlap_kernel(self.zeta[i_ao], self.zeta[j_ao])
kinetic[i_ao,j_ao] = kinetic_kernel(self.zeta[i_ao], self.zeta[j_ao])
nucattr[i_ao,j_ao] = nucattr_kernel(self.zeta[i_ao], self.zeta[j_ao])
return overlap, kinetic, nucattr
def two_electron_integrals(self):
def tei_kernel(zeta_i, zeta_j, zeta_k, zeta_l):
temp_1 = (zeta_i + zeta_j) * (zeta_k + zeta_l)
temp_2 = sqrt(zeta_i + zeta_j + zeta_k + zeta_l)
return 2 * pow(pi, 2.5) / (temp_1 * temp_2)
teis = ndarray(shape=(self.num_aos,self.num_aos,self.num_aos,self.num_aos), dtype=float, order='C')
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
for k_ao in range(self.num_aos):
for l_ao in range(self.num_aos):
teis[i_ao,j_ao,k_ao,l_ao] = tei_kernel(self.zeta[i_ao], self.zeta[j_ao], self.zeta[k_ao], self.zeta[l_ao])
return teis
def basis_transformation_matrix(self, overlap):
# Get the eigenvalues and eigenvectors of the overlap matrix
overlap_evals, overlap_evecs = eig(overlap)
# Create diagonal matrix with entries given by inverse of eigenvalues of
# overlap matrix
try:
inv_sqrt_evals = diag(divide(1., sqrt(overlap_evals)))
except:
raise Exception("Overlap matrix is not positive definite.")
# Construct the basis transformation matrix and return it
return overlap_evecs @ inv_sqrt_evals @ overlap_evecs.T
def fock_matrix(self, core_hamiltonian, teis, density):
fock = ndarray(shape=density.shape, dtype=float, order='C')
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
fock[i_ao,j_ao] = core_hamiltonian[i_ao,j_ao]
for k_ao in range(self.num_aos):
for l_ao in range(self.num_aos):
coulomb = teis[i_ao,k_ao,j_ao,l_ao]
exchange = teis[i_ao,k_ao,l_ao,j_ao]
fock[i_ao,j_ao] += density[k_ao,l_ao] * (coulomb - 0.5*exchange)
return fock
def density_matrix(self, overlap, basis_transform, fock):
def ordered_eigensystem(matrix):
# Generate the eigenvalues and eigenvectors of the matrix
evals, evecs = eig(matrix)
# Sort the eigenvalues in ascending order and keep a track of what index they
# were originally assigned
ordered_indices = argsort(evals)
ordered_evals = sort(evals)
# Order the eigenvectors in asceding order of their corresponding eigenvalues
ordered_evecs = ndarray(shape=evecs.shape, dtype=float, order='C')
ordered_transform = ndarray(shape=evecs.shape, dtype=float, order='C')
for i_evec in range(len(ordered_evals)):
ordered_evecs[:,i_evec] = evecs[:,ordered_indices[i_evec]]
ordered_transform[i_evec,:] = basis_transform[ordered_indices[i_evec],:]
# Return the ordered eigenvalues and corresponding eigenvectors
return ordered_evals, ordered_evecs, ordered_transform
# Transform Fock matrix to orthogonal basis
fock = basis_transform.T @ fock @ basis_transform
# Get the eigenvalues and eigenvectors of the input Fock matrix
fock_evals, fock_evecs, new_transform = ordered_eigensystem(fock)
# Transform the eigenvectors of the Fock matrix back to the original basis
fock_evecs = new_transform @ fock_evecs
# First of all we make sure the eigenvectors of the Fock matrix are normalised by the
# overlap matrix (these are molecular orbitals, afterall)
for i_mo in range(self.num_aos):
ao_coeffs = fock_evecs[:,i_mo]
norm = ao_coeffs.T @ overlap @ ao_coeffs
fock_evecs[:,i_mo] /= sqrt(norm)
# Initialise the density matrix
density = ndarray(shape=overlap.shape, dtype=float, order='C')
# Loop over all elements in the density matrix and accumulate
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
density[i_ao,j_ao] = 0.0
# We accumulate only over occupied molecular orbitals! Note that we also have
# access to the virtual orbitals at this point, but they're effectively discarded
for i_mo in range(self.num_mos):
density[i_ao,j_ao] += 2 * fock_evecs[i_ao,i_mo] * fock_evecs[j_ao,i_mo]
return fock_evecs, density
def scf_energy(self, density, core_hamiltonian, fock):
energy = 0.0
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
energy += 0.5 * density[i_ao,j_ao] * (core_hamiltonian[i_ao,j_ao] + fock[i_ao,j_ao])
return energy
def check_convergence(self, energy, density):
if abs(energy - self.prev_energy) < self.energy_tolerance:
energy_converged = True
else:
energy_converged = False
self.prev_energy = energy
if norm(density - self.prev_density) < self.density_tolerance:
density_converged = True
else:
density_converged = False
self.prev_density = density
return energy_converged, density_converged
def mulliken(self, overlap, density):
return trace(density @ overlap)
def run(self, num_cycles):
print("Hartree-Fock will run for a maximum of", num_cycles, "SCF iteration(s).")
overlap, kinetic, nucattr = self.one_electron_integrals()
core_hamiltonian = kinetic + nucattr
teis = self.two_electron_integrals()
basis_transform = self.basis_transformation_matrix(overlap)
_, density = self.density_matrix(overlap, basis_transform, core_hamiltonian)
energy = self.scf_energy(density, core_hamiltonian, core_hamiltonian)
for i in range(num_cycles):
fock = self.fock_matrix(core_hamiltonian, teis, density)
fock_evecs, density = self.density_matrix(overlap, basis_transform, fock)
energy = self.scf_energy(density, core_hamiltonian, fock)
print("Iteration", i, "SCF Energy:", energy)
energy_converged, density_converged = self.check_convergence(energy, density)
if energy_converged and density_converged:
print("SCF has converged!")
for i_mo in range(self.num_mos):
print("Molecular Orbital", i_mo, "Coefficients :", fock_evecs[:,i_mo])
print("Mulliken charge:", self.mulliken(overlap, density))
break
if i == num_cycles - 1:
print("SCF failed to converge.")
print("Energy Convergence Check:", energy_converged)
print("Density Convergence Check:", density_converged)
fock_mo_basis = ndarray(shape=(self.num_mos,self.num_mos), dtype=float, order='C')
for i_mo in range(self.num_mos):
for j_mo in range(self.num_mos):
fock_mo_basis[i_mo,j_mo] = 0.0
for i_ao in range(self.num_aos):
for j_ao in range(self.num_aos):
fock_mo_basis[i_mo,j_mo] += fock_evecs[i_ao,j_mo] * fock_evecs[j_ao,i_mo] * fock[i_ao,j_ao]
print(fock_mo_basis)
if __name__ == "__main__":
hf = HartreeFock(4)
hf.run(2000) | [
"numpy.trace",
"numpy.sqrt",
"numpy.linalg.eig",
"numpy.sort",
"numpy.argsort",
"numpy.array",
"numpy.ndarray",
"numpy.linalg.norm"
] | [((170, 217), 'numpy.array', 'array', (['[38.47497, 5.782948, 1.242567, 0.298073]'], {}), '([38.47497, 5.782948, 1.242567, 0.298073])\n', (175, 217), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1103, 1170), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1110, 1170), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1595, 1662), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1602, 1662), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1681, 1748), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1688, 1748), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((1767, 1834), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos), dtype=float, order='C')\n", (1774, 1834), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((2526, 2625), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_aos, self.num_aos, self.num_aos, self.num_aos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_aos, self.num_aos, self.num_aos, self.num_aos),\n dtype=float, order='C')\n", (2533, 2625), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((3135, 3147), 'numpy.linalg.eig', 'eig', (['overlap'], {}), '(overlap)\n', (3138, 3147), False, 'from numpy.linalg import eig, norm\n'), ((3645, 3697), 'numpy.ndarray', 'ndarray', ([], {'shape': 'density.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=density.shape, dtype=float, order='C')\n", (3652, 3697), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((6077, 6129), 'numpy.ndarray', 'ndarray', ([], {'shape': 'overlap.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=overlap.shape, dtype=float, order='C')\n", (6084, 6129), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((7583, 7607), 'numpy.trace', 'trace', (['(density @ overlap)'], {}), '(density @ overlap)\n', (7588, 7607), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((9159, 9226), 'numpy.ndarray', 'ndarray', ([], {'shape': '(self.num_mos, self.num_mos)', 'dtype': 'float', 'order': '"""C"""'}), "(shape=(self.num_mos, self.num_mos), dtype=float, order='C')\n", (9166, 9226), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((2411, 2450), 'numpy.sqrt', 'sqrt', (['(zeta_i + zeta_j + zeta_k + zeta_l)'], {}), '(zeta_i + zeta_j + zeta_k + zeta_l)\n', (2415, 2450), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4409, 4420), 'numpy.linalg.eig', 'eig', (['matrix'], {}), '(matrix)\n', (4412, 4420), False, 'from numpy.linalg import eig, norm\n'), ((4585, 4599), 'numpy.argsort', 'argsort', (['evals'], {}), '(evals)\n', (4592, 4599), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4629, 4640), 'numpy.sort', 'sort', (['evals'], {}), 
'(evals)\n', (4633, 4640), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4763, 4813), 'numpy.ndarray', 'ndarray', ([], {'shape': 'evecs.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=evecs.shape, dtype=float, order='C')\n", (4770, 4813), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((4847, 4897), 'numpy.ndarray', 'ndarray', ([], {'shape': 'evecs.shape', 'dtype': 'float', 'order': '"""C"""'}), "(shape=evecs.shape, dtype=float, order='C')\n", (4854, 4897), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((6004, 6014), 'numpy.sqrt', 'sqrt', (['norm'], {}), '(norm)\n', (6008, 6014), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n'), ((7277, 7310), 'numpy.linalg.norm', 'norm', (['(density - self.prev_density)'], {}), '(density - self.prev_density)\n', (7281, 7310), False, 'from numpy.linalg import eig, norm\n'), ((3318, 3337), 'numpy.sqrt', 'sqrt', (['overlap_evals'], {}), '(overlap_evals)\n', (3322, 3337), False, 'from numpy import array, ndarray, divide, sqrt, argsort, sort, diag, trace\n')] |
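# Sanity-check sketch for the Hartree-Fock sample above (added for illustration, not part of
# the original file): the symmetric orthogonalisation matrix X = S^(-1/2) returned by
# basis_transformation_matrix() should satisfy X.T @ S @ X = I.
import numpy
hf_demo = HartreeFock(4)
overlap_demo, _, _ = hf_demo.one_electron_integrals()
transform_demo = hf_demo.basis_transformation_matrix(overlap_demo)
print(numpy.allclose(transform_demo.T @ overlap_demo @ transform_demo, numpy.eye(hf_demo.num_aos)))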
from django.db import models
from cloudinary.models import CloudinaryField
# Create your models here.
class Category(models.Model):
name = models.CharField( max_length=200, null=False, blank=False )
def __str__(self):
return self.name
class Photo(models.Model):
category = models.ForeignKey( Category, on_delete=models.SET_NULL, null=True, blank=True )
image = CloudinaryField('image', default='')
description = models.TextField()
def __str__(self):
return self.description | [
"cloudinary.models.CloudinaryField",
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((145, 202), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(False)', 'blank': '(False)'}), '(max_length=200, null=False, blank=False)\n', (161, 202), False, 'from django.db import models\n'), ((302, 379), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), '(Category, on_delete=models.SET_NULL, null=True, blank=True)\n', (319, 379), False, 'from django.db import models\n'), ((394, 430), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""image"""'], {'default': '""""""'}), "('image', default='')\n", (409, 430), False, 'from cloudinary.models import CloudinaryField\n'), ((449, 467), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (465, 467), False, 'from django.db import models\n')] |
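# Usage sketch for the models above (added for illustration; assumes a configured Django
# project with Cloudinary set up and migrations applied, e.g. inside `python manage.py shell`;
# the names are placeholders):
demo_cat = Category.objects.create(name="Landscapes")
demo_photo = Photo.objects.create(category=demo_cat, description="A test photo")
print(demo_photo, demo_photo.category.name)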
import logging
from configparser import ConfigParser
from sdk.data_uploader import DataUploader
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
config = ConfigParser()
config.read("config.ini")
#####
# Datasets to be added to metadata API
datasetData = {
"title": "Test",
"description": "Test data",
"keywords": ["test"],
"accessRights": "non-public",
"objective": "Formålsbeskrivelse",
"contactPoint": {
"name": "Tim",
"email": "<EMAIL>",
"phone": "12345678",
},
"publisher": "Tim",
}
datasetVersionData = {"version": "6", "schema": {}, "transformation": {}}
datasetVersionEditionData = {
"edition": "2019-05-28T15:37:00+02:00",
"description": "Data for one hour",
"startTime": "2018-12-21T08:00:00+01:00",
"endTime": "2018-12-21T09:00:00+01:00",
}
######
# The dataset* variables are optional, if these are set in config.ini this script will
# not run the relevant DataUploader function
datasetId = config.get("dataUploader", "datasetId", fallback=None)
datasetVersion = config.get("dataUploader", "datasetVersion", fallback=None)
datasetVersionEdition = config.get(
"dataUploader", "datasetVersionEdition", fallback=None
)
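# The reads above assume a config.ini roughly like the following (values are placeholders,
# not from the original project; leave the dataset* keys out to have the script create a
# new dataset/version/edition):
# [dataUploader]
# datasetId = my-dataset-id
# datasetVersion = 6
# datasetVersionEdition = my-edition-id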
upload = DataUploader(config)
try:
log.info("Uploading a file to S3")
upload.login()
if datasetId is None:
upload.createDataset(datasetData)
if datasetVersion is None:
upload.createVersion(datasetVersionData)
if datasetVersionEdition is None:
upload.createEdition(datasetVersionEditionData)
log.info(f"Dataset: {upload.datasetId}")
log.info(f"Version: {upload.datasetVersion}")
log.info(f"Edition: {upload.datasetVersionEdition}")
if upload.upload("README.md"):
log.info("Done... go brew some coffee")
else:
log.error("Could not upload file....")
except Exception as e:
log.exception(f">> Something went horrible wrong:\n{e}")
# To upload with curl: cmd = upload.curl("tmp3.zip")
# Max upload size for now is 5GB
| [
"logging.basicConfig",
"configparser.ConfigParser",
"sdk.data_uploader.DataUploader",
"logging.getLogger"
] | [((97, 136), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (116, 136), False, 'import logging\n'), ((143, 162), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (160, 162), False, 'import logging\n'), ((173, 187), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (185, 187), False, 'from configparser import ConfigParser\n'), ((1234, 1254), 'sdk.data_uploader.DataUploader', 'DataUploader', (['config'], {}), '(config)\n', (1246, 1254), False, 'from sdk.data_uploader import DataUploader\n')] |
from setuptools import setup
from setuptools.command.install import install
class PostInstallCommand(install):
user_options = install.user_options + [
('noservice', None, None),
]
def initialize_options(self):
install.initialize_options(self)
self.noservice = None
def finalize_options(self):
install.finalize_options(self)
def run(self):
install.run(self)
if not self.noservice:
from xmediusmailrelayserver import console
console.install_service(['--startup', 'auto', 'install'])
setup(
name='xmediusmailrelayserver',
version='1.0.0',
description='The Python module to be used to relay mail to different servers depending on patterns',
long_description='See https://github.com/xmedius/xmedius-mailrelayserver for more information',
url='https://github.com/xmedius/xmedius-mailrelayserver/',
author='<NAME>',
license='MIT',
classifiers=[
'Programming Language :: Python :: 3.6',
'Environment :: Win32 (MS Windows)',
'Operating System :: Microsoft :: Windows'
],
cmdclass={
'install': PostInstallCommand
},
packages=['xmediusmailrelayserver'],
package_data={'xmediusmailrelayserver': ['config.yml']},
install_requires=['pyyaml', 'aiosmtpd'],
dependency_links=[]
)
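# Example invocations (added for illustration): `python setup.py install` registers and
# auto-starts the Windows service through the post-install hook above, while
# `python setup.py install --noservice` skips the service registration.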
| [
"setuptools.command.install.install.initialize_options",
"setuptools.command.install.install.run",
"setuptools.setup",
"setuptools.command.install.install.finalize_options",
"xmediusmailrelayserver.console.install_service"
] | [((578, 1304), 'setuptools.setup', 'setup', ([], {'name': '"""xmediusmailrelayserver"""', 'version': '"""1.0.0"""', 'description': '"""The Python module to be used to relay mail to different servers depending on patterns"""', 'long_description': '"""See https://github.com/xmedius/xmedius-mailrelayserver for more information"""', 'url': '"""https://github.com/xmedius/xmedius-mailrelayserver/"""', 'author': '"""<NAME>"""', 'license': '"""MIT"""', 'classifiers': "['Programming Language :: Python :: 3.6',\n 'Environment :: Win32 (MS Windows)',\n 'Operating System :: Microsoft :: Windows']", 'cmdclass': "{'install': PostInstallCommand}", 'packages': "['xmediusmailrelayserver']", 'package_data': "{'xmediusmailrelayserver': ['config.yml']}", 'install_requires': "['pyyaml', 'aiosmtpd']", 'dependency_links': '[]'}), "(name='xmediusmailrelayserver', version='1.0.0', description=\n 'The Python module to be used to relay mail to different servers depending on patterns'\n , long_description=\n 'See https://github.com/xmedius/xmedius-mailrelayserver for more information'\n , url='https://github.com/xmedius/xmedius-mailrelayserver/', author=\n '<NAME>', license='MIT', classifiers=[\n 'Programming Language :: Python :: 3.6',\n 'Environment :: Win32 (MS Windows)',\n 'Operating System :: Microsoft :: Windows'], cmdclass={'install':\n PostInstallCommand}, packages=['xmediusmailrelayserver'], package_data=\n {'xmediusmailrelayserver': ['config.yml']}, install_requires=['pyyaml',\n 'aiosmtpd'], dependency_links=[])\n", (583, 1304), False, 'from setuptools import setup\n'), ((240, 272), 'setuptools.command.install.install.initialize_options', 'install.initialize_options', (['self'], {}), '(self)\n', (266, 272), False, 'from setuptools.command.install import install\n'), ((344, 374), 'setuptools.command.install.install.finalize_options', 'install.finalize_options', (['self'], {}), '(self)\n', (368, 374), False, 'from setuptools.command.install import install\n'), ((403, 420), 'setuptools.command.install.install.run', 'install.run', (['self'], {}), '(self)\n', (414, 420), False, 'from setuptools.command.install import install\n'), ((519, 576), 'xmediusmailrelayserver.console.install_service', 'console.install_service', (["['--startup', 'auto', 'install']"], {}), "(['--startup', 'auto', 'install'])\n", (542, 576), False, 'from xmediusmailrelayserver import console\n')] |
#!/usr/bin/env python
from CraftProtocol.NBT.NBTBase import NBTBase
from CraftProtocol.NBT.NBTProvider import NBTProvider
from CraftProtocol.StreamIO import StreamIO
class NBTTagList(NBTBase):
TYPE_ID = 0x09
def __init__(self, tag_type, values=None):
NBTBase.__init__(self)
if values is None:
values = []
self._tag_type = tag_type
self._values = list(values)
def get(self):
return self._values
def get_tag_type(self):
return self._tag_type
def __getitem__(self, i):
return self._values.__getitem__(i)
def __setitem__(self, i, o):
assert isinstance(o, self._tag_type), "value must be " + self._tag_type.__name__
self._values.__setitem__(i, o)
def __delitem__(self, i):
self._values.__delitem__(i)
def __iter__(self):
return self._values.__iter__()
def __contains__(self, o):
return self._values.__contains__(o)
def __len__(self):
return self._values.__len__()
def append(self, x):
assert isinstance(x, self._tag_type), "arg must be " + self._tag_type.__name__
self._values.append(x)
def remove(self, x):
assert isinstance(x, self._tag_type), "arg must be " + self._tag_type.__name__
self._values.remove(x)
@staticmethod
def write(stream, tag):
StreamIO.write_ubyte(stream, tag.get_tag_type().TYPE_ID)
StreamIO.write_int(stream, len(tag))
for i in tag:
tag.get_tag_type().write(stream, i)
@staticmethod
def read(stream):
tag_type_id = StreamIO.read_ubyte(stream)
tag_type = NBTProvider.get_tag_class(tag_type_id)
values = []
length = StreamIO.read_int(stream)
for i in range(length):
values.append(tag_type.read(stream))
return NBTTagList(tag_type, values)
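# Round-trip sketch (added for illustration, not part of the original module; assumes the
# StreamIO helpers accept any file-like object such as io.BytesIO). Nested empty lists keep
# the example limited to the classes defined in this file:
from io import BytesIO
demo_buf = BytesIO()
NBTTagList.write(demo_buf, NBTTagList(NBTTagList, [NBTTagList(NBTTagList), NBTTagList(NBTTagList)]))
demo_buf.seek(0)
demo_restored = NBTTagList.read(demo_buf)
print(len(demo_restored), demo_restored.get_tag_type().__name__)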
| [
"CraftProtocol.StreamIO.StreamIO.read_int",
"CraftProtocol.NBT.NBTBase.NBTBase.__init__",
"CraftProtocol.StreamIO.StreamIO.read_ubyte",
"CraftProtocol.NBT.NBTProvider.NBTProvider.get_tag_class"
] | [((271, 293), 'CraftProtocol.NBT.NBTBase.NBTBase.__init__', 'NBTBase.__init__', (['self'], {}), '(self)\n', (287, 293), False, 'from CraftProtocol.NBT.NBTBase import NBTBase\n'), ((1611, 1638), 'CraftProtocol.StreamIO.StreamIO.read_ubyte', 'StreamIO.read_ubyte', (['stream'], {}), '(stream)\n', (1630, 1638), False, 'from CraftProtocol.StreamIO import StreamIO\n'), ((1658, 1696), 'CraftProtocol.NBT.NBTProvider.NBTProvider.get_tag_class', 'NBTProvider.get_tag_class', (['tag_type_id'], {}), '(tag_type_id)\n', (1683, 1696), False, 'from CraftProtocol.NBT.NBTProvider import NBTProvider\n'), ((1731, 1756), 'CraftProtocol.StreamIO.StreamIO.read_int', 'StreamIO.read_int', (['stream'], {}), '(stream)\n', (1748, 1756), False, 'from CraftProtocol.StreamIO import StreamIO\n')] |
#!/usr/bin/env python
"""Get vocabulary coutings from transformed corpora samples."""
from onmt.utils.logging import init_logger
from onmt.utils.misc import set_random_seed, check_path
from onmt.utils.parse import ArgumentParser
from onmt.opts import dynamic_prepare_opts
from onmt.inputters.corpus import build_vocab
from onmt.transforms import make_transforms, get_transforms_cls
def build_vocab_main(opts):
"""Apply transforms to samples of specified data and build vocab from it.
Transforms that need vocab will be disabled in this.
Built vocab is saved in plain text format as following and can be pass as
`-src_vocab` (and `-tgt_vocab`) when training:
```
<tok_0>\t<count_0>
<tok_1>\t<count_1>
```
"""
ArgumentParser.validate_prepare_opts(opts, build_vocab_only=True)
assert opts.n_sample == -1 or opts.n_sample > 1, \
f"Illegal argument n_sample={opts.n_sample}."
logger = init_logger()
set_random_seed(opts.seed, False)
transforms_cls = get_transforms_cls(opts._all_transform)
fields = None
transforms = make_transforms(opts, transforms_cls, fields)
logger.info(f"Counter vocab from {opts.n_sample} samples.")
src_counter, tgt_counter, src_feats_counter = build_vocab(
opts, transforms, n_sample=opts.n_sample)
logger.info(f"Counters src:{len(src_counter)}")
logger.info(f"Counters tgt:{len(tgt_counter)}")
for feat_name, feat_counter in src_feats_counter.items():
logger.info(f"Counters {feat_name}:{len(feat_counter)}")
def save_counter(counter, save_path):
check_path(save_path, exist_ok=opts.overwrite, log=logger.warning)
with open(save_path, "w", encoding="utf8") as fo:
for tok, count in counter.most_common():
fo.write(tok + "\t" + str(count) + "\n")
if opts.share_vocab:
src_counter += tgt_counter
tgt_counter = src_counter
logger.info(f"Counters after share:{len(src_counter)}")
save_counter(src_counter, opts.src_vocab)
else:
save_counter(src_counter, opts.src_vocab)
save_counter(tgt_counter, opts.tgt_vocab)
for k, v in src_feats_counter.items():
save_counter(v, opts.src_feats_vocab[k])
def _get_parser():
parser = ArgumentParser(description='build_vocab.py')
dynamic_prepare_opts(parser, build_vocab_only=True)
return parser
def main():
parser = _get_parser()
opts, unknown = parser.parse_known_args()
build_vocab_main(opts)
if __name__ == '__main__':
main()
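# Example invocation (added for illustration; the YAML file name is a placeholder and the
# flags follow the options parsed above and OpenNMT-py's dynamic data configuration):
# python build_vocab.py -config my_data.yaml -n_sample 10000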
| [
"onmt.utils.misc.check_path",
"onmt.transforms.get_transforms_cls",
"onmt.utils.logging.init_logger",
"onmt.utils.parse.ArgumentParser.validate_prepare_opts",
"onmt.utils.parse.ArgumentParser",
"onmt.opts.dynamic_prepare_opts",
"onmt.inputters.corpus.build_vocab",
"onmt.transforms.make_transforms",
"onmt.utils.misc.set_random_seed"
] | [((752, 817), 'onmt.utils.parse.ArgumentParser.validate_prepare_opts', 'ArgumentParser.validate_prepare_opts', (['opts'], {'build_vocab_only': '(True)'}), '(opts, build_vocab_only=True)\n', (788, 817), False, 'from onmt.utils.parse import ArgumentParser\n'), ((941, 954), 'onmt.utils.logging.init_logger', 'init_logger', ([], {}), '()\n', (952, 954), False, 'from onmt.utils.logging import init_logger\n'), ((959, 992), 'onmt.utils.misc.set_random_seed', 'set_random_seed', (['opts.seed', '(False)'], {}), '(opts.seed, False)\n', (974, 992), False, 'from onmt.utils.misc import set_random_seed, check_path\n'), ((1014, 1053), 'onmt.transforms.get_transforms_cls', 'get_transforms_cls', (['opts._all_transform'], {}), '(opts._all_transform)\n', (1032, 1053), False, 'from onmt.transforms import make_transforms, get_transforms_cls\n'), ((1090, 1135), 'onmt.transforms.make_transforms', 'make_transforms', (['opts', 'transforms_cls', 'fields'], {}), '(opts, transforms_cls, fields)\n', (1105, 1135), False, 'from onmt.transforms import make_transforms, get_transforms_cls\n'), ((1251, 1304), 'onmt.inputters.corpus.build_vocab', 'build_vocab', (['opts', 'transforms'], {'n_sample': 'opts.n_sample'}), '(opts, transforms, n_sample=opts.n_sample)\n', (1262, 1304), False, 'from onmt.inputters.corpus import build_vocab\n'), ((2282, 2326), 'onmt.utils.parse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""build_vocab.py"""'}), "(description='build_vocab.py')\n", (2296, 2326), False, 'from onmt.utils.parse import ArgumentParser\n'), ((2331, 2382), 'onmt.opts.dynamic_prepare_opts', 'dynamic_prepare_opts', (['parser'], {'build_vocab_only': '(True)'}), '(parser, build_vocab_only=True)\n', (2351, 2382), False, 'from onmt.opts import dynamic_prepare_opts\n'), ((1597, 1663), 'onmt.utils.misc.check_path', 'check_path', (['save_path'], {'exist_ok': 'opts.overwrite', 'log': 'logger.warning'}), '(save_path, exist_ok=opts.overwrite, log=logger.warning)\n', (1607, 1663), False, 'from onmt.utils.misc import set_random_seed, check_path\n')] |
import os
import numpy as np
from skimage.io import imread
def get_file_count(paths, image_format='.tif'):
total_count = 0
for path in paths:
try:
path_list = [_ for _ in os.listdir(path) if _.endswith(image_format)]
total_count += len(path_list)
except OSError:
print("Directory does not exist. Returned file count for this path will be 0")
return total_count
# Function to load image
def load_image(img_path):
img = imread(img_path)
if img.shape[2] == 4:
img = img[:, :, :-1]
# img = np.roll(img, shift=1, axis=2) # CHECK IMAGE FORMAT
return img
# Function to load mask
def load_mask(mask_path):
mask = imread(mask_path)
return mask
def load_mask_from_img(cfg, img_path, img_name, suffixes):
a_mask = imread(os.path.join(img_path, img_name + suffixes[0]))
msk = np.zeros((a_mask.shape[0], a_mask.shape[1], len(suffixes) * cfg.NUMBER_MSK_CHANNELS))
i = 0
for suffix in suffixes:
msk_channel = imread(os.path.join(img_path, img_name + suffix))
if len(msk_channel.shape) == 2:
msk_channel = np.expand_dims(msk_channel, axis=-1)
if len(msk_channel.shape) != 3:
raise ValueError("Mask must be 3-dim here. Does your mask have 1 or more than 3 dimensions? "
"Check the masks.")
msk[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = msk_channel
i += cfg.NUMBER_MSK_CHANNELS
# print(msk, msk.shape)
return msk
def load_weights(cfg, img_path, img_name, weight_suffixes):
a_weights = np.load(os.path.join(img_path, img_name + weight_suffixes[0]))
weights = np.zeros((a_weights.shape[0], a_weights.shape[1], len(weight_suffixes) * cfg.NUMBER_MSK_CHANNELS))
i = 0
for suffix in weight_suffixes:
weights_channel = np.load(os.path.join(img_path, img_name + suffix))
if len(weights_channel.shape) == 2:
weights_channel = np.expand_dims(weights_channel, axis=-1)
if len(weights_channel.shape) != 3:
raise ValueError("Weights must be 3-dim here. Has your weights 1 or more than 3 dimensions? Check the weights.")
weights[:, :, i:i+cfg.NUMBER_MSK_CHANNELS] = weights_channel
i += cfg.NUMBER_MSK_CHANNELS
return weights | [
"skimage.io.imread",
"os.listdir",
"os.path.join",
"numpy.expand_dims"
] | [((491, 507), 'skimage.io.imread', 'imread', (['img_path'], {}), '(img_path)\n', (497, 507), False, 'from skimage.io import imread\n'), ((705, 722), 'skimage.io.imread', 'imread', (['mask_path'], {}), '(mask_path)\n', (711, 722), False, 'from skimage.io import imread\n'), ((820, 866), 'os.path.join', 'os.path.join', (['img_path', '(img_name + suffixes[0])'], {}), '(img_path, img_name + suffixes[0])\n', (832, 866), False, 'import os\n'), ((1599, 1652), 'os.path.join', 'os.path.join', (['img_path', '(img_name + weight_suffixes[0])'], {}), '(img_path, img_name + weight_suffixes[0])\n', (1611, 1652), False, 'import os\n'), ((1031, 1072), 'os.path.join', 'os.path.join', (['img_path', '(img_name + suffix)'], {}), '(img_path, img_name + suffix)\n', (1043, 1072), False, 'import os\n'), ((1140, 1176), 'numpy.expand_dims', 'np.expand_dims', (['msk_channel'], {'axis': '(-1)'}), '(msk_channel, axis=-1)\n', (1154, 1176), True, 'import numpy as np\n'), ((1846, 1887), 'os.path.join', 'os.path.join', (['img_path', '(img_name + suffix)'], {}), '(img_path, img_name + suffix)\n', (1858, 1887), False, 'import os\n'), ((1963, 2003), 'numpy.expand_dims', 'np.expand_dims', (['weights_channel'], {'axis': '(-1)'}), '(weights_channel, axis=-1)\n', (1977, 2003), True, 'import numpy as np\n'), ((202, 218), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (212, 218), False, 'import os\n')] |
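# Usage sketch for the loaders above (added for illustration): the directory, file name and
# suffix are placeholders, and DemoCfg merely stands in for the project's real config object.
class DemoCfg: NUMBER_MSK_CHANNELS = 1
demo_img = load_image("data/sample_0001.tif")
demo_msk = load_mask_from_img(DemoCfg, "data/masks", "sample_0001", ["_mask.tif"])
print(demo_img.shape, demo_msk.shape)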
from typing import Optional, Callable
try: # Assume we're a sub-module in a package.
from series import series_classes as sc
from utils import numeric as nm
except ImportError: # Apparently no higher-level package has been imported, fall back to a local import.
from .. import series_classes as sc
from ...utils import numeric as nm
Native = sc.AnySeries
DEFAULT_NUMERIC = True
WINDOW_DEFAULT = (-1, 0, 1)
WINDOW_WO_CENTER = (-2, -1, 0, 1, 2)
WINDOW_NEIGHBORS = (-1, 0)
class NumericSeries(sc.AnySeries):
def __init__(
self,
values=[],
validate=False,
name=None,
):
super().__init__(
values=values,
validate=validate,
name=name,
)
@staticmethod
def get_distance_func():
return nm.diff
def get_errors(self):
yield from super().get_errors()
if not self.has_valid_items():
yield 'Values of {} must be numeric'.format(self.get_class_name())
def has_valid_items(self):
for v in self.get_values():
if not isinstance(v, (int, float)):
return False
return True
def is_numeric(self, check=False):
if check:
return self.has_valid_items()
else:
return DEFAULT_NUMERIC
def get_sum(self):
return sum(
self.filter_values_defined().get_values(),
)
def get_mean(self):
values_defined = self.filter_values_defined().get_values()
if values_defined:
return sum(values_defined) / len(values_defined)
def norm(self, rate=None, default=None):
if rate is None:
rate = self.get_mean()
return self.map_values(lambda v: v / rate if rate else default)
def divide(self, series, default=None, extend=False):
return self.map_optionally_extend_zip_values(
lambda x, y: x / y if y else default,
extend,
series,
)
def subtract(self, series, default=None, extend=False):
return self.map_optionally_extend_zip_values(
lambda x, y: x - y if x is not None and y is not None else default,
extend,
series,
)
def derivative(self, extend=False, default=0):
if extend:
return self.preface(None).subtract(
self,
extend=True,
default=default,
).crop(0, 1)
else:
return self.slice(0, -1).subtract(
self.shift(-1)
)
def get_sliding_window(self, window=WINDOW_DEFAULT, extend=True, default=None, as_series=True):
if extend:
n_min = 0
n_max = self.get_count()
else:
n_min = - min(window)
n_max = self.get_count() - max(window)
for center in range(n_min, n_max):
sliding_window = [center + n for n in window]
if as_series:
yield self.value_series().items_no(sliding_window, extend=extend, default=default)
else:
yield self.value_series().get_items_no(sliding_window, extend=extend, default=default)
def apply_window_func(
self, function: Callable,
window=WINDOW_DEFAULT, extend=True, default=None, as_series=False,
inplace: bool = False,
) -> Optional[Native]:
values = map(function, self.get_sliding_window(window, extend=extend, default=default, as_series=as_series))
return self.set_values(values, inplace=inplace)
def mark_local_extremums(self, local_min=True, local_max=True):
return self.apply_window_func(
lambda a: nm.is_local_extremum(*a, local_min=local_min, local_max=local_max),
window=WINDOW_DEFAULT,
extend=True,
default=False,
)
def mark_local_max(self):
return self.mark_local_extremums(local_min=False, local_max=True)
def mark_local_min(self):
return self.mark_local_extremums(local_min=True, local_max=False)
def deviation_from_neighbors(self, window=WINDOW_NEIGHBORS, rel=False):
smoothed_series = self.smooth(window=window)
deviation = self.subtract(smoothed_series)
if rel:
deviation = deviation.divide(smoothed_series, default=0)
return deviation
# @deprecated
def smooth_simple_linear(self, window_len=3, exclude_center=False):
center = int((window_len - 1) / 2)
count = self.get_count()
result = self.new()
for n in self.get_range_numbers():
is_edge = n < center or n >= count - center
if is_edge:
result.append(self.get_item_no(n), inplace=True)
else:
sub_series = self.slice(n - center, n + center + 1)
if exclude_center:
sub_series = sub_series.drop_item_no(center)
result.append(sub_series.get_mean(), inplace=True)
return result
def smooth(self, how='linear', *args, **kwargs):
method_name = 'smooth_{}'.format(how)
smooth_method = self.__getattribute__(method_name)
return smooth_method(*args, **kwargs)
def smooth_multiple(self, list_kwargs=[]):
series = self
for kwargs in list_kwargs:
series = series.smooth(**kwargs)
return series
def smooth_linear(self, window=WINDOW_DEFAULT):
return self.apply_window_func(
lambda s: s.get_mean(),
window=window, extend=True, default=None,
as_series=True,
)
def smooth_spikes(self, threshold, window=WINDOW_WO_CENTER, local_min=False, local_max=True, whitelist=None):
spikes = self.mark_spikes(threshold, local_min=local_min, local_max=local_max)
if whitelist:
spikes = spikes.map_zip_values(
lambda a, b: a and not b,
whitelist,
)
return self.map_zip_values(
lambda v, t, s: s if t else v,
spikes,
self.smooth(window=window),
)
def mark_spikes(self, threshold, window=WINDOW_NEIGHBORS, local_min=False, local_max=True):
deviation = self.deviation_from_neighbors(window=window, rel=True)
if local_min or local_max:
deviation = deviation.map_zip_values(
lambda x, m: x if m else None,
self.mark_local_extremums(local_min=local_min, local_max=local_max),
)
spikes = deviation.map_values(
lambda x: abs(x or 0) > threshold,
)
return spikes
def plot(self, fmt='-'):
nm.plot(self.get_range_numbers(), self.get_values(), fmt=fmt)
| [
"utils.numeric.is_local_extremum"
] | [((3727, 3793), 'utils.numeric.is_local_extremum', 'nm.is_local_extremum', (['*a'], {'local_min': 'local_min', 'local_max': 'local_max'}), '(*a, local_min=local_min, local_max=local_max)\n', (3747, 3793), True, 'from utils import numeric as nm\n')] |
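# ---------------------------------------------------------------------------
# Editor's note: illustrative usage sketch for the NumericSeries class above;
# it is not part of the original record. The import path is an assumption and
# it presumes sc.AnySeries supplies the get_values/set_values/slice/shift
# plumbing that the methods above rely on, so treat it as a hedged sketch.
# ---------------------------------------------------------------------------
def _numeric_series_demo():
    from series.simple.numeric_series import NumericSeries  # hypothetical import path
    raw = NumericSeries(values=[1.0, 2.0, 8.0, 3.0, 4.0], name='demo')
    smoothed = raw.smooth(how='linear')      # dispatches to smooth_linear via 'smooth_{how}'
    spikes = raw.mark_spikes(threshold=1.0)  # relative deviation from neighbours above 1.0
    deriv = raw.derivative()                 # first differences via slice/shift/subtract
    return smoothed.get_values(), spikes.get_values(), deriv.get_values()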
import glob
import pathlib
from .filemanager import filemanager_class
class database_class(filemanager_class):
def __init__(self):
filemanager_class.__init__(self)
async def update_info(self, year, cid, vid, title, explanation):
        # Load the existing JSON
json_file = "/".join([self.video_dir, str(year),
cid, vid, "info.json"])
_dict = await self.read_json(json_file)
if not _dict:
return False
        # Update the JSON fields
_dict["title"] = title
_dict["explanation"] = explanation
        # Write the JSON back
if self.write_json(json_file, _dict):
return True
return False
async def encode_result(self, folderpath, resolution, result=True):
        # Load the existing JSON
json_file = "/".join([folderpath, "info.json"])
_dict = await self.read_json(json_file)
if not _dict:
return False
if result:
            # Add the completed resolution
_dict["resolution"].append(f"{resolution}p")
_dict["encode_tasks"].remove(f"{resolution}p")
else:
_dict["encode_error"].append(f"{resolution}p")
_dict["encode_tasks"].remove(f"{resolution}p")
        # Write the JSON back
self.write_json(json_file, _dict)
        # Write to the playlist
playlist = "/".join([folderpath, "playlist.m3u8"])
await self.write_playlist(playlist, resolution)
async def encode_task(self, folderpath, resolution):
        # Load the existing JSON
json_file = "/".join([folderpath, "info.json"])
_dict = await self.read_json(json_file)
if not _dict:
return False
if f"{resolution}p" in _dict["resolution"]:
return True
        # Add the resolution to the encode task list
_dict["encode_tasks"].append(f"{resolution}p")
        # Write the JSON back
if self.write_json(json_file, _dict):
return True
return False
async def encode_error(self, folderpath, message):
        # Load the existing JSON
json_file = "/".join([folderpath, "info.json"])
_dict = await self.read_json(json_file)
if not _dict:
return False
        # Record the error message
_dict["encode_error"].append(f"{message}")
        # Write the JSON back
if self.write_json(json_file, _dict):
return True
return False
async def get_all_info(self):
json_files_path = await self.async_wrap(glob.glob)(
f"./{self.video_dir}/**/info.json",
recursive=True)
result = []
for json_file in json_files_path:
temp = await self.read_json(json_file)
directory = "/".join(json_file.split("/")[:-1])
temp["video_directory"] = directory
try:
temp["video_file_name"] = glob.glob(
f"{directory}/1.*")[0].split("/")[-1]
except IndexError:
temp["video_file_name"] = None
result.append(temp)
return result
async def get_encode_tasks(self):
video_info = await self.get_all_info()
result = []
for info in video_info:
if len(info["encode_tasks"]) > 0:
result.append(info)
return result
async def list_video_id(self, year, cid):
_video_dir = "/".join([self.video_dir, str(year), cid])
temp = await self.async_wrap(glob.glob)(f"{_video_dir}/*")
return [video_id.split("/")[-1]
for video_id in temp]
async def list_link(self, year, cid):
_video_dir = "/".join([self.video_dir, str(year), cid])
temp = await self.async_wrap(glob.glob)(f"{_video_dir}/*")
result = {}
for link_path in temp:
json_file = link_path + "/info.json"
_dict = await self.read_json(json_file)
if not _dict:
pass
else:
result[link_path.split("/")[-1]] = _dict
return result
database = database_class()
| [
"glob.glob"
] | [((2763, 2792), 'glob.glob', 'glob.glob', (['f"""{directory}/1.*"""'], {}), "(f'{directory}/1.*')\n", (2772, 2792), False, 'import glob\n'), ((4384, 4413), 'glob.glob', 'glob.glob', (['f"""{directory}/1.*"""'], {}), "(f'{directory}/1.*')\n", (4393, 4413), False, 'import glob\n')] |
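# ---------------------------------------------------------------------------
# Editor's note: illustrative usage sketch for the database_class record above;
# it is not part of the original source. It assumes filemanager_class provides
# the async read_json / write_json / async_wrap helpers used above and that
# self.video_dir points at the storage root; the folder path below is made up.
# ---------------------------------------------------------------------------
import asyncio

async def _database_demo():
    folder = "video/2021/some_channel/some_video"   # hypothetical video folder
    await database.encode_task(folder, 1080)        # queue a 1080p encode in info.json
    pending = await database.get_encode_tasks()     # videos that still have encodes queued
    print(len(pending), "videos with outstanding encode tasks")

# asyncio.run(_database_demo())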
"""Array data-type implementations (abstraction points for GL array types"""
import ctypes
import OpenGL
from OpenGL.raw.GL import _types
from OpenGL import plugins
from OpenGL.arrays import formathandler, _arrayconstants as GL_1_1
from OpenGL import logs
_log = logs.getLog( 'OpenGL.arrays.arraydatatype' )
from OpenGL import acceleratesupport
ADT = None
if acceleratesupport.ACCELERATE_AVAILABLE:
try:
from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT
except ImportError as err:
_log.warn(
"Unable to load ArrayDatatype accelerator from OpenGL_accelerate"
)
if ADT is None:
# Python-coded version
class HandlerRegistry( dict ):
GENERIC_OUTPUT_PREFERENCES = ['numpy','ctypesarrays']
def __init__( self, plugin_match ):
self.match = plugin_match
self.output_handler = None
self.preferredOutput = None
self.all_output_handlers = []
def __call__( self, value ):
"""Lookup of handler for given value"""
try:
typ = value.__class__
except AttributeError as err:
typ = type(value)
handler = self.get( typ )
if not handler:
if hasattr( typ, '__mro__' ):
for base in typ.__mro__:
handler = self.get( base )
if not handler:
handler = self.match( base )
if handler:
handler = handler.load()
if handler:
handler = handler()
if handler:
self[ typ ] = handler
if hasattr( handler, 'registerEquivalent' ):
handler.registerEquivalent( typ, base )
return handler
raise TypeError(
"""No array-type handler for type %s.%s (value: %s) registered"""%(
                        typ.__module__, typ.__name__, repr(value)[:50]
)
)
return handler
def handler_by_plugin_name( self, name ):
plugin = plugins.FormatHandler.by_name( name )
if plugin:
try:
return plugin.load()
except ImportError as err:
return None
else:
raise RuntimeError( 'No handler of name %s found'%(name,))
def get_output_handler( self ):
"""Fast-path lookup for output handler object"""
if self.output_handler is None:
if self.preferredOutput is not None:
self.output_handler = self.handler_by_plugin_name( self.preferredOutput )
if not self.output_handler:
for preferred in self.GENERIC_OUTPUT_PREFERENCES:
self.output_handler = self.handler_by_plugin_name( preferred )
if self.output_handler:
break
if not self.output_handler:
raise RuntimeError(
"""Unable to find any output handler at all (not even ctypes/numpy ones!)"""
)
return self.output_handler
def register( self, handler, types=None ):
"""Register this class as handler for given set of types"""
if not isinstance( types, (list,tuple)):
types = [ types ]
for type in types:
self[ type ] = handler
if handler.isOutput:
self.all_output_handlers.append( handler )
def registerReturn( self, handler ):
"""Register this handler as the default return-type handler"""
if isinstance( handler, (str,unicode)):
self.preferredOutput = handler
self.output_handler = None
else:
self.preferredOutput = None
self.output_handler = handler
GLOBAL_REGISTRY = HandlerRegistry( plugins.FormatHandler.match)
formathandler.FormatHandler.TYPE_REGISTRY = GLOBAL_REGISTRY
class ArrayDatatype( object ):
"""Mix-in for array datatype classes
The ArrayDatatype marker essentially is used to mark a particular argument
as having an "array" type, which means that it is eligible for handling
via the arrays sub-package and its registered handlers.
"""
typeConstant = None
handler = GLOBAL_REGISTRY
getHandler = GLOBAL_REGISTRY.__call__
returnHandler = GLOBAL_REGISTRY.get_output_handler
isAccelerated = False
@classmethod
def getRegistry( cls ):
"""Get our handler registry"""
return cls.handler
def from_param( cls, value, typeConstant=None ):
"""Given a value in a known data-pointer type, convert to a ctypes pointer"""
return cls.getHandler(value).from_param( value, cls.typeConstant )
from_param = classmethod( logs.logOnFail( from_param, _log ) )
def dataPointer( cls, value ):
"""Given a value in a known data-pointer type, return long for pointer"""
try:
return cls.getHandler(value).dataPointer( value )
except Exception as err:
_log.warn(
"""Failure in dataPointer for %s instance %s""", type(value), value,
)
raise
dataPointer = classmethod( logs.logOnFail( dataPointer, _log ) )
def voidDataPointer( cls, value ):
"""Given value in a known data-pointer type, return void_p for pointer"""
pointer = cls.dataPointer( value )
try:
return ctypes.c_void_p(pointer)
except TypeError as err:
return pointer
voidDataPointer = classmethod( logs.logOnFail( voidDataPointer, _log ) )
def typedPointer( cls, value ):
"""Return a pointer-to-base-type pointer for given value"""
return ctypes.cast( cls.dataPointer(value), ctypes.POINTER( cls.baseType ))
typedPointer = classmethod( typedPointer )
def asArray( cls, value, typeCode=None ):
"""Given a value, convert to preferred array representation"""
return cls.getHandler(value).asArray( value, typeCode or cls.typeConstant )
asArray = classmethod( logs.logOnFail( asArray, _log ) )
def arrayToGLType( cls, value ):
"""Given a data-value, guess the OpenGL type of the corresponding pointer
Note: this is not currently used in PyOpenGL and may be removed
eventually.
"""
return cls.getHandler(value).arrayToGLType( value )
arrayToGLType = classmethod( logs.logOnFail( arrayToGLType, _log ) )
def arraySize( cls, value, typeCode = None ):
"""Given a data-value, calculate dimensions for the array (number-of-units)"""
return cls.getHandler(value).arraySize( value, typeCode or cls.typeConstant )
arraySize = classmethod( logs.logOnFail( arraySize, _log ) )
def unitSize( cls, value, typeCode=None ):
"""Determine unit size of an array (if possible)
Uses our local type if defined, otherwise asks the handler to guess...
"""
return cls.getHandler(value).unitSize( value, typeCode or cls.typeConstant )
unitSize = classmethod( logs.logOnFail( unitSize, _log ) )
def zeros( cls, dims, typeCode=None ):
"""Allocate a return array of the given dimensions filled with zeros"""
return cls.returnHandler().zeros( dims, typeCode or cls.typeConstant )
zeros = classmethod( logs.logOnFail( zeros, _log ) )
def dimensions( cls, value ):
"""Given a data-value, get the dimensions (assumes full structure info)"""
return cls.getHandler(value).dimensions( value )
dimensions = classmethod( logs.logOnFail( dimensions, _log ) )
def arrayByteCount( cls, value ):
"""Given a data-value, try to determine number of bytes it's final form occupies
For most data-types this is arraySize() * atomic-unit-size
"""
return cls.getHandler(value).arrayByteCount( value )
arrayByteCount = classmethod( logs.logOnFail( arrayByteCount, _log ) )
# the final array data-type classes...
class GLclampdArray( ArrayDatatype, ctypes.POINTER(_types.GLclampd )):
"""Array datatype for GLclampd types"""
baseType = _types.GLclampd
typeConstant = _types.GL_DOUBLE
class GLclampfArray( ArrayDatatype, ctypes.POINTER(_types.GLclampf )):
"""Array datatype for GLclampf types"""
baseType = _types.GLclampf
typeConstant = _types.GL_FLOAT
class GLfloatArray( ArrayDatatype, ctypes.POINTER(_types.GLfloat )):
"""Array datatype for GLfloat types"""
baseType = _types.GLfloat
typeConstant = _types.GL_FLOAT
class GLdoubleArray( ArrayDatatype, ctypes.POINTER(_types.GLdouble )):
"""Array datatype for GLdouble types"""
baseType = _types.GLdouble
typeConstant = _types.GL_DOUBLE
class GLbyteArray( ArrayDatatype, ctypes.POINTER(_types.GLbyte )):
"""Array datatype for GLbyte types"""
baseType = _types.GLbyte
typeConstant = _types.GL_BYTE
class GLcharArray( ArrayDatatype, ctypes.c_char_p):
"""Array datatype for ARB extension pointers-to-arrays"""
baseType = _types.GLchar
typeConstant = _types.GL_BYTE
GLcharARBArray = GLcharArray
class GLshortArray( ArrayDatatype, ctypes.POINTER(_types.GLshort )):
"""Array datatype for GLshort types"""
baseType = _types.GLshort
typeConstant = _types.GL_SHORT
class GLintArray( ArrayDatatype, ctypes.POINTER(_types.GLint )):
"""Array datatype for GLint types"""
baseType = _types.GLint
typeConstant = _types.GL_INT
class GLubyteArray( ArrayDatatype, ctypes.POINTER(_types.GLubyte )):
"""Array datatype for GLubyte types"""
baseType = _types.GLubyte
typeConstant = _types.GL_UNSIGNED_BYTE
GLbooleanArray = GLubyteArray
class GLushortArray( ArrayDatatype, ctypes.POINTER(_types.GLushort )):
"""Array datatype for GLushort types"""
baseType = _types.GLushort
typeConstant = _types.GL_UNSIGNED_SHORT
class GLuintArray( ArrayDatatype, ctypes.POINTER(_types.GLuint )):
"""Array datatype for GLuint types"""
baseType = _types.GLuint
typeConstant = _types.GL_UNSIGNED_INT
class GLint64Array( ArrayDatatype, ctypes.POINTER(_types.GLint64 )):
"""Array datatype for GLuint types"""
baseType = _types.GLint64
typeConstant = None # TODO: find out what this should be!
class GLuint64Array( ArrayDatatype, ctypes.POINTER(_types.GLuint64 )):
"""Array datatype for GLuint types"""
baseType = _types.GLuint64
typeConstant = _types.GL_UNSIGNED_INT64
class GLenumArray( ArrayDatatype, ctypes.POINTER(_types.GLenum )):
"""Array datatype for GLenum types"""
baseType = _types.GLenum
typeConstant = _types.GL_UNSIGNED_INT
class GLsizeiArray( ArrayDatatype, ctypes.POINTER(_types.GLsizei )):
"""Array datatype for GLsizei types"""
baseType = _types.GLsizei
typeConstant = _types.GL_INT
class GLvoidpArray( ArrayDatatype, ctypes.POINTER(_types.GLvoid )):
"""Array datatype for GLenum types"""
baseType = _types.GLvoidp
typeConstant = _types.GL_VOID_P
else:
# Cython-coded array handler
_log.info( 'Using accelerated ArrayDatatype' )
ArrayDatatype = ADT( None, None )
GLclampdArray = ADT( GL_1_1.GL_DOUBLE, _types.GLclampd )
GLclampfArray = ADT( GL_1_1.GL_FLOAT, _types.GLclampf )
GLdoubleArray = ADT( GL_1_1.GL_DOUBLE, _types.GLdouble )
GLfloatArray = ADT( GL_1_1.GL_FLOAT, _types.GLfloat )
GLbyteArray = ADT( GL_1_1.GL_BYTE, _types.GLbyte )
GLcharArray = GLcharARBArray = ADT( GL_1_1.GL_BYTE, _types.GLchar )
GLshortArray = ADT( GL_1_1.GL_SHORT, _types.GLshort )
GLintArray = ADT( GL_1_1.GL_INT, _types.GLint )
GLubyteArray = GLbooleanArray = ADT( GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte )
GLushortArray = ADT( GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort )
GLuintArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLuint )
GLint64Array = ADT( None, _types.GLint64 )
GLuint64Array = ADT( GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64 )
GLenumArray = ADT( GL_1_1.GL_UNSIGNED_INT, _types.GLenum )
GLsizeiArray = ADT( GL_1_1.GL_INT, _types.GLsizei )
GLvoidpArray = ADT( _types.GL_VOID_P, _types.GLvoidp )
GL_CONSTANT_TO_ARRAY_TYPE = {
GL_1_1.GL_DOUBLE : GLclampdArray,
GL_1_1.GL_FLOAT : GLclampfArray,
GL_1_1.GL_FLOAT : GLfloatArray,
GL_1_1.GL_DOUBLE : GLdoubleArray,
GL_1_1.GL_BYTE : GLbyteArray,
GL_1_1.GL_SHORT : GLshortArray,
GL_1_1.GL_INT : GLintArray,
GL_1_1.GL_UNSIGNED_BYTE : GLubyteArray,
GL_1_1.GL_UNSIGNED_SHORT : GLushortArray,
GL_1_1.GL_UNSIGNED_INT : GLuintArray,
#GL_1_1.GL_UNSIGNED_INT : GLenumArray,
}
| [
"ctypes.POINTER",
"OpenGL.logs.getLog",
"OpenGL_accelerate.arraydatatype.ArrayDatatype",
"OpenGL.plugins.FormatHandler.by_name",
"OpenGL.logs.logOnFail",
"ctypes.c_void_p"
] | [((263, 305), 'OpenGL.logs.getLog', 'logs.getLog', (['"""OpenGL.arrays.arraydatatype"""'], {}), "('OpenGL.arrays.arraydatatype')\n", (274, 305), False, 'from OpenGL import logs\n'), ((8774, 8805), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLclampd'], {}), '(_types.GLclampd)\n', (8788, 8805), False, 'import ctypes\n'), ((8973, 9004), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLclampf'], {}), '(_types.GLclampf)\n', (8987, 9004), False, 'import ctypes\n'), ((9170, 9200), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLfloat'], {}), '(_types.GLfloat)\n', (9184, 9200), False, 'import ctypes\n'), ((9365, 9396), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLdouble'], {}), '(_types.GLdouble)\n', (9379, 9396), False, 'import ctypes\n'), ((9562, 9591), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLbyte'], {}), '(_types.GLbyte)\n', (9576, 9591), False, 'import ctypes\n'), ((9979, 10009), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLshort'], {}), '(_types.GLshort)\n', (9993, 10009), False, 'import ctypes\n'), ((10171, 10199), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLint'], {}), '(_types.GLint)\n', (10185, 10199), False, 'import ctypes\n'), ((10357, 10387), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLubyte'], {}), '(_types.GLubyte)\n', (10371, 10387), False, 'import ctypes\n'), ((10594, 10625), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLushort'], {}), '(_types.GLushort)\n', (10608, 10625), False, 'import ctypes\n'), ((10799, 10828), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLuint'], {}), '(_types.GLuint)\n', (10813, 10828), False, 'import ctypes\n'), ((11001, 11031), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLint64'], {}), '(_types.GLint64)\n', (11015, 11031), False, 'import ctypes\n'), ((11226, 11257), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLuint64'], {}), '(_types.GLuint64)\n', (11240, 11257), False, 'import ctypes\n'), ((11429, 11458), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLenum'], {}), '(_types.GLenum)\n', (11443, 11458), False, 'import ctypes\n'), ((11626, 11656), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLsizei'], {}), '(_types.GLsizei)\n', (11640, 11656), False, 'import ctypes\n'), ((11817, 11846), 'ctypes.POINTER', 'ctypes.POINTER', (['_types.GLvoid'], {}), '(_types.GLvoid)\n', (11831, 11846), False, 'import ctypes\n'), ((12080, 12095), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['None', 'None'], {}), '(None, None)\n', (12083, 12095), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12118, 12156), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_DOUBLE', '_types.GLclampd'], {}), '(GL_1_1.GL_DOUBLE, _types.GLclampd)\n', (12121, 12156), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12179, 12216), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_FLOAT', '_types.GLclampf'], {}), '(GL_1_1.GL_FLOAT, _types.GLclampf)\n', (12182, 12216), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12239, 12277), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_DOUBLE', '_types.GLdouble'], {}), '(GL_1_1.GL_DOUBLE, _types.GLdouble)\n', (12242, 12277), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12299, 12335), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_FLOAT', '_types.GLfloat'], {}), '(GL_1_1.GL_FLOAT, _types.GLfloat)\n', (12302, 12335), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12356, 
12390), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_BYTE', '_types.GLbyte'], {}), '(GL_1_1.GL_BYTE, _types.GLbyte)\n', (12359, 12390), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12428, 12462), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_BYTE', '_types.GLchar'], {}), '(GL_1_1.GL_BYTE, _types.GLchar)\n', (12431, 12462), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12484, 12520), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_SHORT', '_types.GLshort'], {}), '(GL_1_1.GL_SHORT, _types.GLshort)\n', (12487, 12520), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12540, 12572), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_INT', '_types.GLint'], {}), '(GL_1_1.GL_INT, _types.GLint)\n', (12543, 12572), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12611, 12655), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_BYTE', '_types.GLubyte'], {}), '(GL_1_1.GL_UNSIGNED_BYTE, _types.GLubyte)\n', (12614, 12655), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12678, 12724), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_SHORT', '_types.GLushort'], {}), '(GL_1_1.GL_UNSIGNED_SHORT, _types.GLushort)\n', (12681, 12724), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12745, 12787), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_INT', '_types.GLuint'], {}), '(GL_1_1.GL_UNSIGNED_INT, _types.GLuint)\n', (12748, 12787), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12809, 12834), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['None', '_types.GLint64'], {}), '(None, _types.GLint64)\n', (12812, 12834), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12857, 12903), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_INT64', '_types.GLuint64'], {}), '(GL_1_1.GL_UNSIGNED_INT64, _types.GLuint64)\n', (12860, 12903), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12924, 12966), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_UNSIGNED_INT', '_types.GLenum'], {}), '(GL_1_1.GL_UNSIGNED_INT, _types.GLenum)\n', (12927, 12966), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((12988, 13022), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['GL_1_1.GL_INT', '_types.GLsizei'], {}), '(GL_1_1.GL_INT, _types.GLsizei)\n', (12991, 13022), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((13044, 13081), 'OpenGL_accelerate.arraydatatype.ArrayDatatype', 'ADT', (['_types.GL_VOID_P', '_types.GLvoidp'], {}), '(_types.GL_VOID_P, _types.GLvoidp)\n', (13047, 13081), True, 'from OpenGL_accelerate.arraydatatype import ArrayDatatype as ADT\n'), ((2301, 2336), 'OpenGL.plugins.FormatHandler.by_name', 'plugins.FormatHandler.by_name', (['name'], {}), '(name)\n', (2330, 2336), False, 'from OpenGL import plugins\n'), ((5245, 5277), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['from_param', '_log'], {}), '(from_param, _log)\n', (5259, 5277), False, 'from OpenGL import logs\n'), ((5718, 5751), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['dataPointer', '_log'], {}), '(dataPointer, _log)\n', (5732, 5751), False, 'from OpenGL import logs\n'), ((6104, 6141), 
'OpenGL.logs.logOnFail', 'logs.logOnFail', (['voidDataPointer', '_log'], {}), '(voidDataPointer, _log)\n', (6118, 6141), False, 'from OpenGL import logs\n'), ((6641, 6670), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['asArray', '_log'], {}), '(asArray, _log)\n', (6655, 6670), False, 'from OpenGL import logs\n'), ((7033, 7068), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['arrayToGLType', '_log'], {}), '(arrayToGLType, _log)\n', (7047, 7068), False, 'from OpenGL import logs\n'), ((7341, 7372), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['arraySize', '_log'], {}), '(arraySize, _log)\n', (7355, 7372), False, 'from OpenGL import logs\n'), ((7722, 7752), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['unitSize', '_log'], {}), '(unitSize, _log)\n', (7736, 7752), False, 'from OpenGL import logs\n'), ((8000, 8027), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['zeros', '_log'], {}), '(zeros, _log)\n', (8014, 8027), False, 'from OpenGL import logs\n'), ((8252, 8284), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['dimensions', '_log'], {}), '(dimensions, _log)\n', (8266, 8284), False, 'from OpenGL import logs\n'), ((8636, 8672), 'OpenGL.logs.logOnFail', 'logs.logOnFail', (['arrayByteCount', '_log'], {}), '(arrayByteCount, _log)\n', (8650, 8672), False, 'from OpenGL import logs\n'), ((5972, 5996), 'ctypes.c_void_p', 'ctypes.c_void_p', (['pointer'], {}), '(pointer)\n', (5987, 5996), False, 'import ctypes\n'), ((6314, 6342), 'ctypes.POINTER', 'ctypes.POINTER', (['cls.baseType'], {}), '(cls.baseType)\n', (6328, 6342), False, 'import ctypes\n')] |
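# ---------------------------------------------------------------------------
# Editor's note: small usage sketch for the array datatype classes above; it is
# not part of the original PyOpenGL module. It assumes the normal OpenGL.arrays
# plugin machinery has registered a handler (numpy/ctypes/lists) at import time,
# so the conversions below are a sketch rather than a verified test.
# ---------------------------------------------------------------------------
def _gl_float_array_demo():
    data = [0.0, 1.0, 2.0, 3.0]
    arr = GLfloatArray.asArray(data)         # convert to the preferred array representation
    count = GLfloatArray.arraySize(arr)      # number of atomic units (4 here)
    ptr = GLfloatArray.voidDataPointer(arr)  # ctypes void* suitable for pointer-style GL calls
    return arr, count, ptr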
from __future__ import division, print_function
__author__ = 'saeedamen' # <NAME> / <EMAIL>
#
# Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
## Web server components
import dash_core_components as dcc
import dash_html_components as html
import base64
import os
## Date/time components
import pandas as pd
import datetime
from datetime import timedelta
from collections import OrderedDict
from pandas.tseries.offsets import *
from tcapy.vis.layoutdash import LayoutDash
########################################################################################################################
class LayoutDashImplGen(LayoutDash):
"""This implements the LayoutDash abstract class, to create the web based GUI for the tcapy application. It creates two
web pages
- detailed_page - for doing detailed tcapy analysis for a specific currency pair
- aggregated_page - for more aggregated style analysis across multiple currency pairs and over multiple time periods
"""
def __init__(self, app=None, constants=None, url_prefix=''):
super(LayoutDashImplGen, self).__init__(app=app, constants=constants, url_prefix=url_prefix)
available_dates = pd.date_range(
datetime.datetime.today().date() - timedelta(days=self._constants.gui_lookback_window),
datetime.datetime.today().date(), freq=BDay())
times = pd.date_range("0:00", "23:59", freq="15min")
### create the possible values for drop down boxes on both pages
# Reverse date list (for both detailed and aggregated pages)
self.available_dates = [x.date() for x in available_dates[::-1]]
# For detailed page only
self.available_times = [t.strftime("%H:%M") for t in times]
self.available_tickers = self._constants.available_tickers_dictionary['All']
self.available_venues = self._constants.available_venues_dictionary['All']
self.available_brokers = self._constants.available_brokers_dictionary['All']
self.available_algos = self._constants.available_algos_dictionary['All']
self.available_market_data = self._constants.available_market_data
self.available_order_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'arrival', 'twap', 'vwap',
'buy trade', 'sell trade']
self.available_execution_plot_lines = ['candlestick', 'mid', 'bid', 'ask', 'buy trade', 'sell trade']
self.available_slippage_bounds = ['0.25', '0.5', '1.0', '1.25', '1.5', '2.0', 'bid/ask']
# For aggregated page only
self.available_grouped_tickers = self._flatten_dictionary(self._constants.available_tickers_dictionary)
self.available_grouped_venues = self._flatten_dictionary(self._constants.available_venues_dictionary)
self.available_grouped_brokers = self._flatten_dictionary(self._constants.available_brokers_dictionary)
self.available_grouped_algos = self._flatten_dictionary(self._constants.available_algos_dictionary)
self.available_event_types = self._constants.available_event_types
self.available_metrics = self._constants.available_metrics
self.available_reload = ['no', 'yes']
self.available_visualization = ['yes', 'no']
self.construct_layout()
def _flatten_dictionary(self, dictionary):
available = dictionary['All']
available_groups = self._util_func.dict_key_list(dictionary.keys())
return self.flatten_list_of_strings([available_groups, available])
def construct_layout(self):
self.page_content = html.Div([
dcc.Location(id='url', refresh=False),
html.Div(id='page-content')
])
link_bar_dict = {'Detailed' : 'detailed',
'Aggregated' : 'aggregated',
'Compliance' : 'compliance'}
trade_outliers_cols = ['Date', 'ticker', 'side', 'notional cur', 'benchmark', 'exec not',
'exec not in rep cur', 'slippage']
broker_cols = ['Date', 'by broker notional (rep cur)']
# Main page for detailed analysing of (eg. over the course of a few days)
self.pages['detailed'] = html.Div([
self._sc.header_bar('FX: Detailed - Trader Analysis', img='logo.png'),
self._sc.link_bar(link_bar_dict),
self._sc.width_row_cell(html.B("Status: ok", id='detailed-status'), margin_left=5),
self._sc.horizontal_bar(),
# Dropdown selection boxes
html.Div([
self._sc.drop_down(caption='Start Date', id={'start-date-val' : self.available_dates,
'start-time-val' : self.available_times},
prefix_id='detailed'),
self._sc.drop_down(caption='Finish Date', id=OrderedDict([('finish-date-val', self.available_dates),
('finish-time-val', self.available_times)]),
prefix_id='detailed'),
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='detailed',
drop_down_values=self.available_tickers),
self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='detailed',
drop_down_values=self.available_grouped_brokers),
self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='detailed',
drop_down_values=self.available_grouped_algos),
self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='detailed',
drop_down_values=self.available_grouped_venues),
self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='detailed',
drop_down_values=self.available_market_data),
self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='detailed',
drop_down_values=self.available_metrics)
]),
self._sc.horizontal_bar(),
self._sc.button(caption='Calculate', id='calculation-button', prefix_id='detailed'),
# self.button(caption = 'Print PDF', id = 'detailed-print-pdf-button', className = 'no-print'),
# Orders
self._sc.horizontal_bar(),
self._sc.plot(caption='Orders: Timeline', id='order-candle-timeline-plot', prefix_id='detailed',
element_add=self._sc.timeline_dropdown('detailed-order-candle-timeline-plot',
self.available_order_plot_lines),
downloadplot_caption='Download CSV',
downloadplot_tag='order-candle-timeline-download-link',
download_file='download_order_candle_timeline', height=500),
self._sc.plot(caption='Orders: Markout', id='order-markout-plot', prefix_id='detailed', height=500),
self._sc.plot(caption='Orders: Histogram vs PDF fit', id='order-dist-plot', prefix_id='detailed', height=500),
# Execution trades
self._sc.horizontal_bar(),
self._sc.plot(caption='Executions: Timeline', id='execution-candle-timeline-plot', prefix_id='detailed',
element_add=self._sc.timeline_dropdown('detailed-execution-candle-timeline-plot',
self.available_execution_plot_lines),
downloadplot_caption='Download CSV',
downloadplot_tag='execution-candle-timeline-download-link',
download_file='download_execution_candle_timeline.csv', height=500),
self._sc.plot(caption='Executions: Markout', id='execution-markout-plot', prefix_id='detailed', height=500),
self._sc.plot(caption='Executions: Histogram vs PDF fit', id='execution-dist-plot', prefix_id='detailed', height=500),
# Detailed tcapy markout table for executions
html.Div([
html.H3('Executions: Markout Table'),
html.Div(id='detailed-execution-table')
],
style={'width': '1000px', 'display': 'inline-block', 'marginBottom': 5, 'marginTop': 5, 'marginLeft': 5,
'marginRight': 5}),
],
style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})
################################################################################################################
# Secondary page for analysing aggregated statistics over long periods of time, eg. who is the best broker?
self.pages['aggregated'] = html.Div([
self._sc.header_bar('FX: Aggregated - Trader Analysis', img='logo.png'),
self._sc.link_bar(link_bar_dict),
self._sc.width_row_cell(html.B("Status: ok", id='aggregated-status'), margin_left=5),
self._sc.horizontal_bar(),
# dropdown selection boxes
html.Div([
self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='aggregated',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='aggregated',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_tickers, multiselect=True),
self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_brokers, multiselect=True),
self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_algos, multiselect=True),
self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='aggregated',
drop_down_values=self.available_grouped_venues, multiselect=True),
self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='aggregated',
drop_down_values=self.available_reload),
self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='aggregated',
drop_down_values=self.available_market_data),
self._sc.drop_down(caption='Event Type', id='event-type-val', prefix_id='aggregated',
drop_down_values=self.available_event_types),
self._sc.drop_down(caption='Metric', id='metric-val', prefix_id='aggregated',
drop_down_values=self.available_metrics),
]),
self._sc.horizontal_bar(),
self._sc.button(caption='Calculate', id='calculation-button', prefix_id='aggregated'),
# , msg_id='aggregated-status'),
self._sc.horizontal_bar(),
# self.date_picker_range(caption='Start/Finish Dates', id='aggregated-date-val', offset=[-7,-1]),
self._sc.plot(caption='Aggregated Trader: Summary',
id=['execution-by-ticker-bar-plot', 'execution-by-venue-bar-plot'], prefix_id='aggregated', height=500),
self._sc.horizontal_bar(),
self._sc.plot(caption='Aggregated Trader: Timeline', id='execution-by-ticker-timeline-plot',
prefix_id='aggregated', height=500),
self._sc.horizontal_bar(),
self._sc.plot(caption='Aggregated Trader: PDF fit (' + self._constants.reporting_currency + ' notional)', id=['execution-by-ticker-dist-plot',
'execution-by-venue-dist-plot'],
prefix_id='aggregated', height=500),
self._sc.horizontal_bar()
],
style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})
################################################################################################################
self.pages['compliance'] = html.Div([
self._sc.header_bar('FX: Compliance Analysis', img='logo.png'),
self._sc.link_bar(link_bar_dict),
self._sc.width_row_cell(html.B("Status: ok", id='compliance-status'), margin_left=5),
self._sc.horizontal_bar(),
# Dropdown selection boxes
html.Div([
self._sc.drop_down(caption='Start Date', id='start-date-val', prefix_id='compliance',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Finish Date', id='finish-date-val', prefix_id='compliance',
drop_down_values=self.available_dates),
self._sc.drop_down(caption='Ticker', id='ticker-val', prefix_id='compliance',
drop_down_values=self.available_grouped_tickers, multiselect=True),
self._sc.drop_down(caption='Broker', id='broker-val', prefix_id='compliance',
drop_down_values=self.available_grouped_brokers, multiselect=True),
self._sc.drop_down(caption='Algo', id='algo-val', prefix_id='compliance',
drop_down_values=self.available_grouped_algos, multiselect=True),
self._sc.drop_down(caption='Venue', id='venue-val', prefix_id='compliance',
drop_down_values=self.available_grouped_venues, multiselect=True),
self._sc.drop_down(caption='Reload', id='reload-val', prefix_id='compliance',
drop_down_values=self.available_reload),
self._sc.drop_down(caption='Market Data', id='market-data-val', prefix_id='compliance',
drop_down_values=self.available_market_data),
self._sc.drop_down(caption='Filter by Time', id='filter-time-of-day-val', prefix_id='compliance',
drop_down_values=self.available_reload),
self._sc.drop_down(caption='Start Time of Day', id='start-time-of-day-val', prefix_id='compliance',
drop_down_values=self.available_times),
self._sc.drop_down(caption='Finish Time of Day', id='finish-time-of-day-val', prefix_id='compliance',
drop_down_values=self.available_times),
self._sc.drop_down(caption='Slippage to Mid (bp)', id='slippage-bounds-val', prefix_id='compliance',
drop_down_values=self.available_slippage_bounds),
self._sc.drop_down(caption='Visualization', id='visualization-val', prefix_id='compliance',
drop_down_values=self.available_visualization)
]),
self._sc.horizontal_bar(),
html.Div([
self._sc.button(caption='Calculate', id='calculation-button', prefix_id='compliance'),
# self.date_picker(caption='Start Date', id='start-date-dtpicker', prefix_id='compliance'),
# self.date_picker(caption='Finish Date', id='finish-date-dtpicker', prefix_id='compliance'),
]),
self._sc.horizontal_bar(),
self._sc.table(caption='Compliance: Trade Outliers', id='execution-by-anomalous-table', prefix_id='compliance',
columns=trade_outliers_cols,
downloadplot_caption='Trade outliers CSV',
downloadplot_tag='execution-by-anomalous-download-link',
download_file='download_execution_by_anomalous.csv'),
self._sc.table(caption='Compliance: Totals by Broker', id='summary-by-broker-table', prefix_id='compliance',
columns=broker_cols,
downloadplot_caption='Download broker CSV',
downloadplot_tag='summary-by-broker-download-link',
download_file='download_broker.csv'
),
self._sc.horizontal_bar()
],
style={'width': '1000px', 'marginRight': 'auto', 'marginLeft': 'auto'})
# ID flags
self.id_flags = {
# Detailed trader page
# 'timeline_trade_orders' : {'client-orders': 'order', 'executions': 'trade'},
# 'markout_trade_orders' : {'client-orders': 'order_df', 'executions': 'trade_df'},
'detailed_candle_timeline_trade_order': {'execution': 'sparse_market_trade_df',
'order': 'sparse_market_order_df'},
'detailed_markout_trade_order': {'execution': 'trade_df', 'order': 'order_df'},
'detailed_table_trade_order': {'execution': 'table_trade_df_markout_by_all'},
'detailed_dist_trade_order': {'execution': 'dist_trade_df_by/pdf/side', 'order': 'dist_order_df_by/pdf/side'},
'detailed_download_link_trade_order': {'execution-candle-timeline': 'sparse_market_trade_df',
'order-candle-timeline': 'sparse_market_order_df'},
# Aggregated trader page
'aggregated_bar_trade_order': {'execution-by-ticker': 'bar_trade_df_by/mean/ticker',
'execution-by-venue': 'bar_trade_df_by/mean/venue'},
'aggregated_timeline_trade_order': {'execution-by-ticker': 'timeline_trade_df_by/mean_date/ticker',
'execution-by-venue': 'timeline_trade_df_by/mean_date/venue'},
'aggregated_dist_trade_order': {'execution-by-ticker': 'dist_trade_df_by/pdf/ticker',
'execution-by-venue': 'dist_trade_df_by/pdf/venue'},
# Compliance page
'compliance_metric_table_trade_order':
{'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all',
'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'},
'compliance_download_link_trade_order':
{'execution-by-anomalous': 'table_trade_df_slippage_by_worst_all',
'summary-by-broker': 'bar_trade_df_executed_notional_in_reporting_currency_by_broker_id'},
}
| [
"collections.OrderedDict",
"dash_core_components.Location",
"dash_html_components.H3",
"pandas.date_range",
"datetime.datetime.today",
"datetime.timedelta",
"dash_html_components.B",
"dash_html_components.Div"
] | [((1504, 1548), 'pandas.date_range', 'pd.date_range', (['"""0:00"""', '"""23:59"""'], {'freq': '"""15min"""'}), "('0:00', '23:59', freq='15min')\n", (1517, 1548), True, 'import pandas as pd\n'), ((1375, 1426), 'datetime.timedelta', 'timedelta', ([], {'days': 'self._constants.gui_lookback_window'}), '(days=self._constants.gui_lookback_window)\n', (1384, 1426), False, 'from datetime import timedelta\n'), ((3740, 3777), 'dash_core_components.Location', 'dcc.Location', ([], {'id': '"""url"""', 'refresh': '(False)'}), "(id='url', refresh=False)\n", (3752, 3777), True, 'import dash_core_components as dcc\n'), ((3791, 3818), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""page-content"""'}), "(id='page-content')\n", (3799, 3818), True, 'import dash_html_components as html\n'), ((1440, 1465), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1463, 1465), False, 'import datetime\n'), ((4511, 4553), 'dash_html_components.B', 'html.B', (['"""Status: ok"""'], {'id': '"""detailed-status"""'}), "('Status: ok', id='detailed-status')\n", (4517, 4553), True, 'import dash_html_components as html\n'), ((9100, 9144), 'dash_html_components.B', 'html.B', (['"""Status: ok"""'], {'id': '"""aggregated-status"""'}), "('Status: ok', id='aggregated-status')\n", (9106, 9144), True, 'import dash_html_components as html\n'), ((12623, 12667), 'dash_html_components.B', 'html.B', (['"""Status: ok"""'], {'id': '"""compliance-status"""'}), "('Status: ok', id='compliance-status')\n", (12629, 12667), True, 'import dash_html_components as html\n'), ((1340, 1365), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1363, 1365), False, 'import datetime\n'), ((8279, 8315), 'dash_html_components.H3', 'html.H3', (['"""Executions: Markout Table"""'], {}), "('Executions: Markout Table')\n", (8286, 8315), True, 'import dash_html_components as html\n'), ((8333, 8372), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""detailed-execution-table"""'}), "(id='detailed-execution-table')\n", (8341, 8372), True, 'import dash_html_components as html\n'), ((4989, 5092), 'collections.OrderedDict', 'OrderedDict', (["[('finish-date-val', self.available_dates), ('finish-time-val', self.\n available_times)]"], {}), "([('finish-date-val', self.available_dates), ('finish-time-val',\n self.available_times)])\n", (5000, 5092), False, 'from collections import OrderedDict\n')] |
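# ---------------------------------------------------------------------------
# Editor's note: sketch of what _flatten_dictionary above produces for the
# grouped dropdowns; it is not part of the original tcapy source and the ticker
# names below are invented. It mirrors the helper rather than calling it, since
# constructing a full LayoutDashImplGen needs the tcapy constants/app objects.
# ---------------------------------------------------------------------------
def _flatten_dictionary_demo():
    available_tickers_dictionary = {
        'All': ['EURUSD', 'GBPUSD', 'USDJPY'],
        'G10': ['EURUSD', 'USDJPY'],
        'EM': ['USDMXN'],
    }
    # Group keys first, then the flat 'All' list, matching the dropdown contents.
    available = available_tickers_dictionary['All']
    available_groups = list(available_tickers_dictionary.keys())
    return available_groups + available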
import pytest
import stk
from ...case_data import CaseData
@pytest.fixture(
scope='session',
params=(
lambda name: CaseData(
molecule=stk.ConstructedMolecule(
topology_graph=stk.cof.PeriodicKagome(
building_blocks=(
stk.BuildingBlock(
smiles='BrC1=C(Br)[C+]=N1',
functional_groups=[stk.BromoFactory()],
),
stk.BuildingBlock(
smiles=(
'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+]('
'Br)[C+2]1'
),
functional_groups=[stk.BromoFactory()],
),
),
lattice_size=(2, 2, 1),
),
),
smiles=(
'[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]='
'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N='
'[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C'
'+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1'
'5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%'
'20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21'
')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2]['
'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N'
'=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)'
'[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]'
'5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206['
'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+'
']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C'
'2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N='
'[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%'
'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121'
),
name=name,
),
lambda name: CaseData(
molecule=stk.ConstructedMolecule(
topology_graph=stk.cof.PeriodicKagome(
building_blocks=(
stk.BuildingBlock(
smiles='BrC1=C(Br)[C+]=N1',
functional_groups=[stk.BromoFactory()],
),
stk.BuildingBlock(
smiles=(
'Br[C+]1C2(Br)[C+]=N[C+]2[C+](Br)[C+]('
'Br)[C+2]1'
),
functional_groups=[stk.BromoFactory()],
),
),
lattice_size=(2, 2, 1),
optimizer=stk.PeriodicCollapser(),
),
),
smiles=(
'[C+]1=NC2=C1[C+]1[C+]3[C+2][C+]4C5=C(N=[C+]5)C56[C+]='
'N[C+]5[C+]5C7=C([C+]=N7)[C+]7[C+]8[C+2][C+]9C%10=C(N='
'[C+]%10)[C+]%10[C+2][C+]%11C%12=C([C+]=N%12)[C+]%12[C'
'+]%13[C+2][C+]%14C%15=C(N=[C+]%15)C%15%16[C+]=N[C+]%1'
'5[C+]%15C%17=C([C+]=N%17)[C+]%17[C+]%18[C+2][C+]%19C%'
'20=C(N=[C+]%20)[C+]%20[C+2][C+]2[C+]2C%21=C([C+]=N%21'
')[C+]%21[C+]([C+2][C+](C%22=C(N=[C+]%22)[C+]%16[C+2]['
'C+]%15C%15=C([C+]=N%15)[C+]%15[C+]([C+2][C+](C%16=C(N'
'=[C+]%16)C%10%16[C+]=N[C+]%16[C+]%11C%10=C([C+]=N%10)'
'[C+]%10[C+]([C+2][C+](C%11=C(N=[C+]%11)[C+]6[C+2][C+]'
'5C5=C([C+]=N5)[C+]5[C+]([C+2][C+](C6=C(N=[C+]6)C%206['
'C+]=N[C+]26)C2([C+]=N[C+]52)C2=C%18N=[C+]2)C2=C(N=[C+'
']2)C92[C+]=N[C+]72)C2([C+]=N[C+]%102)C2=C%13[C+]=N2)C'
'2=C([C+]=N2)C42[C+]=N[C+]12)C1([C+]=N[C+]%151)C1=C8N='
'[C+]1)C1=C(N=[C+]1)C%191[C+]=N[C+]%171)C1([C+]=N[C+]%'
'211)C1=C3[C+]=N1)C1=C([C+]=N1)C%141[C+]=N[C+]%121'
),
name=name,
),
),
)
def cof_periodic_kagome(request) -> CaseData:
return request.param(
f'{request.fixturename}{request.param_index}',
)
| [
"stk.BromoFactory",
"stk.PeriodicCollapser"
] | [((2901, 2924), 'stk.PeriodicCollapser', 'stk.PeriodicCollapser', ([], {}), '()\n', (2922, 2924), False, 'import stk\n'), ((429, 447), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (445, 447), False, 'import stk\n'), ((751, 769), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (767, 769), False, 'import stk\n'), ((2434, 2452), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (2450, 2452), False, 'import stk\n'), ((2756, 2774), 'stk.BromoFactory', 'stk.BromoFactory', ([], {}), '()\n', (2772, 2774), False, 'import stk\n')] |
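# ---------------------------------------------------------------------------
# Editor's note: sketch of how a test would typically consume the session-scoped
# fixture above; it is not part of the original test suite. It only relies on the
# CaseData fields populated in the constructor calls above (molecule, smiles, name).
# ---------------------------------------------------------------------------
def test_cof_periodic_kagome_case(cof_periodic_kagome: CaseData) -> None:
    # pytest injects one CaseData per parametrised lambda (with and without the optimizer).
    case = cof_periodic_kagome
    assert case.name.startswith('cof_periodic_kagome')
    assert case.smiles  # target SMILES recorded for the constructed periodic molecule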
#!python3
import os
import pandas as pd
import tensorflow as tf
from tensorflow.keras import layers
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
# gpu_devices = tf.config.experimental.list_physical_devices("GPU")
# for device in gpu_devices:
# tf.config.experimental.set_memory_growth(device, True)
def trainModel(data_in, params_in):
data_in = data_in.take(2048)
data_in = data_in.shuffle(24)
data_in = data_in.batch(1024)
arch = params_in["Architecture"]
dropout = params_in["Dropout"]
lr = params_in["LearningRate"]
attrs = params_in["Attrs"]
epochs = params_in["Epochs"]
if arch == "BaseCNN":
if params_in["BatchNorm"]:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.BatchNormalization(),
layers.Flatten(),
layers.Dense(50, "relu"),
layers.Dense(1)
])
else:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Flatten(),
layers.Dense(50, "relu"),
layers.Dense(1)
])
elif arch == "CNN-LSTM":
if params_in["BatchNorm"]:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.BatchNormalization(),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=False),
layers.Dense(50, "relu"),
layers.Dense(1)
])
else:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=False),
layers.Dense(50, "relu"),
layers.Dense(1)
])
elif arch == "CNN-2LSTM":
if params_in["BatchNorm"]:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.BatchNormalization(),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=True),
layers.LSTM(30, return_sequences=False),
layers.Dense(1)
])
else:
model = tf.keras.Sequential([
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu", input_shape=(1, 50, attrs)),
layers.Dropout(dropout),
layers.Conv1D(filters=10, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Conv1D(filters=1, kernel_size=5, padding="same", activation="relu"),
layers.Dropout(dropout),
layers.Reshape((5, 10)),
layers.LSTM(30, return_sequences=True),
layers.LSTM(30, return_sequences=False),
layers.Dense(1)
])
model.compile(loss=tf.losses.MeanSquaredError(), optimizer=tf.optimizers.Adam(learning_rate=lr, amsgrad=True))
filepath = "./checkpoints/Model_in-" + arch + str(attrs) + ".h5"
losses = []
class CustomModelCheckPoint(tf.keras.callbacks.Callback):
def __init__(self, **kargs):
super(CustomModelCheckPoint, self).__init__(**kargs)
self.epoch_loss = {} # accuracy at given epoch
def on_epoch_begin(self, epoch, logs={}):
# Things done on beginning of epoch.
return
def on_epoch_end(self, epoch, logs={}):
# things done on end of the epoch
self.epoch_loss[epoch] = logs.get("loss")
losses.append(self.epoch_loss[epoch])
if params_in["ResumeTraining"]:
model.load_weights(filepath)
checkpoint2 = CustomModelCheckPoint()
    checkpoint = tf.keras.callbacks.ModelCheckpoint(filepath, monitor='loss', verbose=0, save_best_only=True,
save_freq='epoch')
model.fit(data_in, epochs=epochs, callbacks=[checkpoint, checkpoint2])
df_loss = pd.DataFrame()
df_loss["Epochs"] = list(range(1, epochs + 1))
df_loss["Loss"] = losses
df_loss.to_csv("./losses/lossTrend.csv", index=False)
| [
"tensorflow.keras.layers.Reshape",
"tensorflow.losses.MeanSquaredError",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.BatchNormalization",
"tensorflow.keras.layers.LSTM",
"tensorflow.keras.layers.Dense",
"tensorflow.optimizers.Adam",
"tensorflow.keras.callbacks.ModelCheckpoint",
"pandas.DataFrame",
"tensorflow.keras.layers.Flatten",
"tensorflow.keras.layers.Conv1D"
] | [((5713, 5827), 'tensorflow.keras.callbacks.ModelCheckpoint', 'tf.keras.callbacks.ModelCheckpoint', (['filepath'], {'monitor': '"""loss"""', 'verbos': '(0)', 'save_best_only': '(True)', 'save_freq': '"""epoch"""'}), "(filepath, monitor='loss', verbos=0,\n save_best_only=True, save_freq='epoch')\n", (5747, 5827), True, 'import tensorflow as tf\n'), ((5966, 5980), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (5978, 5980), True, 'import pandas as pd\n'), ((4856, 4884), 'tensorflow.losses.MeanSquaredError', 'tf.losses.MeanSquaredError', ([], {}), '()\n', (4882, 4884), True, 'import tensorflow as tf\n'), ((4896, 4946), 'tensorflow.optimizers.Adam', 'tf.optimizers.Adam', ([], {'learning_rate': 'lr', 'amsgrad': '(True)'}), '(learning_rate=lr, amsgrad=True)\n', (4914, 4946), True, 'import tensorflow as tf\n'), ((735, 842), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (748, 842), False, 'from tensorflow.keras import layers\n'), ((856, 879), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (870, 879), False, 'from tensorflow.keras import layers\n'), ((897, 972), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (910, 972), False, 'from tensorflow.keras import layers\n'), ((990, 1013), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1004, 1013), False, 'from tensorflow.keras import layers\n'), ((1031, 1105), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (1044, 1105), False, 'from tensorflow.keras import layers\n'), ((1123, 1146), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1137, 1146), False, 'from tensorflow.keras import layers\n'), ((1164, 1191), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (1189, 1191), False, 'from tensorflow.keras import layers\n'), ((1209, 1225), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (1223, 1225), False, 'from tensorflow.keras import layers\n'), ((1243, 1267), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (1255, 1267), False, 'from tensorflow.keras import layers\n'), ((1285, 1300), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (1297, 1300), False, 'from tensorflow.keras import layers\n'), ((1388, 1495), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (1401, 1495), False, 'from tensorflow.keras import layers\n'), ((1509, 1532), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1523, 1532), False, 'from tensorflow.keras import layers\n'), ((1550, 1625), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 
'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (1563, 1625), False, 'from tensorflow.keras import layers\n'), ((1643, 1666), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1657, 1666), False, 'from tensorflow.keras import layers\n'), ((1684, 1758), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (1697, 1758), False, 'from tensorflow.keras import layers\n'), ((1776, 1799), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (1790, 1799), False, 'from tensorflow.keras import layers\n'), ((1817, 1833), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (1831, 1833), False, 'from tensorflow.keras import layers\n'), ((1851, 1875), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (1863, 1875), False, 'from tensorflow.keras import layers\n'), ((1893, 1908), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (1905, 1908), False, 'from tensorflow.keras import layers\n'), ((2047, 2154), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (2060, 2154), False, 'from tensorflow.keras import layers\n'), ((2168, 2191), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2182, 2191), False, 'from tensorflow.keras import layers\n'), ((2209, 2284), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (2222, 2284), False, 'from tensorflow.keras import layers\n'), ((2302, 2325), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2316, 2325), False, 'from tensorflow.keras import layers\n'), ((2343, 2417), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (2356, 2417), False, 'from tensorflow.keras import layers\n'), ((2435, 2458), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2449, 2458), False, 'from tensorflow.keras import layers\n'), ((2476, 2503), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (2501, 2503), False, 'from tensorflow.keras import layers\n'), ((2521, 2544), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (2535, 2544), False, 'from tensorflow.keras import layers\n'), ((2562, 2601), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (2573, 2601), False, 'from tensorflow.keras import layers\n'), ((2619, 2643), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (2631, 2643), False, 'from tensorflow.keras import layers\n'), ((2661, 2676), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (2673, 
2676), False, 'from tensorflow.keras import layers\n'), ((2764, 2871), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (2777, 2871), False, 'from tensorflow.keras import layers\n'), ((2885, 2908), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (2899, 2908), False, 'from tensorflow.keras import layers\n'), ((2926, 3001), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (2939, 3001), False, 'from tensorflow.keras import layers\n'), ((3019, 3042), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3033, 3042), False, 'from tensorflow.keras import layers\n'), ((3060, 3134), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (3073, 3134), False, 'from tensorflow.keras import layers\n'), ((3152, 3175), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3166, 3175), False, 'from tensorflow.keras import layers\n'), ((3193, 3216), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (3207, 3216), False, 'from tensorflow.keras import layers\n'), ((3234, 3273), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (3245, 3273), False, 'from tensorflow.keras import layers\n'), ((3291, 3315), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(50)', '"""relu"""'], {}), "(50, 'relu')\n", (3303, 3315), False, 'from tensorflow.keras import layers\n'), ((3333, 3348), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (3345, 3348), False, 'from tensorflow.keras import layers\n'), ((3488, 3595), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (3501, 3595), False, 'from tensorflow.keras import layers\n'), ((3609, 3632), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3623, 3632), False, 'from tensorflow.keras import layers\n'), ((3650, 3725), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (3663, 3725), False, 'from tensorflow.keras import layers\n'), ((3743, 3766), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3757, 3766), False, 'from tensorflow.keras import layers\n'), ((3784, 3858), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (3797, 3858), False, 'from tensorflow.keras import layers\n'), ((3876, 3899), 'tensorflow.keras.layers.Dropout', 
'layers.Dropout', (['dropout'], {}), '(dropout)\n', (3890, 3899), False, 'from tensorflow.keras import layers\n'), ((3917, 3944), 'tensorflow.keras.layers.BatchNormalization', 'layers.BatchNormalization', ([], {}), '()\n', (3942, 3944), False, 'from tensorflow.keras import layers\n'), ((3962, 3985), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (3976, 3985), False, 'from tensorflow.keras import layers\n'), ((4003, 4041), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(True)'}), '(30, return_sequences=True)\n', (4014, 4041), False, 'from tensorflow.keras import layers\n'), ((4059, 4098), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (4070, 4098), False, 'from tensorflow.keras import layers\n'), ((4116, 4131), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (4128, 4131), False, 'from tensorflow.keras import layers\n'), ((4219, 4326), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""', 'input_shape': '(1, 50, attrs)'}), "(filters=10, kernel_size=5, padding='same', activation='relu',\n input_shape=(1, 50, attrs))\n", (4232, 4326), False, 'from tensorflow.keras import layers\n'), ((4340, 4363), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (4354, 4363), False, 'from tensorflow.keras import layers\n'), ((4381, 4456), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(10)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=10, kernel_size=5, padding='same', activation='relu')\n", (4394, 4456), False, 'from tensorflow.keras import layers\n'), ((4474, 4497), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (4488, 4497), False, 'from tensorflow.keras import layers\n'), ((4515, 4589), 'tensorflow.keras.layers.Conv1D', 'layers.Conv1D', ([], {'filters': '(1)', 'kernel_size': '(5)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=1, kernel_size=5, padding='same', activation='relu')\n", (4528, 4589), False, 'from tensorflow.keras import layers\n'), ((4607, 4630), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['dropout'], {}), '(dropout)\n', (4621, 4630), False, 'from tensorflow.keras import layers\n'), ((4648, 4671), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(5, 10)'], {}), '((5, 10))\n', (4662, 4671), False, 'from tensorflow.keras import layers\n'), ((4689, 4727), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(True)'}), '(30, return_sequences=True)\n', (4700, 4727), False, 'from tensorflow.keras import layers\n'), ((4745, 4784), 'tensorflow.keras.layers.LSTM', 'layers.LSTM', (['(30)'], {'return_sequences': '(False)'}), '(30, return_sequences=False)\n', (4756, 4784), False, 'from tensorflow.keras import layers\n'), ((4802, 4817), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {}), '(1)\n', (4814, 4817), False, 'from tensorflow.keras import layers\n')] |
# -*- encoding: utf8 -*-
import numpy as np
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from lvq import SilvqModel
from lvq.utils import plot2d
def main():
# Load dataset
dataset = np.loadtxt('data/artificial_dataset1.csv', delimiter=',')
x = dataset[:, :-1].astype('float64')
y = dataset[:, -1].astype('int64')
# Split dataset into training set and test set
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y)
# Generating model
model = SilvqModel(x.shape[1], theta=0.8, bias_type='ls')
# Training the model
model.fit(x_train, y_train, epochs=30)
# Predict the response for test dataset
y_predict = model.predict(x_test)
# Evaluating the model
print('Accuracy: %.3f' %accuracy_score(y_test, y_predict))
# Plot prediction results and prototypes
plot2d(model, x, y, title='Artificial dataset1')
if __name__ == '__main__':
main()
| [
"sklearn.metrics.accuracy_score",
"sklearn.model_selection.train_test_split",
"lvq.SilvqModel",
"numpy.loadtxt",
"lvq.utils.plot2d"
] | [((245, 302), 'numpy.loadtxt', 'np.loadtxt', (['"""data/artificial_dataset1.csv"""'], {'delimiter': '""","""'}), "('data/artificial_dataset1.csv', delimiter=',')\n", (255, 302), True, 'import numpy as np\n'), ((474, 553), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.2)', 'random_state': '(3)', 'shuffle': '(True)', 'stratify': 'y'}), '(x, y, test_size=0.2, random_state=3, shuffle=True, stratify=y)\n', (490, 553), False, 'from sklearn.model_selection import train_test_split\n'), ((590, 639), 'lvq.SilvqModel', 'SilvqModel', (['x.shape[1]'], {'theta': '(0.8)', 'bias_type': '"""ls"""'}), "(x.shape[1], theta=0.8, bias_type='ls')\n", (600, 639), False, 'from lvq import SilvqModel\n'), ((930, 978), 'lvq.utils.plot2d', 'plot2d', (['model', 'x', 'y'], {'title': '"""Artificial dataset1"""'}), "(model, x, y, title='Artificial dataset1')\n", (936, 978), False, 'from lvq.utils import plot2d\n'), ((846, 879), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_predict'], {}), '(y_test, y_predict)\n', (860, 879), False, 'from sklearn.metrics import accuracy_score\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
#Importing all required libraries
# In[ ]:
from __future__ import absolute_import, division, print_function, unicode_literals
# In[ ]:
#Checking for correct cuda and tf versions
from tensorflow.python.platform import build_info as tf_build_info
print(tf_build_info.cuda_version_number)
# 9.0 in v1.10.0
print(tf_build_info.cudnn_version_number)
# 7 in v1.10.0
# In[ ]:
import tensorflow as tf
import pathlib
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import os
import numpy as np
import matplotlib.pyplot as plt
# In[ ]:
AUTOTUNE = tf.data.experimental.AUTOTUNE
# In[ ]:
import IPython.display as display
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import os
# In[ ]:
tf.__version__
# In[ ]:
#Train and test data folder
train_data_dir = "\\hyper-kvasir\\splits\\all\\1"
test_data_dir = "\\hyper-kvasir\\splits\\all\\0"
# In[ ]:
train_data_dir = pathlib.Path(train_data_dir)
test_data_dir = pathlib.Path(test_data_dir)
# In[ ]:
#count how many images are there
image_count = len(list(train_data_dir.glob('*/*.jpg')))
image_count
# In[ ]:
total_train = len(list(train_data_dir.glob('*/*.jpg')))
total_val = len(list(test_data_dir.glob('*/*.jpg')))
# In[ ]:
#get the class names
CLASS_NAMES = np.array([item.name for item in train_data_dir.glob('*') if item.name != "LICENSE.txt"])
CLASS_NAMES
# In[ ]:
#Define parameter for training
batch_size = 32
IMG_HEIGHT = 224
IMG_WIDTH = 224
STEPS_PER_EPOCH = np.ceil(image_count/batch_size)
epochs = 8
num_classes = len(CLASS_NAMES) #23
# In[ ]:
#We use image data generators to load the images and prepare them for the training
train_image_generator = ImageDataGenerator() # Generator for our training data
validation_image_generator = ImageDataGenerator() # Generator for our validation data
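# Optional tweak (assumption, not in the original notebook): ResNet50 was trained
# with its own channel-wise input preprocessing. Flip the flag below to apply it to
# both generators; it is disabled by default so the original pipeline is unchanged.
USE_RESNET_PREPROCESSING = False
if USE_RESNET_PREPROCESSING:
    from tensorflow.keras.applications.resnet50 import preprocess_input
    train_image_generator = ImageDataGenerator(preprocessing_function=preprocess_input)
    validation_image_generator = ImageDataGenerator(preprocessing_function=preprocess_input)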
train_data_gen = train_image_generator.flow_from_directory(directory=str(train_data_dir),
batch_size=batch_size,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
classes = list(CLASS_NAMES),
class_mode='categorical'
)
val_data_gen = validation_image_generator.flow_from_directory(directory=str(test_data_dir),
batch_size=batch_size,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
class_mode='categorical',
classes = list(CLASS_NAMES)
)
#get class order from directories
print(train_data_gen.class_indices.keys())
print(val_data_gen.class_indices.keys())
# In[ ]:
IMG_SIZE = 224
IMG_SHAPE = (IMG_SIZE, IMG_SIZE, 3)
# base model from the pre-trained model. Resnet 50 in this case
base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,
include_top=False,
weights='imagenet')
base_model.trainable = False
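# Freeze the convolutional base so that only the new classification head added
# below is trained during the first training run.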
# In[ ]:
#add new classification layer
x = base_model.output
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = tf.keras.layers.Dense(num_classes,activation='softmax')(x)
model = tf.keras.models.Model(inputs=base_model.input, outputs=x)
base_learning_rate = 0.001
model.compile(optimizer=tf.keras.optimizers.Adam(lr=base_learning_rate),
loss='categorical_crossentropy',
metrics=['accuracy'])
# In[ ]:
#fit the model
history = model.fit_generator(
train_data_gen,
steps_per_epoch=total_train // batch_size,
epochs=epochs,
validation_data=val_data_gen,
validation_steps=total_val // batch_size
)
# In[ ]:
#create training plots
history
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
# In[ ]:
base_model.trainable = True #now we want to train the base model
# In[ ]:
# How many layers are in the base model
print("Layers base model: ", len(base_model.layers))
# Fine tune from layer x
fine_tune_at = 100
# Freeze all the layers before the fine tune starting layer
for layer in base_model.layers[:fine_tune_at]:
layer.trainable = False
# In[ ]:
model.compile(loss='categorical_crossentropy',
optimizer = tf.keras.optimizers.RMSprop(lr=base_learning_rate/10),
metrics=['accuracy'])
# In[ ]:
model.summary()
# In[ ]:
#Fine tune step
initial_epochs = 7
fine_tune_epochs = 3
total_epochs = initial_epochs + fine_tune_epochs
train_batches = total_train // batch_size
print(total_val // batch_size)
validation_batches = total_val // batch_size
history_fine = model.fit_generator(
train_data_gen,
steps_per_epoch=total_train // batch_size,
epochs=total_epochs,
initial_epoch = history.epoch[-1],
validation_data=val_data_gen,
validation_steps=total_val // batch_size
)
# In[ ]:
acc += history_fine.history['accuracy']
val_acc += history_fine.history['val_accuracy']
loss += history_fine.history['loss']
val_loss += history_fine.history['val_loss']
# In[ ]:
#Plot fine tuning
plt.figure(figsize=(8, 8))
plt.subplot(2, 1, 1)
plt.plot(acc, label='Training Accuracy')
plt.plot(val_acc, label='Validation Accuracy')
plt.ylim([0.8, 1])
plt.plot([initial_epochs-1,initial_epochs-1],
plt.ylim(), label='Start Fine Tuning')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(2, 1, 2)
plt.plot(loss, label='Training Loss')
plt.plot(val_loss, label='Validation Loss')
plt.ylim([0, 1.0])
plt.plot([initial_epochs-1,initial_epochs-1],
plt.ylim(), label='Start Fine Tuning')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.xlabel('epoch')
plt.show()
# In[ ]:
#model save and load
import os
# In[ ]:
#some time stamp
from datetime import datetime
# current date and time.
now = datetime.now()
timestamp = datetime.timestamp(now)
print("timestamp =", timestamp)
# In[ ]:
model_filename = str(timestamp)+'mymodel.h5'
model.save(model_filename)
# In[ ]:
#To apply the model on new data
new_model = tf.keras.models.load_model(model_filename)
# Show the model architecture
new_model.summary()
# In[ ]:
from tensorflow.keras.preprocessing import image
#image directory containing images to test
img_dir="\\polyps"
for i,img in enumerate(os.listdir(img_dir)):
tmpimage = image.load_img(os.path.join(img_dir,img), target_size=(IMG_SIZE,IMG_SIZE))
tmpimage = np.expand_dims(tmpimage, axis=0).astype('float32')
result_class=new_model.predict(tmpimage)
print(img,";",CLASS_NAMES[result_class.argmax(axis=-1)])
| [
"tensorflow.keras.preprocessing.image.ImageDataGenerator",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.models.load_model",
"tensorflow.keras.applications.ResNet50",
"tensorflow.keras.layers.GlobalAveragePooling2D",
"os.listdir",
"pathlib.Path",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"tensorflow.keras.models.Model",
"matplotlib.pyplot.ylim",
"numpy.ceil",
"datetime.datetime.timestamp",
"matplotlib.pyplot.title",
"tensorflow.keras.optimizers.RMSprop",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show",
"os.path.join",
"tensorflow.keras.optimizers.Adam",
"datetime.datetime.now",
"matplotlib.pyplot.figure",
"numpy.expand_dims",
"matplotlib.pyplot.subplot"
] | [((1111, 1139), 'pathlib.Path', 'pathlib.Path', (['train_data_dir'], {}), '(train_data_dir)\n', (1123, 1139), False, 'import pathlib\n'), ((1156, 1183), 'pathlib.Path', 'pathlib.Path', (['test_data_dir'], {}), '(test_data_dir)\n', (1168, 1183), False, 'import pathlib\n'), ((1680, 1713), 'numpy.ceil', 'np.ceil', (['(image_count / batch_size)'], {}), '(image_count / batch_size)\n', (1687, 1713), True, 'import numpy as np\n'), ((1879, 1899), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {}), '()\n', (1897, 1899), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((1963, 1983), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {}), '()\n', (1981, 1983), False, 'from tensorflow.keras.preprocessing.image import ImageDataGenerator\n'), ((3374, 3470), 'tensorflow.keras.applications.ResNet50', 'tf.keras.applications.ResNet50', ([], {'input_shape': 'IMG_SHAPE', 'include_top': '(False)', 'weights': '"""imagenet"""'}), "(input_shape=IMG_SHAPE, include_top=False,\n weights='imagenet')\n", (3404, 3470), True, 'import tensorflow as tf\n'), ((3777, 3834), 'tensorflow.keras.models.Model', 'tf.keras.models.Model', ([], {'inputs': 'base_model.input', 'outputs': 'x'}), '(inputs=base_model.input, outputs=x)\n', (3798, 3834), True, 'import tensorflow as tf\n'), ((4467, 4493), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (4477, 4493), True, 'import matplotlib.pyplot as plt\n'), ((4494, 4514), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (4505, 4514), True, 'import matplotlib.pyplot as plt\n'), ((4515, 4569), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'acc'], {'label': '"""Training Accuracy"""'}), "(epochs_range, acc, label='Training Accuracy')\n", (4523, 4569), True, 'import matplotlib.pyplot as plt\n'), ((4570, 4630), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'val_acc'], {'label': '"""Validation Accuracy"""'}), "(epochs_range, val_acc, label='Validation Accuracy')\n", (4578, 4630), True, 'import matplotlib.pyplot as plt\n'), ((4631, 4660), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""'}), "(loc='lower right')\n", (4641, 4660), True, 'import matplotlib.pyplot as plt\n'), ((4661, 4706), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Accuracy"""'], {}), "('Training and Validation Accuracy')\n", (4670, 4706), True, 'import matplotlib.pyplot as plt\n'), ((4708, 4728), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4719, 4728), True, 'import matplotlib.pyplot as plt\n'), ((4729, 4780), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'loss'], {'label': '"""Training Loss"""'}), "(epochs_range, loss, label='Training Loss')\n", (4737, 4780), True, 'import matplotlib.pyplot as plt\n'), ((4781, 4838), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs_range', 'val_loss'], {'label': '"""Validation Loss"""'}), "(epochs_range, val_loss, label='Validation Loss')\n", (4789, 4838), True, 'import matplotlib.pyplot as plt\n'), ((4839, 4868), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (4849, 4868), True, 'import matplotlib.pyplot as plt\n'), ((4869, 4910), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Loss"""'], {}), "('Training and Validation Loss')\n", (4878, 4910), True, 'import matplotlib.pyplot as plt\n'), ((4911, 4921), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4919, 4921), True, 'import matplotlib.pyplot as plt\n'), ((6194, 6220), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (6204, 6220), True, 'import matplotlib.pyplot as plt\n'), ((6221, 6241), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (6232, 6241), True, 'import matplotlib.pyplot as plt\n'), ((6242, 6282), 'matplotlib.pyplot.plot', 'plt.plot', (['acc'], {'label': '"""Training Accuracy"""'}), "(acc, label='Training Accuracy')\n", (6250, 6282), True, 'import matplotlib.pyplot as plt\n'), ((6283, 6329), 'matplotlib.pyplot.plot', 'plt.plot', (['val_acc'], {'label': '"""Validation Accuracy"""'}), "(val_acc, label='Validation Accuracy')\n", (6291, 6329), True, 'import matplotlib.pyplot as plt\n'), ((6330, 6348), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.8, 1]'], {}), '([0.8, 1])\n', (6338, 6348), True, 'import matplotlib.pyplot as plt\n'), ((6444, 6473), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""lower right"""'}), "(loc='lower right')\n", (6454, 6473), True, 'import matplotlib.pyplot as plt\n'), ((6474, 6519), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Accuracy"""'], {}), "('Training and Validation Accuracy')\n", (6483, 6519), True, 'import matplotlib.pyplot as plt\n'), ((6521, 6541), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (6532, 6541), True, 'import matplotlib.pyplot as plt\n'), ((6542, 6579), 'matplotlib.pyplot.plot', 'plt.plot', (['loss'], {'label': '"""Training Loss"""'}), "(loss, label='Training Loss')\n", (6550, 6579), True, 'import matplotlib.pyplot as plt\n'), ((6580, 6623), 'matplotlib.pyplot.plot', 'plt.plot', (['val_loss'], {'label': '"""Validation Loss"""'}), "(val_loss, label='Validation Loss')\n", (6588, 6623), True, 'import matplotlib.pyplot as plt\n'), ((6624, 6642), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 1.0]'], {}), '([0, 1.0])\n', (6632, 6642), True, 'import matplotlib.pyplot as plt\n'), ((6737, 6766), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (6747, 6766), True, 'import matplotlib.pyplot as plt\n'), ((6767, 6808), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation Loss"""'], {}), "('Training and Validation Loss')\n", (6776, 6808), True, 'import matplotlib.pyplot as plt\n'), ((6809, 6828), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {}), "('epoch')\n", (6819, 6828), True, 'import matplotlib.pyplot as plt\n'), ((6829, 6839), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6837, 6839), True, 'import matplotlib.pyplot as plt\n'), ((6976, 6990), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6988, 6990), False, 'from datetime import datetime\n'), ((7003, 7026), 'datetime.datetime.timestamp', 'datetime.timestamp', (['now'], {}), '(now)\n', (7021, 7026), False, 'from datetime import datetime\n'), ((7200, 7242), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['model_filename'], {}), '(model_filename)\n', (7226, 7242), True, 'import tensorflow as tf\n'), ((3661, 3701), 'tensorflow.keras.layers.GlobalAveragePooling2D', 'tf.keras.layers.GlobalAveragePooling2D', ([], {}), '()\n', (3699, 3701), True, 'import tensorflow as tf\n'), ((3709, 3765), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (3730, 3765), 
True, 'import tensorflow as tf\n'), ((6405, 6415), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (6413, 6415), True, 'import matplotlib.pyplot as plt\n'), ((6698, 6708), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (6706, 6708), True, 'import matplotlib.pyplot as plt\n'), ((7443, 7462), 'os.listdir', 'os.listdir', (['img_dir'], {}), '(img_dir)\n', (7453, 7462), False, 'import os\n'), ((3887, 3934), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'lr': 'base_learning_rate'}), '(lr=base_learning_rate)\n', (3911, 3934), True, 'import tensorflow as tf\n'), ((5372, 5427), 'tensorflow.keras.optimizers.RMSprop', 'tf.keras.optimizers.RMSprop', ([], {'lr': '(base_learning_rate / 10)'}), '(lr=base_learning_rate / 10)\n', (5399, 5427), True, 'import tensorflow as tf\n'), ((7493, 7519), 'os.path.join', 'os.path.join', (['img_dir', 'img'], {}), '(img_dir, img)\n', (7505, 7519), False, 'import os\n'), ((7569, 7601), 'numpy.expand_dims', 'np.expand_dims', (['tmpimage'], {'axis': '(0)'}), '(tmpimage, axis=0)\n', (7583, 7601), True, 'import numpy as np\n')] |
"""
One of the really important features of |jedi| is to have an option to
understand code like this::
def foo(bar):
bar. # completion here
foo(1)
There's no doubt whether bar is an ``int`` or not, but if there's also a call
like ``foo('str')``, what would happen? Well, we'll just show both. Because
that's what a human would expect.
It works as follows:
- |Jedi| sees a param
- search for function calls named ``foo``
- execute these calls and check the input. This works with a ``ParamListener``.
"""
from itertools import chain
from jedi._compatibility import unicode
from jedi.parser import tree as pr
from jedi import settings
from jedi import debug
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import imports
class ParamListener(object):
"""
This listener is used to get the params for a function.
"""
def __init__(self):
self.param_possibilities = []
def execute(self, params):
self.param_possibilities += params
@debug.increase_indent
def search_params(evaluator, param):
"""
A dynamic search for param values. If you try to complete a type:
>>> def func(foo):
... foo
>>> func(1)
>>> func("")
    It is not known what type ``foo`` is without analysing the whole code. You
have to look for all calls to ``func`` to find out what ``foo`` possibly
is.
"""
if not settings.dynamic_params:
return []
debug.dbg('Dynamic param search for %s', param)
func = param.get_parent_until(pr.Function)
# Compare the param names.
names = [n for n in search_function_call(evaluator, func)
if n.value == param.name.value]
# Evaluate the ExecutedParams to types.
result = list(chain.from_iterable(n.parent.eval(evaluator) for n in names))
debug.dbg('Dynamic param result %s', result)
return result
@memoize_default([], evaluator_is_first_arg=True)
def search_function_call(evaluator, func):
"""
Returns a list of param names.
"""
from jedi.evaluate import representation as er
def get_params_for_module(module):
"""
Returns the values of a param, or an empty array.
"""
@memoize_default([], evaluator_is_first_arg=True)
def get_posibilities(evaluator, module, func_name):
try:
names = module.used_names[func_name]
except KeyError:
return []
for name in names:
parent = name.parent
if pr.is_node(parent, 'trailer'):
parent = parent.parent
trailer = None
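                # Scan the children of the 'power' node for the first trailer that
                # starts with '(' after this name; that trailer is an actual call
                # site whose arguments can be evaluated.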
if pr.is_node(parent, 'power'):
for t in parent.children[1:]:
if t == '**':
break
if t.start_pos > name.start_pos and t.children[0] == '(':
trailer = t
break
if trailer is not None:
types = evaluator.goto_definition(name)
# We have to remove decorators, because they are not the
# "original" functions, this way we can easily compare.
# At the same time we also have to remove InstanceElements.
undec = []
for escope in types:
if escope.isinstance(er.Function, er.Instance) \
and escope.decorates is not None:
undec.append(escope.decorates)
elif isinstance(escope, er.InstanceElement):
undec.append(escope.var)
else:
undec.append(escope)
if er.wrap(evaluator, compare) in undec:
# Only if we have the correct function we execute
# it, otherwise just ignore it.
evaluator.eval_trailer(types, trailer)
return listener.param_possibilities
return get_posibilities(evaluator, module, func_name)
current_module = func.get_parent_until()
func_name = unicode(func.name)
compare = func
if func_name == '__init__':
cls = func.get_parent_scope()
if isinstance(cls, pr.Class):
func_name = unicode(cls.name)
compare = cls
# add the listener
listener = ParamListener()
func.listeners.add(listener)
try:
result = []
# This is like backtracking: Get the first possible result.
for mod in imports.get_modules_containing_name(evaluator, [current_module], func_name):
result = get_params_for_module(mod)
if result:
break
finally:
# cleanup: remove the listener; important: should not stick.
func.listeners.remove(listener)
return result
| [
"jedi.evaluate.imports.get_modules_containing_name",
"jedi.parser.tree.is_node",
"jedi.debug.dbg",
"jedi.evaluate.cache.memoize_default",
"jedi.evaluate.representation.wrap",
"jedi._compatibility.unicode"
] | [((1874, 1922), 'jedi.evaluate.cache.memoize_default', 'memoize_default', (['[]'], {'evaluator_is_first_arg': '(True)'}), '([], evaluator_is_first_arg=True)\n', (1889, 1922), False, 'from jedi.evaluate.cache import memoize_default\n'), ((1446, 1493), 'jedi.debug.dbg', 'debug.dbg', (['"""Dynamic param search for %s"""', 'param'], {}), "('Dynamic param search for %s', param)\n", (1455, 1493), False, 'from jedi import debug\n'), ((1808, 1852), 'jedi.debug.dbg', 'debug.dbg', (['"""Dynamic param result %s"""', 'result'], {}), "('Dynamic param result %s', result)\n", (1817, 1852), False, 'from jedi import debug\n'), ((4185, 4203), 'jedi._compatibility.unicode', 'unicode', (['func.name'], {}), '(func.name)\n', (4192, 4203), False, 'from jedi._compatibility import unicode\n'), ((2199, 2247), 'jedi.evaluate.cache.memoize_default', 'memoize_default', (['[]'], {'evaluator_is_first_arg': '(True)'}), '([], evaluator_is_first_arg=True)\n', (2214, 2247), False, 'from jedi.evaluate.cache import memoize_default\n'), ((4604, 4679), 'jedi.evaluate.imports.get_modules_containing_name', 'imports.get_modules_containing_name', (['evaluator', '[current_module]', 'func_name'], {}), '(evaluator, [current_module], func_name)\n', (4639, 4679), False, 'from jedi.evaluate import imports\n'), ((4355, 4372), 'jedi._compatibility.unicode', 'unicode', (['cls.name'], {}), '(cls.name)\n', (4362, 4372), False, 'from jedi._compatibility import unicode\n'), ((2521, 2550), 'jedi.parser.tree.is_node', 'pr.is_node', (['parent', '"""trailer"""'], {}), "(parent, 'trailer')\n", (2531, 2550), True, 'from jedi.parser import tree as pr\n'), ((2646, 2673), 'jedi.parser.tree.is_node', 'pr.is_node', (['parent', '"""power"""'], {}), "(parent, 'power')\n", (2656, 2673), True, 'from jedi.parser import tree as pr\n'), ((3782, 3809), 'jedi.evaluate.representation.wrap', 'er.wrap', (['evaluator', 'compare'], {}), '(evaluator, compare)\n', (3789, 3809), True, 'from jedi.evaluate import representation as er\n')] |
from steamcheck import app
from flask import jsonify, render_template
import os
import steamapi
import json
@app.route('/')
def index():
return render_template("index.html")
@app.route('/report/<name>')
def report(name=None):
"""
    This will generate the report based on the user's Steam ID. Returns JSON.
    :param name: Steam ID (either a numerical ID or a vanity URL name: steamcommunity.com/id/moird)
    :return: JSON object that contains a listing of all Linux games and general information about them:
{
"steamuser": "real steam name",
"image": "steam user image url",
"games": [{'gametitle', {"linux":true}}]
"error": ""
}
"""
process_report = {}
try:
# See if we are running on heroku or not. Could probably set an environment variable for this as well.
if os.path.exists('/app/assets/GAMES.json'):
linux_game_list = '/app/assets/GAMES.json'
winehq_list = '/app/assets/winehq.json'
else:
linux_game_list = './assets/GAMES.json'
winehq_list = './assets/winehq.json'
with open(linux_game_list) as linux_game_list_raw:
linux_games = json.load(linux_game_list_raw)
with open(winehq_list) as winehq_raw:
winehq_apps = json.load(winehq_raw)
steam_connection = steamapi.core.APIConnection(api_key=os.environ['steam_api_key'])
try:
user = steamapi.user.SteamUser(userid=int(name))
except ValueError:
            # Once we get further along, this fallback will be taken out; we really don't want to do this.
user = steamapi.user.SteamUser(userurl=name)
process_report['steamuser'] = user.name
process_report['image'] = user.avatar
process_report['games'] = {}
for game in user.games:
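            # Cross-reference each owned game against the Linux list (keyed by
            # Steam app id) and the WineHQ ratings (keyed by game title).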
linux = False
winehq = False
if str(game.id) in linux_games:
linux = True
if game.name in winehq_apps:
winehq = winehq_apps[game.name]
process_report['games'][game.id] = {"name": game.name, "linux": linux, "winehq":winehq}
except Exception as e:
        process_report['error'] = str(e)  # exception objects are not JSON-serializable
return jsonify(**process_report) | [
"flask.render_template",
"os.path.exists",
"json.load",
"steamapi.core.APIConnection",
"steamcheck.app.route",
"steamapi.user.SteamUser",
"flask.jsonify"
] | [((111, 125), 'steamcheck.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (120, 125), False, 'from steamcheck import app\n'), ((183, 210), 'steamcheck.app.route', 'app.route', (['"""/report/<name>"""'], {}), "('/report/<name>')\n", (192, 210), False, 'from steamcheck import app\n'), ((150, 179), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (165, 179), False, 'from flask import jsonify, render_template\n'), ((2220, 2245), 'flask.jsonify', 'jsonify', ([], {}), '(**process_report)\n', (2227, 2245), False, 'from flask import jsonify, render_template\n'), ((837, 877), 'os.path.exists', 'os.path.exists', (['"""/app/assets/GAMES.json"""'], {}), "('/app/assets/GAMES.json')\n", (851, 877), False, 'import os\n'), ((1341, 1405), 'steamapi.core.APIConnection', 'steamapi.core.APIConnection', ([], {'api_key': "os.environ['steam_api_key']"}), "(api_key=os.environ['steam_api_key'])\n", (1368, 1405), False, 'import steamapi\n'), ((1187, 1217), 'json.load', 'json.load', (['linux_game_list_raw'], {}), '(linux_game_list_raw)\n', (1196, 1217), False, 'import json\n'), ((1291, 1312), 'json.load', 'json.load', (['winehq_raw'], {}), '(winehq_raw)\n', (1300, 1312), False, 'import json\n'), ((1629, 1666), 'steamapi.user.SteamUser', 'steamapi.user.SteamUser', ([], {'userurl': 'name'}), '(userurl=name)\n', (1652, 1666), False, 'import steamapi\n')] |
import torch.utils.data as data
import numpy as np
from imageio import imread
from path import Path
import pdb
def crawl_folders(folders_list):
imgs = []
depth = []
for folder in folders_list:
current_imgs = sorted(folder.files('*.jpg'))
current_depth = []
for img in current_imgs:
d = img.dirname()/(img.name[:-4] + '.npy')
assert(d.isfile()), "depth file {} not found".format(str(d))
            current_depth.append(d)
imgs.extend(current_imgs)
depth.extend(current_depth)
return imgs, depth
def load_as_float(path):
return imread(path).astype(np.float32)
class ValidationSet(data.Dataset):
"""A sequence data loader where the files are arranged in this way:
root/scene_1/0000000.jpg
root/scene_1/0000000.npy
root/scene_1/0000001.jpg
root/scene_1/0000001.npy
..
root/scene_2/0000000.jpg
root/scene_2/0000000.npy
.
    transform functions must take in a list of images and a numpy array which can be None
"""
def __init__(self, root, transform=None):
self.root = Path(root)
scene_list_path = self.root/'val.txt'
self.scenes = [self.root/folder[:-1] for folder in open(scene_list_path)]
self.imgs, self.depth = crawl_folders(self.scenes)
self.transform = transform
def __getitem__(self, index):
img = load_as_float(self.imgs[index])
depth = np.load(self.depth[index]).astype(np.float32) #;pdb.set_trace()
if self.transform is not None:
img, _, _ = self.transform([img], depth, None); #this depth is just used to fill the compose transform that is shared(no need for the result)
img = img[0]
return img, depth
def __len__(self):
return len(self.imgs)
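# Minimal usage sketch (illustrative only; the dataset root below is a
# placeholder, not part of the original code):
if __name__ == '__main__':
    val_set = ValidationSet('/path/to/dataset', transform=None)
    print('validation samples:', len(val_set))
    img, depth = val_set[0]
    print(img.shape, depth.shape)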
| [
"imageio.imread",
"path.Path",
"numpy.load"
] | [((1183, 1193), 'path.Path', 'Path', (['root'], {}), '(root)\n', (1187, 1193), False, 'from path import Path\n'), ((654, 666), 'imageio.imread', 'imread', (['path'], {}), '(path)\n', (660, 666), False, 'from imageio import imread\n'), ((1513, 1539), 'numpy.load', 'np.load', (['self.depth[index]'], {}), '(self.depth[index])\n', (1520, 1539), True, 'import numpy as np\n')] |
# Copyright 2017 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import sys
import time
from oslo_log import log as logging
from oslo_utils import excutils
from tempest import config
from tempest.lib import exceptions as lib_exc
from patrole_tempest_plugin import rbac_exceptions
CONF = config.CONF
LOG = logging.getLogger(__name__)
class _ValidateListContext(object):
"""Context class responsible for validation of the list functions.
This class is used in ``override_role_and_validate_list`` function and
the result of a list function must be assigned to the ``ctx.resources``
variable.
Example::
with self.override_role_and_validate_list(...) as ctx:
ctx.resources = list_function()
"""
def __init__(self, admin_resources=None, admin_resource_id=None):
"""Constructor for ``ValidateListContext``.
Either ``admin_resources`` or ``admin_resource_id`` should be used,
not both.
:param list admin_resources: The list of resources received before
calling the ``override_role_and_validate_list`` function. To
validate will be used the ``_validate_len`` function.
:param UUID admin_resource_id: An ID of a resource created before
calling the ``override_role_and_validate_list`` function. To
validate will be used the ``_validate_resource`` function.
:raises RbacValidateListException: if both ``admin_resources`` and
``admin_resource_id`` are set or unset.
"""
self.resources = None
if admin_resources is not None and not admin_resource_id:
self._admin_len = len(admin_resources)
if not self._admin_len:
raise rbac_exceptions.RbacValidateListException(
reason="the list of admin resources cannot be empty")
self._validate_func = self._validate_len
elif admin_resource_id and admin_resources is None:
self._admin_resource_id = admin_resource_id
self._validate_func = self._validate_resource
else:
raise rbac_exceptions.RbacValidateListException(
reason="admin_resources and admin_resource_id are mutually "
"exclusive")
def _validate_len(self):
"""Validates that the number of resources is less than admin resources.
"""
if not len(self.resources):
raise rbac_exceptions.RbacEmptyResponseBody()
elif self._admin_len > len(self.resources):
raise rbac_exceptions.RbacPartialResponseBody(body=self.resources)
def _validate_resource(self):
"""Validates that the admin resource is present in the resources.
"""
for resource in self.resources:
if resource['id'] == self._admin_resource_id:
return
raise rbac_exceptions.RbacPartialResponseBody(body=self.resources)
def _validate(self):
"""Calls the proper validation function.
:raises RbacValidateListException: if the ``ctx.resources`` variable is
not assigned.
"""
if self.resources is None:
raise rbac_exceptions.RbacValidateListException(
reason="ctx.resources is not assigned")
self._validate_func()
class RbacUtilsMixin(object):
"""Utility mixin responsible for switching ``os_primary`` role.
Should be used as a mixin class alongside an instance of
:py:class:`tempest.test.BaseTestCase` to perform Patrole class setup for a
base RBAC class. Child classes should not use this mixin.
Example::
class BaseRbacTest(rbac_utils.RbacUtilsMixin, base.BaseV2ComputeTest):
@classmethod
def setup_clients(cls):
super(BaseRbacTest, cls).setup_clients()
cls.hosts_client = cls.os_primary.hosts_client
...
This class is responsible for overriding the value of the primary Tempest
credential's role (i.e. ``os_primary`` role). By doing so, it is possible
to seamlessly swap between admin credentials, needed for setup and clean
up, and primary credentials, needed to perform the API call which does
policy enforcement. The primary credentials always cycle between roles
defined by ``CONF.identity.admin_role`` and
``CONF.patrole.rbac_test_roles``.
"""
credentials = ['primary', 'admin']
def __init__(self, *args, **kwargs):
super(RbacUtilsMixin, self).__init__(*args, **kwargs)
# Shows if override_role was called.
self.__override_role_called = False
# Shows if exception raised during override_role.
self.__override_role_caught_exc = False
_admin_role_id = None
_rbac_role_ids = None
_project_id = None
_user_id = None
_role_map = None
_role_inferences_mapping = None
_orig_roles = []
admin_roles_client = None
@classmethod
def restore_roles(cls):
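        # Class-level cleanup: put back whatever roles the primary user had on
        # the project before the test class started overriding them.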
if cls._orig_roles:
LOG.info("Restoring original roles %s", cls._orig_roles)
roles_already_present = cls._list_and_clear_user_roles_on_project(
cls._orig_roles)
if not roles_already_present:
cls._create_user_role_on_project(cls._orig_roles)
@classmethod
def setup_clients(cls):
if CONF.identity_feature_enabled.api_v3:
admin_roles_client = cls.os_admin.roles_v3_client
else:
raise lib_exc.InvalidConfiguration(
"Patrole role overriding only supports v3 identity API.")
cls.admin_roles_client = admin_roles_client
cls._project_id = cls.os_primary.credentials.tenant_id
cls._user_id = cls.os_primary.credentials.user_id
cls._role_inferences_mapping = cls._prepare_role_inferences_mapping()
cls._init_roles()
# Store the user's original roles and rollback after testing.
roles = cls.admin_roles_client.list_user_roles_on_project(
cls._project_id, cls._user_id)['roles']
cls._orig_roles = [role['id'] for role in roles]
cls.addClassResourceCleanup(cls.restore_roles)
# Change default role to admin
cls._override_role(False)
super(RbacUtilsMixin, cls).setup_clients()
@classmethod
def _prepare_role_inferences_mapping(cls):
"""Preparing roles mapping to support role inferences
Making query to `list-all-role-inference-rules`_ keystone API
returns all inference rules, which makes it possible to prepare
roles mapping.
It walks recursively through the raw data::
{"role_inferences": [
{
"implies": [{"id": "3", "name": "reader"}],
"prior_role": {"id": "2", "name": "member"}
},
{
"implies": [{"id": "2", "name": "member"}],
"prior_role": {"id": "1", "name": "admin"}
}
]
}
and converts it to the mapping::
{
"2": ["3"], # "member": ["reader"],
"1": ["2", "3"] # "admin": ["member", "reader"]
}
.. _list-all-role-inference-rules: https://docs.openstack.org/api-ref/identity/v3/#list-all-role-inference-rules
""" # noqa: E501
def process_roles(role_id, data):
roles = data.get(role_id, set())
for rid in roles.copy():
roles.update(process_roles(rid, data))
return roles
def convert_data(data):
res = {}
for rule in data:
prior_role = rule['prior_role']['id']
implies = {r['id'] for r in rule['implies']}
res[prior_role] = implies
return res
raw_data = cls.admin_roles_client.list_all_role_inference_rules()
data = convert_data(raw_data['role_inferences'])
res = {}
for role_id in data:
res[role_id] = process_roles(role_id, data)
return res
def get_all_needed_roles(self, roles):
"""Extending given roles with roles from mapping
Examples::
["admin"] >> ["admin", "member", "reader"]
["member"] >> ["member", "reader"]
["reader"] >> ["reader"]
["custom_role"] >> ["custom_role"]
:param roles: list of roles
:return: extended list of roles
"""
res = set(r for r in roles)
for role in res.copy():
role_id = self.__class__._role_map.get(role)
implied_roles = self.__class__._role_inferences_mapping.get(
role_id, set())
role_names = {self.__class__._role_map[rid]
for rid in implied_roles}
res.update(role_names)
LOG.debug('All needed roles: %s; Base roles: %s', res, roles)
return list(res)
@contextlib.contextmanager
def override_role(self):
"""Override the role used by ``os_primary`` Tempest credentials.
Temporarily change the role used by ``os_primary`` credentials to:
* ``[patrole] rbac_test_roles`` before test execution
* ``[identity] admin_role`` after test execution
Automatically switches to admin role after test execution.
:returns: None
.. warning::
This function can alter user roles for pre-provisioned credentials.
Work is underway to safely clean up after this function.
Example::
@rbac_rule_validation.action(service='test',
rules=['a:test:rule'])
def test_foo(self):
# Allocate test-level resources here.
with self.override_role():
# The role for `os_primary` has now been overridden. Within
# this block, call the API endpoint that enforces the
# expected policy specified by "rule" in the decorator.
self.foo_service.bar_api_call()
# The role is switched back to admin automatically. Note that
# if the API call above threw an exception, any code below this
# point in the test is not executed.
"""
self._set_override_role_called()
self._override_role(True)
try:
# Execute the test.
yield
finally:
# Check whether an exception was raised. If so, remember that
# for future validation.
exc = sys.exc_info()[0]
if exc is not None:
self._set_override_role_caught_exc()
# This code block is always executed, no matter the result of the
# test. Automatically switch back to the admin role for test clean
# up.
self._override_role(False)
@classmethod
def _override_role(cls, toggle_rbac_role=False):
"""Private helper for overriding ``os_primary`` Tempest credentials.
:param toggle_rbac_role: Boolean value that controls the role that
overrides default role of ``os_primary`` credentials.
* If True: role is set to ``[patrole] rbac_test_role``
* If False: role is set to ``[identity] admin_role``
"""
LOG.debug('Overriding role to: %s.', toggle_rbac_role)
roles_already_present = False
try:
target_roles = (cls._rbac_role_ids
if toggle_rbac_role else [cls._admin_role_id])
roles_already_present = cls._list_and_clear_user_roles_on_project(
target_roles)
# Do not override roles if `target_role` already exists.
if not roles_already_present:
cls._create_user_role_on_project(target_roles)
except Exception as exp:
with excutils.save_and_reraise_exception():
LOG.exception(exp)
finally:
auth_providers = cls.get_auth_providers()
for provider in auth_providers:
provider.clear_auth()
# Fernet tokens are not subsecond aware so sleep to ensure we are
# passing the second boundary before attempting to authenticate.
# Only sleep if a token revocation occurred as a result of role
# overriding. This will optimize test runtime in the case where
# ``[identity] admin_role`` == ``[patrole] rbac_test_roles``.
if not roles_already_present:
time.sleep(1)
for provider in auth_providers:
provider.set_auth()
@classmethod
def _init_roles(cls):
available_roles = cls.admin_roles_client.list_roles()['roles']
cls._role_map = {r['name']: r['id'] for r in available_roles}
LOG.debug('Available roles: %s', cls._role_map.keys())
rbac_role_ids = []
roles = CONF.patrole.rbac_test_roles
# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
if CONF.patrole.rbac_test_role:
if not roles:
roles.append(CONF.patrole.rbac_test_role)
for role_name in roles:
rbac_role_ids.append(cls._role_map.get(role_name))
admin_role_id = cls._role_map.get(CONF.identity.admin_role)
if not all([admin_role_id, all(rbac_role_ids)]):
missing_roles = []
msg = ("Could not find `[patrole] rbac_test_roles` or "
"`[identity] admin_role`, both of which are required for "
"RBAC testing.")
if not admin_role_id:
missing_roles.append(CONF.identity.admin_role)
if not all(rbac_role_ids):
missing_roles += [role_name for role_name in roles
if role_name not in cls._role_map]
msg += " Following roles were not found: %s." % (
", ".join(missing_roles))
msg += " Available roles: %s." % ", ".join(cls._role_map)
raise rbac_exceptions.RbacResourceSetupFailed(msg)
cls._admin_role_id = admin_role_id
cls._rbac_role_ids = rbac_role_ids
# Adding backward mapping
cls._role_map.update({v: k for k, v in cls._role_map.items()})
@classmethod
def _create_user_role_on_project(cls, role_ids):
for role_id in role_ids:
cls.admin_roles_client.create_user_role_on_project(
cls._project_id, cls._user_id, role_id)
@classmethod
def _list_and_clear_user_roles_on_project(cls, role_ids):
roles = cls.admin_roles_client.list_user_roles_on_project(
cls._project_id, cls._user_id)['roles']
all_role_ids = [role['id'] for role in roles]
# NOTE(felipemonteiro): We do not use ``role_id in all_role_ids`` here
# to avoid over-permission errors: if the current list of roles on the
# project includes "admin" and "Member", and we are switching to the
# "Member" role, then we must delete the "admin" role. Thus, we only
# return early if the user's roles on the project are an exact match.
if set(role_ids) == set(all_role_ids):
return True
for role in roles:
cls.admin_roles_client.delete_role_from_user_on_project(
cls._project_id, cls._user_id, role['id'])
return False
@contextlib.contextmanager
def override_role_and_validate_list(self,
admin_resources=None,
admin_resource_id=None):
"""Call ``override_role`` and validate RBAC for a list API action.
List actions usually do soft authorization: partial or empty response
bodies are returned instead of exceptions. This helper validates
that unauthorized roles only return a subset of the available
resources.
Should only be used for validating list API actions.
:param test_obj: Instance of ``tempest.test.BaseTestCase``.
:param list admin_resources: The list of resources received before
calling the ``override_role_and_validate_list`` function.
:param UUID admin_resource_id: An ID of a resource created before
calling the ``override_role_and_validate_list`` function.
:return: py:class:`_ValidateListContext` object.
Example::
# the resource created by admin
admin_resource_id = (
self.ntp_client.create_dscp_marking_rule()
["dscp_marking_rule"]["id'])
with self.override_role_and_validate_list(
admin_resource_id=admin_resource_id) as ctx:
# the list of resources available for member role
ctx.resources = self.ntp_client.list_dscp_marking_rules(
policy_id=self.policy_id)["dscp_marking_rules"]
"""
ctx = _ValidateListContext(admin_resources, admin_resource_id)
with self.override_role():
yield ctx
ctx._validate()
@classmethod
def get_auth_providers(cls):
"""Returns list of auth_providers used within test.
Tests may redefine this method to include their own or third party
client auth_providers.
"""
return [cls.os_primary.auth_provider]
def _set_override_role_called(self):
"""Helper for tracking whether ``override_role`` was called."""
self.__override_role_called = True
def _set_override_role_caught_exc(self):
"""Helper for tracking whether exception was thrown inside
``override_role``.
"""
self.__override_role_caught_exc = True
def _validate_override_role_called(self):
"""Idempotently validate that ``override_role`` is called and reset
its value to False for sequential tests.
"""
was_called = self.__override_role_called
self.__override_role_called = False
return was_called
def _validate_override_role_caught_exc(self):
"""Idempotently validate that exception was caught inside
``override_role``, so that, by process of elimination, it can be
determined whether one was thrown outside (which is invalid).
"""
caught_exception = self.__override_role_caught_exc
self.__override_role_caught_exc = False
return caught_exception
def is_admin():
"""Verifies whether the current test role equals the admin role.
:returns: True if ``rbac_test_roles`` contain the admin role.
"""
roles = CONF.patrole.rbac_test_roles
# TODO(vegasq) drop once CONF.patrole.rbac_test_role is removed
if CONF.patrole.rbac_test_role:
roles.append(CONF.patrole.rbac_test_role)
roles = list(set(roles))
# TODO(felipemonteiro): Make this more robust via a context is admin
# lookup.
return CONF.identity.admin_role in roles
| [
"patrole_tempest_plugin.rbac_exceptions.RbacPartialResponseBody",
"oslo_utils.excutils.save_and_reraise_exception",
"patrole_tempest_plugin.rbac_exceptions.RbacEmptyResponseBody",
"time.sleep",
"patrole_tempest_plugin.rbac_exceptions.RbacResourceSetupFailed",
"patrole_tempest_plugin.rbac_exceptions.RbacValidateListException",
"sys.exc_info",
"tempest.lib.exceptions.InvalidConfiguration",
"oslo_log.log.getLogger"
] | [((895, 922), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (912, 922), True, 'from oslo_log import log as logging\n'), ((3463, 3523), 'patrole_tempest_plugin.rbac_exceptions.RbacPartialResponseBody', 'rbac_exceptions.RbacPartialResponseBody', ([], {'body': 'self.resources'}), '(body=self.resources)\n', (3502, 3523), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((3036, 3075), 'patrole_tempest_plugin.rbac_exceptions.RbacEmptyResponseBody', 'rbac_exceptions.RbacEmptyResponseBody', ([], {}), '()\n', (3073, 3075), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((3771, 3857), 'patrole_tempest_plugin.rbac_exceptions.RbacValidateListException', 'rbac_exceptions.RbacValidateListException', ([], {'reason': '"""ctx.resources is not assigned"""'}), "(reason=\n 'ctx.resources is not assigned')\n", (3812, 3857), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((6076, 6167), 'tempest.lib.exceptions.InvalidConfiguration', 'lib_exc.InvalidConfiguration', (['"""Patrole role overriding only supports v3 identity API."""'], {}), "(\n 'Patrole role overriding only supports v3 identity API.')\n", (6104, 6167), True, 'from tempest.lib import exceptions as lib_exc\n'), ((14696, 14740), 'patrole_tempest_plugin.rbac_exceptions.RbacResourceSetupFailed', 'rbac_exceptions.RbacResourceSetupFailed', (['msg'], {}), '(msg)\n', (14735, 14740), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((2328, 2428), 'patrole_tempest_plugin.rbac_exceptions.RbacValidateListException', 'rbac_exceptions.RbacValidateListException', ([], {'reason': '"""the list of admin resources cannot be empty"""'}), "(reason=\n 'the list of admin resources cannot be empty')\n", (2369, 2428), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((2704, 2821), 'patrole_tempest_plugin.rbac_exceptions.RbacValidateListException', 'rbac_exceptions.RbacValidateListException', ([], {'reason': '"""admin_resources and admin_resource_id are mutually exclusive"""'}), "(reason=\n 'admin_resources and admin_resource_id are mutually exclusive')\n", (2745, 2821), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((3146, 3206), 'patrole_tempest_plugin.rbac_exceptions.RbacPartialResponseBody', 'rbac_exceptions.RbacPartialResponseBody', ([], {'body': 'self.resources'}), '(body=self.resources)\n', (3185, 3206), False, 'from patrole_tempest_plugin import rbac_exceptions\n'), ((11195, 11209), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (11207, 11209), False, 'import sys\n'), ((13183, 13196), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (13193, 13196), False, 'import time\n'), ((12517, 12554), 'oslo_utils.excutils.save_and_reraise_exception', 'excutils.save_and_reraise_exception', ([], {}), '()\n', (12552, 12554), False, 'from oslo_utils import excutils\n')] |
from initialize import *
from core.db.db_func import query_linedrug_list
import os
import wx
class DrugPopup(wx.ComboPopup):
def __init__(self, parent):
super().__init__()
self.lc = None
self.mv = parent.mv
self.init_d_l = query_linedrug_list(self.mv.sess).all()
self.d_l = []
def Create(self, parent):
self.lc = wx.ListCtrl(
parent,
style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)
self.lc.AppendColumn('Thuốc', width=200)
self.lc.AppendColumn('Thành phần', width=150)
self.lc.AppendColumn('Số lượng')
self.lc.AppendColumn('Đơn giá')
self.lc.AppendColumn('Cách dùng', width=100)
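        # Column labels are Vietnamese: Thuốc = drug name, Thành phần = ingredients,
        # Số lượng = quantity, Đơn giá = unit price, Cách dùng = directions for use.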
self.lc.Bind(wx.EVT_MOTION, self.OnMotion)
self.lc.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.lc.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Update()
return True
def Init(self):
self.value = -1
self.curitem = -1
def GetControl(self):
return self.lc
def SetStringValue(self, val):
idx = self.lc.FindItem(-1, val)
if idx != wx.NOT_FOUND:
self.lc.Select(idx)
def GetStringValue(self):
if self.value >= 0:
return self.lc.GetItemText(self.value, col=0)
return ""
def GetAdjustedSize(self, minWidth, prefHeight, maxHeight):
return super().GetAdjustedSize(*popup_size)
def Update(self, s=''):
self.lc.DeleteAllItems()
self.d_l = list(filter(
lambda x: s.casefold() in x.name.casefold() or s.casefold() in x.element.casefold(),
self.init_d_l))
for index, item in enumerate(self.d_l):
self.lc.Append(
[item.name, item.element, item.quantity, item.sale_price, item.usage])
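            # Flag low stock in red; the user_setting key translates to
            # "minimum drug quantity before a red alert".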
if item.quantity <= user_setting["so_luong_thuoc_toi_thieu_de_bao_dong_do"]:
self.lc.SetItemTextColour(index, wx.Colour(252, 3, 57, 255))
def OnMotion(self, e):
item, flags = self.lc.HitTest(e.GetPosition())
if item >= 0:
self.lc.Select(item)
self.curitem = item
def OnLeftDown(self, e):
try:
self.value = self.curitem
self.ComboCtrl.drugWH = self.d_l[self.value]
self.Dismiss()
self.ComboCtrl.SelectAll()
self.ComboCtrl.SetInsertionPointEnd()
except IndexError:
self.Dismiss()
def OnPopup(self):
self.Init()
self.Update(self.ComboCtrl.Value)
if self.lc.ItemCount > 0:
if self.curitem < (self.lc.ItemCount - 1):
self.curitem += 1
self.lc.Select(self.curitem)
self.lc.EnsureVisible(self.curitem)
def KeyDown(self):
if self.lc.ItemCount > 0:
if self.curitem < (self.lc.ItemCount - 1):
self.curitem += 1
self.lc.Select(self.curitem)
self.lc.EnsureVisible(self.curitem)
def KeyUp(self):
if self.lc.ItemCount > 0:
if self.curitem > 0:
self.curitem -= 1
self.lc.Select(self.curitem)
self.lc.EnsureVisible(self.curitem)
else:
self.KeyESC()
def KeyESC(self):
a = self.ComboCtrl.Value
self.Dismiss()
self.ComboCtrl.ChangeValue(a)
self.ComboCtrl.SetInsertionPointEnd()
def KeyReturn(self):
self.OnLeftDown(None)
def onKeyPress(self, e):
c = e.GetKeyCode()
if c == wx.WXK_DOWN:
self.KeyDown()
elif c == wx.WXK_UP:
self.KeyUp()
elif c == wx.WXK_ESCAPE:
self.KeyESC()
elif c == wx.WXK_RETURN:
self.KeyReturn()
class DrugPicker(wx.ComboCtrl):
def __init__(self, parent):
super().__init__(parent, size=drugctrl_size, style=wx.TE_PROCESS_ENTER)
self.mv = parent.mv
self.drug_popup = DrugPopup(self)
self.SetPopupControl(self.drug_popup)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_TEXT, self.onTextChange)
self.SetHint("Nhấn Enter để search thuốc")
self._drugWH = None
self.EnablePopupAnimation(enable=False)
@property
def drugWH(self):
return self._drugWH
@drugWH.setter
def drugWH(self, dwh):
self._drugWH = dwh
pg = self.Parent
if dwh:
pg.usage_unit.Label = dwh.usage_unit + " "
pg.sale_unit.Label = dwh.sale_unit + " "
else:
self.ChangeValue('')
pg.dosage_per.ChangeValue('')
pg.usage_unit.Label = '{Đơn vị} '
pg.times.ChangeValue("")
pg.quantity.ChangeValue("")
pg.sale_unit.Label = '{Đơn vị} '
pg.usage.ChangeValue("")
def onKeyPress(self, e):
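        # Platform-specific keys: on POSIX, Enter/Down opens the popup; on Windows the popup is reopened from onTextChange, so ordinary keys first dismiss it while keeping the typed text.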
if os.name == "posix":
if e.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_DOWN]:
if not self.IsPopupShown():
self.Popup()
else:
e.Skip()
else:
if e.GetKeyCode() not in [wx.WXK_RETURN,
wx.WXK_UP,
wx.WXK_DOWN,
wx.WXK_ESCAPE]:
if self.IsPopupShown():
a = self.Value
self.Dismiss()
self.ChangeValue(a)
self.SetInsertionPointEnd()
e.Skip()
def onTextChange(self, e):
if os.name == "nt":
if e.String == "":
self.Clear()
elif len(e.String) >= 1:
if not self.IsPopupShown():
self.Popup()
self.SetInsertionPointEnd()
if os.name == "posix":
if e.String == "":
self.Clear()
def Clear(self):
self.drugWH = None
def refreshPopup(self):
self.drug_popup.init_d_l = query_linedrug_list(self.mv.sess).all()
| [
"wx.Colour",
"core.db.db_func.query_linedrug_list",
"wx.ListCtrl"
] | [((373, 450), 'wx.ListCtrl', 'wx.ListCtrl', (['parent'], {'style': '(wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)'}), '(parent, style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.SIMPLE_BORDER)\n', (384, 450), False, 'import wx\n'), ((262, 295), 'core.db.db_func.query_linedrug_list', 'query_linedrug_list', (['self.mv.sess'], {}), '(self.mv.sess)\n', (281, 295), False, 'from core.db.db_func import query_linedrug_list\n'), ((5989, 6022), 'core.db.db_func.query_linedrug_list', 'query_linedrug_list', (['self.mv.sess'], {}), '(self.mv.sess)\n', (6008, 6022), False, 'from core.db.db_func import query_linedrug_list\n'), ((1950, 1976), 'wx.Colour', 'wx.Colour', (['(252)', '(3)', '(57)', '(255)'], {}), '(252, 3, 57, 255)\n', (1959, 1976), False, 'import wx\n')] |
from flask import Flask
from flask_cors import CORS
from flask_graphql import GraphQLView
from schema import Schema
def create_app(**kwargs):
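    # Mount the GraphQL endpoint at /graphql; extra kwargs (e.g. graphiql=True) are forwarded to GraphQLView.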
app = Flask(__name__)
app.debug = True
app.add_url_rule(
'/graphql',
view_func=GraphQLView.as_view('graphql', schema=Schema, **kwargs)
)
return app
if __name__ == '__main__':
app = create_app(graphiql=True)
CORS(app, resources={r'/graphql': {'origins': '*'}})
app.run()
| [
"flask_graphql.GraphQLView.as_view",
"flask_cors.CORS",
"flask.Flask"
] | [((154, 169), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (159, 169), False, 'from flask import Flask\n'), ((397, 448), 'flask_cors.CORS', 'CORS', (['app'], {'resources': "{'/graphql': {'origins': '*'}}"}), "(app, resources={'/graphql': {'origins': '*'}})\n", (401, 448), False, 'from flask_cors import CORS\n'), ((251, 306), 'flask_graphql.GraphQLView.as_view', 'GraphQLView.as_view', (['"""graphql"""'], {'schema': 'Schema'}), "('graphql', schema=Schema, **kwargs)\n", (270, 306), False, 'from flask_graphql import GraphQLView\n')] |
from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray
from DD.Entity import Entity
import numpy as np
class Terrain(Entity):
def __init__(self, json, width, height, scale=4, terrain_types=4):
super(Terrain, self).__init__(json)
self._scale = scale
self.terrain_types = terrain_types
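        # 'splat' arrives as a flat byte array; reshape it to (height*scale, width*scale, terrain_types) in row-major order.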
self.splat = PoolByteArray2NumpyArray(self._json['splat']).reshape(height*self._scale, width*self._scale, self.terrain_types, order='C')
def get_json(self):
json = self._json
json['splat'] = NumpyArray2PoolByteArray(self.splat.reshape(np.prod(self.splat.shape), order='C'))
return json
def pad(self, top, bottom, left, right):
self.splat = np.pad(self.splat,
((top*self._scale, bottom*self._scale), (left*self._scale, right*self._scale), (0,0)),
mode='edge')
def crop(self, top, bottom, left, right):
self.splat = self._crop_map_safe(self.splat, top, bottom, left, right, self._scale)
def fliplr(self, width):
self.splat = np.fliplr(self.splat)
def flipud(self, height):
self.splat = np.flipud(self.splat)
def rot90(self, width, height):
self.splat = self._rot90_map(self.splat)
def rot180(self, width, height):
self.splat = self._rot180_map(self.splat)
def rot270(self, width, height):
self.splat = self._rot270_map(self.splat)
| [
"numpy.prod",
"numpy.flipud",
"numpy.fliplr",
"DD.utils.PoolByteArray2NumpyArray",
"numpy.pad"
] | [((720, 852), 'numpy.pad', 'np.pad', (['self.splat', '((top * self._scale, bottom * self._scale), (left * self._scale, right *\n self._scale), (0, 0))'], {'mode': '"""edge"""'}), "(self.splat, ((top * self._scale, bottom * self._scale), (left * self\n ._scale, right * self._scale), (0, 0)), mode='edge')\n", (726, 852), True, 'import numpy as np\n'), ((1090, 1111), 'numpy.fliplr', 'np.fliplr', (['self.splat'], {}), '(self.splat)\n', (1099, 1111), True, 'import numpy as np\n'), ((1168, 1189), 'numpy.flipud', 'np.flipud', (['self.splat'], {}), '(self.splat)\n', (1177, 1189), True, 'import numpy as np\n'), ((351, 396), 'DD.utils.PoolByteArray2NumpyArray', 'PoolByteArray2NumpyArray', (["self._json['splat']"], {}), "(self._json['splat'])\n", (375, 396), False, 'from DD.utils import PoolByteArray2NumpyArray, NumpyArray2PoolByteArray\n'), ((594, 619), 'numpy.prod', 'np.prod', (['self.splat.shape'], {}), '(self.splat.shape)\n', (601, 619), True, 'import numpy as np\n')] |
import json
import sys
from openslides_backend.models.checker import Checker, CheckException
def main() -> int:
files = sys.argv[1:]
if not files:
print("No files specified.")
return 1
possible_modes = tuple(f"--{mode}" for mode in Checker.modes)
modes = tuple(mode[2:] for mode in possible_modes if mode in files)
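    # No --<mode> flag means check everything; more than one mode flag is rejected.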
if len(modes) == 0:
mode = "all"
elif len(modes) > 1:
print(f"You can only choose one mode of {', '.join(possible_modes)}.")
exit(1)
else:
mode = modes[0]
if len(modes):
files = [x for x in files if x not in possible_modes]
failed = False
for f in files:
with open(f) as data:
try:
Checker(
json.load(data),
mode=mode,
).run_check()
except CheckException as e:
print(f"Check for {f} failed:\n", e)
failed = True
else:
print(f"Check for {f} successful.")
return 1 if failed else 0
if __name__ == "__main__":
sys.exit(main())
| [
"json.load"
] | [((765, 780), 'json.load', 'json.load', (['data'], {}), '(data)\n', (774, 780), False, 'import json\n')] |
import unittest
from http import HTTPStatus
from unittest import TestCase
import bcrypt
from flask.ctx import AppContext
from flask.testing import FlaskClient
from app import create_app
from models.theme import Theme, SubTheme
from models.users import Users
class TestSubTemes(TestCase):
"""
    Unit tests for the creation, renaming and deletion of SubThemes
"""
def setUp(self):
"""
        Set up a FlaskClient for testing, create an admin user, build the authorization header for requests to
        the Flask client, and create a dummy theme
"""
self.client, self.app_context = self.create_test_client()
self.user = self.create_admin_user()
self.auth_header = self.get_auth_header()
self.theme = Theme.get_by_name("_test_add_Subtheme_")
if not self.theme:
self.theme = Theme("_test_add_Subtheme_")
self.theme.save()
self.theme.commit()
self.theme = Theme.get_by_name("_test_add_Subtheme_")
self.subtheme = self.create_dummy_subtheme()
def create_test_client(self) -> (FlaskClient, AppContext):
"""
Create flask testing client
:return: FlaskClient for tests and AppContext
"""
test_app = create_app(DATABASE_NAME='test_analysis', TESTING=True)
testing_client = test_app.test_client()
test_app_context = test_app.app_context()
test_app_context.push()
return testing_client, test_app_context
def create_dummy_subtheme(self) -> SubTheme:
"""
Create SubTheme for tests
:return: SubTheme for tests
"""
subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
if not subtheme:
subtheme = SubTheme(self.theme.id, '_TEST_SUB_THEME_')
subtheme.save()
subtheme.commit()
subtheme = SubTheme.get_by_name('_TEST_SUB_THEME_')
return subtheme
def create_admin_user(self) -> Users:
"""
Create Admin user for tests
:return: an admin user for tests
"""
password_hash = bcrypt.hashpw("<PASSWORD>".encode("utf-8"), bcrypt.gensalt())
user = Users.find_by_email("<EMAIL>")
if not user:
user = Users("Admin", "<EMAIL>", password_hash.decode("utf8"), True, True)
try:
user.save()
user.commit()
except Exception as e:
pass
return user
def get_auth_header(self) -> {str: str}:
"""
Create an Authorization header for test
:return: An authorization header
"""
response_login = self.client.post('/login', data=dict(email=self.user.email, password="<PASSWORD>", remember=True),
follow_redirects=True)
response_login_json = response_login.get_json()
return {'Authorization': 'Bearer {}'.format(response_login_json["access_token"])}
def test_add_subtheme(self):
"""
Create a new SubTheme and check the client response status code for http status 200 (OK)
        Check the JSON response data for the expected message 'sub theme created' and the
        SubTheme name
"""
response = self.client.post('/admin/themes/add_subtheme',
json={"theme_id": self.theme.id, "subtheme": "_TEST_SUB_THEME_2"},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
json_response = response.get_json()
self.assertEqual(json_response["message"], "sub theme created")
self.assertEqual(json_response["theme_id"], self.theme.id)
self.assertEqual(json_response["subtheme"], "_TEST_SUB_THEME_2")
def test_rename_subtheme_theme_id(self):
"""
        Rename a SubTheme by theme_id and check the client's response status code for http status 200 (OK)
Check response data for the expected message 'Subtheme renamed' and the
Subtheme name has been changed
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
current_name = self.subtheme.name
response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": self.subtheme.t_id,
"current_name": current_name,
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
response = response.get_json()
self.assertEqual(response["id"], self.subtheme.id)
self.assertEqual(response["message"], "Subtheme renamed")
self.assertEqual(response["old_name"], current_name)
self.assertEqual(response["new_name"], "new_name_not_1")
def test_rename_subtheme_id(self):
"""
        Rename a SubTheme by id and check the client's response status code for http status 200 (OK)
Check response data for the expected message 'Subtheme renamed' and the
Subtheme name has been changed
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
current_name = self.subtheme.name
response = self.client.post('/admin/themes/rename_subtheme', json={"id": self.subtheme.id,
"current_name": current_name,
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
response = response.get_json()
self.assertEqual(response["id"], self.subtheme.id)
self.assertEqual(response["message"], "Subtheme renamed")
self.assertEqual(response["old_name"], current_name)
self.assertEqual(response["new_name"], "new_name_not_1")
def test_rename_non_existant_subtheme(self):
"""
        Rename a SubTheme that does not exist and check the client's response status code for http status 404 (NOT_FOUND)
"""
response = self.client.post('/admin/themes/rename_subtheme', json={"theme_id": -1,
"current_name": "a3d4f5g6h7j8k0",
"new_name": "new_name_not_1"
}, headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_delete_non_exsitant_subtheme(self):
"""
Delete a SubTheme that does not exist and check the client response status code for http status 404
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme',
json={"name": "weA_gfj24fhurtyui", "theme_id": -1},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
def test_delete_subtheme_by_id(self):
"""
Delete a SubTheme by id and check the client response status code for http status 204 (NO_CONTENT)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme', json={"id": self.subtheme.id},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)
def test_delete_subtheme_by_theme_id_and_name(self):
"""
Delete a SubTheme by theme_id and name: check the client response status code for http status 204 (NO_CONTENT)
"""
if not self.subtheme:
self.subtheme = self.create_dummy_subtheme()
response = self.client.post('/admin/themes/delete_subtheme',
json={"theme_id": self.subtheme.t_id, "name": self.subtheme.name},
headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.NO_CONTENT)
def tearDown(self):
""" Handle the cleanup after tests"""
self.subtheme = SubTheme.get_by_name("new_name_not_1")
if not self.subtheme:
self.subtheme = SubTheme.get_by_name("_TEST_SUB_THEME_")
if self.subtheme:
self.subtheme.delete()
self.subtheme.commit()
test_sub = SubTheme.get_by_name("_TEST_SUB_THEME_2")
if test_sub:
test_sub.delete()
test_sub.commit()
if self.theme:
self.theme.delete()
self.theme.commit()
self.client.post('/logout', headers=self.auth_header)
if self.user:
self.user.delete()
self.user.commit()
self.app_context.pop()
if __name__ == '__main__':
unittest.main()
| [
"models.users.Users.find_by_email",
"models.theme.SubTheme.get_by_name",
"bcrypt.gensalt",
"app.create_app",
"models.theme.Theme.get_by_name",
"unittest.main",
"models.theme.Theme",
"models.theme.SubTheme"
] | [((9225, 9240), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9238, 9240), False, 'import unittest\n'), ((759, 799), 'models.theme.Theme.get_by_name', 'Theme.get_by_name', (['"""_test_add_Subtheme_"""'], {}), "('_test_add_Subtheme_')\n", (776, 799), False, 'from models.theme import Theme, SubTheme\n'), ((1259, 1314), 'app.create_app', 'create_app', ([], {'DATABASE_NAME': '"""test_analysis"""', 'TESTING': '(True)'}), "(DATABASE_NAME='test_analysis', TESTING=True)\n", (1269, 1314), False, 'from app import create_app\n'), ((1656, 1696), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_"""'], {}), "('_TEST_SUB_THEME_')\n", (1676, 1696), False, 'from models.theme import Theme, SubTheme\n'), ((2180, 2210), 'models.users.Users.find_by_email', 'Users.find_by_email', (['"""<EMAIL>"""'], {}), "('<EMAIL>')\n", (2199, 2210), False, 'from models.users import Users\n'), ((8547, 8585), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""new_name_not_1"""'], {}), "('new_name_not_1')\n", (8567, 8585), False, 'from models.theme import Theme, SubTheme\n'), ((8802, 8843), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_2"""'], {}), "('_TEST_SUB_THEME_2')\n", (8822, 8843), False, 'from models.theme import Theme, SubTheme\n'), ((852, 880), 'models.theme.Theme', 'Theme', (['"""_test_add_Subtheme_"""'], {}), "('_test_add_Subtheme_')\n", (857, 880), False, 'from models.theme import Theme, SubTheme\n'), ((968, 1008), 'models.theme.Theme.get_by_name', 'Theme.get_by_name', (['"""_test_add_Subtheme_"""'], {}), "('_test_add_Subtheme_')\n", (985, 1008), False, 'from models.theme import Theme, SubTheme\n'), ((1745, 1788), 'models.theme.SubTheme', 'SubTheme', (['self.theme.id', '"""_TEST_SUB_THEME_"""'], {}), "(self.theme.id, '_TEST_SUB_THEME_')\n", (1753, 1788), False, 'from models.theme import Theme, SubTheme\n'), ((1870, 1910), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_"""'], {}), "('_TEST_SUB_THEME_')\n", (1890, 1910), False, 'from models.theme import Theme, SubTheme\n'), ((2147, 2163), 'bcrypt.gensalt', 'bcrypt.gensalt', ([], {}), '()\n', (2161, 2163), False, 'import bcrypt\n'), ((8644, 8684), 'models.theme.SubTheme.get_by_name', 'SubTheme.get_by_name', (['"""_TEST_SUB_THEME_"""'], {}), "('_TEST_SUB_THEME_')\n", (8664, 8684), False, 'from models.theme import Theme, SubTheme\n')] |
from struct import unpack_from, calcsize
LOG_GNSS_POSITION_REPORT = 0x1476
LOG_GNSS_GPS_MEASUREMENT_REPORT = 0x1477
LOG_GNSS_CLOCK_REPORT = 0x1478
LOG_GNSS_GLONASS_MEASUREMENT_REPORT = 0x1480
LOG_GNSS_BDS_MEASUREMENT_REPORT = 0x1756
LOG_GNSS_GAL_MEASUREMENT_REPORT = 0x1886
LOG_GNSS_OEMDRE_MEASUREMENT_REPORT = 0x14DE
LOG_GNSS_OEMDRE_SVPOLY_REPORT = 0x14E1
LOG_GNSS_ME_DPO_STATUS = 0x1838
LOG_GNSS_CD_DB_REPORT = 0x147B
LOG_GNSS_PRX_RF_HW_STATUS_REPORT = 0x147E
LOG_CGPS_SLOW_CLOCK_CLIB_REPORT = 0x1488
LOG_GNSS_CONFIGURATION_STATE = 0x1516
glonass_measurement_report = """
uint8_t version;
uint32_t f_count;
uint8_t glonass_cycle_number;
uint16_t glonass_number_of_days;
uint32_t milliseconds;
float time_bias;
float clock_time_uncertainty;
float clock_frequency_bias;
float clock_frequency_uncertainty;
uint8_t sv_count;
"""
glonass_measurement_report_sv = """
uint8_t sv_id;
int8_t frequency_index;
uint8_t observation_state; // SVObservationStates
uint8_t observations;
uint8_t good_observations;
uint8_t hemming_error_count;
uint8_t filter_stages;
uint16_t carrier_noise;
int16_t latency;
uint8_t predetect_interval;
uint16_t postdetections;
uint32_t unfiltered_measurement_integral;
float unfiltered_measurement_fraction;
float unfiltered_time_uncertainty;
float unfiltered_speed;
float unfiltered_speed_uncertainty;
uint32_t measurement_status;
uint8_t misc_status;
uint32_t multipath_estimate;
float azimuth;
float elevation;
int32_t carrier_phase_cycles_integral;
uint16_t carrier_phase_cycles_fraction;
float fine_speed;
float fine_speed_uncertainty;
uint8_t cycle_slip_count;
uint32_t pad;
"""
gps_measurement_report = """
uint8_t version;
uint32_t f_count;
uint16_t week;
uint32_t milliseconds;
float time_bias;
float clock_time_uncertainty;
float clock_frequency_bias;
float clock_frequency_uncertainty;
uint8_t sv_count;
"""
gps_measurement_report_sv = """
uint8_t sv_id;
uint8_t observation_state; // SVObservationStates
uint8_t observations;
uint8_t good_observations;
uint16_t parity_error_count;
uint8_t filter_stages;
uint16_t carrier_noise;
int16_t latency;
uint8_t predetect_interval;
uint16_t postdetections;
uint32_t unfiltered_measurement_integral;
float unfiltered_measurement_fraction;
float unfiltered_time_uncertainty;
float unfiltered_speed;
float unfiltered_speed_uncertainty;
uint32_t measurement_status;
uint8_t misc_status;
uint32_t multipath_estimate;
float azimuth;
float elevation;
int32_t carrier_phase_cycles_integral;
uint16_t carrier_phase_cycles_fraction;
float fine_speed;
float fine_speed_uncertainty;
uint8_t cycle_slip_count;
uint32_t pad;
"""
position_report = """
uint8 u_Version; /* Version number of DM log */
uint32 q_Fcount; /* Local millisecond counter */
uint8 u_PosSource; /* Source of position information */ /* 0: None 1: Weighted least-squares 2: Kalman filter 3: Externally injected 4: Internal database */
uint32 q_Reserved1; /* Reserved memory field */
uint16 w_PosVelFlag; /* Position velocity bit field: (see DM log 0x1476 documentation) */
uint32 q_PosVelFlag2; /* Position velocity 2 bit field: (see DM log 0x1476 documentation) */
uint8 u_FailureCode; /* Failure code: (see DM log 0x1476 documentation) */
uint16 w_FixEvents; /* Fix events bit field: (see DM log 0x1476 documentation) */
uint32 _fake_align_week_number;
uint16 w_GpsWeekNumber; /* GPS week number of position */
uint32 q_GpsFixTimeMs; /* GPS fix time of week of in milliseconds */
uint8 u_GloNumFourYear; /* Number of Glonass four year cycles */
uint16 w_GloNumDaysInFourYear; /* Glonass calendar day in four year cycle */
uint32 q_GloFixTimeMs; /* Glonass fix time of day in milliseconds */
uint32 q_PosCount; /* Integer count of the number of unique positions reported */
uint64 t_DblFinalPosLatLon[2]; /* Final latitude and longitude of position in radians */
uint32 q_FltFinalPosAlt; /* Final height-above-ellipsoid altitude of position */
uint32 q_FltHeadingRad; /* User heading in radians */
uint32 q_FltHeadingUncRad; /* User heading uncertainty in radians */
uint32 q_FltVelEnuMps[3]; /* User velocity in east, north, up coordinate frame. In meters per second. */
uint32 q_FltVelSigmaMps[3]; /* Gaussian 1-sigma value for east, north, up components of user velocity */
uint32 q_FltClockBiasMeters; /* Receiver clock bias in meters */
uint32 q_FltClockBiasSigmaMeters; /* Gaussian 1-sigma value for receiver clock bias in meters */
uint32 q_FltGGTBMeters; /* GPS to Glonass time bias in meters */
uint32 q_FltGGTBSigmaMeters; /* Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltGBTBMeters; /* GPS to BeiDou time bias in meters */
uint32 q_FltGBTBSigmaMeters; /* Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltBGTBMeters; /* BeiDou to Glonass time bias in meters */
uint32 q_FltBGTBSigmaMeters; /* Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGGTBMeters; /* Filtered GPS to Glonass time bias in meters */
uint32 q_FltFiltGGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to Glonass time bias uncertainty in meters */
uint32 q_FltFiltGBTBMeters; /* Filtered GPS to BeiDou time bias in meters */
uint32 q_FltFiltGBTBSigmaMeters; /* Filtered Gaussian 1-sigma value for GPS to BeiDou time bias uncertainty in meters */
uint32 q_FltFiltBGTBMeters; /* Filtered BeiDou to Glonass time bias in meters */
uint32 q_FltFiltBGTBSigmaMeters; /* Filtered Gaussian 1-sigma value for BeiDou to Glonass time bias uncertainty in meters */
uint32 q_FltSftOffsetSec; /* SFT offset as computed by WLS in seconds */
uint32 q_FltSftOffsetSigmaSec; /* Gaussian 1-sigma value for SFT offset in seconds */
uint32 q_FltClockDriftMps; /* Clock drift (clock frequency bias) in meters per second */
uint32 q_FltClockDriftSigmaMps; /* Gaussian 1-sigma value for clock drift in meters per second */
uint32 q_FltFilteredAlt; /* Filtered height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltFilteredAltSigma; /* Gaussian 1-sigma value for filtered height-above-ellipsoid altitude in meters */
uint32 q_FltRawAlt; /* Raw height-above-ellipsoid altitude in meters as computed by WLS */
uint32 q_FltRawAltSigma; /* Gaussian 1-sigma value for raw height-above-ellipsoid altitude in meters */
uint32 align_Flt[14];
  uint32 q_FltPdop; /* 3D position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltHdop; /* Horizontal position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint32 q_FltVdop; /* Vertical position dilution of precision as computed from the unweighted least-squares covariance matrix */
uint8 u_EllipseConfidence; /* Statistical measure of the confidence (percentage) associated with the uncertainty ellipse values */
uint32 q_FltEllipseAngle; /* Angle of semimajor axis with respect to true North, with increasing angles moving clockwise from North. In units of degrees. */
uint32 q_FltEllipseSemimajorAxis; /* Semimajor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltEllipseSemiminorAxis; /* Semiminor axis of final horizontal position uncertainty error ellipse. In units of meters. */
uint32 q_FltPosSigmaVertical; /* Gaussian 1-sigma value for final position height-above-ellipsoid altitude in meters */
uint8 u_HorizontalReliability; /* Horizontal position reliability 0: Not set 1: Very Low 2: Low 3: Medium 4: High */
uint8 u_VerticalReliability; /* Vertical position reliability */
uint16 w_Reserved2; /* Reserved memory field */
uint32 q_FltGnssHeadingRad; /* User heading in radians derived from GNSS only solution */
uint32 q_FltGnssHeadingUncRad; /* User heading uncertainty in radians derived from GNSS only solution */
  uint32 q_SensorDataUsageMask; /* Denotes which additional sensor data were used to compute this position fix. BIT[0] 0x00000001 - Accelerometer BIT[1] 0x00000002 - Gyro 0x0000FFFC - Reserved A bit set to 1 indicates that certain fields as defined by the SENSOR_AIDING_MASK were aided with sensor data*/
  uint32 q_SensorAidMask; /* Denotes which component of the position report was assisted with additional sensors defined in SENSOR_DATA_USAGE_MASK BIT[0] 0x00000001 - Heading aided with sensor data BIT[1] 0x00000002 - Speed aided with sensor data BIT[2] 0x00000004 - Position aided with sensor data BIT[3] 0x00000008 - Velocity aided with sensor data 0xFFFFFFF0 - Reserved */
uint8 u_NumGpsSvsUsed; /* The number of GPS SVs used in the fix */
uint8 u_TotalGpsSvs; /* Total number of GPS SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumGloSvsUsed; /* The number of Glonass SVs used in the fix */
uint8 u_TotalGloSvs; /* Total number of Glonass SVs detected by searcher, including ones not used in position calculation */
uint8 u_NumBdsSvsUsed; /* The number of BeiDou SVs used in the fix */
uint8 u_TotalBdsSvs; /* Total number of BeiDou SVs detected by searcher, including ones not used in position calculation */
"""
def name_to_camelcase(nam):
ret = []
i = 0
while i < len(nam):
if nam[i] == "_":
ret.append(nam[i+1].upper())
i += 2
else:
ret.append(nam[i])
i += 1
return ''.join(ret)
def parse_struct(ss):
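    # Translate the C-style field declarations into a little-endian struct format string plus the ordered field names.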
st = "<"
nams = []
for l in ss.strip().split("\n"):
typ, nam = l.split(";")[0].split()
#print(typ, nam)
if typ == "float" or '_Flt' in nam:
st += "f"
elif typ == "double" or '_Dbl' in nam:
st += "d"
elif typ in ["uint8", "uint8_t"]:
st += "B"
elif typ in ["int8", "int8_t"]:
st += "b"
elif typ in ["uint32", "uint32_t"]:
st += "I"
elif typ in ["int32", "int32_t"]:
st += "i"
elif typ in ["uint16", "uint16_t"]:
st += "H"
elif typ in ["int16", "int16_t"]:
st += "h"
elif typ == "uint64":
st += "Q"
else:
print("unknown type", typ)
assert False
if '[' in nam:
cnt = int(nam.split("[")[1].split("]")[0])
st += st[-1]*(cnt-1)
for i in range(cnt):
nams.append("%s[%d]" % (nam.split("[")[0], i))
else:
nams.append(nam)
return st, nams
def dict_unpacker(ss, camelcase = False):
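    # Return a (bytes -> field dict) unpacker together with the packed struct size in bytes.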
st, nams = parse_struct(ss)
if camelcase:
nams = [name_to_camelcase(x) for x in nams]
sz = calcsize(st)
return lambda x: dict(zip(nams, unpack_from(st, x))), sz
| [
"struct.calcsize",
"struct.unpack_from"
] | [((11349, 11361), 'struct.calcsize', 'calcsize', (['st'], {}), '(st)\n', (11357, 11361), False, 'from struct import unpack_from, calcsize\n'), ((11396, 11414), 'struct.unpack_from', 'unpack_from', (['st', 'x'], {}), '(st, x)\n', (11407, 11414), False, 'from struct import unpack_from, calcsize\n')] |
from __future__ import print_function
try:
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
except ImportError:
from PySide2.QtWidgets import *
from PySide2.QtGui import *
from PySide2.QtCore import *
import hou
from hammer_tools.utils import createAction
def isRevertToDefaultEvent(event):
return event.modifiers() == Qt.ControlModifier and event.button() == Qt.MiddleButton
class Slider(QSlider):
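    # QSlider that opens Houdini's value ladder while dragging and reverts to its default value on Ctrl+middle-click.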
def __init__(self, orientation=Qt.Horizontal, parent=None):
super(Slider, self).__init__(orientation, parent)
self.defaultValue = 0
self.valueLadderMode = False
def revertToDefault(self):
self.setValue(self.defaultValue)
def setDefaultValue(self, value, reset=True):
self.defaultValue = value
if reset:
self.revertToDefault()
def mousePressEvent(self, event):
if False: # Type hint
event = QMouseEvent
if event.button() == Qt.MiddleButton:
return
elif event.button() == Qt.LeftButton:
event = QMouseEvent(QEvent.MouseButtonPress, event.pos(),
Qt.MiddleButton, Qt.MiddleButton, Qt.NoModifier)
super(Slider, self).mousePressEvent(event)
def mouseMoveEvent(self, event):
if False: # Type hint
event = QMouseEvent
if not self.valueLadderMode and event.buttons() == Qt.MiddleButton:
try:
hou.ui.openValueLadder(self.value(), self.setValue, data_type=hou.valueLadderDataType.Int)
except hou.OperationFailed:
return
else:
self.valueLadderMode = True
elif self.valueLadderMode:
hou.ui.updateValueLadder(event.globalX(), event.globalY(),
bool(event.modifiers() & Qt.AltModifier),
bool(event.modifiers() & Qt.ShiftModifier))
else:
super(Slider, self).mouseMoveEvent(event)
def mouseReleaseEvent(self, event):
if False: # Type hint
event = QMouseEvent
if self.valueLadderMode and event.button() == Qt.MiddleButton:
hou.ui.closeValueLadder()
self.valueLadderMode = False
elif isRevertToDefaultEvent(event):
self.revertToDefault()
else:
super(Slider, self).mouseReleaseEvent(event)
class SearchField(QComboBox):
def __init__(self, parent=None):
super(SearchField, self).__init__(parent)
self.setEditable(True)
edit = self.lineEdit()
edit.setPlaceholderText('Search...')
edit.installEventFilter(self)
edit.setFont(QFont('Segoe UI'))
self.setFixedHeight(26)
comp = self.completer()
comp.setCompletionMode(QCompleter.PopupCompletion)
comp.setFilterMode(Qt.MatchContains)
comp.setModelSorting(QCompleter.CaseInsensitivelySortedModel)
comp.setMaxVisibleItems(5)
popup = comp.popup()
popup.setStyleSheet(hou.qt.styleSheet())
def mouseReleaseEvent(self, event):
if False: # Type hint
event = QMouseEvent
if isRevertToDefaultEvent(event):
self.clearEditText()
def eventFilter(self, watched, event):
if False: # Type hint
watched = QObject
event = QEvent
if watched == self.lineEdit():
if event.type() == QEvent.MouseButtonRelease and isRevertToDefaultEvent(event):
self.clearEditText()
event.accept()
return True
return False
def keyPressEvent(self, event):
if False: # Type hint
event = QKeyEvent
key = event.key()
mod = event.modifiers()
if mod == Qt.NoModifier and key == Qt.Key_Escape:
self.clearEditText()
else:
super(SearchField, self).keyPressEvent(event)
def hidePopup(self):
super(SearchField, self).hidePopup()
self.lineEdit().setFocus()
link_or_state_icon = 'BUTTONS_link'
embedded_icon = 'BUTTONS_pinned'
class BrowserMode(QStandardItemModel):
def __init__(self):
super(BrowserMode, self).__init__()
class BrowserTreeView(QTreeView):
def __init__(self, parent=None):
super(BrowserTreeView, self).__init__(parent)
self.setAlternatingRowColors(True)
class BrowserTableView(QListView):
def __init__(self, parent=None):
super(BrowserTableView, self).__init__(parent)
self.setViewMode(QListView.IconMode)
self.setResizeMode(QListView.Adjust)
self.setSelectionMode(QAbstractItemView.ExtendedSelection)
self.setVerticalScrollMode(QAbstractItemView.ScrollPerPixel)
self.setIconSize(QSize(120, 90))
self.setUniformItemSizes(True)
self.setContextMenuPolicy(Qt.CustomContextMenu)
class ContentBrowser(QWidget):
def __init__(self, parent=None):
super(ContentBrowser, self).__init__(parent)
self.setWindowTitle('Content Browser')
self.setProperty('houdiniStyle', True)
topLayout = QHBoxLayout()
topLayout.setContentsMargins(4, 4, 4, 2)
topLayout.setSpacing(2)
self.refreshButton = QPushButton()
self.refreshButton.setFixedSize(26, 26)
self.refreshButton.setToolTip('Update\tF5')
self.refreshButton.setIcon(hou.qt.Icon('BUTTONS_reload', 18, 18))
self.refreshButton.setIconSize(QSize(18, 18))
topLayout.addWidget(self.refreshButton)
sep = hou.qt.Separator()
if False: # Type hint
sep = QFrame
sep.setFixedWidth(2)
sep.setFrameShape(QFrame.VLine)
topLayout.addWidget(sep)
viewModeButtonGroup = QButtonGroup(self)
viewModeButtonGroup.setExclusive(True)
self.treeViewButton = QPushButton()
self.treeViewButton.setFixedSize(26, 26)
self.treeViewButton.setToolTip('Tree View\t\tCtrl+1')
self.treeViewButton.setIcon(hou.qt.Icon('BUTTONS_tree', 18, 18))
self.treeViewButton.setIconSize(QSize(18, 18))
self.treeViewButton.setCheckable(True)
viewModeButtonGroup.addButton(self.treeViewButton)
topLayout.addWidget(self.treeViewButton)
self.tableViewButton = QPushButton()
self.tableViewButton.setFixedSize(26, 26)
self.tableViewButton.setToolTip('Table View\tCtrl+2')
self.tableViewButton.setIcon(hou.qt.Icon('NETVIEW_shape_palette', 18, 18))
self.tableViewButton.setIconSize(QSize(18, 18))
self.tableViewButton.setCheckable(True)
self.tableViewButton.toggle()
viewModeButtonGroup.addButton(self.tableViewButton)
topLayout.addWidget(self.tableViewButton)
topLayout.addWidget(sep)
self.searchField = SearchField()
self.searchField.setToolTip('Search\tCtrl+F, F3')
topLayout.addWidget(self.searchField)
searchModeButtonGroup = QButtonGroup(self)
searchModeButtonGroup.setExclusive(True)
self.wholeSearchButton = QPushButton()
self.wholeSearchButton.setFixedSize(26, 26)
self.wholeSearchButton.setCheckable(True)
self.wholeSearchButton.setToolTip('Whole word search')
self.wholeSearchButton.setIcon(hou.qt.Icon('VOP_titlecase', 18, 18))
self.wholeSearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.wholeSearchButton)
topLayout.addWidget(self.wholeSearchButton)
self.fuzzySearchButton = QPushButton()
self.fuzzySearchButton.setFixedSize(26, 26)
self.fuzzySearchButton.setCheckable(True)
self.fuzzySearchButton.toggle()
self.fuzzySearchButton.setToolTip('Fuzzy search')
self.fuzzySearchButton.setIcon(hou.qt.Icon('VOP_endswith', 18, 18))
self.fuzzySearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.fuzzySearchButton)
topLayout.addWidget(self.fuzzySearchButton)
self.patternSearchButton = QPushButton()
self.patternSearchButton.setFixedSize(26, 26)
self.patternSearchButton.setCheckable(True)
self.patternSearchButton.setToolTip('Search by Pattern')
self.patternSearchButton.setIcon(hou.qt.Icon('VOP_isalpha', 18, 18))
self.patternSearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.patternSearchButton)
topLayout.addWidget(self.patternSearchButton)
self.regexSearchButton = QPushButton()
self.regexSearchButton.setFixedSize(26, 26)
self.regexSearchButton.setCheckable(True)
self.regexSearchButton.setToolTip('Search by Regular Expression')
self.regexSearchButton.setIcon(hou.qt.Icon('VOP_regex_match', 18, 18))
self.regexSearchButton.setIconSize(QSize(18, 18))
searchModeButtonGroup.addButton(self.regexSearchButton)
topLayout.addWidget(self.regexSearchButton)
topLayout.addWidget(sep)
topLayout.addWidget(hou.qt.HelpButton('/hammer/content_browser', 'Show Help\tF1'))
middleLayout = QHBoxLayout()
middleLayout.setContentsMargins(4, 0, 0, 4)
middleLayout.setSpacing(4)
self.viewLayout = QStackedLayout(middleLayout)
model = QFileSystemModel()
model.setRootPath('C:/')
treeView = BrowserTreeView()
treeView.setModel(model)
treeView.setRootIndex(model.index('C:/'))
self.viewLayout.addWidget(treeView)
tableView = BrowserTableView()
tableView.setModel(model)
tableView.setRootIndex(model.index('C:/'))
tableView.setSelectionModel(treeView.selectionModel())
self.viewLayout.addWidget(tableView)
self.viewLayout.setCurrentIndex(1)
self.treeViewButton.clicked.connect(self.switchToTreeView)
self.addAction(createAction(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1'))
self.tableViewButton.clicked.connect(self.switchToTableView)
self.addAction(createAction(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2'))
bottomLayout = QHBoxLayout()
bottomLayout.setContentsMargins(4, 0, 4, 4)
bottomLayout.setSpacing(2)
settingsButton = QPushButton()
settingsButton.setFixedSize(26, 26)
settingsButton.setToolTip('Settings')
settingsButton.setIcon(hou.qt.Icon('BUTTONS_gear_mini', 18, 18))
settingsButton.setIconSize(QSize(18, 18))
bottomLayout.addWidget(settingsButton)
spacer = QSpacerItem(0, 0, QSizePolicy.Expanding, QSizePolicy.Ignored)
bottomLayout.addSpacerItem(spacer)
self.scaleSlider = Slider()
self.scaleSlider.setDefaultValue(50)
self.scaleSlider.setFixedWidth(120)
self.scaleSlider.valueChanged.connect(lambda v: tableView.setIconSize(QSize(120, 90) * v / 100))
bottomLayout.addWidget(self.scaleSlider)
mainLayout = QVBoxLayout(self)
mainLayout.setContentsMargins(0, 0, 0, 0)
mainLayout.setSpacing(4)
mainLayout.addLayout(topLayout)
mainLayout.addLayout(middleLayout)
mainLayout.addLayout(bottomLayout)
def switchToTreeView(self):
self.viewLayout.setCurrentIndex(0)
self.scaleSlider.hide()
self.treeViewButton.setChecked(True)
def switchToTableView(self):
self.viewLayout.setCurrentIndex(1)
self.scaleSlider.show()
self.tableViewButton.setChecked(True)
def keyPressEvent(self, event):
if False: # Type hint
event = QKeyEvent
key = event.key()
mod = event.modifiers()
if mod == Qt.NoModifier and key == Qt.Key_F5:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_F:
self.searchField.setFocus()
elif mod == Qt.NoModifier and key == Qt.Key_F3:
self.searchField.setFocus()
elif mod == Qt.ControlModifier and key == Qt.Key_Equal:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_Minus:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_1:
pass
elif mod == Qt.ControlModifier and key == Qt.Key_2:
pass
elif mod == Qt.NoModifier and key == Qt.Key_F1:
pass
else:
super(ContentBrowser, self).keyPressEvent(event)
if __name__ == '__main__':
app = QApplication([])
window = ContentBrowser()
window.show()
app.exec_()
| [
"hammer_tools.utils.createAction",
"hou.ui.closeValueLadder",
"hou.qt.Separator",
"hou.qt.styleSheet",
"hou.qt.Icon",
"hou.qt.HelpButton"
] | [((5595, 5613), 'hou.qt.Separator', 'hou.qt.Separator', ([], {}), '()\n', (5611, 5613), False, 'import hou\n'), ((2230, 2255), 'hou.ui.closeValueLadder', 'hou.ui.closeValueLadder', ([], {}), '()\n', (2253, 2255), False, 'import hou\n'), ((3082, 3101), 'hou.qt.styleSheet', 'hou.qt.styleSheet', ([], {}), '()\n', (3099, 3101), False, 'import hou\n'), ((5439, 5476), 'hou.qt.Icon', 'hou.qt.Icon', (['"""BUTTONS_reload"""', '(18)', '(18)'], {}), "('BUTTONS_reload', 18, 18)\n", (5450, 5476), False, 'import hou\n'), ((6060, 6095), 'hou.qt.Icon', 'hou.qt.Icon', (['"""BUTTONS_tree"""', '(18)', '(18)'], {}), "('BUTTONS_tree', 18, 18)\n", (6071, 6095), False, 'import hou\n'), ((6502, 6546), 'hou.qt.Icon', 'hou.qt.Icon', (['"""NETVIEW_shape_palette"""', '(18)', '(18)'], {}), "('NETVIEW_shape_palette', 18, 18)\n", (6513, 6546), False, 'import hou\n'), ((7333, 7369), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_titlecase"""', '(18)', '(18)'], {}), "('VOP_titlecase', 18, 18)\n", (7344, 7369), False, 'import hou\n'), ((7832, 7867), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_endswith"""', '(18)', '(18)'], {}), "('VOP_endswith', 18, 18)\n", (7843, 7867), False, 'import hou\n'), ((8305, 8339), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_isalpha"""', '(18)', '(18)'], {}), "('VOP_isalpha', 18, 18)\n", (8316, 8339), False, 'import hou\n'), ((8784, 8822), 'hou.qt.Icon', 'hou.qt.Icon', (['"""VOP_regex_match"""', '(18)', '(18)'], {}), "('VOP_regex_match', 18, 18)\n", (8795, 8822), False, 'import hou\n'), ((9061, 9122), 'hou.qt.HelpButton', 'hou.qt.HelpButton', (['"""/hammer/content_browser"""', '"""Show Help\tF1"""'], {}), "('/hammer/content_browser', 'Show Help\\tF1')\n", (9078, 9122), False, 'import hou\n'), ((9905, 9978), 'hammer_tools.utils.createAction', 'createAction', (['self', '"""Tree View"""', 'self.switchToTreeView'], {'shortcut': '"""Ctrl+1"""'}), "(self, 'Tree View', self.switchToTreeView, shortcut='Ctrl+1')\n", (9917, 9978), False, 'from hammer_tools.utils import createAction\n'), ((10072, 10147), 'hammer_tools.utils.createAction', 'createAction', (['self', '"""Table View"""', 'self.switchToTableView'], {'shortcut': '"""Ctrl+2"""'}), "(self, 'Table View', self.switchToTableView, shortcut='Ctrl+2')\n", (10084, 10147), False, 'from hammer_tools.utils import createAction\n'), ((10435, 10475), 'hou.qt.Icon', 'hou.qt.Icon', (['"""BUTTONS_gear_mini"""', '(18)', '(18)'], {}), "('BUTTONS_gear_mini', 18, 18)\n", (10446, 10475), False, 'import hou\n')] |
#!/usr/bin/python3
"""
UDP sender
"""
import socket
import time
import sys
smsg = b'\xaa\x08\xfe\x00\xc9\xe6\x5f\xee'
def main():
ip_port = ('192.168.3.188', 8888)
if len(sys.argv) < 2:
port = 8888
else:
port = int(sys.argv[1])
    # 1. Create a UDP socket
udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # 2. Bind the local address
udp_socket.bind(('', port))
cnt = 100
loop = 4
print("send %d...", cnt*loop)
# 3. 接收发送的数据
while cnt > 0:
#loop = 10
#while loop > 0:
for i in range(0, loop):
udp_socket.sendto(smsg, ip_port)
print('.', end=' ')
#loop = loop -1
#recv_data = udp_socket.recvfrom(1024)
#print(recv_data.decode('gbk'))
#print(recv_data.decode('utf-8'))
#print('.', end=' ')
#data = recv_data.decode('utf-8')
#print('0x%x'%data)
cnt = cnt - 1
time.sleep(0.005)
print("")
print("finished")
    # 7. Close the socket
udp_socket.close()
print("close")
if __name__ == '__main__':
main()
| [
"time.sleep",
"socket.socket"
] | [((305, 353), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (318, 353), False, 'import socket\n'), ((948, 965), 'time.sleep', 'time.sleep', (['(0.005)'], {}), '(0.005)\n', (958, 965), False, 'import time\n')] |
"""project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from rest_framework import routers
from blog import views
from blog.views import PostViewSet,CommentViewSet,CategoryViewSet,TagViewSet,DraftViewSet,HideViewSet
from django.conf import settings
from django.conf.urls.static import static
router = routers.DefaultRouter()
router.register(r'hide',HideViewSet, base_name='hiddinn')
router.register(r'draft',DraftViewSet, base_name='draft')
router.register(r'post', PostViewSet, base_name='post')
router.register(r'comment', CommentViewSet, base_name='comment')
router.register(r'tags', TagViewSet, base_name='tags')
router.register(r'category', CategoryViewSet, base_name='category')
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
urlpatterns.extend(
static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) +
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
)
| [
"django.conf.urls.static.static",
"django.conf.urls.include",
"django.urls.path",
"rest_framework.routers.DefaultRouter"
] | [((919, 942), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (940, 942), False, 'from rest_framework import routers\n'), ((1390, 1421), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (1394, 1421), False, 'from django.urls import path\n'), ((1437, 1457), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (1444, 1457), False, 'from django.conf.urls import url, include\n'), ((1483, 1541), 'django.conf.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""rest_framework"""'}), "('rest_framework.urls', namespace='rest_framework')\n", (1490, 1541), False, 'from django.conf.urls import url, include\n'), ((1569, 1632), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (1575, 1632), False, 'from django.conf.urls.static import static\n'), ((1639, 1700), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (1645, 1700), False, 'from django.conf.urls.static import static\n')] |
from dash import Dash, Input, Output, dcc, html
from dash.exceptions import PreventUpdate
def test_dddo001_dynamic_options(dash_dcc):
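    # The options are produced by a callback keyed on search_value, so the list is rebuilt while the user types.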
dropdown_options = [
{"label": "New York City", "value": "NYC"},
{"label": "Montreal", "value": "MTL"},
{"label": "San Francisco", "value": "SF"},
]
app = Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
Output("my-dynamic-dropdown", "options"),
[Input("my-dynamic-dropdown", "search_value")],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in dropdown_options if search_value in o["label"]]
dash_dcc.start_server(app)
# Get the inner input used for search value.
input_ = dash_dcc.find_element("#my-dynamic-dropdown input")
# Focus on the input to open the options menu
input_.send_keys("x")
# No options to be found with `x` in them, should show the empty message.
dash_dcc.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption")
# Should show all options.
assert len(options) == 3
# Searching for `on`
input_.send_keys("n")
options = dash_dcc.find_elements("#my-dynamic-dropdown .VirtualizedSelectOption")
assert len(options) == 1
print(options)
assert options[0].text == "Montreal"
assert dash_dcc.get_logs() == []
def test_dddo002_array_comma_value(dash_dcc):
app = Dash(__name__)
dropdown = dcc.Dropdown(
options=["New York, NY", "Montreal, QC", "San Francisco, CA"],
value=["San Francisco, CA"],
multi=True,
)
app.layout = html.Div(dropdown)
dash_dcc.start_server(app)
dash_dcc.wait_for_text_to_equal("#react-select-2--value-0", "San Francisco, CA\n ")
assert dash_dcc.get_logs() == []
def test_dddo003_value_no_options(dash_dcc):
app = Dash(__name__)
app.layout = html.Div(
[
dcc.Dropdown(value="foobar", id="dropdown"),
]
)
dash_dcc.start_server(app)
assert dash_dcc.get_logs() == []
dash_dcc.wait_for_element("#dropdown")
| [
"dash.dcc.Dropdown",
"dash.html.Div",
"dash.Input",
"dash.Output",
"dash.Dash"
] | [((328, 342), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (332, 342), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((360, 410), 'dash.dcc.Dropdown', 'dcc.Dropdown', ([], {'id': '"""my-dynamic-dropdown"""', 'options': '[]'}), "(id='my-dynamic-dropdown', options=[])\n", (372, 410), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((1616, 1630), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (1620, 1630), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((1647, 1767), 'dash.dcc.Dropdown', 'dcc.Dropdown', ([], {'options': "['New York, NY', 'Montreal, QC', 'San Francisco, CA']", 'value': "['San Francisco, CA']", 'multi': '(True)'}), "(options=['New York, NY', 'Montreal, QC', 'San Francisco, CA'],\n value=['San Francisco, CA'], multi=True)\n", (1659, 1767), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((1812, 1830), 'dash.html.Div', 'html.Div', (['dropdown'], {}), '(dropdown)\n', (1820, 1830), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((2047, 2061), 'dash.Dash', 'Dash', (['__name__'], {}), '(__name__)\n', (2051, 2061), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((439, 479), 'dash.Output', 'Output', (['"""my-dynamic-dropdown"""', '"""options"""'], {}), "('my-dynamic-dropdown', 'options')\n", (445, 479), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((490, 534), 'dash.Input', 'Input', (['"""my-dynamic-dropdown"""', '"""search_value"""'], {}), "('my-dynamic-dropdown', 'search_value')\n", (495, 534), False, 'from dash import Dash, Input, Output, dcc, html\n'), ((2112, 2155), 'dash.dcc.Dropdown', 'dcc.Dropdown', ([], {'value': '"""foobar"""', 'id': '"""dropdown"""'}), "(value='foobar', id='dropdown')\n", (2124, 2155), False, 'from dash import Dash, Input, Output, dcc, html\n')] |
import json
import gzip
import requests
from datetime import datetime
import pendulum
import boto3
from botocore.exceptions import ClientError
from util.log import Log
from settings.aws_settings import AWSSettings
from settings.telegram_settings import TelegramSettings
def lambda_handler(event: dict, context: dict) -> dict:
log = Log.setup(name='logger')
aws_settings = AWSSettings()
telegram_settings = TelegramSettings()
timezone = pendulum.timezone('America/Sao_Paulo')
date = datetime.now(tz=timezone).strftime('%Y-%m-%d')
timestamp = datetime.now(tz=timezone).strftime('%Y%m%d%H%M%S')
try:
token = telegram_settings.access_token
base_url = f"https://api.telegram.org/bot{token}"
data = json.loads(event["body"])
chat_id = data["message"]["chat"]["id"]
if chat_id == telegram_settings.chat_id:
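            # Update from the configured chat: persist the raw Telegram payload to S3, partitioned by date.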
client = boto3.client('s3')
bucket = aws_settings.raw_bucket
root_path = aws_settings.root_path
try:
with open(f"{root_path}/{timestamp}.json", mode='w', encoding='utf8') as fp:
json.dump(data, fp)
client.upload_file(f"{root_path}/{timestamp}.json", bucket, f"{date}/{timestamp}.json")
except ClientError as exc:
raise exc
else:
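            # Any other chat gets a canned refusal via the Bot API, sent as a gzip-compressed sendMessage request.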
text = "I can't talk to strangers, sorry mate!"
data = {"text": text, "chat_id": chat_id}
data = gzip.compress(json.dumps(data).encode('utf-8'))
headers = {'content-type': 'application/json', 'content-encoding': 'gzip'}
url = base_url + "/sendMessage"
requests.post(url=url, data=data, headers=headers)
except Exception as exc:
log.error(msg=exc)
finally:
return dict(statusCode="200")
| [
"json.loads",
"requests.post",
"boto3.client",
"pendulum.timezone",
"json.dumps",
"settings.aws_settings.AWSSettings",
"datetime.datetime.now",
"util.log.Log.setup",
"settings.telegram_settings.TelegramSettings",
"json.dump"
] | [((341, 365), 'util.log.Log.setup', 'Log.setup', ([], {'name': '"""logger"""'}), "(name='logger')\n", (350, 365), False, 'from util.log import Log\n'), ((385, 398), 'settings.aws_settings.AWSSettings', 'AWSSettings', ([], {}), '()\n', (396, 398), False, 'from settings.aws_settings import AWSSettings\n'), ((423, 441), 'settings.telegram_settings.TelegramSettings', 'TelegramSettings', ([], {}), '()\n', (439, 441), False, 'from settings.telegram_settings import TelegramSettings\n'), ((458, 496), 'pendulum.timezone', 'pendulum.timezone', (['"""America/Sao_Paulo"""'], {}), "('America/Sao_Paulo')\n", (475, 496), False, 'import pendulum\n'), ((753, 778), 'json.loads', 'json.loads', (["event['body']"], {}), "(event['body'])\n", (763, 778), False, 'import json\n'), ((508, 533), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone'}), '(tz=timezone)\n', (520, 533), False, 'from datetime import datetime\n'), ((571, 596), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone'}), '(tz=timezone)\n', (583, 596), False, 'from datetime import datetime\n'), ((899, 917), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (911, 917), False, 'import boto3\n'), ((1670, 1720), 'requests.post', 'requests.post', ([], {'url': 'url', 'data': 'data', 'headers': 'headers'}), '(url=url, data=data, headers=headers)\n', (1683, 1720), False, 'import requests\n'), ((1141, 1160), 'json.dump', 'json.dump', (['data', 'fp'], {}), '(data, fp)\n', (1150, 1160), False, 'import json\n'), ((1493, 1509), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1503, 1509), False, 'import json\n')] |
"""Collecting statistics of site visits."""
import collections
from datetime import datetime
from functools import reduce
from django.utils.translation import gettext_lazy as _
from hier.models import IPInfo, AccessLog, SiteStat
from v2_hier.utils import APPS
def get_site_stat(user):
"""Processing a new portion of log file records.
The site applications that users have visited and information about their IP addresses will be shown.
"""
TOTAL_IP = _('total different').capitalize() + ' IP'
TOTAL_LOG = _('total log records').capitalize()
NEW_LOG = _('new log records').capitalize()
cnt = collections.Counter()
cnt[TOTAL_IP] = len(IPInfo.objects.all())
cnt[TOTAL_LOG] = len(AccessLog.objects.all())
#Determining the last previously processed log file entry
last = datetime.min
site_stat = None
if SiteStat.objects.filter(user=user.id).exists():
site_stat = SiteStat.objects.filter(user = user.id).get()
if site_stat.record and site_stat.record.event:
last = site_stat.record.event
# New records
records = AccessLog.objects.filter(event__gt=last).order_by('-event')
cnt[NEW_LOG] += len(records)
# Save last processed log record
last_rec = None
if (len(records) > 0):
last_rec = records[0]
if site_stat:
site_stat.record = last_rec
site_stat.save()
else:
SiteStat.objects.create(user=user, record=last_rec)
#raise Exception(last_rec.event)
apps = {}
for rec in records:
uri = valid_uri(rec)
if not uri:
continue
# Determining the access to the site application
a_app = list(filter(lambda x: '/{}/'.format(x) in uri, APPS))
if not a_app:
continue
app = a_app[0]
if not app in apps:
apps[app] = {}
host = str(rec.host.info())
#raise Exception('aaa = ', aaa)
if not host in apps[app]:
apps[app][host] = []
page = '{} {}'.format(rec.method, uri)
if not page in apps[app][host]:
apps[app][host].append(page)
return cnt.most_common(), apps
def valid_uri(rec):
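    # Drop redirects, errors, static assets and locale prefixes; return the normalized URI or None if it should be ignored.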
if (rec.status >= 400) or (rec.status == 301):
return None
if 'favicon.ico' in rec.uri or '/static/' in rec.uri or '/jsi18n/' in rec.uri or '/photo/get_mini/' in rec.uri:
return None
if ('/?' in rec.uri) and (rec.method != 'POST'):
uri = rec.uri.split('?')[0]
else:
uri = rec.uri
uri = uri.replace('/ru/', '/').replace('/en/', '/')
if (uri == '/'):
return None
return uri
| [
"hier.models.IPInfo.objects.all",
"hier.models.SiteStat.objects.create",
"hier.models.AccessLog.objects.all",
"django.utils.translation.gettext_lazy",
"collections.Counter",
"hier.models.SiteStat.objects.filter",
"hier.models.AccessLog.objects.filter"
] | [((622, 643), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (641, 643), False, 'import collections\n'), ((668, 688), 'hier.models.IPInfo.objects.all', 'IPInfo.objects.all', ([], {}), '()\n', (686, 688), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((715, 738), 'hier.models.AccessLog.objects.all', 'AccessLog.objects.all', ([], {}), '()\n', (736, 738), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((527, 549), 'django.utils.translation.gettext_lazy', '_', (['"""total log records"""'], {}), "('total log records')\n", (528, 549), True, 'from django.utils.translation import gettext_lazy as _\n'), ((577, 597), 'django.utils.translation.gettext_lazy', '_', (['"""new log records"""'], {}), "('new log records')\n", (578, 597), True, 'from django.utils.translation import gettext_lazy as _\n'), ((855, 892), 'hier.models.SiteStat.objects.filter', 'SiteStat.objects.filter', ([], {'user': 'user.id'}), '(user=user.id)\n', (878, 892), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((1100, 1140), 'hier.models.AccessLog.objects.filter', 'AccessLog.objects.filter', ([], {'event__gt': 'last'}), '(event__gt=last)\n', (1124, 1140), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((1425, 1476), 'hier.models.SiteStat.objects.create', 'SiteStat.objects.create', ([], {'user': 'user', 'record': 'last_rec'}), '(user=user, record=last_rec)\n', (1448, 1476), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n'), ((469, 489), 'django.utils.translation.gettext_lazy', '_', (['"""total different"""'], {}), "('total different')\n", (470, 489), True, 'from django.utils.translation import gettext_lazy as _\n'), ((923, 960), 'hier.models.SiteStat.objects.filter', 'SiteStat.objects.filter', ([], {'user': 'user.id'}), '(user=user.id)\n', (946, 960), False, 'from hier.models import IPInfo, AccessLog, SiteStat\n')] |
__author__ = 'Xsank'
import time
from thinkutils_plus.eventbus.eventbus import EventBus
from myevent import GreetEvent
from myevent import ByeEvent
from mylistener import MyListener
if __name__=="__main__":
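    # Minimal EventBus demo: register a listener, post one event asynchronously and one synchronously, then tear down.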
eventbus=EventBus()
eventbus.register(MyListener())
ge=GreetEvent('world')
be=ByeEvent('world')
eventbus.async_post(be)
eventbus.post(ge)
time.sleep(0.1)
eventbus.unregister(MyListener())
    eventbus.destroy()
| [
"mylistener.MyListener",
"myevent.ByeEvent",
"time.sleep",
"thinkutils_plus.eventbus.eventbus.EventBus",
"myevent.GreetEvent"
] | [((223, 233), 'thinkutils_plus.eventbus.eventbus.EventBus', 'EventBus', ([], {}), '()\n', (231, 233), False, 'from thinkutils_plus.eventbus.eventbus import EventBus\n'), ((277, 296), 'myevent.GreetEvent', 'GreetEvent', (['"""world"""'], {}), "('world')\n", (287, 296), False, 'from myevent import GreetEvent\n'), ((304, 321), 'myevent.ByeEvent', 'ByeEvent', (['"""world"""'], {}), "('world')\n", (312, 321), False, 'from myevent import ByeEvent\n'), ((376, 391), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (386, 391), False, 'import time\n'), ((256, 268), 'mylistener.MyListener', 'MyListener', ([], {}), '()\n', (266, 268), False, 'from mylistener import MyListener\n'), ((416, 428), 'mylistener.MyListener', 'MyListener', ([], {}), '()\n', (426, 428), False, 'from mylistener import MyListener\n')] |
import json
import multiprocessing as mp
import re
from argparse import ArgumentParser
from enum import Enum, auto
import javalang
from functools import partial
PRED_TOKEN = 'PRED'
modifiers = ['public', 'private', 'protected', 'static']
class TargetType(Enum):
seq = auto()
tree = auto()
@staticmethod
def from_string(s):
try:
return TargetType[s]
except KeyError:
raise ValueError()
target_type = TargetType.seq
RE_WORDS = re.compile(r'''
# Find words in a string. Order matters!
[A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word
[A-Z]?[a-z]+ | # Capitalized words / all lower case
[A-Z]+ | # All upper case
\d+ | # Numbers
_ |
\" |
.+
''', re.VERBOSE)
TREE_SPLIT = re.compile(r'([(),])')
def split_subtokens(s):
    return [subtok for subtok in RE_WORDS.findall(s) if subtok != '_']
def subtokenize(s):
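    # Tokenize a Java snippet with javalang, retrying with wrappers ('()' appended, or parentheses around the snippet)
    # so partial snippets still parse; fall back to whitespace splitting if all attempts fail.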
failed = False
try:
tokens = list(javalang.tokenizer.tokenize(s))
except:
try:
tokens = list(javalang.tokenizer.tokenize(s + '()'))[:-2]
except:
try:
tokens = list(javalang.tokenizer.tokenize('(' + s + ')'))[1:-1]
except:
tokens = s.split()
failed = True
if failed:
return [' _ '.join(split_subtokens(i)) for i in tokens if not i in modifiers]
else:
return [' _ '.join(split_subtokens(i.value)) for i in tokens if not i.value in modifiers]
def subtokenize_tree(s):
return ' '.join([sub for sub in re.split(TREE_SPLIT, s) if len(sub) > 0])
def process_line(target_type, max_targets, max_nodes, line):
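    # Each line is a JSON record with the code context around the prediction point; records exceeding the size limits are skipped.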
obj = json.loads(line)
left_context = obj['left_context']
right_context = obj['right_context']
target_seq = obj['target_seq']
num_targets = obj['num_targets']
num_nodes = obj['num_nodes']
if max_targets is not None and num_targets > max_targets:
return None, None
if max_nodes is not None and num_nodes > max_nodes:
return None, None
if target_type is TargetType.seq:
target_pred = ' '.join(subtokenize(target_seq)).lower()
elif target_type is TargetType.tree:
target_pred = subtokenize_tree(obj['linearized_tree'])
source = '{} {} {}'.format(' '.join(subtokenize(left_context)[-200:]).lower(), PRED_TOKEN, ' '.join(subtokenize(right_context)[:200]).lower())
return source, target_pred
def process_file(file_path, data_file_role, dataset_name, target_type, max_targets, max_nodes):
total_examples = 0
source_output_path = '{}.{}.{}.source.txt'.format(dataset_name, target_type, data_file_role)
target_output_path = '{}.{}.{}.target.txt'.format(dataset_name, target_type, data_file_role)
with open(source_output_path, 'w') as source_output_file:
with open(target_output_path, 'w') as target_output_file:
with open(file_path, 'r') as file:
subtokenize_line = partial(process_line, target_type, max_targets, max_nodes)
with mp.Pool(64) as pool:
if data_file_role in ['test', 'val']:
examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]
else:
examples = pool.imap_unordered(subtokenize_line, file, chunksize=100)
#examples = [process_line(target_type, max_targets, max_nodes, line) for line in file]
for source_seq, target_seq in examples:
if source_seq is None or target_seq is None:
continue
source_output_file.write(source_seq + '\n')
target_output_file.write(target_seq + '\n')
total_examples += 1
#print(source_seq, target_seq)
print('File: ' + file_path)
print('Total examples: ' + str(total_examples))
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument("-trd", "--train_data", dest="train_data_path",
help="path to training data file", required=True)
parser.add_argument("-ted", "--test_data", dest="test_data_path",
help="path to test data file", required=True)
parser.add_argument("-vd", "--val_data", dest="val_data_path",
help="path to validation data file", required=True)
parser.add_argument("-o", "--output_name", dest="output_name",
help="output name - the base name for the created dataset", metavar="FILE", required=True,
default='data')
parser.add_argument("--target_type", dest="target_type", type=TargetType.from_string, choices=list(TargetType), required=True)
parser.add_argument("--max_targets", dest="max_targets", type=int, required=False, default=40)
parser.add_argument("--max_nodes", dest="max_nodes", type=int, required=False, default=None)
parser.add_argument('--local', action='store_true')
args = parser.parse_args()
train_data_path = args.train_data_path
test_data_path = args.test_data_path
val_data_path = args.val_data_path
for data_file_path, data_role in zip([train_data_path, test_data_path, val_data_path], ['train', 'test', 'val']):
process_file(file_path=data_file_path, data_file_role=data_role, dataset_name=args.output_name,
target_type=args.target_type, max_targets=args.max_targets, max_nodes=args.max_nodes)
| [
"javalang.tokenizer.tokenize",
"re.split",
"json.loads",
"enum.auto",
"argparse.ArgumentParser",
"re.compile",
"functools.partial",
"multiprocessing.Pool"
] | [((487, 778), 're.compile', 're.compile', (['"""\n # Find words in a string. Order matters!\n [A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word\n [A-Z]?[a-z]+ | # Capitalized words / all lower case\n [A-Z]+ | # All upper case\n \\\\d+ | # Numbers\n _ |\n \\\\" |\n .+\n"""', 're.VERBOSE'], {}), '(\n """\n # Find words in a string. Order matters!\n [A-Z]+(?=[A-Z][a-z]) | # All upper case before a capitalized word\n [A-Z]?[a-z]+ | # Capitalized words / all lower case\n [A-Z]+ | # All upper case\n \\\\d+ | # Numbers\n _ |\n \\\\" |\n .+\n"""\n , re.VERBOSE)\n', (497, 778), False, 'import re\n'), ((782, 803), 're.compile', 're.compile', (['"""([(),])"""'], {}), "('([(),])')\n", (792, 803), False, 'import re\n'), ((274, 280), 'enum.auto', 'auto', ([], {}), '()\n', (278, 280), False, 'from enum import Enum, auto\n'), ((292, 298), 'enum.auto', 'auto', ([], {}), '()\n', (296, 298), False, 'from enum import Enum, auto\n'), ((1691, 1707), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1701, 1707), False, 'import json\n'), ((3999, 4015), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (4013, 4015), False, 'from argparse import ArgumentParser\n'), ((980, 1010), 'javalang.tokenizer.tokenize', 'javalang.tokenizer.tokenize', (['s'], {}), '(s)\n', (1007, 1010), False, 'import javalang\n'), ((1577, 1600), 're.split', 're.split', (['TREE_SPLIT', 's'], {}), '(TREE_SPLIT, s)\n', (1585, 1600), False, 'import re\n'), ((2975, 3033), 'functools.partial', 'partial', (['process_line', 'target_type', 'max_targets', 'max_nodes'], {}), '(process_line, target_type, max_targets, max_nodes)\n', (2982, 3033), False, 'from functools import partial\n'), ((3055, 3066), 'multiprocessing.Pool', 'mp.Pool', (['(64)'], {}), '(64)\n', (3062, 3066), True, 'import multiprocessing as mp\n'), ((1063, 1100), 'javalang.tokenizer.tokenize', 'javalang.tokenizer.tokenize', (["(s + '()')"], {}), "(s + '()')\n", (1090, 1100), False, 'import javalang\n'), ((1170, 1212), 'javalang.tokenizer.tokenize', 'javalang.tokenizer.tokenize', (["('(' + s + ')')"], {}), "('(' + s + ')')\n", (1197, 1212), False, 'import javalang\n')] |
from __future__ import print_function
import numpy as np
import os,sys,time
"""
Copied from orphics.mpi
"""
try:
disable_mpi_env = os.environ['DISABLE_MPI']
disable_mpi = True if disable_mpi_env.lower().strip() == "true" else False
except:
disable_mpi = False
"""
Use the below cleanup stuff only for intel-mpi!
If you use it on openmpi, you will have no traceback for errors
causing hours of endless confusion and frustration! - Sincerely, past frustrated Mat
"""
# From Sigurd's enlib.mpi:
# Uncaught exceptions don't cause mpi to abort. This can lead to thousands of
# wasted CPU hours
# def cleanup(type, value, traceback):
# sys.__excepthook__(type, value, traceback)
# MPI.COMM_WORLD.Abort(1)
# sys.excepthook = cleanup
class fakeMpiComm:
"""
A Simple Fake MPI implementation
"""
def __init__(self):
pass
def Get_rank(self):
return 0
def Get_size(self):
return 1
def Barrier(self):
pass
def Abort(self,dummy):
pass
try:
if disable_mpi: raise
from mpi4py import MPI
except:
if not(disable_mpi): print("WARNING: mpi4py could not be loaded. Falling back to fake MPI. This means that if you submitted multiple processes, they will all be assigned the same rank of 0, and they are potentially doing the same thing.")
class template:
pass
MPI = template()
MPI.COMM_WORLD = fakeMpiComm()
def mpi_distribute(num_tasks,avail_cores,allow_empty=False):
# copied to mapsims.convert_noise_templates
if not(allow_empty): assert avail_cores<=num_tasks
min_each, rem = divmod(num_tasks,avail_cores)
num_each = np.array([min_each]*avail_cores) # first distribute equally
if rem>0: num_each[-rem:] += 1 # add the remainder to the last set of cores (so that rank 0 never gets extra jobs)
task_range = list(range(num_tasks)) # the full range of tasks
cumul = np.cumsum(num_each).tolist() # the end indices for each task
task_dist = [task_range[x:y] for x,y in zip([0]+cumul[:-1],cumul)] # a list containing the tasks for each core
assert sum(num_each)==num_tasks
assert len(num_each)==avail_cores
assert len(task_dist)==avail_cores
return num_each,task_dist
def distribute(njobs,verbose=True,**kwargs):
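    # Split njobs across the available MPI ranks and return the communicator, this rank's id, and its task list.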
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
numcores = comm.Get_size()
num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs)
if rank==0: print ("At most ", max(num_each) , " tasks...")
my_tasks = each_tasks[rank]
return comm,rank,my_tasks
| [
"numpy.array",
"numpy.cumsum"
] | [((1658, 1692), 'numpy.array', 'np.array', (['([min_each] * avail_cores)'], {}), '([min_each] * avail_cores)\n', (1666, 1692), True, 'import numpy as np\n'), ((1917, 1936), 'numpy.cumsum', 'np.cumsum', (['num_each'], {}), '(num_each)\n', (1926, 1936), True, 'import numpy as np\n')] |
import typing
from bot.constants import BOT_REPO_URL
from discord import Embed
from discord.ext import commands
from discord.ext.commands.cooldowns import BucketType
from . import _issues, _profile, _source
class Github(commands.Cog):
"""
Github Category cog, which contains commands related to github.
Commands:
├ profile Fetches a user's GitHub information.
├ issue Command to retrieve issue(s) from a GitHub repository.
└ source Displays information about the bot's source code.
"""
def __init__(self, bot: commands.Bot) -> None:
self.bot = bot
@commands.group(name="github", aliases=("gh",), invoke_without_command=True)
async def github_group(self, ctx: commands.Context) -> None:
"""Commands for Github."""
await ctx.send_help(ctx.command)
@github_group.command(name="profile")
@commands.cooldown(1, 10, BucketType.user)
async def profile(self, ctx: commands.Context, username: str) -> None:
"""
Fetches a user's GitHub information.
        A GitHub username must be provided.
"""
github_profile = _profile.GithubInfo(self.bot.http_session)
embed = await github_profile.get_github_info(username)
await ctx.send(embed=embed)
@github_group.command(name="issue", aliases=("pr",))
async def issue(
self,
ctx: commands.Context,
numbers: commands.Greedy[int],
repository: typing.Optional[str] = None,
) -> None:
"""Command to retrieve issue(s) from a GitHub repository."""
github_issue = _issues.Issues(self.bot.http_session)
if not numbers:
raise commands.MissingRequiredArgument(ctx.command.clean_params["numbers"])
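        # Default to the "gurkult" owner when no repository (or no owner prefix) is given.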
if repository is None:
user = "gurkult"
else:
user, _, repository = repository.rpartition("/")
if user == "":
user = "gurkult"
embed = await github_issue.issue(ctx.message.channel, numbers, repository, user)
await ctx.send(embed=embed)
@github_group.command(name="source", aliases=("src", "inspect"))
async def source_command(
self, ctx: commands.Context, *, source_item: typing.Optional[str] = None
) -> None:
"""Displays information about the bot's source code."""
if source_item is None:
embed = Embed(title="Gurkbot's GitHub Repository")
embed.add_field(name="Repository", value=f"[Go to GitHub]({BOT_REPO_URL})")
embed.set_thumbnail(url=self.bot.user.avatar_url)
await ctx.send(embed=embed)
return
elif not ctx.bot.get_command(source_item):
raise commands.BadArgument(
f"Unable to convert `{source_item}` to valid command or Cog."
)
github_source = _source.Source(self.bot.http_session, self.bot.user.avatar_url)
embed = await github_source.inspect(cmd=ctx.bot.get_command(source_item))
await ctx.send(embed=embed)
def setup(bot: commands.Bot) -> None:
"""Load the Github cog."""
bot.add_cog(Github(bot))
| [
"discord.ext.commands.MissingRequiredArgument",
"discord.ext.commands.group",
"discord.ext.commands.cooldown",
"discord.ext.commands.BadArgument",
"discord.Embed"
] | [((633, 708), 'discord.ext.commands.group', 'commands.group', ([], {'name': '"""github"""', 'aliases': "('gh',)", 'invoke_without_command': '(True)'}), "(name='github', aliases=('gh',), invoke_without_command=True)\n", (647, 708), False, 'from discord.ext import commands\n'), ((898, 939), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(10)', 'BucketType.user'], {}), '(1, 10, BucketType.user)\n', (915, 939), False, 'from discord.ext import commands\n'), ((1727, 1796), 'discord.ext.commands.MissingRequiredArgument', 'commands.MissingRequiredArgument', (["ctx.command.clean_params['numbers']"], {}), "(ctx.command.clean_params['numbers'])\n", (1759, 1796), False, 'from discord.ext import commands\n'), ((2432, 2474), 'discord.Embed', 'Embed', ([], {'title': '"""Gurkbot\'s GitHub Repository"""'}), '(title="Gurkbot\'s GitHub Repository")\n', (2437, 2474), False, 'from discord import Embed\n'), ((2753, 2841), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['f"""Unable to convert `{source_item}` to valid command or Cog."""'], {}), "(\n f'Unable to convert `{source_item}` to valid command or Cog.')\n", (2773, 2841), False, 'from discord.ext import commands\n')] |
from typing import Any, Dict, List, Tuple
from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp
def bump_fitness(fitness: Tuple[str, str]) -> Tuple[str, str]:
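    # Increment the minor fitness component while keeping the major one; start from (0, 1) when no previous fitness exists.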
if len(fitness) == 0:
major = 0
minor = 1
else:
major = int.from_bytes(bytes.fromhex(fitness[0]), 'big')
minor = int.from_bytes(bytes.fromhex(fitness[1]), 'big') + 1
return major.to_bytes(1, 'big').hex(), minor.to_bytes(8, 'big').hex()
def forge_int_fixed(value: int, length: int) -> bytes:
return value.to_bytes(length, 'big')
def forge_command(command: str) -> bytes:
if command == 'activate':
return b'\x00'
raise NotImplementedError(command)
def forge_fitness(fitness: List[str]) -> bytes:
return forge_array(b''.join(map(lambda x: forge_array(bytes.fromhex(x)), fitness)))
def forge_priority(priority: int) -> bytes:
return priority.to_bytes(2, 'big')
def forge_content(content: Dict[str, Any]) -> bytes:
res = b''
res += forge_command(content['command'])
res += forge_base58(content['hash'])
res += forge_fitness(content['fitness'])
res += bytes.fromhex(content['protocol_parameters'])
return res
def forge_protocol_data(protocol_data: Dict[str, Any]) -> bytes:
res = b''
if protocol_data.get('content'):
res += forge_content(protocol_data['content'])
else:
res += forge_priority(protocol_data['priority'])
res += bytes.fromhex(protocol_data['proof_of_work_nonce'])
if protocol_data.get('seed_nonce_hash'):
res += b'\xFF'
res += forge_base58(protocol_data['seed_nonce_hash'])
else:
res += b'\x00'
res += b'\xFF' if protocol_data['liquidity_baking_escape_vote'] else b'\x00'
return res
def forge_block_header(shell_header: Dict[str, Any]) -> bytes:
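    # Concatenate the binary encodings of the shell header fields in the order expected by the block header layout.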
res = forge_int_fixed(shell_header['level'], 4)
res += forge_int_fixed(shell_header['proto'], 1)
res += forge_base58(shell_header['predecessor'])
res += forge_int_fixed(optimize_timestamp(shell_header['timestamp']), 8)
res += forge_int_fixed(shell_header['validation_pass'], 1)
res += forge_base58(shell_header['operations_hash'])
res += forge_fitness(shell_header['fitness'])
res += forge_base58(shell_header['context'])
res += bytes.fromhex(shell_header['protocol_data'])
return res
| [
"pytezos.michelson.forge.optimize_timestamp",
"pytezos.michelson.forge.forge_base58"
] | [((1052, 1081), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["content['hash']"], {}), "(content['hash'])\n", (1064, 1081), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((1970, 2011), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["shell_header['predecessor']"], {}), "(shell_header['predecessor'])\n", (1982, 2011), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((2163, 2208), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["shell_header['operations_hash']"], {}), "(shell_header['operations_hash'])\n", (2175, 2208), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((2270, 2307), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["shell_header['context']"], {}), "(shell_header['context'])\n", (2282, 2307), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((2039, 2084), 'pytezos.michelson.forge.optimize_timestamp', 'optimize_timestamp', (["shell_header['timestamp']"], {}), "(shell_header['timestamp'])\n", (2057, 2084), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n'), ((1601, 1647), 'pytezos.michelson.forge.forge_base58', 'forge_base58', (["protocol_data['seed_nonce_hash']"], {}), "(protocol_data['seed_nonce_hash'])\n", (1613, 1647), False, 'from pytezos.michelson.forge import forge_array, forge_base58, optimize_timestamp\n')] |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import math
import sys
import paddle.compat as cpt
from op_test import OpTest
class TestROIPoolOp(OpTest):
def set_data(self):
self.init_test_case()
self.make_rois()
self.calc_roi_pool()
self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)}
self.attrs = {
'spatial_scale': self.spatial_scale,
'pooled_height': self.pooled_height,
'pooled_width': self.pooled_width
}
self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes}
def init_test_case(self):
self.batch_size = 3
self.channels = 3
self.height = 6
self.width = 4
# n, c, h, w
self.x_dim = (self.batch_size, self.channels, self.height, self.width)
self.spatial_scale = 1.0 / 4.0
self.pooled_height = 2
self.pooled_width = 2
self.x = np.random.random(self.x_dim).astype('float32')
def calc_roi_pool(self):
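        # Pure-NumPy reference implementation of ROI max pooling used to build the expected outputs and argmax indices.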
out_data = np.zeros((self.rois_num, self.channels, self.pooled_height,
self.pooled_width))
argmax_data = np.zeros((self.rois_num, self.channels,
self.pooled_height, self.pooled_width))
for i in range(self.rois_num):
roi = self.rois[i]
roi_batch_id = roi[0]
roi_start_w = int(cpt.round(roi[1] * self.spatial_scale))
roi_start_h = int(cpt.round(roi[2] * self.spatial_scale))
roi_end_w = int(cpt.round(roi[3] * self.spatial_scale))
roi_end_h = int(cpt.round(roi[4] * self.spatial_scale))
roi_height = int(max(roi_end_h - roi_start_h + 1, 1))
roi_width = int(max(roi_end_w - roi_start_w + 1, 1))
x_i = self.x[roi_batch_id]
bin_size_h = float(roi_height) / float(self.pooled_height)
bin_size_w = float(roi_width) / float(self.pooled_width)
for c in range(self.channels):
for ph in range(self.pooled_height):
for pw in range(self.pooled_width):
hstart = int(math.floor(ph * bin_size_h))
wstart = int(math.floor(pw * bin_size_w))
hend = int(math.ceil((ph + 1) * bin_size_h))
wend = int(math.ceil((pw + 1) * bin_size_w))
hstart = min(max(hstart + roi_start_h, 0), self.height)
hend = min(max(hend + roi_start_h, 0), self.height)
wstart = min(max(wstart + roi_start_w, 0), self.width)
wend = min(max(wend + roi_start_w, 0), self.width)
is_empty = (hend <= hstart) or (wend <= wstart)
if is_empty:
out_data[i, c, ph, pw] = 0
else:
out_data[i, c, ph, pw] = -sys.float_info.max
argmax_data[i, c, ph, pw] = -1
for h in range(hstart, hend):
for w in range(wstart, wend):
if x_i[c, h, w] > out_data[i, c, ph, pw]:
out_data[i, c, ph, pw] = x_i[c, h, w]
argmax_data[i, c, ph,
pw] = h * self.width + w
self.outs = out_data.astype('float32')
self.argmaxes = argmax_data.astype('int64')
def make_rois(self):
rois = []
self.rois_lod = [[]]
for bno in range(self.batch_size):
self.rois_lod[0].append(bno + 1)
for i in range(bno + 1):
x1 = np.random.random_integers(
0, self.width // self.spatial_scale - self.pooled_width)
y1 = np.random.random_integers(
0, self.height // self.spatial_scale - self.pooled_height)
x2 = np.random.random_integers(x1 + self.pooled_width,
self.width // self.spatial_scale)
y2 = np.random.random_integers(
y1 + self.pooled_height, self.height // self.spatial_scale)
roi = [bno, x1, y1, x2, y2]
rois.append(roi)
self.rois_num = len(rois)
self.rois = np.array(rois).astype("int64")
def setUp(self):
self.op_type = "roi_pool"
self.set_data()
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
if __name__ == '__main__':
unittest.main()
| [
"math.ceil",
"math.floor",
"numpy.random.random_integers",
"numpy.random.random",
"numpy.array",
"numpy.zeros",
"paddle.compat.round",
"unittest.main"
] | [((5305, 5320), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5318, 5320), False, 'import unittest\n'), ((1679, 1758), 'numpy.zeros', 'np.zeros', (['(self.rois_num, self.channels, self.pooled_height, self.pooled_width)'], {}), '((self.rois_num, self.channels, self.pooled_height, self.pooled_width))\n', (1687, 1758), True, 'import numpy as np\n'), ((1810, 1889), 'numpy.zeros', 'np.zeros', (['(self.rois_num, self.channels, self.pooled_height, self.pooled_width)'], {}), '((self.rois_num, self.channels, self.pooled_height, self.pooled_width))\n', (1818, 1889), True, 'import numpy as np\n'), ((1583, 1611), 'numpy.random.random', 'np.random.random', (['self.x_dim'], {}), '(self.x_dim)\n', (1599, 1611), True, 'import numpy as np\n'), ((2057, 2095), 'paddle.compat.round', 'cpt.round', (['(roi[1] * self.spatial_scale)'], {}), '(roi[1] * self.spatial_scale)\n', (2066, 2095), True, 'import paddle.compat as cpt\n'), ((2127, 2165), 'paddle.compat.round', 'cpt.round', (['(roi[2] * self.spatial_scale)'], {}), '(roi[2] * self.spatial_scale)\n', (2136, 2165), True, 'import paddle.compat as cpt\n'), ((2195, 2233), 'paddle.compat.round', 'cpt.round', (['(roi[3] * self.spatial_scale)'], {}), '(roi[3] * self.spatial_scale)\n', (2204, 2233), True, 'import paddle.compat as cpt\n'), ((2263, 2301), 'paddle.compat.round', 'cpt.round', (['(roi[4] * self.spatial_scale)'], {}), '(roi[4] * self.spatial_scale)\n', (2272, 2301), True, 'import paddle.compat as cpt\n'), ((4385, 4472), 'numpy.random.random_integers', 'np.random.random_integers', (['(0)', '(self.width // self.spatial_scale - self.pooled_width)'], {}), '(0, self.width // self.spatial_scale - self.\n pooled_width)\n', (4410, 4472), True, 'import numpy as np\n'), ((4510, 4599), 'numpy.random.random_integers', 'np.random.random_integers', (['(0)', '(self.height // self.spatial_scale - self.pooled_height)'], {}), '(0, self.height // self.spatial_scale - self.\n pooled_height)\n', (4535, 4599), True, 'import numpy as np\n'), ((4638, 4726), 'numpy.random.random_integers', 'np.random.random_integers', (['(x1 + self.pooled_width)', '(self.width // self.spatial_scale)'], {}), '(x1 + self.pooled_width, self.width // self.\n spatial_scale)\n', (4663, 4726), True, 'import numpy as np\n'), ((4790, 4880), 'numpy.random.random_integers', 'np.random.random_integers', (['(y1 + self.pooled_height)', '(self.height // self.spatial_scale)'], {}), '(y1 + self.pooled_height, self.height // self.\n spatial_scale)\n', (4815, 4880), True, 'import numpy as np\n'), ((5029, 5043), 'numpy.array', 'np.array', (['rois'], {}), '(rois)\n', (5037, 5043), True, 'import numpy as np\n'), ((2806, 2833), 'math.floor', 'math.floor', (['(ph * bin_size_h)'], {}), '(ph * bin_size_h)\n', (2816, 2833), False, 'import math\n'), ((2872, 2899), 'math.floor', 'math.floor', (['(pw * bin_size_w)'], {}), '(pw * bin_size_w)\n', (2882, 2899), False, 'import math\n'), ((2936, 2968), 'math.ceil', 'math.ceil', (['((ph + 1) * bin_size_h)'], {}), '((ph + 1) * bin_size_h)\n', (2945, 2968), False, 'import math\n'), ((3005, 3037), 'math.ceil', 'math.ceil', (['((pw + 1) * bin_size_w)'], {}), '((pw + 1) * bin_size_w)\n', (3014, 3037), False, 'import math\n')] |
import pytest
from apistrap.flask import FlaskApistrap
from apistrap.schemas import ErrorResponse
@pytest.fixture()
def app_with_raises(app):
oapi = FlaskApistrap()
@app.route("/", methods=["GET"])
def view():
"""
Something something.
:raises KeyError: KeyError description
"""
oapi.init_app(app)
@pytest.fixture()
def app_with_raises_and_handler(app):
oapi = FlaskApistrap()
oapi.add_error_handler(KeyError, 515, lambda e: ErrorResponse())
@app.route("/", methods=["GET"])
def view():
"""
Something something.
:raises KeyError: KeyError description
"""
oapi.init_app(app)
def test_error_descriptions_from_raises(app_with_raises, client):
response = client.get("/spec.json")
assert response.json["paths"]["/"]["get"]["responses"] == {
"500": {
"description": "KeyError description",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
def test_http_code_from_handler(app_with_raises_and_handler, client):
response = client.get("/spec.json")
assert response.json["paths"]["/"]["get"]["responses"] == {
"515": {
"description": "KeyError description",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ErrorResponse"
}
}
}
}
}
| [
"pytest.fixture",
"apistrap.schemas.ErrorResponse",
"apistrap.flask.FlaskApistrap"
] | [((102, 118), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (116, 118), False, 'import pytest\n'), ((354, 370), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (368, 370), False, 'import pytest\n'), ((156, 171), 'apistrap.flask.FlaskApistrap', 'FlaskApistrap', ([], {}), '()\n', (169, 171), False, 'from apistrap.flask import FlaskApistrap\n'), ((420, 435), 'apistrap.flask.FlaskApistrap', 'FlaskApistrap', ([], {}), '()\n', (433, 435), False, 'from apistrap.flask import FlaskApistrap\n'), ((488, 503), 'apistrap.schemas.ErrorResponse', 'ErrorResponse', ([], {}), '()\n', (501, 503), False, 'from apistrap.schemas import ErrorResponse\n')] |
#!/usr/bin/env python
# Copyright (c) 2013-2015, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Tool to tuck/untuck Baxter's arms to/from the shipping pose
"""
import argparse
from copy import deepcopy
import rospy
from std_msgs.msg import (
Empty,
Bool,
)
import baxter_interface
from baxter_core_msgs.msg import (
CollisionAvoidanceState,
)
from baxter_interface import CHECK_VERSION
class Tuck(object):
def __init__(self, tuck_cmd):
self._done = False
self._limbs = ('left', 'right')
self._arms = {
'left': baxter_interface.Limb('left'),
'right': baxter_interface.Limb('right'),
}
self._tuck = tuck_cmd
self._tuck_rate = rospy.Rate(20.0) # Hz
self._tuck_threshold = 0.2 # radians
self._peak_angle = -1.6 # radians
self._arm_state = {
'tuck': {'left': 'none', 'right': 'none'},
'collide': {'left': False, 'right': False},
'flipped': {'left': False, 'right': False}
}
self._joint_moves = {
'tuck': {
'left': [-1.0, -2.07, 3.0, 2.55, 0.0, 0.01, 0.0],
'right': [1.0, -2.07, -3.0, 2.55, -0.0, 0.01, 0.0]
},
'untuck': {
'left': [-0.08, -1.0, -1.19, 1.94, 0.67, 1.03, -0.50],
'right': [0.08, -1.0, 1.19, 1.94, -0.67, 1.03, 0.50]
}
}
self._collide_lsub = rospy.Subscriber(
'robot/limb/left/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'left')
self._collide_rsub = rospy.Subscriber(
'robot/limb/right/collision_avoidance_state',
CollisionAvoidanceState,
self._update_collision, 'right')
self._disable_pub = {
'left': rospy.Publisher(
'robot/limb/left/suppress_collision_avoidance',
Empty, queue_size=10),
'right': rospy.Publisher(
'robot/limb/right/suppress_collision_avoidance',
Empty, queue_size=10)
}
self._rs = baxter_interface.RobotEnable(CHECK_VERSION)
self._enable_pub = rospy.Publisher('robot/set_super_enable',
Bool, queue_size=10)
def _update_collision(self, data, limb):
self._arm_state['collide'][limb] = len(data.collision_object) > 0
self._check_arm_state()
def _check_arm_state(self):
"""
        Check whether each arm is at a goal position and whether it is behind
        the head collision force-field.
        If the s1 joint is over the peak, collision avoidance will need to be
        disabled to get the arm around the head-arm collision force-field.
"""
diff_check = lambda a, b: abs(a - b) <= self._tuck_threshold
for limb in self._limbs:
angles = [self._arms[limb].joint_angle(joint)
for joint in self._arms[limb].joint_names()]
# Check if in a goal position
untuck_goal = map(diff_check, angles,
self._joint_moves['untuck'][limb])
tuck_goal = map(diff_check, angles[0:2],
self._joint_moves['tuck'][limb][0:2])
if all(untuck_goal):
self._arm_state['tuck'][limb] = 'untuck'
elif all(tuck_goal):
self._arm_state['tuck'][limb] = 'tuck'
else:
self._arm_state['tuck'][limb] = 'none'
# Check if shoulder is flipped over peak
self._arm_state['flipped'][limb] = (
self._arms[limb].joint_angle(limb + '_s1') <= self._peak_angle)
def _prepare_to_tuck(self):
# If arms are in "tucked" state, disable collision avoidance
# before enabling robot, to avoid arm jerking from "force-field".
head = baxter_interface.Head()
start_disabled = not self._rs.state().enabled
at_goal = lambda: (abs(head.pan()) <=
baxter_interface.settings.HEAD_PAN_ANGLE_TOLERANCE)
rospy.loginfo("Moving head to neutral position")
while not at_goal() and not rospy.is_shutdown():
if start_disabled:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
if not self._rs.state().enabled:
self._enable_pub.publish(True)
head.set_pan(0.0, 0.5, timeout=0)
self._tuck_rate.sleep()
if start_disabled:
while self._rs.state().enabled == True and not rospy.is_shutdown():
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def _move_to(self, tuck, disabled):
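        # Drive each limb toward its goal pose ('tuck'/'untuck'), suppressing collision avoidance for limbs flagged in 'disabled'.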
if any(disabled.values()):
[pub.publish(Empty()) for pub in self._disable_pub.values()]
while (any(self._arm_state['tuck'][limb] != goal
for limb, goal in tuck.viewitems())
and not rospy.is_shutdown()):
if self._rs.state().enabled == False:
self._enable_pub.publish(True)
for limb in self._limbs:
if disabled[limb]:
self._disable_pub[limb].publish(Empty())
if limb in tuck:
self._arms[limb].set_joint_positions(dict(zip(
self._arms[limb].joint_names(),
self._joint_moves[tuck[limb]][limb])))
self._check_arm_state()
self._tuck_rate.sleep()
if any(self._arm_state['collide'].values()):
self._rs.disable()
return
def supervised_tuck(self):
# Update our starting state to check if arms are tucked
self._prepare_to_tuck()
self._check_arm_state()
# Tuck Arms
if self._tuck == True:
# If arms are already tucked, report this to user and exit.
if all(self._arm_state['tuck'][limb] == 'tuck'
for limb in self._limbs):
rospy.loginfo("Tucking: Arms already in 'Tucked' position.")
self._done = True
return
else:
rospy.loginfo("Tucking: One or more arms not Tucked.")
any_flipped = not all(self._arm_state['flipped'].values())
if any_flipped:
rospy.loginfo(
"Moving to neutral start position with collision %s.",
"on" if any_flipped else "off")
# Move to neutral pose before tucking arms to avoid damage
self._check_arm_state()
actions = dict()
disabled = {'left': True, 'right': True}
for limb in self._limbs:
if not self._arm_state['flipped'][limb]:
actions[limb] = 'untuck'
disabled[limb] = False
self._move_to(actions, disabled)
# Disable collision and Tuck Arms
rospy.loginfo("Tucking: Tucking with collision avoidance off.")
actions = {'left': 'tuck', 'right': 'tuck'}
disabled = {'left': True, 'right': True}
self._move_to(actions, disabled)
self._done = True
return
# Untuck Arms
else:
# If arms are tucked disable collision and untuck arms
if any(self._arm_state['flipped'].values()):
rospy.loginfo("Untucking: One or more arms Tucked;"
" Disabling Collision Avoidance and untucking.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
# If arms already untucked, move to neutral location
else:
rospy.loginfo("Untucking: Arms already Untucked;"
" Moving to neutral position.")
self._check_arm_state()
suppress = deepcopy(self._arm_state['flipped'])
actions = {'left': 'untuck', 'right': 'untuck'}
self._move_to(actions, suppress)
self._done = True
return
def clean_shutdown(self):
"""Handles ROS shutdown (Ctrl-C) safely."""
if not self._done:
rospy.logwarn('Aborting: Shutting down safely...')
if any(self._arm_state['collide'].values()):
while self._rs.state().enabled != False:
[pub.publish(Empty()) for pub in self._disable_pub.values()]
self._enable_pub.publish(False)
self._tuck_rate.sleep()
def main():
parser = argparse.ArgumentParser()
tuck_group = parser.add_mutually_exclusive_group(required=True)
tuck_group.add_argument("-t","--tuck", dest="tuck",
action='store_true', default=False, help="tuck arms")
tuck_group.add_argument("-u", "--untuck", dest="untuck",
action='store_true', default=False, help="untuck arms")
args = parser.parse_args(rospy.myargv()[1:])
tuck = args.tuck
rospy.loginfo("Initializing node... ")
rospy.init_node("rsdk_tuck_arms")
rospy.loginfo("%sucking arms" % ("T" if tuck else "Unt",))
tucker = Tuck(tuck)
rospy.on_shutdown(tucker.clean_shutdown)
tucker.supervised_tuck()
rospy.loginfo("Finished tuck")
if __name__ == "__main__":
main()
| [
"rospy.Publisher",
"baxter_interface.Head",
"baxter_interface.Limb",
"argparse.ArgumentParser",
"rospy.logwarn",
"rospy.is_shutdown",
"rospy.init_node",
"rospy.loginfo",
"rospy.myargv",
"rospy.Rate",
"copy.deepcopy",
"baxter_interface.RobotEnable",
"rospy.Subscriber",
"rospy.on_shutdown",
"std_msgs.msg.Empty"
] | [((10616, 10641), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10639, 10641), False, 'import argparse\n'), ((11028, 11066), 'rospy.loginfo', 'rospy.loginfo', (['"""Initializing node... """'], {}), "('Initializing node... ')\n", (11041, 11066), False, 'import rospy\n'), ((11071, 11104), 'rospy.init_node', 'rospy.init_node', (['"""rsdk_tuck_arms"""'], {}), "('rsdk_tuck_arms')\n", (11086, 11104), False, 'import rospy\n'), ((11109, 11167), 'rospy.loginfo', 'rospy.loginfo', (["('%sucking arms' % ('T' if tuck else 'Unt',))"], {}), "('%sucking arms' % ('T' if tuck else 'Unt',))\n", (11122, 11167), False, 'import rospy\n'), ((11196, 11236), 'rospy.on_shutdown', 'rospy.on_shutdown', (['tucker.clean_shutdown'], {}), '(tucker.clean_shutdown)\n', (11213, 11236), False, 'import rospy\n'), ((11270, 11300), 'rospy.loginfo', 'rospy.loginfo', (['"""Finished tuck"""'], {}), "('Finished tuck')\n", (11283, 11300), False, 'import rospy\n'), ((2208, 2224), 'rospy.Rate', 'rospy.Rate', (['(20.0)'], {}), '(20.0)\n', (2218, 2224), False, 'import rospy\n'), ((3063, 3185), 'rospy.Subscriber', 'rospy.Subscriber', (['"""robot/limb/left/collision_avoidance_state"""', 'CollisionAvoidanceState', 'self._update_collision', '"""left"""'], {}), "('robot/limb/left/collision_avoidance_state',\n CollisionAvoidanceState, self._update_collision, 'left')\n", (3079, 3185), False, 'import rospy\n'), ((3299, 3423), 'rospy.Subscriber', 'rospy.Subscriber', (['"""robot/limb/right/collision_avoidance_state"""', 'CollisionAvoidanceState', 'self._update_collision', '"""right"""'], {}), "('robot/limb/right/collision_avoidance_state',\n CollisionAvoidanceState, self._update_collision, 'right')\n", (3315, 3423), False, 'import rospy\n'), ((3852, 3895), 'baxter_interface.RobotEnable', 'baxter_interface.RobotEnable', (['CHECK_VERSION'], {}), '(CHECK_VERSION)\n', (3880, 3895), False, 'import baxter_interface\n'), ((3923, 3985), 'rospy.Publisher', 'rospy.Publisher', (['"""robot/set_super_enable"""', 'Bool'], {'queue_size': '(10)'}), "('robot/set_super_enable', Bool, queue_size=10)\n", (3938, 3985), False, 'import rospy\n'), ((5560, 5583), 'baxter_interface.Head', 'baxter_interface.Head', ([], {}), '()\n', (5581, 5583), False, 'import baxter_interface\n'), ((5769, 5817), 'rospy.loginfo', 'rospy.loginfo', (['"""Moving head to neutral position"""'], {}), "('Moving head to neutral position')\n", (5782, 5817), False, 'import rospy\n'), ((2054, 2083), 'baxter_interface.Limb', 'baxter_interface.Limb', (['"""left"""'], {}), "('left')\n", (2075, 2083), False, 'import baxter_interface\n'), ((2106, 2136), 'baxter_interface.Limb', 'baxter_interface.Limb', (['"""right"""'], {}), "('right')\n", (2127, 2136), False, 'import baxter_interface\n'), ((3558, 3647), 'rospy.Publisher', 'rospy.Publisher', (['"""robot/limb/left/suppress_collision_avoidance"""', 'Empty'], {'queue_size': '(10)'}), "('robot/limb/left/suppress_collision_avoidance', Empty,\n queue_size=10)\n", (3573, 3647), False, 'import rospy\n'), ((3701, 3791), 'rospy.Publisher', 'rospy.Publisher', (['"""robot/limb/right/suppress_collision_avoidance"""', 'Empty'], {'queue_size': '(10)'}), "('robot/limb/right/suppress_collision_avoidance', Empty,\n queue_size=10)\n", (3716, 3791), False, 'import rospy\n'), ((10267, 10317), 'rospy.logwarn', 'rospy.logwarn', (['"""Aborting: Shutting down safely..."""'], {}), "('Aborting: Shutting down safely...')\n", (10280, 10317), False, 'import rospy\n'), ((10982, 10996), 'rospy.myargv', 'rospy.myargv', ([], {}), '()\n', (10994, 10996), 
False, 'import rospy\n'), ((5854, 5873), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (5871, 5873), False, 'import rospy\n'), ((6714, 6733), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (6731, 6733), False, 'import rospy\n'), ((7788, 7848), 'rospy.loginfo', 'rospy.loginfo', (['"""Tucking: Arms already in \'Tucked\' position."""'], {}), '("Tucking: Arms already in \'Tucked\' position.")\n', (7801, 7848), False, 'import rospy\n'), ((7940, 7994), 'rospy.loginfo', 'rospy.loginfo', (['"""Tucking: One or more arms not Tucked."""'], {}), "('Tucking: One or more arms not Tucked.')\n", (7953, 7994), False, 'import rospy\n'), ((8791, 8854), 'rospy.loginfo', 'rospy.loginfo', (['"""Tucking: Tucking with collision avoidance off."""'], {}), "('Tucking: Tucking with collision avoidance off.')\n", (8804, 8854), False, 'import rospy\n'), ((9255, 9362), 'rospy.loginfo', 'rospy.loginfo', (['"""Untucking: One or more arms Tucked; Disabling Collision Avoidance and untucking."""'], {}), "(\n 'Untucking: One or more arms Tucked; Disabling Collision Avoidance and untucking.'\n )\n", (9268, 9362), False, 'import rospy\n'), ((9453, 9489), 'copy.deepcopy', 'deepcopy', (["self._arm_state['flipped']"], {}), "(self._arm_state['flipped'])\n", (9461, 9489), False, 'from copy import deepcopy\n'), ((9759, 9837), 'rospy.loginfo', 'rospy.loginfo', (['"""Untucking: Arms already Untucked; Moving to neutral position."""'], {}), "('Untucking: Arms already Untucked; Moving to neutral position.')\n", (9772, 9837), False, 'import rospy\n'), ((9938, 9974), 'copy.deepcopy', 'deepcopy', (["self._arm_state['flipped']"], {}), "(self._arm_state['flipped'])\n", (9946, 9974), False, 'from copy import deepcopy\n'), ((6244, 6263), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (6261, 6263), False, 'import rospy\n'), ((6531, 6538), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (6536, 6538), False, 'from std_msgs.msg import Empty, Bool\n'), ((8122, 8226), 'rospy.loginfo', 'rospy.loginfo', (['"""Moving to neutral start position with collision %s."""', "('on' if any_flipped else 'off')"], {}), "('Moving to neutral start position with collision %s.', 'on' if\n any_flipped else 'off')\n", (8135, 8226), False, 'import rospy\n'), ((5935, 5942), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (5940, 5942), False, 'from std_msgs.msg import Empty, Bool\n'), ((6294, 6301), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (6299, 6301), False, 'from std_msgs.msg import Empty, Bool\n'), ((6957, 6964), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (6962, 6964), False, 'from std_msgs.msg import Empty, Bool\n'), ((10453, 10460), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (10458, 10460), False, 'from std_msgs.msg import Empty, Bool\n')] |
from unittest.mock import Mock
import pytest
from galaxy import model
from galaxy.tools.parameters import basic
from .util import BaseParameterTestCase
class SelectToolParameterTestCase(BaseParameterTestCase):
def test_validated_values(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
with pytest.raises(ValueError) as exc_info:
self.param.from_json("42", self.trans, {"input_bam": model.HistoryDatasetAssociation()})
assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"
def test_validated_values_missing_dependency(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
with pytest.raises(ValueError) as exc_info:
self.param.from_json("42", self.trans)
assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"
def test_unvalidated_values(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
self.trans.workflow_building_mode = True
assert self.param.from_json("42", self.trans) == "42"
def test_validated_datasets(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
with pytest.raises(ValueError) as exc_info:
self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": None})
assert str(exc_info.value) == "parameter 'my_name': requires a value, but no legal values defined"
def test_unvalidated_datasets(self):
self.options_xml = """<options><filter type="data_meta" ref="input_bam" key="dbkey"/></options>"""
self.trans.workflow_building_mode = True
assert isinstance(
self.param.from_json(model.HistoryDatasetAssociation(), self.trans, {"input_bam": basic.RuntimeValue()}),
model.HistoryDatasetAssociation,
)
def test_filter_param_value(self):
self.options_xml = """<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="0" /></options>"""
assert ("testname1", "testpath1", False) in self.param.get_options(self.trans, {"input_bam": "testname1"})
assert ("testname2", "testpath2", False) in self.param.get_options(self.trans, {"input_bam": "testname2"})
assert len(self.param.get_options(self.trans, {"input_bam": "testname3"})) == 0
def test_filter_param_value2(self):
# Same test as above, but filtering on a different column.
self.options_xml = """<options from_data_table="test_table"><filter type="param_value" ref="input_bam" column="1" /></options>"""
assert ("testname1", "testpath1", False) in self.param.get_options(self.trans, {"input_bam": "testpath1"})
assert ("testname2", "testpath2", False) in self.param.get_options(self.trans, {"input_bam": "testpath2"})
assert len(self.param.get_options(self.trans, {"input_bam": "testpath3"})) == 0
# TODO: Good deal of overlap here with DataToolParameterTestCase,
# refactor.
def setUp(self):
super().setUp()
self.test_history = model.History()
self.app.model.context.add(self.test_history)
self.app.model.context.flush()
self.app.tool_data_tables["test_table"] = MockToolDataTable()
self.trans = Mock(
app=self.app,
get_history=lambda: self.test_history,
get_current_user_roles=lambda: [],
workflow_building_mode=False,
webapp=Mock(name="galaxy"),
)
self.type = "select"
self.set_data_ref = False
self.multiple = False
self.optional = False
self.options_xml = ""
self._param = None
@property
def param(self):
if not self._param:
multi_text = ""
if self.multiple:
multi_text = 'multiple="True"'
optional_text = ""
if self.optional:
optional_text = 'optional="True"'
options_text = self.options_xml
data_ref_text = ""
if self.set_data_ref:
data_ref_text = 'data_ref="input_bam"'
template_xml = """<param name="my_name" type="%s" %s %s %s>%s</param>"""
param_str = template_xml % (self.type, data_ref_text, multi_text, optional_text, options_text)
self._param = self._parameter_for(xml=param_str)
return self._param
class MockToolDataTable:
def __init__(self):
self.columns = dict(
name=0,
value=1,
)
self.missing_index_file = None
def get_fields(self):
return [["testname1", "testpath1"], ["testname2", "testpath2"]]
| [
"galaxy.model.HistoryDatasetAssociation",
"unittest.mock.Mock",
"galaxy.model.History",
"pytest.raises",
"galaxy.tools.parameters.basic.RuntimeValue"
] | [((3287, 3302), 'galaxy.model.History', 'model.History', ([], {}), '()\n', (3300, 3302), False, 'from galaxy import model\n'), ((371, 396), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (384, 396), False, 'import pytest\n'), ((799, 824), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (812, 824), False, 'import pytest\n'), ((1418, 1443), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1431, 1443), False, 'import pytest\n'), ((1490, 1523), 'galaxy.model.HistoryDatasetAssociation', 'model.HistoryDatasetAssociation', ([], {}), '()\n', (1521, 1523), False, 'from galaxy import model\n'), ((1927, 1960), 'galaxy.model.HistoryDatasetAssociation', 'model.HistoryDatasetAssociation', ([], {}), '()\n', (1958, 1960), False, 'from galaxy import model\n'), ((3678, 3697), 'unittest.mock.Mock', 'Mock', ([], {'name': '"""galaxy"""'}), "(name='galaxy')\n", (3682, 3697), False, 'from unittest.mock import Mock\n'), ((475, 508), 'galaxy.model.HistoryDatasetAssociation', 'model.HistoryDatasetAssociation', ([], {}), '()\n', (506, 508), False, 'from galaxy import model\n'), ((1988, 2008), 'galaxy.tools.parameters.basic.RuntimeValue', 'basic.RuntimeValue', ([], {}), '()\n', (2006, 2008), False, 'from galaxy.tools.parameters import basic\n')] |
import logging
import torch
import torch.nn as nn
from abc import ABC, abstractmethod
log = logging.getLogger(__name__)
class Detector(ABC):
def __init__(self, lr=0.001):
self.lr = lr
self.__model = None
self.__optimizer = None
self.__criterion = nn.CrossEntropyLoss()
@property
def model(self):
return self.__model
@property
def optimizer(self):
return self.__optimizer
@property
def criterion(self):
return self.__criterion
@abstractmethod
def init_model(self, char_vocab, hidden_size, n_domain_type, n_layers):
pass
@abstractmethod
def train_model(self, epoch, train_dataset):
pass
@abstractmethod
def predict(self, epoch, train_dataset):
pass
def load_model(self, file_path):
""" This function load already saved model and sets cuda parameters.
:param file_path: File path of a model to loaded.
:type file_path: string
"""
model = torch.load(file_path)
model.eval()
self.__model = model
self.__set_model2cuda()
self.__set_optimizer()
def save_model(self, file_path):
""" This function saves model to given location.
:param file_path: File path to save model.
:type file_path: string
"""
torch.save(self.model, file_path)
def __set_parallelism(self):
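        # Wrap the model in DataParallel when more than one GPU is available.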
gpu_count = torch.cuda.device_count()
if gpu_count > 1:
log.info("%s GPUs!" % (gpu_count))
self.__model = nn.DataParallel(self.model)
self.__set_model2cuda()
else:
self.__set_model2cuda()
def __set_optimizer(self):
self.__optimizer = torch.optim.RMSprop(
self.model.parameters(), self.lr, weight_decay=0.0
)
def __set_model2cuda(self):
if torch.cuda.is_available():
log.info("Setting cuda")
self.model.cuda()
def leverage_model(self, model):
"""This function leverages model by setting parallelism parameters.
:param model: Model instance.
:type model: RNNClassifier
"""
self.__model = model
self.__set_parallelism()
self.__set_optimizer()
| [
"logging.getLogger",
"torch.nn.CrossEntropyLoss",
"torch.load",
"torch.cuda.device_count",
"torch.nn.DataParallel",
"torch.cuda.is_available",
"torch.save"
] | [((93, 120), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (110, 120), False, 'import logging\n'), ((286, 307), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (305, 307), True, 'import torch.nn as nn\n'), ((1022, 1043), 'torch.load', 'torch.load', (['file_path'], {}), '(file_path)\n', (1032, 1043), False, 'import torch\n'), ((1356, 1389), 'torch.save', 'torch.save', (['self.model', 'file_path'], {}), '(self.model, file_path)\n', (1366, 1389), False, 'import torch\n'), ((1444, 1469), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1467, 1469), False, 'import torch\n'), ((1881, 1906), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1904, 1906), False, 'import torch\n'), ((1570, 1597), 'torch.nn.DataParallel', 'nn.DataParallel', (['self.model'], {}), '(self.model)\n', (1585, 1597), True, 'import torch.nn as nn\n')] |
# SPDX-License-Identifier: BSD-3-Clause
#
# Copyright (c) 2021 <NAME>. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sqlalchemy.exc
from selfdroid.appstorage.AppMetadata import AppMetadata
from selfdroid.appstorage.AppMetadataDBModel import AppMetadataDBModel
from selfdroid.appstorage.AppStorageConsistencyEnsurer import AppStorageConsistencyEnsurer
from selfdroid.appstorage.apk.APKParser import APKParser
from selfdroid.appstorage.apk.ParsedAPK import ParsedAPK
from selfdroid.appstorage.crud.AppAdderException import AppAdderException
from selfdroid.web.WebStatusMessageCollector import WebStatusMessageCollector
from selfdroid import db
class AppAdder:
"""
This class must be instantiated and have its public methods called in a locked context!
"""
def __init__(self, uploaded_apk_path: str):
self._uploaded_apk_path: str = uploaded_apk_path
self._parsed_apk: ParsedAPK = APKParser(self._uploaded_apk_path).parsed_apk
def add_app_while_locked(self) -> AppMetadata:
"""
:return: The metadata of the added app.
"""
try:
app_metadata = self._add_app_while_locked_with_exceptions_handled()
except (sqlalchemy.exc.SQLAlchemyError, OSError):
db.session.rollback()
raise AppAdderException("An error occurred while adding the app!")
finally:
AppStorageConsistencyEnsurer().ensure_consistency_while_locked()
return app_metadata
def _add_app_while_locked_with_exceptions_handled(self) -> AppMetadata:
self._check_if_app_can_be_added()
return self._perform_app_addition()
def _check_if_app_can_be_added(self) -> None:
an_app_with_the_same_package_name = AppMetadataDBModel.query.filter_by(package_name=self._parsed_apk.package_name).first()
if an_app_with_the_same_package_name is not None:
html_message = WebStatusMessageCollector.format_html_message("An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!", self._parsed_apk.package_name)
raise AppAdderException(html_message)
def _perform_app_addition(self) -> AppMetadata:
# An UserReadableException mustn't be raised in this method!
# 1. Database
db_model = self._parsed_apk.create_new_db_model_with_metadata()
db.session.add(db_model)
db.session.commit()
assert isinstance(db_model.id, int)
app_metadata = AppMetadata.from_db_model(db_model)
# 2. APK
apk_path = app_metadata.get_apk_path()
os.rename(self._uploaded_apk_path, apk_path)
# 3. Icon
icon_path = app_metadata.get_icon_path()
with open(icon_path, "wb") as icon_file:
icon_file.write(self._parsed_apk.uniform_png_app_icon)
return app_metadata
| [
"selfdroid.appstorage.crud.AppAdderException.AppAdderException",
"selfdroid.db.session.rollback",
"selfdroid.appstorage.AppMetadataDBModel.AppMetadataDBModel.query.filter_by",
"selfdroid.appstorage.AppMetadata.AppMetadata.from_db_model",
"os.rename",
"selfdroid.db.session.commit",
"selfdroid.web.WebStatusMessageCollector.WebStatusMessageCollector.format_html_message",
"selfdroid.appstorage.apk.APKParser.APKParser",
"selfdroid.db.session.add",
"selfdroid.appstorage.AppStorageConsistencyEnsurer.AppStorageConsistencyEnsurer"
] | [((3856, 3880), 'selfdroid.db.session.add', 'db.session.add', (['db_model'], {}), '(db_model)\n', (3870, 3880), False, 'from selfdroid import db\n'), ((3889, 3908), 'selfdroid.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3906, 3908), False, 'from selfdroid import db\n'), ((3978, 4013), 'selfdroid.appstorage.AppMetadata.AppMetadata.from_db_model', 'AppMetadata.from_db_model', (['db_model'], {}), '(db_model)\n', (4003, 4013), False, 'from selfdroid.appstorage.AppMetadata import AppMetadata\n'), ((4088, 4132), 'os.rename', 'os.rename', (['self._uploaded_apk_path', 'apk_path'], {}), '(self._uploaded_apk_path, apk_path)\n', (4097, 4132), False, 'import os\n'), ((2380, 2414), 'selfdroid.appstorage.apk.APKParser.APKParser', 'APKParser', (['self._uploaded_apk_path'], {}), '(self._uploaded_apk_path)\n', (2389, 2414), False, 'from selfdroid.appstorage.apk.APKParser import APKParser\n'), ((3373, 3590), 'selfdroid.web.WebStatusMessageCollector.WebStatusMessageCollector.format_html_message', 'WebStatusMessageCollector.format_html_message', (['"""An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!"""', 'self._parsed_apk.package_name'], {}), "(\n 'An app with the same package name <i>({})</i> is already present on the server! You should update the app instead of adding it!'\n , self._parsed_apk.package_name)\n", (3418, 3590), False, 'from selfdroid.web.WebStatusMessageCollector import WebStatusMessageCollector\n'), ((3599, 3630), 'selfdroid.appstorage.crud.AppAdderException.AppAdderException', 'AppAdderException', (['html_message'], {}), '(html_message)\n', (3616, 3630), False, 'from selfdroid.appstorage.crud.AppAdderException import AppAdderException\n'), ((2716, 2737), 'selfdroid.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (2735, 2737), False, 'from selfdroid import db\n'), ((2757, 2817), 'selfdroid.appstorage.crud.AppAdderException.AppAdderException', 'AppAdderException', (['"""An error occurred while adding the app!"""'], {}), "('An error occurred while adding the app!')\n", (2774, 2817), False, 'from selfdroid.appstorage.crud.AppAdderException import AppAdderException\n'), ((3201, 3279), 'selfdroid.appstorage.AppMetadataDBModel.AppMetadataDBModel.query.filter_by', 'AppMetadataDBModel.query.filter_by', ([], {'package_name': 'self._parsed_apk.package_name'}), '(package_name=self._parsed_apk.package_name)\n', (3235, 3279), False, 'from selfdroid.appstorage.AppMetadataDBModel import AppMetadataDBModel\n'), ((2848, 2878), 'selfdroid.appstorage.AppStorageConsistencyEnsurer.AppStorageConsistencyEnsurer', 'AppStorageConsistencyEnsurer', ([], {}), '()\n', (2876, 2878), False, 'from selfdroid.appstorage.AppStorageConsistencyEnsurer import AppStorageConsistencyEnsurer\n')] |
from flask import Blueprint
from flask_restful import Api
# from restful import Api
from resources.Hello import CategoryResource
api_bp = Blueprint('api', __name__)
api = Api(api_bp)
# Route
api.add_resource(CategoryResource, '/Hello') | [
"flask.Blueprint",
"flask_restful.Api"
] | [((140, 166), 'flask.Blueprint', 'Blueprint', (['"""api"""', '__name__'], {}), "('api', __name__)\n", (149, 166), False, 'from flask import Blueprint\n'), ((173, 184), 'flask_restful.Api', 'Api', (['api_bp'], {}), '(api_bp)\n', (176, 184), False, 'from flask_restful import Api\n')] |
#from gevent import monkey
#monkey.patch_all()
from flask import Flask, render_template, json
from flask_socketio import SocketIO, emit
from pydbus import SystemBus
from gi.repository import GLib
import threading
import json
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode='threading')
#socketio = SocketIO(app)
#Message: (':1.654', '/hfp/org/bluez/hci0/dev_94_65_2D_84_61_99', 'org.ofono.Modem', 'PropertyChanged', ('Powered', False))
#Data: Powered
bus = SystemBus()
def cb_server_signal_emission(*args):
print("Message: ", args)
makedev = lambda path : path.split('/')[-1]
iface = args[2]
if 'org.ofono.Modem' in iface:
if 'PropertyChanged' in args[3]:
message = { 'source': 'modem', 'event': 'property_change', 'device': makedev(args[1]), 'property': args[4][0], 'property_value': args[4][1] }
else:
message = {'unknown_signal': args }
elif 'org.ofono.NetworkRegistration' in iface:
if 'PropertyChanged' in args[3]:
message = { 'source': 'network', 'event': 'property_change', 'device': makedev(args[1]), 'property': args[4][0], 'property_value': args[4][1] }
else:
message = {'unknown_signal': args }
elif 'ofono.VoiceCallManager' in iface:
if 'CallAdded' in args[3]:
message = { 'source': 'callmgr', 'event': 'call_added', 'device': makedev(args[1]), 'properties': args[4][1] }
elif 'CallRemoved' in args[3]:
message = { 'source': 'callmgr', 'event': 'call_removed', 'device': makedev(args[1]) }
else:
message = {'unknown_signal': args }
elif 'ofono.VoiceCall' in iface:
if 'PropertyChanged' in args[3]:
message = { 'source': 'call', 'event': 'property_change', 'device': makedev(args[1]), 'property': args[4][0], 'property_value': args[4][1] }
else:
message = {'unknown_signal': args }
socketio.emit('message', json.dumps(message))
def dbus_monitor():
bus.subscribe(iface = 'org.ofono.Modem',
signal_fired = cb_server_signal_emission)
bus.subscribe(iface = 'org.ofono.NetworkRegistration',
signal_fired = cb_server_signal_emission)
print(bus)
bus.subscribe(iface = 'org.ofono.VoiceCallManager',
signal_fired = cb_server_signal_emission)
print(bus)
bus.subscribe(iface = 'org.ofono.VoiceCall',
signal_fired = cb_server_signal_emission)
loop = GLib.MainLoop()
loop.run()
@app.route('/')
def index():
return '''
<html>
<head>
<script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/socket.io/1.3.6/socket.io.min.js"></script>
<script type="text/javascript" charset="utf-8">
var socket = io.connect('http://' + document.domain + ':' + location.port);
socket.on('connect', function() {
socket.emit('connected', {data: 'Client connected!'});
});
socket.on('message', function(message) {
console.log('The server has a message for you: ' + message);
var t = document.getElementById("logbox");
t.value = t.value + 'MESSAGE: ' + message + '\\n';
});
</script>
</head>
<body>
<textarea id="logbox" width="100" rows="10"></textarea>
<br>
<button onclick="document.getElementById('logbox').value='';">Clear</button>
</body>
</html>
'''
@socketio.on('my event')
def handle_my_custom_event(arg1):
emit('message', {'data': 42})
if __name__ == '__main__':
t = threading.Thread(target=dbus_monitor)
t.daemon = True
t.start()
socketio.run(app, host='0.0.0.0', port=5001)
| [
"flask_socketio.emit",
"flask.Flask",
"json.dumps",
"gi.repository.GLib.MainLoop",
"pydbus.SystemBus",
"flask_socketio.SocketIO",
"threading.Thread"
] | [((235, 250), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (240, 250), False, 'from flask import Flask, render_template, json\n'), ((299, 336), 'flask_socketio.SocketIO', 'SocketIO', (['app'], {'async_mode': '"""threading"""'}), "(app, async_mode='threading')\n", (307, 336), False, 'from flask_socketio import SocketIO, emit\n'), ((512, 523), 'pydbus.SystemBus', 'SystemBus', ([], {}), '()\n', (521, 523), False, 'from pydbus import SystemBus\n'), ((2454, 2469), 'gi.repository.GLib.MainLoop', 'GLib.MainLoop', ([], {}), '()\n', (2467, 2469), False, 'from gi.repository import GLib\n'), ((3367, 3396), 'flask_socketio.emit', 'emit', (['"""message"""', "{'data': 42}"], {}), "('message', {'data': 42})\n", (3371, 3396), False, 'from flask_socketio import SocketIO, emit\n'), ((3431, 3468), 'threading.Thread', 'threading.Thread', ([], {'target': 'dbus_monitor'}), '(target=dbus_monitor)\n', (3447, 3468), False, 'import threading\n'), ((1939, 1958), 'json.dumps', 'json.dumps', (['message'], {}), '(message)\n', (1949, 1958), False, 'import json\n')] |
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import Any, Iterable, cast
from pants.core.util_rules.lockfile_metadata import (
LockfileMetadata,
LockfileMetadataValidation,
LockfileScope,
_get_metadata,
lockfile_metadata_registrar,
)
from pants.jvm.resolve.common import ArtifactRequirement
from pants.util.ordered_set import FrozenOrderedSet
_jvm_lockfile_metadata = lockfile_metadata_registrar(LockfileScope.JVM)
class InvalidJVMLockfileReason(Enum):
REQUIREMENTS_MISMATCH = "requirements_mismatch"
@dataclass(frozen=True)
class JVMLockfileMetadata(LockfileMetadata):
scope = LockfileScope.JVM
@staticmethod
def new(
requirements: Iterable[ArtifactRequirement],
) -> JVMLockfileMetadata:
"""Call the most recent version of the `LockfileMetadata` class to construct a concrete
instance.
This static method should be used in place of the `LockfileMetadata` constructor. This gives
calling sites a predictable method to call to construct a new `LockfileMetadata` for
writing, while still allowing us to support _reading_ older, deprecated metadata versions.
"""
return JVMLockfileMetadataV1.from_artifact_requirements(requirements)
@classmethod
def from_lockfile(
cls, lockfile: bytes, lockfile_path: str | None = None, resolve_name: str | None = None
) -> JVMLockfileMetadataV1:
return cast(
JVMLockfileMetadataV1,
LockfileMetadata.from_lockfile_for_scope(
LockfileScope.JVM, lockfile, lockfile_path, resolve_name
),
)
def is_valid_for(
self,
requirements: Iterable[ArtifactRequirement] | None,
) -> LockfileMetadataValidation:
"""Returns Truthy if this `JVMLockfileMetadata` can be used in the current execution
context."""
raise NotImplementedError("call `is_valid_for` on subclasses only")
@_jvm_lockfile_metadata(1)
@dataclass(frozen=True)
class JVMLockfileMetadataV1(JVMLockfileMetadata):
"""Lockfile version that permits specifying a requirements as a set rather than a digest.
Validity is tested by the set of requirements strings being the same in the user requirements as
those in the stored requirements.
"""
requirements: FrozenOrderedSet[str]
@classmethod
def from_artifact_requirements(
cls, requirements: Iterable[ArtifactRequirement]
) -> JVMLockfileMetadataV1:
return cls(FrozenOrderedSet(i.to_metadata_str() for i in requirements))
@classmethod
def _from_json_dict(
cls: type[JVMLockfileMetadataV1],
json_dict: dict[Any, Any],
lockfile_description: str,
error_suffix: str,
) -> JVMLockfileMetadataV1:
metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
requirements = metadata(
"generated_with_requirements",
FrozenOrderedSet[str],
FrozenOrderedSet,
)
return JVMLockfileMetadataV1(requirements)
@classmethod
def additional_header_attrs(cls, instance: LockfileMetadata) -> dict[Any, Any]:
instance = cast(JVMLockfileMetadataV1, instance)
return {
"generated_with_requirements": (
sorted(instance.requirements) if instance.requirements is not None else None
)
}
def is_valid_for(
self,
requirements: Iterable[ArtifactRequirement] | None,
) -> LockfileMetadataValidation:
"""Returns a truthy object if the request requirements match the metadata requirements.
For this version, "match" is defined as the request requirements being a non-strict subset
of the metadata requirements.
"""
failure_reasons: set[InvalidJVMLockfileReason] = set()
if not self.requirements.issuperset(i.to_metadata_str() for i in requirements or []):
failure_reasons.add(InvalidJVMLockfileReason.REQUIREMENTS_MISMATCH)
return LockfileMetadataValidation(failure_reasons)
| [
"pants.core.util_rules.lockfile_metadata.LockfileMetadataValidation",
"dataclasses.dataclass",
"pants.core.util_rules.lockfile_metadata.lockfile_metadata_registrar",
"pants.core.util_rules.lockfile_metadata.LockfileMetadata.from_lockfile_for_scope",
"pants.core.util_rules.lockfile_metadata._get_metadata",
"typing.cast"
] | [((580, 626), 'pants.core.util_rules.lockfile_metadata.lockfile_metadata_registrar', 'lockfile_metadata_registrar', (['LockfileScope.JVM'], {}), '(LockfileScope.JVM)\n', (607, 626), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n'), ((722, 744), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (731, 744), False, 'from dataclasses import dataclass\n'), ((2166, 2188), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2175, 2188), False, 'from dataclasses import dataclass\n'), ((2978, 3038), 'pants.core.util_rules.lockfile_metadata._get_metadata', '_get_metadata', (['json_dict', 'lockfile_description', 'error_suffix'], {}), '(json_dict, lockfile_description, error_suffix)\n', (2991, 3038), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n'), ((3364, 3401), 'typing.cast', 'cast', (['JVMLockfileMetadataV1', 'instance'], {}), '(JVMLockfileMetadataV1, instance)\n', (3368, 3401), False, 'from typing import Any, Iterable, cast\n'), ((4216, 4259), 'pants.core.util_rules.lockfile_metadata.LockfileMetadataValidation', 'LockfileMetadataValidation', (['failure_reasons'], {}), '(failure_reasons)\n', (4242, 4259), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n'), ((1672, 1774), 'pants.core.util_rules.lockfile_metadata.LockfileMetadata.from_lockfile_for_scope', 'LockfileMetadata.from_lockfile_for_scope', (['LockfileScope.JVM', 'lockfile', 'lockfile_path', 'resolve_name'], {}), '(LockfileScope.JVM, lockfile,\n lockfile_path, resolve_name)\n', (1712, 1774), False, 'from pants.core.util_rules.lockfile_metadata import LockfileMetadata, LockfileMetadataValidation, LockfileScope, _get_metadata, lockfile_metadata_registrar\n')] |
#!/usr/bin/env python3
import os
import re
cur_path = os.path.dirname(os.path.realpath(__file__))
opendbc_root = os.path.join(cur_path, '../')
include_pattern = re.compile(r'CM_ "IMPORT (.*?)";')
def read_dbc(src_dir, filename):
with open(os.path.join(src_dir, filename)) as file_in:
return file_in.read()
def create_dbc(src_dir, filename, output_path):
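  """Inline every DBC file referenced by a 'CM_ "IMPORT ...";' directive and write <name>_generated.dbc."""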
dbc_file_in = read_dbc(src_dir, filename)
includes = include_pattern.findall(dbc_file_in)
output_filename = filename.replace('.dbc', '_generated.dbc')
output_file_location = os.path.join(output_path, output_filename)
with open(output_file_location, 'w') as dbc_file_out:
dbc_file_out.write('CM_ "AUTOGENERATED FILE, DO NOT EDIT";\n')
for include_filename in includes:
include_file_header = '\n\nCM_ "Imported file %s starts here";\n' % include_filename
dbc_file_out.write(include_file_header)
include_file = read_dbc(src_dir, include_filename)
dbc_file_out.write(include_file)
dbc_file_out.write('\nCM_ "%s starts here";\n' % filename)
core_dbc = include_pattern.sub('', dbc_file_in)
dbc_file_out.write(core_dbc)
def create_all(output_path):
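  """Walk the subdirectories and generate an output DBC for every .dbc file not starting with '_'."""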
for src_dir, _, filenames in os.walk(cur_path):
if src_dir == cur_path:
continue
#print(src_dir)
for filename in filenames:
if filename.startswith('_') or not filename.endswith('.dbc'):
continue
#print(filename)
create_dbc(src_dir, filename, output_path)
if __name__ == "__main__":
create_all(opendbc_root)
| [
"os.path.realpath",
"os.path.join",
"os.walk",
"re.compile"
] | [((114, 143), 'os.path.join', 'os.path.join', (['cur_path', '"""../"""'], {}), "(cur_path, '../')\n", (126, 143), False, 'import os\n'), ((162, 195), 're.compile', 're.compile', (['"""CM_ "IMPORT (.*?)";"""'], {}), '(\'CM_ "IMPORT (.*?)";\')\n', (172, 195), False, 'import re\n'), ((71, 97), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (87, 97), False, 'import os\n'), ((549, 591), 'os.path.join', 'os.path.join', (['output_path', 'output_filename'], {}), '(output_path, output_filename)\n', (561, 591), False, 'import os\n'), ((1201, 1218), 'os.walk', 'os.walk', (['cur_path'], {}), '(cur_path)\n', (1208, 1218), False, 'import os\n'), ((244, 275), 'os.path.join', 'os.path.join', (['src_dir', 'filename'], {}), '(src_dir, filename)\n', (256, 275), False, 'import os\n')] |
from django.contrib import admin
from django.utils.safestring import mark_safe
from customer.models import Owner, Dog, Breed, SubBreed
class OwnerAdmin(admin.ModelAdmin):
"""
Owner ModelAdmin.
"""
search_fields = ['name']
class BreedAdmin(admin.ModelAdmin):
"""
Breed ModelAdmin.
"""
search_fields = ['name']
class SubBreedAdmin(admin.ModelAdmin):
"""
SubBreed ModelAdmin.
"""
search_fields = ['name', 'breed__name']
autocomplete_fields = ['breed']
list_display = ['name', 'breed']
class DogAdmin(admin.ModelAdmin):
"""
Dog ModelAdmin.
"""
search_fields = ['name', 'owner__name']
autocomplete_fields = ['owner', 'breed', 'sub_breed']
list_display = ['name', 'owner', 'breed', 'sub_breed', 'img_photo']
def img_photo(self, obj):
"""
Render the dog's photo.
"""
return mark_safe('<img src="%s" width="70">' % obj.photo.url)
admin.site.register(Dog, DogAdmin)
admin.site.register(Owner, OwnerAdmin)
admin.site.register(Breed, BreedAdmin)
admin.site.register(SubBreed, SubBreedAdmin) | [
"django.contrib.admin.site.register",
"django.utils.safestring.mark_safe"
] | [((948, 982), 'django.contrib.admin.site.register', 'admin.site.register', (['Dog', 'DogAdmin'], {}), '(Dog, DogAdmin)\n', (967, 982), False, 'from django.contrib import admin\n'), ((983, 1021), 'django.contrib.admin.site.register', 'admin.site.register', (['Owner', 'OwnerAdmin'], {}), '(Owner, OwnerAdmin)\n', (1002, 1021), False, 'from django.contrib import admin\n'), ((1022, 1060), 'django.contrib.admin.site.register', 'admin.site.register', (['Breed', 'BreedAdmin'], {}), '(Breed, BreedAdmin)\n', (1041, 1060), False, 'from django.contrib import admin\n'), ((1061, 1105), 'django.contrib.admin.site.register', 'admin.site.register', (['SubBreed', 'SubBreedAdmin'], {}), '(SubBreed, SubBreedAdmin)\n', (1080, 1105), False, 'from django.contrib import admin\n'), ((891, 945), 'django.utils.safestring.mark_safe', 'mark_safe', (['(\'<img src="%s" width="70">\' % obj.photo.url)'], {}), '(\'<img src="%s" width="70">\' % obj.photo.url)\n', (900, 945), False, 'from django.utils.safestring import mark_safe\n')] |
from __future__ import absolute_import
from redis import Redis
from rq.decorators import job
from kaneda.utils import get_backend
backend = get_backend()
@job(queue='kaneda', connection=Redis())
def report(name, metric, value, tags, id_):
"""
RQ job to report metrics to the configured backend in kanedasettings.py
To run the worker execute this command:
rqworker [queue]
"""
return backend.report(name, metric, value, tags, id_)
| [
"kaneda.utils.get_backend",
"redis.Redis"
] | [((143, 156), 'kaneda.utils.get_backend', 'get_backend', ([], {}), '()\n', (154, 156), False, 'from kaneda.utils import get_backend\n'), ((191, 198), 'redis.Redis', 'Redis', ([], {}), '()\n', (196, 198), False, 'from redis import Redis\n')] |
from dotenv import load_dotenv
from PyPDF2 import PdfFileReader, PdfFileWriter
import os
import json
class CertRipper:
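    """Splits a master certificate PDF into one PDF per recipient, driven by a JSON points file."""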
def __init__(
self,
start_page_index=0,
master_pdf_path=None,
json_points_path=None,
ripped_certs_path=None,
ripped_cert_file_name=None,
):
self.start_page_index = start_page_index
self.master_pdf_path = master_pdf_path
self.pdf = PdfFileReader(master_pdf_path)
self.pdf_length = self.pdf.getNumPages()
self.json_points_path = json_points_path
self.ripped_certs_path = ripped_certs_path
self.ripped_cert_file_name = ripped_cert_file_name
def process(self):
recipient_groups = self.get_recipient_groups_from_points()
self.extract_pdf_from_master(recipient_groups)
def extract_pdf_from_master(self, recipient_groups):
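        """Write each master-PDF page to its own file, named from the page index, group tag and recipient slug."""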
current_page_index = self.start_page_index
process_index = 0
for recipient_group in recipient_groups:
recipient_group_name = recipient_group["name"]
recipient_group_tag = recipient_group["tag"]
recipient_slugs = recipient_group["recipient_slugs"]
print(
f"[*] Ripping \x1b[93m{recipient_group_name}\x1b[0m group ...")
for recipient_slug in recipient_slugs:
page = self.pdf.getPage(current_page_index)
file_name = self.ripped_cert_file_name.format(
index=current_page_index + 1,
tag=recipient_group_tag,
recipient=recipient_slug
)
pdf_writer = PdfFileWriter()
pdf_writer.addPage(page)
output_file_name = f"{self.ripped_certs_path}\\{file_name}.pdf"
with open(output_file_name, "wb") as out:
pdf_writer.write(out)
print(
f"\x1b[95m[{process_index}]\x1b[0m Ripped \x1b[92m[{file_name}]\x1b[0m from \x1b[94mpage {current_page_index + 1}\x1b[0m of master")
current_page_index += 1
process_index += 1
def get_recipient_groups_from_points(self):
recipient_groups = []
total_recipients = 0
with open(self.json_points_path, "r") as json_file:
points = json.load(json_file)
for point in points:
point_name = point["name"]
point_tag = point["tag"]
point_recipients = point["recipients"]
point_recipient_slugs = []
for point_recipient in point_recipients:
recipient_name = point_recipient["name"]
recipient_name_slug = "_".join(recipient_name.split())
point_recipient_slugs.append(recipient_name_slug)
total_recipients += 1
recipient_groups.append({
"name": point_name,
"tag": point_tag,
"recipient_slugs": point_recipient_slugs
})
total_groups = len(recipient_groups)
self.__check_pdf_length(total_recipients)
print(
f"Read \x1b[95m{total_groups} groups(s)\x1b[0m and \x1b[95m{total_recipients} recipient(s)\x1b[0m from JSON points")
return recipient_groups
def __check_pdf_length(self, recipients_length):
pdf_length = self.pdf_length - (self.start_page_index)
if pdf_length != recipients_length:
raise ValueError(
f"Number of recipients ({recipients_length}) does not match with PDF length ({pdf_length})"
)
if __name__ == "__main__":
load_dotenv()
ripper = CertRipper(
        start_page_index=int(os.getenv("START_PAGE_INDEX", 0)),  # getenv returns a string; page arithmetic needs an int
master_pdf_path=os.getenv("MASTER_PDF_PATH"),
json_points_path=os.getenv("JSON_POINTS_PATH"),
ripped_certs_path=os.getenv("RIPPED_CERTS_PATH"),
ripped_cert_file_name=os.getenv("RIPPED_CERT_FILE_NAME"),
)
ripper.process()
| [
"os.getenv",
"PyPDF2.PdfFileWriter",
"dotenv.load_dotenv",
"json.load",
"PyPDF2.PdfFileReader"
] | [((3709, 3722), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (3720, 3722), False, 'from dotenv import load_dotenv\n'), ((432, 462), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['master_pdf_path'], {}), '(master_pdf_path)\n', (445, 462), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n'), ((2328, 2348), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (2337, 2348), False, 'import json\n'), ((3774, 3803), 'os.getenv', 'os.getenv', (['"""START_PAGE_INDEX"""'], {}), "('START_PAGE_INDEX')\n", (3783, 3803), False, 'import os\n'), ((3829, 3857), 'os.getenv', 'os.getenv', (['"""MASTER_PDF_PATH"""'], {}), "('MASTER_PDF_PATH')\n", (3838, 3857), False, 'import os\n'), ((3884, 3913), 'os.getenv', 'os.getenv', (['"""JSON_POINTS_PATH"""'], {}), "('JSON_POINTS_PATH')\n", (3893, 3913), False, 'import os\n'), ((3941, 3971), 'os.getenv', 'os.getenv', (['"""RIPPED_CERTS_PATH"""'], {}), "('RIPPED_CERTS_PATH')\n", (3950, 3971), False, 'import os\n'), ((4003, 4037), 'os.getenv', 'os.getenv', (['"""RIPPED_CERT_FILE_NAME"""'], {}), "('RIPPED_CERT_FILE_NAME')\n", (4012, 4037), False, 'import os\n'), ((1647, 1662), 'PyPDF2.PdfFileWriter', 'PdfFileWriter', ([], {}), '()\n', (1660, 1662), False, 'from PyPDF2 import PdfFileReader, PdfFileWriter\n')] |
#
# Licensed Materials - Property of IBM
#
# (c) Copyright IBM Corp. 2007-2008
#
import unittest, sys
import ibm_db
import config
from testfunctions import IbmDbTestFunctions
class IbmDbTestCase(unittest.TestCase):
def test_111_FieldNumAddCol(self):
obj = IbmDbTestFunctions()
obj.assert_expect(self.run_test_111)
def run_test_111(self):
conn = ibm_db.connect(config.database, config.user, config.password)
server = ibm_db.server_info( conn )
if conn:
ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
insert = "INSERT INTO animals values (7, 'cat', 'Benji', 5.1)"
ibm_db.exec_immediate(conn, insert)
stmt = ibm_db.exec_immediate(conn, "SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed")
if (server.DBMS_NAME[0:3] == 'IDS'):
num1 = ibm_db.field_num(stmt, "id")
num2 = ibm_db.field_num(stmt, "breed")
num3 = ibm_db.field_num(stmt, "number")
num4 = ibm_db.field_num(stmt, "NUMBER")
num5 = ibm_db.field_num(stmt, "bREED")
num6 = ibm_db.field_num(stmt, 8)
num7 = ibm_db.field_num(stmt, 1)
num8 = ibm_db.field_num(stmt, "WEIGHT")
else:
num1 = ibm_db.field_num(stmt, "ID")
num2 = ibm_db.field_num(stmt, "BREED")
num3 = ibm_db.field_num(stmt, "NUMBER")
num4 = ibm_db.field_num(stmt, "number")
num5 = ibm_db.field_num(stmt, "Breed")
num6 = ibm_db.field_num(stmt, 8)
num7 = ibm_db.field_num(stmt, 1)
num8 = ibm_db.field_num(stmt, "weight")
print("%s" % num1)
print("int(%d)" % num2)
print("int(%d)" % num3)
print("%s" % num4)
print("%s" % num5)
print("%s" % num6)
print("int(%d)" % num7)
print("%s" % num8)
ibm_db.rollback(conn)
else:
print("Connection failed.")
#__END__
#__LUW_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__ZOS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__SYSTEMI_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__IDS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
| [
"ibm_db.connect",
"ibm_db.autocommit",
"ibm_db.exec_immediate",
"ibm_db.field_num",
"ibm_db.server_info",
"testfunctions.IbmDbTestFunctions",
"ibm_db.rollback"
] | [((268, 288), 'testfunctions.IbmDbTestFunctions', 'IbmDbTestFunctions', ([], {}), '()\n', (286, 288), False, 'from testfunctions import IbmDbTestFunctions\n'), ((368, 429), 'ibm_db.connect', 'ibm_db.connect', (['config.database', 'config.user', 'config.password'], {}), '(config.database, config.user, config.password)\n', (382, 429), False, 'import ibm_db\n'), ((443, 467), 'ibm_db.server_info', 'ibm_db.server_info', (['conn'], {}), '(conn)\n', (461, 467), False, 'import ibm_db\n'), ((490, 540), 'ibm_db.autocommit', 'ibm_db.autocommit', (['conn', 'ibm_db.SQL_AUTOCOMMIT_OFF'], {}), '(conn, ibm_db.SQL_AUTOCOMMIT_OFF)\n', (507, 540), False, 'import ibm_db\n'), ((617, 652), 'ibm_db.exec_immediate', 'ibm_db.exec_immediate', (['conn', 'insert'], {}), '(conn, insert)\n', (638, 652), False, 'import ibm_db\n'), ((673, 792), 'ibm_db.exec_immediate', 'ibm_db.exec_immediate', (['conn', '"""SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed"""'], {}), "(conn,\n 'SELECT breed, COUNT(breed) AS number FROM animals GROUP BY breed ORDER BY breed'\n )\n", (694, 792), False, 'import ibm_db\n'), ((1804, 1825), 'ibm_db.rollback', 'ibm_db.rollback', (['conn'], {}), '(conn)\n', (1819, 1825), False, 'import ibm_db\n'), ((847, 875), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""id"""'], {}), "(stmt, 'id')\n", (863, 875), False, 'import ibm_db\n'), ((891, 922), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""breed"""'], {}), "(stmt, 'breed')\n", (907, 922), False, 'import ibm_db\n'), ((938, 970), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""number"""'], {}), "(stmt, 'number')\n", (954, 970), False, 'import ibm_db\n'), ((986, 1018), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""NUMBER"""'], {}), "(stmt, 'NUMBER')\n", (1002, 1018), False, 'import ibm_db\n'), ((1034, 1065), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""bREED"""'], {}), "(stmt, 'bREED')\n", (1050, 1065), False, 'import ibm_db\n'), ((1081, 1106), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(8)'], {}), '(stmt, 8)\n', (1097, 1106), False, 'import ibm_db\n'), ((1122, 1147), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(1)'], {}), '(stmt, 1)\n', (1138, 1147), False, 'import ibm_db\n'), ((1163, 1195), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""WEIGHT"""'], {}), "(stmt, 'WEIGHT')\n", (1179, 1195), False, 'import ibm_db\n'), ((1223, 1251), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""ID"""'], {}), "(stmt, 'ID')\n", (1239, 1251), False, 'import ibm_db\n'), ((1267, 1298), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""BREED"""'], {}), "(stmt, 'BREED')\n", (1283, 1298), False, 'import ibm_db\n'), ((1314, 1346), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""NUMBER"""'], {}), "(stmt, 'NUMBER')\n", (1330, 1346), False, 'import ibm_db\n'), ((1362, 1394), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""number"""'], {}), "(stmt, 'number')\n", (1378, 1394), False, 'import ibm_db\n'), ((1410, 1441), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""Breed"""'], {}), "(stmt, 'Breed')\n", (1426, 1441), False, 'import ibm_db\n'), ((1457, 1482), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(8)'], {}), '(stmt, 8)\n', (1473, 1482), False, 'import ibm_db\n'), ((1498, 1523), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '(1)'], {}), '(stmt, 1)\n', (1514, 1523), False, 'import ibm_db\n'), ((1539, 1571), 'ibm_db.field_num', 'ibm_db.field_num', (['stmt', '"""weight"""'], {}), "(stmt, 'weight')\n", (1555, 1571), False, 'import ibm_db\n')] |
"""Code for checking and inferring types."""
import collections
import logging
import re
import subprocess
from typing import Any, Dict, Union
from pytype import abstract
from pytype import abstract_utils
from pytype import convert_structural
from pytype import debug
from pytype import function
from pytype import metrics
from pytype import output
from pytype import special_builtins
from pytype import state as frame_state
from pytype import vm
from pytype.overlays import typing_overlay
from pytype.pytd import builtins
from pytype.pytd import escape
from pytype.pytd import optimize
from pytype.pytd import pytd
from pytype.pytd import pytd_utils
from pytype.pytd import visitors
from pytype.typegraph import cfg
log = logging.getLogger(__name__)
# Most interpreter functions (including lambdas) need to be analyzed as
# stand-alone functions. The exceptions are comprehensions and generators, which
# have names like "<listcomp>" and "<genexpr>".
_SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$")
CallRecord = collections.namedtuple(
"CallRecord", ["node", "function", "signatures", "positional_arguments",
"keyword_arguments", "return_value"])
# How deep to follow call chains:
INIT_MAXIMUM_DEPTH = 4 # during module loading
MAXIMUM_DEPTH = 3 # during non-quick analysis
QUICK_CHECK_MAXIMUM_DEPTH = 2 # during quick checking
QUICK_INFER_MAXIMUM_DEPTH = 1 # during quick inference
class _Initializing:
pass
class CallTracer(vm.VirtualMachine):
"""Virtual machine that records all function calls.
Attributes:
exitpoint: A CFG node representing the program exit. Needs to be set before
analyze_types.
"""
_CONSTRUCTORS = ("__new__", "__init__")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._unknowns = {}
self._calls = set()
self._method_calls = set()
# Used by init_class.
self._instance_cache: Dict[Any, Union[_Initializing, cfg.Variable]] = {}
# Used by call_init. Can differ from _instance_cache because we also call
# __init__ on classes not initialized via init_class.
self._initialized_instances = set()
self._interpreter_functions = []
self._interpreter_classes = []
self._analyzed_functions = set()
self._analyzed_classes = set()
self._generated_classes = {}
self.exitpoint = None
def create_varargs(self, node):
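    """Create a dummy *args tuple whose element type is a new unknown."""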
value = abstract.Instance(self.convert.tuple_type, self)
value.merge_instance_type_parameter(
node, abstract_utils.T, self.convert.create_new_unknown(node))
return value.to_variable(node)
def create_kwargs(self, node):
key_type = self.convert.primitive_class_instances[str].to_variable(node)
value_type = self.convert.create_new_unknown(node)
kwargs = abstract.Instance(self.convert.dict_type, self)
kwargs.merge_instance_type_parameter(node, abstract_utils.K, key_type)
kwargs.merge_instance_type_parameter(node, abstract_utils.V, value_type)
return kwargs.to_variable(node)
def create_method_arguments(self, node, method, use_defaults=False):
"""Create arguments for the given method.
Creates Unknown objects as arguments for the given method. Note that we
don't need to take parameter annotations into account as
InterpreterFunction.call() will take care of that.
Args:
node: The current node.
method: An abstract.InterpreterFunction.
use_defaults: Whether to use parameter defaults for arguments. When True,
unknown arguments are created with force=False, as it is fine to use
Unsolvable rather than Unknown objects for type-checking defaults.
Returns:
A tuple of a node and a function.Args object.
"""
args = []
num_posargs = method.argcount(node)
num_posargs_no_default = num_posargs - len(method.defaults)
for i in range(num_posargs):
default_idx = i - num_posargs_no_default
if use_defaults and default_idx >= 0:
arg = method.defaults[default_idx]
else:
arg = self.convert.create_new_unknown(node, force=not use_defaults)
args.append(arg)
kws = {}
for key in method.signature.kwonly_params:
if use_defaults and key in method.kw_defaults:
kws[key] = method.kw_defaults[key]
else:
kws[key] = self.convert.create_new_unknown(node, force=not use_defaults)
starargs = self.create_varargs(node) if method.has_varargs() else None
starstarargs = self.create_kwargs(node) if method.has_kwargs() else None
return node, function.Args(posargs=tuple(args),
namedargs=kws,
starargs=starargs,
starstarargs=starstarargs)
def call_function_with_args(self, node, val, args):
"""Call a function.
Args:
node: The given node.
val: A cfg.Binding containing the function.
args: A function.Args object.
Returns:
A tuple of (1) a node and (2) a cfg.Variable of the return value.
"""
fvar = val.AssignToNewVariable(node)
with val.data.record_calls():
new_node, ret = self.call_function_in_frame(node, fvar, *args)
return new_node, ret
def call_function_in_frame(self, node, var, args, kwargs,
starargs, starstarargs):
frame = frame_state.SimpleFrame(node=node)
self.push_frame(frame)
log.info("Analyzing %r", [v.name for v in var.data])
state = frame_state.FrameState.init(node, self)
state, ret = self.call_function_with_state(
state, var, args, kwargs, starargs, starstarargs)
self.pop_frame(frame)
return state.node, ret
def _maybe_fix_classmethod_cls_arg(self, node, cls, func, args):
sig = func.signature
if (args.posargs and sig.param_names and
(sig.param_names[0] not in sig.annotations)):
# fix "cls" parameter
return args._replace(
posargs=(cls.AssignToNewVariable(node),) + args.posargs[1:])
else:
return args
def maybe_analyze_method(self, node, val, cls=None):
method = val.data
fname = val.data.name
if isinstance(method, abstract.INTERPRETER_FUNCTION_TYPES):
self._analyzed_functions.add(method.get_first_opcode())
if (not self.options.analyze_annotated and
(method.signature.has_return_annotation or method.has_overloads) and
fname.rsplit(".", 1)[-1] not in self._CONSTRUCTORS):
log.info("%r has annotations, not analyzing further.", fname)
else:
for f in method.iter_signature_functions():
node, args = self.create_method_arguments(node, f)
if f.is_classmethod and cls:
args = self._maybe_fix_classmethod_cls_arg(node, cls, f, args)
node, _ = self.call_function_with_args(node, val, args)
return node
def _call_with_fake_args(self, node0, funcv):
"""Attempt to call the given function with made-up arguments."""
    # TODO(tsudol): If this is expanded beyond __init__, we need to handle
    # DictKeyMissing.
nodes = []
rets = []
for funcb in funcv.bindings:
func = funcb.data
log.info("Trying %s with fake arguments", func)
if isinstance(func, abstract.INTERPRETER_FUNCTION_TYPES):
node1, args = self.create_method_arguments(node0, func)
# Once the args are generated, try calling the function.
# call_function will check fallback_to_unsolvable if a DictKeyMissing or
# FailedFunctionCall error is raised when the target function is called.
# DictKeyMissing doesn't trigger call_with_fake_args, so that shouldn't
# be raised again, and generating fake arguments should avoid any
# FailedFunctionCall errors. To prevent an infinite recursion loop, set
# fallback_to_unsolvable to False just in case.
# This means any additional errors that may be raised will be passed to
# the call_function that called this method in the first place.
node2, ret = self.call_function(node1,
funcb.AssignToNewVariable(),
args,
fallback_to_unsolvable=False)
nodes.append(node2)
rets.append(ret)
if nodes:
ret = self.join_variables(node0, rets)
node = self.join_cfg_nodes(nodes)
if ret.bindings:
return node, ret
else:
node = node0
log.info("Unable to generate fake arguments for %s", funcv)
return node, self.new_unsolvable(node)
def analyze_method_var(self, node0, name, var, cls=None):
log.info("Analyzing %s", name)
node1 = node0.ConnectNew(name)
for val in var.bindings:
node2 = self.maybe_analyze_method(node1, val, cls)
node2.ConnectTo(node0)
return node0
def bind_method(self, node, name, methodvar, instance_var):
bound = self.program.NewVariable()
for m in methodvar.Data(node):
if isinstance(m, special_builtins.ClassMethodInstance):
m = m.func.data[0]
is_cls = True
else:
is_cls = (m.isinstance_InterpreterFunction() and m.is_classmethod)
bound.AddBinding(m.property_get(instance_var, is_cls), [], node)
return bound
def _instantiate_binding(self, node0, cls, container):
"""Instantiate a class binding."""
node1, new = cls.data.get_own_new(node0, cls)
if not new or (
any(not isinstance(f, abstract.InterpreterFunction) for f in new.data)):
# This assumes that any inherited __new__ method defined in a pyi file
# returns an instance of the current class.
return node0, cls.data.instantiate(node0, container=container)
instance = self.program.NewVariable()
nodes = []
for b in new.bindings:
self._analyzed_functions.add(b.data.get_first_opcode())
node2, args = self.create_method_arguments(node1, b.data)
args = self._maybe_fix_classmethod_cls_arg(node0, cls, b.data, args)
node3 = node2.ConnectNew()
node4, ret = self.call_function_with_args(node3, b, args)
instance.PasteVariable(ret)
nodes.append(node4)
return self.join_cfg_nodes(nodes), instance
def _instantiate_var(self, node, clsv, container):
"""Build an (dummy) instance from a class, for analyzing it."""
n = self.program.NewVariable()
for cls in clsv.Bindings(node, strict=False):
node, var = self._instantiate_binding(node, cls, container)
n.PasteVariable(var)
return node, n
def _mark_maybe_missing_members(self, values):
"""Set maybe_missing_members to True on these values and their type params.
Args:
values: A list of BaseValue objects. On every instance among
the values, recursively set maybe_missing_members to True on the
instance and its type parameters.
"""
values = list(values)
seen = set()
while values:
v = values.pop(0)
if v not in seen:
seen.add(v)
if isinstance(v, abstract.SimpleValue):
v.maybe_missing_members = True
for child in v.instance_type_parameters.values():
values.extend(child.data)
def init_class(self, node, cls, container=None, extra_key=None):
"""Instantiate a class, and also call __init__.
Calling __init__ can be expensive, so this method caches its created
instances. If you don't need __init__ called, use cls.instantiate instead.
Args:
node: The current node.
cls: The class to instantiate.
container: Optionally, a container to pass to the class's instantiate()
method, so that type parameters in the container's template are
instantiated to TypeParameterInstance.
extra_key: Optionally, extra information about the location at which the
        instantiation occurs. By default, this method keys on the current opcode
and the class, which sometimes isn't enough to disambiguate callers
that shouldn't get back the same cached instance.
Returns:
A tuple of node and instance variable.
"""
key = (self.frame and self.frame.current_opcode, extra_key, cls)
instance = self._instance_cache.get(key)
if not instance or isinstance(instance, _Initializing):
clsvar = cls.to_variable(node)
node, instance = self._instantiate_var(node, clsvar, container)
if key in self._instance_cache:
# We've encountered a recursive pattern such as
# class A:
# def __init__(self, x: "A"): ...
# Calling __init__ again would lead to an infinite loop, so
# we instead create an incomplete instance that will be
# overwritten later. Note that we have to create a new
# instance rather than using the one that we're already in
# the process of initializing - otherwise, setting
# maybe_missing_members to True would cause pytype to ignore
# all attribute errors on self in __init__.
self._mark_maybe_missing_members(instance.data)
else:
self._instance_cache[key] = _Initializing()
node = self.call_init(node, instance)
self._instance_cache[key] = instance
return node, instance
def _call_method(self, node, binding, method_name):
node, method = self.attribute_handler.get_attribute(
node, binding.data.get_class(), method_name, binding)
if method:
bound_method = self.bind_method(
node, method_name, method, binding.AssignToNewVariable())
node = self.analyze_method_var(node, method_name, bound_method)
return node
def _call_init_on_binding(self, node, b):
if isinstance(b.data, abstract.SimpleValue):
for param in b.data.instance_type_parameters.values():
node = self.call_init(node, param)
node = self._call_method(node, b, "__init__")
cls = b.data.get_class()
if isinstance(cls, abstract.InterpreterClass):
      # Call any additional initializers the class has registered.
for method in cls.additional_init_methods:
node = self._call_method(node, b, method)
return node
def call_init(self, node, instance):
# Call __init__ on each binding.
for b in instance.bindings:
if b.data in self._initialized_instances:
continue
self._initialized_instances.add(b.data)
node = self._call_init_on_binding(node, b)
return node
def reinitialize_if_initialized(self, node, instance):
if instance in self._initialized_instances:
self._call_init_on_binding(node, instance.to_binding(node))
def analyze_class(self, node, val):
self._analyzed_classes.add(val.data)
node, instance = self.init_class(node, val.data)
good_instances = [b for b in instance.bindings if val.data == b.data.cls]
if not good_instances:
# __new__ returned something that's not an instance of our class.
instance = val.data.instantiate(node)
node = self.call_init(node, instance)
elif len(good_instances) != len(instance.bindings):
# __new__ returned some extra possibilities we don't need.
instance = self.join_bindings(node, good_instances)
for instance_value in instance.data:
val.data.register_canonical_instance(instance_value)
for name, methodvar in sorted(val.data.members.items()):
if name in self._CONSTRUCTORS:
continue # We already called this method during initialization.
b = self.bind_method(node, name, methodvar, instance)
node = self.analyze_method_var(node, name, b, val)
return node
def analyze_function(self, node0, val):
if val.data.is_attribute_of_class:
# We'll analyze this function as part of a class.
log.info("Analyze functions: Skipping class method %s", val.data.name)
else:
node1 = node0.ConnectNew(val.data.name)
node2 = self.maybe_analyze_method(node1, val)
node2.ConnectTo(node0)
return node0
def _should_analyze_as_interpreter_function(self, data):
# We record analyzed functions by opcode rather than function object. The
# two ways of recording are equivalent except for closures, which are
# re-generated when the variables they close over change, but we don't want
# to re-analyze them.
return (isinstance(data, abstract.InterpreterFunction) and
not data.is_overload and
not data.is_class_builder and
data.get_first_opcode() not in self._analyzed_functions and
not _SKIP_FUNCTION_RE.search(data.name))
def analyze_toplevel(self, node, defs):
    for name, var in sorted(defs.items()):  # sort, for determinism
if not self._is_typing_member(name, var):
for value in var.bindings:
if isinstance(value.data, abstract.InterpreterClass):
new_node = self.analyze_class(node, value)
elif (isinstance(value.data, abstract.INTERPRETER_FUNCTION_TYPES) and
not value.data.is_overload):
new_node = self.analyze_function(node, value)
else:
continue
if new_node is not node:
new_node.ConnectTo(node)
# Now go through all functions and classes we haven't analyzed yet.
# These are typically hidden under a decorator.
# Go through classes first so that the `is_attribute_of_class` will
# be set for all functions in class.
for c in self._interpreter_classes:
for value in c.bindings:
if (isinstance(value.data, abstract.InterpreterClass) and
value.data not in self._analyzed_classes):
node = self.analyze_class(node, value)
for f in self._interpreter_functions:
for value in f.bindings:
if self._should_analyze_as_interpreter_function(value.data):
node = self.analyze_function(node, value)
return node
def analyze(self, node, defs, maximum_depth):
assert not self.frame
self.maximum_depth = maximum_depth
self._analyzing = True
node = node.ConnectNew(name="Analyze")
return self.analyze_toplevel(node, defs)
def trace_unknown(self, name, unknown_binding):
self._unknowns[name] = unknown_binding
def trace_call(self, node, func, sigs, posargs, namedargs, result):
"""Add an entry into the call trace.
Args:
node: The CFG node right after this function call.
func: A cfg.Binding of a function that was called.
sigs: The signatures that the function might have been called with.
posargs: The positional arguments, an iterable over cfg.Value.
namedargs: The keyword arguments, a dict mapping str to cfg.Value.
result: A Variable of the possible result values.
"""
log.debug("Logging call to %r with %d args, return %r",
func, len(posargs), result)
args = tuple(posargs)
kwargs = tuple((namedargs or {}).items())
record = CallRecord(node, func, sigs, args, kwargs, result)
if isinstance(func.data, abstract.BoundPyTDFunction):
self._method_calls.add(record)
elif isinstance(func.data, abstract.PyTDFunction):
self._calls.add(record)
def trace_functiondef(self, f):
self._interpreter_functions.append(f)
def trace_classdef(self, c):
self._interpreter_classes.append(c)
def trace_namedtuple(self, nt):
# All namedtuple instances with the same name are equal, so it's fine to
# overwrite previous instances.
self._generated_classes[nt.name] = nt
def pytd_classes_for_unknowns(self):
classes = []
for name, val in self._unknowns.items():
if val in val.variable.Filter(self.exitpoint, strict=False):
classes.append(val.data.to_structural_def(self.exitpoint, name))
return classes
def pytd_for_types(self, defs):
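    """Convert the module's top-level definitions into pytd constants and definitions."""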
# If a variable is annotated, we'll always output that type.
annotated_names = set()
data = []
pytd_convert = self.convert.pytd_convert
annots = abstract_utils.get_annotations_dict(defs)
for name, t in pytd_convert.annotations_to_instance_types(
self.exitpoint, annots):
annotated_names.add(name)
data.append(pytd.Constant(name, t))
for name, var in defs.items():
if (name in output.TOP_LEVEL_IGNORE or name in annotated_names or
self._is_typing_member(name, var)):
continue
options = var.FilteredData(self.exitpoint, strict=False)
if (len(options) > 1 and
not all(isinstance(o, abstract.FUNCTION_TYPES) for o in options)):
# It's ambiguous whether this is a type, a function or something
# else, so encode it as a constant.
combined_types = pytd_utils.JoinTypes(t.to_type(self.exitpoint)
for t in options)
data.append(pytd.Constant(name, combined_types))
elif options:
for option in options:
try:
d = option.to_pytd_def(self.exitpoint, name) # Deep definition
except NotImplementedError:
d = option.to_type(self.exitpoint) # Type only
if isinstance(d, pytd.NothingType):
if isinstance(option, abstract.Empty):
d = pytd.AnythingType()
else:
assert isinstance(option, typing_overlay.NoReturn)
if isinstance(d, pytd.Type) and not isinstance(d, pytd.TypeParameter):
data.append(pytd.Constant(name, d))
else:
data.append(d)
else:
log.error("No visible options for %s", name)
data.append(pytd.Constant(name, pytd.AnythingType()))
return pytd_utils.WrapTypeDeclUnit("inferred", data)
@staticmethod
def _call_traces_to_function(call_traces, name_transform=lambda x: x):
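    """Group recorded call traces by function name and build pytd signatures from the observed arguments."""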
funcs = collections.defaultdict(pytd_utils.OrderedSet)
for node, func, sigs, args, kws, retvar in call_traces:
# The lengths may be different in the presence of optional and kw args.
arg_names = max((sig.get_positional_names() for sig in sigs), key=len)
for i in range(len(arg_names)):
if not isinstance(func.data, abstract.BoundFunction) or i > 0:
arg_names[i] = function.argname(i)
arg_types = (a.data.to_type(node) for a in args)
ret = pytd_utils.JoinTypes(t.to_type(node) for t in retvar.data)
starargs = None
starstarargs = None
funcs[func.data.name].add(pytd.Signature(
tuple(pytd.Parameter(n, t, False, False, None)
for n, t in zip(arg_names, arg_types)) +
tuple(pytd.Parameter(name, a.data.to_type(node), False, False, None)
for name, a in kws),
starargs, starstarargs,
ret, exceptions=(), template=()))
functions = []
for name, signatures in funcs.items():
functions.append(pytd.Function(name_transform(name), tuple(signatures),
pytd.MethodTypes.METHOD))
return functions
def _is_typing_member(self, name, var):
for module_name in ("typing", "typing_extensions"):
if module_name not in self.loaded_overlays:
continue
module = self.loaded_overlays[module_name].get_module(name)
if name in module.members and module.members[name].data == var.data:
return True
return False
def pytd_functions_for_call_traces(self):
return self._call_traces_to_function(self._calls, escape.pack_partial)
def pytd_classes_for_call_traces(self):
class_to_records = collections.defaultdict(list)
for call_record in self._method_calls:
args = call_record.positional_arguments
if not any(isinstance(a.data, abstract.Unknown) for a in args):
# We don't need to record call signatures that don't involve
# unknowns - there's nothing to solve for.
continue
cls = args[0].data.get_class()
if isinstance(cls, abstract.PyTDClass):
class_to_records[cls].append(call_record)
classes = []
for cls, call_records in class_to_records.items():
full_name = cls.module + "." + cls.name if cls.module else cls.name
classes.append(pytd.Class(
name=escape.pack_partial(full_name),
metaclass=None,
parents=(pytd.NamedType("builtins.object"),), # not used in solver
methods=tuple(self._call_traces_to_function(call_records)),
constants=(),
classes=(),
decorators=(),
slots=None,
template=(),
))
return classes
def pytd_classes_for_namedtuple_instances(self):
return tuple(v.generate_ast() for v in self._generated_classes.values())
def compute_types(self, defs):
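    """Assemble the inferred pytd module from definitions, unknowns and recorded call traces."""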
classes = (tuple(self.pytd_classes_for_unknowns()) +
tuple(self.pytd_classes_for_call_traces()) +
self.pytd_classes_for_namedtuple_instances())
functions = tuple(self.pytd_functions_for_call_traces())
aliases = () # aliases are instead recorded as constants
ty = pytd_utils.Concat(
self.pytd_for_types(defs),
pytd_utils.CreateModule("unknowns", classes=classes,
functions=functions, aliases=aliases))
ty = ty.Visit(optimize.CombineReturnsAndExceptions())
ty = ty.Visit(optimize.PullInMethodClasses())
ty = ty.Visit(visitors.DefaceUnresolved(
[ty, self.loader.concat_all()], escape.UNKNOWN))
return ty.Visit(visitors.AdjustTypeParameters())
def _check_return(self, node, actual, formal):
if not self.options.report_errors:
return True
views = abstract_utils.get_views([actual], node)
# Check for typevars in the return value first, since bad_matches
# expects not to get any.
bad = [view for view in views
if actual in view and view[actual].data.formal]
if not bad:
bad = self.matcher.bad_matches(actual, formal, node)
if bad:
self.errorlog.bad_return_type(
self.frames, node, formal, actual, bad)
return not bad
def check_types(src, filename, errorlog, options, loader,
deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
maximum_depth=None, **kwargs):
"""Verify the Python code."""
tracer = CallTracer(errorlog=errorlog, options=options,
generate_unknowns=False, loader=loader, **kwargs)
loc, defs = tracer.run_program(src, filename, init_maximum_depth)
snapshotter = metrics.get_metric("memory", metrics.Snapshot)
snapshotter.take_snapshot("analyze:check_types:tracer")
if deep:
if maximum_depth is None:
maximum_depth = (
QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH)
tracer.analyze(loc, defs, maximum_depth=maximum_depth)
snapshotter.take_snapshot("analyze:check_types:post")
_maybe_output_debug(options, tracer.program)
def infer_types(src, errorlog, options, loader,
filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
show_library_calls=False, maximum_depth=None, tracer_vm=None,
**kwargs):
"""Given Python source return its types.
Args:
src: A string containing Python source code.
errorlog: Where error messages go. Instance of errors.ErrorLog.
options: config.Options object
loader: A load_pytd.Loader instance to load PYI information.
filename: Filename of the program we're parsing.
deep: If True, analyze all functions, even the ones not called by the main
execution flow.
init_maximum_depth: Depth of analysis during module loading.
show_library_calls: If True, call traces are kept in the output.
maximum_depth: Depth of the analysis. Default: unlimited.
tracer_vm: An instance of CallTracer, in case the caller wants to
instantiate and retain the vm used for type inference.
**kwargs: Additional parameters to pass to vm.VirtualMachine
Returns:
A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit)
Raises:
AssertionError: In case of a bad parameter combination.
"""
# If the caller has passed in a vm, use that.
if tracer_vm:
assert isinstance(tracer_vm, CallTracer)
tracer = tracer_vm
else:
tracer = CallTracer(errorlog=errorlog, options=options,
generate_unknowns=options.protocols,
store_all_calls=not deep, loader=loader, **kwargs)
loc, defs = tracer.run_program(src, filename, init_maximum_depth)
log.info("===Done running definitions and module-level code===")
snapshotter = metrics.get_metric("memory", metrics.Snapshot)
snapshotter.take_snapshot("analyze:infer_types:tracer")
if deep:
if maximum_depth is None:
if not options.quick:
maximum_depth = MAXIMUM_DEPTH
elif options.analyze_annotated:
# Since there's no point in analyzing annotated functions for inference,
# the presence of this option means that the user wants checking, too.
maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH
else:
maximum_depth = QUICK_INFER_MAXIMUM_DEPTH
tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
else:
tracer.exitpoint = loc
snapshotter.take_snapshot("analyze:infer_types:post")
ast = tracer.compute_types(defs)
ast = tracer.loader.resolve_ast(ast)
if tracer.has_unknown_wildcard_imports or any(
a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS):
if "__getattr__" not in ast:
ast = pytd_utils.Concat(
ast, builtins.GetDefaultAst(options.python_version))
  # If this is merged with the other if statement above, it triggers a
  # "ValueError: Unresolved class" when attempting to load from the protocols file.
if options.protocols:
protocols_pytd = tracer.loader.import_name("protocols")
else:
protocols_pytd = None
builtins_pytd = tracer.loader.concat_all()
# Insert type parameters, where appropriate
ast = ast.Visit(visitors.CreateTypeParametersForSignatures())
if options.protocols:
log.info("=========== PyTD to solve =============\n%s",
pytd_utils.Print(ast))
ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd)
elif not show_library_calls:
log.info("Solving is turned off. Discarding call traces.")
# Rename remaining "~unknown" to "?"
ast = ast.Visit(visitors.RemoveUnknownClasses())
# Remove "~list" etc.:
ast = convert_structural.extract_local(ast)
_maybe_output_debug(options, tracer.program)
return ast, builtins_pytd
def _maybe_output_debug(options, program):
"""Maybe emit debugging output."""
if options.output_cfg or options.output_typegraph:
dot = debug.program_to_dot(program, set([]), bool(options.output_cfg))
svg_file = options.output_cfg or options.output_typegraph
proc = subprocess.Popen(["/usr/bin/dot", "-T", "svg", "-o", svg_file],
stdin=subprocess.PIPE, universal_newlines=True)
(_, stderr) = proc.communicate(dot)
if stderr:
log.info("Failed to create %s: %s", svg_file, stderr)
if options.output_debug:
text = debug.program_to_text(program)
if options.output_debug == "-":
log.info("=========== Program Dump =============\n%s", text)
else:
with options.open_function(options.output_debug, "w") as fi:
fi.write(text)
| [
"logging.getLogger",
"pytype.pytd.pytd.Parameter",
"re.compile",
"pytype.pytd.pytd_utils.Print",
"pytype.function.argname",
"pytype.pytd.pytd.Constant",
"pytype.abstract_utils.get_annotations_dict",
"pytype.pytd.optimize.PullInMethodClasses",
"subprocess.Popen",
"pytype.pytd.pytd_utils.CreateModule",
"pytype.pytd.visitors.RemoveUnknownClasses",
"pytype.pytd.builtins.GetDefaultAst",
"pytype.convert_structural.convert_pytd",
"pytype.pytd.optimize.CombineReturnsAndExceptions",
"pytype.convert_structural.extract_local",
"pytype.pytd.visitors.AdjustTypeParameters",
"collections.namedtuple",
"pytype.state.SimpleFrame",
"pytype.pytd.visitors.CreateTypeParametersForSignatures",
"pytype.state.FrameState.init",
"pytype.pytd.pytd_utils.WrapTypeDeclUnit",
"pytype.debug.program_to_text",
"pytype.pytd.pytd.NamedType",
"pytype.metrics.get_metric",
"pytype.abstract_utils.get_views",
"collections.defaultdict",
"pytype.pytd.escape.pack_partial",
"pytype.pytd.pytd.AnythingType",
"pytype.abstract.Instance"
] | [((726, 753), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (743, 753), False, 'import logging\n'), ((976, 1005), 're.compile', 're.compile', (['"""<(?!lambda).+>$"""'], {}), "('<(?!lambda).+>$')\n", (986, 1005), False, 'import re\n'), ((1021, 1158), 'collections.namedtuple', 'collections.namedtuple', (['"""CallRecord"""', "['node', 'function', 'signatures', 'positional_arguments',\n 'keyword_arguments', 'return_value']"], {}), "('CallRecord', ['node', 'function', 'signatures',\n 'positional_arguments', 'keyword_arguments', 'return_value'])\n", (1043, 1158), False, 'import collections\n'), ((26132, 26178), 'pytype.metrics.get_metric', 'metrics.get_metric', (['"""memory"""', 'metrics.Snapshot'], {}), "('memory', metrics.Snapshot)\n", (26150, 26178), False, 'from pytype import metrics\n'), ((28221, 28267), 'pytype.metrics.get_metric', 'metrics.get_metric', (['"""memory"""', 'metrics.Snapshot'], {}), "('memory', metrics.Snapshot)\n", (28239, 28267), False, 'from pytype import metrics\n'), ((2395, 2443), 'pytype.abstract.Instance', 'abstract.Instance', (['self.convert.tuple_type', 'self'], {}), '(self.convert.tuple_type, self)\n', (2412, 2443), False, 'from pytype import abstract\n'), ((2770, 2817), 'pytype.abstract.Instance', 'abstract.Instance', (['self.convert.dict_type', 'self'], {}), '(self.convert.dict_type, self)\n', (2787, 2817), False, 'from pytype import abstract\n'), ((5308, 5342), 'pytype.state.SimpleFrame', 'frame_state.SimpleFrame', ([], {'node': 'node'}), '(node=node)\n', (5331, 5342), True, 'from pytype import state as frame_state\n'), ((5439, 5478), 'pytype.state.FrameState.init', 'frame_state.FrameState.init', (['node', 'self'], {}), '(node, self)\n', (5466, 5478), True, 'from pytype import state as frame_state\n'), ((19753, 19794), 'pytype.abstract_utils.get_annotations_dict', 'abstract_utils.get_annotations_dict', (['defs'], {}), '(defs)\n', (19788, 19794), False, 'from pytype import abstract_utils\n'), ((21394, 21439), 'pytype.pytd.pytd_utils.WrapTypeDeclUnit', 'pytd_utils.WrapTypeDeclUnit', (['"""inferred"""', 'data'], {}), "('inferred', data)\n", (21421, 21439), False, 'from pytype.pytd import pytd_utils\n'), ((21542, 21588), 'collections.defaultdict', 'collections.defaultdict', (['pytd_utils.OrderedSet'], {}), '(pytd_utils.OrderedSet)\n', (21565, 21588), False, 'import collections\n'), ((23242, 23271), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (23265, 23271), False, 'import collections\n'), ((25286, 25326), 'pytype.abstract_utils.get_views', 'abstract_utils.get_views', (['[actual]', 'node'], {}), '([actual], node)\n', (25310, 25326), False, 'from pytype import abstract_utils\n'), ((29572, 29616), 'pytype.pytd.visitors.CreateTypeParametersForSignatures', 'visitors.CreateTypeParametersForSignatures', ([], {}), '()\n', (29614, 29616), False, 'from pytype.pytd import visitors\n'), ((29748, 29815), 'pytype.convert_structural.convert_pytd', 'convert_structural.convert_pytd', (['ast', 'builtins_pytd', 'protocols_pytd'], {}), '(ast, builtins_pytd, protocols_pytd)\n', (29779, 29815), False, 'from pytype import convert_structural\n'), ((30437, 30553), 'subprocess.Popen', 'subprocess.Popen', (["['/usr/bin/dot', '-T', 'svg', '-o', svg_file]"], {'stdin': 'subprocess.PIPE', 'universal_newlines': '(True)'}), "(['/usr/bin/dot', '-T', 'svg', '-o', svg_file], stdin=\n subprocess.PIPE, universal_newlines=True)\n", (30453, 30553), False, 'import subprocess\n'), ((30730, 30760), 'pytype.debug.program_to_text', 
'debug.program_to_text', (['program'], {}), '(program)\n', (30751, 30760), False, 'from pytype import debug\n'), ((24780, 24874), 'pytype.pytd.pytd_utils.CreateModule', 'pytd_utils.CreateModule', (['"""unknowns"""'], {'classes': 'classes', 'functions': 'functions', 'aliases': 'aliases'}), "('unknowns', classes=classes, functions=functions,\n aliases=aliases)\n", (24803, 24874), False, 'from pytype.pytd import pytd_utils\n'), ((24922, 24960), 'pytype.pytd.optimize.CombineReturnsAndExceptions', 'optimize.CombineReturnsAndExceptions', ([], {}), '()\n', (24958, 24960), False, 'from pytype.pytd import optimize\n'), ((24980, 25010), 'pytype.pytd.optimize.PullInMethodClasses', 'optimize.PullInMethodClasses', ([], {}), '()\n', (25008, 25010), False, 'from pytype.pytd import optimize\n'), ((25134, 25165), 'pytype.pytd.visitors.AdjustTypeParameters', 'visitors.AdjustTypeParameters', ([], {}), '()\n', (25163, 25165), False, 'from pytype.pytd import visitors\n'), ((29715, 29736), 'pytype.pytd.pytd_utils.Print', 'pytd_utils.Print', (['ast'], {}), '(ast)\n', (29731, 29736), False, 'from pytype.pytd import pytd_utils\n'), ((30041, 30078), 'pytype.convert_structural.extract_local', 'convert_structural.extract_local', (['ast'], {}), '(ast)\n', (30073, 30078), False, 'from pytype import convert_structural\n'), ((19941, 19963), 'pytype.pytd.pytd.Constant', 'pytd.Constant', (['name', 't'], {}), '(name, t)\n', (19954, 19963), False, 'from pytype.pytd import pytd\n'), ((29168, 29214), 'pytype.pytd.builtins.GetDefaultAst', 'builtins.GetDefaultAst', (['options.python_version'], {}), '(options.python_version)\n', (29190, 29214), False, 'from pytype.pytd import builtins\n'), ((29971, 30002), 'pytype.pytd.visitors.RemoveUnknownClasses', 'visitors.RemoveUnknownClasses', ([], {}), '()\n', (30000, 30002), False, 'from pytype.pytd import visitors\n'), ((20579, 20614), 'pytype.pytd.pytd.Constant', 'pytd.Constant', (['name', 'combined_types'], {}), '(name, combined_types)\n', (20592, 20614), False, 'from pytype.pytd import pytd\n'), ((21938, 21957), 'pytype.function.argname', 'function.argname', (['i'], {}), '(i)\n', (21954, 21957), False, 'from pytype import function\n'), ((23895, 23925), 'pytype.pytd.escape.pack_partial', 'escape.pack_partial', (['full_name'], {}), '(full_name)\n', (23914, 23925), False, 'from pytype.pytd import escape\n'), ((21361, 21380), 'pytype.pytd.pytd.AnythingType', 'pytd.AnythingType', ([], {}), '()\n', (21378, 21380), False, 'from pytype.pytd import pytd\n'), ((23972, 24005), 'pytype.pytd.pytd.NamedType', 'pytd.NamedType', (['"""builtins.object"""'], {}), "('builtins.object')\n", (23986, 24005), False, 'from pytype.pytd import pytd\n'), ((21189, 21211), 'pytype.pytd.pytd.Constant', 'pytd.Constant', (['name', 'd'], {}), '(name, d)\n', (21202, 21211), False, 'from pytype.pytd import pytd\n'), ((22196, 22236), 'pytype.pytd.pytd.Parameter', 'pytd.Parameter', (['n', 't', '(False)', '(False)', 'None'], {}), '(n, t, False, False, None)\n', (22210, 22236), False, 'from pytype.pytd import pytd\n'), ((20977, 20996), 'pytype.pytd.pytd.AnythingType', 'pytd.AnythingType', ([], {}), '()\n', (20994, 20996), False, 'from pytype.pytd import pytd\n')] |
#!/usr/bin/env python
__author__ = "<EMAIL>"
"""
Given a pooled input GFF + demux CSV file, write out per-{barcode group} GFFs
If input fasta/fastq is given, optionally also output per-{barcode group} FASTA/FASTQ
"""
import re
from collections import defaultdict
from csv import DictReader
from typing import Optional
import typer
from Bio import SeqIO
import cupcake.sequence.GFF as GFF
from cupcake import version_callback
from cupcake import cupcake_logger as logger
rex_pbid = re.compile(r"(PB.\d+.\d+)(|\S+)")
app = typer.Typer(name="cupcake.post_isoseq_cluster.demux_by_barcode_groups")
def get_type_fafq(in_filename):
in_filename = in_filename.upper()
if in_filename.endswith(".FA") or in_filename.endswith("FASTA"):
return "fasta"
elif in_filename.endswith(".FQ") or in_filename.endswith("FASTQ"):
return "fastq"
else:
raise Exception(
f"Unrecognized file suffix .{in_filename[in_filename.find('.'):]}! Must end with .fasta or .fastq!"
)
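# Illustrative behaviour (filenames are hypothetical): get_type_fafq("pooled.fastq")
# returns "fastq", get_type_fafq("pooled.fa") returns "fasta", and any other suffix
# raises an Exception.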
def regroup_gff(
pooled_gff, demux_count_file, output_prefix, out_group_dict, in_fafq=None
):
"""
    :param pooled_gff: pooled input GFF file
:param demux_count_file: comma-delimited per-barcode count file
:param output_prefix: output prefix for GFF
    :param out_group_dict: dict of barcode name --> group it belongs to (ex: {'EM1':'EM', 'EM2':'EM'})
    :param in_fafq: optional FASTA/FASTQ corresponding to the pooled input GFF
"""
if in_fafq is not None:
type_fafq = get_type_fafq(in_fafq)
in_tissue = defaultdict(
lambda: set()
) # pbid --> list of tissue it is in (EM, END, R)
for r in DictReader(open(demux_count_file), delimiter=","):
for k, v in r.items():
if k != "id" and int(v) > 0:
in_tissue[r["id"]].add(k)
# in_tissue = dict(in_tissue)
handles = {}
handles_fafq = {}
for g in out_group_dict.values():
handles[g] = open(f"{output_prefix}_{g}_only.gff", "w")
if in_fafq is not None:
handles_fafq[g] = open(f"{output_prefix}_{g}_only.{type_fafq}", "w")
if in_fafq is not None:
fafq_dict = SeqIO.to_dict(SeqIO.parse(open(in_fafq), type_fafq))
fafq_dict_keys = list(fafq_dict.keys())
for k in fafq_dict_keys:
m = rex_pbid.match(k)
if m is not None:
fafq_dict[m.group(1)] = fafq_dict[k]
reader = GFF.collapseGFFReader(pooled_gff)
for r in reader:
groups_to_write_in = set()
pbid = r.seqid
if pbid not in in_tissue:
logger.info(
f"WARNING: {pbid} does not belong to any group indicated by outgroup_dict"
)
for tissue in in_tissue[pbid]:
groups_to_write_in.add(out_group_dict[tissue])
for g in groups_to_write_in:
GFF.write_collapseGFF_format(handles[g], r)
if in_fafq is not None:
SeqIO.write(fafq_dict[pbid], handles_fafq[g], type_fafq)
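# Sketch of a direct call (argument values are illustrative, not from the source):
#   regroup_gff("pooled.gff", "demux_counts.csv", "out",
#               {"EM1": "EM", "EM2": "EM"}, in_fafq="pooled.fastq")
# writes out_EM_only.gff (and out_EM_only.fastq) containing only the isoforms whose
# demux counts are non-zero for a barcode mapped to group "EM".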
@app.command(name="")
def main(
pooled_gff: str = typer.Argument(..., help="Pooled GFF file"),
demux_count_file: str = typer.Argument(..., help="Demux count file"),
output_prefix: str = typer.Argument(..., help="Output prefix for GFF outputs"),
outgroup_dict: str = typer.Argument(..., help="Tuples indicating barcode grouping"),
pooled_fastx: Optional[str] = typer.Option(
None,
help="Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)",
),
version: bool = typer.Option(
None,
"--version",
callback=version_callback,
is_eager=True,
help="Prints the version of the SQANTI3 package.",
),
) -> None:
tmp = eval(outgroup_dict)
out_group_dict = dict([tmp]) if len(tmp) == 1 else dict(tmp)
regroup_gff(
pooled_gff,
demux_count_file,
output_prefix,
out_group_dict,
pooled_fastx,
)
if __name__ == "__main__":
typer.run(main)
| [
"cupcake.cupcake_logger.info",
"cupcake.sequence.GFF.collapseGFFReader",
"typer.Option",
"re.compile",
"typer.Typer",
"Bio.SeqIO.write",
"typer.run",
"cupcake.sequence.GFF.write_collapseGFF_format",
"typer.Argument"
] | [((485, 520), 're.compile', 're.compile', (['"""(PB.\\\\d+.\\\\d+)(|\\\\S+)"""'], {}), "('(PB.\\\\d+.\\\\d+)(|\\\\S+)')\n", (495, 520), False, 'import re\n'), ((527, 598), 'typer.Typer', 'typer.Typer', ([], {'name': '"""cupcake.post_isoseq_cluster.demux_by_barcode_groups"""'}), "(name='cupcake.post_isoseq_cluster.demux_by_barcode_groups')\n", (538, 598), False, 'import typer\n'), ((2404, 2437), 'cupcake.sequence.GFF.collapseGFFReader', 'GFF.collapseGFFReader', (['pooled_gff'], {}), '(pooled_gff)\n', (2425, 2437), True, 'import cupcake.sequence.GFF as GFF\n'), ((3038, 3081), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Pooled GFF file"""'}), "(..., help='Pooled GFF file')\n", (3052, 3081), False, 'import typer\n'), ((3111, 3155), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Demux count file"""'}), "(..., help='Demux count file')\n", (3125, 3155), False, 'import typer\n'), ((3182, 3239), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Output prefix for GFF outputs"""'}), "(..., help='Output prefix for GFF outputs')\n", (3196, 3239), False, 'import typer\n'), ((3266, 3328), 'typer.Argument', 'typer.Argument', (['...'], {'help': '"""Tuples indicating barcode grouping"""'}), "(..., help='Tuples indicating barcode grouping')\n", (3280, 3328), False, 'import typer\n'), ((3364, 3465), 'typer.Option', 'typer.Option', (['None'], {'help': '"""Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)"""'}), "(None, help=\n 'Pooled FASTA/FASTQ (optional, if given, will also output demux fa/fq)')\n", (3376, 3465), False, 'import typer\n'), ((3505, 3633), 'typer.Option', 'typer.Option', (['None', '"""--version"""'], {'callback': 'version_callback', 'is_eager': '(True)', 'help': '"""Prints the version of the SQANTI3 package."""'}), "(None, '--version', callback=version_callback, is_eager=True,\n help='Prints the version of the SQANTI3 package.')\n", (3517, 3633), False, 'import typer\n'), ((3955, 3970), 'typer.run', 'typer.run', (['main'], {}), '(main)\n', (3964, 3970), False, 'import typer\n'), ((2563, 2655), 'cupcake.cupcake_logger.info', 'logger.info', (['f"""WARNING: {pbid} does not belong to any group indicated by outgroup_dict"""'], {}), "(\n f'WARNING: {pbid} does not belong to any group indicated by outgroup_dict')\n", (2574, 2655), True, 'from cupcake import cupcake_logger as logger\n'), ((2829, 2872), 'cupcake.sequence.GFF.write_collapseGFF_format', 'GFF.write_collapseGFF_format', (['handles[g]', 'r'], {}), '(handles[g], r)\n', (2857, 2872), True, 'import cupcake.sequence.GFF as GFF\n'), ((2925, 2981), 'Bio.SeqIO.write', 'SeqIO.write', (['fafq_dict[pbid]', 'handles_fafq[g]', 'type_fafq'], {}), '(fafq_dict[pbid], handles_fafq[g], type_fafq)\n', (2936, 2981), False, 'from Bio import SeqIO\n')] |
import random
import numpy as np
import math
from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa
from skimage.io import imsave
from skimage.util import random_noise
maxSlope = 10 # restrict the maximum slope of generated lines for stability
minLength = 20 # restrict the minimum length of line segments
class ICircleDataset:
'''
Generator of circle segment images.
Images will have 1 random circle each, filled with noise and distractor lines.
Class also offers functionality for drawing line parameters, hypotheses and point predictions.
'''
def __init__(self, imgW = 64, imgH = 64, margin = -5, bg_clr = 0.5):
'''
Constructor.
imgW -- image width (default 64)
imgH -- image height (default 64)
        margin -- line segments are sampled within this margin, negative value means that a line segment can start or end outside the image (default -5)
bg_clr -- background intensity (default 0.5)
'''
self.imgW = imgW
self.imgH = imgH
self.margin = margin
self.bg_clr = bg_clr
def draw_circle(self, data, cX, cY, r, clr, alpha=1.0):
'''
Draw a circle with the given color and opacity.
data -- image to draw to
cX -- x value of circle center
cY -- y value of circle center
r -- radius of circle
clr -- line color, triple of values
alpha -- opacity (default 1.0)
'''
cY = int(cY * self.imgH)
cX = int(cX * self.imgW)
r = int(r * self.imgW)
rr, cc, val = circle_perimeter_aa(cY, cX, r)
set_color(data, (rr, cc), clr, val)
def draw_hyps(self, labels, scores, data=None):
'''
        Draw a set of line hypotheses for a batch of images.
labels -- line parameters, array shape (NxMx2) where
N is the number of images in the batch
M is the number of hypotheses per image
2 is the number of line parameters (intercept, slope)
scores -- hypotheses scores, array shape (NxM), see above, higher score will be drawn with higher opacity
        data -- batch of images to draw to, if empty a new batch will be created according to the shape of labels
'''
n = labels.shape[0] # number of images
m = labels.shape[1] # number of hypotheses
if data is None: # create new batch of images
data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
data.fill(self.bg_clr)
clr = (0, 0, 1)
for i in range (0, n):
for j in range (0, m):
lY1 = int(labels[i, j, 0] * self.imgH)
lY2 = int(labels[i, j, 1] * self.imgW + labels[i, j, 0] * self.imgH)
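                # NOTE: relies on a draw_line() helper that is not defined in this snippet.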
self.draw_line(data[i], 0, lY1, self.imgW, lY2, clr, scores[i, j])
return data
def draw_models(self, labels, data=None, correct=None):
'''
Draw circles for a batch of images.
labels -- circle parameters, array shape (Nx3) where
N is the number of images in the batch
3 is the number of circles parameters (center x, center y, radius)
        data -- batch of images to draw to, if empty a new batch will be created according to the shape of labels
                   and circles will be drawn green; otherwise circles will be drawn blue
correct -- array of shape (N) indicating whether a circle estimate is correct
'''
n = labels.shape[0]
if data is None:
data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
data.fill(self.bg_clr)
clr = (0, 1, 0)
else:
clr = (0, 0, 1)
for i in range (0, n):
self.draw_circle(data[i], labels[i, 0], labels[i, 1], labels[i, 2], clr)
if correct is not None:
                # draw border green if estimate is correct, red otherwise
if correct[i]: borderclr = (0, 1, 0)
else: borderclr = (1, 0, 0)
set_color(data[i], line(0, 0, 0, self.imgW-1), borderclr)
set_color(data[i], line(0, 0, self.imgH-1, 0), borderclr)
set_color(data[i], line(self.imgH-1, 0, self.imgH-1, self.imgW-1), borderclr)
set_color(data[i], line(0, self.imgW-1, self.imgH-1, self.imgW-1), borderclr)
return data
def draw_points(self, points, data, inliers=None):
'''
Draw 2D points for a batch of images.
points -- 2D points, array shape (Nx2xM) where
N is the number of images in the batch
2 is the number of point dimensions (x, y)
M is the number of points
data -- batch of images to draw to
        inliers -- soft inlier score for each point,
                   if given, points with score > 0.5 are drawn as light circles, dark otherwise
'''
n = points.shape[0] # number of images
m = points.shape[2] # number of points
for i in range (0, n):
for j in range(0, m):
clr = (0.2, 0.2, 0.2) # draw predicted points as dark circles
if inliers is not None and inliers[i, j] > 0.5:
clr = (0.7, 0.7, 0.7) # draw inliers as light circles
r = int(points[i, 0, j] * self.imgH)
c = int(points[i, 1, j] * self.imgW)
rr, cc = circle(r, c, 2)
set_color(data[i], (rr, cc), clr)
return data
def samples(self, n):
'''
Create new input images of random line segments and distractors along with ground truth parameters.
n -- number of images to create
'''
data = np.zeros((n, self.imgH, self.imgW, 3), dtype=np.float32)
data.fill(self.bg_clr)
labels = np.zeros((n, 3), dtype=np.float32)
for i in range (0, n):
data[i] = random_noise(data[i], mode='speckle')
return data, labels
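# Minimal usage sketch (assumes the class exactly as defined above):
#   dataset = ICircleDataset(imgW=64, imgH=64)
#   images, labels = dataset.samples(4)      # images: (4, 64, 64, 3), labels: (4, 3)
#   images = dataset.draw_models(labels, data=images)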
| [
"skimage.draw.circle",
"skimage.draw.circle_perimeter_aa",
"skimage.draw.set_color",
"numpy.zeros",
"skimage.util.random_noise",
"skimage.draw.line"
] | [((1439, 1469), 'skimage.draw.circle_perimeter_aa', 'circle_perimeter_aa', (['cY', 'cX', 'r'], {}), '(cY, cX, r)\n', (1458, 1469), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((1472, 1507), 'skimage.draw.set_color', 'set_color', (['data', '(rr, cc)', 'clr', 'val'], {}), '(data, (rr, cc), clr, val)\n', (1481, 1507), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((4940, 4996), 'numpy.zeros', 'np.zeros', (['(n, self.imgH, self.imgW, 3)'], {'dtype': 'np.float32'}), '((n, self.imgH, self.imgW, 3), dtype=np.float32)\n', (4948, 4996), True, 'import numpy as np\n'), ((5033, 5067), 'numpy.zeros', 'np.zeros', (['(n, 3)'], {'dtype': 'np.float32'}), '((n, 3), dtype=np.float32)\n', (5041, 5067), True, 'import numpy as np\n'), ((2190, 2246), 'numpy.zeros', 'np.zeros', (['(n, self.imgH, self.imgW, 3)'], {'dtype': 'np.float32'}), '((n, self.imgH, self.imgW, 3), dtype=np.float32)\n', (2198, 2246), True, 'import numpy as np\n'), ((3128, 3184), 'numpy.zeros', 'np.zeros', (['(n, self.imgH, self.imgW, 3)'], {'dtype': 'np.float32'}), '((n, self.imgH, self.imgW, 3), dtype=np.float32)\n', (3136, 3184), True, 'import numpy as np\n'), ((5107, 5144), 'skimage.util.random_noise', 'random_noise', (['data[i]'], {'mode': '"""speckle"""'}), "(data[i], mode='speckle')\n", (5119, 5144), False, 'from skimage.util import random_noise\n'), ((4688, 4703), 'skimage.draw.circle', 'circle', (['r', 'c', '(2)'], {}), '(r, c, 2)\n', (4694, 4703), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((4708, 4741), 'skimage.draw.set_color', 'set_color', (['data[i]', '(rr, cc)', 'clr'], {}), '(data[i], (rr, cc), clr)\n', (4717, 4741), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3556, 3584), 'skimage.draw.line', 'line', (['(0)', '(0)', '(0)', '(self.imgW - 1)'], {}), '(0, 0, 0, self.imgW - 1)\n', (3560, 3584), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3621, 3649), 'skimage.draw.line', 'line', (['(0)', '(0)', '(self.imgH - 1)', '(0)'], {}), '(0, 0, self.imgH - 1, 0)\n', (3625, 3649), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3686, 3738), 'skimage.draw.line', 'line', (['(self.imgH - 1)', '(0)', '(self.imgH - 1)', '(self.imgW - 1)'], {}), '(self.imgH - 1, 0, self.imgH - 1, self.imgW - 1)\n', (3690, 3738), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n'), ((3771, 3823), 'skimage.draw.line', 'line', (['(0)', '(self.imgW - 1)', '(self.imgH - 1)', '(self.imgW - 1)'], {}), '(0, self.imgW - 1, self.imgH - 1, self.imgW - 1)\n', (3775, 3823), False, 'from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa\n')] |
import logging
import logging.handlers
import os
class Logger(object):
def __init__(self, name, default_loglevel='INFO', fmt=None, syslog=None):
self.name = name
self.syslog = syslog
self.fmt = fmt if fmt is not None else "%(asctime)-15s %(name)s %(levelname)s %(message)s"
if 'LOGLEVEL' in os.environ:
self.level = os.environ['LOGLEVEL'].upper()
else:
self.level = default_loglevel.upper()
logging.basicConfig(format=self.fmt)
self.logger = logging.getLogger(self.name)
self.logger.setLevel(self.level)
if self.syslog is not None and self.syslog not in (False, 0):
if isinstance(self.syslog, (list, tuple)):
_addr = tuple(self.syslog)
elif isinstance(self.syslog, str):
_addr = self.syslog
else:
_addr = "/dev/log" if os.path.exists("/dev/log") else None
if _addr is not None:
handler = logging.handlers.SysLogHandler(address=_addr)
self.logger.addHandler(handler)
def get(self):
return self.logger
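# Usage sketch (illustrative, not part of the source):
#   log = Logger(__name__, default_loglevel="DEBUG").get()
#   log.info("service started")
# A LOGLEVEL environment variable overrides default_loglevel; `syslog` may be an
# (address, port) tuple, a socket path string, or any other truthy value to fall
# back to /dev/log when it exists.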
| [
"logging.basicConfig",
"os.path.exists",
"logging.getLogger",
"logging.handlers.SysLogHandler"
] | [((435, 471), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'self.fmt'}), '(format=self.fmt)\n', (454, 471), False, 'import logging\n'), ((490, 518), 'logging.getLogger', 'logging.getLogger', (['self.name'], {}), '(self.name)\n', (507, 518), False, 'import logging\n'), ((902, 947), 'logging.handlers.SysLogHandler', 'logging.handlers.SysLogHandler', ([], {'address': '_addr'}), '(address=_addr)\n', (932, 947), False, 'import logging\n'), ((818, 844), 'os.path.exists', 'os.path.exists', (['"""/dev/log"""'], {}), "('/dev/log')\n", (832, 844), False, 'import os\n')] |
#!/usr/bin/env python
# encoding: utf-8
from django.test import TestCase
from zoo import models
class AnimalTestCase(TestCase):
"""Test animals' sound """
def test_dog_says(self):
"""test dog says woof or not
"""
dog = models.Dog(name='Snoopy')
self.assertEqual(dog.says(), 'woof')
def test_cat_says(self):
"""test cat says meow of not
"""
cat = models.Cat(name='Garfield')
self.assertEqual(cat.says(), 'meow')
| [
"zoo.models.Dog",
"zoo.models.Cat"
] | [((259, 284), 'zoo.models.Dog', 'models.Dog', ([], {'name': '"""Snoopy"""'}), "(name='Snoopy')\n", (269, 284), False, 'from zoo import models\n'), ((424, 451), 'zoo.models.Cat', 'models.Cat', ([], {'name': '"""Garfield"""'}), "(name='Garfield')\n", (434, 451), False, 'from zoo import models\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-27 13:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mezzanine.core.fields
class Migration(migrations.Migration):
dependencies = [
('rpocore', '0006_auto_20160921_1924'),
]
operations = [
migrations.CreateModel(
name='SupportingOrganization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('_order', mezzanine.core.fields.OrderField(null=True, verbose_name='Order')),
('name', models.CharField(max_length=100, verbose_name='Name')),
('logo', models.ImageField(upload_to='', verbose_name='Logo of organization')),
('url', models.CharField(max_length=200, verbose_name='URL')),
],
options={
'verbose_name_plural': 'Supporting organizations',
'ordering': ('_order',),
'verbose_name': 'Supporting organization',
},
),
migrations.AlterField(
model_name='carouselitem',
name='homepage',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='carousel_items', to='rpocore.HomepagePage', verbose_name='Homepage'),
),
migrations.AlterField(
model_name='homepagepage',
name='process',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='rpocore.Process', verbose_name='Process'),
),
migrations.AlterField(
model_name='notablesupporter',
name='supporter_page',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notable_supporters', to='rpocore.SupporterPage', verbose_name='Supporter page'),
),
migrations.AlterField(
model_name='phase',
name='process',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rpocore.Process', verbose_name='Process'),
),
migrations.AlterField(
model_name='statementpage',
name='formal_statements',
field=models.ManyToManyField(blank=True, to='rpocore.FormalStatement', verbose_name='Formal statements'),
),
migrations.AlterField(
model_name='statementpage',
name='informal_statements',
field=models.ManyToManyField(blank=True, to='rpocore.InformalStatement', verbose_name='Informal statements'),
),
migrations.AlterField(
model_name='supporter',
name='support_group',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='rpocore.SupportGroup', verbose_name='Support group'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.AutoField",
"django.db.models.ImageField",
"django.db.models.CharField"
] | [((1261, 1411), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""carousel_items"""', 'to': '"""rpocore.HomepagePage"""', 'verbose_name': '"""Homepage"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='carousel_items', to='rpocore.HomepagePage', verbose_name='Homepage')\n", (1278, 1411), False, 'from django.db import migrations, models\n'), ((1535, 1659), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""rpocore.Process"""', 'verbose_name': '"""Process"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='rpocore.Process', verbose_name='Process')\n", (1552, 1659), False, 'from django.db import migrations, models\n'), ((1795, 1970), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""notable_supporters"""', 'to': '"""rpocore.SupporterPage"""', 'verbose_name': '"""Supporter page"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='notable_supporters', to='rpocore.SupporterPage',\n verbose_name='Supporter page')\n", (1812, 1970), False, 'from django.db import migrations, models\n'), ((2084, 2197), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""rpocore.Process"""', 'verbose_name': '"""Process"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'rpocore.Process', verbose_name='Process')\n", (2101, 2197), False, 'from django.db import migrations, models\n'), ((2332, 2434), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': '"""rpocore.FormalStatement"""', 'verbose_name': '"""Formal statements"""'}), "(blank=True, to='rpocore.FormalStatement',\n verbose_name='Formal statements')\n", (2354, 2434), False, 'from django.db import migrations, models\n'), ((2572, 2678), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'to': '"""rpocore.InformalStatement"""', 'verbose_name': '"""Informal statements"""'}), "(blank=True, to='rpocore.InformalStatement',\n verbose_name='Informal statements')\n", (2594, 2678), False, 'from django.db import migrations, models\n'), ((2806, 2940), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""rpocore.SupportGroup"""', 'verbose_name': '"""Support group"""'}), "(null=True, on_delete=django.db.models.deletion.PROTECT,\n to='rpocore.SupportGroup', verbose_name='Support group')\n", (2823, 2940), False, 'from django.db import migrations, models\n'), ((471, 564), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (487, 564), False, 'from django.db import migrations, models\n'), ((683, 736), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""Name"""'}), "(max_length=100, verbose_name='Name')\n", (699, 736), False, 'from django.db import migrations, models\n'), ((764, 832), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '""""""', 'verbose_name': '"""Logo of organization"""'}), "(upload_to='', verbose_name='Logo of organization')\n", (781, 832), False, 'from 
django.db import migrations, models\n'), ((859, 911), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""URL"""'}), "(max_length=200, verbose_name='URL')\n", (875, 911), False, 'from django.db import migrations, models\n')] |
#LineSensor test
from gpiozero import LineSensor
from time import sleep
from signal import pause
def lineDetected():
print('line detected')
def noLineDetected():
print('no line detected')
sensor = LineSensor(14)
sensor.when_line = lineDetected
sensor.when_no_line = noLineDetected
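# pause() blocks until a signal arrives; a Ctrl+C raises KeyboardInterrupt here,
# so sensor.close() below is only reached if that exception is handled by the caller.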
pause()
sensor.close()
| [
"gpiozero.LineSensor",
"signal.pause"
] | [((210, 224), 'gpiozero.LineSensor', 'LineSensor', (['(14)'], {}), '(14)\n', (220, 224), False, 'from gpiozero import LineSensor\n'), ((296, 303), 'signal.pause', 'pause', ([], {}), '()\n', (301, 303), False, 'from signal import pause\n')] |
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 <NAME> <<EMAIL>>
# SPDX-License-Identifier: BSD-2-Clause
from migen import *
from litex.build.generic_platform import *
from litex.build.gowin.platform import GowinPlatform
from litex.build.openfpgaloader import OpenFPGALoader
# IOs ----------------------------------------------------------------------------------------------
_io = [
# Clk / Rst
("clk12", 0, Pins("4"), IOStandard("LVCMOS33")),
# Leds
("user_led", 0, Pins("23"), IOStandard("LVCMOS33")),
("user_led", 1, Pins("24"), IOStandard("LVCMOS33")),
("user_led", 2, Pins("25"), IOStandard("LVCMOS33")),
("user_led", 3, Pins("26"), IOStandard("LVCMOS33")),
("user_led", 4, Pins("27"), IOStandard("LVCMOS33")),
("user_led", 5, Pins("28"), IOStandard("LVCMOS33")),
("user_led", 6, Pins("29"), IOStandard("LVCMOS33")),
("user_led", 7, Pins("30"), IOStandard("LVCMOS33")),
# RGB led, active-low
("rgb_led", 0,
Subsignal("r", Pins("112")),
Subsignal("g", Pins("114")),
Subsignal("b", Pins("113")),
IOStandard("LVCMOS33"),
),
("rgb_led", 1,
Subsignal("r", Pins("106")),
Subsignal("g", Pins("111")),
Subsignal("b", Pins("110")),
IOStandard("LVCMOS33"),
),
("rgb_led", 2,
Subsignal("r", Pins("101")),
Subsignal("g", Pins("104")),
Subsignal("b", Pins("102")),
IOStandard("LVCMOS33"),
),
("rgb_led", 3,
Subsignal("r", Pins("98")),
Subsignal("g", Pins("100")),
Subsignal("b", Pins("99")),
IOStandard("LVCMOS33"),
),
# Switches
("user_sw", 0, Pins("75"), IOStandard("LVCMOS33")),
("user_sw", 1, Pins("76"), IOStandard("LVCMOS33")),
("user_sw", 2, Pins("78"), IOStandard("LVCMOS33")),
("user_sw", 3, Pins("79"), IOStandard("LVCMOS33")),
("user_sw", 4, Pins("80"), IOStandard("LVCMOS33")),
("user_sw", 5, Pins("81"), IOStandard("LVCMOS33")),
("user_sw", 6, Pins("82"), IOStandard("LVCMOS33")),
("user_sw", 7, Pins("83"), IOStandard("LVCMOS33")),
# Buttons.
("user_btn", 0, Pins("58"), IOStandard("LVCMOS33")),
("user_btn", 1, Pins("59"), IOStandard("LVCMOS33")),
("user_btn", 2, Pins("60"), IOStandard("LVCMOS33")),
("user_btn", 3, Pins("61"), IOStandard("LVCMOS33")),
("user_btn", 4, Pins("62"), IOStandard("LVCMOS33")),
("user_btn", 5, Pins("63"), IOStandard("LVCMOS33")),
("user_btn", 6, Pins("64"), IOStandard("LVCMOS33")),
("user_btn", 7, Pins("65"), IOStandard("LVCMOS33")),
# Serial.
# FT232H has only one interface -> use (arbitrary) two pins from J2 to
# connect an external USB<->serial adapter
("serial", 0,
Subsignal("tx", Pins("116")), # J2.17
Subsignal("rx", Pins("115")), # J2.18
IOStandard("LVCMOS33")
),
# Seven Segment
("seven_seg_dig", 0, Pins("137"), IOStandard("LVCMOS33")),
("seven_seg_dig", 1, Pins("140"), IOStandard("LVCMOS33")),
("seven_seg_dig", 2, Pins("141"), IOStandard("LVCMOS33")),
("seven_seg_dig", 3, Pins("7"), IOStandard("LVCMOS33")),
("seven_seg", 0, Pins("138 142 9 11 12 139 8 10"), IOStandard("LVCMOS33")),
]
# Connectors ---------------------------------------------------------------------------------------
_connectors = [
["J1", "- 38 39 40 41 42 43 44 66 67 68 69 70 71 72 96 95 94 93 -"],
["J2", "- 136 135 134 133 132 131 130 129 128 123 122 121 120 119 118 117 116 115 -"],
]
# Platform -----------------------------------------------------------------------------------------
class Platform(GowinPlatform):
default_clk_name = "clk12"
default_clk_period = 1e9/12e6
def __init__(self, toolchain="gowin"):
GowinPlatform.__init__(self, "GW1N-UV4LQ144C6/I5", _io, _connectors, toolchain=toolchain, devicename="GW1N-4")
self.toolchain.options["use_mspi_as_gpio"] = 1
def create_programmer(self):
return OpenFPGALoader("runber")
def do_finalize(self, fragment):
GowinPlatform.do_finalize(self, fragment)
self.add_period_constraint(self.lookup_request("clk12", loose=True), 1e9/12e6)
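# Usage sketch (illustrative; requires a LiteX install):
#   platform = Platform(toolchain="gowin")
#   led = platform.request("user_led", 0)        # standard LiteX resource request
#   prog = platform.create_programmer()          # OpenFPGALoader configured for "runber"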
| [
"litex.build.gowin.platform.GowinPlatform.__init__",
"litex.build.openfpgaloader.OpenFPGALoader",
"litex.build.gowin.platform.GowinPlatform.do_finalize"
] | [((3780, 3894), 'litex.build.gowin.platform.GowinPlatform.__init__', 'GowinPlatform.__init__', (['self', '"""GW1N-UV4LQ144C6/I5"""', '_io', '_connectors'], {'toolchain': 'toolchain', 'devicename': '"""GW1N-4"""'}), "(self, 'GW1N-UV4LQ144C6/I5', _io, _connectors,\n toolchain=toolchain, devicename='GW1N-4')\n", (3802, 3894), False, 'from litex.build.gowin.platform import GowinPlatform\n'), ((3995, 4019), 'litex.build.openfpgaloader.OpenFPGALoader', 'OpenFPGALoader', (['"""runber"""'], {}), "('runber')\n", (4009, 4019), False, 'from litex.build.openfpgaloader import OpenFPGALoader\n'), ((4066, 4107), 'litex.build.gowin.platform.GowinPlatform.do_finalize', 'GowinPlatform.do_finalize', (['self', 'fragment'], {}), '(self, fragment)\n', (4091, 4107), False, 'from litex.build.gowin.platform import GowinPlatform\n')] |
import numpy as np
import copy
import combo.misc
import cPickle as pickle
from results import history
from .. import utility
from ...variable import variable
from ..call_simulator import call_simulator
from ... import predictor
from ...gp import predictor as gp_predictor
from ...blm import predictor as blm_predictor
import combo.search.score
MAX_SEACH = int(20000)
class policy:
def __init__(self, test_X, config=None):
self.predictor = None
self.training = variable()
self.test = self._set_test(test_X)
self.actions = np.arange(0, self.test.X.shape[0])
self.history = history()
self.config = self._set_config(config)
def set_seed(self, seed):
self.seed = seed
np.random.seed(self.seed)
def delete_actions(self, index, actions=None):
actions = self._set_unchosed_actions(actions)
return np.delete(actions, index)
def write(self, action, t, X=None):
if X is None:
X = self.test.X[action, :]
Z = self.test.Z[action, :] if self.test.Z is not None else None
else:
Z = self.predictor.get_basis(X) \
if self.predictor is not None else None
self.new_data = variable(X, t, Z)
self.history.write(t, action)
self.training.add(X=X, t=t, Z=Z)
def random_search(self, max_num_probes, num_search_each_probe=1,
simulator=None, is_disp=True):
N = int(num_search_each_probe)
if int(max_num_probes) * N > len(self.actions):
raise ValueError('max_num_probes * num_search_each_probe must \
be smaller than the length of candidates')
if is_disp:
utility.show_interactive_mode(simulator, self.history)
for n in xrange(0, max_num_probes):
if is_disp and N > 1:
utility.show_start_message_multi_search(self.history.num_runs)
action = self.get_random_action(N)
if simulator is None:
return action
t, X = call_simulator(simulator, action)
self.write(action, t, X)
if is_disp:
utility.show_search_results(self.history, N)
return copy.deepcopy(self.history)
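    # Usage sketch (illustrative; `simulator` is any object accepted by call_simulator
    # that maps chosen actions to observed values):
    #   pol = policy(test_X=X)
    #   pol.set_seed(0)
    #   res = pol.random_search(max_num_probes=20, simulator=simulator)
    #   res = pol.bayes_search(max_num_probes=80, simulator=simulator, score='TS', interval=5)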
def bayes_search(self, training=None, max_num_probes=None,
num_search_each_probe=1,
predictor=None, is_disp=True,
simulator=None, score='TS', interval=0,
num_rand_basis=0):
if max_num_probes is None:
max_num_probes = 1
simulator = None
is_rand_expans = False if num_rand_basis == 0 else True
self.training = self._set_training(training)
if predictor is None:
self.predictor = self._init_predictor(is_rand_expans)
else:
self.predictor = predictor
N = int(num_search_each_probe)
for n in xrange(max_num_probes):
if utility.is_learning(n, interval):
self.predictor.fit(self.training, num_rand_basis)
self.test.Z = self.predictor.get_basis(self.test.X)
self.training.Z = self.predictor.get_basis(self.training.X)
self.predictor.prepare(self.training)
else:
try:
self.predictor.update(self.training, self.new_data)
except:
self.predictor.prepare(self.training)
if num_search_each_probe != 1:
utility.show_start_message_multi_search(self.history.num_runs,
score)
K = self.config.search.multi_probe_num_sampling
alpha = self.config.search.alpha
action = self.get_actions(score, N, K, alpha)
if simulator is None:
return action
t, X = call_simulator(simulator, action)
self.write(action, t, X)
if is_disp:
utility.show_search_results(self.history, N)
return copy.deepcopy(self.history)
def get_score(self, mode, predictor=None, training=None, alpha=1):
self._set_training(training)
self._set_predictor(predictor)
actions = self.actions
test = self.test.get_subset(actions)
if mode == 'EI':
f = combo.search.score.EI(predictor, training, test)
elif mode == 'PI':
f = combo.search.score.PI(predictor, training, test)
elif mode == 'TS':
f = combo.search.score.TS(predictor, training, test, alpha)
else:
raise NotImplementedError('mode must be EI, PI or TS.')
return f
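    # get_marginal_score: for multi-point (batch) selection, the score of each remaining
    # action is re-evaluated under K virtual observations sampled from the current
    # predictor at the already-chosen actions; get_actions then averages over the K draws.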
def get_marginal_score(self, mode, chosed_actions, N, alpha):
f = np.zeros((N, len(self.actions)))
new_test = self.test.get_subset(chosed_actions)
virtual_t \
= self.predictor.get_predict_samples(self.training, new_test, N)
for n in xrange(N):
predictor = copy.deepcopy(self.predictor)
train = copy.deepcopy(self.training)
virtual_train = new_test
virtual_train.t = virtual_t[n, :]
if virtual_train.Z is None:
train.add(virtual_train.X, virtual_train.t)
else:
train.add(virtual_train.X, virtual_train.t, virtual_train.Z)
try:
predictor.update(train, virtual_train)
except:
predictor.prepare(train)
f[n, :] = self.get_score(mode, predictor, train)
return f
def get_actions(self, mode, N, K, alpha):
f = self.get_score(mode, self.predictor, self.training, alpha)
temp = np.argmax(f)
action = self.actions[temp]
self.actions = self.delete_actions(temp)
chosed_actions = np.zeros(N, dtype=int)
chosed_actions[0] = action
for n in xrange(1, N):
f = self.get_marginal_score(mode, chosed_actions[0:n], K, alpha)
temp = np.argmax(np.mean(f, 0))
chosed_actions[n] = self.actions[temp]
self.actions = self.delete_actions(temp)
return chosed_actions
def get_random_action(self, N):
random_index = np.random.permutation(xrange(self.actions.shape[0]))
index = random_index[0:N]
action = self.actions[index]
self.actions = self.delete_actions(index)
return action
def load(self, file_history, file_training=None, file_predictor=None):
self.history.load(file_history)
if file_training is None:
N = self.history.total_num_search
X = self.test.X[self.history.chosed_actions[0:N], :]
t = self.history.fx[0:N]
self.training = variable(X=X, t=t)
else:
self.training = variable()
self.training.load(file_training)
if file_predictor is not None:
with open(file_predictor) as f:
self.predictor = pickle.load(f)
def export_predictor(self):
return self.predictor
def export_training(self):
return self.training
def export_history(self):
return self.history
def _set_predictor(self, predictor=None):
if predictor is None:
predictor = self.predictor
return predictor
def _init_predictor(self, is_rand_expans, predictor=None):
self.predictor = self._set_predictor(predictor)
if self.predictor is None:
if is_rand_expans:
self.predictor = blm_predictor(self.config)
else:
self.predictor = gp_predictor(self.config)
return self.predictor
def _set_training(self, training=None):
if training is None:
training = self.training
return training
def _set_unchosed_actions(self, actions=None):
if actions is None:
actions = self.actions
return actions
def _set_test(self, test_X):
if isinstance(test_X, np.ndarray):
test = variable(X=test_X)
elif isinstance(test_X, variable):
test = test_X
else:
raise TypeError('The type of test_X must \
take ndarray or combo.variable')
return test
def _set_config(self, config=None):
if config is None:
config = combo.misc.set_config()
return config
| [
"numpy.mean",
"results.history",
"numpy.delete",
"numpy.argmax",
"numpy.zeros",
"numpy.random.seed",
"copy.deepcopy",
"cPickle.load",
"numpy.arange"
] | [((559, 593), 'numpy.arange', 'np.arange', (['(0)', 'self.test.X.shape[0]'], {}), '(0, self.test.X.shape[0])\n', (568, 593), True, 'import numpy as np\n'), ((617, 626), 'results.history', 'history', ([], {}), '()\n', (624, 626), False, 'from results import history\n'), ((738, 763), 'numpy.random.seed', 'np.random.seed', (['self.seed'], {}), '(self.seed)\n', (752, 763), True, 'import numpy as np\n'), ((885, 910), 'numpy.delete', 'np.delete', (['actions', 'index'], {}), '(actions, index)\n', (894, 910), True, 'import numpy as np\n'), ((2236, 2263), 'copy.deepcopy', 'copy.deepcopy', (['self.history'], {}), '(self.history)\n', (2249, 2263), False, 'import copy\n'), ((4089, 4116), 'copy.deepcopy', 'copy.deepcopy', (['self.history'], {}), '(self.history)\n', (4102, 4116), False, 'import copy\n'), ((5744, 5756), 'numpy.argmax', 'np.argmax', (['f'], {}), '(f)\n', (5753, 5756), True, 'import numpy as np\n'), ((5868, 5890), 'numpy.zeros', 'np.zeros', (['N'], {'dtype': 'int'}), '(N, dtype=int)\n', (5876, 5890), True, 'import numpy as np\n'), ((5040, 5069), 'copy.deepcopy', 'copy.deepcopy', (['self.predictor'], {}), '(self.predictor)\n', (5053, 5069), False, 'import copy\n'), ((5090, 5118), 'copy.deepcopy', 'copy.deepcopy', (['self.training'], {}), '(self.training)\n', (5103, 5118), False, 'import copy\n'), ((6064, 6077), 'numpy.mean', 'np.mean', (['f', '(0)'], {}), '(f, 0)\n', (6071, 6077), True, 'import numpy as np\n'), ((7032, 7046), 'cPickle.load', 'pickle.load', (['f'], {}), '(f)\n', (7043, 7046), True, 'import cPickle as pickle\n')] |
# -*- encoding: utf-8 -*-
'''
@project : LeetCode
@File : pondSizes.py
@Contact : <EMAIL>
@Desc :
You are given an integer matrix land representing a piece of land, where the value at each point is the altitude of that location. A value of 0 denotes water. Water cells connected vertically, horizontally, or diagonally form a pond, and the size of a pond is the number of connected water cells. Write a method that computes the sizes of all ponds in the matrix and returns them sorted in ascending order.
Example:
Input:
[
  [0,2,1,0],
  [0,1,0,1],
  [1,1,0,1],
  [0,1,0,1]
]
Output: [1,2,4]
Hints:
0 < len(land) <= 1000
0 < len(land[i]) <= 1000
Source: LeetCode (力扣)
Link: https://leetcode-cn.com/problems/pond-sizes-lcci
@Modify Time      @Author    @Version    @Description
------------ ------- -------- -----------
2020-03-07 zhan 1.0 None
'''
from typing import List
from collections import deque
class Solution:
def pondSizes(self, land: List[List[int]]) -> List[int]:
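        # Flood fill over 8-connected water cells: `flag` holds every unvisited water
        # coordinate; each BFS seeded from flag.pop() counts one pond, and visited cells
        # are removed from `flag` as they are enqueued.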
        def neighbors(iR, iC, flag):
            # All 8-connected neighbours of (iR, iC) that are still unvisited water cells.
            return {(iR + dr, iC + dc)
                    for dr in (-1, 0, 1)
                    for dc in (-1, 0, 1)
                    if (dr, dc) != (0, 0) and (iR + dr, iC + dc) in flag}
flag = {(i,j) for j in range(len(land[0])) for i in range(len(land)) if land[i][j] == 0}
ans = []
while flag:
tmpArea = 0
mydueque = deque()
mydueque.append(flag.pop())
while mydueque:
curEle = mydueque.popleft()
tmpArea +=1
for neighbor in neighbors(curEle[0], curEle[1], flag):
mydueque.append(neighbor)
flag.remove(neighbor)
ans.append(tmpArea)
ans.sort()
return ans
if __name__ == '__main__':
a = [
[0,2,1,0],
[0,1,0,1],
[1,1,0,1],
[0,1,0,1]
]
ans = Solution().pondSizes(a)
print(ans)
| [
"collections.deque"
] | [((1655, 1662), 'collections.deque', 'deque', ([], {}), '()\n', (1660, 1662), False, 'from collections import deque\n')] |
from setuptools import setup, find_packages
setup(
name='Pokedex',
version='0.1',
zip_safe=False,
packages=find_packages(),
package_data={
'pokedex': ['data/csv/*.csv']
},
install_requires=[
'SQLAlchemy>=1.0,<2.0',
'whoosh>=2.5,<2.7',
'markdown==2.4.1',
'construct==2.5.3',
'six>=1.9.0',
],
entry_points={
'console_scripts': [
'pokedex = pokedex.main:setuptools_entry',
],
},
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.7",
]
)
| [
"setuptools.find_packages"
] | [((124, 139), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (137, 139), False, 'from setuptools import setup, find_packages\n')] |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encoders for the speech model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import range
from six.moves import zip
import tensorflow as tf
from tensorflow.python.ops import inplace_ops
from lingvo.core import base_encoder
from lingvo.core import base_layer
from lingvo.core import layers
from lingvo.core import plot
from lingvo.core import py_utils
from lingvo.core import rnn_cell
from lingvo.core import rnn_layers
from lingvo.core import summary_utils
from lingvo.core import model_helper
ConvLSTMBlock = collections.namedtuple('ConvLSTMBlock', ('rnn', 'cnn'))
class AsrEncoder(base_encoder.BaseEncoder):
"""Speech encoder version 1."""
@classmethod
def Params(cls):
"""Configs for AsrEncoder."""
p = super(AsrEncoder, cls).Params()
p.Define('lstm_tpl', rnn_cell.LSTMCellSimple.Params(),
'Configs template for the RNN layer.')
p.Define('cnn_tpl', layers.ConvLayer.Params(),
'Configs template for the conv layer.')
p.Define('proj_tpl', layers.ProjectionLayer.Params(),
'Configs template for the projection layer.')
p.Define(
'highway_skip', False,
'If set, residual connections from different layers are gated. '
'Will only be used if residual_start is enabled.')
p.Define('highway_skip_tpl', layers.HighwaySkipLayer.Params(),
'Configs template for the highway skip layer.')
p.Define('conv_lstm_tpl', rnn_cell.ConvLSTMCell.Params(),
'Configs template for ConvLSTMCell.')
p.Define(
'after_conv_lstm_cnn_tpl', layers.ConvLayer.Params(),
'Configs template for the cnn layer immediately follow the'
' convlstm layer.')
p.Define('conv_filter_shapes', None, 'Filter shapes for each conv layer.')
p.Define('conv_filter_strides', None, 'Filter strides for each conv layer.')
p.Define('input_shape', [None, None, None, None],
             'Shape of the input. This should be a TensorShape with rank 4.')
p.Define('lstm_cell_size', 256, 'LSTM cell size for the RNN layer.')
p.Define('num_cnn_layers', 2, 'Number of conv layers to create.')
p.Define('num_conv_lstm_layers', 1, 'Number of conv lstm layers to create.')
p.Define('num_lstm_layers', 3, 'Number of rnn layers to create')
p.Define('project_lstm_output', True,
'Include projection layer after each encoder LSTM layer.')
p.Define('pad_steps', 6,
'Extra zero-padded timesteps to add to the input sequence. ')
p.Define(
'residual_start', 0, 'Start residual connections from this lstm layer. '
'Disabled if 0 or greater than num_lstm_layers.')
p.Define('residual_stride', 1,
'Number of lstm layers to skip per residual connection.')
p.Define(
'bidi_rnn_type', 'func', 'Options: func, native_cudnn. '
'func: BidirectionalFRNN, '
'native_cudnn: BidirectionalNativeCuDNNLSTM.')
# TODO(yonghui): Maybe move those configs to a separate file.
# Set some reasonable default values.
#
# NOTE(yonghui): The default config below assumes the following encoder
# architecture:
#
# cnn/batch-norm/relu ->
# cnn/batch-norm/relu ->
# bidirectional conv-lstm ->
# cnn/batch-norm/relu
# bidirectional lstm ->
# projection/batch-norm/relu ->
# bidirectional lstm ->
# projection/batch-norm/relu ->
# bidirectional lstm
#
# Default config for the rnn layer.
p.lstm_tpl.params_init = py_utils.WeightInit.Uniform(0.1)
# Default config for the convolution layer.
p.input_shape = [None, None, 80, 3]
p.conv_filter_shapes = [(3, 3, 3, 32), (3, 3, 32, 32)]
p.conv_filter_strides = [(2, 2), (2, 2)]
p.cnn_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1)
# TODO(yonghui): Disable variational noise logic.
# NOTE(yonghui): Fortunately, variational noise logic is currently not
# implemented for ConvLayer yet (as of sep 22, 2016).
# Default config for the projection layer.
p.proj_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1)
# TODO(yonghui): Disable variational noise logic.
# NOTE(yonghui): Fortunately, variational noise logic is currently not
# implemented for ProjectionLayer yet (as of sep 22, 2016).
p.conv_lstm_tpl.filter_shape = [1, 3] # height (time), width (frequency)
p.conv_lstm_tpl.inputs_shape = [None, None, None, None]
p.conv_lstm_tpl.cell_shape = [None, None, None, None]
p.conv_lstm_tpl.params_init = py_utils.WeightInit.TruncatedGaussian(0.1)
p.after_conv_lstm_cnn_tpl.filter_shape = [3, 3, None, None]
p.after_conv_lstm_cnn_tpl.params_init = (
py_utils.WeightInit.TruncatedGaussian(0.1))
p.after_conv_lstm_cnn_tpl.filter_stride = [1, 1]
return p
@base_layer.initializer
def __init__(self, params):
super(AsrEncoder, self).__init__(params)
p = self.params
assert p.packed_input is False, ('Packed inputs are not yet supported for '
'AsrEncoder.')
name = p.name
with tf.variable_scope(name):
# First create the conv layers.
assert p.num_cnn_layers == len(p.conv_filter_shapes)
assert p.num_cnn_layers == len(p.conv_filter_strides)
params_conv_layers = []
for i in range(p.num_cnn_layers):
conv_p = p.cnn_tpl.Copy()
conv_p.name = 'conv_L%d' % (i)
conv_p.filter_shape = p.conv_filter_shapes[i]
conv_p.filter_stride = p.conv_filter_strides[i]
conv_p.is_eval = p.is_eval
params_conv_layers.append(conv_p)
self.CreateChildren('conv', params_conv_layers)
conv_output_shape = tf.TensorShape(p.input_shape)
for i in range(p.num_cnn_layers):
conv_output_shape = self.conv[i].OutShape(conv_output_shape)
conv_output_shape = conv_output_shape.as_list()
assert len(conv_output_shape) == 4 # batch, height, width, channel.
params_conv_lstm_rnn = []
params_conv_lstm_cnn = []
for i in range(p.num_conv_lstm_layers):
# NOTE(yonghui): We assume that output from ConvLSTMBlock has the same
# shape as its input.
_, _, width, in_channel = conv_output_shape
f_conv_lstm_p = p.conv_lstm_tpl.Copy()
f_conv_lstm_p.name = 'f_conv_lstm_%d' % (i)
f_conv_lstm_p.inputs_shape = [None, 1, width, in_channel]
f_conv_lstm_p.cell_shape = [None, 1, width, in_channel]
b_conv_lstm_p = f_conv_lstm_p.Copy()
b_conv_lstm_p.name = 'b_conv_lstm_%d' % (i)
conv_lstm_rnn_p = self.CreateConvLstmLayerParams()
conv_lstm_rnn_p.name = 'conv_lstm_rnn'
conv_lstm_rnn_p.fwd = f_conv_lstm_p
conv_lstm_rnn_p.bak = b_conv_lstm_p
params_conv_lstm_rnn.append(conv_lstm_rnn_p)
cnn_p = p.after_conv_lstm_cnn_tpl.Copy()
cnn_p.name = 'conv_lstm_cnn_%d' % (i)
cnn_p.filter_shape[2] = 2 * in_channel
cnn_p.filter_shape[3] = in_channel
params_conv_lstm_cnn.append(cnn_p)
# TODO(yonghui): Refactor ConvLSTMBlock into a layer.
self.CreateChildren('conv_lstm_rnn', params_conv_lstm_rnn)
self.CreateChildren('conv_lstm_cnn', params_conv_lstm_cnn)
(self._first_lstm_input_dim,
self._first_lstm_input_dim_pad) = self.FirstLstmLayerInputDimAndPadding(
conv_output_shape, pad_to_multiple=16)
# Now create all the rnn layers and projection layers.
# TODO(yonghui): take care of device placement.
params_rnn_layers = []
params_proj_layers = []
params_highway_skip_layers = []
for i in range(p.num_lstm_layers):
if i == 0:
input_dim = self._first_lstm_input_dim
else:
input_dim = 2 * p.lstm_cell_size
forward_p = p.lstm_tpl.Copy()
forward_p.name = 'fwd_rnn_L%d' % (i)
forward_p.num_input_nodes = input_dim
forward_p.num_output_nodes = p.lstm_cell_size
backward_p = forward_p.Copy()
backward_p.name = 'bak_rnn_L%d' % (i)
rnn_p = self.CreateBidirectionalRNNParams(forward_p, backward_p)
rnn_p.name = 'brnn_L%d' % (i)
params_rnn_layers.append(rnn_p)
if p.project_lstm_output and (i < p.num_lstm_layers - 1):
proj_p = p.proj_tpl.Copy()
proj_p.input_dim = 2 * p.lstm_cell_size
proj_p.output_dim = 2 * p.lstm_cell_size
proj_p.name = 'proj_L%d' % (i)
proj_p.is_eval = p.is_eval
params_proj_layers.append(proj_p)
# add the skip layers
residual_index = i - p.residual_start + 1
if p.residual_start > 0 and residual_index >= 0 and p.highway_skip:
highway_skip = p.highway_skip_tpl.Copy()
highway_skip.name = 'enc_hwskip_%d' % len(params_highway_skip_layers)
highway_skip.input_dim = 2 * p.lstm_cell_size
params_highway_skip_layers.append(highway_skip)
self.CreateChildren('rnn', params_rnn_layers)
self.CreateChildren('proj', params_proj_layers)
self.CreateChildren('highway_skip', params_highway_skip_layers)
@property
def _use_functional(self):
return True
def CreateBidirectionalRNNParams(self, forward_p, backward_p):
return model_helper.CreateBidirectionalRNNParams(self.params, forward_p,
backward_p)
def CreateConvLstmLayerParams(self):
return rnn_layers.BidirectionalFRNN.Params()
def FirstLstmLayerInputDimAndPadding(self,
conv_output_shape,
pad_to_multiple=16):
lstm_input_shape = conv_output_shape
# Makes sure the lstm input dims is multiple of 16 (alignment
# requirement from FRNN).
first_lstm_input_dim_unpadded = lstm_input_shape[2] * lstm_input_shape[3]
if self._use_functional and (first_lstm_input_dim_unpadded % pad_to_multiple
!= 0):
first_lstm_input_dim = int(
(first_lstm_input_dim_unpadded + pad_to_multiple - 1) /
pad_to_multiple) * pad_to_multiple
else:
first_lstm_input_dim = first_lstm_input_dim_unpadded
first_lstm_input_dim_padding = (
first_lstm_input_dim - first_lstm_input_dim_unpadded)
return first_lstm_input_dim, first_lstm_input_dim_padding
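  # Example with the default Params above: 80 frequency bins through two stride-2
  # convs give width 20 and 32 channels, so the unpadded dim is 20 * 32 = 640, which
  # is already a multiple of 16 (padding 0); a hypothetical 20 * 33 = 660 would be
  # padded up to 672 (padding 12).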
@property
def supports_streaming(self):
return False
def zero_state(self, batch_size):
return py_utils.NestedMap()
def FProp(self, theta, batch, state0=None):
"""Encodes source as represented by 'inputs' and 'paddings'.
Args:
theta: A NestedMap object containing weights' values of this
layer and its children layers.
batch: A NestedMap with fields:
src_inputs - The inputs tensor. It is expected to be of shape [batch,
time, feature_dim, channels].
paddings - The paddings tensor. It is expected to be of shape [batch,
time].
state0: Recurrent input state. Not supported/ignored by this encoder.
Returns:
(outputs, out_paddings, state1) tuple. Outputs is of the shape
[time, batch, depth], and out_paddings is of the shape [time, batch]
"""
p = self.params
inputs, paddings = batch.src_inputs, batch.paddings
with tf.name_scope(p.name):
# Add a few extra padded timesteps at the end. This is for ensuring the
# correctness of the conv-layers at the edges.
if p.pad_steps > 0:
# inplace_update() is not supported by TPU for now. Since we have done
# padding on the input_generator, we may avoid this additional padding.
assert not py_utils.use_tpu()
inputs_pad = tf.zeros(
inplace_ops.inplace_update(tf.shape(inputs), 1, p.pad_steps),
inputs.dtype)
paddings_pad = tf.ones(
inplace_ops.inplace_update(tf.shape(paddings), 1, p.pad_steps),
paddings.dtype)
inputs = tf.concat([inputs, inputs_pad], 1, name='inputs')
paddings = tf.concat([paddings, paddings_pad], 1)
def ReshapeForPlot(tensor, padding, name):
"""Transposes and flattens channels to [batch, dim, seq_len] shape."""
# Flatten any dimensions beyond the third into the third.
batch_size = tf.shape(tensor)[0]
max_len = tf.shape(tensor)[1]
plot_tensor = tf.reshape(tensor, [batch_size, max_len, -1])
plot_tensor = tf.transpose(plot_tensor, [0, 2, 1], name=name)
return (plot_tensor, summary_utils.SequenceLength(padding))
plots = [
ReshapeForPlot(
tf.transpose(inputs, [0, 1, 3, 2]), paddings, 'inputs')
]
conv_out = inputs
out_padding = paddings
for i, conv_layer in enumerate(self.conv):
conv_out, out_padding = conv_layer.FProp(theta.conv[i], conv_out,
out_padding)
plots.append(
ReshapeForPlot(
tf.transpose(conv_out, [0, 1, 3, 2]), out_padding,
'conv_%d_out' % i))
def TransposeFirstTwoDims(t):
first_dim = tf.shape(t)[0]
second_dim = tf.shape(t)[1]
t_new = tf.transpose(
tf.reshape(t, [first_dim, second_dim, -1]), [1, 0, 2])
t_shape_new = tf.concat([[second_dim], [first_dim], tf.shape(t)[2:]], 0)
return tf.reshape(t_new, t_shape_new)
# Now the conv-lstm part.
conv_lstm_out = conv_out
conv_lstm_out_padding = out_padding
for i, (rnn, cnn) in enumerate(
zip(self.conv_lstm_rnn, self.conv_lstm_cnn)):
conv_lstm_in = conv_lstm_out
# Move time dimension to be the first.
conv_lstm_in = TransposeFirstTwoDims(conv_lstm_in)
conv_lstm_in = tf.expand_dims(conv_lstm_in, 2)
conv_lstm_in_padding = tf.expand_dims(
tf.transpose(conv_lstm_out_padding), 2)
lstm_out = rnn.FProp(theta.conv_lstm_rnn[i], conv_lstm_in,
conv_lstm_in_padding)
# Move time dimension to be the second.
cnn_in = TransposeFirstTwoDims(lstm_out)
cnn_in = tf.squeeze(cnn_in, 2)
cnn_in_padding = conv_lstm_out_padding
cnn_out, cnn_out_padding = cnn.FProp(theta.conv_lstm_cnn[i], cnn_in,
cnn_in_padding)
conv_lstm_out, conv_lstm_out_padding = cnn_out, cnn_out_padding
plots.append(
ReshapeForPlot(conv_lstm_out, conv_lstm_out_padding,
'conv_lstm_%d_out' % i))
# Need to do a reshape before starting the rnn layers.
conv_lstm_out = py_utils.HasRank(conv_lstm_out, 4)
conv_lstm_out_shape = tf.shape(conv_lstm_out)
new_shape = tf.concat([conv_lstm_out_shape[:2], [-1]], 0)
conv_lstm_out = tf.reshape(conv_lstm_out, new_shape)
if self._first_lstm_input_dim_pad:
conv_lstm_out = tf.pad(
conv_lstm_out,
[[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]])
conv_lstm_out = py_utils.HasShape(conv_lstm_out,
[-1, -1, self._first_lstm_input_dim])
# Transpose to move the time dimension to be the first.
rnn_in = tf.transpose(conv_lstm_out, [1, 0, 2])
rnn_padding = tf.expand_dims(tf.transpose(conv_lstm_out_padding), 2)
# rnn_in is of shape [time, batch, depth]
# rnn_padding is of shape [time, batch, 1]
# Now the rnn layers.
num_skips = 0
for i in range(p.num_lstm_layers):
rnn_out = self.rnn[i].FProp(theta.rnn[i], rnn_in, rnn_padding)
residual_index = i - p.residual_start + 1
if p.residual_start > 0 and residual_index >= 0:
if residual_index % p.residual_stride == 0:
residual_in = rnn_in
if residual_index % p.residual_stride == p.residual_stride - 1:
# Highway skip connection.
if p.highway_skip:
rnn_out = self.highway_skip[num_skips].FProp(
theta.highway_skip[num_skips], residual_in, rnn_out)
num_skips += 1
else:
# Residual skip connection.
rnn_out += py_utils.HasShape(residual_in, tf.shape(rnn_out))
if p.project_lstm_output and (i < p.num_lstm_layers - 1):
# Projection layers.
rnn_out = self.proj[i].FProp(theta.proj[i], rnn_out, rnn_padding)
if i == p.num_lstm_layers - 1:
rnn_out *= (1.0 - rnn_padding)
plots.append(
ReshapeForPlot(
tf.transpose(rnn_out, [1, 0, 2]),
tf.transpose(rnn_padding, [1, 0, 2]), 'rnn_%d_out' % i))
rnn_in = rnn_out
final_out = rnn_in
if self.cluster.add_summary:
fig = plot.MatplotlibFigureSummary(
'encoder_example', figsize=(8, len(plots) * 3.5))
# Order layers from bottom to top.
plots.reverse()
for tensor, seq_len in plots:
fig.AddSubplot(
[tensor, seq_len],
summary_utils.TrimPaddingAndPlotSequence,
title=tensor.name,
xlabel='Time')
fig.Finalize()
rnn_padding = tf.squeeze(rnn_padding, [2])
return final_out, rnn_padding, py_utils.NestedMap()
| [
"lingvo.core.rnn_layers.BidirectionalFRNN.Params",
"tensorflow.shape",
"tensorflow.pad",
"tensorflow.transpose",
"lingvo.core.model_helper.CreateBidirectionalRNNParams",
"lingvo.core.py_utils.use_tpu",
"lingvo.core.py_utils.HasRank",
"lingvo.core.layers.ProjectionLayer.Params",
"lingvo.core.py_utils.NestedMap",
"lingvo.core.rnn_cell.ConvLSTMCell.Params",
"lingvo.core.layers.ConvLayer.Params",
"lingvo.core.layers.HighwaySkipLayer.Params",
"lingvo.core.rnn_cell.LSTMCellSimple.Params",
"tensorflow.concat",
"lingvo.core.py_utils.HasShape",
"six.moves.zip",
"collections.namedtuple",
"tensorflow.variable_scope",
"lingvo.core.py_utils.WeightInit.Uniform",
"lingvo.core.summary_utils.SequenceLength",
"tensorflow.reshape",
"tensorflow.expand_dims",
"six.moves.range",
"lingvo.core.py_utils.WeightInit.TruncatedGaussian",
"tensorflow.name_scope",
"tensorflow.squeeze",
"tensorflow.TensorShape"
] | [((1225, 1280), 'collections.namedtuple', 'collections.namedtuple', (['"""ConvLSTMBlock"""', "('rnn', 'cnn')"], {}), "('ConvLSTMBlock', ('rnn', 'cnn'))\n", (1247, 1280), False, 'import collections\n'), ((4201, 4233), 'lingvo.core.py_utils.WeightInit.Uniform', 'py_utils.WeightInit.Uniform', (['(0.1)'], {}), '(0.1)\n', (4228, 4233), False, 'from lingvo.core import py_utils\n'), ((4455, 4497), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (4492, 4497), False, 'from lingvo.core import py_utils\n'), ((4762, 4804), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (4799, 4804), False, 'from lingvo.core import py_utils\n'), ((5229, 5271), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (5266, 5271), False, 'from lingvo.core import py_utils\n'), ((5390, 5432), 'lingvo.core.py_utils.WeightInit.TruncatedGaussian', 'py_utils.WeightInit.TruncatedGaussian', (['(0.1)'], {}), '(0.1)\n', (5427, 5432), False, 'from lingvo.core import py_utils\n'), ((9912, 9989), 'lingvo.core.model_helper.CreateBidirectionalRNNParams', 'model_helper.CreateBidirectionalRNNParams', (['self.params', 'forward_p', 'backward_p'], {}), '(self.params, forward_p, backward_p)\n', (9953, 9989), False, 'from lingvo.core import model_helper\n'), ((10094, 10131), 'lingvo.core.rnn_layers.BidirectionalFRNN.Params', 'rnn_layers.BidirectionalFRNN.Params', ([], {}), '()\n', (10129, 10131), False, 'from lingvo.core import rnn_layers\n'), ((11119, 11139), 'lingvo.core.py_utils.NestedMap', 'py_utils.NestedMap', ([], {}), '()\n', (11137, 11139), False, 'from lingvo.core import py_utils\n'), ((1495, 1527), 'lingvo.core.rnn_cell.LSTMCellSimple.Params', 'rnn_cell.LSTMCellSimple.Params', ([], {}), '()\n', (1525, 1527), False, 'from lingvo.core import rnn_cell\n'), ((1605, 1630), 'lingvo.core.layers.ConvLayer.Params', 'layers.ConvLayer.Params', ([], {}), '()\n', (1628, 1630), False, 'from lingvo.core import layers\n'), ((1710, 1741), 'lingvo.core.layers.ProjectionLayer.Params', 'layers.ProjectionLayer.Params', ([], {}), '()\n', (1739, 1741), False, 'from lingvo.core import layers\n'), ((2012, 2044), 'lingvo.core.layers.HighwaySkipLayer.Params', 'layers.HighwaySkipLayer.Params', ([], {}), '()\n', (2042, 2044), False, 'from lingvo.core import layers\n'), ((2137, 2167), 'lingvo.core.rnn_cell.ConvLSTMCell.Params', 'rnn_cell.ConvLSTMCell.Params', ([], {}), '()\n', (2165, 2167), False, 'from lingvo.core import rnn_cell\n'), ((2269, 2294), 'lingvo.core.layers.ConvLayer.Params', 'layers.ConvLayer.Params', ([], {}), '()\n', (2292, 2294), False, 'from lingvo.core import layers\n'), ((5782, 5805), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (5799, 5805), True, 'import tensorflow as tf\n'), ((6010, 6033), 'six.moves.range', 'range', (['p.num_cnn_layers'], {}), '(p.num_cnn_layers)\n', (6015, 6033), False, 'from six.moves import range\n'), ((6376, 6405), 'tensorflow.TensorShape', 'tf.TensorShape', (['p.input_shape'], {}), '(p.input_shape)\n', (6390, 6405), True, 'import tensorflow as tf\n'), ((6421, 6444), 'six.moves.range', 'range', (['p.num_cnn_layers'], {}), '(p.num_cnn_layers)\n', (6426, 6444), False, 'from six.moves import range\n'), ((6724, 6753), 'six.moves.range', 'range', (['p.num_conv_lstm_layers'], {}), '(p.num_conv_lstm_layers)\n', (6729, 6753), False, 'from six.moves import range\n'), ((8303, 
8327), 'six.moves.range', 'range', (['p.num_lstm_layers'], {}), '(p.num_lstm_layers)\n', (8308, 8327), False, 'from six.moves import range\n'), ((11949, 11970), 'tensorflow.name_scope', 'tf.name_scope', (['p.name'], {}), '(p.name)\n', (11962, 11970), True, 'import tensorflow as tf\n'), ((15274, 15308), 'lingvo.core.py_utils.HasRank', 'py_utils.HasRank', (['conv_lstm_out', '(4)'], {}), '(conv_lstm_out, 4)\n', (15290, 15308), False, 'from lingvo.core import py_utils\n'), ((15337, 15360), 'tensorflow.shape', 'tf.shape', (['conv_lstm_out'], {}), '(conv_lstm_out)\n', (15345, 15360), True, 'import tensorflow as tf\n'), ((15379, 15424), 'tensorflow.concat', 'tf.concat', (['[conv_lstm_out_shape[:2], [-1]]', '(0)'], {}), '([conv_lstm_out_shape[:2], [-1]], 0)\n', (15388, 15424), True, 'import tensorflow as tf\n'), ((15447, 15483), 'tensorflow.reshape', 'tf.reshape', (['conv_lstm_out', 'new_shape'], {}), '(conv_lstm_out, new_shape)\n', (15457, 15483), True, 'import tensorflow as tf\n'), ((15674, 15744), 'lingvo.core.py_utils.HasShape', 'py_utils.HasShape', (['conv_lstm_out', '[-1, -1, self._first_lstm_input_dim]'], {}), '(conv_lstm_out, [-1, -1, self._first_lstm_input_dim])\n', (15691, 15744), False, 'from lingvo.core import py_utils\n'), ((15863, 15901), 'tensorflow.transpose', 'tf.transpose', (['conv_lstm_out', '[1, 0, 2]'], {}), '(conv_lstm_out, [1, 0, 2])\n', (15875, 15901), True, 'import tensorflow as tf\n'), ((16138, 16162), 'six.moves.range', 'range', (['p.num_lstm_layers'], {}), '(p.num_lstm_layers)\n', (16143, 16162), False, 'from six.moves import range\n'), ((17813, 17841), 'tensorflow.squeeze', 'tf.squeeze', (['rnn_padding', '[2]'], {}), '(rnn_padding, [2])\n', (17823, 17841), True, 'import tensorflow as tf\n'), ((12610, 12659), 'tensorflow.concat', 'tf.concat', (['[inputs, inputs_pad]', '(1)'], {'name': '"""inputs"""'}), "([inputs, inputs_pad], 1, name='inputs')\n", (12619, 12659), True, 'import tensorflow as tf\n'), ((12679, 12717), 'tensorflow.concat', 'tf.concat', (['[paddings, paddings_pad]', '(1)'], {}), '([paddings, paddings_pad], 1)\n', (12688, 12717), True, 'import tensorflow as tf\n'), ((13014, 13059), 'tensorflow.reshape', 'tf.reshape', (['tensor', '[batch_size, max_len, -1]'], {}), '(tensor, [batch_size, max_len, -1])\n', (13024, 13059), True, 'import tensorflow as tf\n'), ((13082, 13129), 'tensorflow.transpose', 'tf.transpose', (['plot_tensor', '[0, 2, 1]'], {'name': 'name'}), '(plot_tensor, [0, 2, 1], name=name)\n', (13094, 13129), True, 'import tensorflow as tf\n'), ((14012, 14042), 'tensorflow.reshape', 'tf.reshape', (['t_new', 't_shape_new'], {}), '(t_new, t_shape_new)\n', (14022, 14042), True, 'import tensorflow as tf\n'), ((14197, 14240), 'six.moves.zip', 'zip', (['self.conv_lstm_rnn', 'self.conv_lstm_cnn'], {}), '(self.conv_lstm_rnn, self.conv_lstm_cnn)\n', (14200, 14240), False, 'from six.moves import zip\n'), ((14409, 14440), 'tensorflow.expand_dims', 'tf.expand_dims', (['conv_lstm_in', '(2)'], {}), '(conv_lstm_in, 2)\n', (14423, 14440), True, 'import tensorflow as tf\n'), ((14772, 14793), 'tensorflow.squeeze', 'tf.squeeze', (['cnn_in', '(2)'], {}), '(cnn_in, 2)\n', (14782, 14793), True, 'import tensorflow as tf\n'), ((15549, 15625), 'tensorflow.pad', 'tf.pad', (['conv_lstm_out', '[[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]]'], {}), '(conv_lstm_out, [[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]])\n', (15555, 15625), True, 'import tensorflow as tf\n'), ((15937, 15972), 'tensorflow.transpose', 'tf.transpose', (['conv_lstm_out_padding'], {}), 
'(conv_lstm_out_padding)\n', (15949, 15972), True, 'import tensorflow as tf\n'), ((17879, 17899), 'lingvo.core.py_utils.NestedMap', 'py_utils.NestedMap', ([], {}), '()\n', (17897, 17899), False, 'from lingvo.core import py_utils\n'), ((12307, 12325), 'lingvo.core.py_utils.use_tpu', 'py_utils.use_tpu', ([], {}), '()\n', (12323, 12325), False, 'from lingvo.core import py_utils\n'), ((12934, 12950), 'tensorflow.shape', 'tf.shape', (['tensor'], {}), '(tensor)\n', (12942, 12950), True, 'import tensorflow as tf\n'), ((12972, 12988), 'tensorflow.shape', 'tf.shape', (['tensor'], {}), '(tensor)\n', (12980, 12988), True, 'import tensorflow as tf\n'), ((13159, 13196), 'lingvo.core.summary_utils.SequenceLength', 'summary_utils.SequenceLength', (['padding'], {}), '(padding)\n', (13187, 13196), False, 'from lingvo.core import summary_utils\n'), ((13255, 13289), 'tensorflow.transpose', 'tf.transpose', (['inputs', '[0, 1, 3, 2]'], {}), '(inputs, [0, 1, 3, 2])\n', (13267, 13289), True, 'import tensorflow as tf\n'), ((13768, 13779), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (13776, 13779), True, 'import tensorflow as tf\n'), ((13804, 13815), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (13812, 13815), True, 'import tensorflow as tf\n'), ((13861, 13903), 'tensorflow.reshape', 'tf.reshape', (['t', '[first_dim, second_dim, -1]'], {}), '(t, [first_dim, second_dim, -1])\n', (13871, 13903), True, 'import tensorflow as tf\n'), ((14500, 14535), 'tensorflow.transpose', 'tf.transpose', (['conv_lstm_out_padding'], {}), '(conv_lstm_out_padding)\n', (14512, 14535), True, 'import tensorflow as tf\n'), ((12396, 12412), 'tensorflow.shape', 'tf.shape', (['inputs'], {}), '(inputs)\n', (12404, 12412), True, 'import tensorflow as tf\n'), ((12528, 12546), 'tensorflow.shape', 'tf.shape', (['paddings'], {}), '(paddings)\n', (12536, 12546), True, 'import tensorflow as tf\n'), ((13624, 13660), 'tensorflow.transpose', 'tf.transpose', (['conv_out', '[0, 1, 3, 2]'], {}), '(conv_out, [0, 1, 3, 2])\n', (13636, 13660), True, 'import tensorflow as tf\n'), ((17187, 17219), 'tensorflow.transpose', 'tf.transpose', (['rnn_out', '[1, 0, 2]'], {}), '(rnn_out, [1, 0, 2])\n', (17199, 17219), True, 'import tensorflow as tf\n'), ((17237, 17273), 'tensorflow.transpose', 'tf.transpose', (['rnn_padding', '[1, 0, 2]'], {}), '(rnn_padding, [1, 0, 2])\n', (17249, 17273), True, 'import tensorflow as tf\n'), ((13976, 13987), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (13984, 13987), True, 'import tensorflow as tf\n'), ((16849, 16866), 'tensorflow.shape', 'tf.shape', (['rnn_out'], {}), '(rnn_out)\n', (16857, 16866), True, 'import tensorflow as tf\n')] |
import matplotlib.font_manager as fm
import matplotlib.pyplot as plt
import numpy as np
font_location = './wordcloud_file/malgun.ttf' # For Windows
font_name = fm.FontProperties(fname=font_location).get_name()
plt.rc('font', family=font_name)
def percent_graph2(movie_review) :
    b = movie_review
    labelss = sorted(b['score'].unique())  ## set the labels (Korean label text does not render with the default font!!!)
    c = b['score'].value_counts().sort_index()  ## frequency counts
    print(c)
    print(labelss)
    fig = plt.figure(figsize=(8,8)) ## create the canvas
    fig.set_facecolor('white') ## set the canvas background to white
    ax = fig.add_subplot() ## create the axes
    pie = ax.pie(c, ## draw the pie chart
           startangle=90, ## start at 90 degrees
           counterclock=False, ## draw clockwise
           # autopct=lambda p : '{:.2f}%'.format(p), ## print percentages
           wedgeprops=dict(width=0.5),
           colors = ['yellowgreen', 'orange'],
           labels = labelss,
           textprops={'fontsize': 22}
           )
    total = np.sum(c) ## total count
    sum_pct = 0 ## running percentage total
    for i, l in enumerate(labelss):
        ang1, ang2 = pie[0][i].theta1, pie[0][i].theta2 ## start/end angles of wedge i
        r = pie[0][i].r ## radius of the wedge
        x = ((r + 0.5) / 2) * np.cos(np.pi / 180 * ((ang1 + ang2) / 2)) ## x coordinate of the wedge centre
        y = ((r + 0.5) / 2) * np.sin(np.pi / 180 * ((ang1 + ang2) / 2)) ## y coordinate of the wedge centre
        if i < len(labelss) - 1:
            sum_pct += float(f'{c[i] / total * 100:.2f}') ## accumulate the percentage
            ax.text(x, y, f'{c[i] / total * 100:.2f}%', ha='center', va='center', size=22, color='white',
                    weight='bold') ## draw the percentage label
        else: ## so the shares sum to exactly 100, the last percentage is 100 minus the accumulated total
            ax.text(x, y, f'{100 - sum_pct:.2f}%', ha='center', va='center',size=22,color='white',
                    weight='bold')
    # pie.rc('font', family=font_name)
    # plt.legend(pie[0], labelss) ## show the legend
    plt.savefig('./static/images/pos_neg_ratio.png') # output path | [
"matplotlib.pyplot.savefig",
"matplotlib.font_manager.FontProperties",
"numpy.sum",
"matplotlib.pyplot.figure",
"numpy.cos",
"numpy.sin",
"matplotlib.pyplot.rc"
] | [((211, 243), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': 'font_name'}), "('font', family=font_name)\n", (217, 243), True, 'import matplotlib.pyplot as plt\n'), ((461, 487), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (471, 487), True, 'import matplotlib.pyplot as plt\n'), ((933, 942), 'numpy.sum', 'np.sum', (['c'], {}), '(c)\n', (939, 942), True, 'import numpy as np\n'), ((1844, 1892), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./static/images/pos_neg_ratio.png"""'], {}), "('./static/images/pos_neg_ratio.png')\n", (1855, 1892), True, 'import matplotlib.pyplot as plt\n'), ((161, 199), 'matplotlib.font_manager.FontProperties', 'fm.FontProperties', ([], {'fname': 'font_location'}), '(fname=font_location)\n', (178, 199), True, 'import matplotlib.font_manager as fm\n'), ((1151, 1192), 'numpy.cos', 'np.cos', (['(np.pi / 180 * ((ang1 + ang2) / 2))'], {}), '(np.pi / 180 * ((ang1 + ang2) / 2))\n', (1157, 1192), True, 'import numpy as np\n'), ((1235, 1276), 'numpy.sin', 'np.sin', (['(np.pi / 180 * ((ang1 + ang2) / 2))'], {}), '(np.pi / 180 * ((ang1 + ang2) / 2))\n', (1241, 1276), True, 'import numpy as np\n')] |
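For reference, a minimal driver for the percent_graph2 helper above — a sketch, assuming the reviews arrive as a pandas DataFrame with a 'score' column (the only column the function reads); the labels and values are hypothetical:

import pandas as pd

# Hypothetical review table: one row per review, 'score' holds the sentiment label.
movie_review = pd.DataFrame({'score': ['neg', 'neg', 'pos', 'pos', 'pos']})

# Draws a two-wedge donut chart and writes it to ./static/images/pos_neg_ratio.png.
percent_graph2(movie_review)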
from django.shortcuts import render
from .forms import *
from django.shortcuts import redirect,get_object_or_404
from django.contrib.auth.decorators import login_required
from . models import *
from django.views import generic
@login_required(login_url='/accounts/login/')
def home(request):
mylocs = Myloc.objects.all()
return render(request, 'home.html',{"mylocs":mylocs,})
@login_required(login_url='/accounts/login/')
def add_profile(request):
current_user = request.user
profile = Profile.objects.filter(id = current_user.id)
if request.method == 'POST':
form = NewProfileForm(request.POST, request.FILES)
if form.is_valid():
caption = form.save(commit=False)
caption.user = current_user
caption.save()
return redirect('myprofile')
else:
form = NewProfileForm()
return render(request, 'edit.html', {"form":form})
@login_required(login_url='/accounts/login/')
def my_profile(request):
    current_user = request.user
    my_my_areas = Myloc.objects.filter(user = current_user)
    my_profile = Profile.objects.filter(user = current_user).first()
    return render(request, 'profile.html', {"my_my_areas":my_my_areas, "my_profile":my_profile})
@login_required(login_url='/accounts/login/')
def addmy_area(request):
current_user = request.user
if request.method == 'POST':
form = MylocForm(request.POST, request.FILES)
if form.is_valid():
image = form.save(commit=False)
image.user = current_user
image.save()
return redirect('home')
else:
form = MylocForm()
return render(request, 'addmy_area.html', {"form": form})
def myloc_details(request,myloc_id):
activities=Activity.objects.filter(myloc=myloc_id)
posts=Post.objects.filter(myloc=myloc_id)
myloc=Myloc.objects.get(pk=myloc_id)
return render(request,'details.html',{'myloc':myloc,'activities':activities,'posts':posts})
@login_required(login_url="/accounts/login/")
def new_activity(request,pk):
current_user = request.user
myloc = get_object_or_404(Myloc,pk=pk)
if request.method == 'POST':
activity_form = NewActivityForm(request.POST, request.FILES)
if activity_form.is_valid():
activity = activity_form.save(commit=False)
activity.user = current_user
activity.myloc=myloc
activity.save()
return redirect('detail', myloc_id=myloc.id)
else:
activity_form = NewActivityForm()
return render(request, 'new_activity.html', {"form": activity_form,'myloc':myloc})
@login_required(login_url="/accounts/login/")
def new_post(request,pk):
current_user = request.user
myloc = get_object_or_404(Myloc,pk=pk)
if request.method == 'POST':
post_form = NewPostForm(request.POST, request.FILES)
if post_form.is_valid():
post = post_form.save(commit=False)
post.user = current_user
post.myloc=myloc
post.save()
return redirect('detail', myloc_id=myloc.id)
else:
post_form = NewPostForm()
return render(request, 'new_post.html', {"form": post_form,'myloc':myloc})
@login_required(login_url='/accounts/login/')
def search_project(request):
if 'project_name' in request.GET and request.GET["project_name"]:
search_term = request.GET.get("project_name")
searched_project = Myloc.search_by_location(search_term)
message = f"{search_term}"
return render(request, "search.html",{"message":message,"project": searched_project})
else:
message = "No search history"
return render(request, 'search.html',{"message":message})
| [
"django.shortcuts.render",
"django.shortcuts.redirect",
"django.shortcuts.get_object_or_404",
"django.contrib.auth.decorators.login_required"
] | [((229, 273), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (243, 273), False, 'from django.contrib.auth.decorators import login_required\n'), ((387, 430), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""accounts/login/"""'}), "(login_url='accounts/login/')\n", (401, 430), False, 'from django.contrib.auth.decorators import login_required\n'), ((925, 968), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""accounts/login/"""'}), "(login_url='accounts/login/')\n", (939, 968), False, 'from django.contrib.auth.decorators import login_required\n'), ((1251, 1295), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (1265, 1295), False, 'from django.contrib.auth.decorators import login_required\n'), ((1989, 2033), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2003, 2033), False, 'from django.contrib.auth.decorators import login_required\n'), ((2630, 2674), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (2644, 2674), False, 'from django.contrib.auth.decorators import login_required\n'), ((3220, 3264), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/accounts/login/"""'}), "(login_url='/accounts/login/')\n", (3234, 3264), False, 'from django.contrib.auth.decorators import login_required\n'), ((337, 385), 'django.shortcuts.render', 'render', (['request', '"""home.html"""', "{'mylocs': mylocs}"], {}), "(request, 'home.html', {'mylocs': mylocs})\n", (343, 385), False, 'from django.shortcuts import render\n'), ((875, 919), 'django.shortcuts.render', 'render', (['request', '"""edit.html"""', "{'form': form}"], {}), "(request, 'edit.html', {'form': form})\n", (881, 919), False, 'from django.shortcuts import render\n'), ((1163, 1254), 'django.shortcuts.render', 'render', (['request', '"""profile.html"""', "{'my_my_areas': my_my_areas, 'my_profile': my_profile}"], {}), "(request, 'profile.html', {'my_my_areas': my_my_areas, 'my_profile':\n my_profile})\n", (1169, 1254), False, 'from django.shortcuts import render\n'), ((1655, 1705), 'django.shortcuts.render', 'render', (['request', '"""addmy_area.html"""', "{'form': form}"], {}), "(request, 'addmy_area.html', {'form': form})\n", (1661, 1705), False, 'from django.shortcuts import render\n'), ((1902, 1997), 'django.shortcuts.render', 'render', (['request', '"""details.html"""', "{'myloc': myloc, 'activities': activities, 'posts': posts}"], {}), "(request, 'details.html', {'myloc': myloc, 'activities': activities,\n 'posts': posts})\n", (1908, 1997), False, 'from django.shortcuts import render\n'), ((2108, 2139), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Myloc'], {'pk': 'pk'}), '(Myloc, pk=pk)\n', (2125, 2139), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((2552, 2629), 'django.shortcuts.render', 'render', (['request', '"""new_activity.html"""', "{'form': activity_form, 'myloc': myloc}"], {}), "(request, 'new_activity.html', {'form': activity_form, 'myloc': myloc})\n", (2558, 2629), False, 'from django.shortcuts import render\n'), ((2745, 2776), 'django.shortcuts.get_object_or_404', 
'get_object_or_404', (['Myloc'], {'pk': 'pk'}), '(Myloc, pk=pk)\n', (2762, 2776), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((3149, 3218), 'django.shortcuts.render', 'render', (['request', '"""new_post.html"""', "{'form': post_form, 'myloc': myloc}"], {}), "(request, 'new_post.html', {'form': post_form, 'myloc': myloc})\n", (3155, 3218), False, 'from django.shortcuts import render\n'), ((1590, 1606), 'django.shortcuts.redirect', 'redirect', (['"""home"""'], {}), "('home')\n", (1598, 1606), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((2451, 2488), 'django.shortcuts.redirect', 'redirect', (['"""detail"""'], {'myloc_id': 'myloc.id'}), "('detail', myloc_id=myloc.id)\n", (2459, 2488), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((3056, 3093), 'django.shortcuts.redirect', 'redirect', (['"""detail"""'], {'myloc_id': 'myloc.id'}), "('detail', myloc_id=myloc.id)\n", (3064, 3093), False, 'from django.shortcuts import redirect, get_object_or_404\n'), ((3533, 3618), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message, 'project': searched_project}"], {}), "(request, 'search.html', {'message': message, 'project':\n searched_project})\n", (3539, 3618), False, 'from django.shortcuts import render\n'), ((3675, 3727), 'django.shortcuts.render', 'render', (['request', '"""search.html"""', "{'message': message}"], {}), "(request, 'search.html', {'message': message})\n", (3681, 3727), False, 'from django.shortcuts import render\n'), ((800, 821), 'django.shortcuts.redirect', 'redirect', (['"""myprofile"""'], {}), "('myprofile')\n", (808, 821), False, 'from django.shortcuts import redirect, get_object_or_404\n')] |
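The views above resolve URL names ('home', 'myprofile', 'detail') via redirect(), so they assume a matching URLconf; a minimal sketch of that urls.py follows (the path strings are assumptions — only the names and view signatures come from the code above):

from django.urls import path
from . import views

urlpatterns = [
    path('', views.home, name='home'),
    path('profile/', views.my_profile, name='myprofile'),
    path('profile/edit/', views.add_profile, name='add_profile'),
    path('new/', views.addmy_area, name='addmy_area'),
    path('myloc/<int:myloc_id>/', views.myloc_details, name='detail'),
    path('myloc/<int:pk>/activity/new/', views.new_activity, name='new_activity'),
    path('myloc/<int:pk>/post/new/', views.new_post, name='new_post'),
    path('search/', views.search_project, name='search_project'),
]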
"""
Day 1 Main Module
"""
from day01 import parse_input, part1, part2
if __name__ == "__main__":
    # trying out the new walrus [:=] operator in Python
if (part := int(input("Enter Part: "))) == 1:
print(part1(parse_input("input.txt")))
elif part == 2:
print(part2(parse_input("input.txt")))
else:
print("Wrong choice [1|2]")
| [
"day01.parse_input"
] | [((221, 245), 'day01.parse_input', 'parse_input', (['"""input.txt"""'], {}), "('input.txt')\n", (232, 245), False, 'from day01 import parse_input, part1, part2\n'), ((288, 312), 'day01.parse_input', 'parse_input', (['"""input.txt"""'], {}), "('input.txt')\n", (299, 312), False, 'from day01 import parse_input, part1, part2\n')] |
"""
Django settings for quiz_app project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
SITE_ID = 1
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DEBUG', False)
ALLOWED_HOSTS = ['ignas-quiz.herokuapp.com','localhost','127.0.0.1']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'storages',
'quiz',
'multichoice',
'true_false',
'essay',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'quiz_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'quiz_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS= (
os.path.join(BASE_DIR, "static"),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
DATABASES = {'default': dj_database_url.parse(os.environ.get('DATABASE_URL')) }
AWS_HEADERS = { # see http://developer.yahoo.com/performance/rules.html#expires
'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT',
'Cache-Control': 'max-age=94608000',
}
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_STORAGE_BUCKET_NAME")
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
AWS_S3_HOST = 's3-eu-west-1.amazonaws.com'
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
STATICFILES_LOCATION = 'static'
STATICFILES_STORAGE = 'custom_storages.StaticStorage'
STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION)
MEDIAFILES_LOCATION = 'media'
MEDIA_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
DEFAULT_FILE_STORAGE = 'custom_storages.MediaStorage' | [
"os.path.join",
"os.environ.get",
"os.path.abspath"
] | [((715, 743), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (729, 743), False, 'import os\n'), ((819, 849), 'os.environ.get', 'os.environ.get', (['"""DEBUG"""', '(False)'], {}), "('DEBUG', False)\n", (833, 849), False, 'import os\n'), ((3470, 3501), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (3482, 3501), False, 'import os\n'), ((3806, 3847), 'os.environ.get', 'os.environ.get', (['"""AWS_STORAGE_BUCKET_NAME"""'], {}), "('AWS_STORAGE_BUCKET_NAME')\n", (3820, 3847), False, 'import os\n'), ((3868, 3903), 'os.environ.get', 'os.environ.get', (['"""AWS_ACCESS_KEY_ID"""'], {}), "('AWS_ACCESS_KEY_ID')\n", (3882, 3903), False, 'import os\n'), ((3928, 3967), 'os.environ.get', 'os.environ.get', (['"""AWS_SECRET_ACCESS_KEY"""'], {}), "('AWS_SECRET_ACCESS_KEY')\n", (3942, 3967), False, 'import os\n'), ((3421, 3453), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3433, 3453), False, 'import os\n'), ((469, 494), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (484, 494), False, 'import os\n'), ((3572, 3602), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (3586, 3602), False, 'import os\n')] |
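The last block of settings points STATICFILES_STORAGE and DEFAULT_FILE_STORAGE at a custom_storages module that is not included here; a typical minimal definition — an assumption based on the usual django-storages S3 pattern, not taken from this project — would be:

# custom_storages.py (sketch)
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage

class StaticStorage(S3Boto3Storage):
    # Collected static files go under the STATICFILES_LOCATION prefix of the bucket.
    location = settings.STATICFILES_LOCATION

class MediaStorage(S3Boto3Storage):
    # Uploaded media files go under the MEDIAFILES_LOCATION prefix.
    location = settings.MEDIAFILES_LOCATION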
import numpy as np
import pickle
from os.path import exists, realpath
import sys
import math
from topple_data_loader import ToppleData, ToppleDataLoader
import transforms3d
class ToppleNormalizationInfo():
'''
Structure to hold all the normalization information for a dataset.
'''
def __init__(self):
# max element of any linear vel vector
self.max_lin_vel = None
# max element of any angular vel vector
self.max_ang_vel = None
# max distance between positions in two contiguous timesteps
self.max_pos = None
# max change in rotation around any axis between two contiguous timesteps (for euler rot)
self.max_rot = None
# max angle of rotation between two steps for axis-angle representation
self.max_delta_rot = None
# max 2-norm of applied impulse vector
self.force_vec_max = None
# max 2-norm of a point in an object point cloud (used for point cloud and force pos)
self.pc_max = None
# normalization values for shape-related stuff
self.density_offset = None
self.density_max = None
self.mass_offset = None
self.mass_max = None
self.inertia_offset = None
self.inertia_max = None
self.friction_offset = None
self.friction_max = None
def print_out(self):
print({'max_lin_vel' : self.max_lin_vel, 'max_ang_vel' : self.max_ang_vel, 'max_pos' : self.max_pos, \
'max_rot' : self.max_rot, 'max_delta_rot' : self.max_delta_rot, 'force_vec_max' : self.force_vec_max, 'pc_max' : self.pc_max, \
'density_off' : self.density_offset, 'density_max' : self.density_max, 'mass_off' : self.mass_offset, \
'mass_max' : self.mass_max, 'inertia_off' : self.inertia_offset, 'inertia_max' : self.inertia_max, \
'friction_off' : self.friction_offset, 'friction_max' : self.friction_max
})
def save(self, pkl_file):
''' Saves normalization info object to a specified .pkl file. '''
with open(pkl_file, 'wb') as f:
pickle.dump(self, f)
def load_from(self, pkl_file):
''' Load normalization info into this object from a specified .pkl file. '''
with open(pkl_file, 'rb') as f:
norm_info = pickle.load(f)
self.copy_from(norm_info)
def copy_from(self, norm_info):
'''
Takes values from the given normalization info object and copies them to this one
'''
self.max_lin_vel = norm_info.max_lin_vel
self.max_ang_vel = norm_info.max_ang_vel
self.max_pos = norm_info.max_pos
self.max_rot = norm_info.max_rot
try:
self.max_delta_rot = norm_info.max_delta_rot
except:
# old versions of data doesn't have max delta rot
pass
self.force_vec_max = norm_info.force_vec_max
self.pc_max = norm_info.pc_max
self.density_offset = norm_info.density_offset
self.density_max = norm_info.density_max
self.mass_offset = norm_info.mass_offset
self.mass_max = norm_info.mass_max
self.inertia_offset = norm_info.inertia_offset
self.inertia_max = norm_info.inertia_max
try:
self.friction_offset = norm_info.friction_offset
self.friction_max = norm_info.friction_max
except:
# old version doesn't have this
pass
class ToppleBatch(object):
'''
Structure to hold a single batch of data.
'''
def __init__(self, size, seq_len, num_pts):
self.size = size
self.num_steps = seq_len
self.num_pts = num_pts
self.point_cloud = np.zeros((self.size, self.num_pts, 3))
self.lin_vel = np.zeros((self.size, self.num_steps, 3))
self.ang_vel = np.zeros((self.size, self.num_steps, 3))
self.pos = np.zeros((self.size, self.num_steps, 3))
# cummulative euler angles
self.rot = np.zeros((self.size, self.num_steps, 3))
# change in rotation in quaternion rep (w, x, y, z)
self.delta_quat = np.zeros((self.size, self.num_steps, 4))
# change in rotation between steps in axis-angle rep (scaled 3 vec)
self.delta_rot = np.zeros((self.size, self.num_steps, 3))
# change in rotation between steps in split axis-angle rep (4-vec)
self.delta_rot_split = np.zeros((self.size, self.num_steps, 4))
# 0 if before topple idx, 1 if after
self.topple_label = np.zeros((self.size, self.num_steps), dtype=int)
# other meta-data not directly used in network
self.toppled = []
self.shape_name = []
self.body_friction = np.zeros((self.size))
self.mass = np.zeros((self.size))
self.scale = np.zeros((self.size, 3))
self.rot_euler = np.zeros((self.size, self.num_steps, 3))
class ToppleDataset(object):
'''
Loads toppling data and provides batches for training and model evaluation.
'''
def __init__(self, roots, norm_info_file, batch_size=32, num_steps=15, shuffle=False, num_pts=None, perturb_pts=0.0):
'''
- roots : list of directories containing data to load for this dataset
- norm_info_file : .pkl file containing normalization information
- batch_size : number of sequences to return in each batch
- num_steps : number of timesteps to return in each sequence
- shuffle : randomly shuffles the returned sequence ordering
- num_pts : the number of points to use in the returned point cloud. If None uses all points in the data.
- perturb_pts : the stdev to randomly perturb point clouds with. If None no perturbation is performed.
-
'''
# settings
self.batch_size = batch_size
self.steps_per_seq = num_steps
self.shuffle = shuffle
self.perturb_std = perturb_pts
self.num_pts = num_pts
# load in data
for root in roots:
if not exists(root):
print('Could not find dataset at ' + root)
return
data_loader = ToppleDataLoader()
self.data = data_loader.load_data(roots)
if num_pts is None:
# use all the points in the point cloud
self.num_pts = self.data.point_cloud.shape[1]
# load in normalization info
if not exists(norm_info_file):
print('Could not find normalization info at ' + norm_info_file)
return
self.norm_info = ToppleNormalizationInfo()
self.norm_info.load_from(norm_info_file)
print('Loaded normalization info!')
# see if we have axis-angle info (for backwards compat)
self.use_aa = False
self.use_aa_split = False
self.use_topple_idx = False
self.use_delta_quat = False
if len(self.data.delta_rot) > 0:
self.use_aa = True
if len(self.data.delta_rot_split) > 0:
self.use_aa_split = True
if len(self.data.topple_idx) > 0:
self.use_topple_idx = True
if len(self.data.body_friction) > 0:
self.use_body_friction = True
if len(self.data.delta_quat) > 0:
self.use_delta_quat = True
# normalize the data
print('Normalizing data...')
self.normalize_data(self.data, self.norm_info)
print('Finished normalizing!')
# order to iterate through data when returning batches (in order by default)
        self.iter_inds = list(range(0, self.data.size))
# prepare to iterate through
self.reset()
def normalize_data(self, data, norm_info):
'''
Normalizes (in place) the given ToppleData using the ToppleNormalizationInfo.
'''
# point clouds -> [-1, 1]
data.point_cloud /= norm_info.pc_max
# force pos -> [-1, 1]
data.force_pos /= norm_info.pc_max
# force vec -> [-1, 1]
data.force_vec /= norm_info.force_vec_max
# density -> [0, 1]
data.density = (data.density - norm_info.density_offset) / norm_info.density_max
# mass -> [0, 1]
data.mass = (data.mass - norm_info.mass_offset) / norm_info.mass_max
# inertia -> [0, 1]
data.inertia = (data.inertia - norm_info.inertia_offset) / norm_info.inertia_max
# friction -> [0, 1]
if norm_info.friction_offset is not None:
data.body_friction = (data.body_friction - norm_info.friction_offset) / norm_info.friction_max
# now time sequence data
# velocities -> [-1, 1]
for i, lin_vel_steps in enumerate(data.lin_vel):
data.lin_vel[i] = [(x / norm_info.max_lin_vel) for x in lin_vel_steps]
for i, ang_vel_steps in enumerate(data.ang_vel):
data.ang_vel[i] = [(x / norm_info.max_ang_vel) for x in ang_vel_steps]
# delta position -> [-1, 1]
for i, pos_steps in enumerate(data.pos):
data.pos[i] = [(x / norm_info.max_pos) for x in pos_steps]
# delta rotation -> [-1, 1]
for i, rot_steps in enumerate(data.total_rot):
data.total_rot[i] = [(x / norm_info.max_rot) for x in rot_steps]
# delta rot axis-angle -> [-1, 1] norm
if self.use_aa:
for i, delta_rot_steps in enumerate(data.delta_rot):
data.delta_rot[i] = [(x / norm_info.max_delta_rot) for x in delta_rot_steps]
# make axes unit and and normalize angle -> [-1, 1]
if self.use_aa_split:
for i, delta_rot_split_steps in enumerate(data.delta_rot_split):
data.delta_rot_split[i] = [np.append(x[:3] / np.linalg.norm(x[:3]), x[3] / norm_info.max_delta_rot) for x in delta_rot_split_steps]
def reset(self):
'''
Prepares to iterate through dataset.
'''
if self.shuffle:
np.random.shuffle(self.iter_inds)
# we consider an epoch as returning one sequence from every single simulation
# ( though if the sequence length is shorter than sim length the unique sequences contained
# in the dataset will be much more than an epoch length )
self.num_batches = (self.data.size + self.batch_size - 1) // self.batch_size
self.batch_idx = 0
def has_next_batch(self):
'''
Returns false if done with the current "epoch" (seen each sim once).
'''
return self.batch_idx < self.num_batches
def next_batch(self, random_window=True, focus_toppling=False):
'''
Returns the next batch of data. if random_window=True will get a random sequence of correct length (otherwise
starts at 0). If focus_toppling=True, will make sure this sequence includes the part of the sequence where toppling occurs.
'''
# size is either batch_size, or shorter if we're at the end of the data
start_idx = self.batch_idx * self.batch_size
end_idx = min((self.batch_idx + 1) * self.batch_size, self.data.size)
batch_size = end_idx - start_idx
# get batch data
batch = ToppleBatch(self.batch_size, self.steps_per_seq, self.num_pts)
for i in range(batch_size):
pc, lin_vel, ang_vel, pos, rot, delta_quat, delta_rot, delta_rot_split, topple_label, meta_info = \
self.get_seq(self.iter_inds[start_idx + i], self.steps_per_seq, random_window, focus_toppling)
batch.point_cloud[i] = pc
batch.lin_vel[i] = lin_vel
batch.ang_vel[i] = ang_vel
batch.pos[i] = pos
batch.rot[i] = rot
if self.use_delta_quat:
batch.delta_quat[i] = delta_quat
if self.use_aa:
batch.delta_rot[i] = delta_rot
if self.use_aa_split:
batch.delta_rot_split[i] = delta_rot_split
if self.use_topple_idx:
batch.topple_label[i] = topple_label
batch.toppled.append(meta_info[0])
batch.shape_name.append(meta_info[1])
batch.scale[i] = meta_info[2]
batch.rot_euler[i] = meta_info[3]
if self.use_body_friction:
batch.body_friction[i] = meta_info[4]
batch.mass[i] = meta_info[5]
if batch_size != self.batch_size:
# need to pad the end with repeat of data
for i in range(self.batch_size - batch_size):
batch.point_cloud[batch_size + i] = batch.point_cloud[i]
batch.lin_vel[batch_size + i] = batch.lin_vel[i]
batch.ang_vel[batch_size + i] = batch.ang_vel[i]
batch.pos[batch_size + i] = batch.pos[i]
batch.rot[batch_size + i] = batch.rot[i]
if self.use_delta_quat:
batch.delta_quat[batch_size + i] = batch.delta_quat[i]
batch.toppled.append(batch.toppled[i])
batch.shape_name.append(batch.shape_name[i])
batch.scale[batch_size + i] = batch.scale[i]
batch.rot_euler[batch_size + i] = batch.rot_euler[i]
batch.mass[batch_size + i] = batch.mass[i]
if self.use_aa:
batch.delta_rot[batch_size + i] = batch.delta_rot[i]
if self.use_aa_split:
batch.delta_rot_split[batch_size + i] = batch.delta_rot_split[i]
if self.use_topple_idx:
batch.topple_label[batch_size + i] = batch.topple_label[i]
if self.use_body_friction:
batch.body_friction[batch_size + i] = batch.body_friction[i]
self.batch_idx += 1
return batch
def get_seq(self, idx, num_steps, random_window=True, focus_toppling=False):
'''
Returns a random contiguous sequence from the simulation at the given idx and length num_steps.
        If num_steps > sim_length, the final (num_steps - sim_length) steps are padded with the value at
        sim[sim_length - 1] (i.e. the last simulated step is repeated).
'''
# get the normalized canonical point cloud for this simulation
pc = np.copy(self.data.point_cloud[self.data.shape_idx[idx]])
scale = self.data.scale[idx]
# scale accordingly
pc *= np.reshape(scale, (1, -1))
# randomly perturb point cloud
pc += np.random.normal(0.0, self.perturb_std, pc.shape)
# randomly draw a subset of points if desired
if self.num_pts < pc.shape[0]:
pc_inds = np.random.choice(pc.shape[0], self.num_pts, replace=False)
pc = pc[pc_inds, :]
# randomly choose a size num_steps sequence from the simulation to return time-series data
total_steps = len(self.data.lin_vel[idx])
max_start_step = total_steps - num_steps
start_step = 0
if max_start_step < 0:
# simulation is shorter than desired sequence length
pad_len = abs(max_start_step)
lin_vel_list = self.data.lin_vel[idx]
lin_vel_out = np.array(lin_vel_list + [lin_vel_list[-1]]*pad_len)
ang_vel_list = self.data.ang_vel[idx]
ang_vel_out = np.array(ang_vel_list + [ang_vel_list[-1]]*pad_len)
pos_list = self.data.pos[idx]
pos_out = np.array(pos_list + [pos_list[-1]]*pad_len)
rot_list = self.data.total_rot[idx]
rot_out = np.array(rot_list + [rot_list[-1]]*pad_len)
if self.use_delta_quat:
delta_quat_list = self.data.delta_quat[idx]
delta_quat_out = np.array(delta_quat_list + [delta_quat_list[-1]]*pad_len)
euler_rot_list = self.data.rot_euler[idx]
euler_rot_out = np.array(euler_rot_list + [euler_rot_list[-1]]*pad_len)
if self.use_aa:
delta_rot_list = self.data.delta_rot[idx]
delta_rot_out = np.array(delta_rot_list + [delta_rot_list[-1]]*pad_len)
if self.use_aa_split:
delta_rot_split_list = self.data.delta_rot_split[idx]
delta_rot_split_out = np.array(delta_rot_split_list + [delta_rot_split_list[-1]]*pad_len)
if self.use_topple_idx:
topple_label_out = np.zeros((total_steps + pad_len), dtype=int)
seq_topple_idx = self.data.topple_idx[idx]
if seq_topple_idx > 0:
topple_label_out[seq_topple_idx:] = 1
else:
start_step = 0
if random_window:
if focus_toppling and self.data.toppled[idx]:
# choose window around the index where it topples
topple_idx = self.data.topple_idx[idx]
min_idx = max([topple_idx - num_steps + 1, 0])
if min_idx >= max_start_step:
# just pick the max index
start_step = max_start_step
else:
# our window is guaranteed to see some part of toppling
start_step = np.random.randint(min_idx, max_start_step+1)
else:
start_step = np.random.randint(0, max_start_step+1)
end_step = start_step + num_steps
# print('Range: %d, %d' % (start_step, end_step))
lin_vel_out = np.array(self.data.lin_vel[idx][start_step:end_step])
ang_vel_out = np.array(self.data.ang_vel[idx][start_step:end_step])
pos_out = np.array(self.data.pos[idx][start_step:end_step])
rot_out = np.array(self.data.total_rot[idx][start_step:end_step])
if self.use_delta_quat:
delta_quat_out = np.array(self.data.delta_quat[idx][start_step:end_step])
euler_rot_out = np.array(self.data.rot_euler[idx][start_step:end_step])
if self.use_aa:
delta_rot_out = np.array(self.data.delta_rot[idx][start_step:end_step])
if self.use_aa_split:
delta_rot_split_out = np.array(self.data.delta_rot_split[idx][start_step:end_step])
if self.use_topple_idx:
topple_label_out = np.zeros((num_steps), dtype=int)
seq_topple_idx = self.data.topple_idx[idx]
if seq_topple_idx > 0:
if seq_topple_idx <= start_step:
topple_label_out[:] = 1
elif seq_topple_idx < end_step:
topple_label_out[seq_topple_idx-start_step:] = 1
# rotate point cloud to align with first frame of sequence
init_rot = self.data.rot_euler[idx][start_step]
xrot, yrot, zrot = np.radians(init_rot)
R = transforms3d.euler.euler2mat(zrot, xrot, yrot, axes='szxy') # unity applies euler angles in z, x, y ordering
pc = np.dot(pc, R.T)
toppled = self.data.toppled[idx]
shape_name = self.data.shape_name[idx]
mass = self.data.mass[idx]
body_fric = -1.0
if self.use_body_friction:
body_fric = self.data.body_friction[idx]
meta_info = (toppled, shape_name, scale, euler_rot_out, body_fric, mass)
if not self.use_aa:
delta_rot_out = None
if not self.use_aa_split:
delta_rot_split_out = None
if not self.use_topple_idx:
topple_label_out = None
if not self.use_delta_quat:
delta_quat_out = None
return pc, lin_vel_out, ang_vel_out, pos_out, rot_out, delta_quat_out, delta_rot_out, delta_rot_split_out, topple_label_out, meta_info
def get_norm_info(self):
return self.norm_info
if __name__=='__main__':
# norm_info = ToppleNormalizationInfo()
# norm_info.load_from('../../data/sim/normalization_info/cube_train.pkl')
# norm_info.print_out()
topple_data = ToppleDataset(roots=['./data/sim/Cube/Cube30k_ObjSplit/Cube30kVal'], norm_info_file='./data/sim/normalization_info/cube_30k.pkl', \
batch_size=5, num_steps=10, shuffle=True, num_pts=None, perturb_pts=0.01)
count = 0
while topple_data.has_next_batch():
batch = topple_data.next_batch(random_window=True, focus_toppling=False)
count += 1
# print(batch.lin_vel[0])
# print(batch.toppled[0])
# print(batch.delta_rot_split[0])
# print(batch.delta_rot[0])
# print(batch.topple_label[0])
# print(batch.pos)
# print(batch.body_friction)
# print(batch.delta_quat[0])
# print(np.degrees(2*np.arccos(batch.delta_quat[0, :, 0])))
print('Total num batches: ' + str(count))
topple_data.reset()
count = 0
while topple_data.has_next_batch():
batch = topple_data.next_batch()
count += 1
print(batch.size)
print('Total num batches: ' + str(count))
| [
"numpy.random.normal",
"numpy.copy",
"numpy.radians",
"os.path.exists",
"numpy.reshape",
"pickle.dump",
"numpy.random.shuffle",
"transforms3d.euler.euler2mat",
"numpy.random.choice",
"pickle.load",
"numpy.array",
"numpy.zeros",
"numpy.dot",
"numpy.random.randint",
"numpy.linalg.norm",
"topple_data_loader.ToppleDataLoader"
] | [((3728, 3766), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_pts, 3)'], {}), '((self.size, self.num_pts, 3))\n', (3736, 3766), True, 'import numpy as np\n'), ((3790, 3830), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (3798, 3830), True, 'import numpy as np\n'), ((3854, 3894), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (3862, 3894), True, 'import numpy as np\n'), ((3914, 3954), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (3922, 3954), True, 'import numpy as np\n'), ((4009, 4049), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (4017, 4049), True, 'import numpy as np\n'), ((4136, 4176), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 4)'], {}), '((self.size, self.num_steps, 4))\n', (4144, 4176), True, 'import numpy as np\n'), ((4278, 4318), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (4286, 4318), True, 'import numpy as np\n'), ((4425, 4465), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 4)'], {}), '((self.size, self.num_steps, 4))\n', (4433, 4465), True, 'import numpy as np\n'), ((4540, 4588), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps)'], {'dtype': 'int'}), '((self.size, self.num_steps), dtype=int)\n', (4548, 4588), True, 'import numpy as np\n'), ((4729, 4748), 'numpy.zeros', 'np.zeros', (['self.size'], {}), '(self.size)\n', (4737, 4748), True, 'import numpy as np\n'), ((4771, 4790), 'numpy.zeros', 'np.zeros', (['self.size'], {}), '(self.size)\n', (4779, 4790), True, 'import numpy as np\n'), ((4814, 4838), 'numpy.zeros', 'np.zeros', (['(self.size, 3)'], {}), '((self.size, 3))\n', (4822, 4838), True, 'import numpy as np\n'), ((4864, 4904), 'numpy.zeros', 'np.zeros', (['(self.size, self.num_steps, 3)'], {}), '((self.size, self.num_steps, 3))\n', (4872, 4904), True, 'import numpy as np\n'), ((6156, 6174), 'topple_data_loader.ToppleDataLoader', 'ToppleDataLoader', ([], {}), '()\n', (6172, 6174), False, 'from topple_data_loader import ToppleData, ToppleDataLoader\n'), ((14138, 14194), 'numpy.copy', 'np.copy', (['self.data.point_cloud[self.data.shape_idx[idx]]'], {}), '(self.data.point_cloud[self.data.shape_idx[idx]])\n', (14145, 14194), True, 'import numpy as np\n'), ((14274, 14300), 'numpy.reshape', 'np.reshape', (['scale', '(1, -1)'], {}), '(scale, (1, -1))\n', (14284, 14300), True, 'import numpy as np\n'), ((14354, 14403), 'numpy.random.normal', 'np.random.normal', (['(0.0)', 'self.perturb_std', 'pc.shape'], {}), '(0.0, self.perturb_std, pc.shape)\n', (14370, 14403), True, 'import numpy as np\n'), ((18650, 18670), 'numpy.radians', 'np.radians', (['init_rot'], {}), '(init_rot)\n', (18660, 18670), True, 'import numpy as np\n'), ((18683, 18742), 'transforms3d.euler.euler2mat', 'transforms3d.euler.euler2mat', (['zrot', 'xrot', 'yrot'], {'axes': '"""szxy"""'}), "(zrot, xrot, yrot, axes='szxy')\n", (18711, 18742), False, 'import transforms3d\n'), ((18805, 18820), 'numpy.dot', 'np.dot', (['pc', 'R.T'], {}), '(pc, R.T)\n', (18811, 18820), True, 'import numpy as np\n'), ((2119, 2139), 'pickle.dump', 'pickle.dump', (['self', 'f'], {}), '(self, f)\n', (2130, 2139), False, 'import pickle\n'), ((2325, 2339), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2336, 2339), False, 'import pickle\n'), ((6416, 6438), 'os.path.exists', 'exists', 
(['norm_info_file'], {}), '(norm_info_file)\n', (6422, 6438), False, 'from os.path import exists, realpath\n'), ((9912, 9945), 'numpy.random.shuffle', 'np.random.shuffle', (['self.iter_inds'], {}), '(self.iter_inds)\n', (9929, 9945), True, 'import numpy as np\n'), ((14520, 14578), 'numpy.random.choice', 'np.random.choice', (['pc.shape[0]', 'self.num_pts'], {'replace': '(False)'}), '(pc.shape[0], self.num_pts, replace=False)\n', (14536, 14578), True, 'import numpy as np\n'), ((15047, 15100), 'numpy.array', 'np.array', (['(lin_vel_list + [lin_vel_list[-1]] * pad_len)'], {}), '(lin_vel_list + [lin_vel_list[-1]] * pad_len)\n', (15055, 15100), True, 'import numpy as np\n'), ((15175, 15228), 'numpy.array', 'np.array', (['(ang_vel_list + [ang_vel_list[-1]] * pad_len)'], {}), '(ang_vel_list + [ang_vel_list[-1]] * pad_len)\n', (15183, 15228), True, 'import numpy as np\n'), ((15291, 15336), 'numpy.array', 'np.array', (['(pos_list + [pos_list[-1]] * pad_len)'], {}), '(pos_list + [pos_list[-1]] * pad_len)\n', (15299, 15336), True, 'import numpy as np\n'), ((15405, 15450), 'numpy.array', 'np.array', (['(rot_list + [rot_list[-1]] * pad_len)'], {}), '(rot_list + [rot_list[-1]] * pad_len)\n', (15413, 15450), True, 'import numpy as np\n'), ((15718, 15775), 'numpy.array', 'np.array', (['(euler_rot_list + [euler_rot_list[-1]] * pad_len)'], {}), '(euler_rot_list + [euler_rot_list[-1]] * pad_len)\n', (15726, 15775), True, 'import numpy as np\n'), ((17327, 17380), 'numpy.array', 'np.array', (['self.data.lin_vel[idx][start_step:end_step]'], {}), '(self.data.lin_vel[idx][start_step:end_step])\n', (17335, 17380), True, 'import numpy as np\n'), ((17407, 17460), 'numpy.array', 'np.array', (['self.data.ang_vel[idx][start_step:end_step]'], {}), '(self.data.ang_vel[idx][start_step:end_step])\n', (17415, 17460), True, 'import numpy as np\n'), ((17483, 17532), 'numpy.array', 'np.array', (['self.data.pos[idx][start_step:end_step]'], {}), '(self.data.pos[idx][start_step:end_step])\n', (17491, 17532), True, 'import numpy as np\n'), ((17555, 17610), 'numpy.array', 'np.array', (['self.data.total_rot[idx][start_step:end_step]'], {}), '(self.data.total_rot[idx][start_step:end_step])\n', (17563, 17610), True, 'import numpy as np\n'), ((17765, 17820), 'numpy.array', 'np.array', (['self.data.rot_euler[idx][start_step:end_step]'], {}), '(self.data.rot_euler[idx][start_step:end_step])\n', (17773, 17820), True, 'import numpy as np\n'), ((6038, 6050), 'os.path.exists', 'exists', (['root'], {}), '(root)\n', (6044, 6050), False, 'from os.path import exists, realpath\n'), ((15578, 15637), 'numpy.array', 'np.array', (['(delta_quat_list + [delta_quat_list[-1]] * pad_len)'], {}), '(delta_quat_list + [delta_quat_list[-1]] * pad_len)\n', (15586, 15637), True, 'import numpy as np\n'), ((15892, 15949), 'numpy.array', 'np.array', (['(delta_rot_list + [delta_rot_list[-1]] * pad_len)'], {}), '(delta_rot_list + [delta_rot_list[-1]] * pad_len)\n', (15900, 15949), True, 'import numpy as np\n'), ((16090, 16159), 'numpy.array', 'np.array', (['(delta_rot_split_list + [delta_rot_split_list[-1]] * pad_len)'], {}), '(delta_rot_split_list + [delta_rot_split_list[-1]] * pad_len)\n', (16098, 16159), True, 'import numpy as np\n'), ((16229, 16271), 'numpy.zeros', 'np.zeros', (['(total_steps + pad_len)'], {'dtype': 'int'}), '(total_steps + pad_len, dtype=int)\n', (16237, 16271), True, 'import numpy as np\n'), ((17680, 17736), 'numpy.array', 'np.array', (['self.data.delta_quat[idx][start_step:end_step]'], {}), '(self.data.delta_quat[idx][start_step:end_step])\n', 
(17688, 17736), True, 'import numpy as np\n'), ((17881, 17936), 'numpy.array', 'np.array', (['self.data.delta_rot[idx][start_step:end_step]'], {}), '(self.data.delta_rot[idx][start_step:end_step])\n', (17889, 17936), True, 'import numpy as np\n'), ((18009, 18070), 'numpy.array', 'np.array', (['self.data.delta_rot_split[idx][start_step:end_step]'], {}), '(self.data.delta_rot_split[idx][start_step:end_step])\n', (18017, 18070), True, 'import numpy as np\n'), ((18142, 18172), 'numpy.zeros', 'np.zeros', (['num_steps'], {'dtype': 'int'}), '(num_steps, dtype=int)\n', (18150, 18172), True, 'import numpy as np\n'), ((17154, 17194), 'numpy.random.randint', 'np.random.randint', (['(0)', '(max_start_step + 1)'], {}), '(0, max_start_step + 1)\n', (17171, 17194), True, 'import numpy as np\n'), ((17054, 17100), 'numpy.random.randint', 'np.random.randint', (['min_idx', '(max_start_step + 1)'], {}), '(min_idx, max_start_step + 1)\n', (17071, 17100), True, 'import numpy as np\n'), ((9681, 9702), 'numpy.linalg.norm', 'np.linalg.norm', (['x[:3]'], {}), '(x[:3])\n', (9695, 9702), True, 'import numpy as np\n')] |
import numpy as np
from operator import truediv
def AA_andEachClassAccuracy(confusion_matrix):
counter = confusion_matrix.shape[0]
list_diag = np.diag(confusion_matrix)
list_raw_sum = np.sum(confusion_matrix, axis=1)
each_acc = np.nan_to_num(truediv(list_diag, list_raw_sum))
average_acc = np.mean(each_acc)
return each_acc, average_acc | [
"operator.truediv",
"numpy.sum",
"numpy.mean",
"numpy.diag"
] | [((157, 182), 'numpy.diag', 'np.diag', (['confusion_matrix'], {}), '(confusion_matrix)\n', (164, 182), True, 'import numpy as np\n'), ((203, 235), 'numpy.sum', 'np.sum', (['confusion_matrix'], {'axis': '(1)'}), '(confusion_matrix, axis=1)\n', (209, 235), True, 'import numpy as np\n'), ((319, 336), 'numpy.mean', 'np.mean', (['each_acc'], {}), '(each_acc)\n', (326, 336), True, 'import numpy as np\n'), ((266, 298), 'operator.truediv', 'truediv', (['list_diag', 'list_raw_sum'], {}), '(list_diag, list_raw_sum)\n', (273, 298), False, 'from operator import truediv\n')] |
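A quick check of the helper above with a small, hypothetical confusion matrix (rows = true class, columns = predicted class):

import numpy as np

cm = np.array([[5, 1],
               [2, 2]])
each_acc, average_acc = AA_andEachClassAccuracy(cm)
print(each_acc)     # [0.83333333 0.5] -> per-class accuracy = diagonal / row sum
print(average_acc)  # ~0.667             -> their mean, i.e. the average accuracy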
#!/usr/bin/env python
#########################################################################################
#
# Apply transformations. This function is a wrapper for sct_WarpImageMultiTransform
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: <NAME>, <NAME>
# Modified: 2014-07-20
#
# About the license: see the file LICENSE.TXT
#########################################################################################
# TODO: display message at the end
# TODO: interpolation methods
from __future__ import division, absolute_import
import sys, io, os, time, functools
from msct_parser import Parser
import sct_utils as sct
import sct_convert
import sct_image
import spinalcordtoolbox.image as msct_image
from sct_crop_image import ImageCropper
class Param:
def __init__(self):
self.verbose = '1'
self.remove_temp_files = '1'
# PARSER
# ==========================================================================================
def get_parser():
# parser initialisation
parser = Parser(__file__)
parser.usage.set_description('Apply transformations. This function is a wrapper for antsApplyTransforms (ANTs).')
parser.add_option(name="-i",
type_value="file",
description="input image",
mandatory=True,
example="t2.nii.gz")
parser.add_option(name="-d",
type_value="file",
description="destination image",
mandatory=True,
example="out.nii.gz")
parser.add_option(name="-w",
type_value=[[','], "file"],
description="Transformation, which can be a warping field (nifti image) or an affine transformation matrix (text file).",
mandatory=True,
example="warp1.nii.gz,warp2.nii.gz")
parser.add_option(name="-crop",
type_value="multiple_choice",
description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
mandatory=False,
default_value='0',
example=['0', '1', '2'])
parser.add_option(name="-c",
type_value=None,
description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
mandatory=False,
deprecated_by='-crop')
parser.add_option(name="-o",
type_value="file_output",
description="registered source.",
mandatory=False,
default_value='',
example="dest.nii.gz")
parser.add_option(name="-x",
type_value="multiple_choice",
description="interpolation method",
mandatory=False,
default_value='spline',
example=['nn', 'linear', 'spline'])
parser.add_option(name="-r",
type_value="multiple_choice",
description="""Remove temporary files.""",
mandatory=False,
default_value='1',
example=['0', '1'])
parser.add_option(name="-v",
type_value="multiple_choice",
description="""Verbose.""",
mandatory=False,
default_value='1',
example=['0', '1', '2'])
return parser
class Transform:
def __init__(self, input_filename, warp, fname_dest, output_filename='', verbose=0, crop=0, interp='spline', remove_temp_files=1, debug=0):
self.input_filename = input_filename
if isinstance(warp, str):
self.warp_input = list([warp])
else:
self.warp_input = warp
self.fname_dest = fname_dest
self.output_filename = output_filename
self.interp = interp
self.crop = crop
self.verbose = verbose
self.remove_temp_files = remove_temp_files
self.debug = debug
def apply(self):
# Initialization
fname_src = self.input_filename # source image (moving)
fname_warp_list = self.warp_input # list of warping fields
fname_out = self.output_filename # output
fname_dest = self.fname_dest # destination image (fix)
verbose = self.verbose
remove_temp_files = self.remove_temp_files
crop_reference = self.crop # if = 1, put 0 everywhere around warping field, if = 2, real crop
interp = sct.get_interpolation('isct_antsApplyTransforms', self.interp)
# Parse list of warping fields
sct.printv('\nParse list of warping fields...', verbose)
use_inverse = []
fname_warp_list_invert = []
# fname_warp_list = fname_warp_list.replace(' ', '') # remove spaces
# fname_warp_list = fname_warp_list.split(",") # parse with comma
for idx_warp, path_warp in enumerate(fname_warp_list):
# Check if inverse matrix is specified with '-' at the beginning of file name
if path_warp.startswith("-"):
use_inverse.append('-i')
fname_warp_list[idx_warp] = path_warp[1:] # remove '-'
fname_warp_list_invert += [[use_inverse[idx_warp], fname_warp_list[idx_warp]]]
else:
use_inverse.append('')
fname_warp_list_invert += [[path_warp]]
path_warp = fname_warp_list[idx_warp]
if path_warp.endswith((".nii", ".nii.gz")) \
and msct_image.Image(fname_warp_list[idx_warp]).header.get_intent()[0] != 'vector':
raise ValueError("Displacement field in {} is invalid: should be encoded" \
" in a 5D file with vector intent code" \
" (see https://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h" \
.format(path_warp))
# need to check if last warping field is an affine transfo
isLastAffine = False
path_fname, file_fname, ext_fname = sct.extract_fname(fname_warp_list_invert[-1][-1])
if ext_fname in ['.txt', '.mat']:
isLastAffine = True
# check if destination file is 3d
if not sct.check_if_3d(fname_dest):
sct.printv('ERROR: Destination data must be 3d')
# N.B. Here we take the inverse of the warp list, because sct_WarpImageMultiTransform concatenates in the reverse order
fname_warp_list_invert.reverse()
fname_warp_list_invert = functools.reduce(lambda x,y: x+y, fname_warp_list_invert)
# Extract path, file and extension
path_src, file_src, ext_src = sct.extract_fname(fname_src)
path_dest, file_dest, ext_dest = sct.extract_fname(fname_dest)
# Get output folder and file name
if fname_out == '':
path_out = '' # output in user's current directory
file_out = file_src + '_reg'
ext_out = ext_src
fname_out = os.path.join(path_out, file_out + ext_out)
# Get dimensions of data
sct.printv('\nGet dimensions of data...', verbose)
img_src = msct_image.Image(fname_src)
nx, ny, nz, nt, px, py, pz, pt = img_src.dim
# nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_src)
sct.printv(' ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz) + ' x ' + str(nt), verbose)
# if 3d
if nt == 1:
# Apply transformation
sct.printv('\nApply transformation...', verbose)
if nz in [0, 1]:
dim = '2'
else:
dim = '3'
sct.run(['isct_antsApplyTransforms',
'-d', dim,
'-i', fname_src,
'-o', fname_out,
'-t',
] + fname_warp_list_invert + [
'-r', fname_dest,
] + interp, verbose=verbose, is_sct_binary=True)
# if 4d, loop across the T dimension
else:
path_tmp = sct.tmp_create(basename="apply_transfo", verbose=verbose)
# convert to nifti into temp folder
sct.printv('\nCopying input data to tmp folder and convert to nii...', verbose)
img_src.save(os.path.join(path_tmp, "data.nii"))
sct.copy(fname_dest, os.path.join(path_tmp, file_dest + ext_dest))
fname_warp_list_tmp = []
for fname_warp in fname_warp_list:
path_warp, file_warp, ext_warp = sct.extract_fname(fname_warp)
sct.copy(fname_warp, os.path.join(path_tmp, file_warp + ext_warp))
fname_warp_list_tmp.append(file_warp + ext_warp)
fname_warp_list_invert_tmp = fname_warp_list_tmp[::-1]
curdir = os.getcwd()
os.chdir(path_tmp)
# split along T dimension
sct.printv('\nSplit along T dimension...', verbose)
im_dat = msct_image.Image('data.nii')
im_header = im_dat.hdr
data_split_list = sct_image.split_data(im_dat, 3)
for im in data_split_list:
im.save()
# apply transfo
sct.printv('\nApply transformation to each 3D volume...', verbose)
for it in range(nt):
file_data_split = 'data_T' + str(it).zfill(4) + '.nii'
file_data_split_reg = 'data_reg_T' + str(it).zfill(4) + '.nii'
status, output = sct.run(['isct_antsApplyTransforms',
'-d', '3',
'-i', file_data_split,
'-o', file_data_split_reg,
'-t',
] + fname_warp_list_invert_tmp + [
'-r', file_dest + ext_dest,
] + interp, verbose, is_sct_binary=True)
# Merge files back
sct.printv('\nMerge file back...', verbose)
import glob
path_out, name_out, ext_out = sct.extract_fname(fname_out)
# im_list = [Image(file_name) for file_name in glob.glob('data_reg_T*.nii')]
# concat_data use to take a list of image in input, now takes a list of file names to open the files one by one (see issue #715)
fname_list = glob.glob('data_reg_T*.nii')
fname_list.sort()
im_out = sct_image.concat_data(fname_list, 3, im_header['pixdim'])
im_out.save(name_out + ext_out)
os.chdir(curdir)
sct.generate_output_file(os.path.join(path_tmp, name_out + ext_out), fname_out)
# Delete temporary folder if specified
if int(remove_temp_files):
sct.printv('\nRemove temporary files...', verbose)
sct.rmtree(path_tmp, verbose=verbose)
# 2. crop the resulting image using dimensions from the warping field
warping_field = fname_warp_list_invert[-1]
# if last warping field is an affine transfo, we need to compute the space of the concatenate warping field:
if isLastAffine:
sct.printv('WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation...', verbose, 'warning')
elif crop_reference == 1:
ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field, background=0).crop()
# sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field+' -b 0')
elif crop_reference == 2:
ImageCropper(input_file=fname_out, output_file=fname_out, ref=warping_field).crop()
# sct.run('sct_crop_image -i '+fname_out+' -o '+fname_out+' -ref '+warping_field)
sct.display_viewer_syntax([fname_dest, fname_out], verbose=verbose)
# MAIN
# ==========================================================================================
def main(args=None):
# check user arguments
if not args:
args = sys.argv[1:]
# Get parser info
parser = get_parser()
arguments = parser.parse(args)
input_filename = arguments["-i"]
fname_dest = arguments["-d"]
warp_filename = arguments["-w"]
transform = Transform(input_filename=input_filename, fname_dest=fname_dest, warp=warp_filename)
if "-crop" in arguments:
transform.crop = arguments["-crop"]
if "-o" in arguments:
transform.output_filename = arguments["-o"]
if "-x" in arguments:
transform.interp = arguments["-x"]
if "-r" in arguments:
transform.remove_temp_files = int(arguments["-r"])
transform.verbose = int(arguments.get('-v'))
sct.init_sct(log_level=transform.verbose, update=True) # Update log level
transform.apply()
# START PROGRAM
# ==========================================================================================
if __name__ == "__main__":
sct.init_sct()
# # initialize parameters
param = Param()
# call main function
main()
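
# Illustrative command line (hypothetical file names; options as declared in get_parser() above):
#   sct_apply_transfo -i t2.nii.gz -d dest.nii.gz -w warp1.nii.gz,warp2.nii.gz -x spline -o t2_reg.nii.gz
# Prefixing a warping field with '-' (e.g. "-w -warp1.nii.gz") makes Transform.apply() use its inverse.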
| [
"sct_utils.init_sct",
"sct_utils.printv",
"sct_utils.check_if_3d",
"sct_image.split_data",
"spinalcordtoolbox.image.Image",
"sct_crop_image.ImageCropper",
"glob.glob",
"functools.reduce",
"sct_image.concat_data",
"sct_utils.tmp_create",
"sct_utils.run",
"sct_utils.extract_fname",
"sct_utils.get_interpolation",
"sct_utils.display_viewer_syntax",
"os.path.join",
"os.getcwd",
"os.chdir",
"msct_parser.Parser",
"sct_utils.rmtree"
] | [((1140, 1156), 'msct_parser.Parser', 'Parser', (['__file__'], {}), '(__file__)\n', (1146, 1156), False, 'from msct_parser import Parser\n'), ((12832, 12886), 'sct_utils.init_sct', 'sct.init_sct', ([], {'log_level': 'transform.verbose', 'update': '(True)'}), '(log_level=transform.verbose, update=True)\n', (12844, 12886), True, 'import sct_utils as sct\n'), ((13072, 13086), 'sct_utils.init_sct', 'sct.init_sct', ([], {}), '()\n', (13084, 13086), True, 'import sct_utils as sct\n'), ((4810, 4872), 'sct_utils.get_interpolation', 'sct.get_interpolation', (['"""isct_antsApplyTransforms"""', 'self.interp'], {}), "('isct_antsApplyTransforms', self.interp)\n", (4831, 4872), True, 'import sct_utils as sct\n'), ((4921, 4980), 'sct_utils.printv', 'sct.printv', (['"""\nParse list of warping fields..."""', 'verbose'], {}), '("""\nParse list of warping fields...""", verbose)\n', (4931, 4980), True, 'import sct_utils as sct\n'), ((6325, 6374), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_warp_list_invert[-1][-1]'], {}), '(fname_warp_list_invert[-1][-1])\n', (6342, 6374), True, 'import sct_utils as sct\n'), ((6800, 6860), 'functools.reduce', 'functools.reduce', (['(lambda x, y: x + y)', 'fname_warp_list_invert'], {}), '(lambda x, y: x + y, fname_warp_list_invert)\n', (6816, 6860), False, 'import sys, io, os, time, functools\n'), ((6940, 6968), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_src'], {}), '(fname_src)\n', (6957, 6968), True, 'import sct_utils as sct\n'), ((7010, 7039), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_dest'], {}), '(fname_dest)\n', (7027, 7039), True, 'import sct_utils as sct\n'), ((7355, 7408), 'sct_utils.printv', 'sct.printv', (['"""\nGet dimensions of data..."""', 'verbose'], {}), '("""\nGet dimensions of data...""", verbose)\n', (7365, 7408), True, 'import sct_utils as sct\n'), ((7424, 7451), 'spinalcordtoolbox.image.Image', 'msct_image.Image', (['fname_src'], {}), '(fname_src)\n', (7440, 7451), True, 'import spinalcordtoolbox.image as msct_image\n'), ((11918, 11985), 'sct_utils.display_viewer_syntax', 'sct.display_viewer_syntax', (['[fname_dest, fname_out]'], {'verbose': 'verbose'}), '([fname_dest, fname_out], verbose=verbose)\n', (11943, 11985), True, 'import sct_utils as sct\n'), ((6507, 6534), 'sct_utils.check_if_3d', 'sct.check_if_3d', (['fname_dest'], {}), '(fname_dest)\n', (6522, 6534), True, 'import sct_utils as sct\n'), ((6548, 6596), 'sct_utils.printv', 'sct.printv', (['"""ERROR: Destination data must be 3d"""'], {}), "('ERROR: Destination data must be 3d')\n", (6558, 6596), True, 'import sct_utils as sct\n'), ((7270, 7312), 'os.path.join', 'os.path.join', (['path_out', '(file_out + ext_out)'], {}), '(path_out, file_out + ext_out)\n', (7282, 7312), False, 'import sys, io, os, time, functools\n'), ((7759, 7810), 'sct_utils.printv', 'sct.printv', (['"""\nApply transformation..."""', 'verbose'], {}), '("""\nApply transformation...""", verbose)\n', (7769, 7810), True, 'import sct_utils as sct\n'), ((7919, 8107), 'sct_utils.run', 'sct.run', (["(['isct_antsApplyTransforms', '-d', dim, '-i', fname_src, '-o', fname_out,\n '-t'] + fname_warp_list_invert + ['-r', fname_dest] + interp)"], {'verbose': 'verbose', 'is_sct_binary': '(True)'}), "(['isct_antsApplyTransforms', '-d', dim, '-i', fname_src, '-o',\n fname_out, '-t'] + fname_warp_list_invert + ['-r', fname_dest] + interp,\n verbose=verbose, is_sct_binary=True)\n", (7926, 8107), True, 'import sct_utils as sct\n'), ((8283, 8340), 'sct_utils.tmp_create', 'sct.tmp_create', ([], {'basename': 
'"""apply_transfo"""', 'verbose': 'verbose'}), "(basename='apply_transfo', verbose=verbose)\n", (8297, 8340), True, 'import sct_utils as sct\n'), ((8402, 8488), 'sct_utils.printv', 'sct.printv', (['"""\nCopying input data to tmp folder and convert to nii..."""', 'verbose'], {}), '("""\nCopying input data to tmp folder and convert to nii...""",\n verbose)\n', (8412, 8488), True, 'import sct_utils as sct\n'), ((9022, 9033), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (9031, 9033), False, 'import sys, io, os, time, functools\n'), ((9046, 9064), 'os.chdir', 'os.chdir', (['path_tmp'], {}), '(path_tmp)\n', (9054, 9064), False, 'import sys, io, os, time, functools\n'), ((9116, 9170), 'sct_utils.printv', 'sct.printv', (['"""\nSplit along T dimension..."""', 'verbose'], {}), '("""\nSplit along T dimension...""", verbose)\n', (9126, 9170), True, 'import sct_utils as sct\n'), ((9190, 9218), 'spinalcordtoolbox.image.Image', 'msct_image.Image', (['"""data.nii"""'], {}), "('data.nii')\n", (9206, 9218), True, 'import spinalcordtoolbox.image as msct_image\n'), ((9284, 9315), 'sct_image.split_data', 'sct_image.split_data', (['im_dat', '(3)'], {}), '(im_dat, 3)\n', (9304, 9315), False, 'import sct_image\n'), ((9422, 9491), 'sct_utils.printv', 'sct.printv', (['"""\nApply transformation to each 3D volume..."""', 'verbose'], {}), '("""\nApply transformation to each 3D volume...""", verbose)\n', (9432, 9491), True, 'import sct_utils as sct\n'), ((10082, 10128), 'sct_utils.printv', 'sct.printv', (['"""\nMerge file back..."""', 'verbose'], {}), '("""\nMerge file back...""", verbose)\n', (10092, 10128), True, 'import sct_utils as sct\n'), ((10192, 10220), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_out'], {}), '(fname_out)\n', (10209, 10220), True, 'import sct_utils as sct\n'), ((10476, 10504), 'glob.glob', 'glob.glob', (['"""data_reg_T*.nii"""'], {}), "('data_reg_T*.nii')\n", (10485, 10504), False, 'import glob\n'), ((10556, 10613), 'sct_image.concat_data', 'sct_image.concat_data', (['fname_list', '(3)', "im_header['pixdim']"], {}), "(fname_list, 3, im_header['pixdim'])\n", (10577, 10613), False, 'import sct_image\n'), ((10671, 10687), 'os.chdir', 'os.chdir', (['curdir'], {}), '(curdir)\n', (10679, 10687), False, 'import sys, io, os, time, functools\n'), ((11275, 11448), 'sct_utils.printv', 'sct.printv', (['"""WARNING: the resulting image could have wrong apparent results. You should use an affine transformation as last transformation..."""', 'verbose', '"""warning"""'], {}), "(\n 'WARNING: the resulting image could have wrong apparent results. 
You should use an affine transformation as last transformation...'\n , verbose, 'warning')\n", (11285, 11448), True, 'import sct_utils as sct\n'), ((8507, 8541), 'os.path.join', 'os.path.join', (['path_tmp', '"""data.nii"""'], {}), "(path_tmp, 'data.nii')\n", (8519, 8541), False, 'import sys, io, os, time, functools\n'), ((8576, 8620), 'os.path.join', 'os.path.join', (['path_tmp', '(file_dest + ext_dest)'], {}), '(path_tmp, file_dest + ext_dest)\n', (8588, 8620), False, 'import sys, io, os, time, functools\n'), ((8755, 8784), 'sct_utils.extract_fname', 'sct.extract_fname', (['fname_warp'], {}), '(fname_warp)\n', (8772, 8784), True, 'import sct_utils as sct\n'), ((9706, 9917), 'sct_utils.run', 'sct.run', (["(['isct_antsApplyTransforms', '-d', '3', '-i', file_data_split, '-o',\n file_data_split_reg, '-t'] + fname_warp_list_invert_tmp + ['-r', \n file_dest + ext_dest] + interp)", 'verbose'], {'is_sct_binary': '(True)'}), "(['isct_antsApplyTransforms', '-d', '3', '-i', file_data_split, '-o',\n file_data_split_reg, '-t'] + fname_warp_list_invert_tmp + ['-r', \n file_dest + ext_dest] + interp, verbose, is_sct_binary=True)\n", (9713, 9917), True, 'import sct_utils as sct\n'), ((10725, 10767), 'os.path.join', 'os.path.join', (['path_tmp', '(name_out + ext_out)'], {}), '(path_tmp, name_out + ext_out)\n', (10737, 10767), False, 'import sys, io, os, time, functools\n'), ((10886, 10939), 'sct_utils.printv', 'sct.printv', (['"""\nRemove temporary files..."""', 'verbose'], {}), '("""\nRemove temporary files...""", verbose)\n', (10896, 10939), True, 'import sct_utils as sct\n'), ((10953, 10990), 'sct_utils.rmtree', 'sct.rmtree', (['path_tmp'], {'verbose': 'verbose'}), '(path_tmp, verbose=verbose)\n', (10963, 10990), True, 'import sct_utils as sct\n'), ((8822, 8866), 'os.path.join', 'os.path.join', (['path_tmp', '(file_warp + ext_warp)'], {}), '(path_tmp, file_warp + ext_warp)\n', (8834, 8866), False, 'import sys, io, os, time, functools\n'), ((11485, 11579), 'sct_crop_image.ImageCropper', 'ImageCropper', ([], {'input_file': 'fname_out', 'output_file': 'fname_out', 'ref': 'warping_field', 'background': '(0)'}), '(input_file=fname_out, output_file=fname_out, ref=warping_field,\n background=0)\n', (11497, 11579), False, 'from sct_crop_image import ImageCropper\n'), ((11731, 11807), 'sct_crop_image.ImageCropper', 'ImageCropper', ([], {'input_file': 'fname_out', 'output_file': 'fname_out', 'ref': 'warping_field'}), '(input_file=fname_out, output_file=fname_out, ref=warping_field)\n', (11743, 11807), False, 'from sct_crop_image import ImageCropper\n'), ((5832, 5875), 'spinalcordtoolbox.image.Image', 'msct_image.Image', (['fname_warp_list[idx_warp]'], {}), '(fname_warp_list[idx_warp])\n', (5848, 5875), True, 'import spinalcordtoolbox.image as msct_image\n')] |
# Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests methods in plugin_base.py
"""
import copy
import sys
sys.path[0:0] = [""]
from mongo_connector.plugins.plugin_base import PluginBase
from tests import unittest
from tests.plugins.helpers import (BAD_PLUGIN_CONFIGS, get_test_namespace)
class TestPluginBase(unittest.TestCase):
""" Tests the utils
"""
def setUp(self):
"""Initialize test instance.
"""
self.namespace = get_test_namespace()
def test_name(self):
"""Test name.
"""
configs = self.namespace.plugins[0]
for cfg in configs:
obj = PluginBase(cfg)
self.assertEqual(cfg['pluginName'], obj.name())
for cfg in BAD_PLUGIN_CONFIGS:
obj = PluginBase(cfg)
self.assertEqual(obj.name().index('generated'), 0)
def test_info(self):
"""Test info.
"""
configs = self.namespace.plugins[0]
for cfg in configs:
obj = PluginBase(cfg)
self.assertEqual(cfg['config'], obj.info())
for cfg in BAD_PLUGIN_CONFIGS:
obj = PluginBase(cfg)
self.assertEqual(obj.info(), {})
def _test_not_implemented_method_by_name(self, name):
"""Test not implemented method by name.
"""
configs = copy.deepcopy(self.namespace.plugins)
configs.extend(BAD_PLUGIN_CONFIGS)
for cfg in configs:
obj = PluginBase(cfg)
try:
method = getattr(obj, name)
if not method or not callable(method):
raise KeyError
method()
except NotImplementedError as exc:
pass
return True
def test_invoke(self):
"""Test invoke.
"""
flag = self._test_not_implemented_method_by_name('invoke')
self.assertEqual(flag, True)
def test_bulk_invoke(self):
"""Test bulk_invoke.
"""
        # bulk_invoke is actually implemented, but it calls invoke() in a loop,
        # which raises NotImplementedError.
flag = self._test_not_implemented_method_by_name('bulk_invoke')
self.assertEqual(flag, True)
def test_commit(self):
"""Test commit.
"""
flag = self._test_not_implemented_method_by_name('commit')
self.assertEqual(flag, True)
def test_stop(self):
"""Test stop.
"""
flag = self._test_not_implemented_method_by_name('stop')
self.assertEqual(flag, True)
if __name__ == '__main__':
unittest.main()
| [
"tests.unittest.main",
"tests.plugins.helpers.get_test_namespace",
"mongo_connector.plugins.plugin_base.PluginBase",
"copy.deepcopy"
] | [((3116, 3131), 'tests.unittest.main', 'unittest.main', ([], {}), '()\n', (3129, 3131), False, 'from tests import unittest\n'), ((996, 1016), 'tests.plugins.helpers.get_test_namespace', 'get_test_namespace', ([], {}), '()\n', (1014, 1016), False, 'from tests.plugins.helpers import BAD_PLUGIN_CONFIGS, get_test_namespace\n'), ((1861, 1898), 'copy.deepcopy', 'copy.deepcopy', (['self.namespace.plugins'], {}), '(self.namespace.plugins)\n', (1874, 1898), False, 'import copy\n'), ((1168, 1183), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1178, 1183), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1302, 1317), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1312, 1317), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1532, 1547), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1542, 1547), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1662, 1677), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1672, 1677), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n'), ((1988, 2003), 'mongo_connector.plugins.plugin_base.PluginBase', 'PluginBase', (['cfg'], {}), '(cfg)\n', (1998, 2003), False, 'from mongo_connector.plugins.plugin_base import PluginBase\n')] |
import logging
import sys
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.fields.related import ForeignObject
from django.utils.encoding import python_2_unicode_compatible
try:
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor
except ImportError:
from django.db.models.fields.related import ReverseSingleRelatedObjectDescriptor as ForwardManyToOneDescriptor
logger = logging.getLogger(__name__)
if sys.version > '3':
long = int
basestring = (str, bytes)
unicode = str
__all__ = ['Country', 'State', 'Locality', 'Address', 'AddressField']
class InconsistentDictError(Exception):
pass
def _to_python(value):
raw = value.get('raw', '')
country = value.get('country', '')
country_code = value.get('country_code', '')
state = value.get('state', '')
state_code = value.get('state_code', '')
locality = value.get('locality', '')
sublocality = value.get('sublocality', '')
postal_code = value.get('postal_code', '')
street_number = value.get('street_number', '')
route = value.get('route', '')
formatted = value.get('formatted', '')
latitude = value.get('latitude', None)
longitude = value.get('longitude', None)
# If there is no value (empty raw) then return None.
if not raw:
return None
# Fix issue with NYC boroughs (https://code.google.com/p/gmaps-api-issues/issues/detail?id=635)
if not locality and sublocality:
locality = sublocality
# If we have an inconsistent set of value bail out now.
if (country or state or locality) and not (country and state and locality):
raise InconsistentDictError
# Handle the country.
try:
country_obj = Country.objects.get(name=country)
except Country.DoesNotExist:
if country:
if len(country_code) > Country._meta.get_field('code').max_length:
if country_code != country:
raise ValueError('Invalid country code (too long): %s' % country_code)
country_code = ''
country_obj = Country.objects.create(name=country, code=country_code)
else:
country_obj = None
# Handle the state.
try:
state_obj = State.objects.get(name=state, country=country_obj)
except State.DoesNotExist:
if state:
if len(state_code) > State._meta.get_field('code').max_length:
if state_code != state:
raise ValueError('Invalid state code (too long): %s' % state_code)
state_code = ''
state_obj = State.objects.create(name=state, code=state_code, country=country_obj)
else:
state_obj = None
# Handle the locality.
try:
locality_obj = Locality.objects.get(name=locality, postal_code=postal_code, state=state_obj)
except Locality.DoesNotExist:
if locality:
locality_obj = Locality.objects.create(name=locality, postal_code=postal_code, state=state_obj)
else:
locality_obj = None
# Handle the address.
try:
if not (street_number or route or locality):
address_obj = Address.objects.get(raw=raw)
else:
address_obj = Address.objects.get(
street_number=street_number,
route=route,
locality=locality_obj
)
except Address.DoesNotExist:
address_obj = Address(
street_number=street_number,
route=route,
raw=raw,
locality=locality_obj,
formatted=formatted,
latitude=latitude,
longitude=longitude,
)
# If "formatted" is empty try to construct it from other values.
if not address_obj.formatted:
address_obj.formatted = unicode(address_obj)
# Need to save.
address_obj.save()
# Done.
return address_obj
##
# Convert a dictionary to an address.
##
def to_python(value):
# Keep `None`s.
if value is None:
return None
# Is it already an address object?
if isinstance(value, Address):
return value
    # If we have an integer, assume it is a model primary key; Django passes primary
    # keys around when resolving foreign key assignments.
elif isinstance(value, (int, long)):
return value
# A string is considered a raw value.
elif isinstance(value, basestring):
obj = Address(raw=value)
obj.save()
return obj
# A dictionary of named address components.
elif isinstance(value, dict):
# Attempt a conversion.
try:
return _to_python(value)
except InconsistentDictError:
return Address.objects.create(raw=value['raw'])
# Not in any of the formats I recognise.
raise ValidationError('Invalid address value.')
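
# Illustrative only (made-up values): the kind of component dict to_python() accepts;
# the keys mirror those read by _to_python() above.
#   to_python({
#       'raw': '123 Example St, Springfield, IL, USA',
#       'country': 'USA', 'country_code': 'US',
#       'state': 'Illinois', 'state_code': 'IL',
#       'locality': 'Springfield', 'postal_code': '62701',
#       'street_number': '123', 'route': 'Example St',
#       'formatted': '123 Example St, Springfield, IL 62701, USA',
#       'latitude': 39.78, 'longitude': -89.65,
#   })  # -> a saved Address instance (requires a configured database)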
##
# A country.
##
@python_2_unicode_compatible
class Country(models.Model):
name = models.CharField(max_length=40, unique=True, blank=True)
code = models.CharField(max_length=2, blank=True) # not unique as there are duplicates (IT)
class Meta:
verbose_name_plural = 'Countries'
ordering = ('name',)
def __str__(self):
return '%s' % (self.name or self.code)
##
# A state. Google refers to this as `administration_level_1`.
##
@python_2_unicode_compatible
class State(models.Model):
name = models.CharField(max_length=165, blank=True)
code = models.CharField(max_length=3, blank=True)
country = models.ForeignKey(Country, on_delete=models.CASCADE, related_name='states')
class Meta:
unique_together = ('name', 'country')
ordering = ('country', 'name')
def __str__(self):
txt = self.to_str()
country = '%s' % self.country
if country and txt:
txt += ', '
txt += country
return txt
def to_str(self):
return '%s' % (self.name or self.code)
##
# A locality (suburb).
##
@python_2_unicode_compatible
class Locality(models.Model):
name = models.CharField(max_length=165, blank=True)
postal_code = models.CharField(max_length=10, blank=True)
state = models.ForeignKey(State, on_delete=models.CASCADE, related_name='localities')
class Meta:
verbose_name_plural = 'Localities'
unique_together = ('name', 'postal_code', 'state')
ordering = ('state', 'name')
def __str__(self):
txt = '%s' % self.name
state = self.state.to_str() if self.state else ''
if txt and state:
txt += ', '
txt += state
if self.postal_code:
txt += ' %s' % self.postal_code
cntry = '%s' % (self.state.country if self.state and self.state.country else '')
if cntry:
txt += ', %s' % cntry
return txt
##
# An address. If for any reason we are unable to find a matching
# decomposed address we will store the raw address string in `raw`.
##
@python_2_unicode_compatible
class Address(models.Model):
street_number = models.CharField(max_length=20, blank=True)
route = models.CharField(max_length=100, blank=True)
locality = models.ForeignKey(Locality, on_delete=models.CASCADE, related_name='addresses', blank=True, null=True)
raw = models.CharField(max_length=200)
formatted = models.CharField(max_length=200, blank=True)
latitude = models.FloatField(blank=True, null=True)
longitude = models.FloatField(blank=True, null=True)
class Meta:
verbose_name_plural = 'Addresses'
ordering = ('locality', 'route', 'street_number')
# unique_together = ('locality', 'route', 'street_number')
def __str__(self):
if self.formatted != '':
txt = '%s' % self.formatted
elif self.locality:
txt = ''
if self.street_number:
txt = '%s' % self.street_number
if self.route:
if txt:
txt += ' %s' % self.route
locality = '%s' % self.locality
if txt and locality:
txt += ', '
txt += locality
else:
txt = '%s' % self.raw
return txt
def clean(self):
if not self.raw:
raise ValidationError('Addresses may not have a blank `raw` field.')
def as_dict(self):
ad = dict(
street_number=self.street_number,
route=self.route,
raw=self.raw,
formatted=self.formatted,
latitude=self.latitude if self.latitude else '',
longitude=self.longitude if self.longitude else '',
)
if self.locality:
ad['locality'] = self.locality.name
ad['postal_code'] = self.locality.postal_code
if self.locality.state:
ad['state'] = self.locality.state.name
ad['state_code'] = self.locality.state.code
if self.locality.state.country:
ad['country'] = self.locality.state.country.name
ad['country_code'] = self.locality.state.country.code
return ad
class AddressDescriptor(ForwardManyToOneDescriptor):
def __set__(self, inst, value):
super(AddressDescriptor, self).__set__(inst, to_python(value))
##
# A field for addresses in other models.
##
class AddressField(models.ForeignKey):
description = 'An address'
def __init__(self, *args, **kwargs):
kwargs['to'] = 'address.Address'
super(AddressField, self).__init__(*args, **kwargs)
def contribute_to_class(self, cls, name, virtual_only=False):
from address.compat import compat_contribute_to_class
compat_contribute_to_class(self, cls, name, virtual_only)
# super(ForeignObject, self).contribute_to_class(cls, name, virtual_only=virtual_only)
setattr(cls, self.name, AddressDescriptor(self))
# def deconstruct(self):
# name, path, args, kwargs = super(AddressField, self).deconstruct()
# del kwargs['to']
# return name, path, args, kwargs
def formfield(self, **kwargs):
from .forms import AddressField as AddressFormField
defaults = dict(form_class=AddressFormField)
defaults.update(kwargs)
return super(AddressField, self).formfield(**defaults)
| [
"logging.getLogger",
"django.db.models.FloatField",
"django.db.models.ForeignKey",
"address.compat.compat_contribute_to_class",
"django.core.exceptions.ValidationError",
"django.db.models.CharField"
] | [((465, 492), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (482, 492), False, 'import logging\n'), ((4865, 4906), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Invalid address value."""'], {}), "('Invalid address value.')\n", (4880, 4906), False, 'from django.core.exceptions import ValidationError\n'), ((4998, 5054), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)', 'blank': '(True)'}), '(max_length=40, unique=True, blank=True)\n', (5014, 5054), False, 'from django.db import models\n'), ((5066, 5108), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'blank': '(True)'}), '(max_length=2, blank=True)\n', (5082, 5108), False, 'from django.db import models\n'), ((5449, 5493), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(165)', 'blank': '(True)'}), '(max_length=165, blank=True)\n', (5465, 5493), False, 'from django.db import models\n'), ((5505, 5547), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'blank': '(True)'}), '(max_length=3, blank=True)\n', (5521, 5547), False, 'from django.db import models\n'), ((5562, 5637), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Country'], {'on_delete': 'models.CASCADE', 'related_name': '"""states"""'}), "(Country, on_delete=models.CASCADE, related_name='states')\n", (5579, 5637), False, 'from django.db import models\n'), ((6096, 6140), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(165)', 'blank': '(True)'}), '(max_length=165, blank=True)\n', (6112, 6140), False, 'from django.db import models\n'), ((6159, 6202), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)'}), '(max_length=10, blank=True)\n', (6175, 6202), False, 'from django.db import models\n'), ((6215, 6292), 'django.db.models.ForeignKey', 'models.ForeignKey', (['State'], {'on_delete': 'models.CASCADE', 'related_name': '"""localities"""'}), "(State, on_delete=models.CASCADE, related_name='localities')\n", (6232, 6292), False, 'from django.db import models\n'), ((7086, 7129), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'blank': '(True)'}), '(max_length=20, blank=True)\n', (7102, 7129), False, 'from django.db import models\n'), ((7142, 7186), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (7158, 7186), False, 'from django.db import models\n'), ((7202, 7309), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Locality'], {'on_delete': 'models.CASCADE', 'related_name': '"""addresses"""', 'blank': '(True)', 'null': '(True)'}), "(Locality, on_delete=models.CASCADE, related_name=\n 'addresses', blank=True, null=True)\n", (7219, 7309), False, 'from django.db import models\n'), ((7315, 7347), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (7331, 7347), False, 'from django.db import models\n'), ((7364, 7408), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)'}), '(max_length=200, blank=True)\n', (7380, 7408), False, 'from django.db import models\n'), ((7424, 7464), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7441, 7464), False, 'from django.db import models\n'), ((7481, 7521), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': 
'(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7498, 7521), False, 'from django.db import models\n'), ((9734, 9791), 'address.compat.compat_contribute_to_class', 'compat_contribute_to_class', (['self', 'cls', 'name', 'virtual_only'], {}), '(self, cls, name, virtual_only)\n', (9760, 9791), False, 'from address.compat import compat_contribute_to_class\n'), ((8297, 8359), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Addresses may not have a blank `raw` field."""'], {}), "('Addresses may not have a blank `raw` field.')\n", (8312, 8359), False, 'from django.core.exceptions import ValidationError\n')] |
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import sys
import numpy as np
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.colors import BoundaryNorm
def plot_images(
num_sample_perclass=10, x=None, y=None, labels=None, title=None, cmap=None
):
grid_x = num_sample_perclass + 1
grid_y = len(labels)
plt.figure(figsize=(grid_y, grid_x))
gs1 = gridspec.GridSpec(grid_y, grid_x)
gs1.update(wspace=0.025, hspace=0.05)
font = {"family": "serif", "weight": "bold"}
plt.suptitle(title)
j = 0
for i in range(grid_y):
        # the first cell of each row shows the class label text, hence the leading dummy index 0
        idxs = [0] + list(np.where(y == list(labels.keys())[i])[0][: grid_x - 1])
label = labels[list(labels.keys())[i]]
for k, idx in enumerate(idxs):
ax1 = plt.subplot(gs1[j])
if k == 0:
ax1.text(0, 0.25, label, ha="right", wrap=True, fontdict=font)
else:
ax1.imshow(x[idx, ...], cmap=cmap)
plt.axis("off")
j += 1
plt.show()
def plot_2D(x, y, title, axis="off"):
BLUE, ORANGE = "#57B5E8", "#E69E00"
plt.figure(figsize=(8, 8))
plt.scatter(
x[:, 0],
x[:, 1],
s=18,
facecolors="none",
edgecolors=np.array([BLUE, ORANGE])[y],
)
if axis == "off":
plt.axis("off")
elif axis == "on":
plt.xlabel("x_1")
plt.ylabel("x_2")
else:
print("incorrect values for arg: axis (on or off only)")
sys.exit()
plt.title(title)
plt.show()
def plot_dna(df, label):
matrix = df.values
col_names = df.columns
rows = np.arange(matrix.shape[0])
cols = np.arange(matrix.shape[1])
np.random.seed(3)
np.random.shuffle(rows)
np.random.shuffle(cols)
matrix = matrix[:, cols[:100]].T
matrix = matrix[:, rows]
col_names = col_names[cols[:100]]
label = label[rows]
    mat_min = np.min(matrix)
    mat_max = np.max(matrix)
    # symmetrise the colour range around zero: [-m, m] with m = max(|min|, max)
    mat_min = -np.max([np.abs(mat_min), mat_max])
    mat_max = np.max([np.abs(mat_min), mat_max])
    # mask near-zero entries so they are drawn in the colormap's "bad" colour (black, set below)
    matrix = np.ma.masked_where(np.abs(matrix) <= 0.3, matrix)
plt.figure(figsize=(6, 12))
cmap_list = ["red", "darkred", "green", "lime", "lightgreen"]
cmap = LinearSegmentedColormap.from_list("Custom cmap", cmap_list, len(cmap_list))
cmap.set_bad("black")
bounds = np.linspace(
mat_min + 6, mat_max - 6, 5
) # np.arange(mat_min + 6, mat_max - 6, 0.1)
idx = np.searchsorted(bounds, 0)
bounds = np.insert(bounds, idx, 0)
norm = BoundaryNorm(bounds, cmap.N)
plt.imshow(matrix, cmap=cmap, norm=norm)
plt.xticks(np.arange(len(label)))
plt.yticks(np.arange(len(col_names)))
ax = plt.gca()
ax.set_xticklabels(label, rotation=90)
ax.set_yticklabels(col_names)
ax.yaxis.tick_right()
ax.tick_params(axis=u"both", which=u"both", labelsize=5, length=0.0)
plt.tight_layout()
fig = plt.gcf()
# fig.set_size_inches((6, 12), forward=False)
# fig.savefig("img/dna.png", dpi=200)
plt.show()
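
# Minimal usage sketch (synthetic data; assumes an interactive matplotlib backend):
if __name__ == "__main__":
    np.random.seed(0)
    x_demo = np.random.randn(200, 2)
    y_demo = np.random.randint(0, 2, size=200)
    plot_2D(x_demo, y_demo, title="Synthetic 2D scatter", axis="on")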
| [
"matplotlib.pyplot.ylabel",
"numpy.array",
"sys.exit",
"numpy.arange",
"matplotlib.pyplot.imshow",
"numpy.searchsorted",
"matplotlib.pyplot.xlabel",
"numpy.max",
"matplotlib.gridspec.GridSpec",
"numpy.linspace",
"numpy.random.seed",
"numpy.min",
"matplotlib.pyplot.axis",
"numpy.abs",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.title",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.show",
"numpy.insert",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.tight_layout",
"matplotlib.colors.BoundaryNorm",
"matplotlib.pyplot.subplot",
"numpy.random.shuffle"
] | [((366, 402), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(grid_y, grid_x)'}), '(figsize=(grid_y, grid_x))\n', (376, 402), True, 'import matplotlib.pyplot as plt\n'), ((413, 446), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['grid_y', 'grid_x'], {}), '(grid_y, grid_x)\n', (430, 446), True, 'import matplotlib.gridspec as gridspec\n'), ((544, 563), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['title'], {}), '(title)\n', (556, 563), True, 'import matplotlib.pyplot as plt\n'), ((1035, 1045), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1043, 1045), True, 'import matplotlib.pyplot as plt\n'), ((1130, 1156), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (1140, 1156), True, 'import matplotlib.pyplot as plt\n'), ((1523, 1539), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (1532, 1539), True, 'import matplotlib.pyplot as plt\n'), ((1544, 1554), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1552, 1554), True, 'import matplotlib.pyplot as plt\n'), ((1643, 1669), 'numpy.arange', 'np.arange', (['matrix.shape[0]'], {}), '(matrix.shape[0])\n', (1652, 1669), True, 'import numpy as np\n'), ((1681, 1707), 'numpy.arange', 'np.arange', (['matrix.shape[1]'], {}), '(matrix.shape[1])\n', (1690, 1707), True, 'import numpy as np\n'), ((1712, 1729), 'numpy.random.seed', 'np.random.seed', (['(3)'], {}), '(3)\n', (1726, 1729), True, 'import numpy as np\n'), ((1734, 1757), 'numpy.random.shuffle', 'np.random.shuffle', (['rows'], {}), '(rows)\n', (1751, 1757), True, 'import numpy as np\n'), ((1762, 1785), 'numpy.random.shuffle', 'np.random.shuffle', (['cols'], {}), '(cols)\n', (1779, 1785), True, 'import numpy as np\n'), ((1929, 1943), 'numpy.min', 'np.min', (['matrix'], {}), '(matrix)\n', (1935, 1943), True, 'import numpy as np\n'), ((1958, 1972), 'numpy.max', 'np.max', (['matrix'], {}), '(matrix)\n', (1964, 1972), True, 'import numpy as np\n'), ((2140, 2167), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 12)'}), '(figsize=(6, 12))\n', (2150, 2167), True, 'import matplotlib.pyplot as plt\n'), ((2361, 2401), 'numpy.linspace', 'np.linspace', (['(mat_min + 6)', '(mat_max - 6)', '(5)'], {}), '(mat_min + 6, mat_max - 6, 5)\n', (2372, 2401), True, 'import numpy as np\n'), ((2470, 2496), 'numpy.searchsorted', 'np.searchsorted', (['bounds', '(0)'], {}), '(bounds, 0)\n', (2485, 2496), True, 'import numpy as np\n'), ((2511, 2536), 'numpy.insert', 'np.insert', (['bounds', 'idx', '(0)'], {}), '(bounds, idx, 0)\n', (2520, 2536), True, 'import numpy as np\n'), ((2548, 2576), 'matplotlib.colors.BoundaryNorm', 'BoundaryNorm', (['bounds', 'cmap.N'], {}), '(bounds, cmap.N)\n', (2560, 2576), False, 'from matplotlib.colors import BoundaryNorm\n'), ((2582, 2622), 'matplotlib.pyplot.imshow', 'plt.imshow', (['matrix'], {'cmap': 'cmap', 'norm': 'norm'}), '(matrix, cmap=cmap, norm=norm)\n', (2592, 2622), True, 'import matplotlib.pyplot as plt\n'), ((2712, 2721), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2719, 2721), True, 'import matplotlib.pyplot as plt\n'), ((2902, 2920), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2918, 2920), True, 'import matplotlib.pyplot as plt\n'), ((2931, 2940), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (2938, 2940), True, 'import matplotlib.pyplot as plt\n'), ((3037, 3047), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3045, 3047), True, 'import matplotlib.pyplot as plt\n'), ((1333, 1348), 
'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1341, 1348), True, 'import matplotlib.pyplot as plt\n'), ((789, 808), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs1[j]'], {}), '(gs1[j])\n', (800, 808), True, 'import matplotlib.pyplot as plt\n'), ((995, 1010), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1003, 1010), True, 'import matplotlib.pyplot as plt\n'), ((1380, 1397), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x_1"""'], {}), "('x_1')\n", (1390, 1397), True, 'import matplotlib.pyplot as plt\n'), ((1406, 1423), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""x_2"""'], {}), "('x_2')\n", (1416, 1423), True, 'import matplotlib.pyplot as plt\n'), ((1507, 1517), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1515, 1517), False, 'import sys\n'), ((2045, 2060), 'numpy.abs', 'np.abs', (['mat_min'], {}), '(mat_min)\n', (2051, 2060), True, 'import numpy as np\n'), ((2104, 2118), 'numpy.abs', 'np.abs', (['matrix'], {}), '(matrix)\n', (2110, 2118), True, 'import numpy as np\n'), ((1268, 1292), 'numpy.array', 'np.array', (['[BLUE, ORANGE]'], {}), '([BLUE, ORANGE])\n', (1276, 1292), True, 'import numpy as np\n'), ((1996, 2011), 'numpy.abs', 'np.abs', (['mat_min'], {}), '(mat_min)\n', (2002, 2011), True, 'import numpy as np\n')] |
import logging
import os
import random
from abc import ABC, abstractmethod
from random import randint
from time import sleep, strftime
HOSTNAME = ['defence-first.rs', 'defence-first.de', 'defence-first.ru']
HOSTIP = ['172.16.17.32', '192.168.127.12', '172.16.58.3']
SOURCEIP = ['192.168.3.11', '192.168.127.12', '172.16.58.3', '172.16.58.3', '172.16.17.32']
USERNAMES = ['user1', 'user2', 'user3', 'user4', 'user5']
FACILITY = ['KERN', 'USER', 'MAIL', 'DAEMON', 'AUTH', 'SYSLOG', 'LPR', 'NEWS',
'UUCP', 'CLOCK_DAEMON', 'AUTHPRIV', 'FTP', 'NTP', 'LOGAUDIT', 'LOGALERT',
'CRON', 'LOCAL0', 'LOCAL1', 'LOCAL2', 'LOCAL3', 'LOCAL4', 'LOCAL5', 'LOCAL6', 'LOCAL7']
SEVERITY = ['DEBUG', 'INFORMATIONAL', 'NOTICE', 'WARNING', 'ERROR', 'CRITICAL', 'ALERT', 'EMERGENCY']
FORMAT = '%(asctime)s %(hostname)s-Application-%(hostip)s-%(sourceip)s %(severity)s-%(facility)s %(' \
'message)s '
RESOURCES = ['index.html', 'document.xml', 'dashboard.html']
LOGS_PATH = 'logs'
class State(ABC):
@abstractmethod
def run(self, context):
return NotImplemented
class DoSAttack(State):
def run(self, context):
d = {'hostname': HOSTNAME[0], 'hostip': HOSTIP[0], 'severity': SEVERITY[1],
'facility': FACILITY[1]}
http_response_code = '200'
for i in range(25):
if i >= 20:
http_response_code = '503'
d['severity'] = SEVERITY[5]
for sourceip in SOURCEIP:
d['sourceip'] = sourceip
context.logger.info('Requested resource index.html {}'.format(http_response_code), extra=d)
context.state = NormalState()
class NormalState(State):
def run(self, context):
normal = {'hostname': HOSTNAME[1], 'hostip': HOSTIP[1], 'severity': SEVERITY[1],
'facility': FACILITY[1]}
while True:
normal['sourceip'] = random.choice(SOURCEIP)
if random.random() < 0.3:
context.logger.info(
'Successful authorization on username "{}"'.format(USERNAMES[SOURCEIP.index(normal['sourceip'])]),
extra=normal)
else:
context.logger.info('Requested resource {} 200'.format(random.choice(RESOURCES)), extra=normal)
sleep(1)
if random.random() < 0.1:
rand = randint(1, 3)
if rand == 1:
context.state = DoSAttack()
elif rand == 2:
context.state = BruteForce()
elif rand == 3:
context.state = DatabaseError()
context.state.run(context)
class BruteForce(State):
def run(self, context):
attack = {'hostname': HOSTNAME[1], 'hostip': HOSTIP[1], 'sourceip': SOURCEIP[0], 'severity': SEVERITY[2],
'facility': FACILITY[4]}
normal = {'hostname': HOSTNAME[1], 'hostip': HOSTIP[1], 'severity': SEVERITY[1],
'facility': FACILITY[1]}
for i in range(30):
if i > 5:
attack['severity'] = SEVERITY[3]
if random.random() < 0.45:
normal['sourceip'] = random.choice(SOURCEIP)
context.logger.info('Requested resource {} 200'.format(random.choice(RESOURCES)), extra=normal)
sleep(0.5)
context.logger.info('Failed authorization on username "user1"', extra=attack)
sleep(0.5)
context.state = NormalState()
class DatabaseError(State):
def run(self, context):
d = {'hostname': HOSTNAME[2], 'hostip': HOSTIP[2], 'sourceip': SOURCEIP[0], 'severity': SEVERITY[4],
'facility': FACILITY[3]}
context.logger.info('Database error', extra=d)
sleep(1)
context.state = NormalState()
class Context:
def __init__(self):
self.state = NormalState()
formatter = logging.Formatter(FORMAT, "%Y-%m-%d %H:%M:%S")
logger = logging.getLogger('simulator')
if not os.path.exists(LOGS_PATH):
os.mkdir(LOGS_PATH)
fileHandler = logging.FileHandler(
os.path.join(LOGS_PATH, 'application_log-{}.log'.format(strftime('%Y-%m-%d'))))
fileHandler.setFormatter(formatter)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(formatter)
logger.addHandler(fileHandler)
logger.addHandler(consoleHandler)
logger.setLevel(logging.INFO)
self.logger = logger
def run(self):
self.state.run(self)
@property
def state(self):
return self._state
@state.setter
def state(self, value):
self._state = value
if __name__ == '__main__':
sm = Context()
sm.run()
| [
"logging.getLogger",
"os.path.exists",
"logging.StreamHandler",
"random.choice",
"logging.Formatter",
"time.strftime",
"time.sleep",
"os.mkdir",
"random.random",
"random.randint"
] | [((3793, 3801), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (3798, 3801), False, 'from time import sleep, strftime\n'), ((3937, 3983), 'logging.Formatter', 'logging.Formatter', (['FORMAT', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(FORMAT, '%Y-%m-%d %H:%M:%S')\n", (3954, 3983), False, 'import logging\n'), ((4002, 4032), 'logging.getLogger', 'logging.getLogger', (['"""simulator"""'], {}), "('simulator')\n", (4019, 4032), False, 'import logging\n'), ((4314, 4337), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (4335, 4337), False, 'import logging\n'), ((1920, 1943), 'random.choice', 'random.choice', (['SOURCEIP'], {}), '(SOURCEIP)\n', (1933, 1943), False, 'import random\n'), ((2314, 2322), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (2319, 2322), False, 'from time import sleep, strftime\n'), ((3474, 3484), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (3479, 3484), False, 'from time import sleep, strftime\n'), ((4049, 4074), 'os.path.exists', 'os.path.exists', (['LOGS_PATH'], {}), '(LOGS_PATH)\n', (4063, 4074), False, 'import os\n'), ((4088, 4107), 'os.mkdir', 'os.mkdir', (['LOGS_PATH'], {}), '(LOGS_PATH)\n', (4096, 4107), False, 'import os\n'), ((1959, 1974), 'random.random', 'random.random', ([], {}), '()\n', (1972, 1974), False, 'import random\n'), ((2339, 2354), 'random.random', 'random.random', ([], {}), '()\n', (2352, 2354), False, 'import random\n'), ((2385, 2398), 'random.randint', 'randint', (['(1)', '(3)'], {}), '(1, 3)\n', (2392, 2398), False, 'from random import randint\n'), ((3147, 3162), 'random.random', 'random.random', ([], {}), '()\n', (3160, 3162), False, 'import random\n'), ((3208, 3231), 'random.choice', 'random.choice', (['SOURCEIP'], {}), '(SOURCEIP)\n', (3221, 3231), False, 'import random\n'), ((3360, 3370), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (3365, 3370), False, 'from time import sleep, strftime\n'), ((4220, 4240), 'time.strftime', 'strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (4228, 4240), False, 'from time import sleep, strftime\n'), ((2261, 2285), 'random.choice', 'random.choice', (['RESOURCES'], {}), '(RESOURCES)\n', (2274, 2285), False, 'import random\n'), ((3303, 3327), 'random.choice', 'random.choice', (['RESOURCES'], {}), '(RESOURCES)\n', (3316, 3327), False, 'import random\n')] |
from bc4py_extension import PyAddress
import hashlib
def is_address(ck: PyAddress, hrp, ver):
"""check bech32 format and version"""
try:
if ck.hrp != hrp:
return False
if ck.version != ver:
return False
except ValueError:
return False
return True
def get_address(pk, hrp, ver) -> PyAddress:
"""get address from public key"""
identifier = hashlib.new('ripemd160', hashlib.sha256(pk).digest()).digest()
return PyAddress.from_param(hrp, ver, identifier)
def convert_address(ck: PyAddress, hrp, ver) -> PyAddress:
"""convert address's version"""
return PyAddress.from_param(hrp, ver, ck.identifier())
def dummy_address(dummy_identifier) -> PyAddress:
assert len(dummy_identifier) == 20
return PyAddress.from_param('dummy', 0, dummy_identifier)
__all__ = [
"is_address",
"get_address",
"convert_address",
"dummy_address",
]
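
# Minimal sketch (hypothetical key material; requires the compiled bc4py_extension package):
if __name__ == '__main__':
    pk = bytes.fromhex('02' + 'ab' * 32)  # made-up 33-byte compressed public key
    addr = get_address(pk, hrp='test', ver=0)
    assert is_address(addr, hrp='test', ver=0)
    assert not is_address(addr, hrp='test', ver=1)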
| [
"hashlib.sha256",
"bc4py_extension.PyAddress.from_param"
] | [((488, 530), 'bc4py_extension.PyAddress.from_param', 'PyAddress.from_param', (['hrp', 'ver', 'identifier'], {}), '(hrp, ver, identifier)\n', (508, 530), False, 'from bc4py_extension import PyAddress\n'), ((789, 839), 'bc4py_extension.PyAddress.from_param', 'PyAddress.from_param', (['"""dummy"""', '(0)', 'dummy_identifier'], {}), "('dummy', 0, dummy_identifier)\n", (809, 839), False, 'from bc4py_extension import PyAddress\n'), ((439, 457), 'hashlib.sha256', 'hashlib.sha256', (['pk'], {}), '(pk)\n', (453, 457), False, 'import hashlib\n')] |
from data.data_reader import BIZCARD_LABEL_MAP, BizcardDataParser
import argparse
from pathlib import Path
import os
import json
import cv2
import numpy as np
def convert_bizcard_to_coco_format(image_dir, json_dir, id_list, out_dir, out_name):
coco_json = {}
images = []
annotations = []
categories = []
for _, key in enumerate(BIZCARD_LABEL_MAP.keys()):
categories.append({
'id': BIZCARD_LABEL_MAP[key],
'name': key
})
with open(id_list) as fp:
ids = fp.readlines()
for idx, file_id in enumerate(ids):
file_id = Path(file_id.strip())
print(idx, file_id)
if not (image_dir / file_id).with_suffix('.jpg').exists():
file_id = file_id.with_suffix('.jpeg')
else:
file_id = file_id.with_suffix('.jpg')
height, width = cv2.imread(str(image_dir / file_id)).shape[:2]
images.append({
'file_name': str(file_id),
'id': idx,
'height': height,
'width': width
})
try:
            gt = BizcardDataParser.parse_data(
                str((json_dir / file_id).with_suffix('.json')), str(image_dir / file_id))[0]
for word in gt.words:
anno = {
'id': len(annotations),
'image_id': idx,
'bbox': [word.bbox.min_x, word.bbox.min_y, (word.bbox.max_x - word.bbox.min_x),
(word.bbox.max_y - word.bbox.min_y)],
'segmentation': [word.bbox.val],
'category_id': word.label,
'iscrowd': 0,
'area': cv2.contourArea(np.reshape(word.bbox.val, [-1, 2]).astype(np.float32))
}
annotations.append(anno)
except Exception as e:
print(e)
print(str(image_dir / file_id))
coco_json['images'] = images
coco_json['annotations'] = annotations
coco_json['categories'] = categories
with open(Path(out_dir, out_name), 'w') as f:
json.dump(coco_json, f)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--img_dir', type=str)
parser.add_argument('--gt_dir', type=str)
parser.add_argument('--data_list', type=str)
parser.add_argument('--out_dir', type=str)
parser.add_argument('--out_name', type=str)
args = parser.parse_args()
if not Path(args.out_dir).exists():
Path(args.out_dir).mkdir()
convert_bizcard_to_coco_format(
Path(args.img_dir),
Path(args.gt_dir),
args.data_list,
args.out_dir,
args.out_name)
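
# Illustrative invocation (hypothetical script name and paths; --data_list is a text file
# with one image id per line):
#   python convert_bizcard_to_coco.py --img_dir data/images --gt_dir data/json \
#       --data_list data/train_ids.txt --out_dir data/coco --out_name train.json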
| [
"data.data_reader.BIZCARD_LABEL_MAP.keys",
"numpy.reshape",
"argparse.ArgumentParser",
"pathlib.Path",
"json.dump"
] | [((2152, 2177), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2175, 2177), False, 'import argparse\n'), ((351, 375), 'data.data_reader.BIZCARD_LABEL_MAP.keys', 'BIZCARD_LABEL_MAP.keys', ([], {}), '()\n', (373, 375), False, 'from data.data_reader import BIZCARD_LABEL_MAP, BizcardDataParser\n'), ((2086, 2109), 'json.dump', 'json.dump', (['coco_json', 'f'], {}), '(coco_json, f)\n', (2095, 2109), False, 'import json\n'), ((2567, 2585), 'pathlib.Path', 'Path', (['args.img_dir'], {}), '(args.img_dir)\n', (2571, 2585), False, 'from pathlib import Path\n'), ((2595, 2612), 'pathlib.Path', 'Path', (['args.gt_dir'], {}), '(args.gt_dir)\n', (2599, 2612), False, 'from pathlib import Path\n'), ((2042, 2065), 'pathlib.Path', 'Path', (['out_dir', 'out_name'], {}), '(out_dir, out_name)\n', (2046, 2065), False, 'from pathlib import Path\n'), ((2458, 2476), 'pathlib.Path', 'Path', (['args.out_dir'], {}), '(args.out_dir)\n', (2462, 2476), False, 'from pathlib import Path\n'), ((2495, 2513), 'pathlib.Path', 'Path', (['args.out_dir'], {}), '(args.out_dir)\n', (2499, 2513), False, 'from pathlib import Path\n'), ((1700, 1734), 'numpy.reshape', 'np.reshape', (['word.bbox.val', '[-1, 2]'], {}), '(word.bbox.val, [-1, 2])\n', (1710, 1734), True, 'import numpy as np\n')] |
import asyncio
import contextvars
import aioredis
import uvloop
from aioredis import Redis
from fastapi import FastAPI
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.staticfiles import StaticFiles
from RLog import rprint
from routers import apirest, websockets
REDIS_HOST = 'redis'
REDIS_PORT = 6379
PORT = 9080
HOST = "0.0.0.0"
cvar_redis = contextvars.ContextVar('redis', default=None)
class CustomHeaderMiddleware(BaseHTTPMiddleware):
def __init__(self, app, header_value='Example'):
rprint('__init__')
super().__init__(app)
self.header_value = header_value
async def dispatch(self, request, call_next):
response = await call_next(request)
response.headers['Custom'] = self.header_value
return response
# uvloop is written in Cython and is built on top of libuv http://magic.io/blog/uvloop-blazing-fast-python-networking/
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
app.add_middleware(CustomHeaderMiddleware)
app.include_router(apirest.router)
app.include_router(websockets.router)
@app.on_event("startup")
async def handle_startup() -> None:
rprint("startup")
try:
pool = await aioredis.create_redis_pool((REDIS_HOST, REDIS_PORT), encoding='utf-8', maxsize=20)
cvar_redis.set(pool)
rprint("Connected to Redis on ", REDIS_HOST, REDIS_PORT)
except ConnectionRefusedError as e:
rprint('cannot connect to redis on:', REDIS_HOST, REDIS_PORT)
return
@app.on_event("shutdown")
async def handle_shutdown() -> None:
if cvar_redis.get() is not None:
redis: Redis = cvar_redis.get()
redis.close()
await redis.wait_closed()
rprint("closed connection Redis on ", REDIS_HOST, REDIS_PORT)
else:
rprint("ERROR: cvar_redis.get() devuelve NONE")
if __name__ == "__main__":
import uvicorn
rprint("Starting app")
rprint(dir(app))
rprint(app.url_path_for('websocket_endpoint'))
uvicorn.run('chat:app', host=HOST, port=PORT, log_level='info', reload=True)#, uds='uvicorn.sock')
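
# Illustrative route sketch (hypothetical endpoint) showing how the pool stored in cvar_redis
# during startup could be used from a request handler:
#
#   @app.get("/redis-ping")
#   async def redis_ping():
#       redis: Redis = cvar_redis.get()
#       return {"pong": await redis.ping()}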
| [
"aioredis.create_redis_pool",
"fastapi.FastAPI",
"RLog.rprint",
"uvicorn.run",
"contextvars.ContextVar",
"starlette.staticfiles.StaticFiles",
"uvloop.EventLoopPolicy"
] | [((368, 413), 'contextvars.ContextVar', 'contextvars.ContextVar', (['"""redis"""'], {'default': 'None'}), "('redis', default=None)\n", (390, 413), False, 'import contextvars\n'), ((975, 984), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (982, 984), False, 'from fastapi import FastAPI\n'), ((943, 967), 'uvloop.EventLoopPolicy', 'uvloop.EventLoopPolicy', ([], {}), '()\n', (965, 967), False, 'import uvloop\n'), ((1006, 1037), 'starlette.staticfiles.StaticFiles', 'StaticFiles', ([], {'directory': '"""static"""'}), "(directory='static')\n", (1017, 1037), False, 'from starlette.staticfiles import StaticFiles\n'), ((1237, 1254), 'RLog.rprint', 'rprint', (['"""startup"""'], {}), "('startup')\n", (1243, 1254), False, 'from RLog import rprint\n'), ((1973, 1995), 'RLog.rprint', 'rprint', (['"""Starting app"""'], {}), "('Starting app')\n", (1979, 1995), False, 'from RLog import rprint\n'), ((2072, 2148), 'uvicorn.run', 'uvicorn.run', (['"""chat:app"""'], {'host': 'HOST', 'port': 'PORT', 'log_level': '"""info"""', 'reload': '(True)'}), "('chat:app', host=HOST, port=PORT, log_level='info', reload=True)\n", (2083, 2148), False, 'import uvicorn\n'), ((527, 545), 'RLog.rprint', 'rprint', (['"""__init__"""'], {}), "('__init__')\n", (533, 545), False, 'from RLog import rprint\n'), ((1405, 1461), 'RLog.rprint', 'rprint', (['"""Connected to Redis on """', 'REDIS_HOST', 'REDIS_PORT'], {}), "('Connected to Redis on ', REDIS_HOST, REDIS_PORT)\n", (1411, 1461), False, 'from RLog import rprint\n'), ((1793, 1854), 'RLog.rprint', 'rprint', (['"""closed connection Redis on """', 'REDIS_HOST', 'REDIS_PORT'], {}), "('closed connection Redis on ', REDIS_HOST, REDIS_PORT)\n", (1799, 1854), False, 'from RLog import rprint\n'), ((1873, 1920), 'RLog.rprint', 'rprint', (['"""ERROR: cvar_redis.get() devuelve NONE"""'], {}), "('ERROR: cvar_redis.get() devuelve NONE')\n", (1879, 1920), False, 'from RLog import rprint\n'), ((1285, 1371), 'aioredis.create_redis_pool', 'aioredis.create_redis_pool', (['(REDIS_HOST, REDIS_PORT)'], {'encoding': '"""utf-8"""', 'maxsize': '(20)'}), "((REDIS_HOST, REDIS_PORT), encoding='utf-8',\n maxsize=20)\n", (1311, 1371), False, 'import aioredis\n'), ((1510, 1571), 'RLog.rprint', 'rprint', (['"""cannot connect to redis on:"""', 'REDIS_HOST', 'REDIS_PORT'], {}), "('cannot connect to redis on:', REDIS_HOST, REDIS_PORT)\n", (1516, 1571), False, 'from RLog import rprint\n')] |
#
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD.
# See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import os
import re
import logging
from nomad.units import ureg
from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser
from nomad.datamodel.metainfo.simulation.run import Run, Program
from nomad.datamodel.metainfo.simulation.method import (
Method, DFT, Electronic, Smearing, XCFunctional, Functional,
GW as GWMethod, Scf, BasisSet
)
from nomad.datamodel.metainfo.simulation.system import (
System, Atoms
)
from nomad.datamodel.metainfo.simulation.calculation import (
Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges,
Forces, ForcesEntry, ScfIteration, BandGap
)
from nomad.datamodel.metainfo.workflow import Workflow, GeometryOptimization
from .metainfo.exciting import x_exciting_section_MT_charge_atom, x_exciting_section_MT_moment_atom,\
x_exciting_section_spin, x_exciting_section_fermi_surface,\
x_exciting_section_atoms_group
re_float = r'[-+]?\d+\.\d*(?:[Ee][-+]\d+)?'
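# (added note) illustrative strings matched by re_float: '0.5', '-1.23', '3.0E+02'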
class GWInfoParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
def str_to_frequency(val_in):
val = [v.split() for v in val_in.split('\n')]
val = np.transpose(np.array([v for v in val if len(v) == 3], float))
return dict(
number=np.array(val[0], dtype=int), values=val[1] * ureg.hartree,
weights=val[2])
        # TODO Also read input parameters here if input_GW.xml does not exist
self._quantities.append(
Quantity(
'frequency_data', r'frequency list:\s*\<\s*#\s*freqs\s*weight\s*>\s*([\d\.Ee\s\-]+)',
str_operation=str_to_frequency, repeats=False)
)
self._quantities.append(
Quantity(
'fermi_energy', r'\-\s*G0W0.+\-\s*\-+\s*[\s\S]*?Fermi [Ee]nergy\s*[:=](\s*-?[\d\.]+)\s',
unit=ureg.hartree, repeats=False)
)
self._quantities.append(
Quantity(
'direct_band_gap', r'\-\s*G0W0\s*\-\s*\-+\s*[\s\S]*?Direct BandGap\s*\((?P<__unit>\w+)\)\s*\:(\s*[\d\.]+)\s',
repeats=False)
)
self._quantities.append(
Quantity(
'fundamental_band_gap', r'\-\s*G0W0\s*\-\s*\-+\s*[\s\S]*?Fundamental BandGap\s*\((?P<__unit>\w+)\)\s*\:(\s*[\d\.]+)\s',
repeats=False)
)
self._quantities.append(
Quantity(
'optical_band_gap', r'\-\s*G0W0\s*\-\s*\-+\s*[\s\S]*?Optical BandGap\s*\((?P<__unit>\w+)\)\s*\:(\s*[\d\.]+)\s',
repeats=False)
)
class ExcitingEvalqpParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
def str_to_eigenvalue(val_in):
val = val_in.strip().split('\n')
kpts = np.array(val[0].split(), dtype=float)
keys = val[1].split()
eigs = np.transpose(np.array([v.split() for v in val[2:]], dtype=float))
eigs = {keys[i]: eigs[i] for i in range(len(keys))}
return [kpts, eigs]
self._quantities.append(
Quantity(
'kpoints_eigenvalues', r'\s*k\-point \#\s*\d+:\s*([\d\s\.\-]+)([ \w\(\)]+\n)([\s\d\.\-Ee]+)',
str_operation=str_to_eigenvalue, repeats=True))
class BandstructureDatParser(DataTextParser):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._nspin = kwargs.get('nspin', None)
self._energy_unit = kwargs.get('energy_unit', None)
def init_parameters(self):
        # TODO make a parent class for bandstructure dat and xml
self._nspin = None
self._nkpts_segment = None
self._neigs_segment = None
self._vertices = None
self._distances = None
self._band_energies = None
self._band_k_points = None
@property
def band_energies(self):
if self._band_energies is None:
if self.data is None:
return
data = np.transpose(self.data)
n_kpoints = int(max(data[1]))
bands = data[6:]
bands = np.reshape(bands, (
self.number_of_spin_channels, self.number_of_band_segment_eigenvalues, n_kpoints))
self._band_energies = []
start = 0
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
band_energy = np.array([np.transpose(band)[start:end] for band in bands])
if self._energy_unit:
band_energy = band_energy * self._energy_unit
self._band_energies.append(band_energy)
start = end
return self._band_energies
@property
def band_k_points(self):
if self._band_k_points is None:
data = np.transpose(self.data)
self._band_k_points = []
start = 0
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
self._band_k_points.append(
np.transpose(data[2:5])[start:end])
start = end
return self._band_k_points
@property
def distances(self):
if self._distances is None:
data = np.transpose(self.data)
self._distances = data[5][:int(max(data[1]))]
return self._distances
@property
def number_of_spin_channels(self):
if self._nspin is None:
self._nspin = np.shape(np.transpose(self.data))[0] - 6
return self._nspin
@property
def number_of_k_points_per_segment(self):
if self._nkpts_segment is None:
self._nkpts_segment = []
count = 1
for i in range(1, len(self.distances)):
if self.distances[i] == self.distances[i - 1]:
self._nkpts_segment.append(count)
count = 1
else:
count += 1
self._nkpts_segment.append(count)
return self._nkpts_segment
@property
def number_of_band_segment_eigenvalues(self):
if self._neigs_segment is None:
data = np.transpose(self.data)
self._neigs_segment = int(max(data[0]))
return self._neigs_segment
class BandOutParser(DataTextParser):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._nspin = kwargs.get('nspin', None)
self._energy_unit = kwargs.get('energy_unit', None)
def init_parameters(self):
self._nspin = None
self._distances = None
self._band_energies = None
self._neigs_segment = None
self._nkpts_segment = None
@property
def band_energies(self):
if self._band_energies is None:
data = np.transpose(self.data)
n_kpoints = np.where(data[0] == data[0][0])[0][1]
bands = data[1:]
bands = np.reshape(bands, (
self.number_of_spin_channels, self.number_of_band_segment_eigenvalues, n_kpoints))
self._band_energies = []
start = 0
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
band_energy = np.array([np.transpose(band)[start:end] for band in bands])
if self._energy_unit:
band_energy = band_energy * self._energy_unit
self._band_energies.append(band_energy)
start = end
return self._band_energies
@property
def distances(self):
if self._distances is None:
dist = np.transpose(self.data)[0]
n_k_points = np.where(dist == dist[0])[0][1]
self._distances = dist[:n_k_points]
return self._distances
@property
def number_of_spin_channels(self):
if self._nspin is None:
self._nspin = np.shape(np.transpose(self.data)[1:])[0]
return self._nspin
@property
def number_of_k_points_per_segment(self):
if self._nkpts_segment is None:
self._nkpts_segment = []
count = 1
for i in range(1, len(self.distances)):
if self.distances[i] == self.distances[i - 1]:
self._nkpts_segment.append(count)
count = 1
else:
count += 1
self._nkpts_segment.append(count)
return self._nkpts_segment
@property
def number_of_band_segment_eigenvalues(self):
if self._neigs_segment is None:
data = np.transpose(self.data)[0]
self._neigs_segment = len(np.where(data == data[0])[0])
return self._neigs_segment
class BandstructureXMLParser(XMLParser):
def __init__(self, **kwargs):
# TODO make a parent class for dos and bandstructure
super().__init__(None)
self._distance_key = 'distance'
self._coord_key = 'coord'
self._energy_key = 'eval'
self._vertex_key = 'vertex'
self._band_key = 'band'
self._atom_key = 'atom'
self._nspin = kwargs.get('nspin', None)
self._energy_unit = kwargs.get('energy_unit', None)
def init_parameters(self):
self._nspin = None
self._nkpts_segment = None
self._neigs_segment = None
self._bands = None
self._vertices = None
self._distances = None
self._species = None
@property
def distances(self):
if self._distances is None:
if not self.bands:
return
self._distances = [
point.attrib.get(self._distance_key) for point in self.bands[0][0]]
self._distances = np.array(self._distances, dtype=float)
return self._distances
@property
def bands(self):
if self._bands is None:
bands = self.root.findall('./%s' % self._band_key)
self._bands = []
if bands:
self._bands.append(bands)
# add atom-resolved
bands_atom = self.root.findall('./*/%s' % self._atom_key)
for band in bands_atom:
self._bands.append(band.findall('./%s' % self._band_key))
return self._bands
@property
def vertices(self):
if self._vertices is None:
self._vertices = self.root.findall('./%s' % self._vertex_key)
return self._vertices
@property
def number_of_spin_channels(self):
if self._nspin is None:
self._nspin = 1
return self._nspin
@property
def number_of_k_points_per_segment(self):
if self._nkpts_segment is None:
self._nkpts_segment = []
count = 1
for i in range(1, len(self.distances)):
if self.distances[i] == self.distances[i - 1]:
                    self._nkpts_segment.append(count)
count = 1
else:
count += 1
self._nkpts_segment.append(count)
return self._nkpts_segment
@property
def number_of_band_segment_eigenvalues(self):
if self._neigs_segment is None:
self._neigs_segment = len(self.bands[0]) // self.number_of_spin_channels
return self._neigs_segment
def parse(self, key):
if self._results is None:
self._results = dict()
if not self.bands:
return
if key == 'band_energies':
# TODO I am not certain about the format for the spin polarized case
# I cannot find an example bandstructure file
# atom-resolved bandstructure are added as separate section_k_band
res = []
for n in range(len(self.bands)):
res_n = []
start = 0
band_energies = np.zeros((
self.number_of_spin_channels, self.number_of_band_segment_eigenvalues,
len(self.distances)), dtype=float)
for i in range(len(self.bands[n])):
band_energies[i % self.number_of_spin_channels][i] = np.array(
[e.attrib.get(self._energy_key) for e in self.bands[n][i]])
for nkpts_segment in self.number_of_k_points_per_segment:
end = start + nkpts_segment
band_energy = np.array([
np.transpose(energy)[start:end] for energy in band_energies])
if self._energy_unit is not None:
band_energy = band_energy * self._energy_unit
res_n.append(band_energy)
start = end
res.append(res_n)
elif key == 'band_k_points':
res = []
for i in range(len(self.number_of_k_points_per_segment)):
start = np.array(
self.vertices[i].attrib.get(self._coord_key).split(), dtype=float)
end = np.array(
self.vertices[i + 1].attrib.get(self._coord_key).split(), dtype=float)
res.append(np.linspace(start, end, self.number_of_k_points_per_segment[i]))
elif key == 'band_segm_labels':
res = []
for i in range(len(self.vertices) - 1):
start = self.vertices[i].attrib.get('label')
end = self.vertices[i + 1].attrib.get('label')
res.append([
'\u0393' if start.lower() == 'gamma' else start,
'\u0393' if end.lower() == 'gamma' else end])
elif key == 'band_segm_start_end':
res = []
for i in range(len(self.number_of_k_points_per_segment)):
start = self.vertices[i].attrib.get(self._coord_key).split()
end = self.vertices[i + 1].attrib.get(self._coord_key).split()
res.append([start, end])
else:
res = None
self._results[key] = res
class DOSXMLParser(XMLParser):
def __init__(self, **kwargs):
super().__init__(None)
self._nspin_key = 'nspin'
self._totaldos_key = 'totaldos'
self._partialdos_key = 'partialdos'
self._diagram_key = 'diagram'
self._l_key = 'l'
self._m_key = 'm'
self._energy_key = 'e'
self._dos_key = 'dos'
self._unit_key = 'unit'
self._energy_unit = kwargs.get('energy_unit', None)
self._units_mapping = dict(hartree=ureg.hartree)
def init_parameters(self):
self._ndos = None
self._natoms = None
self._nspin = None
self._nlm = None
self._energies = None
self._total_dos = None
self._partial_dos = None
@property
def energy_unit(self):
if self._energy_unit is None:
axis = self.root.find('./axis')
if axis is None:
return
self._energy_unit = self._units_mapping.get(axis.attrib.get(self._unit_key).lower(), 1)
return self._energy_unit
@property
def number_of_spin_channels(self):
if self._nspin is None:
if not self.total_dos:
return
self._nspin = len(self.total_dos)
return self._nspin
@property
def number_of_atoms(self):
if self._natoms is None:
partial_dos = self.root.findall('./%s' % self._partialdos_key)
self._natoms = len(partial_dos)
return self._natoms
@property
def number_of_dos(self):
if self._ndos is None:
total_dos = self.root.find('./%s/%s' % (self._totaldos_key, self._diagram_key))
self._ndos = len(total_dos)
return self._ndos
@property
def number_of_lm(self):
if self._nlm is None:
if self.partial_dos is None:
return
self._nlm = 0
l_list = set([int(e.attrib.get(self._l_key)) for e in self.partial_dos])
for li in l_list:
self._nlm += 2 * li + 1
return self._nlm
@property
def total_dos(self):
if self._total_dos is None:
self._total_dos = self.root.findall('./%s/%s' % (self._totaldos_key, self._diagram_key))
return self._total_dos
@property
def partial_dos(self):
if self._partial_dos is None:
self._partial_dos = self.root.findall('./%s/%s' % (self._partialdos_key, self._diagram_key))
return self._partial_dos
@property
def energies(self):
if self._energies is None:
if self.total_dos is None:
return
self._energies = np.array(
[float(point.attrib.get(self._energy_key)) for point in self.total_dos[0]])
if self.energy_unit is not None:
self._energies = self._energies * self.energy_unit
return self._energies
def _get_dos(self, diagram):
dos = np.array(
[point.attrib.get(self._dos_key) for point in diagram], dtype=float)
return dos
def parse(self, key):
if self._results is None:
self._results = dict()
if 'total' in key:
if not self.total_dos:
return
res = np.zeros((self.number_of_spin_channels, self.number_of_dos))
for i in range(len(self.total_dos)):
spin = self.total_dos[i].attrib.get(self._nspin_key, i)
res[i] = self._get_dos(self._total_dos[i])
if self.energy_unit is not None:
res = res * (1 / self.energy_unit)
elif 'partial' in key:
if not self.partial_dos:
return
res = np.zeros((
self.number_of_lm, self.number_of_spin_channels, self.number_of_atoms, self.number_of_dos))
for i in range(len(self.partial_dos)):
spin = self.partial_dos[i].attrib.get(self._nspin_key, None)
if spin is None:
spin = (i % (self.number_of_spin_channels * self.number_of_lm)) // self.number_of_lm
else:
spin = int(spin) - 1
val_l = self.partial_dos[i].attrib.get(self._l_key, None)
val_m = self.partial_dos[i].attrib.get(self._m_key, None)
if val_l is None or val_m is None:
lm = i % self.number_of_lm
else:
lm = int(val_l) ** 2 + int(val_m) + int(val_l)
atom = i // (self.number_of_lm * self.number_of_spin_channels)
res[lm][spin][atom] = self._get_dos(self.partial_dos[i])
if self.energy_unit is not None:
res = res * (1 / self.energy_unit)
elif key == 'energies':
return self.energies
else:
res = None
self._results[key] = res
class ExcitingFermiSurfaceBxsfParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
self._quantities.append(
Quantity(
'fermi_energy', r'Fermi Energy:\s*([\d\.]+)\s*', unit=ureg.hartree, repeats=False))
def str_to_band_parameters(val_in):
val = val_in.strip().split('\n')
nbands = int(val[0])
mesh = np.array(val[1].split(), dtype=int)
origin = np.array(val[2].split(), dtype=float)
vector = np.array([v.split() for v in val[3:6]], dtype=float)
return [nbands, mesh, origin, vector]
self._quantities.append(
Quantity(
'band_parameters', r'BANDGRID_3D_BANDS\s*([\d\.\-Ee\s]+)',
str_operation=str_to_band_parameters, repeats=False))
self._quantities.append(
Quantity(
'fermi_surface', r'BAND:\s*\d+\s*([\d\-\+\.Ee\s]+)\n *E*', unit=ureg.hartree,
repeats=True))
class ExcitingEigenvalueParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
self._quantities = []
self._quantities.append(
Quantity(
'k_points', r'\s*\d+\s*([\d\.Ee\- ]+):\s*k\-point', repeats=True))
def str_to_eigenvalues(val_in):
val = val_in[:val_in.rfind('\n \n')].strip()
val = np.array([v.split() for v in val.split('\n')], dtype=float)
val = np.transpose(val)
occs = val[-1]
eigs = val[-2]
nspin = 2 if occs[0] == 1. else 1
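            # (added note) an occupancy of 1 for the first state is taken to indicate a
            # spin-polarized calculation; spin-degenerate states carry an occupancy of 2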
data = dict()
data['occupancies'] = np.reshape(occs, (nspin, len(occs) // nspin))
data['eigenvalues'] = np.reshape(eigs, (nspin, len(eigs) // nspin))
return data
self._quantities.append(
Quantity(
'eigenvalues_occupancies', r'\(state\, eigenvalue and occupancy below\)\s*([\d\.Ee\-\s]+?(?:\n *\n))',
str_operation=str_to_eigenvalues, repeats=True))
class ExcitingGWOutParser(TextParser):
def __init__(self, mainfile, logger):
super().__init__(mainfile, logger=logger)
def init_quantities(self):
self._quantities = []
class ExcitingInfoParser(TextParser):
def __init__(self):
super().__init__(None)
def init_quantities(self):
re_symbol = re.compile(r'([A-Z][a-z]?)')
def str_to_array(val_in):
val = [v.split(':')[-1].split() for v in val_in.strip().split('\n')]
val = val[0] if len(val) == 1 else val
return np.array(val, dtype=float)
def str_to_atom_properties_dict(val_in):
unit = None
if 'charge' in val_in:
unit = ureg.elementary_charge
elif 'moment' in val_in:
unit = ureg.elementary_charge * ureg.bohr
val = val_in.strip().split('\n')
properties = dict()
atom_resolved = []
species = None
for v in val:
v = v.strip().split(':')
if len(v) < 2:
continue
elif v[0].startswith('species'):
species = re.search(re_symbol, v[-1]).group(1)
elif v[0].startswith('atom'):
v[0] = v[0].split()
v[1] = [float(vi) for vi in v[1].split()]
v[1] = v[1][0] if len(v[1]) == 1 else v[1]
if species is None:
species = v[0][2]
                    atom_resolved.append((species, v[1] * unit))
else:
vi = [float(vii) for vii in v[1].split()]
vi = vi[0] if len(vi) == 1 else vi
properties[v[0].strip()] = vi * unit
properties['atom_resolved'] = atom_resolved
return properties
def str_to_quantity_tolerances(val_in):
return val_in.strip().replace('(', '').replace(')', '').split()
def str_to_energy_dict(val_in):
val = val_in.strip().split('\n')
energies = dict()
for v in val:
v = v.split(':')
if len(v) < 2:
continue
energies[v[0].strip()] = float(v[1]) * ureg.hartree
return energies
self._quantities = [Quantity(
'program_version', r'\s*EXCITING\s*([\w\-\(\)\. ]+)\s*started', repeats=False,
dtype=str, flatten=False)]
initialization_quantities = [
Quantity(
'lattice_vectors',
r'Lattice vectors\s*[\(cartesian\)]*\s*:\s*([\-0-9\.\s]+)\n',
str_operation=str_to_array, unit=ureg.bohr, repeats=False, convert=False),
Quantity(
'lattice_vectors_reciprocal',
r'Reciprocal lattice vectors\s*[\(cartesian\)]*\s*:\s*([\-0-9\.\s]+)\n',
str_operation=str_to_array, unit=1 / ureg.bohr, repeats=False, convert=False),
]
self._system_keys_mapping = {
'x_exciting_unit_cell_volume': ('Unit cell volume', ureg.bohr ** 3),
'x_exciting_brillouin_zone_volume': ('Brillouin zone volume', 1 / ureg.bohr ** 3),
'x_exciting_number_of_atoms': ('Total number of atoms per unit cell', None),
'x_exciting_spin_treatment': ('Spin treatment', None),
'x_exciting_number_of_bravais_lattice_symmetries': ('Number of Bravais lattice symmetries', None),
'x_exciting_number_of_crystal_symmetries': ('Number of crystal symmetries', None),
'x_exciting_kpoint_grid': (r'k\-point grid', None),
'x_exciting_kpoint_offset': (r'k\-point offset', None),
'x_exciting_number_kpoints': (r'Total number of k\-points', None),
'x_exciting_rgkmax': (r'R\^MT\_min \* \|G\+k\|\_max \(rgkmax\)', None),
'x_exciting_species_rtmin': (r'Species with R\^MT\_min', None),
'x_exciting_gkmax': (r'Maximum \|G\+k\| for APW functions', 1 / ureg.bohr),
'x_exciting_gmaxvr': (r'Maximum \|G\| for potential and density', 1 / ureg.bohr),
'x_exciting_gvector_size': (r'G\-vector grid sizes', None),
'x_exciting_gvector_total': (r'Total number of G\-vectors', None),
'x_exciting_lmaxapw': (r' APW functions', None),
'x_exciting_nuclear_charge': ('Total nuclear charge', ureg.elementary_charge),
'x_exciting_electronic_charge': ('Total electronic charge', ureg.elementary_charge),
'x_exciting_core_charge_initial': ('Total core charge', ureg.elementary_charge),
'x_exciting_valence_charge_initial': ('Total valence charge', ureg.elementary_charge),
'x_exciting_wigner_radius': (r'Effective Wigner radius, r\_s', ureg.bohr),
'x_exciting_empty_states': ('Number of empty states', None),
'x_exciting_valence_states': ('Total number of valence states', None),
'x_exciting_hamiltonian_size': ('Maximum Hamiltonian size', None),
'x_exciting_pw': (r'Maximum number of plane\-waves', None),
'x_exciting_lo': (r'Total number of local\-orbitals', None)}
self._method_keys_mapping = {
'smearing_kind': ('Smearing scheme', None),
'smearing_width': ('Smearing width', None)}
for name, key_unit in self._system_keys_mapping.items():
initialization_quantities.append(
Quantity(
name, r'%s\s*:\s*([\s\S]*?)\n' % key_unit[0], unit=key_unit[1], repeats=False)
)
for name, key_unit in self._method_keys_mapping.items():
initialization_quantities.append(
Quantity(
name, r'%s\s*:\s*([\s\S]*?)\n' % key_unit[0], unit=key_unit[1], repeats=False)
)
initialization_quantities.append(Quantity(
'species',
rf'(Species : *\d+ *\(\w+\)[\s\S]+?{re_float} *{re_float} *{re_float}\n\s*\n)',
repeats=True, sub_parser=TextParser(quantities=[
Quantity('number', r'Species : *(\d+)', dtype=np.int32),
Quantity('symbol', r'\((\w+)\)'),
Quantity('file', r'parameters loaded from *: *(.+)'),
Quantity('name', r'name *: *(.+)'),
Quantity('nuclear_charge', rf'nuclear charge *: *({re_float})', dtype=np.float64, unit=ureg.elementary_charge),
Quantity('electronic_charge', rf'electronic charge *: *({re_float})', dtype=np.float64, unit=ureg.elementary_charge),
Quantity('atomic_mass', rf'atomic mass *: *({re_float})', dtype=np.float64, unit=ureg.electron_mass),
Quantity('muffin_tin_radius', rf'muffin-tin radius *: *({re_float})', dtype=np.float64, unit=ureg.bohr),
Quantity('radial_points', rf'radial points in muffin-tin *: *({re_float})', dtype=np.int32),
Quantity('positions_format', r'atomic positions \((.+?)\)', flatten=False),
Quantity(
'positions',
rf'\d+ : *({re_float}) *({re_float}) *({re_float})',
repeats=True, dtype=np.dtype(np.float64))])))
initialization_quantities.append(Quantity(
'potential_mixing', r'Using ([\w ]+) potential mixing', repeats=False, flatten=False)
)
initialization_quantities.append(Quantity(
'xc_functional', r'(Exchange-correlation type[\s\S]+?\n *\n)',
sub_parser=TextParser(quantities=[
Quantity('type', r'Exchange-correlation type +: +(\S+)'),
Quantity(
'name_reference',
r'\n *(.+?,.+)',
str_operation=lambda x: [v.strip() for v in x.split(':')]),
Quantity(
'parameters',
r'\n *(.+?:.+)', repeats=True,
str_operation=lambda x: [v.strip() for v in x.split(':')])]))
)
self._quantities.append(Quantity(
'initialization',
r'(?:All units are atomic|Starting initialization)([\s\S]+?)(?:Using|Ending initialization)', repeats=False,
sub_parser=TextParser(quantities=initialization_quantities))
)
scf_quantities = [
Quantity(
'energy_total', r'[Tt]*otal energy\s*:\s*([\-\d\.Ee]+)', repeats=False,
dtype=float, unit=ureg.hartree),
Quantity(
'energy_contributions', r'(?:Energies|_)([\+\-\s\w\.\:]+?)\n *(?:DOS|Density)',
str_operation=str_to_energy_dict, repeats=False, convert=False),
Quantity(
'x_exciting_dos_fermi',
r'DOS at Fermi energy \(states\/Ha\/cell\)\s*:\s*([\-\d\.Ee]+)',
repeats=False, dtype=float, unit=1 / ureg.hartree),
Quantity(
'charge_contributions',
r'(?:Charges|Electron charges\s*\:*\s*)([\-\s\w\.\:\(\)]+?)\n *[A-Z\+]',
str_operation=str_to_atom_properties_dict, repeats=False, convert=False),
Quantity(
'moment_contributions',
r'(?:Moments\s*\:*\s*)([\-\s\w\.\:\(\)]+?)\n *[A-Z\+]',
str_operation=str_to_atom_properties_dict, repeats=False, convert=False)]
self._miscellaneous_keys_mapping = {
'x_exciting_gap': (r'Estimated fundamental gap', ureg.hartree),
'time': (r'Wall time \(seconds\)', ureg.s)}
for name, key_unit in self._miscellaneous_keys_mapping.items():
scf_quantities.append(Quantity(
name, r'%s\s*\:*\s*([\-\d\.Ee]+)' % key_unit[0], repeats=False,
unit=key_unit[1]))
self._convergence_keys_mapping = {
'x_exciting_effective_potential_convergence': (
r'RMS change in effective potential \(target\)', ureg.hartree),
'x_exciting_energy_convergence': (
r'Absolute change in total energy\s*\(target\)', ureg.hartree),
'x_exciting_charge_convergence': (
r'Charge distance\s*\(target\)', ureg.elementary_charge),
'x_exciting_IBS_force_convergence': (
r'Abs\. change in max\-nonIBS\-force\s*\(target\)', ureg.hartree / ureg.bohr)}
for name, key_unit in self._convergence_keys_mapping.items():
scf_quantities.append(Quantity(
name, r'%s\s*\:*\s*([\(\)\d\.\-\+Ee ]+)' % key_unit[0],
str_operation=str_to_quantity_tolerances, unit=key_unit[1], repeats=False))
module_quantities = [
Quantity(
'scf_iteration', r'(?:I| i)teration number :([\s\S]+?)(?:\n *\n\+{10}|\+\-{10})',
sub_parser=TextParser(quantities=scf_quantities), repeats=True),
Quantity(
'final',
r'(?:Convergence targets achieved\. Performing final SCF iteration|Reached self-consistent loops maximum)([\s\S]+?)(\n *\n\+{10})',
sub_parser=TextParser(quantities=scf_quantities), repeats=False),
Quantity(
'atomic_positions',
r'(Atomic positions\s*\([\s\S]+?)\n\n',
sub_parser=TextParser(quantities=[
Quantity(
'positions_format', r'Atomic positions\s*\(([a-z]+)\)'),
Quantity(
'symbols', r'atom\s*\d+\s*(\w+)', repeats=True, dtype=str),
Quantity(
'positions', r'\s*:\s*([\d\.\-]+\s*[\d\.\-]+\s*[\d\.\-]+)',
repeats=True, dtype=float)])),
Quantity(
'forces', r'Total atomic forces including IBS \(\w+\)\s*\:(\s*atom[\-\s\w\.\:]*?)\n *Atomic',
repeats=False, str_operation=str_to_array, dtype=float, unit=ureg.hartree / ureg.bohr)
]
self._quantities.append(Quantity(
'groundstate',
r'(?:Self\-consistent loop started|Groundstate module started)([\s\S]+?)Groundstate module stopped',
sub_parser=TextParser(quantities=module_quantities), repeats=False))
optimization_quantities = [
Quantity(
'atomic_positions',
r'(Atomic positions at this step\s*\([\s\S]+?)\n\n',
sub_parser=TextParser(quantities=[
Quantity(
'positions_format', r'Atomic positions at this step\s*\(([a-z]+)\)'),
Quantity(
'symbols', r'atom\s*\d+\s*(\w+)', repeats=True, dtype=str),
Quantity(
'positions', r'\s*:\s*([\d\.\-]+\s*[\d\.\-]+\s*[\d\.\-]+)',
repeats=True, dtype=float)])),
Quantity(
'forces',
r'Total atomic forces including IBS \(\w+\)\s*\:(\s*atom[\-\s\w\.\:]*?)\n *Time',
repeats=False, str_operation=str_to_array, convert=False, unit=ureg.hartree / ureg.bohr),
Quantity(
'step', r'Optimization step\s*(\d+)', repeats=False, dtype=int),
Quantity(
'method', r'method\s*=\s*(\w+)', repeats=False, dtype=str),
Quantity(
'n_scf_iterations',
r'Number of (?:total)* scf iterations\s*\:\s*(\d+)', repeats=False, dtype=int),
Quantity(
'force_convergence',
r'Maximum force magnitude\s*\(target\)\s*\:(\s*[\(\)\d\.\-\+Ee ]+)',
str_operation=str_to_quantity_tolerances, unit=ureg.hartree / ureg.bohr, repeats=False,
dtype=float),
Quantity(
'energy_total', r'Total energy at this optimization step\s*\:\s*([\-\d\.Ee]+)',
unit=ureg.hartree, repeats=False, dtype=float),
Quantity(
'time', r'Time spent in this optimization step\s*\:\s*([\-\d\.Ee]+)\s*seconds',
unit=ureg.s, repeats=False, dtype=float)
]
self._quantities.append(Quantity(
'structure_optimization',
r'Structure\-optimization module started([\s\S]+?)Structure\-optimization module stopped',
sub_parser=TextParser(quantities=[
Quantity(
'optimization_step',
r'(Optimization step\s*\d+[\s\S]+?(?:\n *\n\-{10}|Time spent in this optimization step\s*:\s*[\d\.]+ seconds))',
sub_parser=TextParser(quantities=optimization_quantities),
repeats=True),
Quantity(
'final',
r'Force convergence target achieved([\s\S]+?Opt)',
sub_parser=TextParser(quantities=scf_quantities),
repeats=False),
Quantity(
'atomic_positions',
r'(imized atomic positions\s*\([\s\S]+?)\n\n',
sub_parser=TextParser(quantities=[
Quantity(
'positions_format', r'imized atomic positions\s*\(([a-z]+)\)'),
Quantity(
'symbols', r'atom\s*\d+\s*(\w+)', repeats=True, dtype=str),
Quantity(
'positions', r'\s*:\s*([\d\.\-]+\s*[\d\.\-]+\s*[\d\.\-]+)',
repeats=True, dtype=float)])),
Quantity(
'forces',
r'Total atomic forces including IBS \(\w+\)\s*\:(\s*atom[\-\s\w\.\:]*?)\n *Atomic',
repeats=False, str_operation=str_to_array, dtype=float, unit=ureg.hartree / ureg.bohr),
]), repeats=False))
self._quantities.append(Quantity(
'hybrids',
r'Hybrids module started([\s\S]+?)Hybrids module stopped',
sub_parser=TextParser(quantities=module_quantities)
))
def get_atom_labels(self, section):
labels = section.get('symbols')
if labels is None:
# we get it by concatenating species symbols
species = self.get('initialization', {}).get('species', [])
labels = []
for specie in species:
labels += [specie.get('symbol')] * len(specie.get('positions'))
return labels
def get_positions_format(self, section):
positions_format = section.get('positions_format')
if positions_format is None:
species = self.get_initialization_parameter('species', [])
for specie in species:
positions_format = specie.get('positions_format', None)
if positions_format is not None:
break
return positions_format
def get_atom_positions(self, section={}, positions=None, positions_format=None):
positions = positions if positions is not None else section.get('positions')
if positions is None:
species = self.get_initialization_parameter('species', [])
if species:
positions = np.vstack([s.get('positions') for s in species])
if positions is None:
return
positions = np.array(positions)
positions_format = positions_format if positions_format is not None else self.get_positions_format(section)
if positions_format == 'lattice':
cell = self.get_initialization_parameter('lattice_vectors')
if cell is None:
return
positions = np.dot(positions, cell.magnitude)
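            # (added note) i.e. fractional (lattice) coordinates are converted to
            # cartesian coordinates before the bohr unit is attached below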
return positions * ureg.bohr
def get_scf_threshold(self, name):
reference = self.get('groundstate', self.get('hybrids', {}))
return reference.get('scf_iteration', [{}])[-1].get(
name, [None, None])[-1]
def get_scf_quantity(self, name):
n_scf = len(self.get('energy_total_scf_iteration', []))
quantity = self.get('%s_scf_iteration' % name)
if quantity is None:
return
        # this is really problematic if some scf steps don't have the quantity;
        # the only thing we can do is to assume that the first steps are the
        # ones with the missing quantity
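        # e.g. (illustrative) with n_scf = 4 and quantity = [q3, q4], the padding
        # below yields [None, None, q3, q4]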
if len(quantity) < n_scf:
quantity = [None] * (n_scf - len(quantity)) + quantity
return quantity
def get_xc_functional_name(self):
# TODO expand list to include other xcf
xc_functional_map = {
2: ['LDA_C_PZ', 'LDA_X_PZ'],
3: ['LDA_C_PW', 'LDA_X_PZ'],
4: ['LDA_C_XALPHA'],
5: ['LDA_C_VBH'],
20: ['GGA_C_PBE', 'GGA_X_PBE'],
21: ['GGA_C_PBE', 'GGA_X_PBE_R'],
22: ['GGA_C_PBE_SOL', 'GGA_X_PBE_SOL'],
26: ['GGA_C_PBE', 'GGA_X_WC'],
30: ['GGA_C_AM05', 'GGA_C_AM05'],
300: ['GGA_C_BGCP', 'GGA_X_PBE'],
406: ['HYB_GGA_XC_PBEH'],
408: ['HYB_GGA_XC_HSE03']}
xc_functional = self.get('initialization', {}).get('xc_functional', None)
if xc_functional is None:
return []
name = xc_functional_map.get(xc_functional.type, [])
return name
@property
def n_optimization_steps(self):
return len(self.get('structure_optimization', {}).get('optimization_step', []))
def get_number_of_spin_channels(self):
spin_treatment = self.get('initialization', {}).get(
'x_exciting_spin_treatment', 'spin-unpolarised')
n_spin = 1 if spin_treatment.lower() == 'spin-unpolarised' else 2
return n_spin
def get_unit_cell_volume(self):
return self.get('initialization', {}).get('x_exciting_unit_cell_volume', 1.0 * ureg.bohr ** 3)
def get_initialization_parameter(self, key, default=None):
return self.get('initialization', {}).get(key, default)
class ExcitingParser:
def __init__(self):
self.info_parser = ExcitingInfoParser()
self.dos_parser = DOSXMLParser(energy_unit=ureg.hartree)
self.bandstructure_parser = BandstructureXMLParser(energy_unit=ureg.hartree)
self.eigval_parser = ExcitingEigenvalueParser()
self.fermisurf_parser = ExcitingFermiSurfaceBxsfParser()
self.evalqp_parser = ExcitingEvalqpParser()
self.dos_out_parser = DataTextParser()
self.bandstructure_dat_parser = BandstructureDatParser(energy_unit=ureg.hartree)
self.band_out_parser = BandOutParser(energy_unit=ureg.hartree)
self.info_gw_parser = GWInfoParser()
self.input_xml_parser = XMLParser()
self.data_xs_parser = DataTextParser()
self.data_clathrate_parser = DataTextParser(dtype=str)
# different names for different versions of exciting
self._energy_keys_mapping = {
'energy_total': ['Total energy', 'total energy'],
'x_exciting_fermi_energy': ['Fermi energy', 'Fermi'],
'energy_kinetic_electronic': ['Kinetic energy', 'electronic kinetic'],
'energy_coulomb': ['Coulomb energy', 'Coulomb'],
'x_exciting_coulomb_energy': ['Coulomb energy', 'Coulomb'],
'energy_exchange': ['Exchange energy', 'exchange'],
'x_exciting_exchange_energy': ['Exchange energy', 'exchange'],
'energy_correlation': ['Correlation energy', 'correlation'],
'x_exciting_correlation_energy': ['Correlation energy', 'correlation'],
'energy_sum_eigenvalues': ['Sum of eigenvalues', 'sum of eigenvalues'],
'x_exciting_effective_potential_energy': ['Effective potential energy'],
'x_exciting_coulomb_potential_energy': ['Coulomb potential energy', 'Coulomb potential'],
'energy_xc_potential': ['xc potential energy', 'xc potential'],
'energy_electrostatic': ['Hartree energy', 'Hartree'],
'x_exciting_hartree_energy': ['Hartree energy', 'Hartree'],
'x_exciting_electron_nuclear_energy': ['Electron-nuclear energy', 'electron-nuclear '],
'x_exciting_nuclear_nuclear_energy': ['Nuclear-nuclear energy', 'nuclear-nuclear'],
'x_exciting_madelung_energy': ['Madelung energy', 'Madelung'],
'x_exciting_core_electron_kinetic_energy': ['Core-electron kinetic energy', 'core electron kinetic'],
'x_exciting_dft_d2_dispersion_correction': ['DFT-D2 dispersion correction']
}
self._electron_charge_keys_mapping = {
'x_exciting_core_charge': ['core'],
'x_exciting_core_leakage': ['core leakage'],
'x_exciting_valence_charge': ['valence'],
'x_exciting_interstitial_charge': ['interstitial'],
'x_exciting_total_MT_charge': ['total charge in muffin-tins', 'total in muffin-tins'],
'charge_total': ['total charge'],
'x_exciting_section_MT_charge_atom': ['atom_resolved']
}
self._moment_keys_mapping = {
'x_exciting_interstitial_moment': ['interstitial'],
'x_exciting_total_MT_moment': ['total moment in muffin-tins'],
'x_exciting_total_moment': ['total moment'],
'x_exciting_section_MT_moment_atom': ['atom_resolved']
}
def get_exciting_files(self, default):
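        # (added note, illustrative) the default name inherits the suffix of the
        # mainfile: a mainfile 'INFO_run1.OUT' yields suffix '_run1', so a default
        # of 'dos.xml' is first resolved to 'dos_run1.xml'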
mainfile = os.path.basename(self.info_parser.mainfile)
suffix = mainfile.strip('INFO.OUT')
target = default.rsplit('.', 1)
filename = '%s%s' % (target[0], suffix)
if target[1:]:
filename = '%s.%s' % (filename, target[1])
filename = os.path.join(self.info_parser.maindir, filename)
if os.path.isfile(filename):
return [filename]
filename = os.path.join(self.info_parser.maindir, default)
if not os.path.isfile(filename):
file_ext = default.split('.')[-1]
mainfile_base = mainfile.rsplit('.', 1)[0].replace('INFO', '')
options = [
f for f in os.listdir(
self.info_parser.maindir) if target[0] in f and mainfile_base in f]
options = [f for f in options if f.endswith(file_ext)]
options.sort()
filenames = [os.path.join(self.info_parser.maindir, f) for f in options]
else:
filenames = [filename]
filenames = [f for f in filenames if os.access(f, os.F_OK)]
return filenames
def file_exists(self, filename):
"""Checks if a the given filename exists and is accessible in the same
folder where the mainfile is stored.
"""
mainfile = os.path.basename(self.info_parser.mainfile)
suffix = mainfile.strip('INFO.OUT')
target = filename.rsplit('.', 1)
filepath = '%s%s' % (target[0], suffix)
if target[1:]:
filepath = '%s.%s' % (filepath, target[1])
filepath = os.path.join(self.info_parser.maindir, filepath)
if os.path.isfile(filepath) and os.access(filepath, os.F_OK):
return True
return False
def _parse_dos(self, sec_scc):
if self.dos_parser.get('totaldos', None) is None:
return
# Get fermi energy: it is used to un-shift the DOS to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
return
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
sec_dos = sec_scc.m_create(Dos, Calculation.dos_electronic)
sec_dos.n_energies = self.dos_parser.number_of_dos
sec_dos.energies = self.dos_parser.energies + energy_fermi
volume = self.info_parser.get_unit_cell_volume()
totaldos = self.dos_parser.get('totaldos') * volume.to('m**3').magnitude
for spin in range(len(totaldos)):
sec_dos_values = sec_dos.m_create(DosValues, Dos.total)
sec_dos_values.spin = spin
sec_dos_values.value = totaldos[spin]
partialdos = self.dos_parser.get('partialdos')
if partialdos is None:
return
partialdos = partialdos.to('1/joule').magnitude
lm_values = np.column_stack((np.arange(len(partialdos)), np.zeros(len(partialdos), dtype=np.int32)))
for lm in range(len(partialdos)):
for spin in range(len(partialdos[lm])):
for atom in range(len(partialdos[lm][spin])):
sec_dos_values = sec_dos.m_create(DosValues, Dos.atom_projected)
sec_dos_values.m_kind = 'spherical'
sec_dos_values.lm = lm_values[lm]
sec_dos_values.spin = spin
sec_dos_values.atom_index = atom
sec_dos_values.value = partialdos[lm][spin][atom]
def _parse_bandstructure(self, sec_scc):
# we need to set nspin again as this is overwritten when setting mainfile
self.bandstructure_parser._nspin = self.info_parser.get_number_of_spin_channels()
band_energies = self.bandstructure_parser.get('band_energies', [])
for n in range(len(band_energies)):
# Get fermi energy: it is used to un-shift the band structure to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
continue
energy_fermi = energy_fermi.to("hartree")
sec_k_band = sec_scc.m_create(BandStructure, Calculation.band_structure_electronic)
sec_k_band.energy_fermi = energy_fermi
band_k_points = self.bandstructure_parser.get('band_k_points')
nkpts_segment = self.bandstructure_parser.number_of_k_points_per_segment
band_seg_labels = self.bandstructure_parser.get('band_segm_labels')
for nb in range(len(band_energies[n])):
sec_k_band_segment = sec_k_band.m_create(BandEnergies)
sec_k_band_segment.n_kpoints = nkpts_segment[nb]
sec_k_band_segment.kpoints = band_k_points[nb]
sec_k_band_segment.endpoints_labels = band_seg_labels[nb]
sec_k_band_segment.energies = band_energies[n][nb] + energy_fermi
def _parse_eigenvalues(self, sec_scc):
if self.eigval_parser.get('eigenvalues_occupancies', None) is None:
return
nspin = self.info_parser.get_number_of_spin_channels()
def get_data(key):
data = self.eigval_parser.get('eigenvalues_occupancies')
            # reshaping is not necessary as this is done in the parser; however, nspin is
            # determined from the occupancies, which is sometimes problematic
res = np.hstack([np.reshape(v[key], (nspin, np.size(v[key]) // nspin)) for v in data])
res = res.reshape((len(res), len(data), len(res[0]) // len(data)))
if key == 'eigenvalues':
res = res * ureg.hartree
return res
sec_eigenvalues = sec_scc.m_create(BandEnergies)
sec_eigenvalues.kpoints = self.eigval_parser.get('k_points')
sec_eigenvalues.occupations = get_data('occupancies')
sec_eigenvalues.energies = get_data('eigenvalues')
def _parse_fermisurface(self, sec_scc):
fermi_surface = self.fermisurf_parser.get('fermi_surface', [None])[0]
if fermi_surface is None:
return
sec_fermisurface = sec_scc.m_create(x_exciting_section_fermi_surface)
band_parameters = self.fermisurf_parser.get('band_parameters', None)
if band_parameters is not None:
sec_fermisurface.x_exciting_number_of_bands_fermi_surface = band_parameters[0]
sec_fermisurface.x_exciting_number_of_mesh_points_fermi_surface = np.product(band_parameters[1])
sec_fermisurface.x_exciting_grid_fermi_surface = band_parameters[1]
sec_fermisurface.x_exciting_origin_fermi_surface = band_parameters[2]
sec_fermisurface.x_exciting_vectors_fermi_surface = band_parameters[3]
fermi_energy = self.fermisurf_parser.get('fermi_energy', None)
if fermi_energy is not None:
sec_fermisurface.x_exciting_fermi_energy_fermi_surface = fermi_energy
sec_fermisurface.x_exciting_values_fermi_surface = fermi_surface
def _parse_evalqp(self, sec_scc):
data = self.evalqp_parser.get('kpoints_eigenvalues')
if data is None:
return
def get_data(key):
if key == 'k_points':
return np.array([d[0][:3] for d in data])
elif key == 'Znk':
return np.array([d[1].get(key, None) for d in data])
else:
energy = np.array([d[1].get(key, None) for d in data])
if None in energy:
return energy
return np.array([d[1].get(key) for d in data]) * ureg.hartree
eigs_gw = get_data('E_GW')
if eigs_gw[0] is None:
return
nspin = self.info_parser.get_number_of_spin_channels()
def reshape(data):
if data[0] is None:
return
return np.reshape(data, (nspin, len(data) // nspin, len(data[0])))
sec_gw_eigenvalues = sec_scc.m_create(BandEnergies)
sec_gw_eigenvalues.qp_linearization_prefactor = reshape(get_data('Znk'))
sec_gw_eigenvalues.n_bands = len(eigs_gw[0])
sec_gw_eigenvalues.n_kpoints = len(eigs_gw)
sec_gw_eigenvalues.kpoints = get_data('k_points')
sec_gw_eigenvalues.energies = reshape(eigs_gw)
sec_gw_eigenvalues.value_exchange = reshape(get_data('Sx'))
eigs_gw_C = reshape(get_data('Sc'))
if eigs_gw_C is None:
eigs_gw_C = reshape(get_data('Re(Sc)'))
sec_gw_eigenvalues.value_correlation = eigs_gw_C
sec_gw_eigenvalues.value_xc_potential = reshape(get_data('Vxc'))
def _parse_dos_out(self, sec_scc):
data = self.dos_out_parser.data
if data is None:
return
# Get fermi energy: it is used to un-shift the DOS to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
return
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
        # TODO I am not sure about the format for the spin-polarized case! I assume it is
# energy dos_up dos_down
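        # (added note) the reshape below actually reads two columns (energy, dos)
        # with the spin channels stacked as consecutive blocks of rows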
nspin = self.info_parser.get_number_of_spin_channels()
sec_dos = sec_scc.m_create(Dos, Calculation.dos_electronic)
sec_dos.n_energies = len(data) // nspin
data = np.reshape(data, (nspin, len(data) // nspin, 2))
data = np.transpose(data, axes=(2, 0, 1))
sec_dos.energies = data[0][0] * ureg.hartree + energy_fermi
volume = self.info_parser.get_unit_cell_volume()
dos = data[1] * (1 / ureg.hartree) * volume.to('m**3').magnitude
for spin in range(len(dos)):
sec_dos_values = sec_dos.m_create(DosValues, Dos.total)
sec_dos_values.spin = spin
sec_dos_values.value = dos[spin]
# TODO add PDOS
def _parse_bandstructure_dat(self, sec_scc):
self.bandstructure_dat_parser._nspin = self.info_parser.get_number_of_spin_channels()
band_energies = self.bandstructure_dat_parser.band_energies
if band_energies is None:
return
# Get fermi energy: it is used to un-shift the band structure to
# the original scale in which also other energies are reported.
energy_fermi = sec_scc.energy.fermi
if energy_fermi is None:
return
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
sec_k_band = sec_scc.m_create(BandStructure, Calculation.band_structure_electronic)
sec_k_band.energy_fermi = energy_fermi
band_k_points = self.bandstructure_dat_parser.band_k_points
nkpts_segment = self.bandstructure_dat_parser.number_of_k_points_per_segment
for nb in range(len(band_energies)):
sec_k_band_segment = sec_k_band.m_create(BandEnergies)
sec_k_band_segment.n_kpoints = nkpts_segment[nb]
sec_k_band_segment.kpoints = band_k_points[nb]
sec_k_band_segment.energies = band_energies[nb] + energy_fermi
def _parse_band_out(self, sec_scc):
self.band_out_parser._nspin = self.info_parser.get_number_of_spin_channels()
band_energies = self.band_out_parser.band_energies
if band_energies is None:
return
# Get fermi energy: it is used to un-shift the band structure to
# the original scale in which also other energies are reported.
energy_fermi = 0.0 * ureg.hartree
if sec_scc.energy is not None:
energy_fermi = sec_scc.energy.fermi
energy_fermi = (energy_fermi.magnitude * ureg.joule).to('hartree')
sec_k_band = sec_scc.m_create(BandStructure, Calculation.band_structure_electronic)
sec_k_band.energy_fermi = energy_fermi
nkpts_segment = self.band_out_parser.number_of_k_points_per_segment
for nb in range(len(band_energies)):
sec_k_band_segment = sec_k_band.m_create(BandEnergies)
sec_k_band_segment.n_kpoints = nkpts_segment[nb]
sec_k_band_segment.value = band_energies[nb] + energy_fermi
def parse_file(self, name, section):
# TODO add support for info.xml, wannier.out
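        # e.g. 'dos.xml' -> _parse_dos, 'BAND.OUT' -> _parse_band_out,
        # 'EVALQP.DAT' -> _parse_evalqp (see the dispatch below)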
if name.startswith('dos') and name.endswith('xml'):
parser = self.dos_parser
parser_function = self._parse_dos
elif name.startswith('bandstructure') and name.endswith('xml'):
parser = self.bandstructure_parser
parser_function = self._parse_bandstructure
elif name.startswith('EIGVAL') and name.endswith('OUT'):
parser = self.eigval_parser
parser_function = self._parse_eigenvalues
elif (name.startswith('FERMISURF') or name.startswith('FS')) and name.endswith('bxsf'):
parser = self.fermisurf_parser
parser_function = self._parse_fermisurface
elif name.startswith('EVALQP') and (name.endswith('DAT') or name.endswith('TXT')):
parser = self.evalqp_parser
parser_function = self._parse_evalqp
elif name.startswith('TDOS') and name.endswith('OUT'):
parser = self.dos_out_parser
parser_function = self._parse_dos_out
elif name.startswith('bandstructure') and name.endswith('dat'):
parser = self.bandstructure_dat_parser
parser_function = self._parse_bandstructure_dat
elif name.startswith('BAND') and name.endswith('OUT'):
parser = self.band_out_parser
parser_function = self._parse_band_out
elif name.startswith('input') and name.endswith('xml'):
parser = self.input_xml_parser
if self._calculation_type == 'gw':
parser_function = self._parse_input_gw
elif self._calculation_type == 'xs':
parser_function = self._parse_input_xs
else:
# TODO implement reading of parameters from input.xml for normal calculations
# in addition to INFO.OUT
return
else:
return
files = self.get_exciting_files(name)
if len(files) > 1:
self.logger.warn('Found multiple files. Will read all!', data=dict(file=name))
for n in range(len(files)):
parser.mainfile = files[n]
parser_function(section)
# free up memory
parser.mainfile = None
def _parse_input_xs(self, sec_method):
xstype = self.input_xml_parser.get('xs/xstype', None)
if xstype is not None:
sec_method.x_exciting_xs_xstype = xstype
sec_method.x_exciting_electronic_structure_method = xstype
sec_method.x_exciting_xs_broadening = self.input_xml_parser.get(
'xs/broad', 0.01, 'hartree')
sec_method.x_exciting_xs_gqmax = self.input_xml_parser.get(
'xs/gqmax', 0.0, '1/bohr')
sec_method.x_exciting_xs_lmaxapw = self.input_xml_parser.get('xs/lmaxapw', 10)
sec_method.x_exciting_xs_number_of_empty_states = self.input_xml_parser.get(
'xs/nempty', 5)
sec_method.x_exciting_xs_ngridq = self.input_xml_parser.get('xs/ngridq', [1, 1, 1])
sec_method.x_exciting_xs_ngridk = self.input_xml_parser.get('xs/ngridk', [1, 1, 1])
rgkmax = self.input_xml_parser.get('xs/rgkmax', None)
if rgkmax is None:
rgkmax = self.info_parser.get_initialization_parameter('x_exciting_rgkmax', 0.)
sec_method.x_exciting_xs_rgkmax = rgkmax
sec_method.x_exciting_xs_scissor = self.input_xml_parser.get('xs/scissor', 0.0)
sec_method.x_exciting_xs_vkloff = self.input_xml_parser.get('xs/vkloff', [0., 0., 0.])
# TODO I am not certain if screening/BSE are children of xs
if self.input_xml_parser.get('xs/screening') is not None:
sec_method.x_exciting_xs_screening_number_of_empty_states = self.input_xml_parser.get(
'xs/screening/nempty', 0)
sec_method.x_exciting_xs_screening_ngridk = self.input_xml_parser.get(
'xs/screening/ngridk', [0, 0, 0])
rgkmax = self.input_xml_parser.get('xs/screening/rgkmax', None)
if rgkmax is None:
rgkmax = self.info_parser.get_initialization_parameter('x_exciting_rgkmax', 0.)
sec_method.x_exciting_xs_screening_rgkmax = rgkmax
sec_method.x_exciting_xs_screening_type = self.input_xml_parser.get(
'xs/screening/screentype', 'full')
if self.input_xml_parser.get('xs/BSE') is not None:
sec_method.x_exciting_xs_bse_antiresonant = self.input_xml_parser.get(
'xs/BSE/aresbse', True)
sec_method.x_exciting_xs_bse_angular_momentum_cutoff = self.input_xml_parser.get(
'xs/BSE/lmaxdielt', 14)
rgkmax = self.input_xml_parser.get('xs/BSE/rgkmax', None)
if rgkmax is None:
rgkmax = self.info_parser.get_initialization_parameter('x_exciting_rgkmax', 0)
sec_method.x_exciting_xs_bse_rgkmax = rgkmax
sec_method.x_exciting_xs_bse_sciavbd = self.input_xml_parser.get(
'xs/BSE/sciavbd', True)
sec_method.x_exciting_xs_bse_sciavqbd = self.input_xml_parser.get(
'xs/BSE/sciavqbd', False)
sec_method.x_exciting_xs_bse_sciavqhd = self.input_xml_parser.get(
'xs/BSE/sciavqhd', False)
sec_method.x_exciting_xs_bse_sciavqwg = self.input_xml_parser.get(
'xs/BSE/sciavqwg', False)
sec_method.x_exciting_xs_bse_sciavtype = self.input_xml_parser.get(
'xs/BSE/sciavtype', 'spherical')
sec_method.x_exciting_xs_bse_xas = self.input_xml_parser.get(
'xs/BSE/xas', False)
sec_method.x_exciting_xs_bse_number_of_bands = self.input_xml_parser.get(
'xs/BSE/nstlbse', [0, 0, 0, 0])
if sec_method.x_exciting_xs_bse_xas:
sec_method.x_exciting_xs_bse_xasatom = self.input_xml_parser.get(
'xs/BSE/xasatom', 0)
sec_method.x_exciting_xs_bse_xasedge = self.input_xml_parser.get(
'xs/BSE/xasedge', 'K')
sec_method.x_exciting_xs_bse_xasspecies = self.input_xml_parser.get(
'xs/BSE/xasspecies', 0)
sec_method.x_exciting_xs_bse_xas_number_of_bands = self.input_xml_parser.get(
'xs/BSE/nstlxas', [0, 0])
if self.input_xml_parser.get('xs/tddft') is not None:
sec_method.x_exciting_xs_tddft_analytic_continuation = self.input_xml_parser.get(
'xs/tddft/acont', False)
sec_method.x_exciting_xs_tddft_anomalous_Hall_conductivity = self.input_xml_parser.get(
'xs/tddft/ahc', False)
sec_method.x_exciting_xs_tddft_anti_resonant_dielectric = self.input_xml_parser.get(
'xs/tddft/aresdf', False)
sec_method.x_exciting_xs_tddft_anti_resonant_xc_kernel = self.input_xml_parser.get(
'xs/tddft/aresfxc', True)
sec_method.x_exciting_xs_tddft_drude = self.input_xml_parser.get(
'xs/tddft/drude', [0., 0.])
sec_method.x_exciting_xs_tddft_split_parameter = self.input_xml_parser.get(
'xs/tddft/fxcbsesplit', 0.00001, 'hartree')
sec_method.x_exciting_xs_tddft_xc_kernel = self.input_xml_parser.get(
'xs/tddft/fxctype', 'RPA')
sec_method.x_exciting_xs_tddft_finite_q_intraband_contribution = self.input_xml_parser.get(
'xs/tddft/intraband', False)
sec_method.x_exciting_xs_tddft_diagonal_xc_kernel = self.input_xml_parser.get(
'xs/tddft/kerndiag', False)
sec_method.x_exciting_xs_tddft_lmax_alda = self.input_xml_parser.get(
'xs/tddft/lmaxalda', 3)
sec_method.x_exciting_xs_tddft_macroscopic_dielectric_function_q_treatment = self.input_xml_parser.get(
'xs/tddft/mdfqtype', 0)
sec_method.x_exciting_xs_tddft_analytic_continuation_number_of_intervals = self.input_xml_parser.get(
'xs/tddft/nwacont', 0)
sec_method.x_exciting_xs_tetra = self.input_xml_parser.get(
'xs/tetra/tetradf', False)
def _parse_xs_bse(self):
sec_run = self.archive.run[-1]
# TODO read from xml file
def get_files(name):
bse_types = ['IP', 'singlet', 'triplet', 'RPA']
scr_types = ['full', 'diag', 'noinvdiag', 'longrange']
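            # (illustrative) candidate file names look like 'EXCITON_BSEsinglet_SCRfull.OUT'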
bse_files = []
for bse_type in bse_types:
for scr_type in scr_types:
files = self.get_exciting_files(
'%s_BSE%s_SCR%s.OUT' % (name, bse_type, scr_type))
bse_files.append(files)
return bse_files
def get_data(files):
data = []
for f in files:
self.data_xs_parser.mainfile = f
if self.data_xs_parser.data is None:
continue
data.append(self.data_xs_parser.data)
return data
def parse_exciton(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
sec_scc.x_exciting_xs_bse_number_of_components = n_components
n_excitons = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_number_of_excitons = n_excitons
sec_scc.x_exciting_xs_bse_exciton_energies = np.reshape(
data[1], (n_components, n_excitons)) * ureg.hartree
sec_scc.x_exciting_xs_bse_exciton_binding_energies = np.reshape(
data[2], (n_components, n_excitons)) * ureg.hartree
sec_scc.x_exciting_xs_bse_exciton_oscillator_strength = np.reshape(
data[3], (n_components, n_excitons))
sec_scc.x_exciting_xs_bse_exciton_amplitude_re = np.reshape(
data[4], (n_components, n_excitons))
sec_scc.x_exciting_xs_bse_exciton_amplitude_im = np.reshape(
data[5], (n_components, n_excitons))
def parse_epsilon(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
n_epsilon = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_number_of_energy_points = n_epsilon
sec_scc.x_exciting_xs_bse_epsilon_energies = np.reshape(
data[0], (n_components, n_epsilon)) * ureg.hartree
sec_scc.x_exciting_xs_bse_epsilon_re = np.reshape(
data[1], (n_components, n_epsilon))
sec_scc.x_exciting_xs_bse_epsilon_im = np.reshape(
data[2], (n_components, n_epsilon))
def parse_sigma(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
n_sigma = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_sigma_energies = np.reshape(
data[0], (n_components, n_sigma)) * ureg.hartree
sec_scc.x_exciting_xs_bse_sigma_re = np.reshape(
data[1], (n_components, n_sigma))
sec_scc.x_exciting_xs_bse_sigma_im = np.reshape(
data[2], (n_components, n_sigma))
def parse_loss(data, sec_scc):
n_components = len(data)
data = np.transpose(np.vstack(data))
n_loss = len(data[0]) // n_components
sec_scc.x_exciting_xs_bse_loss_energies = np.reshape(
data[0], (n_components, n_loss)) * ureg.hartree
sec_scc.x_exciting_xs_bse_loss = np.reshape(
data[1], (n_components, n_loss))
        # TODO check if the format of the files is really correct, i.e. that the columns
        # are what they are supposed to be. What is the fourth column in epsilon, which is not parsed?
sccs = []
for quantity in ['EXCITON', 'EPSILON', 'SIGMA', 'LOSS']:
files = get_files(quantity)
for i in range(len(files)):
data = get_data(files[i])
if not data:
sccs.append(None)
continue
if quantity == 'EXCITON':
sec_scc = sec_run.m_create(Calculation)
sccs.append(sec_scc)
else:
sec_scc = sccs[i]
if sec_scc is None:
# This is the case when there is a mismatch between files
self.logger.warn(
'Mismatch in EXCITON and file type', data=dict(file=quantity))
sec_scc = sec_run.m_create(Calculation)
if quantity == 'EXCITON':
parse_function = parse_exciton
elif quantity == 'EPSILON':
parse_function = parse_epsilon
elif quantity == 'SIGMA':
parse_function = parse_sigma
elif quantity == 'LOSS':
parse_function = parse_loss
else:
continue
try:
parse_function(data, sec_scc)
except Exception:
self.logger.error('Error setting xs data', data=dict(file=quantity))
def _parse_xs_tddft(self):
sec_run = self.archive.run[-1]
fxctype = self.input_xml_parser.get('xs/tddft/fxctype', 'RPA')
tetradf = self.input_xml_parser.get('xs/tetra/tetradf', None)
nwacont = self.input_xml_parser.get('xs/tddft/nwacont', None)
aresdf = self.input_xml_parser.get('xs/tddft/aresdf', True)
file_ext_list = [
'TET' if tetradf else None, 'AC' if nwacont else None, 'NAR' if not aresdf else None]
file_ext = '_'.join([e for e in file_ext_list if e])
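        # e.g. (illustrative) tetradf=True, nwacont=None, aresdf=False gives file_ext = 'TET_NAR'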
# read q points
qpoints = self.input_xml_parser.get('xs/qpointset/qpoint')
def get_data(quantity, ext):
# all files related to quantity at all qpoints
files = self.get_exciting_files('%s_%s%s%s.OUT' % (quantity, file_ext, ext, fxctype))
data = [[], [], []]
for i in range(len(qpoints)):
data_q = []
files_q = [f for f in files if f.endswith('QMT%s.OUT' % str(i + 1).rjust(3, '0'))]
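                # e.g. the first q-point (i = 0) matches files ending in 'QMT001.OUT'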
for f in files_q:
self.data_xs_parser.mainfile = f
if self.data_xs_parser.data is None:
continue
data_q.append(self.data_xs_parser.data)
if not data_q:
continue
data_q = np.transpose(data_q, axes=(2, 0, 1))
for j in range(len(data)):
data[j].append(data_q[j])
return data
for quantity in ['EPSILON', 'LOSS', 'SIGMA']:
for ext in ['FXC', 'NLF_FXC']:
data = get_data(quantity, ext)
if not data[0]:
continue
if quantity == 'EPSILON' and ext == 'FXC':
sec_scc = sec_run.m_create(Calculation)
sec_scc.x_exciting_xs_tddft_number_of_epsilon_values = len(data[0][0][0])
sec_scc.x_exciting_xs_tddft_epsilon_energies = data[0][0][0] * ureg.hartree
sec_scc.x_exciting_xs_tddft_dielectric_function_local_field = data[1:]
elif quantity == 'EPSILON' and ext == 'NLF_FXC':
sec_scc.x_exciting_xs_tddft_dielectric_function_no_local_field = data[1:3]
elif quantity == 'LOSS' and ext == 'FXC':
sec_scc.x_exciting_xs_tddft_loss_function_local_field = data[1]
elif quantity == 'LOSS' and ext == 'NLF_FXC':
sec_scc.x_exciting_xs_tddft_loss_function_no_local_field = data[1]
elif quantity == 'SIGMA' and ext == 'FXC':
sec_scc.x_exciting_xs_tddft_sigma_local_field = data[1:3]
elif quantity == 'SIGMA' and ext == 'NLF_FXC':
sec_scc.x_exciting_xs_tddft_sigma_no_local_field = data[1:3]
def parse_xs(self):
sec_run = self.archive.run[-1]
xs_info_files = self.get_exciting_files('INFOXS.OUT')
if not xs_info_files:
return
self._calculation_type = 'xs'
        # inconsistency in the naming convention for the xs input xml file
sec_method = sec_run.m_create(Method)
sec_method_ref = self.archive.run[-1].method[0]
sec_method.starting_method_ref = sec_method_ref
sec_method.methods_ref = [sec_method_ref]
self.parse_file('input.xml', sec_method)
# parse properties
input_file = self.get_exciting_files('input.xml')
if not input_file:
return
self.input_xml_parser.mainfile = input_file[0]
xstype = self.input_xml_parser.get('xs/xstype', '')
if xstype.lower() == 'bse':
self._parse_xs_bse()
elif xstype.lower() == 'tddft':
self._parse_xs_tddft()
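# Fill the GW method section from the gw element of input.xml (frequency grid,
# self-energy, mixed basis and bare/screened Coulomb settings).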
def _parse_input_gw(self, sec_method):
sec_gw = sec_method.m_create(GWMethod)
sec_gw.type = 'G0W0'
gmaxvr = self.info_parser.get_initialization_parameter('x_exciting_gmaxvr', 0)
sec_gw.core_treatment = self.input_xml_parser.get(
'gw/coreflag', 'all')
sec_gw.polarizability_number_of_empty_states = int(
self.input_xml_parser.get('gw/nempty', 0))
sec_gw.ngridq = self.input_xml_parser.get('gw/ngridq', [1, 1, 1])
sec_gw.basis_set = 'mixed'
sec_gw.qp_equation_treatment = 'linearization'
sec_gw.max_frequency = self.input_xml_parser.get(
'gw/freqgrid/freqmax', 1.0)
sec_gw.frequency_grid_type = self.input_xml_parser.get(
'gw/freqgrid/fgrid', 'gaule2')
sec_gw.number_of_frequencies = int(self.input_xml_parser.get(
'gw/freqgrid/nomeg', 16))
sec_gw.self_energy_c_number_of_poles = int(self.input_xml_parser.get(
'gw/selfenergy/npol', 0))
sec_gw.self_energy_c_number_of_empty_states = int(self.input_xml_parser.get(
'gw/selfenergy/nempty', 0))
sec_gw.self_energy_singularity_treatment = self.input_xml_parser.get(
'gw/selfenergy/singularity', 'mpd')
sec_gw.self_energy_c_analytical_continuation = self.input_xml_parser.get(
'gw/selfenergy/actype', 'pade')
sec_gw.mixed_basis_lmax = int(self.input_xml_parser.get(
'gw/mixbasis/lmaxmb', 3))
sec_gw.mixed_basis_tolerance = self.input_xml_parser.get(
'gw/mixbasis/epsmb', 0.0001)
gmb = self.input_xml_parser.get('gw/mixbasis/gmb', 1.0)
sec_gw.mixed_basis_gmax = gmb * gmaxvr
pwm = self.input_xml_parser.get('gw/barecoul/pwm', 2.0)
sec_gw.bare_coulomb_gmax = pwm * gmb * gmaxvr
sec_gw.bare_coulomb_cutofftype = self.input_xml_parser.get(
'gw/barecoul/cutofftype', 'none')
sec_gw.screened_coulomb_volume_average = self.input_xml_parser.get(
'gw/scrcoul/sciavtype', 'isotropic')
sec_gw.screened_Coulomb = self.input_xml_parser.get(
'gw/scrcoul/scrtype', 'rpa')
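# Parse a G0W0 calculation: read GW_INFO.OUT / GWINFO.OUT, link the method and
# calculation to the DFT run, and collect QP eigenvalues, DOS, band structure and gaps.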
def parse_gw(self):
sec_run = self.archive.run[-1]
# two versions of gw info files
gw_info_files = ['GW_INFO.OUT', 'GWINFO.OUT']
for f in gw_info_files:
if self.get_exciting_files(f):
self._calculation_type = 'gw'
gw_info_file = f
break
if not self._calculation_type == 'gw':
return
sec_method = sec_run.m_create(Method)
sec_method_ref = self.archive.run[-1].method[0]
sec_method.starting_method_ref = sec_method_ref
sec_method.methods_ref = [sec_method_ref]
# parse the input xml file; there seem to be two versions, input_gw.xml and input-gw.xml
for f in ['input_gw.xml', 'input-gw.xml', 'input.xml']:
self.parse_file(f, sec_method)
xc_functional_name = ' '.join(self.info_parser.get_xc_functional_name())
sec_method.gw.starting_point = xc_functional_name
sec_scc = sec_run.m_create(Calculation)
sec_scc.method_ref = sec_method
if sec_run.system:
sec_scc.system_ref = sec_run.system[-1]
sec_scc_ref = sec_run.calculation[0]
sec_scc.starting_calculation_ref = sec_scc_ref
sec_scc.calculations_ref = [sec_scc_ref]
# parse properties
gw_info_files = self.get_exciting_files(gw_info_file)
if len(gw_info_files) > 1:
self.logger.warn('Found multiple GW info files, will read only first!')
self.info_gw_parser.mainfile = gw_info_files[0]
fermi_energy = self.info_gw_parser.get('fermi_energy', None)
if fermi_energy is not None:
sec_scc.energy = Energy(fermi=fermi_energy)
gw_files = ['EVALQP.DAT', 'EVALQP.TXT', 'TDOS-QP.OUT']
# Parse GW band structure from one of the files:
bs_files = ['bandstructure-qp.dat', 'BAND-QP.OUT']
for fname in bs_files:
if self.file_exists(fname):
gw_files.append(fname)
break
for f in gw_files:
self.parse_file(f, sec_scc)
frequency_data = self.info_gw_parser.get('frequency_data', None)
if frequency_data is not None:
number = frequency_data.get('number')
sec_method.gw.number_of_frequencies = len(number)
sec_method.gw.frequency_number = number
sec_method.gw.frequency_values = frequency_data.get('values')
sec_method.gw.frequency_weights = frequency_data.get('weights')
fundamental_band_gap = self.info_gw_parser.get('direct_band_gap', None)
if fundamental_band_gap is None:
fundamental_band_gap = self.info_gw_parser.get('fundamental_band_gap', None)
sec_gap = sec_scc.eigenvalues[-1].m_create(BandGap)
if fundamental_band_gap is not None:
sec_gap.value_fundamental = fundamental_band_gap
optical_band_gap = self.info_gw_parser.get('optical_band_gap', None)
if optical_band_gap is not None:
sec_gap.value_optical = optical_band_gap
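# Create the workflow section: single point by default, geometry optimization
# (with its force tolerance) when a structure optimization is present.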
def parse_miscellaneous(self):
sec_workflow = self.archive.m_create(Workflow)
sec_workflow.type = 'single_point'
structure_optimization = self.info_parser.get('structure_optimization')
if structure_optimization is not None:
sec_workflow.type = 'geometry_optimization'
sec_geometry_opt = sec_workflow.m_create(GeometryOptimization)
threshold_force = structure_optimization.get(
'optimization_step', [{}])[0].get('force_convergence', [0., 0.])[-1]
sec_geometry_opt.input_force_maximum_tolerance = threshold_force
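# Build the DFT method section: basis set, smearing, SCF thresholds and
# exchange-correlation functionals (from INFO.OUT or, as a fallback, input.xml).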
def parse_method(self):
sec_run = self.archive.run[-1]
sec_method = sec_run.m_create(Method)
sec_method.basis_set.append(BasisSet(type='(L)APW+lo'))
sec_dft = sec_method.m_create(DFT)
sec_electronic = sec_method.m_create(Electronic)
sec_electronic.method = 'DFT'
smearing_kind_map = {
'Gaussian': 'gaussian', 'Methfessel-Paxton': 'methfessel-paxton',
'Fermi-Dirac': 'fermi', 'Extended': 'tetrahedra'}
sec_smearing = sec_electronic.m_create(Smearing)
smearing_kind = self.info_parser.get_initialization_parameter('smearing_kind')
if smearing_kind is not None:
if not isinstance(smearing_kind, str):
smearing_kind = smearing_kind[0]
smearing_kind = smearing_kind_map[smearing_kind]
sec_smearing.kind = smearing_kind
smearing_width = self.info_parser.get_initialization_parameter('smearing_width')
if smearing_width is not None:
smearing_width = (smearing_width * ureg.hartree).to('joule')
# TODO smearing width should have units of energy
sec_smearing.width = smearing_width.magnitude
for name in self.info_parser._convergence_keys_mapping.keys():
threshold = self.info_parser.get_scf_threshold(name)
if threshold is None:
continue
metainfo_name = 'x_exciting_scf_threshold_%s_change' % name.split('_')[-2]
setattr(sec_method, metainfo_name, threshold)
# additionally, set threshold to global metainfo. This is killing me!
if metainfo_name == 'x_exciting_scf_threshold_energy_change':
sec_method.scf = Scf(threshold_energy_change=threshold)
xc_functional_names = self.info_parser.get_xc_functional_name()
if not xc_functional_names:
# get it from input.xml
input_file = self.get_exciting_files('input.xml')
for f in input_file:
self.input_xml_parser.mainfile = f
correlation = self.input_xml_parser.get('libxc/correlation', None)
xc_functional_names.append(correlation)
exchange = self.input_xml_parser.get('libxc/exchange', None)
xc_functional_names.append(exchange)
sec_xc_functional = sec_dft.m_create(XCFunctional)
for name in xc_functional_names:
if name is None:
continue
if '_X_' in name:
sec_xc_functional.exchange.append(Functional(name=name))
elif '_C_' in name:
sec_xc_functional.correlation.append(Functional(name=name))
elif 'HYB' in name:
sec_xc_functional.hybrid.append(Functional(name=name))
else:
sec_xc_functional.contributions.append(Functional(name=name))
if not xc_functional_names:
# simply write parameters
xc_functional = self.info_parser.get('initialization', {}).get('xc_functional')
if xc_functional is not None:
sec_xc_functional.name = xc_functional.get('name_reference', [None, None])[0]
sec_xc_functional.reference = xc_functional.get('name_reference', [None, None])[1]
sec_electronic.n_spin_channels = self.info_parser.get_number_of_spin_channels()
if self._calculation_type == 'volume_optimization':
sec_method.x_exciting_volume_optimization = True
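# Convert one INFO.OUT block (final state plus SCF iterations) into a Calculation
# section with energies, charges, moments, forces and convergence data.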
def parse_scc(self, section):
sec_run = self.archive.run[-1]
final = section if section.get('energy_total') is not None else section.get('final')
if final is None:
# get it from last scf_iteration or optimization_step
final = section.get('scf_iteration', [None])[-1]
final = section.get('optimization_step', [None])[-1] if final is None else final
if final is None:
return
sec_scc = sec_run.m_create(Calculation)
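# Helper mapping a single SCF iteration (or the final state) onto a metainfo section.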
def parse_scf(iteration, msection):
energy_total = iteration.get('energy_total')
sec_energy = msection.m_create(Energy)
if energy_total is not None:
sec_energy.total = EnergyEntry(value=energy_total)
x_exciting_dos_fermi = iteration.get('x_exciting_dos_fermi')
if x_exciting_dos_fermi is not None:
setattr(msection, 'x_exciting_dos_fermi', x_exciting_dos_fermi)
# energy contributions
energy_contributions = iteration.get('energy_contributions', {})
for key, names in self._energy_keys_mapping.items():
val = None
for name in names:
val = energy_contributions.get(name, None)
if val is not None:
break
if val is None:
continue
if key.startswith('energy_'):
sec_energy.m_add_sub_section(getattr(
Energy, key.replace('energy_', '')), EnergyEntry(value=val))
else:
setattr(msection, key, val)
if key == 'x_exciting_fermi_energy':
sec_energy.fermi = val
# charge contributions
charge_contributions = iteration.get('charge_contributions', {})
for key, names in self._electron_charge_keys_mapping.items():
val = None
for name in names:
val = charge_contributions.get(name, None)
if val is not None:
break
if val is None:
continue
if key == 'x_exciting_section_MT_charge_atom':
for n in range(len(val)):
sec_mt_charge_atom = msection.m_create(x_exciting_section_MT_charge_atom)
sec_mt_charge_atom.x_exciting_MT_charge_atom_index = n + 1
sec_mt_charge_atom.x_exciting_MT_charge_atom_symbol = val[n][0]
sec_mt_charge_atom.x_exciting_MT_charge_atom_value = val[n][1]
sec_charges = msection.m_create(Charges)
sec_charges.value = [
val[n][1].magnitude for n in range(len(val))] * val[0][1].units
sec_charges.total = charge_contributions.get('total charge')
elif key == 'charge_total':
pass
else:
setattr(msection, key, val)
# moment contributions
moment_contributions = iteration.get('moment_contributions', {})
for key, names in self._moment_keys_mapping.items():
val = None
for name in names:
val = moment_contributions.get(name, None)
if val is not None:
break
if val is None:
continue
if key == 'x_exciting_section_MT_moment_atom':
for n in range(len(val)):
sec_mt_moment_atom = msection.m_create(x_exciting_section_MT_moment_atom)
sec_mt_moment_atom.x_exciting_MT_moment_atom_index = n + 1
sec_mt_moment_atom.x_exciting_MT_moment_atom_symbol = val[n][0]
sec_mt_moment_atom.x_exciting_MT_moment_atom_value = val[n][1]
else:
setattr(msection, key, val)
# convergence values
for name in self.info_parser._convergence_keys_mapping.keys():
val = iteration.get(name)
if val is None:
continue
setattr(msection, name, val)
# other metainfo
for name in self.info_parser._miscellaneous_keys_mapping.keys():
val = iteration.get(name)
if val is None:
continue
if name == 'time':
msection.time_calculation = val
else:
setattr(msection, name, val)
# energy, moment, charge contributions
parse_scf(final, sec_scc)
# forces
forces = section.get('forces')
if forces is not None:
sec_forces = sec_scc.m_create(Forces)
sec_forces.total = ForcesEntry(value=forces)
# scf iterations
scf_iterations = section.get('scf_iteration', [])
for scf_iteration in scf_iterations:
sec_scf_iteration = sec_scc.m_create(ScfIteration)
parse_scf(scf_iteration, sec_scf_iteration)
return sec_scc
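# Build the System section: positions and labels from INFO.OUT or input.xml,
# lattice vectors, per-species data and clathrate information.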
def parse_system(self, section):
sec_run = self.archive.run[-1]
positions = self.info_parser.get_atom_positions(section.get('atomic_positions', {}))
lattice_vectors = self.info_parser.get_initialization_parameter('lattice_vectors')
atom_labels = self.info_parser.get_atom_labels(section.get('atomic_positions', {}))
input_file = self.get_exciting_files('input.xml')
if positions is None:
# get it from input.xml
for f in input_file:
self.input_xml_parser.mainfile = f
positions = self.input_xml_parser.get('structure/species/atom/coord')
lattice_vectors = self.input_xml_parser.get(
'structure/crystal/basevect', np.eye(3))
species = self.input_xml_parser.get('structure/species/speciesfile')
if positions is None or lattice_vectors is None or species is None:
continue
lattice_vectors = np.array(lattice_vectors, dtype=float)
lattice_vectors *= self.input_xml_parser.get('structure/crystal/scale', 1.0)
positions = np.dot(positions, lattice_vectors) * ureg.bohr
lattice_vectors = lattice_vectors * ureg.bohr
atoms = self.input_xml_parser.get('structure/species/atom')
atom_labels = []
for n in range(len(atoms)):
atom_labels.extend([species[n].split('.')[0]] * len(atoms[n]))
if positions is None or atom_labels is None:
return
sec_system = sec_run.m_create(System)
sec_atoms = sec_system.m_create(Atoms)
sec_atoms.positions = positions
sec_atoms.labels = atom_labels
sec_atoms.periodic = [True] * 3
# TODO confirm no cell optimization in exciting
sec_atoms.lattice_vectors = lattice_vectors
lattice_vectors_reciprocal = self.info_parser.get_initialization_parameter(
'lattice_vectors_reciprocal')
sec_atoms.lattice_vectors_reciprocal = lattice_vectors_reciprocal
if len(sec_run.system) > 1:
return sec_system
for name in self.info_parser._system_keys_mapping.keys():
val = self.info_parser.get_initialization_parameter(name)
if val is None:
continue
if name == 'x_exciting_spin_treatment':
sub_sec = sec_system.m_create(x_exciting_section_spin)
sub_sec.x_exciting_spin_treatment = val
elif name == 'x_exciting_species_rtmin':
setattr(sec_system, name, ' '.join([str(v) for v in val]))
else:
try:
setattr(sec_system, name, val)
except Exception:
self.logger.warn('Error setting metainfo.')
# species
species = self.info_parser.get_initialization_parameter('species', [])
for specie in species:
sec_atoms_group = sec_system.m_create(x_exciting_section_atoms_group)
sec_atoms_group.x_exciting_geometry_atom_labels = specie.get('symbol')
sec_atoms_group.x_exciting_geometry_atom_number = str(specie.get('number'))
sec_atoms_group.x_exciting_muffin_tin_points = specie.get('radial_points')
sec_atoms_group.x_exciting_muffin_tin_radius = specie.get('muffin_tin_radius')
positions_format = specie.get('positions_format')
sec_atoms_group.x_exciting_atom_position_format = positions_format
positions = specie.get('positions')
positions = self.info_parser.get_atom_positions(
positions=positions, positions_format=positions_format).to('m')
sec_atoms_group.x_exciting_geometry_atom_positions = positions.magnitude
# clathrate info
clathrate_file = self.get_exciting_files('str.out')
if clathrate_file:
sec_system.x_exciting_clathrates = True
self.data_clathrate_parser.mainfile = clathrate_file[0]
if self.data_clathrate_parser.data:
data = np.transpose(self.data_clathrate_parser.data)
sec_system.x_exciting_clathrates_atom_coordinates = np.transpose(
np.array(data[:3], dtype=float))
sec_system.x_exciting_clathrates_atom_labels = list(data[3])
else:
sec_system.x_exciting_clathrates = False
potential_mixing = self.info_parser.get_initialization_parameter('potential_mixing')
if potential_mixing is not None:
sec_system.x_exciting_potential_mixing = potential_mixing
return sec_system
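# Parse all configurations: ground state, hybrids, structure optimization steps
# and any volume optimization runs, attaching the available output files.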
def parse_configurations(self):
sec_run = self.archive.run[-1]
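# Helper creating the calculation and system sections for one configuration block.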
def parse_configuration(section):
if not section:
return
sec_scc = self.parse_scc(section)
if sec_scc is None:
return
sec_system = self.parse_system(section)
if sec_system is not None:
sec_scc.system_ref = sec_system
sec_scc.method_ref = sec_run.method[-1]
return sec_scc
# groundstate and hybrids calculation
for module in ['groundstate', 'hybrids']:
sec_scc = parse_configuration(self.info_parser.get(module))
if sec_scc is None:
continue
# add data to scc
# TODO add support for more output files and properties
exciting_files = ['EIGVAL.OUT', 'FERMISURF.bxsf', 'FS.bxsf']
# Parse DFT DOS from one of the files
bs_files = ['dos.xml', 'TDOS.OUT']
for fname in bs_files:
if self.file_exists(fname):
exciting_files.append(fname)
break
# Parse DFT band structure from one of the files
bs_files = ['bandstructure.xml', 'BAND.OUT', 'bandstructure.dat']
for fname in bs_files:
if self.file_exists(fname):
exciting_files.append(fname)
break
for f in exciting_files:
self.parse_file(f, sec_scc)
# structure optimization
structure_optimization = self.info_parser.get('structure_optimization', {})
for optimization_step in structure_optimization.get('optimization_step', []):
sec_scc = parse_configuration(optimization_step)
if optimization_step.get('method') is not None:
sec_scc.x_exciting_geometry_optimization_method = optimization_step.get('method')
if optimization_step.get('step') is not None:
sec_scc.x_exciting_geometry_optimization_step = optimization_step.get('step')
force_convergence = optimization_step.get('force_convergence')
if force_convergence is not None:
sec_scc.x_exciting_maximum_force_magnitude = force_convergence[0]
sec_scc.x_exciting_geometry_optimization_threshold_force = force_convergence[1]
sec_scc = parse_configuration(structure_optimization)
if sec_scc is None:
return
# volume optimizations
volume_index = 1
while True:
info_volume = self.get_exciting_files('run_dir%s/INFO.OUT' % str(volume_index).rjust(2, '0'))
if not info_volume:
break
sec_scc.calculations_path.append(info_volume[0])
volume_index += 1
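# Attach the main file and the logger to all sub-parsers.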
def init_parser(self):
self.info_parser.mainfile = self.filepath
self.info_parser.logger = self.logger
self.dos_parser.logger = self.logger
self.bandstructure_parser.logger = self.logger
self.eigval_parser.logger = self.logger
self.fermisurf_parser.logger = self.logger
self.evalqp_parser.logger = self.logger
self.dos_out_parser.logger = self.logger
self.bandstructure_dat_parser.logger = self.logger
self.band_out_parser.logger = self.logger
self.info_gw_parser.logger = self.logger
self.input_xml_parser.logger = self.logger
self.data_xs_parser.logger = self.logger
self.data_clathrate_parser.logger = self.logger
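# Copy the quantity definitions from a previously constructed parser instance.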
def reuse_parser(self, parser):
self.info_parser.quantities = parser.info_parser.quantities
self.eigval_parser.quantities = parser.eigval_parser.quantities
self.fermisurf_parser.quantities = parser.fermisurf_parser.quantities
self.evalqp_parser.quantities = parser.evalqp_parser.quantities
self.info_gw_parser.quantities = parser.info_gw_parser.quantities
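# Main entry point: create the Run section and program info, then parse the DFT
# method and configurations, followed by GW, XS and workflow data.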
def parse(self, filepath, archive, logger):
self.filepath = filepath
self.archive = archive
self.logger = logger if logger is not None else logging
self._calculation_type = None
self.init_parser()
sec_run = self.archive.m_create(Run)
sec_run.program = Program(
name='exciting', version=self.info_parser.get('program_version', '').strip())
# method goes first since reference needed for sec_scc
self.parse_method()
self.parse_configurations()
self.parse_gw()
self.parse_xs()
self.parse_miscellaneous()
| [
"numpy.product",
"nomad.datamodel.metainfo.simulation.method.Scf",
"re.compile",
"numpy.array",
"nomad.parsing.file_parser.XMLParser",
"nomad.parsing.file_parser.TextParser",
"re.search",
"os.listdir",
"numpy.reshape",
"nomad.datamodel.metainfo.simulation.method.Functional",
"numpy.where",
"nomad.datamodel.metainfo.simulation.method.BasisSet",
"numpy.dot",
"numpy.linspace",
"numpy.vstack",
"numpy.dtype",
"numpy.eye",
"numpy.size",
"os.access",
"os.path.isfile",
"nomad.datamodel.metainfo.simulation.calculation.Energy",
"nomad.parsing.file_parser.Quantity",
"numpy.transpose",
"nomad.datamodel.metainfo.simulation.calculation.ForcesEntry",
"os.path.join",
"nomad.datamodel.metainfo.simulation.calculation.EnergyEntry",
"numpy.zeros",
"os.path.basename",
"nomad.parsing.file_parser.DataTextParser"
]
# coding: utf-8
"""
simcore-service-storage API
API definition for simcore-service-storage service # noqa: E501
OpenAPI spec version: 0.1.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from simcore_service_storage_sdk.api_client import ApiClient
class UsersApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def check_action_post(self, action, **kwargs): # noqa: E501
"""Test checkpoint to ask server to fail or echo back the transmitted data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_action_post(action, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: (required)
:param str data:
:param FakeType fake_type:
:return: FakeEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.check_action_post_with_http_info(action, **kwargs) # noqa: E501
else:
(data) = self.check_action_post_with_http_info(action, **kwargs) # noqa: E501
return data
def check_action_post_with_http_info(self, action, **kwargs): # noqa: E501
"""Test checkpoint to ask server to fail or echo back the transmitted data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_action_post_with_http_info(action, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str action: (required)
:param str data:
:param FakeType fake_type:
:return: FakeEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['action', 'data', 'fake_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_action_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'action' is set
if ('action' not in local_var_params or
local_var_params['action'] is None):
raise ValueError("Missing the required parameter `action` when calling `check_action_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'action' in local_var_params:
path_params['action'] = local_var_params['action'] # noqa: E501
query_params = []
if 'data' in local_var_params:
query_params.append(('data', local_var_params['data'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'fake_type' in local_var_params:
body_params = local_var_params['fake_type']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/check/{action}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FakeEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Deletes File # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.delete_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def delete_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Deletes File # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `delete_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `delete_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `delete_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def download_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns download link for requested file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.download_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.download_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def download_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns download link for requested file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.download_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method download_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `download_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `download_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `download_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PresignedLinkEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_file_metadata(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Get File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_metadata(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_file_metadata_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_file_metadata_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def get_file_metadata_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Get File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_metadata_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_file_metadata" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `get_file_metadata`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `get_file_metadata`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_file_metadata`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}/metadata', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_files_metadata(self, location_id, user_id, **kwargs): # noqa: E501
"""Get Files Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_files_metadata(location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str location_id: (required)
:param str user_id: (required)
:param str uuid_filter:
:return: FileMetaDataArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_files_metadata_with_http_info(location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.get_files_metadata_with_http_info(location_id, user_id, **kwargs) # noqa: E501
return data
def get_files_metadata_with_http_info(self, location_id, user_id, **kwargs): # noqa: E501
"""Get Files Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_files_metadata_with_http_info(location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str location_id: (required)
:param str user_id: (required)
:param str uuid_filter:
:return: FileMetaDataArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['location_id', 'user_id', 'uuid_filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_files_metadata" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `get_files_metadata`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_files_metadata`") # noqa: E501
collection_formats = {}
path_params = {}
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
if 'uuid_filter' in local_var_params:
query_params.append(('uuid_filter', local_var_params['uuid_filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/metadata', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataArrayEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_storage_locations(self, user_id, **kwargs): # noqa: E501
"""Get available storage locations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_storage_locations(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: (required)
:return: FileLocationArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_storage_locations_with_http_info(user_id, **kwargs) # noqa: E501
else:
(data) = self.get_storage_locations_with_http_info(user_id, **kwargs) # noqa: E501
return data
def get_storage_locations_with_http_info(self, user_id, **kwargs): # noqa: E501
"""Get available storage locations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_storage_locations_with_http_info(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user_id: (required)
:return: FileLocationArrayEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['user_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_storage_locations" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `get_storage_locations`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileLocationArrayEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def health_check(self, **kwargs): # noqa: E501
"""Service health-check endpoint # noqa: E501
Some general information on the API and state of the service behind # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.health_check(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.health_check_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.health_check_with_http_info(**kwargs) # noqa: E501
return data
def health_check_with_http_info(self, **kwargs): # noqa: E501
"""Service health-check endpoint # noqa: E501
Some general information on the API and state of the service behind # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.health_check_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: HealthCheckEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method health_check" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HealthCheckEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_file_meta_data(self, file_id, location_id, **kwargs): # noqa: E501
"""Update File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_file_meta_data(file_id, location_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param FileMetaDataType file_meta_data_type:
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_file_meta_data_with_http_info(file_id, location_id, **kwargs) # noqa: E501
else:
(data) = self.update_file_meta_data_with_http_info(file_id, location_id, **kwargs) # noqa: E501
return data
def update_file_meta_data_with_http_info(self, file_id, location_id, **kwargs): # noqa: E501
"""Update File Metadata # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_file_meta_data_with_http_info(file_id, location_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param FileMetaDataType file_meta_data_type:
:return: FileMetaDataEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'file_meta_data_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_file_meta_data" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `update_file_meta_data`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `update_file_meta_data`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'file_meta_data_type' in local_var_params:
body_params = local_var_params['file_meta_data_type']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}/metadata', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileMetaDataEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def upload_file(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns upload link or performs copy operation to datcore # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:param str extra_location:
:param str extra_source:
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.upload_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
else:
(data) = self.upload_file_with_http_info(file_id, location_id, user_id, **kwargs) # noqa: E501
return data
def upload_file_with_http_info(self, file_id, location_id, user_id, **kwargs): # noqa: E501
"""Returns upload link or performs copy operation to datcore # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file_with_http_info(file_id, location_id, user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str file_id: (required)
:param str location_id: (required)
:param str user_id: (required)
:param str extra_location:
:param str extra_source:
:return: PresignedLinkEnveloped
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['file_id', 'location_id', 'user_id', 'extra_location', 'extra_source'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method upload_file" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'file_id' is set
if ('file_id' not in local_var_params or
local_var_params['file_id'] is None):
raise ValueError("Missing the required parameter `file_id` when calling `upload_file`") # noqa: E501
# verify the required parameter 'location_id' is set
if ('location_id' not in local_var_params or
local_var_params['location_id'] is None):
raise ValueError("Missing the required parameter `location_id` when calling `upload_file`") # noqa: E501
# verify the required parameter 'user_id' is set
if ('user_id' not in local_var_params or
local_var_params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `upload_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_id' in local_var_params:
path_params['fileId'] = local_var_params['file_id'] # noqa: E501
if 'location_id' in local_var_params:
path_params['location_id'] = local_var_params['location_id'] # noqa: E501
query_params = []
if 'user_id' in local_var_params:
query_params.append(('user_id', local_var_params['user_id'])) # noqa: E501
if 'extra_location' in local_var_params:
query_params.append(('extra_location', local_var_params['extra_location'])) # noqa: E501
if 'extra_source' in local_var_params:
query_params.append(('extra_source', local_var_params['extra_source'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/locations/{location_id}/files/{fileId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PresignedLinkEnveloped', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
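
# Usage sketch (illustrative, not part of the generated code): only names
# defined or imported in this module are used; the default ApiClient host and
# the example user_id value are assumptions.
#
#     api = UsersApi(ApiClient())
#     health = api.health_check()                         # -> HealthCheckEnveloped
#     locations = api.get_storage_locations(user_id="0")  # -> FileLocationArrayEnveloped
#     thread = api.get_storage_locations(user_id="0", async_req=True)
#     locations = thread.get()                            # async variant returns a thread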
| [
"six.iteritems",
"simcore_service_storage_sdk.api_client.ApiClient"
] | [((2736, 2777), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (2749, 2777), False, 'import six\n'), ((7017, 7058), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (7030, 7058), False, 'import six\n'), ((11833, 11874), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (11846, 11874), False, 'import six\n'), ((16650, 16691), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (16663, 16691), False, 'import six\n'), ((21445, 21486), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (21458, 21486), False, 'import six\n'), ((25767, 25808), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (25780, 25808), False, 'import six\n'), ((29463, 29504), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (29476, 29504), False, 'import six\n'), ((33028, 33069), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (33041, 33069), False, 'import six\n'), ((37993, 38034), 'six.iteritems', 'six.iteritems', (["local_var_params['kwargs']"], {}), "(local_var_params['kwargs'])\n", (38006, 38034), False, 'import six\n'), ((688, 699), 'simcore_service_storage_sdk.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (697, 699), False, 'from simcore_service_storage_sdk.api_client import ApiClient\n')] |
#coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, ListAdminView
SORTBY_VAR = '_sort_by'
class SortablePlugin(BaseAdminPlugin):
sortable_fields = ['sort']
# Media
def get_media(self, media):
if self.sortable_fields and self.request.GET.get(SORTBY_VAR):
media = media + self.vendor('nadmin.plugin.sortable.js')
return media
# Block Views
def block_top_toolbar(self, context, nodes):
if self.sortable_fields:
pass
# current_refresh = self.request.GET.get(REFRESH_VAR)
# context.update({
# 'has_refresh': bool(current_refresh),
# 'clean_refresh_url': self.admin_view.get_query_string(remove=(REFRESH_VAR,)),
# 'current_refresh': current_refresh,
# 'refresh_times': [{
# 'time': r,
# 'url': self.admin_view.get_query_string({REFRESH_VAR: r}),
# 'selected': str(r) == current_refresh,
# } for r in self.refresh_times],
# })
# nodes.append(loader.render_to_string('nadmin/blocks/refresh.html', context_instance=context))
site.register_plugin(SortablePlugin, ListAdminView)
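
# Registering against ListAdminView exposes the plugin on change-list views; its
# JS bundle is only injected when the request carries the `_sort_by` query
# parameter (see get_media above). Opt-in sketch, assuming an xadmin-style
# options class (model and field names are illustrative):
#
#     class MyModelAdmin(object):
#         sortable_fields = ['sort', 'priority']
#     site.register(MyModel, MyModelAdmin)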
| [
"nadmin.sites.site.register_plugin"
] | [((1214, 1265), 'nadmin.sites.site.register_plugin', 'site.register_plugin', (['SortablePlugin', 'ListAdminView'], {}), '(SortablePlugin, ListAdminView)\n', (1234, 1265), False, 'from nadmin.sites import site\n')] |
import operator
import os
from unittest.mock import patch
import pytest
import requests
from rotkehlchen.chain.ethereum.manager import NodeName
from rotkehlchen.constants.assets import A_BTC
from rotkehlchen.tests.utils.blockchain import mock_etherscan_query
from rotkehlchen.typing import SupportedBlockchain
@pytest.mark.skipif(
os.name == 'nt',
reason='Not testing running with geth in windows at the moment',
)
@pytest.mark.parametrize('have_blockchain_backend', [True])
def test_eth_connection_initial_balances(
blockchain,
inquirer, # pylint: disable=unused-argument
):
"""TODO for this test. Either:
1. Not use own chain but use a normal open node for this test.
2. If we use own chain, deploy the eth-scan contract there.
But probably (1) makes more sense
"""
msg = 'Should be connected to ethereum node'
assert blockchain.ethereum.web3_mapping.get(NodeName.OWN) is not None, msg
def test_query_btc_balances(blockchain):
blockchain.query_btc_balances()
assert 'BTC' not in blockchain.totals
account = '<KEY>'
blockchain.modify_btc_account(account, 'append', operator.add)
blockchain.query_btc_balances()
assert blockchain.totals[A_BTC].usd_value is not None
assert blockchain.totals[A_BTC].amount is not None
@pytest.mark.parametrize('number_of_eth_accounts', [0])
def test_add_remove_account_assure_all_balances_not_always_queried(blockchain):
"""Due to a programming mistake at addition and removal of blockchain accounts
after the first time all balances were queried every time. That slowed
everything down (https://github.com/rotki/rotki/issues/678).
This is a regression test for that behaviour
TODO: Is this still needed? Shouldn't it just be removed?
Had to add lots of mocks to make it not be a slow test
"""
addr1 = '0xe188c6BEBB81b96A65aa20dDB9e2aef62627fa4c'
addr2 = '<KEY>'
etherscan_patch = mock_etherscan_query(
eth_map={addr1: {'ETH': 1}, addr2: {'ETH': 2}},
etherscan=blockchain.ethereum.etherscan,
original_requests_get=requests.get,
original_queries=[],
)
ethtokens_max_chunks_patch = patch(
'rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH',
new=800,
)
with etherscan_patch, ethtokens_max_chunks_patch:
blockchain.add_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr1],
)
assert addr1 in blockchain.accounts.eth
with etherscan_patch, ethtokens_max_chunks_patch, patch.object(blockchain, 'query_balances') as mock: # noqa: E501
blockchain.remove_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr1],
)
assert addr1 not in blockchain.accounts.eth
assert mock.call_count == 0, 'blockchain.query_balances() should not have been called'
addr2 = '0x78a087fCf440315b843632cFd6FDE6E5adcCc2C2'
with etherscan_patch, ethtokens_max_chunks_patch, patch.object(blockchain, 'query_balances') as mock: # noqa: E501
blockchain.add_blockchain_accounts(
blockchain=SupportedBlockchain.ETHEREUM,
accounts=[addr2],
)
| [
"rotkehlchen.tests.utils.blockchain.mock_etherscan_query",
"pytest.mark.parametrize",
"pytest.mark.skipif",
"unittest.mock.patch.object",
"unittest.mock.patch"
] | [((315, 420), 'pytest.mark.skipif', 'pytest.mark.skipif', (["(os.name == 'nt')"], {'reason': '"""Not testing running with geth in windows at the moment"""'}), "(os.name == 'nt', reason=\n 'Not testing running with geth in windows at the moment')\n", (333, 420), False, 'import pytest\n'), ((428, 486), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""have_blockchain_backend"""', '[True]'], {}), "('have_blockchain_backend', [True])\n", (451, 486), False, 'import pytest\n'), ((1310, 1364), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""number_of_eth_accounts"""', '[0]'], {}), "('number_of_eth_accounts', [0])\n", (1333, 1364), False, 'import pytest\n'), ((1947, 2122), 'rotkehlchen.tests.utils.blockchain.mock_etherscan_query', 'mock_etherscan_query', ([], {'eth_map': "{addr1: {'ETH': 1}, addr2: {'ETH': 2}}", 'etherscan': 'blockchain.ethereum.etherscan', 'original_requests_get': 'requests.get', 'original_queries': '[]'}), "(eth_map={addr1: {'ETH': 1}, addr2: {'ETH': 2}},\n etherscan=blockchain.ethereum.etherscan, original_requests_get=requests\n .get, original_queries=[])\n", (1967, 2122), False, 'from rotkehlchen.tests.utils.blockchain import mock_etherscan_query\n'), ((2186, 2274), 'unittest.mock.patch', 'patch', (['"""rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH"""'], {'new': '(800)'}), "('rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH',\n new=800)\n", (2191, 2274), False, 'from unittest.mock import patch\n'), ((2584, 2626), 'unittest.mock.patch.object', 'patch.object', (['blockchain', '"""query_balances"""'], {}), "(blockchain, 'query_balances')\n", (2596, 2626), False, 'from unittest.mock import patch\n'), ((3042, 3084), 'unittest.mock.patch.object', 'patch.object', (['blockchain', '"""query_balances"""'], {}), "(blockchain, 'query_balances')\n", (3054, 3084), False, 'from unittest.mock import patch\n')] |
from flask import Flask
from config import Config
from sqlalchemy import MetaData
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_moment import Moment
from flask_misaka import Misaka
from flask_bootstrap import Bootstrap
import os
import logging
from logging.handlers import RotatingFileHandler
from elasticsearch import Elasticsearch
convention = {
"ix": 'ix_%(column_0_label)s',
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
}
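
# Giving every constraint a deterministic name lets Alembic/Flask-Migrate drop
# or alter constraints later (notably in SQLite batch mode, enabled in
# create_app below) instead of failing on unnamed ones.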
metadata = MetaData(naming_convention=convention)
db = SQLAlchemy(metadata=metadata)
migrate = Migrate()
login = LoginManager()
login.login_view = "auth.login"
moment = Moment()
md = Misaka()
bootstrap = Bootstrap()
def create_app(config_class=Config):
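    """Application factory: build and configure a Flask app instance.

    Module-level extensions are bound to the app, blueprints are registered,
    an Elasticsearch client is attached when configured, and file logging is
    enabled outside debug/testing mode.
    """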
app = Flask(__name__)
    app.config.from_object(config_class)  # respect the config class passed to the factory
db.init_app(app)
with app.app_context():
if db.engine.url.drivername == 'sqlite':
migrate.init_app(app, db, render_as_batch=True)
else:
migrate.init_app(app, db)
# migrate.init_app(app, db)
login.init_app(app)
moment.init_app(app)
md.init_app(app)
bootstrap.init_app(app)
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.auth import bp as auth_bp
app.register_blueprint(auth_bp, url_prefix='/auth')
from app.main import bp as main_bp
app.register_blueprint(main_bp)
from app.cli import bp as cli_bp
app.register_blueprint(cli_bp)
app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
if app.config['ELASTICSEARCH_URL'] else None
from app import models
if not app.debug and not app.testing:
if not os.path.exists("logs"):
os.mkdir("logs")
file_handler = RotatingFileHandler(
"logs/moviedb.log", maxBytes=10240, backupCount=10
)
file_handler.setFormatter(
logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]"
)
)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.setLevel(logging.INFO)
app.logger.info("Moviedb startup")
return app
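
# Usage sketch (illustrative): the factory keeps `db`, `migrate` etc. importable
# without a bound app. It assumes this module is the `app` package's __init__,
# which matches the blueprint imports above.
#
#     from app import create_app, db
#     application = create_app()
#     with application.app_context():
#         db.create_all()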
| [
"flask_login.LoginManager",
"os.path.exists",
"flask_misaka.Misaka",
"flask.Flask",
"elasticsearch.Elasticsearch",
"logging.Formatter",
"logging.handlers.RotatingFileHandler",
"sqlalchemy.MetaData",
"flask_moment.Moment",
"flask_migrate.Migrate",
"os.mkdir",
"flask_bootstrap.Bootstrap",
"flask_sqlalchemy.SQLAlchemy"
] | [((677, 715), 'sqlalchemy.MetaData', 'MetaData', ([], {'naming_convention': 'convention'}), '(naming_convention=convention)\n', (685, 715), False, 'from sqlalchemy import MetaData\n'), ((722, 751), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {'metadata': 'metadata'}), '(metadata=metadata)\n', (732, 751), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((762, 771), 'flask_migrate.Migrate', 'Migrate', ([], {}), '()\n', (769, 771), False, 'from flask_migrate import Migrate\n'), ((780, 794), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (792, 794), False, 'from flask_login import LoginManager\n'), ((836, 844), 'flask_moment.Moment', 'Moment', ([], {}), '()\n', (842, 844), False, 'from flask_moment import Moment\n'), ((850, 858), 'flask_misaka.Misaka', 'Misaka', ([], {}), '()\n', (856, 858), False, 'from flask_misaka import Misaka\n'), ((871, 882), 'flask_bootstrap.Bootstrap', 'Bootstrap', ([], {}), '()\n', (880, 882), False, 'from flask_bootstrap import Bootstrap\n'), ((931, 946), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (936, 946), False, 'from flask import Flask\n'), ((1675, 1723), 'elasticsearch.Elasticsearch', 'Elasticsearch', (["[app.config['ELASTICSEARCH_URL']]"], {}), "([app.config['ELASTICSEARCH_URL']])\n", (1688, 1723), False, 'from elasticsearch import Elasticsearch\n'), ((1941, 2012), 'logging.handlers.RotatingFileHandler', 'RotatingFileHandler', (['"""logs/moviedb.log"""'], {'maxBytes': '(10240)', 'backupCount': '(10)'}), "('logs/moviedb.log', maxBytes=10240, backupCount=10)\n", (1960, 2012), False, 'from logging.handlers import RotatingFileHandler\n'), ((1865, 1887), 'os.path.exists', 'os.path.exists', (['"""logs"""'], {}), "('logs')\n", (1879, 1887), False, 'import os\n'), ((1901, 1917), 'os.mkdir', 'os.mkdir', (['"""logs"""'], {}), "('logs')\n", (1909, 1917), False, 'import os\n'), ((2082, 2175), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]"""'], {}), "(\n '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')\n", (2099, 2175), False, 'import logging\n')] |
from c_int import Int
from casting import cast
from globals_consts import NAMESPACE
from temps import used_temps, get_temp, get_temp_func
def binary_expression(copy_strings, expression, target, variables_name, vtypes):
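    """Emit Minecraft function commands that evaluate a binary expression.

    Both operands are generated, cast to a common type and combined with the
    requested operator; `&&` and `||` short-circuit by writing the right-hand
    operand into a separate generated .mcfunction that only runs depending on
    the left-hand result. Returns (code, result_type, result_temporaries).
    """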
from expression import generate_expression
c1, t1, tt1 = generate_expression(None, expression.left, vtypes, variables_name, copy_strings, False)
c2, t2, tt2 = generate_expression(None, expression.right, vtypes, variables_name, copy_strings, False)
    for ttt in tt1:
        used_temps.remove(ttt)
    for ttt in tt2:
        used_temps.remove(ttt)
ot = cast(t1, t2)
rt = ot
if expression.op in ['<', '>', '<=', '>=', '==', '!=', '&&']:
rt = Int()
if target is None or target == []:
target = [get_temp() for _ in range(ot.size)]
used_temps.extend(target)
code = ''
if expression.op in ['&&', '||']:
if expression.op == '&&':
code += c1
code += t1.cast(ot, tt1, target)
f2 = get_temp_func()
f2h = open(f'{f2}.mcfunction', 'w')
f2h.write(c2)
f2h.write(t2.cast(ot, tt2, target))
f2h.close()
code += f'execute unless score {target[0]} {NAMESPACE} matches 0 run function {NAMESPACE}:{f2}\n'
elif expression.op == '||':
code += c1
code += t1.cast(ot, tt1, target)
f2 = get_temp_func()
f2h = open(f'{f2}.mcfunction', 'w')
f2h.write(c2)
f2h.write(t2.cast(ot, tt2, target))
f2h.close()
code += f'execute if score {target[0]} {NAMESPACE} matches 0 run function {NAMESPACE}:{f2}\n'
else:
if ot == t1:
code += c1
code += c2
code += t2.cast(ot, tt2, target)
code += ot.binary(expression.op, tt1, target, target)
else:
code += c1
code += t1.cast(ot, tt1, target)
code += c2
code += ot.binary(expression.op, target, tt2, target)
return code, rt, target | [
"temps.used_temps.extend",
"expression.generate_expression",
"c_int.Int",
"casting.cast",
"temps.get_temp",
"temps.get_temp_func",
"temps.used_temps.remove"
] | [((286, 377), 'expression.generate_expression', 'generate_expression', (['None', 'expression.left', 'vtypes', 'variables_name', 'copy_strings', '(False)'], {}), '(None, expression.left, vtypes, variables_name,\n copy_strings, False)\n', (305, 377), False, 'from expression import generate_expression\n'), ((392, 484), 'expression.generate_expression', 'generate_expression', (['None', 'expression.right', 'vtypes', 'variables_name', 'copy_strings', '(False)'], {}), '(None, expression.right, vtypes, variables_name,\n copy_strings, False)\n', (411, 484), False, 'from expression import generate_expression\n'), ((576, 588), 'casting.cast', 'cast', (['t1', 't2'], {}), '(t1, t2)\n', (580, 588), False, 'from casting import cast\n'), ((501, 523), 'temps.used_temps.remove', 'used_temps.remove', (['ttt'], {}), '(ttt)\n', (518, 523), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((544, 566), 'temps.used_temps.remove', 'used_temps.remove', (['ttt'], {}), '(ttt)\n', (561, 566), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((680, 685), 'c_int.Int', 'Int', ([], {}), '()\n', (683, 685), False, 'from c_int import Int\n'), ((787, 812), 'temps.used_temps.extend', 'used_temps.extend', (['target'], {}), '(target)\n', (804, 812), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((743, 753), 'temps.get_temp', 'get_temp', ([], {}), '()\n', (751, 753), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((984, 999), 'temps.get_temp_func', 'get_temp_func', ([], {}), '()\n', (997, 999), False, 'from temps import used_temps, get_temp, get_temp_func\n'), ((1377, 1392), 'temps.get_temp_func', 'get_temp_func', ([], {}), '()\n', (1390, 1392), False, 'from temps import used_temps, get_temp, get_temp_func\n')] |
"""
Plot up surface or bottom (or any fixed level) errors from a profile object
with no z_dim (vertical dimension). Provide an array of netcdf files and
mess with the options to get a figure you like.
You can define how many rows and columns the plot will have. This script will
plot the provided list of netcdf datasets from left to right and top to bottom.
A colorbar will be placed right of the figure.
"""
import xarray as xr
import matplotlib.pyplot as plt
import numpy as np
import sys
sys.path.append("/Users/dbyrne/code/COAsT")
import coast
import pandas as pd
#%% File settings
run_name = "test"
# List of analysis output files. Profiles from each will be plotted
# on each axis of the plot
fn_list = [
"~/transfer/test_grid.nc",
"~/transfer/test_grid.nc",
]
# Filename for the output
fn_out = "/Users/dbyrne/transfer/surface_gridded_errors_{0}.png".format(run_name)
#%% General Plot Settings
var_name = "abs_diff_temperature" # Variable name in analysis file to plot
# If you used a modified variable name when making the gridded data,
# then this is where to select the season etc.
save_plot = False
# Masking out grid cells that don't contain many points
min_points_in_average = 5
name_of_count_variable = "grid_N"
# Subplot axes settings
n_r = 2 # Number of subplot rows
n_c = 2 # Number of subplot columns
figsize = (10, 5) # Figure size
lonbounds = [-15, 9.5] # Longitude bounds
latbounds = [45, 64] # Latitude bounds
subplot_padding = 0.5 # Amount of vertical and horizontal padding between plots
fig_pad = (0.075, 0.075, 0.1, 0.1) # Figure padding (left, top, right, bottom)
# Leave some space on right for colorbar
# Scatter opts
marker_size = 3 # Marker size
cmap = "bwr" # Colormap for normal points
clim = (-1, 1) # Color limits for normal points
discrete_cmap = True # Discretize colormap
cmap_levels = 14
# Labels and Titles
fig_title = "SST Errors" # Whole figure title
title_fontsize = 13 # Fontsize of title
title_fontweight = "bold" # Fontweight to use for title
dataset_names = ["CO9p0", "CO9p0", "CO9p0"] # Names to use for labelling plots
subtitle_fontsize = 11 # Fontsize for dataset subtitles
subtitle_fontweight = "normal" # Fontweight for dataset subtitles
# PLOT SEASONS. Make sure n_r = 2 and n_c = 2
# If this option is true, only the first dataset will be plotted, with seasonal
# variables on each subplot. The season_suffixes will be added to var_name
# for each subplot panel.
plot_seasons = True
season_suffixes = ["DJF", "MAM", "JJA", "SON"]
#%% Read and plotdata
# Read all datasets into list
ds_list = [xr.open_dataset(dd) for dd in fn_list]
n_ds = len(ds_list)
n_ax = n_r * n_c
# Create plot and flatten axis array
f, a = coast.plot_util.create_geo_subplots(lonbounds, latbounds, n_r, n_c, figsize=figsize)
a_flat = a.flatten()
# Dicretize colormap maybe
if discrete_cmap:
cmap = plt.cm.get_cmap(cmap, cmap_levels)
# Determine if we will extend the colormap or not
extend_cbar = []
# Loop over dataset
for ii in range(n_ax):
ur_index = np.unravel_index(ii, (n_r, n_c))
# Select season if required
if plot_seasons:
ds = ds_list[0]
var_ii = var_name + "_{0}".format(season_suffixes[ii])
N_var = "{0}_{1}".format(name_of_count_variable, season_suffixes[ii])
a_flat[ii].text(0.05, 1.02, season_suffixes[ii], transform=a_flat[ii].transAxes, fontweight="bold")
else:
ds = ds_list[ii]
var_ii = var_name
a_flat[ii].set_title(dataset_names[ii], fontsize=subtitle_fontsize, fontweight=subtitle_fontweight)
N_var = name_of_count_variable
data = ds[var_ii].values
count_var = ds[N_var]
data[count_var < min_points_in_average] = np.nan
# Scatter and set title
pc = a_flat[ii].pcolormesh(
ds.longitude,
ds.latitude,
data,
cmap=cmap,
vmin=clim[0],
vmax=clim[1],
)
# Will we extend the colorbar for this dataset?
extend_cbar.append(coast.plot_util.determine_colorbar_extension(data, clim[0], clim[1]))
# Set Figure title
f.suptitle(fig_title, fontsize=title_fontsize, fontweight=title_fontweight)
# Set tight figure layout
f.tight_layout(w_pad=subplot_padding, h_pad=subplot_padding)
f.subplots_adjust(left=(fig_pad[0]), bottom=(fig_pad[1]), right=(1 - fig_pad[2]), top=(1 - fig_pad[3]))
# Handle colorbar -- will we extend it?
if "both" in extend_cbar:
extend = "both"
elif "max" in extend_cbar and "min" in extend_cbar:
extend = "both"
elif "max" in extend_cbar:
extend = "max"
elif "min" in extend_cbar:
extend = "min"
else:
extend = "neither"
cbar_ax = f.add_axes([(1 - fig_pad[2] + fig_pad[2] * 0.15), 0.15, 0.025, 0.7])
f.colorbar(pc, cax=cbar_ax, extend=extend)
# Save plot maybe
if save_plot:
f.savefig(fn_out)
| [
"coast.plot_util.determine_colorbar_extension",
"coast.plot_util.create_geo_subplots",
"numpy.unravel_index",
"matplotlib.pyplot.cm.get_cmap",
"xarray.open_dataset",
"sys.path.append"
] | [((497, 540), 'sys.path.append', 'sys.path.append', (['"""/Users/dbyrne/code/COAsT"""'], {}), "('/Users/dbyrne/code/COAsT')\n", (512, 540), False, 'import sys\n'), ((2685, 2774), 'coast.plot_util.create_geo_subplots', 'coast.plot_util.create_geo_subplots', (['lonbounds', 'latbounds', 'n_r', 'n_c'], {'figsize': 'figsize'}), '(lonbounds, latbounds, n_r, n_c, figsize\n =figsize)\n', (2720, 2774), False, 'import coast\n'), ((2564, 2583), 'xarray.open_dataset', 'xr.open_dataset', (['dd'], {}), '(dd)\n', (2579, 2583), True, 'import xarray as xr\n'), ((2848, 2882), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['cmap', 'cmap_levels'], {}), '(cmap, cmap_levels)\n', (2863, 2882), True, 'import matplotlib.pyplot as plt\n'), ((3010, 3042), 'numpy.unravel_index', 'np.unravel_index', (['ii', '(n_r, n_c)'], {}), '(ii, (n_r, n_c))\n', (3026, 3042), True, 'import numpy as np\n'), ((3950, 4018), 'coast.plot_util.determine_colorbar_extension', 'coast.plot_util.determine_colorbar_extension', (['data', 'clim[0]', 'clim[1]'], {}), '(data, clim[0], clim[1])\n', (3994, 4018), False, 'import coast\n')] |
from sklearn.feature_extraction.text import TfidfVectorizer
def compute_tf_idf(corpus):
"""Computing term frequency (tf) - inverse document frequency (idf).
:param corpus: List of documents.
:returns: tf-idf of corpus.
"""
return TfidfVectorizer().fit_transform(corpus)
if __name__ == '__main__':
sample_corpus = [
'This is sample document.',
'another random document.',
'third sample document text'
]
print(compute_tf_idf(sample_corpus))
| [
"sklearn.feature_extraction.text.TfidfVectorizer"
] | [((252, 269), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {}), '()\n', (267, 269), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n')] |
import logging
import sched
import time
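# The first block below appears to enumerate Telegram ConversationHandler states for the
# bot's menu flow; the second tracks a coin's trade status.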
(
MENU,
EDIT_COIN_LIST,
EDIT_USER_CONFIG,
DELETE_DB,
UPDATE_TG,
UPDATE_BTB,
PANIC_BUTTON,
CUSTOM_SCRIPT,
) = range(8)
BOUGHT, BUYING, SOLD, SELLING = range(4)
logging.basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", level=logging.INFO
)
logger = logging.getLogger("btb_manager_telegram_logger")
scheduler = sched.scheduler(time.time, time.sleep)
| [
"logging.basicConfig",
"sched.scheduler",
"logging.getLogger"
] | [((234, 341), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)\n", (253, 341), False, 'import logging\n'), ((352, 400), 'logging.getLogger', 'logging.getLogger', (['"""btb_manager_telegram_logger"""'], {}), "('btb_manager_telegram_logger')\n", (369, 400), False, 'import logging\n'), ((414, 452), 'sched.scheduler', 'sched.scheduler', (['time.time', 'time.sleep'], {}), '(time.time, time.sleep)\n', (429, 452), False, 'import sched\n')] |
import modutil
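# modutil.lazy_import returns a lazy module proxy plus a module-level __getattr__;
# the listed submodules are only imported when first accessed (e.g. mod.A below).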
mod, __getattr__ = modutil.lazy_import(__name__,
['tests.test_data.A', '.B', '.C as still_C'])
def trigger_A():
return mod.A
def trigger_B():
return mod.B
def trigger_C():
return mod.still_C
def trigger_failure():
return mod.does_not_exist
| [
"modutil.lazy_import"
] | [((36, 111), 'modutil.lazy_import', 'modutil.lazy_import', (['__name__', "['tests.test_data.A', '.B', '.C as still_C']"], {}), "(__name__, ['tests.test_data.A', '.B', '.C as still_C'])\n", (55, 111), False, 'import modutil\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import tqdm
import torch
import pickle
import resource
import numpy as np
import matplotlib.pyplot as plt
from args import parse_args
from modelSummary import model_dict
from pytorchtools import load_from_file
from torch.utils.data import DataLoader
from helperfunctions import mypause, stackall_Dict
from loss import get_seg2ptLoss
from utils import get_nparams, get_predictions
from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), os.pardir)))
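# Raise the soft limit on open file descriptors; DataLoader worker processes can
# otherwise exhaust it ("Too many open files").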
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (2048*10, rlimit[1]))
#%%
if __name__ == '__main__':
args = parse_args()
device=torch.device("cuda")
torch.cuda.manual_seed(12)
if torch.cuda.device_count() > 1:
print('Moving to a multiGPU setup.')
args.useMultiGPU = True
else:
args.useMultiGPU = False
torch.backends.cudnn.deterministic=False
if args.model not in model_dict:
print("Model not found.")
print("valid models are: {}".format(list(model_dict.keys())))
exit(1)
LOGDIR = os.path.join(os.getcwd(), 'logs', args.model, args.expname)
path2model = os.path.join(LOGDIR, 'weights')
path2checkpoint = os.path.join(LOGDIR, 'checkpoints')
path2writer = os.path.join(LOGDIR, 'TB.lock')
path2op = os.path.join(os.getcwd(), 'op', str(args.curObj))
os.makedirs(LOGDIR, exist_ok=True)
os.makedirs(path2model, exist_ok=True)
os.makedirs(path2checkpoint, exist_ok=True)
os.makedirs(path2writer, exist_ok=True)
os.makedirs(path2op, exist_ok=True)
model = model_dict[args.model]
netDict = load_from_file([args.loadfile,
os.path.join(path2checkpoint, 'checkpoint.pt')])
startEp = netDict['epoch'] if 'epoch' in netDict.keys() else 0
if 'state_dict' in netDict.keys():
model.load_state_dict(netDict['state_dict'])
print('Parameters: {}'.format(get_nparams(model)))
model = model if not args.useMultiGPU else torch.nn.DataParallel(model)
model = model.to(device).to(args.prec)
f = open(os.path.join('curObjects',
'baseline',
'cond_'+str(args.curObj)+'.pkl'), 'rb')
_, _, testObj = pickle.load(f)
testObj.path2data = os.path.join(args.path2data, 'Datasets', 'All')
testObj.augFlag = False
testloader = DataLoader(testObj,
batch_size=args.batchsize,
shuffle=False,
num_workers=args.workers,
drop_last=False)
if args.disp:
fig, axs = plt.subplots(nrows=1, ncols=1)
#%%
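    # Accumulators for loss, IoU and pupil/iris centre-distance metrics over the test set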
accLoss = 0.0
imCounter = 0
ious = []
dists_pupil_latent = []
dists_pupil_seg = []
dists_iris_latent = []
dists_iris_seg = []
model.eval()
opDict = {'id':[], 'archNum': [], 'archName': [], 'code': [],
'scores':{'iou':[], 'lat_dst':[], 'seg_dst':[]},
'pred':{'pup_latent_c':[],
'pup_seg_c':[],
'iri_latent_c':[],
'iri_seg_c':[],
'mask':[]},
'gt':{'pup_c':[], 'mask':[]}}
with torch.no_grad():
for bt, batchdata in enumerate(tqdm.tqdm(testloader)):
img, labels, spatialWeights, distMap, pupil_center, iris_center, elNorm, cond, imInfo = batchdata
out_tup = model(img.to(device).to(args.prec),
labels.to(device).long(),
pupil_center.to(device).to(args.prec),
elNorm.to(device).to(args.prec),
spatialWeights.to(device).to(args.prec),
distMap.to(device).to(args.prec),
cond.to(device).to(args.prec),
imInfo[:, 2].to(device).to(torch.long),
0.5)
output, elOut, latent, loss = out_tup
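            # elOut packs the regressed ellipse parameters; columns 0:2 hold the (normalized) pupil centre and 5:7 the iris centre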
latent_pupil_center = elOut[:, 0:2].detach().cpu().numpy()
latent_iris_center = elOut[:, 5:7].detach().cpu().numpy()
_, seg_pupil_center = get_seg2ptLoss(output[:, 2, ...].cpu(), pupil_center, temperature=4)
_, seg_iris_center = get_seg2ptLoss(-output[:, 0, ...].cpu(), iris_center, temperature=4)
loss = loss if args.useMultiGPU else loss.mean()
accLoss += loss.detach().cpu().item()
predict = get_predictions(output)
iou, iou_bySample = getSeg_metrics(labels.numpy(),
predict.numpy(),
cond[:, 1].numpy())[1:]
latent_pupil_dist, latent_pupil_dist_bySample = getPoint_metric(pupil_center.numpy(),
latent_pupil_center,
cond[:,0].numpy(),
img.shape[2:],
True) # Unnormalizes the points
seg_pupil_dist, seg_pupil_dist_bySample = getPoint_metric(pupil_center.numpy(),
seg_pupil_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
latent_iris_dist, latent_iris_dist_bySample = getPoint_metric(iris_center.numpy(),
latent_iris_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
seg_iris_dist, seg_iris_dist_bySample = getPoint_metric(iris_center.numpy(),
seg_iris_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
dists_pupil_latent.append(latent_pupil_dist)
dists_iris_latent.append(latent_iris_dist)
dists_pupil_seg.append(seg_pupil_dist)
dists_iris_seg.append(seg_iris_dist)
ious.append(iou)
pup_latent_c = unnormPts(latent_pupil_center,
img.shape[2:])
pup_seg_c = unnormPts(seg_pupil_center,
img.shape[2:])
iri_latent_c = unnormPts(latent_iris_center,
img.shape[2:])
iri_seg_c = unnormPts(seg_iris_center,
img.shape[2:])
dispI = generateImageGrid(img.numpy().squeeze(),
predict.numpy(),
elOut.detach().cpu().numpy().reshape(-1, 2, 5),
pup_seg_c,
cond.numpy(),
override=True,
heatmaps=False)
for i in range(0, img.shape[0]):
archNum = testObj.imList[imCounter, 1]
opDict['id'].append(testObj.imList[imCounter, 0])
opDict['code'].append(latent[i,...].detach().cpu().numpy())
opDict['archNum'].append(archNum)
opDict['archName'].append(testObj.arch[archNum])
opDict['pred']['pup_latent_c'].append(pup_latent_c[i, :])
opDict['pred']['pup_seg_c'].append(pup_seg_c[i, :])
opDict['pred']['iri_latent_c'].append(iri_latent_c[i, :])
opDict['pred']['iri_seg_c'].append(iri_seg_c[i, :])
if args.test_save_op_masks:
opDict['pred']['mask'].append(predict[i,...].numpy().astype(np.uint8))
opDict['scores']['iou'].append(iou_bySample[i, ...])
opDict['scores']['lat_dst'].append(latent_pupil_dist_bySample[i, ...])
opDict['scores']['seg_dst'].append(seg_pupil_dist_bySample[i, ...])
opDict['gt']['pup_c'].append(pupil_center[i,...].numpy())
if args.test_save_op_masks:
opDict['gt']['mask'].append(labels[i,...].numpy().astype(np.uint8))
imCounter+=1
if args.disp:
if bt == 0:
h_im = plt.imshow(dispI.permute(1, 2, 0))
plt.pause(0.01)
else:
h_im.set_data(dispI.permute(1, 2, 0))
mypause(0.01)
opDict = stackall_Dict(opDict)
ious = np.stack(ious, axis=0)
ious = np.nanmean(ious, axis=0)
print('mIoU: {}. IoUs: {}'.format(np.mean(ious), ious))
print('Latent space PUPIL dist. Med: {}, STD: {}'.format(np.nanmedian(dists_pupil_latent),
np.nanstd(dists_pupil_latent)))
print('Segmentation PUPIL dist. Med: {}, STD: {}'.format(np.nanmedian(dists_pupil_seg),
np.nanstd(dists_pupil_seg)))
print('Latent space IRIS dist. Med: {}, STD: {}'.format(np.nanmedian(dists_iris_latent),
np.nanstd(dists_iris_latent)))
print('Segmentation IRIS dist. Med: {}, STD: {}'.format(np.nanmedian(dists_iris_seg),
np.nanstd(dists_iris_seg)))
print('--- Saving output directory ---')
f = open(os.path.join(path2op, 'opDict.pkl'), 'wb')
pickle.dump(opDict, f)
f.close()
| [
"modelSummary.model_dict.keys",
"torch.cuda.device_count",
"utils.get_predictions",
"numpy.nanmean",
"utils.unnormPts",
"numpy.mean",
"helperfunctions.mypause",
"numpy.stack",
"resource.setrlimit",
"helperfunctions.stackall_Dict",
"numpy.nanstd",
"pickle.load",
"utils.get_nparams",
"matplotlib.pyplot.pause",
"args.parse_args",
"torch.device",
"pickle.dump",
"os.makedirs",
"numpy.nanmedian",
"resource.getrlimit",
"tqdm.tqdm",
"os.path.join",
"torch.nn.DataParallel",
"os.getcwd",
"torch.utils.data.DataLoader",
"torch.no_grad",
"torch.cuda.manual_seed",
"matplotlib.pyplot.subplots"
] | [((615, 657), 'resource.getrlimit', 'resource.getrlimit', (['resource.RLIMIT_NOFILE'], {}), '(resource.RLIMIT_NOFILE)\n', (633, 657), False, 'import resource\n'), ((658, 724), 'resource.setrlimit', 'resource.setrlimit', (['resource.RLIMIT_NOFILE', '(2048 * 10, rlimit[1])'], {}), '(resource.RLIMIT_NOFILE, (2048 * 10, rlimit[1]))\n', (676, 724), False, 'import resource\n'), ((767, 779), 'args.parse_args', 'parse_args', ([], {}), '()\n', (777, 779), False, 'from args import parse_args\n'), ((792, 812), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (804, 812), False, 'import torch\n'), ((817, 843), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['(12)'], {}), '(12)\n', (839, 843), False, 'import torch\n'), ((1296, 1327), 'os.path.join', 'os.path.join', (['LOGDIR', '"""weights"""'], {}), "(LOGDIR, 'weights')\n", (1308, 1327), False, 'import os\n'), ((1350, 1385), 'os.path.join', 'os.path.join', (['LOGDIR', '"""checkpoints"""'], {}), "(LOGDIR, 'checkpoints')\n", (1362, 1385), False, 'import os\n'), ((1404, 1435), 'os.path.join', 'os.path.join', (['LOGDIR', '"""TB.lock"""'], {}), "(LOGDIR, 'TB.lock')\n", (1416, 1435), False, 'import os\n'), ((1505, 1539), 'os.makedirs', 'os.makedirs', (['LOGDIR'], {'exist_ok': '(True)'}), '(LOGDIR, exist_ok=True)\n', (1516, 1539), False, 'import os\n'), ((1544, 1582), 'os.makedirs', 'os.makedirs', (['path2model'], {'exist_ok': '(True)'}), '(path2model, exist_ok=True)\n', (1555, 1582), False, 'import os\n'), ((1587, 1630), 'os.makedirs', 'os.makedirs', (['path2checkpoint'], {'exist_ok': '(True)'}), '(path2checkpoint, exist_ok=True)\n', (1598, 1630), False, 'import os\n'), ((1635, 1674), 'os.makedirs', 'os.makedirs', (['path2writer'], {'exist_ok': '(True)'}), '(path2writer, exist_ok=True)\n', (1646, 1674), False, 'import os\n'), ((1679, 1714), 'os.makedirs', 'os.makedirs', (['path2op'], {'exist_ok': '(True)'}), '(path2op, exist_ok=True)\n', (1690, 1714), False, 'import os\n'), ((2376, 2390), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2387, 2390), False, 'import pickle\n'), ((2415, 2462), 'os.path.join', 'os.path.join', (['args.path2data', '"""Datasets"""', '"""All"""'], {}), "(args.path2data, 'Datasets', 'All')\n", (2427, 2462), False, 'import os\n'), ((2509, 2618), 'torch.utils.data.DataLoader', 'DataLoader', (['testObj'], {'batch_size': 'args.batchsize', 'shuffle': '(False)', 'num_workers': 'args.workers', 'drop_last': '(False)'}), '(testObj, batch_size=args.batchsize, shuffle=False, num_workers=\n args.workers, drop_last=False)\n', (2519, 2618), False, 'from torch.utils.data import DataLoader\n'), ((851, 876), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (874, 876), False, 'import torch\n'), ((1232, 1243), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1241, 1243), False, 'import os\n'), ((1463, 1474), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1472, 1474), False, 'import os\n'), ((2138, 2166), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (2159, 2166), False, 'import torch\n'), ((2764, 2794), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(1)'}), '(nrows=1, ncols=1)\n', (2776, 2794), True, 'import matplotlib.pyplot as plt\n'), ((3354, 3369), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3367, 3369), False, 'import torch\n'), ((9323, 9344), 'helperfunctions.stackall_Dict', 'stackall_Dict', (['opDict'], {}), '(opDict)\n', (9336, 9344), False, 'from helperfunctions import mypause, stackall_Dict\n'), ((9360, 9382), 
'numpy.stack', 'np.stack', (['ious'], {'axis': '(0)'}), '(ious, axis=0)\n', (9368, 9382), True, 'import numpy as np\n'), ((9398, 9422), 'numpy.nanmean', 'np.nanmean', (['ious'], {'axis': '(0)'}), '(ious, axis=0)\n', (9408, 9422), True, 'import numpy as np\n'), ((10349, 10371), 'pickle.dump', 'pickle.dump', (['opDict', 'f'], {}), '(opDict, f)\n', (10360, 10371), False, 'import pickle\n'), ((579, 590), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (588, 590), False, 'import os\n'), ((1827, 1873), 'os.path.join', 'os.path.join', (['path2checkpoint', '"""checkpoint.pt"""'], {}), "(path2checkpoint, 'checkpoint.pt')\n", (1839, 1873), False, 'import os\n'), ((2070, 2088), 'utils.get_nparams', 'get_nparams', (['model'], {}), '(model)\n', (2081, 2088), False, 'from utils import get_nparams, get_predictions\n'), ((3410, 3431), 'tqdm.tqdm', 'tqdm.tqdm', (['testloader'], {}), '(testloader)\n', (3419, 3431), False, 'import tqdm\n'), ((4611, 4634), 'utils.get_predictions', 'get_predictions', (['output'], {}), '(output)\n', (4626, 4634), False, 'from utils import get_nparams, get_predictions\n'), ((6975, 7020), 'utils.unnormPts', 'unnormPts', (['latent_pupil_center', 'img.shape[2:]'], {}), '(latent_pupil_center, img.shape[2:])\n', (6984, 7020), False, 'from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts\n'), ((7082, 7124), 'utils.unnormPts', 'unnormPts', (['seg_pupil_center', 'img.shape[2:]'], {}), '(seg_pupil_center, img.shape[2:])\n', (7091, 7124), False, 'from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts\n'), ((7186, 7230), 'utils.unnormPts', 'unnormPts', (['latent_iris_center', 'img.shape[2:]'], {}), '(latent_iris_center, img.shape[2:])\n', (7195, 7230), False, 'from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts\n'), ((7292, 7333), 'utils.unnormPts', 'unnormPts', (['seg_iris_center', 'img.shape[2:]'], {}), '(seg_iris_center, img.shape[2:])\n', (7301, 7333), False, 'from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts\n'), ((10298, 10333), 'os.path.join', 'os.path.join', (['path2op', '"""opDict.pkl"""'], {}), "(path2op, 'opDict.pkl')\n", (10310, 10333), False, 'import os\n'), ((9465, 9478), 'numpy.mean', 'np.mean', (['ious'], {}), '(ious)\n', (9472, 9478), True, 'import numpy as np\n'), ((9552, 9584), 'numpy.nanmedian', 'np.nanmedian', (['dists_pupil_latent'], {}), '(dists_pupil_latent)\n', (9564, 9584), True, 'import numpy as np\n'), ((9646, 9675), 'numpy.nanstd', 'np.nanstd', (['dists_pupil_latent'], {}), '(dists_pupil_latent)\n', (9655, 9675), True, 'import numpy as np\n'), ((9743, 9772), 'numpy.nanmedian', 'np.nanmedian', (['dists_pupil_seg'], {}), '(dists_pupil_seg)\n', (9755, 9772), True, 'import numpy as np\n'), ((9834, 9860), 'numpy.nanstd', 'np.nanstd', (['dists_pupil_seg'], {}), '(dists_pupil_seg)\n', (9843, 9860), True, 'import numpy as np\n'), ((9927, 9958), 'numpy.nanmedian', 'np.nanmedian', (['dists_iris_latent'], {}), '(dists_iris_latent)\n', (9939, 9958), True, 'import numpy as np\n'), ((10019, 10047), 'numpy.nanstd', 'np.nanstd', (['dists_iris_latent'], {}), '(dists_iris_latent)\n', (10028, 10047), True, 'import numpy as np\n'), ((10114, 10142), 'numpy.nanmedian', 'np.nanmedian', (['dists_iris_seg'], {}), '(dists_iris_seg)\n', (10126, 10142), True, 'import numpy as np\n'), ((10203, 10228), 'numpy.nanstd', 'np.nanstd', (['dists_iris_seg'], {}), '(dists_iris_seg)\n', (10212, 10228), True, 'import numpy as np\n'), ((1168, 1185), 'modelSummary.model_dict.keys', 
'model_dict.keys', ([], {}), '()\n', (1183, 1185), False, 'from modelSummary import model_dict\n'), ((9175, 9190), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.01)'], {}), '(0.01)\n', (9184, 9190), True, 'import matplotlib.pyplot as plt\n'), ((9291, 9304), 'helperfunctions.mypause', 'mypause', (['(0.01)'], {}), '(0.01)\n', (9298, 9304), False, 'from helperfunctions import mypause, stackall_Dict\n')] |
#!/usr/bin/env python
"""
<Program Name>
test_util.py
<Author>
<NAME>.
<Started>
February 1, 2013.
<Copyright>
See LICENSE for licensing information.
<Purpose>
Unit test for 'util.py'
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import gzip
import shutil
import logging
import tempfile
import unittest
import tuf
import tuf.log
import tuf.hash
import tuf.util
import tuf.unittest_toolbox as unittest_toolbox
import tuf._vendor.six as six
logger = logging.getLogger('tuf.test_util')
class TestUtil(unittest_toolbox.Modified_TestCase):
def setUp(self):
unittest_toolbox.Modified_TestCase.setUp(self)
self.temp_fileobj = tuf.util.TempFile()
def tearDown(self):
unittest_toolbox.Modified_TestCase.tearDown(self)
self.temp_fileobj.close_temp_file()
def test_A1_tempfile_close_temp_file(self):
# Was the temporary file closed?
self.temp_fileobj.close_temp_file()
self.assertTrue(self.temp_fileobj.temporary_file.closed)
def _extract_tempfile_directory(self, config_temp_dir=None):
"""
    Takes a directory (essentially specified in conf.py as
    'temporary_directory') and substitutes tempfile.TemporaryFile() with
    tempfile.mkstemp() in order to extract the actual directory of the
    stored tempfile. Returns the config's temporary directory (or the
    default temp directory) and the actual directory.
"""
# Patching 'tuf.conf.temporary_directory'.
tuf.conf.temporary_directory = config_temp_dir
if config_temp_dir is None:
# 'config_temp_dir' needs to be set to default.
config_temp_dir = tempfile.gettempdir()
# Patching 'tempfile.TemporaryFile()' (by substituting
    # tempfile.TemporaryFile() with tempfile.mkstemp()) in order to get the
# directory of the stored tempfile object.
saved_tempfile_TemporaryFile = tuf.util.tempfile.NamedTemporaryFile
tuf.util.tempfile.NamedTemporaryFile = tempfile.mkstemp
_temp_fileobj = tuf.util.TempFile()
tuf.util.tempfile.NamedTemporaryFile = saved_tempfile_TemporaryFile
junk, _tempfilepath = _temp_fileobj.temporary_file
_tempfile_dir = os.path.dirname(_tempfilepath)
# In the case when 'config_temp_dir' is None or some other discrepancy,
# '_temp_fileobj' needs to be closed manually since tempfile.mkstemp()
# was used.
if os.path.exists(_tempfilepath):
os.remove(_tempfilepath)
return config_temp_dir, _tempfile_dir
def test_A2_tempfile_init(self):
# Goal: Verify that temporary files are stored in the appropriate temp
# directory. The location of the temporary files is set in 'tuf.conf.py'.
# Test: Expected input verification.
# Assumed 'tuf.conf.temporary_directory' is 'None' initially.
temp_file = tuf.util.TempFile()
temp_file_directory = os.path.dirname(temp_file.temporary_file.name)
self.assertEqual(tempfile.gettempdir(), temp_file_directory)
saved_temporary_directory = tuf.conf.temporary_directory
temp_directory = self.make_temp_directory()
tuf.conf.temporary_directory = temp_directory
temp_file = tuf.util.TempFile()
temp_file_directory = os.path.dirname(temp_file.temporary_file.name)
self.assertEqual(temp_directory, temp_file_directory)
tuf.conf.temporary_directory = saved_temporary_directory
# Test: Unexpected input handling.
config_temp_dirs = [self.random_string(), 123, ['a'], {'a':1}]
for config_temp_dir in config_temp_dirs:
config_temp_dir, actual_dir = \
self._extract_tempfile_directory(config_temp_dir)
self.assertEqual(tempfile.gettempdir(), actual_dir)
def test_A3_tempfile_read(self):
filepath = self.make_temp_data_file(data = '1234567890')
fileobj = open(filepath, 'rb')
# Patching 'temp_fileobj.temporary_file'.
self.temp_fileobj.temporary_file = fileobj
# Test: Expected input.
self.assertEqual(self.temp_fileobj.read().decode('utf-8'), '1234567890')
self.assertEqual(self.temp_fileobj.read(4).decode('utf-8'), '1234')
# Test: Unexpected input.
for bogus_arg in ['abcd', ['abcd'], {'a':'a'}, -100]:
self.assertRaises(tuf.FormatError, self.temp_fileobj.read, bogus_arg)
def test_A4_tempfile_write(self):
data = self.random_string()
self.temp_fileobj.write(data.encode('utf-8'))
self.assertEqual(data, self.temp_fileobj.read().decode('utf-8'))
self.temp_fileobj.write(data.encode('utf-8'), auto_flush=False)
self.assertEqual(data, self.temp_fileobj.read().decode('utf-8'))
def test_A5_tempfile_move(self):
# Destination directory to save the temporary file in.
dest_temp_dir = self.make_temp_directory()
dest_path = os.path.join(dest_temp_dir, self.random_string())
self.temp_fileobj.write(self.random_string().encode('utf-8'))
self.temp_fileobj.move(dest_path)
self.assertTrue(dest_path)
def _compress_existing_file(self, filepath):
"""
    [Helper] Compresses the file 'filepath' and returns the path of
    the compressed file.
"""
# NOTE: DO NOT forget to remove the newly created compressed file!
if os.path.exists(filepath):
compressed_filepath = filepath+'.gz'
f_in = open(filepath, 'rb')
f_out = gzip.open(compressed_filepath, 'wb')
f_out.writelines(f_in)
f_out.close()
f_in.close()
return compressed_filepath
else:
logger.error('Compression of '+repr(filepath)+' failed. Path does not exist.')
sys.exit(1)
def _decompress_file(self, compressed_filepath):
"""[Helper]"""
if os.path.exists(compressed_filepath):
f = gzip.open(compressed_filepath, 'rb')
file_content = f.read()
f.close()
return file_content
else:
logger.error('Decompression of '+repr(compressed_filepath)+' failed. '+\
'Path does not exist.')
sys.exit(1)
def test_A6_tempfile_decompress_temp_file_object(self):
# Setup: generate a temp file (self.make_temp_data_file()),
# compress it. Write it to self.temp_fileobj().
filepath = self.make_temp_data_file()
fileobj = open(filepath, 'rb')
compressed_filepath = self._compress_existing_file(filepath)
compressed_fileobj = open(compressed_filepath, 'rb')
self.temp_fileobj.write(compressed_fileobj.read())
os.remove(compressed_filepath)
    # Try decompression using an incorrect compression type, i.e. compressions
    # other than 'gzip'. In short, feed it incorrect input.
bogus_args = ['zip', 1234, self.random_string()]
for arg in bogus_args:
self.assertRaises(tuf.Error,
self.temp_fileobj.decompress_temp_file_object, arg)
self.temp_fileobj.decompress_temp_file_object('gzip')
self.assertEqual(self.temp_fileobj.read(), fileobj.read())
# Checking the content of the TempFile's '_orig_file' instance.
check_compressed_original = self.make_temp_file()
with open(check_compressed_original, 'wb') as file_object:
file_object.write(self.temp_fileobj._orig_file.read())
data_in_orig_file = self._decompress_file(check_compressed_original)
fileobj.seek(0)
self.assertEqual(data_in_orig_file, fileobj.read())
# Try decompressing once more.
self.assertRaises(tuf.Error,
self.temp_fileobj.decompress_temp_file_object, 'gzip')
# Test decompression of invalid gzip file.
temp_file = tuf.util.TempFile()
fileobj.seek(0)
temp_file.write(fileobj.read())
temp_file.decompress_temp_file_object('gzip')
def test_B1_get_file_details(self):
# Goal: Verify proper output given certain expected/unexpected input.
# Making a temporary file.
filepath = self.make_temp_data_file()
# Computing the hash and length of the tempfile.
digest_object = tuf.hash.digest_filename(filepath, algorithm='sha256')
file_hash = {'sha256' : digest_object.hexdigest()}
file_length = os.path.getsize(filepath)
# Test: Expected input.
self.assertEqual(tuf.util.get_file_details(filepath), (file_length, file_hash))
# Test: Incorrect input.
bogus_inputs = [self.random_string(), 1234, [self.random_string()],
{'a': 'b'}, None]
for bogus_input in bogus_inputs:
if isinstance(bogus_input, six.string_types):
self.assertRaises(tuf.Error, tuf.util.get_file_details, bogus_input)
else:
self.assertRaises(tuf.FormatError, tuf.util.get_file_details, bogus_input)
def test_B2_ensure_parent_dir(self):
existing_parent_dir = self.make_temp_directory()
non_existing_parent_dir = os.path.join(existing_parent_dir, 'a', 'b')
for parent_dir in [existing_parent_dir, non_existing_parent_dir, 12, [3]]:
if isinstance(parent_dir, six.string_types):
tuf.util.ensure_parent_dir(os.path.join(parent_dir, 'a.txt'))
self.assertTrue(os.path.isdir(parent_dir))
else:
self.assertRaises(tuf.FormatError, tuf.util.ensure_parent_dir, parent_dir)
def test_B3_file_in_confined_directories(self):
# Goal: Provide invalid input for 'filepath' and 'confined_directories'.
# Include inputs like: '[1, 2, "a"]' and such...
# Reference to 'file_in_confined_directories()' to improve readability.
in_confined_directory = tuf.util.file_in_confined_directories
list_of_confined_directories = ['a', 12, {'a':'a'}, [1]]
list_of_filepaths = [12, ['a'], {'a':'a'}, 'a']
for bogus_confined_directory in list_of_confined_directories:
for filepath in list_of_filepaths:
self.assertRaises(tuf.FormatError, in_confined_directory,
filepath, bogus_confined_directory)
# Test: Inputs that evaluate to False.
confined_directories = ['a/b/', 'a/b/c/d/']
self.assertFalse(in_confined_directory('a/b/c/1.txt', confined_directories))
confined_directories = ['a/b/c/d/e/']
self.assertFalse(in_confined_directory('a', confined_directories))
self.assertFalse(in_confined_directory('a/b', confined_directories))
self.assertFalse(in_confined_directory('a/b/c', confined_directories))
self.assertFalse(in_confined_directory('a/b/c/d', confined_directories))
# Below, 'e' is a file in the 'a/b/c/d/' directory.
self.assertFalse(in_confined_directory('a/b/c/d/e', confined_directories))
# Test: Inputs that evaluate to True.
self.assertTrue(in_confined_directory('a/b/c.txt', ['']))
self.assertTrue(in_confined_directory('a/b/c.txt', ['a/b/']))
self.assertTrue(in_confined_directory('a/b/c.txt', ['x', '']))
self.assertTrue(in_confined_directory('a/b/c/..', ['a/']))
def test_B4_import_json(self):
self.assertTrue('json' in sys.modules)
def test_B5_load_json_string(self):
# Test normal case.
data = ['a', {'b': ['c', None, 30.3, 29]}]
json_string = tuf.util.json.dumps(data)
self.assertEqual(data, tuf.util.load_json_string(json_string))
# Test invalid arguments.
self.assertRaises(tuf.Error, tuf.util.load_json_string, 8)
invalid_json_string = {'a': tuf.FormatError}
self.assertRaises(tuf.Error, tuf.util.load_json_string, invalid_json_string)
def test_B6_load_json_file(self):
data = ['a', {'b': ['c', None, 30.3, 29]}]
filepath = self.make_temp_file()
fileobj = open(filepath, 'wt')
tuf.util.json.dump(data, fileobj)
fileobj.close()
self.assertEqual(data, tuf.util.load_json_file(filepath))
# Test a gzipped file.
compressed_filepath = self._compress_existing_file(filepath)
self.assertEqual(data, tuf.util.load_json_file(compressed_filepath))
Errors = (tuf.FormatError, IOError)
for bogus_arg in [b'a', 1, [b'a'], {'a':b'b'}]:
self.assertRaises(Errors, tuf.util.load_json_file, bogus_arg)
def test_C1_get_target_hash(self):
# Test normal case.
expected_target_hashes = {
'/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
'/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0',
'/warehouse/file2.txt': 'd543a573a2cec67026eff06e75702303559e64e705eba06f65799baaf0424417'
}
for filepath, target_hash in six.iteritems(expected_target_hashes):
self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
self.assertTrue(tuf.formats.HASH_SCHEMA.matches(target_hash))
self.assertEqual(tuf.util.get_target_hash(filepath), target_hash)
# Test for improperly formatted argument.
self.assertRaises(tuf.FormatError, tuf.util.get_target_hash, 8)
def test_C2_find_delegated_role(self):
# Test normal case. Create an expected role list, which is one of the
# required arguments to 'find_delegated_role()'.
role_list = [
{
"keyids": [
"<KEY>"
],
"name": "targets/warehouse",
"paths": [
"/file1.txt", "/README.txt", '/warehouse/'
],
"threshold": 3
},
{
"keyids": [
"<KEY>"
],
"name": "targets/tuf",
"paths": [
"/updater.py", "formats.py", '/tuf/'
],
"threshold": 4
}
]
self.assertTrue(tuf.formats.ROLELIST_SCHEMA.matches(role_list))
self.assertEqual(tuf.util.find_delegated_role(role_list, 'targets/tuf'), 1)
self.assertEqual(tuf.util.find_delegated_role(role_list, 'targets/warehouse'), 0)
# Test for non-existent role. 'find_delegated_role()' returns 'None'
# if the role is not found.
self.assertEqual(tuf.util.find_delegated_role(role_list, 'targets/non-existent'),
None)
# Test improperly formatted arguments.
self.assertRaises(tuf.FormatError, tuf.util.find_delegated_role, 8, role_list)
self.assertRaises(tuf.FormatError, tuf.util.find_delegated_role, 8, 'targets/tuf')
# Test duplicate roles.
role_list.append(role_list[1])
self.assertRaises(tuf.RepositoryError, tuf.util.find_delegated_role, role_list,
'targets/tuf')
# Test missing 'name' attribute (optional, but required by
    # 'find_delegated_role()').
# Delete the duplicate role, and the remaining role's 'name' attribute.
del role_list[2]
del role_list[0]['name']
self.assertRaises(tuf.RepositoryError, tuf.util.find_delegated_role, role_list,
'targets/warehouse')
def test_C3_paths_are_consistent_with_hash_prefixes(self):
# Test normal case.
path_hash_prefixes = ['e3a3', '8fae', 'd543']
list_of_targets = ['/file1.txt', '/README.txt', '/warehouse/file2.txt']
    # Ensure the paths of 'list_of_targets' each have the expected path hash
# prefix listed in 'path_hash_prefixes'.
for filepath in list_of_targets:
self.assertTrue(tuf.util.get_target_hash(filepath)[0:4] in path_hash_prefixes)
self.assertTrue(tuf.util.paths_are_consistent_with_hash_prefixes(list_of_targets,
path_hash_prefixes))
extra_invalid_prefix = ['e3a3', '8fae', 'd543', '0000']
self.assertTrue(tuf.util.paths_are_consistent_with_hash_prefixes(list_of_targets,
extra_invalid_prefix))
# Test improperly formatted arguments.
self.assertRaises(tuf.FormatError,
tuf.util.paths_are_consistent_with_hash_prefixes, 8,
path_hash_prefixes)
self.assertRaises(tuf.FormatError,
tuf.util.paths_are_consistent_with_hash_prefixes,
list_of_targets, 8)
self.assertRaises(tuf.FormatError,
tuf.util.paths_are_consistent_with_hash_prefixes,
list_of_targets, ['zza1'])
# Test invalid list of targets.
bad_target_path = '/file5.txt'
self.assertTrue(tuf.util.get_target_hash(bad_target_path)[0:4] not in
path_hash_prefixes)
self.assertFalse(tuf.util.paths_are_consistent_with_hash_prefixes([bad_target_path],
path_hash_prefixes))
# Add invalid target path to 'list_of_targets'.
list_of_targets.append(bad_target_path)
self.assertFalse(tuf.util.paths_are_consistent_with_hash_prefixes(list_of_targets,
path_hash_prefixes))
def test_C4_ensure_all_targets_allowed(self):
# Test normal case.
rolename = 'targets/warehouse'
self.assertTrue(tuf.formats.ROLENAME_SCHEMA.matches(rolename))
list_of_targets = ['/file1.txt', '/README.txt', '/warehouse/file2.txt']
self.assertTrue(tuf.formats.RELPATHS_SCHEMA.matches(list_of_targets))
parent_delegations = {"keys": {
"<KEY>": {
"keytype": "ed25519",
"keyval": {
"public": "<KEY>"
}
}
},
"roles": [
{
"keyids": [
"<KEY>"
],
"name": "targets/warehouse",
"paths": [
"/file1.txt", "/README.txt", '/warehouse/'
],
"threshold": 1
}
]
}
self.assertTrue(tuf.formats.DELEGATIONS_SCHEMA.matches(parent_delegations))
tuf.util.ensure_all_targets_allowed(rolename, list_of_targets,
parent_delegations)
# The target files of 'targets' are always allowed. 'list_of_targets' and
# 'parent_delegations' are not checked in this case.
tuf.util.ensure_all_targets_allowed('targets', list_of_targets,
parent_delegations)
# Test improperly formatted arguments.
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
8, list_of_targets, parent_delegations)
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
rolename, 8, parent_delegations)
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
rolename, list_of_targets, 8)
# Test for invalid 'rolename', which has not been delegated by its parent,
# 'targets'.
self.assertRaises(tuf.RepositoryError, tuf.util.ensure_all_targets_allowed,
'targets/non-delegated_rolename', list_of_targets,
parent_delegations)
# Test for target file that is not allowed by the parent role.
self.assertRaises(tuf.ForbiddenTargetError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', ['file8.txt'], parent_delegations)
self.assertRaises(tuf.ForbiddenTargetError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', ['file1.txt', 'bad-README.txt'],
parent_delegations)
# Test for required attributes.
# Missing 'paths' attribute.
del parent_delegations['roles'][0]['paths']
self.assertRaises(tuf.FormatError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', list_of_targets, parent_delegations)
# Test 'path_hash_prefixes' attribute.
path_hash_prefixes = ['e3a3', '8fae', 'd543']
parent_delegations['roles'][0]['path_hash_prefixes'] = path_hash_prefixes
# Test normal case for 'path_hash_prefixes'.
tuf.util.ensure_all_targets_allowed('targets/warehouse', list_of_targets,
parent_delegations)
# Test target file with a path_hash_prefix that is not allowed in its
# parent role.
path_hash_prefix = tuf.util.get_target_hash('file5.txt')[0:4]
self.assertTrue(path_hash_prefix not in parent_delegations['roles'][0]
['path_hash_prefixes'])
self.assertRaises(tuf.ForbiddenTargetError, tuf.util.ensure_all_targets_allowed,
'targets/warehouse', ['file5.txt'], parent_delegations)
def test_C5_unittest_toolbox_make_temp_directory(self):
# Verify that the tearDown function does not fail when
# unittest_toolbox.make_temp_directory deletes the generated temp directory
# here.
temp_directory = self.make_temp_directory()
os.rmdir(temp_directory)
def test_c6_get_compressed_length(self):
self.temp_fileobj.write(b'hello world')
self.assertTrue(self.temp_fileobj.get_compressed_length() == 11)
temp_file = tuf.util.TempFile()
# Run unit test.
if __name__ == '__main__':
unittest.main()
| [
"logging.getLogger",
"tuf.formats.RELPATHS_SCHEMA.matches",
"gzip.open",
"tuf.util.ensure_all_targets_allowed",
"sys.exit",
"unittest.main",
"os.remove",
"tuf.hash.digest_filename",
"os.path.exists",
"tuf.formats.HASH_SCHEMA.matches",
"os.path.isdir",
"tuf.util.load_json_file",
"tuf.unittest_toolbox.Modified_TestCase.tearDown",
"tuf.formats.ROLENAME_SCHEMA.matches",
"tuf.formats.ROLELIST_SCHEMA.matches",
"os.path.getsize",
"tuf.unittest_toolbox.Modified_TestCase.setUp",
"tuf.formats.DELEGATIONS_SCHEMA.matches",
"os.path.dirname",
"tuf.util.json.dump",
"tuf.util.json.dumps",
"tuf.util.get_target_hash",
"tuf.formats.RELPATH_SCHEMA.matches",
"tuf._vendor.six.iteritems",
"os.path.join",
"tuf.util.load_json_string",
"tuf.util.get_file_details",
"tuf.util.find_delegated_role",
"tuf.util.paths_are_consistent_with_hash_prefixes",
"os.rmdir",
"tempfile.gettempdir",
"tuf.util.TempFile"
] | [((826, 860), 'logging.getLogger', 'logging.getLogger', (['"""tuf.test_util"""'], {}), "('tuf.test_util')\n", (843, 860), False, 'import logging\n'), ((20747, 20762), 'unittest.main', 'unittest.main', ([], {}), '()\n', (20760, 20762), False, 'import unittest\n'), ((939, 985), 'tuf.unittest_toolbox.Modified_TestCase.setUp', 'unittest_toolbox.Modified_TestCase.setUp', (['self'], {}), '(self)\n', (979, 985), True, 'import tuf.unittest_toolbox as unittest_toolbox\n'), ((1010, 1029), 'tuf.util.TempFile', 'tuf.util.TempFile', ([], {}), '()\n', (1027, 1029), False, 'import tuf\n'), ((1061, 1110), 'tuf.unittest_toolbox.Modified_TestCase.tearDown', 'unittest_toolbox.Modified_TestCase.tearDown', (['self'], {}), '(self)\n', (1104, 1110), True, 'import tuf.unittest_toolbox as unittest_toolbox\n'), ((2320, 2339), 'tuf.util.TempFile', 'tuf.util.TempFile', ([], {}), '()\n', (2337, 2339), False, 'import tuf\n'), ((2487, 2517), 'os.path.dirname', 'os.path.dirname', (['_tempfilepath'], {}), '(_tempfilepath)\n', (2502, 2517), False, 'import os\n'), ((2694, 2723), 'os.path.exists', 'os.path.exists', (['_tempfilepath'], {}), '(_tempfilepath)\n', (2708, 2723), False, 'import os\n'), ((3116, 3135), 'tuf.util.TempFile', 'tuf.util.TempFile', ([], {}), '()\n', (3133, 3135), False, 'import tuf\n'), ((3162, 3208), 'os.path.dirname', 'os.path.dirname', (['temp_file.temporary_file.name'], {}), '(temp_file.temporary_file.name)\n', (3177, 3208), False, 'import os\n'), ((3450, 3469), 'tuf.util.TempFile', 'tuf.util.TempFile', ([], {}), '()\n', (3467, 3469), False, 'import tuf\n'), ((3496, 3542), 'os.path.dirname', 'os.path.dirname', (['temp_file.temporary_file.name'], {}), '(temp_file.temporary_file.name)\n', (3511, 3542), False, 'import os\n'), ((5457, 5481), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (5471, 5481), False, 'import os\n'), ((5918, 5953), 'os.path.exists', 'os.path.exists', (['compressed_filepath'], {}), '(compressed_filepath)\n', (5932, 5953), False, 'import os\n'), ((6658, 6688), 'os.remove', 'os.remove', (['compressed_filepath'], {}), '(compressed_filepath)\n', (6667, 6688), False, 'import os\n'), ((7756, 7775), 'tuf.util.TempFile', 'tuf.util.TempFile', ([], {}), '()\n', (7773, 7775), False, 'import tuf\n'), ((8145, 8199), 'tuf.hash.digest_filename', 'tuf.hash.digest_filename', (['filepath'], {'algorithm': '"""sha256"""'}), "(filepath, algorithm='sha256')\n", (8169, 8199), False, 'import tuf\n'), ((8273, 8298), 'os.path.getsize', 'os.path.getsize', (['filepath'], {}), '(filepath)\n', (8288, 8298), False, 'import os\n'), ((8950, 8993), 'os.path.join', 'os.path.join', (['existing_parent_dir', '"""a"""', '"""b"""'], {}), "(existing_parent_dir, 'a', 'b')\n", (8962, 8993), False, 'import os\n'), ((11192, 11217), 'tuf.util.json.dumps', 'tuf.util.json.dumps', (['data'], {}), '(data)\n', (11211, 11217), False, 'import tuf\n'), ((11673, 11706), 'tuf.util.json.dump', 'tuf.util.json.dump', (['data', 'fileobj'], {}), '(data, fileobj)\n', (11691, 11706), False, 'import tuf\n'), ((12529, 12566), 'tuf._vendor.six.iteritems', 'six.iteritems', (['expected_target_hashes'], {}), '(expected_target_hashes)\n', (12542, 12566), True, 'import tuf._vendor.six as six\n'), ((17521, 17607), 'tuf.util.ensure_all_targets_allowed', 'tuf.util.ensure_all_targets_allowed', (['rolename', 'list_of_targets', 'parent_delegations'], {}), '(rolename, list_of_targets,\n parent_delegations)\n', (17556, 17607), False, 'import tuf\n'), ((17782, 17869), 'tuf.util.ensure_all_targets_allowed', 
'tuf.util.ensure_all_targets_allowed', (['"""targets"""', 'list_of_targets', 'parent_delegations'], {}), "('targets', list_of_targets,\n parent_delegations)\n", (17817, 17869), False, 'import tuf\n'), ((19596, 19693), 'tuf.util.ensure_all_targets_allowed', 'tuf.util.ensure_all_targets_allowed', (['"""targets/warehouse"""', 'list_of_targets', 'parent_delegations'], {}), "('targets/warehouse', list_of_targets,\n parent_delegations)\n", (19631, 19693), False, 'import tuf\n'), ((20477, 20501), 'os.rmdir', 'os.rmdir', (['temp_directory'], {}), '(temp_directory)\n', (20485, 20501), False, 'import os\n'), ((20678, 20697), 'tuf.util.TempFile', 'tuf.util.TempFile', ([], {}), '()\n', (20695, 20697), False, 'import tuf\n'), ((1962, 1983), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (1981, 1983), False, 'import tempfile\n'), ((2731, 2755), 'os.remove', 'os.remove', (['_tempfilepath'], {}), '(_tempfilepath)\n', (2740, 2755), False, 'import os\n'), ((3230, 3251), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3249, 3251), False, 'import tempfile\n'), ((5574, 5610), 'gzip.open', 'gzip.open', (['compressed_filepath', '"""wb"""'], {}), "(compressed_filepath, 'wb')\n", (5583, 5610), False, 'import gzip\n'), ((5825, 5836), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5833, 5836), False, 'import sys\n'), ((5965, 6001), 'gzip.open', 'gzip.open', (['compressed_filepath', '"""rb"""'], {}), "(compressed_filepath, 'rb')\n", (5974, 6001), False, 'import gzip\n'), ((6210, 6221), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6218, 6221), False, 'import sys\n'), ((8350, 8385), 'tuf.util.get_file_details', 'tuf.util.get_file_details', (['filepath'], {}), '(filepath)\n', (8375, 8385), False, 'import tuf\n'), ((11245, 11283), 'tuf.util.load_json_string', 'tuf.util.load_json_string', (['json_string'], {}), '(json_string)\n', (11270, 11283), False, 'import tuf\n'), ((11754, 11787), 'tuf.util.load_json_file', 'tuf.util.load_json_file', (['filepath'], {}), '(filepath)\n', (11777, 11787), False, 'import tuf\n'), ((11909, 11953), 'tuf.util.load_json_file', 'tuf.util.load_json_file', (['compressed_filepath'], {}), '(compressed_filepath)\n', (11932, 11953), False, 'import tuf\n'), ((13502, 13548), 'tuf.formats.ROLELIST_SCHEMA.matches', 'tuf.formats.ROLELIST_SCHEMA.matches', (['role_list'], {}), '(role_list)\n', (13537, 13548), False, 'import tuf\n'), ((13571, 13625), 'tuf.util.find_delegated_role', 'tuf.util.find_delegated_role', (['role_list', '"""targets/tuf"""'], {}), "(role_list, 'targets/tuf')\n", (13599, 13625), False, 'import tuf\n'), ((13651, 13711), 'tuf.util.find_delegated_role', 'tuf.util.find_delegated_role', (['role_list', '"""targets/warehouse"""'], {}), "(role_list, 'targets/warehouse')\n", (13679, 13711), False, 'import tuf\n'), ((13843, 13906), 'tuf.util.find_delegated_role', 'tuf.util.find_delegated_role', (['role_list', '"""targets/non-existent"""'], {}), "(role_list, 'targets/non-existent')\n", (13871, 13906), False, 'import tuf\n'), ((15200, 15289), 'tuf.util.paths_are_consistent_with_hash_prefixes', 'tuf.util.paths_are_consistent_with_hash_prefixes', (['list_of_targets', 'path_hash_prefixes'], {}), '(list_of_targets,\n path_hash_prefixes)\n', (15248, 15289), False, 'import tuf\n'), ((15428, 15519), 'tuf.util.paths_are_consistent_with_hash_prefixes', 'tuf.util.paths_are_consistent_with_hash_prefixes', (['list_of_targets', 'extra_invalid_prefix'], {}), '(list_of_targets,\n extra_invalid_prefix)\n', (15476, 15519), False, 'import tuf\n'), ((16310, 16401), 
'tuf.util.paths_are_consistent_with_hash_prefixes', 'tuf.util.paths_are_consistent_with_hash_prefixes', (['[bad_target_path]', 'path_hash_prefixes'], {}), '([bad_target_path],\n path_hash_prefixes)\n', (16358, 16401), False, 'import tuf\n'), ((16577, 16666), 'tuf.util.paths_are_consistent_with_hash_prefixes', 'tuf.util.paths_are_consistent_with_hash_prefixes', (['list_of_targets', 'path_hash_prefixes'], {}), '(list_of_targets,\n path_hash_prefixes)\n', (16625, 16666), False, 'import tuf\n'), ((16854, 16899), 'tuf.formats.ROLENAME_SCHEMA.matches', 'tuf.formats.ROLENAME_SCHEMA.matches', (['rolename'], {}), '(rolename)\n', (16889, 16899), False, 'import tuf\n'), ((16998, 17050), 'tuf.formats.RELPATHS_SCHEMA.matches', 'tuf.formats.RELPATHS_SCHEMA.matches', (['list_of_targets'], {}), '(list_of_targets)\n', (17033, 17050), False, 'import tuf\n'), ((17456, 17514), 'tuf.formats.DELEGATIONS_SCHEMA.matches', 'tuf.formats.DELEGATIONS_SCHEMA.matches', (['parent_delegations'], {}), '(parent_delegations)\n', (17494, 17514), False, 'import tuf\n'), ((19846, 19883), 'tuf.util.get_target_hash', 'tuf.util.get_target_hash', (['"""file5.txt"""'], {}), "('file5.txt')\n", (19870, 19883), False, 'import tuf\n'), ((3932, 3953), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3951, 3953), False, 'import tempfile\n'), ((12590, 12634), 'tuf.formats.RELPATH_SCHEMA.matches', 'tuf.formats.RELPATH_SCHEMA.matches', (['filepath'], {}), '(filepath)\n', (12624, 12634), False, 'import tuf\n'), ((12658, 12702), 'tuf.formats.HASH_SCHEMA.matches', 'tuf.formats.HASH_SCHEMA.matches', (['target_hash'], {}), '(target_hash)\n', (12689, 12702), False, 'import tuf\n'), ((12727, 12761), 'tuf.util.get_target_hash', 'tuf.util.get_target_hash', (['filepath'], {}), '(filepath)\n', (12751, 12761), False, 'import tuf\n'), ((9160, 9193), 'os.path.join', 'os.path.join', (['parent_dir', '"""a.txt"""'], {}), "(parent_dir, 'a.txt')\n", (9172, 9193), False, 'import os\n'), ((9219, 9244), 'os.path.isdir', 'os.path.isdir', (['parent_dir'], {}), '(parent_dir)\n', (9232, 9244), False, 'import os\n'), ((16195, 16236), 'tuf.util.get_target_hash', 'tuf.util.get_target_hash', (['bad_target_path'], {}), '(bad_target_path)\n', (16219, 16236), False, 'import tuf\n'), ((15116, 15150), 'tuf.util.get_target_hash', 'tuf.util.get_target_hash', (['filepath'], {}), '(filepath)\n', (15140, 15150), False, 'import tuf\n')] |
from django import forms
class FlightrForm(forms.Form):
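    # Field labels are Chinese UI strings: 航班号 = flight number, 飞机型号 = aircraft type,
    # 始发地/目的地 = origin/destination, 始发/到达时间 = departure/arrival time,
    # 头等/商务/经济舱价格 = first/business/economy class price, 始发/终止日期 = start/end date.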
flight_number = forms.CharField(max_length=30, label="航班号", widget=forms.TextInput(attrs={'class': 'form-control'}))
plane_type_choices = [
('波音', (
('1', '747'),
('2', '777'),
('3', '787'),
)
),
('空客', (
('4', 'A300'),
('5', 'A310'),
('6', 'A320'),
('7', 'A350'),
)
),
]
plane_type = forms.ChoiceField(label='飞机型号', choices=plane_type_choices,widget=forms.Select)
origination = forms.CharField(max_length=30,label="始发地", widget=forms.TextInput(attrs={'class': 'form-control'}))
destination = forms.CharField(max_length=30,label="目的地", widget=forms.TextInput(attrs={'class': 'form-control'}))
starting_time = forms.TimeField(label="始发时间",widget=forms.TimeInput(attrs={'class': 'form-control'}))
departure_airport = forms.CharField(max_length=64, label="始发机场", widget=forms.TextInput(attrs={'class': 'form-control'}))
landing_airport = forms.CharField(max_length=64, label="目的机场", widget=forms.TextInput(attrs={'class': 'form-control'}))
arrival_time = forms.TimeField(label="到达时间",widget=forms.TimeInput(attrs={'class': 'form-control'}))
first_class_price = forms.FloatField(label="头等舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
# highlevel_economy_class_price = forms.FloatField(label="高级经济舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
business_class_price = forms.FloatField(label="商务舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
economy_class_price = forms.FloatField(label="经济舱价格",widget=forms.NumberInput(attrs={'class': 'form-control'}))
starting_date = forms.DateField(label="始发日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
ending_date = forms.DateField(label="终止日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
class StartStopDateForm(forms.Form):
starting_date = forms.DateField(label="始发日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
ending_date = forms.DateField(label="终止日期", widget=forms.DateInput(attrs={'class': 'form-control'}))
flight_number = forms.CharField(max_length=30, label="航班号", widget=forms.TextInput(attrs={'class': 'form-control'}))
# book_sum = forms.IntegerField(label="订票总数")
# plane_capacity = forms.IntegerField(label="飞机容量")
class flight_number_Form(forms.Form):
flight_number = forms.CharField(max_length=30, label="航班号", widget=forms.TextInput(attrs={'class': 'form-control'}))
class concrete_flight_id_Form(forms.Form):
concrete_flight_id = forms.CharField(max_length=30, label="航班id", widget=forms.TextInput(attrs={'class': 'form-control'})) | [
"django.forms.DateInput",
"django.forms.TimeInput",
"django.forms.NumberInput",
"django.forms.ChoiceField",
"django.forms.TextInput"
] | [((497, 582), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'label': '"""飞机型号"""', 'choices': 'plane_type_choices', 'widget': 'forms.Select'}), "(label='飞机型号', choices=plane_type_choices, widget=forms.Select\n )\n", (514, 582), False, 'from django import forms\n'), ((137, 185), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (152, 185), False, 'from django import forms\n'), ((651, 699), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (666, 699), False, 'from django import forms\n'), ((769, 817), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (784, 817), False, 'from django import forms\n'), ((877, 925), 'django.forms.TimeInput', 'forms.TimeInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (892, 925), False, 'from django import forms\n'), ((1004, 1052), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1019, 1052), False, 'from django import forms\n'), ((1128, 1176), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1143, 1176), False, 'from django import forms\n'), ((1233, 1281), 'django.forms.TimeInput', 'forms.TimeInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1248, 1281), False, 'from django import forms\n'), ((1347, 1397), 'django.forms.NumberInput', 'forms.NumberInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1364, 1397), False, 'from django import forms\n'), ((1594, 1644), 'django.forms.NumberInput', 'forms.NumberInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1611, 1644), False, 'from django import forms\n'), ((1710, 1760), 'django.forms.NumberInput', 'forms.NumberInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1727, 1760), False, 'from django import forms\n'), ((1817, 1865), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1832, 1865), False, 'from django import forms\n'), ((1922, 1970), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (1937, 1970), False, 'from django import forms\n'), ((2066, 2114), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (2081, 2114), False, 'from django import forms\n'), ((2171, 2219), 'django.forms.DateInput', 'forms.DateInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (2186, 2219), False, 'from django import forms\n'), ((2290, 2338), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (2305, 2338), False, 'from django import forms\n'), ((2555, 2603), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (2570, 2603), False, 'from django import forms\n'), ((2724, 2772), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 
'form-control'})\n", (2739, 2772), False, 'from django import forms\n')] |
"""
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
from urlparse import urlparse
from website import settings as osf_settings
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
DATABASES = {
'default': {
'CONN_MAX_AGE': 0,
'ENGINE': 'osf.db.backends.postgresql', # django.db.backends.postgresql
'NAME': os.environ.get('OSF_DB_NAME', 'osf'),
'USER': os.environ.get('OSF_DB_USER', 'postgres'),
'PASSWORD': os.environ.get('OSF_DB_PASSWORD', ''),
'HOST': os.environ.get('OSF_DB_HOST', '127.0.0.1'),
'PORT': os.environ.get('OSF_DB_PORT', '5432'),
'ATOMIC_REQUESTS': True,
'TEST': {
'SERIALIZE': False,
},
},
}
DATABASE_ROUTERS = ['osf.db.router.PostgreSQLFailoverRouter', ]
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
AUTH_USER_MODEL = 'osf.OSFUser'
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = osf_settings.SECRET_KEY
AUTHENTICATION_BACKENDS = (
'api.base.authentication.backends.ODMBackend',
'guardian.backends.ObjectPermissionBackend',
)
# SECURITY WARNING: don't run with debug turned on in production!
DEV_MODE = osf_settings.DEV_MODE
DEBUG = osf_settings.DEBUG_MODE
DEBUG_PROPAGATE_EXCEPTIONS = True
# session:
SESSION_COOKIE_NAME = 'api'
SESSION_COOKIE_SECURE = osf_settings.SECURE_MODE
SESSION_COOKIE_HTTPONLY = osf_settings.SESSION_COOKIE_HTTPONLY
# csrf:
CSRF_COOKIE_NAME = 'api-csrf'
CSRF_COOKIE_SECURE = osf_settings.SECURE_MODE
CSRF_COOKIE_HTTPONLY = osf_settings.SECURE_MODE
ALLOWED_HOSTS = [
'.osf.io',
]
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
# 3rd party
'django_celery_beat',
'django_celery_results',
'rest_framework',
'corsheaders',
'raven.contrib.django.raven_compat',
'django_extensions',
'guardian',
'storages',
'waffle',
'elasticsearch_metrics',
# OSF
'osf',
# Addons
'addons.osfstorage',
'addons.bitbucket',
'addons.box',
'addons.dataverse',
'addons.dropbox',
'addons.figshare',
'addons.forward',
'addons.github',
'addons.gitlab',
'addons.googledrive',
'addons.mendeley',
'addons.onedrive',
'addons.owncloud',
'addons.s3',
'addons.twofactor',
'addons.wiki',
'addons.zotero',
)
# local development using https
if osf_settings.SECURE_MODE and DEBUG:
INSTALLED_APPS += ('sslserver',)
# TODO: Are there more granular ways to configure reporting specifically related to the API?
RAVEN_CONFIG = {
'tags': {'App': 'api'},
'dsn': osf_settings.SENTRY_DSN,
'release': osf_settings.VERSION,
}
BULK_SETTINGS = {
'DEFAULT_BULK_LIMIT': 100,
}
MAX_PAGE_SIZE = 100
REST_FRAMEWORK = {
'PAGE_SIZE': 10,
'DEFAULT_RENDERER_CLASSES': (
'api.base.renderers.JSONAPIRenderer',
'api.base.renderers.JSONRendererWithESISupport',
'api.base.renderers.BrowsableAPIRendererNoForms',
),
'DEFAULT_PARSER_CLASSES': (
'api.base.parsers.JSONAPIParser',
'api.base.parsers.JSONAPIParserForRegularJSON',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser',
),
'EXCEPTION_HANDLER': 'api.base.exceptions.json_api_exception_handler',
'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'api.base.content_negotiation.JSONAPIContentNegotiation',
'DEFAULT_VERSIONING_CLASS': 'api.base.versioning.BaseVersioning',
'DEFAULT_VERSION': '2.0',
'ALLOWED_VERSIONS': (
'2.0',
'2.1',
'2.2',
'2.3',
'2.4',
'2.5',
'2.6',
'2.7',
'2.8',
'2.9',
'2.10',
'2.11',
'2.12',
'2.13',
'2.14',
'2.15',
'2.16',
'2.17',
),
'DEFAULT_FILTER_BACKENDS': ('api.base.filters.OSFOrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination',
'ORDERING_PARAM': 'sort',
'DEFAULT_AUTHENTICATION_CLASSES': (
# Custom auth classes
'api.base.authentication.drf.OSFBasicAuthentication',
'api.base.authentication.drf.OSFSessionAuthentication',
'api.base.authentication.drf.OSFCASAuthentication',
),
'DEFAULT_THROTTLE_CLASSES': (
'rest_framework.throttling.UserRateThrottle',
'api.base.throttling.NonCookieAuthThrottle',
),
'DEFAULT_THROTTLE_RATES': {
'user': '10000/day',
'non-cookie-auth': '100/hour',
'add-contributor': '10/second',
'create-guid': '1000/hour',
'root-anon-throttle': '1000/hour',
'test-user': '2/hour',
'test-anon': '1/hour',
'send-email': '2/minute',
},
}
# Settings related to CORS Headers addon: allow API to receive authenticated requests from OSF
# CORS plugin only matches based on "netloc" part of URL, so as workaround we add that to the list
CORS_ORIGIN_ALLOW_ALL = False
CORS_ORIGIN_WHITELIST = (
urlparse(osf_settings.DOMAIN).netloc,
osf_settings.DOMAIN,
)
# This needs to remain True to allow cross origin requests that are in CORS_ORIGIN_WHITELIST to
# use cookies.
CORS_ALLOW_CREDENTIALS = True
# Set dynamically on app init
ORIGINS_WHITELIST = ()
MIDDLEWARE = (
'api.base.middleware.DjangoGlobalMiddleware',
'api.base.middleware.CeleryTaskMiddleware',
'api.base.middleware.PostcommitTaskMiddleware',
# A profiling middleware. ONLY FOR DEV USE
    # Uncomment and add "prof" to the URL params to receive a profile for that URL
# 'api.base.middleware.ProfileMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
'api.base.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
# 'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'waffle.middleware.WaffleMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
},
]
ROOT_URLCONF = 'api.base.urls'
WSGI_APPLICATION = 'api.base.wsgi.application'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# https://django-storages.readthedocs.io/en/latest/backends/gcloud.html
if os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', False):
# Required to interact with Google Cloud Storage
DEFAULT_FILE_STORAGE = 'api.base.storage.RequestlessURLGoogleCloudStorage'
GS_BUCKET_NAME = os.environ.get('GS_BUCKET_NAME', 'cos-osf-stage-cdn-us')
GS_FILE_OVERWRITE = os.environ.get('GS_FILE_OVERWRITE', False)
elif osf_settings.DEV_MODE or osf_settings.DEBUG_MODE:
DEFAULT_FILE_STORAGE = 'api.base.storage.DevFileSystemStorage'
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static/vendor')
API_BASE = 'v2/'
API_PRIVATE_BASE = '_/'
STATIC_URL = '/static/'
NODE_CATEGORY_MAP = osf_settings.NODE_CATEGORY_MAP
DEBUG_TRANSACTIONS = DEBUG
JWT_SECRET = 'osf_api_cas_login_jwt_secret_32b'
JWE_SECRET = 'osf_api_cas_login_jwe_secret_32b'
ENABLE_VARNISH = osf_settings.ENABLE_VARNISH
ENABLE_ESI = osf_settings.ENABLE_ESI
VARNISH_SERVERS = osf_settings.VARNISH_SERVERS
ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES
ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud', 'onedrive']
ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'gitlab', 'mendeley', 'zotero', 'forward']
BYPASS_THROTTLE_TOKEN = '<PASSWORD>'
OSF_SHELL_USER_IMPORTS = None
# Settings for use in the admin
OSF_URL = 'https://osf.io'
SELECT_FOR_UPDATE_ENABLED = True
# Disable anonymous user permissions in django-guardian
ANONYMOUS_USER_NAME = None
# If set to True, automated tests with extra queries will fail.
NPLUSONE_RAISE = False
# salt used for generating hashids
HASHIDS_SALT = 'pinkhimalayan'
# django-elasticsearch-metrics
ELASTICSEARCH_DSL = {
'default': {
'hosts': os.environ.get('ELASTIC6_URI', '127.0.0.1:9201'),
'retry_on_timeout': True,
},
}
# Store yearly indices for time-series metrics
ELASTICSEARCH_METRICS_DATE_FORMAT = '%Y'
WAFFLE_CACHE_NAME = 'waffle_cache'
STORAGE_USAGE_CACHE_NAME = 'storage_usage'
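# Cache configuration: an in-memory default cache, a database-backed cache for storage usage, and a local-memory cache for waffle flags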
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
STORAGE_USAGE_CACHE_NAME: {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'osf_cache_table',
},
WAFFLE_CACHE_NAME: {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
| [
"os.path.join",
"os.environ.get",
"os.path.abspath",
"urlparse.urlparse"
] | [((7215, 7270), 'os.environ.get', 'os.environ.get', (['"""GOOGLE_APPLICATION_CREDENTIALS"""', '(False)'], {}), "('GOOGLE_APPLICATION_CREDENTIALS', False)\n", (7229, 7270), False, 'import os\n'), ((7747, 7786), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static/vendor"""'], {}), "(BASE_DIR, 'static/vendor')\n", (7759, 7786), False, 'import os\n'), ((7425, 7481), 'os.environ.get', 'os.environ.get', (['"""GS_BUCKET_NAME"""', '"""cos-osf-stage-cdn-us"""'], {}), "('GS_BUCKET_NAME', 'cos-osf-stage-cdn-us')\n", (7439, 7481), False, 'import os\n'), ((7506, 7548), 'os.environ.get', 'os.environ.get', (['"""GS_FILE_OVERWRITE"""', '(False)'], {}), "('GS_FILE_OVERWRITE', False)\n", (7520, 7548), False, 'import os\n'), ((767, 803), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_NAME"""', '"""osf"""'], {}), "('OSF_DB_NAME', 'osf')\n", (781, 803), False, 'import os\n'), ((821, 862), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_USER"""', '"""postgres"""'], {}), "('OSF_DB_USER', 'postgres')\n", (835, 862), False, 'import os\n'), ((884, 921), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_PASSWORD"""', '""""""'], {}), "('OSF_DB_PASSWORD', '')\n", (898, 921), False, 'import os\n'), ((939, 981), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_HOST"""', '"""127.0.0.1"""'], {}), "('OSF_DB_HOST', '127.0.0.1')\n", (953, 981), False, 'import os\n'), ((999, 1036), 'os.environ.get', 'os.environ.get', (['"""OSF_DB_PORT"""', '"""5432"""'], {}), "('OSF_DB_PORT', '5432')\n", (1013, 1036), False, 'import os\n'), ((5625, 5654), 'urlparse.urlparse', 'urlparse', (['osf_settings.DOMAIN'], {}), '(osf_settings.DOMAIN)\n', (5633, 5654), False, 'from urlparse import urlparse\n'), ((8927, 8975), 'os.environ.get', 'os.environ.get', (['"""ELASTIC6_URI"""', '"""127.0.0.1:9201"""'], {}), "('ELASTIC6_URI', '127.0.0.1:9201')\n", (8941, 8975), False, 'import os\n'), ((447, 472), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (462, 472), False, 'import os\n'), ((6892, 6927), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (6904, 6927), False, 'import os\n')] |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import fibcapi_pb2 as fibcapi__pb2
import fibcapis_pb2 as fibcapis__pb2
class FIBCApApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCApApi/Monitor',
request_serializer=fibcapis__pb2.ApMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.ApMonitorReply.FromString,
)
self.GetPortStats = channel.unary_stream(
'/fibcapi.FIBCApApi/GetPortStats',
request_serializer=fibcapis__pb2.ApGetPortStatsRequest.SerializeToString,
response_deserializer=fibcapi__pb2.FFPortStats.FromString,
)
self.ModPortStats = channel.unary_unary(
'/fibcapi.FIBCApApi/ModPortStats',
request_serializer=fibcapis__pb2.ApModPortStatsRequest.SerializeToString,
response_deserializer=fibcapis__pb2.ApModPortStatsReply.FromString,
)
self.GetPortEntries = channel.unary_stream(
'/fibcapi.FIBCApApi/GetPortEntries',
request_serializer=fibcapis__pb2.ApGetPortEntriesRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DbPortEntry.FromString,
)
self.GetIDEntries = channel.unary_stream(
'/fibcapi.FIBCApApi/GetIDEntries',
request_serializer=fibcapis__pb2.ApGetIdEntriesRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DbIdEntry.FromString,
)
self.GetDpEntries = channel.unary_stream(
'/fibcapi.FIBCApApi/GetDpEntries',
request_serializer=fibcapis__pb2.ApGetDpEntriesRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DbDpEntry.FromString,
)
self.AddPortEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/AddPortEntry',
request_serializer=fibcapis__pb2.DbPortEntry.SerializeToString,
response_deserializer=fibcapis__pb2.ApAddPortEntryReply.FromString,
)
self.AddIDEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/AddIDEntry',
request_serializer=fibcapis__pb2.DbIdEntry.SerializeToString,
response_deserializer=fibcapis__pb2.ApAddIdEntryReply.FromString,
)
self.DelPortEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/DelPortEntry',
request_serializer=fibcapis__pb2.DbPortKey.SerializeToString,
response_deserializer=fibcapis__pb2.ApDelPortEntryReply.FromString,
)
self.DelIDEntry = channel.unary_unary(
'/fibcapi.FIBCApApi/DelIDEntry',
request_serializer=fibcapis__pb2.DbIdEntry.SerializeToString,
response_deserializer=fibcapis__pb2.ApDelIdEntryReply.FromString,
)
self.GetStats = channel.unary_stream(
'/fibcapi.FIBCApApi/GetStats',
request_serializer=fibcapis__pb2.ApGetStatsRequest.SerializeToString,
response_deserializer=fibcapis__pb2.StatsEntry.FromString,
)
self.RunOAM = channel.unary_unary(
'/fibcapi.FIBCApApi/RunOAM',
request_serializer=fibcapi__pb2.OAM.Request.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
class FIBCApApiServicer(object):
# missing associated documentation comment in .proto file
pass
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPortStats(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ModPortStats(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPortEntries(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIDEntries(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDpEntries(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddPortEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddIDEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DelPortEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DelIDEntry(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetStats(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunOAM(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCApApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.ApMonitorRequest.FromString,
response_serializer=fibcapis__pb2.ApMonitorReply.SerializeToString,
),
'GetPortStats': grpc.unary_stream_rpc_method_handler(
servicer.GetPortStats,
request_deserializer=fibcapis__pb2.ApGetPortStatsRequest.FromString,
response_serializer=fibcapi__pb2.FFPortStats.SerializeToString,
),
'ModPortStats': grpc.unary_unary_rpc_method_handler(
servicer.ModPortStats,
request_deserializer=fibcapis__pb2.ApModPortStatsRequest.FromString,
response_serializer=fibcapis__pb2.ApModPortStatsReply.SerializeToString,
),
'GetPortEntries': grpc.unary_stream_rpc_method_handler(
servicer.GetPortEntries,
request_deserializer=fibcapis__pb2.ApGetPortEntriesRequest.FromString,
response_serializer=fibcapis__pb2.DbPortEntry.SerializeToString,
),
'GetIDEntries': grpc.unary_stream_rpc_method_handler(
servicer.GetIDEntries,
request_deserializer=fibcapis__pb2.ApGetIdEntriesRequest.FromString,
response_serializer=fibcapis__pb2.DbIdEntry.SerializeToString,
),
'GetDpEntries': grpc.unary_stream_rpc_method_handler(
servicer.GetDpEntries,
request_deserializer=fibcapis__pb2.ApGetDpEntriesRequest.FromString,
response_serializer=fibcapis__pb2.DbDpEntry.SerializeToString,
),
'AddPortEntry': grpc.unary_unary_rpc_method_handler(
servicer.AddPortEntry,
request_deserializer=fibcapis__pb2.DbPortEntry.FromString,
response_serializer=fibcapis__pb2.ApAddPortEntryReply.SerializeToString,
),
'AddIDEntry': grpc.unary_unary_rpc_method_handler(
servicer.AddIDEntry,
request_deserializer=fibcapis__pb2.DbIdEntry.FromString,
response_serializer=fibcapis__pb2.ApAddIdEntryReply.SerializeToString,
),
'DelPortEntry': grpc.unary_unary_rpc_method_handler(
servicer.DelPortEntry,
request_deserializer=fibcapis__pb2.DbPortKey.FromString,
response_serializer=fibcapis__pb2.ApDelPortEntryReply.SerializeToString,
),
'DelIDEntry': grpc.unary_unary_rpc_method_handler(
servicer.DelIDEntry,
request_deserializer=fibcapis__pb2.DbIdEntry.FromString,
response_serializer=fibcapis__pb2.ApDelIdEntryReply.SerializeToString,
),
'GetStats': grpc.unary_stream_rpc_method_handler(
servicer.GetStats,
request_deserializer=fibcapis__pb2.ApGetStatsRequest.FromString,
response_serializer=fibcapis__pb2.StatsEntry.SerializeToString,
),
'RunOAM': grpc.unary_unary_rpc_method_handler(
servicer.RunOAM,
request_deserializer=fibcapi__pb2.OAM.Request.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCApApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class FIBCVmApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SendHello = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendHello',
request_serializer=fibcapi__pb2.Hello.SerializeToString,
response_deserializer=fibcapis__pb2.HelloReply.FromString,
)
self.SendPortConfig = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendPortConfig',
request_serializer=fibcapi__pb2.PortConfig.SerializeToString,
response_deserializer=fibcapis__pb2.PortConfigReply.FromString,
)
self.SendFlowMod = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendFlowMod',
request_serializer=fibcapi__pb2.FlowMod.SerializeToString,
response_deserializer=fibcapis__pb2.FlowModReply.FromString,
)
self.SendGroupMod = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendGroupMod',
request_serializer=fibcapi__pb2.GroupMod.SerializeToString,
response_deserializer=fibcapis__pb2.GroupModReply.FromString,
)
self.SendOAMReply = channel.unary_unary(
'/fibcapi.FIBCVmApi/SendOAMReply',
request_serializer=fibcapis__pb2.OAMReply.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCVmApi/Monitor',
request_serializer=fibcapis__pb2.VmMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.VmMonitorReply.FromString,
)
class FIBCVmApiServicer(object):
# missing associated documentation comment in .proto file
pass
def SendHello(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPortConfig(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendFlowMod(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendGroupMod(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendOAMReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCVmApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'SendHello': grpc.unary_unary_rpc_method_handler(
servicer.SendHello,
request_deserializer=fibcapi__pb2.Hello.FromString,
response_serializer=fibcapis__pb2.HelloReply.SerializeToString,
),
'SendPortConfig': grpc.unary_unary_rpc_method_handler(
servicer.SendPortConfig,
request_deserializer=fibcapi__pb2.PortConfig.FromString,
response_serializer=fibcapis__pb2.PortConfigReply.SerializeToString,
),
'SendFlowMod': grpc.unary_unary_rpc_method_handler(
servicer.SendFlowMod,
request_deserializer=fibcapi__pb2.FlowMod.FromString,
response_serializer=fibcapis__pb2.FlowModReply.SerializeToString,
),
'SendGroupMod': grpc.unary_unary_rpc_method_handler(
servicer.SendGroupMod,
request_deserializer=fibcapi__pb2.GroupMod.FromString,
response_serializer=fibcapis__pb2.GroupModReply.SerializeToString,
),
'SendOAMReply': grpc.unary_unary_rpc_method_handler(
servicer.SendOAMReply,
request_deserializer=fibcapis__pb2.OAMReply.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.VmMonitorRequest.FromString,
response_serializer=fibcapis__pb2.VmMonitorReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCVmApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class FIBCVsApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SendHello = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendHello',
request_serializer=fibcapi__pb2.FFHello.SerializeToString,
response_deserializer=fibcapis__pb2.FFHelloReply.FromString,
)
self.SendFFPacket = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendFFPacket',
request_serializer=fibcapi__pb2.FFPacket.SerializeToString,
response_deserializer=fibcapis__pb2.FFPacketReply.FromString,
)
self.SendPacketIn = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendPacketIn',
request_serializer=fibcapi__pb2.FFPacketIn.SerializeToString,
response_deserializer=fibcapis__pb2.FFPacketInReply.FromString,
)
self.SendOAMReply = channel.unary_unary(
'/fibcapi.FIBCVsApi/SendOAMReply',
request_serializer=fibcapis__pb2.OAMReply.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCVsApi/Monitor',
request_serializer=fibcapis__pb2.VsMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.VsMonitorReply.FromString,
)
class FIBCVsApiServicer(object):
# missing associated documentation comment in .proto file
pass
def SendHello(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendFFPacket(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPacketIn(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendOAMReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCVsApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'SendHello': grpc.unary_unary_rpc_method_handler(
servicer.SendHello,
request_deserializer=fibcapi__pb2.FFHello.FromString,
response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString,
),
'SendFFPacket': grpc.unary_unary_rpc_method_handler(
servicer.SendFFPacket,
request_deserializer=fibcapi__pb2.FFPacket.FromString,
response_serializer=fibcapis__pb2.FFPacketReply.SerializeToString,
),
'SendPacketIn': grpc.unary_unary_rpc_method_handler(
servicer.SendPacketIn,
request_deserializer=fibcapi__pb2.FFPacketIn.FromString,
response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString,
),
'SendOAMReply': grpc.unary_unary_rpc_method_handler(
servicer.SendOAMReply,
request_deserializer=fibcapis__pb2.OAMReply.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.VsMonitorRequest.FromString,
response_serializer=fibcapis__pb2.VsMonitorReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCVsApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class FIBCDpApiStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SendHello = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendHello',
request_serializer=fibcapi__pb2.FFHello.SerializeToString,
response_deserializer=fibcapis__pb2.FFHelloReply.FromString,
)
self.SendPacketIn = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendPacketIn',
request_serializer=fibcapi__pb2.FFPacketIn.SerializeToString,
response_deserializer=fibcapis__pb2.FFPacketInReply.FromString,
)
self.SendPortStatus = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendPortStatus',
request_serializer=fibcapi__pb2.FFPortStatus.SerializeToString,
response_deserializer=fibcapis__pb2.FFPortStatusReply.FromString,
)
self.SendL2AddrStatus = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendL2AddrStatus',
request_serializer=fibcapi__pb2.FFL2AddrStatus.SerializeToString,
response_deserializer=fibcapis__pb2.L2AddrStatusReply.FromString,
)
self.SendMultipartReply = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendMultipartReply',
request_serializer=fibcapis__pb2.DpMultipartReply.SerializeToString,
response_deserializer=fibcapis__pb2.DpMultipartReplyAck.FromString,
)
self.SendOAMReply = channel.unary_unary(
'/fibcapi.FIBCDpApi/SendOAMReply',
request_serializer=fibcapis__pb2.OAMReply.SerializeToString,
response_deserializer=fibcapis__pb2.OAMReplyAck.FromString,
)
self.Monitor = channel.unary_stream(
'/fibcapi.FIBCDpApi/Monitor',
request_serializer=fibcapis__pb2.DpMonitorRequest.SerializeToString,
response_deserializer=fibcapis__pb2.DpMonitorReply.FromString,
)
class FIBCDpApiServicer(object):
# missing associated documentation comment in .proto file
pass
def SendHello(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPacketIn(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendPortStatus(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendL2AddrStatus(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendMultipartReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SendOAMReply(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Monitor(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_FIBCDpApiServicer_to_server(servicer, server):
rpc_method_handlers = {
'SendHello': grpc.unary_unary_rpc_method_handler(
servicer.SendHello,
request_deserializer=fibcapi__pb2.FFHello.FromString,
response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString,
),
'SendPacketIn': grpc.unary_unary_rpc_method_handler(
servicer.SendPacketIn,
request_deserializer=fibcapi__pb2.FFPacketIn.FromString,
response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString,
),
'SendPortStatus': grpc.unary_unary_rpc_method_handler(
servicer.SendPortStatus,
request_deserializer=fibcapi__pb2.FFPortStatus.FromString,
response_serializer=fibcapis__pb2.FFPortStatusReply.SerializeToString,
),
'SendL2AddrStatus': grpc.unary_unary_rpc_method_handler(
servicer.SendL2AddrStatus,
request_deserializer=fibcapi__pb2.FFL2AddrStatus.FromString,
response_serializer=fibcapis__pb2.L2AddrStatusReply.SerializeToString,
),
'SendMultipartReply': grpc.unary_unary_rpc_method_handler(
servicer.SendMultipartReply,
request_deserializer=fibcapis__pb2.DpMultipartReply.FromString,
response_serializer=fibcapis__pb2.DpMultipartReplyAck.SerializeToString,
),
'SendOAMReply': grpc.unary_unary_rpc_method_handler(
servicer.SendOAMReply,
request_deserializer=fibcapis__pb2.OAMReply.FromString,
response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString,
),
'Monitor': grpc.unary_stream_rpc_method_handler(
servicer.Monitor,
request_deserializer=fibcapis__pb2.DpMonitorRequest.FromString,
response_serializer=fibcapis__pb2.DpMonitorReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'fibcapi.FIBCDpApi', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
| [
"grpc.method_handlers_generic_handler",
"grpc.unary_unary_rpc_method_handler",
"grpc.unary_stream_rpc_method_handler"
] | [((9777, 9855), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCApApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCApApi', rpc_method_handlers)\n", (9813, 9855), False, 'import grpc\n'), ((14833, 14911), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCVmApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCVmApi', rpc_method_handlers)\n", (14869, 14911), False, 'import grpc\n'), ((19141, 19219), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCVsApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCVsApi', rpc_method_handlers)\n", (19177, 19219), False, 'import grpc\n'), ((25085, 25163), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""fibcapi.FIBCDpApi"""', 'rpc_method_handlers'], {}), "('fibcapi.FIBCDpApi', rpc_method_handlers)\n", (25121, 25163), False, 'import grpc\n'), ((6769, 6965), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.ApMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.ApMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.ApMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.ApMonitorReply.SerializeToString)\n', (6805, 6965), False, 'import grpc\n'), ((7018, 7218), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetPortStats'], {'request_deserializer': 'fibcapis__pb2.ApGetPortStatsRequest.FromString', 'response_serializer': 'fibcapi__pb2.FFPortStats.SerializeToString'}), '(servicer.GetPortStats,\n request_deserializer=fibcapis__pb2.ApGetPortStatsRequest.FromString,\n response_serializer=fibcapi__pb2.FFPortStats.SerializeToString)\n', (7054, 7218), False, 'import grpc\n'), ((7273, 7481), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.ModPortStats'], {'request_deserializer': 'fibcapis__pb2.ApModPortStatsRequest.FromString', 'response_serializer': 'fibcapis__pb2.ApModPortStatsReply.SerializeToString'}), '(servicer.ModPortStats,\n request_deserializer=fibcapis__pb2.ApModPortStatsRequest.FromString,\n response_serializer=fibcapis__pb2.ApModPortStatsReply.SerializeToString)\n', (7308, 7481), False, 'import grpc\n'), ((7538, 7743), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetPortEntries'], {'request_deserializer': 'fibcapis__pb2.ApGetPortEntriesRequest.FromString', 'response_serializer': 'fibcapis__pb2.DbPortEntry.SerializeToString'}), '(servicer.GetPortEntries,\n request_deserializer=fibcapis__pb2.ApGetPortEntriesRequest.FromString,\n response_serializer=fibcapis__pb2.DbPortEntry.SerializeToString)\n', (7574, 7743), False, 'import grpc\n'), ((7798, 7997), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetIDEntries'], {'request_deserializer': 'fibcapis__pb2.ApGetIdEntriesRequest.FromString', 'response_serializer': 'fibcapis__pb2.DbIdEntry.SerializeToString'}), '(servicer.GetIDEntries,\n request_deserializer=fibcapis__pb2.ApGetIdEntriesRequest.FromString,\n response_serializer=fibcapis__pb2.DbIdEntry.SerializeToString)\n', (7834, 7997), False, 'import grpc\n'), ((8052, 8251), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetDpEntries'], {'request_deserializer': 
'fibcapis__pb2.ApGetDpEntriesRequest.FromString', 'response_serializer': 'fibcapis__pb2.DbDpEntry.SerializeToString'}), '(servicer.GetDpEntries,\n request_deserializer=fibcapis__pb2.ApGetDpEntriesRequest.FromString,\n response_serializer=fibcapis__pb2.DbDpEntry.SerializeToString)\n', (8088, 8251), False, 'import grpc\n'), ((8306, 8504), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.AddPortEntry'], {'request_deserializer': 'fibcapis__pb2.DbPortEntry.FromString', 'response_serializer': 'fibcapis__pb2.ApAddPortEntryReply.SerializeToString'}), '(servicer.AddPortEntry,\n request_deserializer=fibcapis__pb2.DbPortEntry.FromString,\n response_serializer=fibcapis__pb2.ApAddPortEntryReply.SerializeToString)\n', (8341, 8504), False, 'import grpc\n'), ((8557, 8749), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.AddIDEntry'], {'request_deserializer': 'fibcapis__pb2.DbIdEntry.FromString', 'response_serializer': 'fibcapis__pb2.ApAddIdEntryReply.SerializeToString'}), '(servicer.AddIDEntry,\n request_deserializer=fibcapis__pb2.DbIdEntry.FromString,\n response_serializer=fibcapis__pb2.ApAddIdEntryReply.SerializeToString)\n', (8592, 8749), False, 'import grpc\n'), ((8804, 9000), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DelPortEntry'], {'request_deserializer': 'fibcapis__pb2.DbPortKey.FromString', 'response_serializer': 'fibcapis__pb2.ApDelPortEntryReply.SerializeToString'}), '(servicer.DelPortEntry,\n request_deserializer=fibcapis__pb2.DbPortKey.FromString,\n response_serializer=fibcapis__pb2.ApDelPortEntryReply.SerializeToString)\n', (8839, 9000), False, 'import grpc\n'), ((9053, 9245), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.DelIDEntry'], {'request_deserializer': 'fibcapis__pb2.DbIdEntry.FromString', 'response_serializer': 'fibcapis__pb2.ApDelIdEntryReply.SerializeToString'}), '(servicer.DelIDEntry,\n request_deserializer=fibcapis__pb2.DbIdEntry.FromString,\n response_serializer=fibcapis__pb2.ApDelIdEntryReply.SerializeToString)\n', (9088, 9245), False, 'import grpc\n'), ((9296, 9488), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.GetStats'], {'request_deserializer': 'fibcapis__pb2.ApGetStatsRequest.FromString', 'response_serializer': 'fibcapis__pb2.StatsEntry.SerializeToString'}), '(servicer.GetStats,\n request_deserializer=fibcapis__pb2.ApGetStatsRequest.FromString,\n response_serializer=fibcapis__pb2.StatsEntry.SerializeToString)\n', (9332, 9488), False, 'import grpc\n'), ((9537, 9722), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.RunOAM'], {'request_deserializer': 'fibcapi__pb2.OAM.Request.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.RunOAM, request_deserializer=\n fibcapi__pb2.OAM.Request.FromString, response_serializer=fibcapis__pb2.\n OAMReplyAck.SerializeToString)\n', (9572, 9722), False, 'import grpc\n'), ((13378, 13558), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendHello'], {'request_deserializer': 'fibcapi__pb2.Hello.FromString', 'response_serializer': 'fibcapis__pb2.HelloReply.SerializeToString'}), '(servicer.SendHello,\n request_deserializer=fibcapi__pb2.Hello.FromString, response_serializer\n =fibcapis__pb2.HelloReply.SerializeToString)\n', (13413, 13558), False, 'import grpc\n'), ((13614, 13808), 
'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPortConfig'], {'request_deserializer': 'fibcapi__pb2.PortConfig.FromString', 'response_serializer': 'fibcapis__pb2.PortConfigReply.SerializeToString'}), '(servicer.SendPortConfig,\n request_deserializer=fibcapi__pb2.PortConfig.FromString,\n response_serializer=fibcapis__pb2.PortConfigReply.SerializeToString)\n', (13649, 13808), False, 'import grpc\n'), ((13862, 14047), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendFlowMod'], {'request_deserializer': 'fibcapi__pb2.FlowMod.FromString', 'response_serializer': 'fibcapis__pb2.FlowModReply.SerializeToString'}), '(servicer.SendFlowMod,\n request_deserializer=fibcapi__pb2.FlowMod.FromString,\n response_serializer=fibcapis__pb2.FlowModReply.SerializeToString)\n', (13897, 14047), False, 'import grpc\n'), ((14102, 14290), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendGroupMod'], {'request_deserializer': 'fibcapi__pb2.GroupMod.FromString', 'response_serializer': 'fibcapis__pb2.GroupModReply.SerializeToString'}), '(servicer.SendGroupMod,\n request_deserializer=fibcapi__pb2.GroupMod.FromString,\n response_serializer=fibcapis__pb2.GroupModReply.SerializeToString)\n', (14137, 14290), False, 'import grpc\n'), ((14345, 14532), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendOAMReply'], {'request_deserializer': 'fibcapis__pb2.OAMReply.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.SendOAMReply,\n request_deserializer=fibcapis__pb2.OAMReply.FromString,\n response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString)\n', (14380, 14532), False, 'import grpc\n'), ((14582, 14778), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.VmMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.VmMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.VmMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.VmMonitorReply.SerializeToString)\n', (14618, 14778), False, 'import grpc\n'), ((17925, 18108), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendHello'], {'request_deserializer': 'fibcapi__pb2.FFHello.FromString', 'response_serializer': 'fibcapis__pb2.FFHelloReply.SerializeToString'}), '(servicer.SendHello,\n request_deserializer=fibcapi__pb2.FFHello.FromString,\n response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString)\n', (17960, 18108), False, 'import grpc\n'), ((18163, 18351), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendFFPacket'], {'request_deserializer': 'fibcapi__pb2.FFPacket.FromString', 'response_serializer': 'fibcapis__pb2.FFPacketReply.SerializeToString'}), '(servicer.SendFFPacket,\n request_deserializer=fibcapi__pb2.FFPacket.FromString,\n response_serializer=fibcapis__pb2.FFPacketReply.SerializeToString)\n', (18198, 18351), False, 'import grpc\n'), ((18406, 18598), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPacketIn'], {'request_deserializer': 'fibcapi__pb2.FFPacketIn.FromString', 'response_serializer': 'fibcapis__pb2.FFPacketInReply.SerializeToString'}), '(servicer.SendPacketIn,\n request_deserializer=fibcapi__pb2.FFPacketIn.FromString,\n 
response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString)\n', (18441, 18598), False, 'import grpc\n'), ((18653, 18840), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendOAMReply'], {'request_deserializer': 'fibcapis__pb2.OAMReply.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.SendOAMReply,\n request_deserializer=fibcapis__pb2.OAMReply.FromString,\n response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString)\n', (18688, 18840), False, 'import grpc\n'), ((18890, 19086), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.VsMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.VsMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.VsMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.VsMonitorReply.SerializeToString)\n', (18926, 19086), False, 'import grpc\n'), ((23326, 23509), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendHello'], {'request_deserializer': 'fibcapi__pb2.FFHello.FromString', 'response_serializer': 'fibcapis__pb2.FFHelloReply.SerializeToString'}), '(servicer.SendHello,\n request_deserializer=fibcapi__pb2.FFHello.FromString,\n response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString)\n', (23361, 23509), False, 'import grpc\n'), ((23564, 23756), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPacketIn'], {'request_deserializer': 'fibcapi__pb2.FFPacketIn.FromString', 'response_serializer': 'fibcapis__pb2.FFPacketInReply.SerializeToString'}), '(servicer.SendPacketIn,\n request_deserializer=fibcapi__pb2.FFPacketIn.FromString,\n response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString)\n', (23599, 23756), False, 'import grpc\n'), ((23813, 24011), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendPortStatus'], {'request_deserializer': 'fibcapi__pb2.FFPortStatus.FromString', 'response_serializer': 'fibcapis__pb2.FFPortStatusReply.SerializeToString'}), '(servicer.SendPortStatus,\n request_deserializer=fibcapi__pb2.FFPortStatus.FromString,\n response_serializer=fibcapis__pb2.FFPortStatusReply.SerializeToString)\n', (23848, 24011), False, 'import grpc\n'), ((24070, 24272), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendL2AddrStatus'], {'request_deserializer': 'fibcapi__pb2.FFL2AddrStatus.FromString', 'response_serializer': 'fibcapis__pb2.L2AddrStatusReply.SerializeToString'}), '(servicer.SendL2AddrStatus,\n request_deserializer=fibcapi__pb2.FFL2AddrStatus.FromString,\n response_serializer=fibcapis__pb2.L2AddrStatusReply.SerializeToString)\n', (24105, 24272), False, 'import grpc\n'), ((24333, 24542), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendMultipartReply'], {'request_deserializer': 'fibcapis__pb2.DpMultipartReply.FromString', 'response_serializer': 'fibcapis__pb2.DpMultipartReplyAck.SerializeToString'}), '(servicer.SendMultipartReply,\n request_deserializer=fibcapis__pb2.DpMultipartReply.FromString,\n response_serializer=fibcapis__pb2.DpMultipartReplyAck.SerializeToString)\n', (24368, 24542), False, 'import grpc\n'), ((24597, 24784), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.SendOAMReply'], {'request_deserializer': 
'fibcapis__pb2.OAMReply.FromString', 'response_serializer': 'fibcapis__pb2.OAMReplyAck.SerializeToString'}), '(servicer.SendOAMReply,\n request_deserializer=fibcapis__pb2.OAMReply.FromString,\n response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString)\n', (24632, 24784), False, 'import grpc\n'), ((24834, 25030), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Monitor'], {'request_deserializer': 'fibcapis__pb2.DpMonitorRequest.FromString', 'response_serializer': 'fibcapis__pb2.DpMonitorReply.SerializeToString'}), '(servicer.Monitor, request_deserializer\n =fibcapis__pb2.DpMonitorRequest.FromString, response_serializer=\n fibcapis__pb2.DpMonitorReply.SerializeToString)\n', (24870, 25030), False, 'import grpc\n')] |
from marshmallow import Schema, fields
from marshmallow.validate import Range, Length
from sqlalchemy import Column, Integer, Boolean, DateTime
from ..db import Base
from ..shared.models import StringTypes
# ---- Error-report
class ErrorReport(Base):
__tablename__ = 'error_report'
id = Column(Integer, primary_key=True)
description = Column(StringTypes.LONG_STRING, nullable=False)
time_stamp = Column(DateTime)
status_code = Column(Integer)
endpoint = Column(StringTypes.MEDIUM_STRING)
solved = Column(Boolean, default=False)
def __repr__(self):
return f"<Error-report(id={self.id})>"
class ErrorReportSchema(Schema):
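    # Marshmallow schema mirroring the ErrorReport model, used for serialization and validation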
id = fields.Integer(dump_only=True, required=True, validate=Range(min=1))
description = fields.String(required=True, validate=Length(min=1))
time_stamp = fields.DateTime()
status_code = fields.Integer()
endpoint = fields.String()
solved = fields.Boolean()
| [
"marshmallow.validate.Range",
"marshmallow.fields.DateTime",
"sqlalchemy.Column",
"marshmallow.validate.Length",
"marshmallow.fields.String",
"marshmallow.fields.Integer",
"marshmallow.fields.Boolean"
] | [((300, 333), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (306, 333), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((352, 399), 'sqlalchemy.Column', 'Column', (['StringTypes.LONG_STRING'], {'nullable': '(False)'}), '(StringTypes.LONG_STRING, nullable=False)\n', (358, 399), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((417, 433), 'sqlalchemy.Column', 'Column', (['DateTime'], {}), '(DateTime)\n', (423, 433), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((452, 467), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (458, 467), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((483, 516), 'sqlalchemy.Column', 'Column', (['StringTypes.MEDIUM_STRING'], {}), '(StringTypes.MEDIUM_STRING)\n', (489, 516), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((530, 560), 'sqlalchemy.Column', 'Column', (['Boolean'], {'default': '(False)'}), '(Boolean, default=False)\n', (536, 560), False, 'from sqlalchemy import Column, Integer, Boolean, DateTime\n'), ((834, 851), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {}), '()\n', (849, 851), False, 'from marshmallow import Schema, fields\n'), ((870, 886), 'marshmallow.fields.Integer', 'fields.Integer', ([], {}), '()\n', (884, 886), False, 'from marshmallow import Schema, fields\n'), ((902, 917), 'marshmallow.fields.String', 'fields.String', ([], {}), '()\n', (915, 917), False, 'from marshmallow import Schema, fields\n'), ((931, 947), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {}), '()\n', (945, 947), False, 'from marshmallow import Schema, fields\n'), ((732, 744), 'marshmallow.validate.Range', 'Range', ([], {'min': '(1)'}), '(min=1)\n', (737, 744), False, 'from marshmallow.validate import Range, Length\n'), ((802, 815), 'marshmallow.validate.Length', 'Length', ([], {'min': '(1)'}), '(min=1)\n', (808, 815), False, 'from marshmallow.validate import Range, Length\n')] |
import pytest
from plenum.server.view_change.view_changer import ViewChanger
from stp_core.common.log import getlogger
from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node
logger = getlogger()
@pytest.fixture(scope="module", autouse=True)
def tconf(tconf):
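    # Shorten the view change timeout for this module so the initial check for a
    # disconnected primary fires quickly; restore the original value on teardown.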
old_vc_timeout = tconf.VIEW_CHANGE_TIMEOUT
tconf.VIEW_CHANGE_TIMEOUT = 10
yield tconf
tconf.VIEW_CHANGE_TIMEOUT = old_vc_timeout
def test_no_instance_change_on_primary_disconnection_for_not_ready_node(
looper, txnPoolNodeSet, tdir, tconf,
allPluginsPath, sdk_pool_handle, sdk_wallet_steward):
"""
Test steps:
1. create a new node, but don't add it to the pool (so not send NODE txn), so that the node is not ready.
2. wait for more than VIEW_CHANGE_TIMEOUT (a timeout for initial check for disconnected primary)
3. make sure no InstanceChange sent by the new node
4. add the node to the pool (send NODE txn) and make sure that the node is ready now.
5. wait for more than VIEW_CHANGE_TIMEOUT (a timeout for initial check for disconnected primary)
6. make sure no InstanceChange sent by the new node
"""
    # 1. Create a new node, but do not add it to the pool (do not send the NODE txn), so that the node is not ready.
sigseed, bls_key, new_node, node_ha, client_ha = \
start_not_added_node(looper,
tdir, tconf, allPluginsPath,
"TestTheta")
    # 2. Wait for more than VIEW_CHANGE_TIMEOUT (the timeout for the initial check for a disconnected primary)
looper.runFor(tconf.VIEW_CHANGE_TIMEOUT + 2)
    # 3. Make sure no InstanceChange is sent by the new node
assert 0 == new_node.view_changer.spylog.count(ViewChanger.sendInstanceChange.__name__)
logger.info("Start added node {}".format(new_node))
    # 4. Add the node to the pool (send the NODE txn) and make sure that the node is now ready.
add_started_node(looper,
new_node,
node_ha,
client_ha,
txnPoolNodeSet,
sdk_pool_handle,
sdk_wallet_steward,
bls_key)
    # 5. Wait for more than VIEW_CHANGE_TIMEOUT (the timeout for the initial check for a disconnected primary)
looper.runFor(tconf.VIEW_CHANGE_TIMEOUT + 2)
    # 6. Make sure no InstanceChange is sent by the new node
assert 0 == new_node.view_changer.spylog.count(ViewChanger.sendInstanceChange.__name__)
| [
"pytest.fixture",
"plenum.test.pool_transactions.helper.add_started_node",
"plenum.test.pool_transactions.helper.start_not_added_node",
"stp_core.common.log.getlogger"
] | [((219, 230), 'stp_core.common.log.getlogger', 'getlogger', ([], {}), '()\n', (228, 230), False, 'from stp_core.common.log import getlogger\n'), ((234, 278), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '(True)'}), "(scope='module', autouse=True)\n", (248, 278), False, 'import pytest\n'), ((1346, 1416), 'plenum.test.pool_transactions.helper.start_not_added_node', 'start_not_added_node', (['looper', 'tdir', 'tconf', 'allPluginsPath', '"""TestTheta"""'], {}), "(looper, tdir, tconf, allPluginsPath, 'TestTheta')\n", (1366, 1416), False, 'from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node\n'), ((1933, 2053), 'plenum.test.pool_transactions.helper.add_started_node', 'add_started_node', (['looper', 'new_node', 'node_ha', 'client_ha', 'txnPoolNodeSet', 'sdk_pool_handle', 'sdk_wallet_steward', 'bls_key'], {}), '(looper, new_node, node_ha, client_ha, txnPoolNodeSet,\n sdk_pool_handle, sdk_wallet_steward, bls_key)\n', (1949, 2053), False, 'from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node\n')] |
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import os
from programy.config.file.json_file import JSONConfigurationFile
from programy.clients.events.console.config import ConsoleConfiguration
from programy.utils.substitutions.substitues import Substitutions
from programytest.config.file.base_file_tests import ConfigurationBaseFileTests
class JSONConfigurationFileTests(ConfigurationBaseFileTests):
def test_get_methods(self):
config_data = JSONConfigurationFile()
self.assertIsNotNone(config_data)
configuration = config_data.load_from_text("""
{
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
section = config_data.get_section("brainx")
self.assertIsNone(section)
section = config_data.get_section("brain")
self.assertIsNotNone(section)
child_section = config_data.get_section("overrides", section)
self.assertIsNotNone(child_section)
keys = list(config_data.get_child_section_keys("overrides", section))
self.assertIsNotNone(keys)
self.assertEqual(3, len(keys))
self.assertTrue("allow_system_aiml" in keys)
self.assertTrue("allow_learn_aiml" in keys)
self.assertTrue("allow_learnf_aiml" in keys)
self.assertIsNone(config_data.get_child_section_keys("missing", section))
self.assertEqual(True, config_data.get_option(child_section, "allow_system_aiml"))
self.assertEqual(True, config_data.get_option(child_section, "missing", missing_value=True))
self.assertEqual(True, config_data.get_bool_option(child_section, "allow_system_aiml"))
self.assertEqual(False, config_data.get_bool_option(child_section, "other_value"))
self.assertEqual(0, config_data.get_int_option(child_section, "other_value"))
def test_load_from_file(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_file(os.path.dirname(__file__) + os.sep + "test_json.json", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assert_configuration(configuration)
def test_load_from_text_multis_one_value(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"bot": {
"brain": "bot1"
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assertEqual(1, len(configuration.client_configuration.configurations[0].configurations))
def test_load_from_text_multis_multiple_values(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot"
},
"bot": {
"brain": ["bot1", "bot2"]
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assertEqual(2, len(configuration.client_configuration.configurations[0].configurations))
def test_load_from_text(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot",
"prompt": ">>>",
"scheduler": {
"name": "Scheduler1",
"debug_level": 50,
"add_listeners": false,
"remove_all_jobs": false
},
"storage": {
"entities": {
"users": "sql",
"linked_accounts": "sql",
"links": "sql",
"properties": "file",
"conversations": "file",
"categories": "file",
"maps": "file",
"sets": "file",
"rdf": "file",
"denormal": "file",
"normal": "file",
"gender": "file",
"person": "file",
"person2": "file",
"spelling_corpus": "file",
"license_keys": "file",
"nodes": "file",
"binaries": "file",
"braintree": "file",
"preprocessors": "file",
"postprocessors": "file",
"regex_templates": "file",
"usergroups": "file",
"learnf": "file"
},
"stores": {
"sql": {
"type": "sql",
"config": {
"url": "sqlite:///:memory",
"echo": false,
"encoding": "utf-8",
"create_db": true,
"drop_all_first": true
}
},
"mongo": {
"type": "mongo",
"config": {
"url": "mongodb://localhost:27017/",
"database": "programy",
"drop_all_first": true
}
},
"redis": {
"type": "redis",
"config": {
"host": "localhost",
"port": 6379,
"password": <PASSWORD>,
"db": 0,
"prefix": "programy",
"drop_all_first": true
}
},
"file": {
"type": "file",
"config": {
"category_storage": {
"files": "./storage/categories"
},
"conversations_storage": {
"files": "./storage/conversations"
},
"sets_storage": {
"files": "./storage/sets",
"extension": ".txt",
"directories": false
},
"maps_storage": {
"files": "./storage/maps",
"extension": ".txt",
"directories": false
},
"regex_templates": {
"files": "./storage/regex"
},
"lookups_storage": {
"files": "./storage/lookups",
"extension": ".txt",
"directories": false
},
"properties_storage": {
"file": "./storage/properties.txt"
},
"defaults_storage": {
"file": "./storage/defaults.txt"
},
"rdf_storage": {
"files": "./storage/rdfs",
"extension": ".txt",
"directories": true
},
"spelling_corpus": {
"file": "./storage/spelling/corpus.txt"
},
"license_keys": {
"file": "./storage/license.keys"
},
"nodes": {
"files": "./storage/nodes"
},
"binaries": {
"files": "./storage/binaries"
},
"braintree": {
"file": "./storage/braintree/braintree.xml",
"format": "xml"
},
"preprocessors": {
"file": "./storage/processing/preprocessors.txt"
},
"postprocessors": {
"file": "./storage/processing/postprocessing.txt"
},
"usergroups": {
"files": "./storage/security/usergroups.txt"
},
"learnf": {
"files": "./storage/categories/learnf"
}
}
}
}
},
"logger": {
"type": "logger",
"config": {
"conversation_logger": "conversation"
}
}
},
"voice": {
"license_keys": "$BOT_ROOT/config/license.keys",
"tts": "osx",
"stt": "azhang",
"osx": {
"classname": "talky.clients.voice.tts.osxsay.OSXSayTextToSpeach"
},
"pytts": {
"classname": "talky.clients.voice.tts.pyttssay.PyTTSSayTextToSpeach",
"rate_adjust": 10
},
"azhang": {
"classname": "talky.clients.voice.stt.azhang.AnthonyZhangSpeechToText",
"ambient_adjust": 3,
"service": "ibm"
}
},
"rest": {
"host": "0.0.0.0",
"port": 8989,
"debug": false,
"workers": 4,
"license_keys": "$BOT_ROOT/config/license.keys"
},
"webchat": {
"host": "0.0.0.0",
"port": 8090,
"debug": false,
"license_keys": "$BOT_ROOT/config/license.keys",
"api": "/api/web/v1.0/ask"
},
"twitter": {
"polling": true,
"polling_interval": 49,
"streaming": false,
"use_status": true,
"use_direct_message": true,
"auto_follow": true,
"storage": "file",
"welcome_message": "Thanks for following me, send me a message and I'll try and help",
"license_keys": "file"
},
"xmpp": {
"server": "talk.google.com",
"port": 5222,
"xep_0030": true,
"xep_0004": true,
"xep_0060": true,
"xep_0199": true,
"license_keys": "file"
},
"socket": {
"host": "127.0.0.1",
"port": 9999,
"queue": 5,
"debug": true,
"license_keys": "file"
},
"telegram": {
"unknown_command": "Sorry, that is not a command I have been taught yet!",
"license_keys": "file"
},
"facebook": {
"host": "127.0.0.1",
"port": 5000,
"debug": false,
"license_keys": "file"
},
"twilio": {
"host": "127.0.0.1",
"port": 5000,
"debug": false,
"license_keys": "file"
},
"slack": {
"polling_interval": 1,
"license_keys": "file"
},
"viber": {
"name": "Servusai",
"avatar": "http://viber.com/avatar.jpg",
"license_keys": "file"
},
"line": {
"host": "127.0.0.1",
"port": 8084,
"debug": false,
"license_keys": "file"
},
"kik": {
"bot_name": "servusai",
"webhook": "https://93638f7a.ngrok.io/api/kik/v1.0/ask",
"host": "127.0.0.1",
"port": 8082,
"debug": false,
"license_keys": "file"
},
"bot": {
"brain": "brain",
"initial_question": "Hi, how can I help you today?",
"initial_question_srai": "YINITIALQUESTION",
"default_response": "Sorry, I don't have an answer for that!",
"default_response_srai": "YEMPTY",
"empty_string": "YEMPTY",
"exit_response": "So long, and thanks for the fish!",
"exit_response_srai": "YEXITRESPONSE",
"override_properties": true,
"max_question_recursion": 1000,
"max_question_timeout": 60,
"max_search_depth": 100,
"max_search_timeout": 60,
"spelling": {
"load": true,
"classname": "programy.spelling.norvig.NorvigSpellingChecker",
"check_before": true,
"check_and_retry": true
},
"conversations": {
"max_histories": 100,
"restore_last_topic": false,
"initial_topic": "TOPIC1",
"empty_on_start": false
}
},
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
},
"defaults": {
"default-get": "unknown",
"default-property": "unknown",
"default-map": "unknown",
"learnf-path": "file"
},
"binaries": {
"save_binary": true,
"load_binary": true,
"load_aiml_on_binary_fail": true
},
"braintree": {
"create": true
},
"services": {
"REST": {
"classname": "programy.services.rest.GenericRESTService",
"method": "GET",
"host": "0.0.0.0",
"port": 8080
},
"Pannous": {
"classname": "programy.services.pannous.PannousService",
"url": "http://weannie.pannous.com/api"
}
},
"security": {
"authentication": {
"classname": "programy.security.authenticate.passthrough.BasicPassThroughAuthenticationService",
"denied_srai": "AUTHENTICATION_FAILED"
},
"authorisation": {
"classname": "programy.security.authorise.usergroupsauthorisor.BasicUserGroupAuthorisationService",
"denied_srai": "AUTHORISATION_FAILED",
"usergroups": {
"storage": "file"
}
}
},
"oob": {
"default": {
"classname": "programy.oob.defaults.default.DefaultOutOfBandProcessor"
},
"alarm": {
"classname": "programy.oob.defaults.alarm.AlarmOutOfBandProcessor"
},
"camera": {
"classname": "programy.oob.defaults.camera.CameraOutOfBandProcessor"
},
"clear": {
"classname": "programy.oob.defaults.clear.ClearOutOfBandProcessor"
},
"dial": {
"classname": "programy.oob.defaults.dial.DialOutOfBandProcessor"
},
"dialog": {
"classname": "programy.oob.defaults.dialog.DialogOutOfBandProcessor"
},
"email": {
"classname": "programy.oob.defaults.email.EmailOutOfBandProcessor"
},
"geomap": {
"classname": "programy.oob.defaults.map.MapOutOfBandProcessor"
},
"schedule": {
"classname": "programy.oob.defaults.schedule.ScheduleOutOfBandProcessor"
},
"search": {
"classname": "programy.oob.defaults.search.SearchOutOfBandProcessor"
},
"sms": {
"classname": "programy.oob.defaults.sms.SMSOutOfBandProcessor"
},
"url": {
"classname": "programy.oob.defaults.url.URLOutOfBandProcessor"
},
"wifi": {
"classname": "programy.oob.defaults.wifi.WifiOutOfBandProcessor"
}
},
"dynamic": {
"variables": {
"gettime": "programy.dynamic.variables.datetime.GetTime"
},
"sets": {
"numeric": "programy.dynamic.sets.numeric.IsNumeric",
"roman": "programy.dynamic.sets.roman.IsRomanNumeral"
},
"maps": {
"romantodec": "programy.dynamic.maps.roman.MapRomanToDecimal",
"dectoroman": "programy.dynamic.maps.roman.MapDecimalToRoman"
}
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
self.assert_configuration(configuration)
def test_load_additionals(self):
config = JSONConfigurationFile()
self.assertIsNotNone(config)
configuration = config.load_from_text("""
{
"console": {
"bot": "bot"
},
"bot": {
"brain": "brain"
},
"brain": {
"security": {
"authentication": {
"classname": "programy.security.authenticate.passthrough.PassThroughAuthenticationService",
"denied_srai": "ACCESS_DENIED"
}
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
auth_service = configuration.client_configuration.configurations[0].configurations[0].security.authentication
self.assertIsNotNone(auth_service)
self.assertEqual("ACCESS_DENIED", auth_service.denied_srai)
def test_load_with_subs(self):
subs = Substitutions()
subs.add_substitute("$ALLOW_SYSTEM", True)
config_data = JSONConfigurationFile()
self.assertIsNotNone(config_data)
configuration = config_data.load_from_text("""
{
"brain": {
"overrides": {
"allow_system_aiml": true,
"allow_learn_aiml": true,
"allow_learnf_aiml": true
}
}
}
""", ConsoleConfiguration(), ".")
self.assertIsNotNone(configuration)
section = config_data.get_section("brainx")
self.assertIsNone(section)
section = config_data.get_section("brain")
self.assertIsNotNone(section)
child_section = config_data.get_section("overrides", section)
self.assertIsNotNone(child_section)
self.assertEqual(True, config_data.get_option(child_section, "allow_system_aiml"))
self.assertEqual(True, config_data.get_bool_option(child_section, "allow_system_aiml"))
self.assertEqual(False, config_data.get_bool_option(child_section, "other_value"))
| [
"os.path.dirname",
"programy.utils.substitutions.substitues.Substitutions",
"programy.clients.events.console.config.ConsoleConfiguration",
"programy.config.file.json_file.JSONConfigurationFile"
] | [((1485, 1508), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (1506, 1508), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((3072, 3095), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (3093, 3095), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((3426, 3449), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (3447, 3449), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((3909, 3932), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (3930, 3932), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((4460, 4483), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (4481, 4483), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((14820, 14843), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (14841, 14843), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((15788, 15803), 'programy.utils.substitutions.substitues.Substitutions', 'Substitutions', ([], {}), '()\n', (15801, 15803), False, 'from programy.utils.substitutions.substitues import Substitutions\n'), ((15878, 15901), 'programy.config.file.json_file.JSONConfigurationFile', 'JSONConfigurationFile', ([], {}), '()\n', (15899, 15901), False, 'from programy.config.file.json_file import JSONConfigurationFile\n'), ((1795, 1817), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (1815, 1817), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3234, 3256), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (3254, 3256), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3657, 3679), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (3677, 3679), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((4231, 4253), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (4251, 4253), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((14642, 14664), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (14662, 14664), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((15432, 15454), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (15452, 15454), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((16265, 16287), 'programy.clients.events.console.config.ConsoleConfiguration', 'ConsoleConfiguration', ([], {}), '()\n', (16285, 16287), False, 'from programy.clients.events.console.config import ConsoleConfiguration\n'), ((3179, 3204), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3194, 3204), False, 'import os\n')] |
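A minimal usage sketch of the section/option lookups exercised by the tests above, assuming the same programy imports listed in this row; the JSON payload is illustrative only.

from programy.clients.events.console.config import ConsoleConfiguration
from programy.config.file.json_file import JSONConfigurationFile

config_data = JSONConfigurationFile()
config_data.load_from_text("""
{
    "brain": {
        "overrides": {
            "allow_system_aiml": true
        }
    }
}
""", ConsoleConfiguration(), ".")

brain_section = config_data.get_section("brain")
overrides = config_data.get_section("overrides", brain_section)
print(config_data.get_bool_option(overrides, "allow_system_aiml"))  # True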
import torch
import lib.modeling.resnet as resnet
import lib.modeling.semseg_heads as snet
import torch.nn as nn
import torch.optim as optim
import utils.resnet_weights_helper as resnet_utils
from torch.autograd import Variable
from roi_data.loader import RoiDataLoader, MinibatchSampler, collate_minibatch, collate_minibatch_semseg
from datasets.roidb import combined_roidb_for_training, combined_roidb_for_training_semseg
import os
import numpy as np
import nn as mynn
import cv2
from modeling.model_builder_3DSD import Generalized_3DSD
from modeling.model_builder_PSP3D import DispSeg
from core.config import cfg, cfg_from_file, cfg_from_list, assert_and_infer_cfg
#load net
class load_net(nn.Module):
def __init__(self):
super(load_net, self).__init__()
build=snet.ModelBuilder()
fc_dim = 2048
self.encoder = build.build_encoder(
arch= 'resnet50_dilated8',
fc_dim=fc_dim)
self.decoder = build.build_decoder(
arch = 'ppm_bilinear',
num_class=19,
fc_dim=fc_dim,
use_softmax=False)
def _init_modules(self):
resnet_utils.load_pretrained_imagenet_weights(self)
def forward(self, data):
pred=self.decoder(self.encoder(data, return_feature_maps=True))
pred = nn.functional.interpolate(
pred, size=[128,128],
mode='bilinear', align_corners=False)
pred = nn.functional.log_softmax(pred, dim=1)
return pred
def dataloader(bs, gpus):
inputs = {}
inputs['data'] = Variable(torch.randn(2*bs, 3, 128, 128)).to('cuda')
inputs['semseg_label_0'] = Variable(torch.LongTensor(
np.random.randint(0, 19, (bs, 128//8, 128//8), dtype=np.long))).to('cuda')
inputs['disp_label_0'] = Variable(torch.rand(bs, 128//8, 128//8)).to('cuda')
inputs['disp_scans'] = Variable(torch.arange(0,
cfg.DISP.MAX_DISPLACEMENT).float().view(1,cfg.DISP.MAX_DISPLACEMENT,1,1).repeat(bs,1,1,1)).to('cuda')
inputs['semseg_scans'] = Variable(torch.arange(0,
cfg.MODEL.NUM_CLASSES).float().view(1, cfg.MODEL.NUM_CLASSES, 1, 1).repeat(bs,1,1,1)).to('cuda')
return inputs
cfg_file = 'e2e_segdisp-R-50_3Dpool_1x.yaml'
cfg_from_file(cfg_file)
print (cfg.SEM)
print (cfg.DISP)
#cfg_from_list(cfg_file)
#assert_and_infer_cfg()
devices_ids=[5]
os.environ["CUDA_VISIBLE_DEVICES"] = ','.join([str(ids) for ids in devices_ids])
torch.backends.cudnn.benchmark=True
#torch.cuda.set_device(3)
len_gpus = len(devices_ids)
batch_size = 2 * len_gpus
#net = mynn.DataParallel(load_net().to('cuda'), minibatch=True)
net = mynn.DataParallel(DispSeg().to('cuda'), minibatch=True)
optimizer = optim.SGD(net.parameters(), lr=0.000875, momentum=0.9)
criterion = nn.NLLLoss(ignore_index=255)
#dataloader= dataloader(batch_size, len_gpus)
for i in range(10):
#for i, inputs in zip(range(1000), dataloader):
inputs = dataloader(batch_size, len_gpus)
for key in inputs:
inputs[key] = torch.chunk(inputs[key], chunks=len_gpus, dim=0)
    optimizer.zero_grad()
    loss = net(**inputs)
    # reduce the per-GPU loss tensors to a scalar and back-propagate before stepping
    total_loss = sum(l.mean() for l in loss['losses'].values())
    total_loss.backward()
    optimizer.step()
for k in loss['losses'].keys():
print (loss['losses'][k].item())
| [
"core.config.cfg_from_file",
"numpy.random.randint",
"torch.chunk",
"torch.nn.NLLLoss",
"modeling.model_builder_PSP3D.DispSeg",
"torch.nn.functional.interpolate",
"torch.nn.functional.log_softmax",
"utils.resnet_weights_helper.load_pretrained_imagenet_weights",
"torch.arange",
"lib.modeling.semseg_heads.ModelBuilder",
"torch.rand",
"torch.randn"
] | [((2264, 2287), 'core.config.cfg_from_file', 'cfg_from_file', (['cfg_file'], {}), '(cfg_file)\n', (2277, 2287), False, 'from core.config import cfg, cfg_from_file, cfg_from_list, assert_and_infer_cfg\n'), ((2788, 2816), 'torch.nn.NLLLoss', 'nn.NLLLoss', ([], {'ignore_index': '(255)'}), '(ignore_index=255)\n', (2798, 2816), True, 'import torch.nn as nn\n'), ((786, 805), 'lib.modeling.semseg_heads.ModelBuilder', 'snet.ModelBuilder', ([], {}), '()\n', (803, 805), True, 'import lib.modeling.semseg_heads as snet\n'), ((1162, 1213), 'utils.resnet_weights_helper.load_pretrained_imagenet_weights', 'resnet_utils.load_pretrained_imagenet_weights', (['self'], {}), '(self)\n', (1207, 1213), True, 'import utils.resnet_weights_helper as resnet_utils\n'), ((1340, 1430), 'torch.nn.functional.interpolate', 'nn.functional.interpolate', (['pred'], {'size': '[128, 128]', 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(pred, size=[128, 128], mode='bilinear',\n align_corners=False)\n", (1365, 1430), True, 'import torch.nn as nn\n'), ((1474, 1512), 'torch.nn.functional.log_softmax', 'nn.functional.log_softmax', (['pred'], {'dim': '(1)'}), '(pred, dim=1)\n', (1499, 1512), True, 'import torch.nn as nn\n'), ((3022, 3070), 'torch.chunk', 'torch.chunk', (['inputs[key]'], {'chunks': 'len_gpus', 'dim': '(0)'}), '(inputs[key], chunks=len_gpus, dim=0)\n', (3033, 3070), False, 'import torch\n'), ((2671, 2680), 'modeling.model_builder_PSP3D.DispSeg', 'DispSeg', ([], {}), '()\n', (2678, 2680), False, 'from modeling.model_builder_PSP3D import DispSeg\n'), ((1605, 1637), 'torch.randn', 'torch.randn', (['(2 * bs)', '(3)', '(128)', '(128)'], {}), '(2 * bs, 3, 128, 128)\n', (1616, 1637), False, 'import torch\n'), ((1827, 1861), 'torch.rand', 'torch.rand', (['bs', '(128 // 8)', '(128 // 8)'], {}), '(bs, 128 // 8, 128 // 8)\n', (1837, 1861), False, 'import torch\n'), ((1714, 1779), 'numpy.random.randint', 'np.random.randint', (['(0)', '(19)', '(bs, 128 // 8, 128 // 8)'], {'dtype': 'np.long'}), '(0, 19, (bs, 128 // 8, 128 // 8), dtype=np.long)\n', (1731, 1779), True, 'import numpy as np\n'), ((1907, 1949), 'torch.arange', 'torch.arange', (['(0)', 'cfg.DISP.MAX_DISPLACEMENT'], {}), '(0, cfg.DISP.MAX_DISPLACEMENT)\n', (1919, 1949), False, 'import torch\n'), ((2075, 2113), 'torch.arange', 'torch.arange', (['(0)', 'cfg.MODEL.NUM_CLASSES'], {}), '(0, cfg.MODEL.NUM_CLASSES)\n', (2087, 2113), False, 'import torch\n')] |
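A compact, self-contained sketch of the zero_grad/forward/backward/step ordering that the training loop above relies on; the linear model, batch and loss used here are illustrative stand-ins, not part of the original script.

import torch
import torch.nn as nn
import torch.optim as optim

model = nn.Linear(8, 2)
optimizer = optim.SGD(model.parameters(), lr=0.000875, momentum=0.9)
criterion = nn.NLLLoss()

batch = torch.randn(4, 8)
targets = torch.randint(0, 2, (4,))

optimizer.zero_grad()                                   # clear gradients from the previous step
log_probs = nn.functional.log_softmax(model(batch), dim=1)
loss = criterion(log_probs, targets)
loss.backward()                                         # populate .grad on the parameters
optimizer.step()                                        # apply the SGD update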
import platform
import shutil
import tempfile
import warnings
from pathlib import Path
import requests
from tqdm import tqdm
DOCKER_VERSION = "20.10.5"
BUILDX_VERSION = "0.5.1"
CACHE_DIR = Path.home() / ".cache" / "python-on-whales"
TEMPLATE_CLI = (
"https://download.docker.com/{os}/static/stable/{arch}/docker-{version}.tgz"
)
WINDOWS_CLI_URL = "https://github.com/StefanScherer/docker-cli-builder/releases/download/{version}/docker.exe"
def get_docker_binary_path_in_cache():
return CACHE_DIR / "docker-cli" / DOCKER_VERSION / "docker"
def get_docker_cli_url():
user_os = get_user_os()
if user_os == "windows":
return WINDOWS_CLI_URL.format(version=DOCKER_VERSION)
arch = get_arch_for_docker_cli_url()
return TEMPLATE_CLI.format(os=user_os, arch=arch, version=DOCKER_VERSION)
def download_docker_cli():
file_to_download = get_docker_cli_url()
extension = file_to_download.split(".")[-1]
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_dir = Path(tmp_dir)
downloaded_file_path = tmp_dir / f"docker.{extension}"
download_from_url(file_to_download, downloaded_file_path)
docker_binary_path = get_docker_binary_path_in_cache()
docker_binary_path.parent.mkdir(exist_ok=True, parents=True)
if extension == "tgz":
extract_dir = tmp_dir / "extracted"
shutil.unpack_archive(str(downloaded_file_path), str(extract_dir))
shutil.move(extract_dir / "docker" / "docker", docker_binary_path)
elif extension == "exe":
shutil.move(downloaded_file_path, docker_binary_path)
warnings.warn(
f"The docker client binary file {DOCKER_VERSION} was downloaded and put "
f"in `{docker_binary_path.absolute()}`. \n"
f"You can feel free to remove it if you wish, Python on whales will download "
f"it again if needed."
)
def download_from_url(url, dst):
try:
_download_from_url(url, dst)
except Exception as e:
raise ConnectionError(f"Error while downloading {url}") from e
def _download_from_url(url, dst):
# Streaming, so we can iterate over the response.
response = requests.get(url, stream=True)
total_size_in_bytes = int(response.headers.get("content-length", 0))
block_size = 1024
progress_bar = tqdm(total=total_size_in_bytes, unit="iB", unit_scale=True)
with open(dst, "wb") as file:
for data in response.iter_content(block_size):
progress_bar.update(len(data))
file.write(data)
progress_bar.close()
if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
raise ConnectionError(
f"Total size should be {total_size_in_bytes}, downloaded {progress_bar.n}"
)
def get_user_os():
user_os = platform.system()
if user_os == "Linux":
return "linux"
elif user_os == "Darwin":
return "mac"
elif user_os == "Windows":
return "windows"
else:
raise NotImplementedError(
f"Unknown OS: {user_os}, cannot determine which Docker CLI binary file to "
f"download. \n"
f"Please open an issue at \n"
f"https://github.com/gabrieldemarmiesse/python-on-whales/issues \n"
f"and in the meantime, install Docker manually to make python-on-whales "
f"work."
)
def get_arch_for_docker_cli_url():
arch = platform.architecture()[0]
# I don't know the exact list of possible architectures,
# so if a user reports a NotImplementedError, we can easily add
# his/her platform here.
arch_mapping = {
"NotImplementedError": "aarch64",
"NotImplementedError2": "armel",
"NotImplementedError3": "armhf",
"NotImplementedError4": "ppc64le",
"NotImplementedError5": "s390x",
"64bit": "x86_64",
}
try:
return arch_mapping[arch]
except KeyError:
raise NotImplementedError(
f"The architecture detected on your system is `{arch}`, the list of "
f"available architectures is {list(arch_mapping.values())}. \n"
f"Please open an issue at \n"
f"https://github.com/gabrieldemarmiesse/python-on-whales/issues "
f"and make sure to copy past this error message. \n"
f"In the meantime, install Docker manually on your system."
)
| [
"tempfile.TemporaryDirectory",
"pathlib.Path",
"shutil.move",
"pathlib.Path.home",
"tqdm.tqdm",
"requests.get",
"platform.system",
"platform.architecture"
] | [((2185, 2215), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (2197, 2215), False, 'import requests\n'), ((2330, 2389), 'tqdm.tqdm', 'tqdm', ([], {'total': 'total_size_in_bytes', 'unit': '"""iB"""', 'unit_scale': '(True)'}), "(total=total_size_in_bytes, unit='iB', unit_scale=True)\n", (2334, 2389), False, 'from tqdm import tqdm\n'), ((2814, 2831), 'platform.system', 'platform.system', ([], {}), '()\n', (2829, 2831), False, 'import platform\n'), ((192, 203), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (201, 203), False, 'from pathlib import Path\n'), ((950, 979), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (977, 979), False, 'import tempfile\n'), ((1010, 1023), 'pathlib.Path', 'Path', (['tmp_dir'], {}), '(tmp_dir)\n', (1014, 1023), False, 'from pathlib import Path\n'), ((3437, 3460), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (3458, 3460), False, 'import platform\n'), ((1457, 1523), 'shutil.move', 'shutil.move', (["(extract_dir / 'docker' / 'docker')", 'docker_binary_path'], {}), "(extract_dir / 'docker' / 'docker', docker_binary_path)\n", (1468, 1523), False, 'import shutil\n'), ((1569, 1622), 'shutil.move', 'shutil.move', (['downloaded_file_path', 'docker_binary_path'], {}), '(downloaded_file_path, docker_binary_path)\n', (1580, 1622), False, 'import shutil\n')] |
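A short usage sketch for the helpers above, assuming it runs in the same module; it downloads the pinned Docker CLI only when the cached binary is missing.

docker_binary = get_docker_binary_path_in_cache()
if not docker_binary.exists():
    download_docker_cli()  # fetches and unpacks the CLI into ~/.cache/python-on-whales
print(f"Docker CLI available at {docker_binary}")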
import boto3
import json
import os
import logging
from contextlib import closing
from boto3.dynamodb.conditions import Key, Attr
from botocore.exceptions import ClientError
from random import shuffle
import time
import pyqrcode
import png
__BUCKET_NAME__ = "project-cerebro"
dynamo = boto3.client('dynamodb')
logger = None
print("In initialize fn ...")
logger = logging.getLogger()
if int(os.environ['DEBUG_MODE']):
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.info("Initialize: Just a test")
logger.debug("Initialize: debug a test")
def create_presigned_url(bucket_name, object_name, expiration=3600):
"""Generate a presigned URL to share an S3 object
:param bucket_name: string
:param object_name: string
:param expiration: Time in seconds for the presigned URL to remain valid
:return: Presigned URL as string. If error, returns None.
"""
# Generate a presigned URL for the S3 object
s3_client = boto3.client('s3')
try:
response = s3_client.generate_presigned_url('get_object',
Params={'Bucket': bucket_name,
'Key': object_name},
ExpiresIn=expiration)
except ClientError as e:
logging.error(e)
return None
# The response contains the presigned URL
return response
def respond(err, res=None):
return {
'statusCode': '400' if err else '200',
        'body': str(err) if err else json.dumps(res),
'headers': {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*'
},
}
# input parameters are:
# 1. image ID
# output parameters are:
# 1. generated QRCode
# workflow:
# 1. first get the image_id
# 2. confirm this exists in s3
# 3. generate a presigned URL with this s3 path
# 4. create a QR Code image with this url embedded
# 5. return the QR code stored in S3 temp.
def main(event, context):
logger.info("In main ...")
start_time = int(round(time.time() * 1000))
body_params = json.loads(event["body"])
logger.debug("Body params:")
logger.debug(body_params)
response_data = {}
# 1. get the image_id
if "image_id" in body_params:
image_id = body_params["image_id"]
# prefix and check for existence
s3_prefix = "production/%s" % image_id
# 2. check for the object in s3
s3 = boto3.resource('s3')
s3_object = s3.Object(__BUCKET_NAME__, s3_prefix)
        # Object.load() issues a HEAD request, raising ClientError if the key is
        # missing, and returns None; capture the metadata attributes explicitly.
        s3_object.load()
        obj_metadata = {"content_length": s3_object.content_length}
        logger.info("metadata found:")
        logger.info(obj_metadata)
        if obj_metadata:
response_data["s3_image"] = s3_prefix
# 3. generate the presigned url
presigned_url = create_presigned_url(bucket_name = __BUCKET_NAME__, object_name=s3_prefix, expiration=5*60)
logger.info("generated the presigned URL:")
logger.info(presigned_url)
if presigned_url:
response_data["presigned_url"] = presigned_url
logger.info("assigned presigned url")
# 4. generate the qrcode, convert to png
url = pyqrcode.create(presigned_url)
url.png('/tmp/code.png', scale=5)
logger.info("Created a png file by now!")
# 5. save to s3
target_file='/tmp/code.png'
qrcode_key = "qrcodes/current_qrcode.png"
logger.info("Now trying to put s3 object ...")
# Create an S3 client
s3 = boto3.client('s3')
response = s3.put_object(
Body=open(target_file, 'rb'),
Bucket=__BUCKET_NAME__,
Key=qrcode_key)
logger.info("Now trying to put s3 object - completed!")
response_data["qrcode_key"] = qrcode_key
else:
response_data["result"] = "Failure"
return respond(None, response_data)
end_time = int(round(time.time() * 1000))
logger.info("Time Taken: %f" % (end_time - start_time))
logger.info("Done with main!")
response_data["result"] = "Success"
response_data["time_taken"] = str(end_time - start_time)
return respond(None, response_data)
def lambda_handler(event, context):
return main(event, context)
| [
"logging.getLogger",
"json.loads",
"boto3.client",
"json.dumps",
"boto3.resource",
"pyqrcode.create",
"time.time",
"logging.error"
] | [((291, 315), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {}), "('dynamodb')\n", (303, 315), False, 'import boto3\n'), ((371, 390), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (388, 390), False, 'import logging\n'), ((989, 1007), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1001, 1007), False, 'import boto3\n'), ((2172, 2197), 'json.loads', 'json.loads', (["event['body']"], {}), "(event['body'])\n", (2182, 2197), False, 'import json\n'), ((2530, 2550), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (2544, 2550), False, 'import boto3\n'), ((1358, 1374), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (1371, 1374), False, 'import logging\n'), ((1591, 1606), 'json.dumps', 'json.dumps', (['res'], {}), '(res)\n', (1601, 1606), False, 'import json\n'), ((3298, 3328), 'pyqrcode.create', 'pyqrcode.create', (['presigned_url'], {}), '(presigned_url)\n', (3313, 3328), False, 'import pyqrcode\n'), ((3663, 3681), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (3675, 3681), False, 'import boto3\n'), ((2132, 2143), 'time.time', 'time.time', ([], {}), '()\n', (2141, 2143), False, 'import time\n'), ((4087, 4098), 'time.time', 'time.time', ([], {}), '()\n', (4096, 4098), False, 'import time\n')] |
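A local-invocation sketch for the handler above; the image_id value is an illustrative stand-in, and AWS credentials plus an existing object under production/ in the project-cerebro bucket are assumed.

import json

test_event = {"body": json.dumps({"image_id": "sample.jpg"})}
result = lambda_handler(test_event, None)
print(result["statusCode"], result["body"])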
from typing import NamedTuple
from django.contrib.auth.models import AbstractUser
from django.db import models
from msg.models import Msg
class User(AbstractUser):
phone_number: 'str' = models.CharField(max_length=255,
null=True, blank=True)
class HelloSMSMessage(NamedTuple):
phone_number: 'str'
username: 'str'
def send_hello_sms(self):
if not self.phone_number:
raise ValueError('User has to have a phone number'
'to send a sms message.')
hello = self.HelloSMSMessage(
username=self.username,
phone_number=self.phone_number,
)
Msg.new(hello, dispatch_now=True)
| [
"msg.models.Msg.new",
"django.db.models.CharField"
] | [((194, 249), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)', 'blank': '(True)'}), '(max_length=255, null=True, blank=True)\n', (210, 249), False, 'from django.db import models\n'), ((704, 737), 'msg.models.Msg.new', 'Msg.new', (['hello'], {'dispatch_now': '(True)'}), '(hello, dispatch_now=True)\n', (711, 737), False, 'from msg.models import Msg\n')] |
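A brief usage sketch for the model above; it assumes a configured Django project and a working Msg backend, and the username and phone number are illustrative.

user = User(username="alice", phone_number="+15551234567")
user.send_hello_sms()  # raises ValueError when phone_number is empty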