max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
---|---|---|---|---|---|---|
third_party/logging.py | sweeneyb/iot-core-micropython | 50 | 9600 | # MIT License
#
# Copyright (c) 2019 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
CRITICAL = 50
ERROR = 40
WARNING = 30
INFO = 20
DEBUG = 10
NOTSET = 0
_level_dict = {
CRITICAL: "CRIT",
ERROR: "ERROR",
WARNING: "WARN",
INFO: "INFO",
DEBUG: "DEBUG",
}
_stream = sys.stderr
class Logger:
level = NOTSET
def __init__(self, name):
self.name = name
def _level_str(self, level):
l = _level_dict.get(level)
if l is not None:
return l
return "LVL%s" % level
def setLevel(self, level):
self.level = level
def isEnabledFor(self, level):
return level >= (self.level or _level)
def log(self, level, msg, *args):
if level >= (self.level or _level):
_stream.write("%s:%s:" % (self._level_str(level), self.name))
if not args:
print(msg, file=_stream)
else:
print(msg % args, file=_stream)
def debug(self, msg, *args):
self.log(DEBUG, msg, *args)
def info(self, msg, *args):
self.log(INFO, msg, *args)
def warning(self, msg, *args):
self.log(WARNING, msg, *args)
def error(self, msg, *args):
self.log(ERROR, msg, *args)
def critical(self, msg, *args):
self.log(CRITICAL, msg, *args)
def exc(self, e, msg, *args):
self.log(ERROR, msg, *args)
sys.print_exception(e, _stream)
def exception(self, msg, *args):
self.exc(sys.exc_info()[1], msg, *args)
_level = INFO
_loggers = {}
def getLogger(name):
if name in _loggers:
return _loggers[name]
l = Logger(name)
_loggers[name] = l
return l
def info(msg, *args):
getLogger(None).info(msg, *args)
def debug(msg, *args):
getLogger(None).debug(msg, *args)
def basicConfig(level=INFO, filename=None, stream=None, format=None):
global _level, _stream
_level = level
if stream:
_stream = stream
if filename is not None:
print("logging.basicConfig: filename arg is not supported")
if format is not None:
print("logging.basicConfig: format arg is not supported")
| 1.992188 | 2 |
assessments/migrations/0003_auto_20210212_1943.py | acounsel/django_msat | 0 | 9601 | # Generated by Django 3.1.6 on 2021-02-12 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('assessments', '0002_auto_20210212_1904'),
]
operations = [
migrations.AlterField(
model_name='country',
name='region',
field=models.CharField(blank=True, choices=[('america', 'Americas'), ('europe', 'Europe'), ('africa', 'Africa'), ('asia', 'Asia'), ('oceania', 'Oceania')], max_length=100, null=True),
),
]
| 1.632813 | 2 |
noxfile.py | sethmlarson/workplace-search-python | 5 | 9602 | <gh_stars>1-10
import nox
SOURCE_FILES = (
"setup.py",
"noxfile.py",
"elastic_workplace_search/",
"tests/",
)
@nox.session(python=["2.7", "3.4", "3.5", "3.6", "3.7", "3.8"])
def test(session):
session.install(".")
session.install("-r", "dev-requirements.txt")
session.run("pytest", "--record-mode=none", "tests/")
@nox.session()
def blacken(session):
session.install("black")
session.run("black", *SOURCE_FILES)
lint(session)
@nox.session()
def lint(session):
session.install("flake8", "black")
session.run("black", "--check", *SOURCE_FILES)
session.run("flake8", "--select=E,W,F", "--max-line-length=88", *SOURCE_FILES)
| 1.742188 | 2 |
komodo2_rl/src/environments/Spawner.py | osheraz/komodo | 5 | 9603 | <reponame>osheraz/komodo
# !/usr/bin/env python
import rospy
import numpy as np
from gazebo_msgs.srv import SpawnModel, SpawnModelRequest, SpawnModelResponse
from copy import deepcopy
from tf.transformations import quaternion_from_euler
sdf_cube = """<?xml version="1.0" ?>
<sdf version="1.4">
<model name="MODELNAME">
<static>0</static>
<link name="link">
<inertial>
<mass>1.0</mass>
<inertia>
<ixx>0.01</ixx>
<ixy>0.0</ixy>
<ixz>0.0</ixz>
<iyy>0.01</iyy>
<iyz>0.0</iyz>
<izz>0.01</izz>
</inertia>
</inertial>
<collision name="stairs_collision0">
<pose>0 0 0 0 0 0</pose>
<geometry>
<box>
<size>SIZEXYZ</size>
</box>
</geometry>
<surface>
<bounce />
<friction>
<ode>
<mu>1.0</mu>
<mu2>1.0</mu2>
</ode>
</friction>
<contact>
<ode>
<kp>10000000.0</kp>
<kd>1.0</kd>
<min_depth>0.0</min_depth>
<max_vel>0.0</max_vel>
</ode>
</contact>
</surface>
</collision>
<visual name="stairs_visual0">
<pose>0 0 0 0 0 0</pose>
<geometry>
<box>
<size>SIZEXYZ</size>
</box>
</geometry>
<material>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Wood</name>
</script>
</material>
</visual>
<velocity_decay>
<linear>0.000000</linear>
<angular>0.000000</angular>
</velocity_decay>
<self_collide>0</self_collide>
<kinematic>0</kinematic>
<gravity>1</gravity>
</link>
</model>
</sdf>
"""
sdf_sand = """<?xml version='1.0'?>
<sdf version='1.6'>
<model name="MODELNAME">
<link name='link'>
<pose frame=''>0 0 0.01 0 0 0 </pose>
<inertial>
<mass>1</mass>
<inertia>
<ixx>0.1</ixx>
<ixy>0</ixy>
<ixz>0</ixz>
<iyy>0.1</iyy>
<iyz>0</iyz>
<izz>0.1</izz>
</inertia>
</inertial>
<visual name='visual'>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<material>
<lighting>1</lighting>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Yellow</name>
</script>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<transparency>0</transparency>
<cast_shadows>1</cast_shadows>
</visual>
<collision name='collision'>
<laser_retro>0</laser_retro>
<max_contacts>10</max_contacts>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<surface>
<friction>
<ode>
<mu>1</mu>
<mu2>1</mu2>
<fdir1>0 0 0</fdir1>
<slip1>0</slip1>
<slip2>0</slip2>
</ode>
<torsional>
<coefficient>1</coefficient>
<patch_radius>0</patch_radius>
<surface_radius>0</surface_radius>
<use_patch_radius>1</use_patch_radius>
<ode>
<slip>0</slip>
</ode>
</torsional>
</friction>
<bounce>
<restitution_coefficient>0.2</restitution_coefficient>
<threshold>1.01</threshold>
</bounce>
<contact>
<collide_without_contact>0</collide_without_contact>
<collide_without_contact_bitmask>1</collide_without_contact_bitmask>
<collide_bitmask>1</collide_bitmask>
<ode>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
<max_vel>0.01</max_vel>
<min_depth>0</min_depth>
</ode>
<bullet>
<split_impulse>1</split_impulse>
<split_impulse_penetration_threshold>-0.01</split_impulse_penetration_threshold>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
</bullet>
</contact>
</surface>
</collision>
</link>
<static>0</static>
<allow_auto_disable>1</allow_auto_disable>
</model>
</sdf>
"""
sdf_sand_box = """<sdf version='1.6'>
<model name='sand_box_osher'>
<link name='sand_box_osher'>
<pose frame=''>0 0 0 0 -0 0</pose>
<inertial>
<pose frame=''>-0.35285 -0.305 0.11027 0 -0 0</pose>
<mass>2000.892</mass>
<inertia>
<ixx>130.2204</ixx>
<ixy>-220.5538e-15</ixy>
<ixz>-4.85191</ixz>
<iyy>276.363</iyy>
<iyz>-77.9029e-15</iyz>
<izz>135.62</izz>
</inertia>
</inertial>
<collision name='sand_box_osher_collision'>
<pose frame=''>0 0 0 1.5708 -0 0</pose>
<geometry>
<mesh>
<scale>1 0.8 1</scale>
<uri>model://sand_box_osher/meshes/sand_box_osher.STL</uri>
</mesh>
</geometry>
</collision>
<visual name='sand_box_osher_visual'>
<pose frame=''>0 0 0 1.5708 -0 0</pose>
<geometry>
<mesh>
<scale>1 0.8 1</scale>
<uri>model://sand_box_osher/meshes/sand_box_osher.STL</uri>
</mesh>
</geometry>
<material>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<transparency>0.5</transparency>
</visual>
</link>
</model>
</sdf>
"""
sdf_unit_sphere = """<?xml version='1.0'?>
<sdf version='1.6'>
<model name="MODELNAME">
<link name='link'>
<pose frame=''>0 0 0 0 -0 0</pose>
<inertial>
<mass>0.1</mass>
<inertia>
<ixx>0.0000490147</ixx>
<ixy>0</ixy>
<ixz>0</ixz>
<iyy>0.000049147</iyy>
<iyz>0</iyz>
<izz>0.000049147</izz>
</inertia>
<pose frame=''>0 0 0 0 -0 0</pose>
</inertial>
<self_collide>0</self_collide>
<kinematic>0</kinematic>
<visual name='visual'>
<geometry>
<sphere>
<radius>RADIUS</radius>
</sphere>
</geometry>
<material>
<lighting>1</lighting>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Yellow</name>
</script>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<pose frame=''>0 0 0 0 -0 0</pose>
<transparency>0</transparency>
<cast_shadows>1</cast_shadows>
</visual>
<collision name='collision'>
<laser_retro>0</laser_retro>
<max_contacts>10</max_contacts>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<sphere>
<radius>RADIUS</radius>
</sphere>
</geometry>
<surface>
<friction>
<ode>
<mu>1</mu>
<mu2>1</mu2>
<fdir1>0 0 0</fdir1>
<slip1>0</slip1>
<slip2>0</slip2>
</ode>
<torsional>
<coefficient>1</coefficient>
<patch_radius>0</patch_radius>
<surface_radius>0</surface_radius>
<use_patch_radius>1</use_patch_radius>
<ode>
<slip>0</slip>
</ode>
</torsional>
</friction>
<bounce>
<restitution_coefficient>0</restitution_coefficient>
<threshold>1e+06</threshold>
</bounce>
<contact>
<collide_without_contact>0</collide_without_contact>
<collide_without_contact_bitmask>1</collide_without_contact_bitmask>
<collide_bitmask>1</collide_bitmask>
<ode>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
<max_vel>0.01</max_vel>
<min_depth>0</min_depth>
</ode>
<bullet>
<split_impulse>1</split_impulse>
<split_impulse_penetration_threshold>-0.01</split_impulse_penetration_threshold>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
</bullet>
</contact>
</surface>
</collision>
</link>
<static>0</static>
<allow_auto_disable>1</allow_auto_disable>
</model>
</sdf>
"""
sdf_sand2 = """<?xml version='1.0'?>
<sdf version='1.6'>
<model name="MODELNAME">
<link name='link'>
<pose frame=''>0 0 0.01 0 0 0 </pose>
<inertial>
<mass>1</mass>
<inertia>
<ixx>0.1</ixx>
<ixy>0</ixy>
<ixz>0</ixz>
<iyy>0.1</iyy>
<iyz>0</iyz>
<izz>0.1</izz>
</inertia>
</inertial>
<visual name='visual'>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<material>
<lighting>1</lighting>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Yellow</name>
</script>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<transparency>0</transparency>
<cast_shadows>1</cast_shadows>
</visual>
<collision name='collision'>
<laser_retro>0</laser_retro>
<max_contacts>10</max_contacts>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<surface>
<friction>
<ode>
<mu>1</mu>
<mu2>1</mu2>
<fdir1>0 0 0</fdir1>
<slip1>0</slip1>
<slip2>0</slip2>
</ode>
<torsional>
<coefficient>1</coefficient>
<patch_radius>0</patch_radius>
<surface_radius>0</surface_radius>
<use_patch_radius>1</use_patch_radius>
<ode>
<slip>0</slip>
</ode>
</torsional>
</friction>
<bounce>
<restitution_coefficient>0</restitution_coefficient>
<threshold>1e+06</threshold>
</bounce>
<contact>
<collide_without_contact>0</collide_without_contact>
<collide_without_contact_bitmask>1</collide_without_contact_bitmask>
<collide_bitmask>1</collide_bitmask>
<ode>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
<max_vel>0.01</max_vel>
<min_depth>0</min_depth>
</ode>
<bullet>
<split_impulse>1</split_impulse>
<split_impulse_penetration_threshold>-0.01</split_impulse_penetration_threshold>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
</bullet>
</contact>
</surface>
</collision>
</link>
<static>0</static>
<allow_auto_disable>1</allow_auto_disable>
</model>
</sdf>
"""
class Spawner:
def __init__(self):
self.px = 0
self.py = 0
self.pz = 0
self.rr = 0
self.rp = 0
self.rz = 0
self.sx = 0
self.sy = 0
self.sz = 0
def create_cube_request(self,modelname, px, py, pz, rr, rp, ry, sx, sy, sz):
"""Create a SpawnModelRequest with the parameters of the cube given.
modelname: name of the model for gazebo
px py pz: position of the cube (and its collision cube)
rr rp ry: rotation (roll, pitch, yaw) of the model
sx sy sz: size of the cube"""
cube = deepcopy(sdf_sand2)
# Replace size of model
size_str = str(round(sx, 3)) + " " + \
str(round(sy, 3)) + " " + str(round(sz, 3))
cube = cube.replace('SIZEXYZ', size_str)
# Replace modelname
cube = cube.replace('MODELNAME', str(modelname))
req = SpawnModelRequest()
req.model_name = modelname
req.model_xml = cube
req.initial_pose.position.x = px
req.initial_pose.position.y = py
req.initial_pose.position.z = pz
q = quaternion_from_euler(rr, rp, ry)
req.initial_pose.orientation.x = q[0]
req.initial_pose.orientation.y = q[1]
req.initial_pose.orientation.z = q[2]
req.initial_pose.orientation.w = q[3]
return req
def create_sphere_request(self,modelname, px, py, pz, rr, rp, ry, r):
"""Create a SpawnModelRequest with the parameters of the cube given.
modelname: name of the model for gazebo
px py pz: position of the cube (and it's collision cube)
rr rp ry: rotation (roll, pitch, yaw) of the model
sx sy sz: size of the cube"""
cube = deepcopy(sdf_unit_sphere)
# Replace size of model
cube = cube.replace('RADIUS', str(r))
# Replace modelname
cube = cube.replace('MODELNAME', str(modelname))
req = SpawnModelRequest()
req.model_name = modelname
req.model_xml = cube
req.initial_pose.position.x = px
req.initial_pose.position.y = py
req.initial_pose.position.z = pz
q = quaternion_from_euler(rr, rp, ry)
req.initial_pose.orientation.x = q[0]
req.initial_pose.orientation.y = q[1]
req.initial_pose.orientation.z = q[2]
req.initial_pose.orientation.w = q[3]
return req
def create_box_request(self,modelname, px, py, pz, rr, rp, ry):
"""Create a SpawnModelRequest with the parameters of the cube given.
modelname: name of the model for gazebo
px py pz: position of the cube (and it's collision cube)
rr rp ry: rotation (roll, pitch, yaw) of the model"""
cube = deepcopy(sdf_sand_box)
req = SpawnModelRequest()
req.model_name = modelname
req.model_xml = cube
req.initial_pose.position.x = px
req.initial_pose.position.y = py
req.initial_pose.position.z = pz
q = quaternion_from_euler(rr, rp, ry)
req.initial_pose.orientation.x = q[0]
req.initial_pose.orientation.y = q[1]
req.initial_pose.orientation.z = q[2]
req.initial_pose.orientation.w = q[3]
return req
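# Usage sketch (illustrative addition; the service name and sizes below are assumptions,
# not part of the original file). The requests built above are normally handed to
# Gazebo's SDF spawn service:
if __name__ == '__main__':
    rospy.init_node('spawner_example')
    spawn_srv = rospy.ServiceProxy('/gazebo/spawn_sdf_model', SpawnModel)
    spawn_srv.wait_for_service()
    spawner = Spawner()
    req = spawner.create_cube_request('sand_particle_0',
                                      0.5, 0.0, 0.2,      # position (x, y, z)
                                      0.0, 0.0, 0.0,      # roll, pitch, yaw
                                      0.01, 0.01, 0.01)   # particle scale
    spawn_srv(req)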
| 2.109375 | 2 |
output/models/ms_data/regex/re_l32_xsd/__init__.py | tefra/xsdata-w3c-tests | 1 | 9604 | <reponame>tefra/xsdata-w3c-tests<filename>output/models/ms_data/regex/re_l32_xsd/__init__.py<gh_stars>1-10
from output.models.ms_data.regex.re_l32_xsd.re_l32 import (
Regex,
Doc,
)
__all__ = [
"Regex",
"Doc",
]
| 1.023438 | 1 |
sdk/python/tests/dsl/metadata_tests.py | ConverJens/pipelines | 6 | 9605 | <reponame>ConverJens/pipelines
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.components.structures import ComponentSpec, InputSpec, OutputSpec
import unittest
class TestComponentMeta(unittest.TestCase):
def test_to_dict(self):
component_meta = ComponentSpec(name='foobar',
description='foobar example',
inputs=[InputSpec(name='input1',
description='input1 desc',
type={'GCSPath': {
'bucket_type': 'directory',
'file_type': 'csv'
}},
default='default1'
),
InputSpec(name='input2',
description='input2 desc',
type={'TFModel': {
'input_data': 'tensor',
'version': '1.8.0'
}},
default='default2'
),
InputSpec(name='input3',
description='input3 desc',
type='Integer',
default='default3'
),
],
outputs=[OutputSpec(name='output1',
description='output1 desc',
type={'Schema': {
'file_type': 'tsv'
}},
)
]
)
golden_meta = {
'name': 'foobar',
'description': 'foobar example',
'inputs': [
{
'name': 'input1',
'description': 'input1 desc',
'type': {
'GCSPath': {
'bucket_type': 'directory',
'file_type': 'csv'
}
},
'default': 'default1'
},
{
'name': 'input2',
'description': 'input2 desc',
'type': {
'TFModel': {
'input_data': 'tensor',
'version': '1.8.0'
}
},
'default': 'default2'
},
{
'name': 'input3',
'description': 'input3 desc',
'type': 'Integer',
'default': 'default3'
}
],
'outputs': [
{
'name': 'output1',
'description': 'output1 desc',
'type': {
'Schema': {
'file_type': 'tsv'
}
},
}
]
}
self.assertEqual(component_meta.to_dict(), golden_meta)
| 1.875 | 2 |
challenges/python-solutions/day-25.py | elifloresch/thirty-days-challenge | 0 | 9606 | <gh_stars>0
import math
def is_prime_number(number):
if number < 2:
return False
if number == 2 or number == 3:
return True
if number % 2 == 0 or number % 3 == 0:
return False
number_sqrt = math.sqrt(number)
int_number_sqrt = int(number_sqrt) + 2  # +2 (not +1) so a divisor equal to sqrt(number), e.g. 5 for 25, is still tested
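# All primes greater than 3 have the form 6k - 1 or 6k + 1, so after ruling out
# multiples of 2 and 3 it suffices to test the divisors d - 1 and d + 1 for
# d = 6, 12, ... up to sqrt(number).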
for d in range(6, int_number_sqrt, 6):
if number % (d - 1) == 0 or number % (d + 1) == 0:
return False
return True
test_cases = int(input())
numbers = []
for test_case in range(test_cases):
numbers.append(int(input()))
for n in numbers:
if is_prime_number(n):
print('Prime')
else:
print('Not prime')
| 3.890625 | 4 |
examples/path_config.py | rnixx/garden.cefpython | 13 | 9607 | <reponame>rnixx/garden.cefpython
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Minimal example of CEFBrowser widget use. There are no controls
(back / forward / reload) whatsoever; just a Kivy app displaying the
Chromium webview.
In this example we demonstrate how the cache path of CEF can be set.
"""
import os
from kivy.app import App
from kivy.garden.cefpython import CEFBrowser
from kivy.logger import Logger
if __name__ == '__main__':
class SimpleBrowserApp(App):
def build(self):
# Set runtime data paths
CEFBrowser.set_data_path(os.path.realpath("./cef_data"))
# CEFBrowser.set_caches_path(os.path.realpath("./cef_caches"))
# CEFBrowser.set_cookies_path(os.path.realpath("./cef_cookies"))
# CEFBrowser.set_logs_path(os.path.realpath("./cef_logs"))
Logger.info("Example: The CEF pathes have been set to")
Logger.info("- Cache %s", CEFBrowser._caches_path)
Logger.info("- Cookies %s", CEFBrowser._cookies_path)
Logger.info("- Logs %s", CEFBrowser._logs_path)
# Create CEFBrowser instance. Go to test-site.
cb = CEFBrowser(url="http://jegger.ch/datapool/app/test.html")
return cb
SimpleBrowserApp().run()
| 2.703125 | 3 |
simple-systems/and_xor_shift.py | laserbat/random-projects | 3 | 9608 | #!/usr/bin/python3
# A dynamical system x_(n+1) = F(x_n), where F(a) is a composition of bitwise XORs,
# ANDs and left shifts, can be Turing complete.
# Proof by simulation (rule 110).
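# Added note (sketch, not in the original): over GF(2), rule 110 on a neighborhood
# (p, q, r) = (left, center, right) is q ^ r ^ (q & r) ^ (p & q & r). In the update
# below, bit i of the new state equals
#   a_i ^ a_(i-1) ^ (a_i & a_(i-1)) ^ (a_i & a_(i-1) & a_(i-2)),
# i.e. the same polynomial with bit i-1 as the center cell, so each step applies
# rule 110 up to a left/right mirroring and a one-bit drift of the tape.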
a = 1
while a:
print(bin(a))
a = a ^ (a << 1) ^ (a & (a << 1)) ^ (a & (a << 1) & (a << 2))
| 3.34375 | 3 |
trinity/protocol/common/peer_pool_event_bus.py | Gauddel/trinity | 0 | 9609 | from abc import (
abstractmethod,
)
from typing import (
Any,
Callable,
cast,
FrozenSet,
Generic,
Type,
TypeVar,
)
from cancel_token import (
CancelToken,
)
from p2p.exceptions import (
PeerConnectionLost,
)
from p2p.kademlia import Node
from p2p.peer import (
BasePeer,
PeerSubscriber,
)
from p2p.peer_pool import (
BasePeerPool,
)
from p2p.protocol import (
Command,
PayloadType,
)
from p2p.service import (
BaseService,
)
from trinity.endpoint import (
TrinityEventBusEndpoint,
)
from .events import (
ConnectToNodeCommand,
DisconnectPeerEvent,
HasRemoteEvent,
PeerCountRequest,
PeerCountResponse,
)
TPeer = TypeVar('TPeer', bound=BasePeer)
TStreamEvent = TypeVar('TStreamEvent', bound=HasRemoteEvent)
class PeerPoolEventServer(BaseService, PeerSubscriber, Generic[TPeer]):
"""
Base class to create a bridge between the ``PeerPool`` and the event bus so that peer
messages become available to external processes (e.g. isolated plugins). In the opposite
direction, other processes can also retrieve information or execute actions on the peer pool by
sending specific events through the event bus that the ``PeerPoolEventServer`` answers.
This class bridges all common APIs but protocol specific communication can be enabled through
subclasses that add more handlers.
"""
msg_queue_maxsize: int = 2000
subscription_msg_types: FrozenSet[Type[Command]] = frozenset({})
def __init__(self,
event_bus: TrinityEventBusEndpoint,
peer_pool: BasePeerPool,
token: CancelToken = None) -> None:
super().__init__(token)
self.peer_pool = peer_pool
self.event_bus = event_bus
async def _run(self) -> None:
self.logger.debug("Running %s", self.__class__.__name__)
self.run_daemon_event(
DisconnectPeerEvent,
lambda peer, event: peer.disconnect_nowait(event.reason)
)
self.run_daemon_task(self.handle_peer_count_requests())
self.run_daemon_task(self.handle_connect_to_node_requests())
self.run_daemon_task(self.handle_native_peer_messages())
await self.cancellation()
def run_daemon_event(self,
event_type: Type[TStreamEvent],
event_handler_fn: Callable[[TPeer, TStreamEvent], Any]) -> None:
"""
Register a handler to be run every time that an event of type ``event_type`` appears.
"""
self.run_daemon_task(self.handle_stream(event_type, event_handler_fn))
@abstractmethod
async def handle_native_peer_message(self,
remote: Node,
cmd: Command,
msg: PayloadType) -> None:
"""
Process every native peer message. Subclasses should override this to forward specific
peer messages on the event bus. The handler is called for every message that is defined in
``self.subscription_msg_types``.
"""
pass
def get_peer(self, remote: Node) -> TPeer:
"""
Look up and return a peer from the ``PeerPool`` that matches the given node.
Raise ``PeerConnectionLost`` if the peer is no longer in the pool or is winding down.
"""
try:
peer = self.peer_pool.connected_nodes[remote]
except KeyError:
self.logger.debug("Peer with remote %s does not exist in the pool anymore", remote)
raise PeerConnectionLost()
else:
if not peer.is_operational:
self.logger.debug("Peer %s is not operational when selecting from pool", peer)
raise PeerConnectionLost()
else:
return cast(TPeer, peer)
async def handle_connect_to_node_requests(self) -> None:
async for command in self.wait_iter(self.event_bus.stream(ConnectToNodeCommand)):
self.logger.debug('Received request to connect to %s', command.remote)
self.run_task(self.peer_pool.connect_to_node(command.remote))
async def handle_peer_count_requests(self) -> None:
async for req in self.wait_iter(self.event_bus.stream(PeerCountRequest)):
await self.event_bus.broadcast(
PeerCountResponse(len(self.peer_pool)),
req.broadcast_config()
)
async def handle_stream(self,
event_type: Type[TStreamEvent],
event_handler_fn: Callable[[TPeer, TStreamEvent], Any]) -> None:
async for event in self.wait_iter(self.event_bus.stream(event_type)):
try:
peer = self.get_peer(event.remote)
except PeerConnectionLost:
pass
else:
event_handler_fn(peer, event)
async def handle_native_peer_messages(self) -> None:
with self.subscribe(self.peer_pool):
while self.is_operational:
peer, cmd, msg = await self.wait(self.msg_queue.get())
await self.handle_native_peer_message(peer.remote, cmd, msg)
class DefaultPeerPoolEventServer(PeerPoolEventServer[BasePeer]):
async def handle_native_peer_message(self,
remote: Node,
cmd: Command,
msg: PayloadType) -> None:
pass
| 2.1875 | 2 |
tests/e2e/performance/csi_tests/test_pvc_creation_deletion_performance.py | annagitel/ocs-ci | 1 | 9610 | <gh_stars>1-10
"""
Test to verify performance of PVC creation and deletion
for RBD, CephFS and RBD-Thick interfaces
"""
import time
import logging
import datetime
import pytest
import ocs_ci.ocs.exceptions as ex
import threading
import statistics
from concurrent.futures import ThreadPoolExecutor
from uuid import uuid4
from ocs_ci.framework.testlib import performance
from ocs_ci.ocs.perftests import PASTest
from ocs_ci.helpers import helpers, performance_lib
from ocs_ci.ocs import constants
from ocs_ci.helpers.helpers import get_full_test_logs_path
from ocs_ci.ocs.perfresult import PerfResult
from ocs_ci.framework import config
log = logging.getLogger(__name__)
class ResultsAnalyse(PerfResult):
"""
This class generates results for all tests as one unit
and saves them to an elastic search server on the cluster
"""
def __init__(self, uuid, crd, full_log_path):
"""
Initialize the object by reading some of the data from the CRD file and
by connecting to the ES server and read all results from it.
Args:
uuid (str): the unique uid of the test
crd (dict): dictionary with test parameters - the test yaml file
that modify it in the test itself.
full_log_path (str): the path of the results files to be found
"""
super(ResultsAnalyse, self).__init__(uuid, crd)
self.new_index = "pvc_create_delete_fullres"
self.full_log_path = full_log_path
# make sure we have connection to the elastic search server
self.es_connect()
@performance
class TestPVCCreationDeletionPerformance(PASTest):
"""
Test to verify performance of PVC creation and deletion
"""
def setup(self):
"""
Setting up test parameters
"""
log.info("Starting the test setup")
super(TestPVCCreationDeletionPerformance, self).setup()
self.benchmark_name = "PVC_Creation-Deletion"
self.uuid = uuid4().hex
self.crd_data = {
"spec": {
"test_user": "Homer simpson",
"clustername": "test_cluster",
"elasticsearch": {
"server": config.PERF.get("production_es_server"),
"port": config.PERF.get("production_es_port"),
"url": f"http://{config.PERF.get('production_es_server')}:{config.PERF.get('production_es_port')}",
},
}
}
if self.dev_mode:
self.crd_data["spec"]["elasticsearch"] = {
"server": config.PERF.get("dev_es_server"),
"port": config.PERF.get("dev_es_port"),
"url": f"http://{config.PERF.get('dev_es_server')}:{config.PERF.get('dev_es_port')}",
}
@pytest.fixture()
def base_setup(self, interface_type, storageclass_factory, pod_factory):
"""
A setup phase for the test
Args:
interface_type: A fixture to iterate over ceph interfaces
storageclass_factory: A fixture to create everything needed for a
storageclass
pod_factory: A fixture to create new pod
"""
self.interface = interface_type
if self.interface == constants.CEPHBLOCKPOOL_THICK:
self.sc_obj = storageclass_factory(
interface=constants.CEPHBLOCKPOOL,
new_rbd_pool=True,
rbd_thick_provision=True,
)
else:
self.sc_obj = storageclass_factory(self.interface)
self.pod_factory = pod_factory
@pytest.fixture()
def namespace(self, project_factory):
"""
Create a new project
"""
proj_obj = project_factory()
self.namespace = proj_obj.namespace
def init_full_results(self, full_results):
"""
Initialize the full results object which will send to the ES server
Args:
full_results (obj): an empty ResultsAnalyse object
Returns:
ResultsAnalyse (obj): the input object filled with data
"""
for key in self.environment:
full_results.add_key(key, self.environment[key])
full_results.add_key("storageclass", self.sc)
full_results.add_key("index", full_results.new_index)
return full_results
@pytest.mark.parametrize(
argnames=["interface_type", "pvc_size"],
argvalues=[
pytest.param(
*[constants.CEPHBLOCKPOOL, "5Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL, "15Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL, "25Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM, "5Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM, "15Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM, "25Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK, "5Gi"],
marks=[pytest.mark.performance_extended],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK, "15Gi"],
marks=[pytest.mark.performance_extended],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK, "25Gi"],
marks=[pytest.mark.performance_extended],
),
],
)
@pytest.mark.usefixtures(base_setup.__name__)
def test_pvc_creation_deletion_measurement_performance(
self, teardown_factory, pvc_size
):
"""
Measuring PVC creation and deletion times for pvc samples
Verifying that those times are within the required limits
"""
# Getting the full path for the test logs
self.full_log_path = get_full_test_logs_path(cname=self)
if self.interface == constants.CEPHBLOCKPOOL:
self.sc = "RBD"
elif self.interface == constants.CEPHFILESYSTEM:
self.sc = "CephFS"
elif self.interface == constants.CEPHBLOCKPOOL_THICK:
self.sc = "RBD-Thick"
self.full_log_path += f"-{self.sc}-{pvc_size}"
log.info(f"Logs file path name is : {self.full_log_path}")
self.start_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())
self.get_env_info()
# Initialize the results doc file.
self.full_results = self.init_full_results(
ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path)
)
self.full_results.add_key("pvc_size", pvc_size)
num_of_samples = 5
accepted_creation_time = (
600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 1
)
# accepted deletion time for RBD is 1 sec, for CephFS is 2 secs and for RBD Thick is 5 secs
if self.interface == constants.CEPHFILESYSTEM:
accepted_deletion_time = 2
elif self.interface == constants.CEPHBLOCKPOOL:
accepted_deletion_time = 1
else:
accepted_deletion_time = 5
self.full_results.add_key("samples", num_of_samples)
accepted_creation_deviation_percent = 50
accepted_deletion_deviation_percent = 50
creation_time_measures = []
deletion_time_measures = []
msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."
for i in range(num_of_samples):
logging.info(f"{msg_prefix} Start creating PVC number {i + 1}.")
start_time = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name, size=pvc_size)
timeout = 600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 60
helpers.wait_for_resource_state(
pvc_obj, constants.STATUS_BOUND, timeout=timeout
)
pvc_obj.reload()
creation_time = performance_lib.measure_pvc_creation_time(
self.interface, pvc_obj.name, start_time
)
logging.info(
f"{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds."
)
if creation_time > accepted_creation_time:
raise ex.PerformanceException(
f"{msg_prefix} PVC creation time is {creation_time} and is greater than "
f"{accepted_creation_time} seconds."
)
creation_time_measures.append(creation_time)
pv_name = pvc_obj.backed_pv
pvc_reclaim_policy = pvc_obj.reclaim_policy
pod_obj = self.write_file_on_pvc(pvc_obj)
pod_obj.delete(wait=True)
teardown_factory(pvc_obj)
logging.info(f"{msg_prefix} Start deleting PVC number {i + 1}")
if pvc_reclaim_policy == constants.RECLAIM_POLICY_DELETE:
pvc_obj.delete()
pvc_obj.ocp.wait_for_delete(pvc_obj.name)
helpers.validate_pv_delete(pvc_obj.backed_pv)
deletion_time = helpers.measure_pvc_deletion_time(
self.interface, pv_name
)
logging.info(
f"{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds."
)
if deletion_time > accepted_deletion_time:
raise ex.PerformanceException(
f"{msg_prefix} PVC deletion time is {deletion_time} and is greater than "
f"{accepted_deletion_time} seconds."
)
deletion_time_measures.append(deletion_time)
else:
logging.info(
f"Reclaim policy of the PVC {pvc_obj.name} is not Delete;"
f" therefore not measuring deletion time for this PVC."
)
creation_average = self.process_time_measurements(
"creation",
creation_time_measures,
accepted_creation_deviation_percent,
msg_prefix,
)
self.full_results.add_key("creation-time", creation_average)
deletion_average = self.process_time_measurements(
"deletion",
deletion_time_measures,
accepted_deletion_deviation_percent,
msg_prefix,
)
self.full_results.add_key("deletion-time", deletion_average)
self.full_results.all_results["creation"] = creation_time_measures
self.full_results.all_results["deletion"] = deletion_time_measures
self.end_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())
self.full_results.add_key(
"test_time", {"start": self.start_time, "end": self.end_time}
)
self.full_results.es_write()
log.info(f"The Result can be found at : {self.full_results.results_link()}")
def process_time_measurements(
self, action_name, time_measures, accepted_deviation_percent, msg_prefix
):
"""
Analyzes the given time measurements. If the standard deviation of these times is bigger than the
provided accepted deviation percent, fails the test
Args:
action_name (str): Name of the action for which these measurements were collected; used for the logging
time_measures (list of floats): A list of time measurements
accepted_deviation_percent (int): Accepted deviation percent to which computed standard deviation may be
compared
msg_prefix (str) : A string for comprehensive logging
Returns:
(float) The average value of the provided time measurements
"""
average = statistics.mean(time_measures)
log.info(
f"{msg_prefix} The average {action_name} time for the sampled {len(time_measures)} "
f"PVCs is {average} seconds."
)
if self.interface == constants.CEPHBLOCKPOOL_THICK:
st_deviation = statistics.stdev(time_measures)
st_deviation_percent = st_deviation / average * 100.0
if st_deviation_percent > accepted_deviation_percent:
log.error(
f"{msg_prefix} The standard deviation percent for {action_name} of {len(time_measures)} sampled "
f"PVCs is {st_deviation_percent}% which is bigger than accepted {accepted_deviation_percent}."
)
else:
log.info(
f"{msg_prefix} The standard deviation percent for {action_name} of {len(time_measures)} sampled "
f"PVCs is {st_deviation_percent}% and is within the accepted range."
)
self.full_results.add_key(
f"{action_name}_deviation_pct", st_deviation_percent
)
return average
def write_file_on_pvc(self, pvc_obj, filesize=1):
"""
Writes a file on given PVC
Args:
pvc_obj: PVC object to write a file on
filesize: size of file to write (in GB - default is 1GB)
Returns:
Pod on this pvc on which the file was written
"""
pod_obj = self.pod_factory(
interface=self.interface, pvc=pvc_obj, status=constants.STATUS_RUNNING
)
# convert the requested file size (in GB, default 1 GB) to an fio size string in MB
file_size = f"{int(filesize * 1024)}M"
log.info(f"Starting IO on the POD {pod_obj.name}")
# Going to run only write IO
pod_obj.fillup_fs(size=file_size, fio_filename=f"{pod_obj.name}_file")
# Wait for the fio to finish
fio_result = pod_obj.get_fio_results()
err_count = fio_result.get("jobs")[0].get("error")
assert (
err_count == 0
), f"IO error on pod {pod_obj.name}. FIO result: {fio_result}"
log.info("IO on the PVC has finished")
return pod_obj
@pytest.mark.parametrize(
argnames=["interface_type"],
argvalues=[
pytest.param(
*[constants.CEPHBLOCKPOOL],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK],
marks=[pytest.mark.performance_extended],
),
],
)
@pytest.mark.usefixtures(base_setup.__name__)
@pytest.mark.usefixtures(namespace.__name__)
@pytest.mark.polarion_id("OCS-2618")
def test_multiple_pvc_deletion_measurement_performance(self, teardown_factory):
"""
Measuring PVC deletion time of 120 PVCs in 180 seconds
Args:
teardown_factory: A fixture used when we want a new resource that was created during the tests
to be removed in the teardown phase.
Returns:
"""
number_of_pvcs = 120
pvc_size = "1Gi"
msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."
log.info(f"{msg_prefix} Start creating new 120 PVCs")
pvc_objs, _ = helpers.create_multiple_pvcs(
sc_name=self.sc_obj.name,
namespace=self.namespace,
number_of_pvc=number_of_pvcs,
size=pvc_size,
burst=True,
)
for pvc_obj in pvc_objs:
pvc_obj.reload()
teardown_factory(pvc_obj)
timeout = 600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 60
with ThreadPoolExecutor(max_workers=5) as executor:
for pvc_obj in pvc_objs:
executor.submit(
helpers.wait_for_resource_state,
pvc_obj,
constants.STATUS_BOUND,
timeout=timeout,
)
executor.submit(pvc_obj.reload)
pod_objs = []
for pvc_obj in pvc_objs:
pod_obj = self.write_file_on_pvc(pvc_obj, 0.3)
pod_objs.append(pod_obj)
# Get pvc_name, require pvc_name to fetch deletion time data from log
threads = list()
for pvc_obj in pvc_objs:
process = threading.Thread(target=pvc_obj.reload)
process.start()
threads.append(process)
for process in threads:
process.join()
pvc_name_list, pv_name_list = ([] for i in range(2))
threads = list()
for pvc_obj in pvc_objs:
process1 = threading.Thread(target=pvc_name_list.append, args=(pvc_obj.name,))
process2 = threading.Thread(target=pv_name_list.append, args=(pvc_obj.backed_pv,))
process1.start()
process2.start()
threads.append(process1)
threads.append(process2)
for process in threads:
process.join()
log.info(f"{msg_prefix} Preparing to delete 120 PVC")
# Delete PVC
for pvc_obj, pod_obj in zip(pvc_objs, pod_objs):
pod_obj.delete(wait=True)
pvc_obj.delete()
pvc_obj.ocp.wait_for_delete(pvc_obj.name)
# Get PVC deletion time
pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
interface=self.interface, pv_name_list=pv_name_list
)
log.info(
f"{msg_prefix} {number_of_pvcs} bulk deletion time is {pvc_deletion_time}"
)
# accepted deletion time is 2 secs for each PVC
accepted_pvc_deletion_time = number_of_pvcs * 2
for del_time in pvc_deletion_time.values():
if del_time > accepted_pvc_deletion_time:
raise ex.PerformanceException(
f"{msg_prefix} {number_of_pvcs} PVCs deletion time is {pvc_deletion_time.values()} and is "
f"greater than {accepted_pvc_deletion_time} seconds"
)
logging.info(f"{msg_prefix} {number_of_pvcs} PVCs deletion times are:")
for name, a_time in pvc_deletion_time.items():
logging.info(f"{name} deletion time is: {a_time} seconds")
| 2.3125 | 2 |
templates/t/searchresult_withnone.py | MikeBirdsall/food-log | 0 | 9611 | <reponame>MikeBirdsall/food-log
#!/usr/bin/python3
from jinja2 import Environment, FileSystemLoader
def spacenone(value):
return "" if value is None else str(value)
results = [
dict(
description="Noodles and Company steak Stromboli",
comment="",
size="small",
cals=530,
carbs=50,
fat=25,
protein=27,
score=30),
dict(
description="Steak sandwich",
comment="",
size="4 oz and bun",
cals=480,
carbs=44,
fat=20,
protein=27,
score=30),
dict(
description="chipotle tacos",
comment="Steak, no beans, gu...",
size="",
cals=285,
carbs=None,
fat=16,
protein=None,
score=30),
dict(
description="Steak Sandwich",
comment="",
size="",
cals=380,
carbs=45,
fat=3.5,
protein=34,
score=30),
]
input_ = dict(
title="Search for Courses",
h1="Full Text Search: steak NOT shake",
results=results,
)
env = Environment(loader=FileSystemLoader(".."))
env.filters['spacenone'] = spacenone
template = env.get_template("searchresult.html")
output = template.render(input_)
print(output)
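# Note (assumption, not part of the original file): inside searchresult.html the custom
# filter would be applied in the usual Jinja2 way, e.g. {{ result.carbs|spacenone }},
# so None values render as empty table cells instead of the string "None".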
| 2.640625 | 3 |
payments/views.py | aman-roy/pune.pycon.org | 0 | 9612 | from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from payments.models import Invoice, RazorpayKeys
from payments.razorpay.razorpay_payments import RazorpayPayments
from payments.models import Payment, Order
import json
@csrf_exempt
def webhook(request):
if request.method == 'POST':
keys = RazorpayKeys.objects.first()
payment = RazorpayPayments(keys.api_key, keys.api_secret)
data = json.loads(request.body)
if 'payload' not in data or 'invoice' not in data['payload']:
return JsonResponse({"message": "Invalid Data"})
invoice_entity = data['payload']['invoice']['entity']
order_entity = data['payload']['order']['entity']
payment_entity = data['payload']['payment']['entity']
invoice = Invoice.objects.get(invoice_id=invoice_entity['id'])
invoice.status = invoice_entity['status']
invoice.save()
payment.save_payment(payment_entity)
payment.save_order(order_entity)
return JsonResponse({"message": "Success"})
return JsonResponse({"message": "Method Not Allowed"})
def sync(request):
keys = RazorpayKeys.objects.first()
payment = RazorpayPayments(keys.api_key, keys.api_secret)
invoices = Invoice.objects.all()
for invoice in invoices:
invoice_details = payment.fetch_invoices(invoice.invoice_id)
invoice.status = invoice_details['status']
invoice.save()
if invoice.status == 'paid':
orders = Order.objects.filter(order_id=invoice_details['order_id'])
if len(orders) == 0:
order_details = payment.fetch_orders(
invoice_details['order_id'])
payment.save_order(order_details)
if invoice_details['payment_id']:
payments = Payment.objects.filter(payment_id=invoice_details['payment_id'])
if len(payments) == 0:
payment_details = payment.fetch_payment(invoice_details['payment_id'])
payment.save_payment(payment_details)
return JsonResponse({"message": "synced"}) | 2.03125 | 2 |
src/convnet/image_classifier.py | danschef/gear-detector | 1 | 9613 | import configparser
import os
import sys
from time import localtime, strftime, mktime
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from net import Net
from geo_helper import store_image_bounds
from image_helper import CLASSES
from image_helper import save_image
from image_helper import test_set_loader
from image_helper import train_set_loader
from image_helper import validation_set_loader
CONFIG = configparser.ConfigParser()
CONFIG.read('./src/config.ini')
###########################################
# Training Stage
###########################################
def train(net, epochs=50, learning_rate=0.001):
start_time = strftime('%H:%M:%S', localtime())
print(f"Started training at: {start_time}")
datetime = strftime("%Y%m%d_%H%M", localtime())
logfile = f"{CONFIG['CNN Paths']['accuracy_log_path']}/{datetime}.log"
###########################################
# Loss Function
###########################################
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9)
for epoch in range(epochs): # loop over the dataset multiple times
running_loss = 0.0
for i, (images, labels) in enumerate(train_set_loader(), 0):
# Wrap images and labels into Variables
images, labels = Variable(images), Variable(labels)
# Clear all accumulated gradients
optimizer.zero_grad()
# Predict classes using images from the test set
outputs = net(images)
# Compute the loss based on the predictions and actual labels
loss = criterion(outputs, labels)
# Backpropagate the loss
loss.backward()
# Adjust parameters according to the computed gradients
optimizer.step()
# print statistics
running_loss += loss.item()
if i % 100 == 99: # print every 100 mini-batches
print('[%d, %5d] loss: %.3f, accuracy: %.3f' %
(epoch + 1, i + 1, running_loss / 100, validate(logfile, net)))
running_loss = 0.0
end_time = strftime('%H:%M:%S', localtime())
print(f"Finished Training: {end_time}")
#####################################
# Validation stage
#####################################
def validate(logfile, net):
dataiter = iter(validation_set_loader())
hits = 0.0
for idx, item in enumerate(dataiter):
images, labels = item
outputs = net(Variable(images))
_, predicted = torch.max(outputs.data, 1)
if (labels == predicted[0]).all():
hits += 1
accuracy = hits / (idx + 1)
log_accuracy(logfile, accuracy)
return accuracy
def log_accuracy(filename, accuracy):
with open(filename, "a") as file:
file.write(str(accuracy)+ '\n')
#####################################
# Prediction stage
#####################################
def predict(net):
print(f"Prediction started at: {strftime('%H:%M:%S', localtime())}")
dataiter = iter(test_set_loader())
prediction_cnt = {
'cloud': 0,
'edge': 0,
'land': 0,
'nets': 0,
'rock': 0,
'vessel': 0,
'water': 0
}
datetime = strftime("%Y%m%d_%H%M", localtime())
prediction_log = f"{CONFIG['CNN Paths']['predicted_geodata_path']}/{datetime}.json"
prediction_img_folder = f"{CONFIG['CNN Paths']['predicted_imagery_path']}/{datetime}"
for idx, item in enumerate(dataiter):
if idx > int(CONFIG['CNN Prediction']['batch_size']):
break
if idx % 100 == 0:
print('.', end='', flush=True)
images, _labels = item
##########################################################
# Feed the images into the CNN and check what it predicts
##########################################################
outputs = net(Variable(images))
_, predicted = torch.max(outputs.data, 1)
# Save images from prediction for visual check
if CLASSES[predicted[0]] == 'nets':
image_path = dataiter._dataset.imgs[idx][0]
save_image(image_path, prediction_img_folder)
store_image_bounds(image_path, prediction_log)
prediction_cnt[CLASSES[predicted[0]]] += 1
print(f"\nPrediction ended at: {strftime('%H:%M:%S', localtime())}")
print(f"\nPredicted: {prediction_cnt}")
def model_full_path(path, checkpoint):
return f"{path}_{checkpoint}.pt"
################################################################
# Train network or use existing one for prediction
################################################################
def main(mode=''):
image_bands = int(CONFIG['CNN Training']['image_bands'])
training_epochs = int(CONFIG['CNN Training']['epochs'])
resume_epochs = int(CONFIG['CNN Resume Training']['epochs'])
learning_rate = float(CONFIG['CNN Training']['learning_rate'])
batch_size = CONFIG['CNN Prediction']['batch_size']
if len(sys.argv) > 1:
mode = sys.argv[1]
net = Net(in_channels=image_bands)
model_path = CONFIG['CNN Paths']['model_path']
checkpoint = CONFIG['CNN Prediction']['checkpoint']
# Use network for prediction
if mode == 'predict' and os.path.exists(model_full_path(model_path, checkpoint)):
print(f"Use trained network {checkpoint} for prediction of max {batch_size} images")
# Load existing model
model = torch.load(model_full_path(model_path, checkpoint))
net.load_state_dict(model)
predict(net)
# Start training
elif mode == 'train':
print(f"Start network training for {training_epochs} epochs")
train(net, training_epochs, learning_rate)
# Save model after training
checkpoint = strftime("%Y%m%d_%H%M", localtime())
torch.save(net.state_dict(), model_full_path(model_path, checkpoint))
# Resume training
elif mode == 'resume':
checkpoint = CONFIG['CNN Resume Training']['checkpoint']
print(f"Resume training on Model {checkpoint} for {resume_epochs} epochs")
# Load existing model and resume training
model = torch.load(model_full_path(model_path, checkpoint))
net.load_state_dict(model)
train(net, resume_epochs, learning_rate)
torch.save(net.state_dict(), model_full_path(model_path, checkpoint))
else:
print('No mode provided.')
main()
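# Usage note (assumption, not part of the original file): the mode is taken from the
# first command-line argument, so from the repository root the script would be run as
#   python src/convnet/image_classifier.py train    # train for the configured number of epochs
#   python src/convnet/image_classifier.py resume   # continue training the configured checkpoint
#   python src/convnet/image_classifier.py predict  # classify up to batch_size test images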
| 2.21875 | 2 |
src/modules/AlphabetPlotter.py | aaanh/duplicated_accelcamp | 0 | 9614 | import tkinter as tk
from tkinter import filedialog
import csv
import matplotlib.pyplot as plt
root = tk.Tk(screenName=':0.0')
root.withdraw()
file_path = filedialog.askopenfilename()
lastIndex = len(file_path.split('/')) - 1
v0 = [0, 0, 0]
x0 = [0, 0, 0]
fToA = 1
error = 0.28
errorZ = 3
t = []
time = []
m = [[] for i in range(3)]
magnitude = [[] for i in range(3)]
shift_x = 0
shift_y = 0
# For when the data starts at (2,1)
if file_path.split('/')[lastIndex].split('.')[2] == "pocket":
shift_x = 2
shift_y = 1
error = 0.3
fToA = 1
# For when the data starts at (0,0)
elif file_path.split('/')[lastIndex].split('.')[2] == "pocket_mobile":
shift_x = 0
shift_y = 0
error = 0.3
fToA = 1
# For when the data starts at (1,0)
elif file_path.split('/')[lastIndex].split('.')[2] == "android":
shift_x = 0
shift_y = 1
error = 0.02
fToA = 9.81
errorZ = 100
shift = 0
uselessboolean = True
with open(file_path, 'r+') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
if shift < shift_y:
shift += 1
else:
t = row[shift_x]
m[0] = row[1 + shift_x]
m[1] = row[2 + shift_x]
m[2] = row[3 + shift_x]
time.append(float(t))
for i in range(0, 3):
magnitude[i].append(float(m[i]) if abs(float(m[i])) > error else 0)
acceleration = [[(j * fToA) for j in i] for i in magnitude]
acceleration[2] = [i - 9.805 for i in acceleration[2]]
# Translates Data into Position
velocity = [[0 for i in time] for i in range(3)]
position = [[0 for i in time] for i in range(3)]
for j in range(3):
velocity[j][0] = v0[j]
for i in range(1, len(time)):
velocity[j][i] = velocity[j][i - 1] + acceleration[j][i - 1] * (time[i] - time[i - 1])
for j in range(3):
position[j][0] = x0[j]
for i in range(1, len(time)):
position[j][i] = position[j][i - 1] + velocity[j][i - 1] * (time[i] - time[i - 1])
for i in range(len(acceleration[2])):
if abs(velocity[2][i]) > errorZ:
position[0][i] = 0
position[1][i] = 0
fig, axs = plt.subplots(2)
axs[0].plot(time, acceleration[0])
axs[0].set_xlabel('Time (s)')
axs[0].set_ylabel('AccelerationX (m/s^2)')
axs[1].plot(time, acceleration[1])
axs[1].set_xlabel('Time (s)')
axs[1].set_ylabel('AccelerationY (m/s^2)')
'''
axs[2].scatter(time, acceleration[2])
axs[2].set_xlabel('Time (s)')
axs[2].set_ylabel('AccelerationZ (m/s^2)')
axs[3].scatter(time, velocity[2])
axs[3].set_xlabel('Time (s)')
axs[3].set_ylabel('VelocityZ (m/s)')
axs[4].scatter(time, position[2])
axs[4].set_xlabel('Time (s)')
axs[4].set_ylabel('PositionZ (m)')
axs.scatter(position[0], position[1], marker = "_", linewidth = 70)
axs.set_xlabel('PositionX')
axs.set_ylabel('PositionY')
plt.plot(position[0], position[1], marker = '_', markersize = 30, linewidth = 3, markeredgewidth = 10)'''
plt.show() | 3.234375 | 3 |
users/migrations/0008_profile_fields_optional.py | mitodl/mit-xpro | 10 | 9615 | # Generated by Django 2.2.3 on 2019-07-15 19:24
from django.db import migrations, models
def backpopulate_incomplete_profiles(apps, schema):
"""Backpopulate users who don't have a profile record"""
User = apps.get_model("users", "User")
Profile = apps.get_model("users", "Profile")
for user in User.objects.annotate(
has_profile=models.Exists(Profile.objects.filter(user=models.OuterRef("pk")))
).filter(has_profile=False):
Profile.objects.get_or_create(user=user)
def remove_incomplete_profiles(apps, schema):
"""Delete records that will cause rollbacks on nullable/blankable fields to fail"""
Profile = apps.get_model("users", "Profile")
Profile.objects.filter(
models.Q(birth_year__isnull=True)
| models.Q(gender__exact="")
| models.Q(job_title__exact="")
| models.Q(company__exact="")
).delete()
class Migration(migrations.Migration):
dependencies = [("users", "0007_validate_country_and_state")]
operations = [
migrations.AlterField(
model_name="profile",
name="birth_year",
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="profile",
name="company",
field=models.CharField(blank=True, default="", max_length=128),
),
migrations.AlterField(
model_name="profile",
name="gender",
field=models.CharField(
blank=True,
choices=[
("m", "Male"),
("f", "Female"),
("o", "Other/Prefer Not to Say"),
],
default="",
max_length=10,
),
),
migrations.AlterField(
model_name="profile",
name="industry",
field=models.CharField(blank=True, default="", max_length=60),
),
migrations.AlterField(
model_name="profile",
name="job_function",
field=models.CharField(blank=True, default="", max_length=60),
),
migrations.AlterField(
model_name="profile",
name="job_title",
field=models.CharField(blank=True, default="", max_length=128),
),
migrations.AlterField(
model_name="profile",
name="leadership_level",
field=models.CharField(blank=True, default="", max_length=60),
),
migrations.RunPython(
backpopulate_incomplete_profiles, reverse_code=remove_incomplete_profiles
),
]
| 2.484375 | 2 |
test/unit_testing/grid/element_linear_dx_data/test_element_linearC/element/geom_element_AD.py | nwukie/ChiDG | 36 | 9616 | from __future__ import division
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
import sys
import os
import time
#
# TORCH INSTALLATION: refer to https://pytorch.org/get-started/locally/
#
def update_progress(job_title, progress):
length = 20 # modify this to change the length
block = int(round(length*progress))
msg = "\r{0}: [{1}] {2}%".format(job_title, "#"*block + "-"*(length-block), round(progress*100, 2))
if progress >= 1: msg += " DONE\r\n"
sys.stdout.write(msg)
sys.stdout.flush()
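# Example of the progress helper above (illustrative only):
#   for k in range(100):
#       update_progress("Assembling basis", (k + 1) / 100)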
def cls():
os.system('cls' if os.name=='nt' else 'clear')
cls()
################################################################################################################
# Initialize torch tensor for coordinates
coords_data = [[ 0.0 , 0.0 , 0.0 ],
[ 1.0/(2.0**0.5), 0.0 , 1.0/(2.0**0.5)],
[ 1.0/(2.0**0.5), 0.0 ,-1.0/(2.0**0.5)],
[ 2.0**0.5 , 0.0 , 0.0 ],
[ 0.0 , 1.0 , 0.0 ],
[ 1.0/(2.0**0.5), 1.0 , 1.0/(2.0**0.5)],
[ 1.0/(2.0**0.5), 1.0 ,-1.0/(2.0**0.5)],
[ 2.0**0.5 , 1.0 , 0.0 ],
]
coords = torch.tensor(coords_data,requires_grad=True,dtype=torch.float64)
nnodes_r = coords.size(0)
nnodes_ie = 8
nnodes_if = 4
nterms_s = 8
ndirs = 3
coord_sys = 'CARTESIAN'
# Define matrix of polynomial basis terms at support nodes
val_r_data = [[ 1.0,-1.0,-1.0,-1.0, 1.0, 1.0, 1.0,-1.0],
[ 1.0,-1.0,-1.0, 1.0,-1.0,-1.0, 1.0, 1.0],
[ 1.0, 1.0,-1.0,-1.0,-1.0, 1.0,-1.0, 1.0],
[ 1.0, 1.0,-1.0, 1.0, 1.0,-1.0,-1.0,-1.0],
[ 1.0,-1.0, 1.0,-1.0, 1.0,-1.0,-1.0, 1.0],
[ 1.0,-1.0, 1.0, 1.0,-1.0, 1.0,-1.0,-1.0],
[ 1.0, 1.0, 1.0,-1.0,-1.0,-1.0, 1.0,-1.0],
[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
]
val_r = torch.tensor(val_r_data,requires_grad=False,dtype=torch.float64)
# Define matrices at interpolation nodes (quadrature, level = 1)
val_i_data = [[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0, 1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0, 1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0,-1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0,-1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0,-1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0,-1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0, 1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0, 1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
]
val_i = torch.tensor(val_i_data,requires_grad=False,dtype=torch.float64)
ddxi_i_data = [[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0, 1.0/3.0],
[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0, 1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0, 1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0, 1.0/3.0],
]
ddxi_i = torch.tensor(ddxi_i_data,requires_grad=False,dtype=torch.float64)
ddeta_i_data = [[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0), 1.0/3.0],
]
ddeta_i = torch.tensor(ddeta_i_data,requires_grad=False,dtype=torch.float64)
ddzeta_i_data= [[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
]
ddzeta_i = torch.tensor(ddzeta_i_data,requires_grad=False,dtype=torch.float64)
# Define quadrature weights at the element interpolation nodes (linear element)
weights_e_data = [1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0]
weights_e = torch.tensor(weights_e_data,requires_grad=False,dtype=torch.float64)
# Define val_f for each face
# Face 1, XI_MIN
val_1_data = [[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0],
]
val_1 = torch.tensor(val_1_data,requires_grad=False,dtype=torch.float64)
# Face 2, XI_MAX
val_2_data = [[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0],
]
val_2 = torch.tensor(val_2_data,requires_grad=False,dtype=torch.float64)
# Face 3, ETA_MIN
val_3_data = [[ 1.0,-1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,-1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,-1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,-1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0),-1.0/3.0],
]
val_3 = torch.tensor(val_3_data,requires_grad=False,dtype=torch.float64)
# Face 4, ETA_MAX
val_4_data = [[ 1.0,1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0), 1.0/3.0],
]
val_4 = torch.tensor(val_4_data,requires_grad=False,dtype=torch.float64)
# Face 5, ZETA_MIN
val_5_data = [[ 1.0,-np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
]
val_5 = torch.tensor(val_5_data,requires_grad=False,dtype=torch.float64)
# Face 6, ZETA_MAX
val_6_data = [[ 1.0,-np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
]
val_6 = torch.tensor(val_6_data,requires_grad=False,dtype=torch.float64)
#--------------------------------------------------------------------
# Invert the modes-to-nodes matrix so nodal values can be mapped to modal coefficients
val_r_inv = torch.inverse(val_r)
# Compute the coordinate modes (modal coefficients)
coords_modes = torch.mm(val_r_inv,coords)
# Interpolate the coordinates at the interpolation (quadrature) nodes
interp_coords = torch.mm(val_i,coords_modes)
# Initialize the jacobian at the interpolation nodes
jacobian = torch.empty(3,3,nnodes_ie, dtype=torch.float64)
for inode in range(0,nnodes_ie):
jacobian[0,0,inode] = torch.dot(ddxi_i[inode,:] , coords_modes[:,0])
jacobian[0,1,inode] = torch.dot(ddeta_i[inode,:] , coords_modes[:,0])
jacobian[0,2,inode] = torch.dot(ddzeta_i[inode,:] , coords_modes[:,0])
jacobian[1,0,inode] = torch.dot(ddxi_i[inode,:] , coords_modes[:,1])
jacobian[1,1,inode] = torch.dot(ddeta_i[inode,:] , coords_modes[:,1])
jacobian[1,2,inode] = torch.dot(ddzeta_i[inode,:] , coords_modes[:,1])
jacobian[2,0,inode] = torch.dot(ddxi_i[inode,:] , coords_modes[:,2])
jacobian[2,1,inode] = torch.dot(ddeta_i[inode,:] , coords_modes[:,2])
jacobian[2,2,inode] = torch.dot(ddzeta_i[inode,:] , coords_modes[:,2])
update_progress("Computing Jacobian ", inode/(nnodes_ie-1))
if coord_sys == 'CYLINDRICAL':
scaling_factor = torch.mm(val_i,coords_modes[:,0])
for inode in range(0,nnodes_ie):
jacobian[1,0,inode] = jacobian[1,0,inode] * scaling_factor[inode]
jacobian[1,1,inode] = jacobian[1,1,inode] * scaling_factor[inode]
jacobian[1,2,inode] = jacobian[1,2,inode] * scaling_factor[inode]
# Metrics and determinant
metrics = torch.empty(3,3,nnodes_ie, dtype=torch.float64)
jinv = torch.empty(nnodes_ie, dtype=torch.float64)
for inode in range(0,nnodes_ie):
ijacobian = torch.empty(3,3, dtype=torch.float64)
imetric = torch.empty(3,3, dtype=torch.float64)
for irow in range(0,3):
for icol in range(0,3):
ijacobian[irow,icol] = jacobian[irow,icol,inode]
    # Compute the jacobian determinant and the metric (inverse jacobian) for the ith node
update_progress("Computing Jinv and Metric ", inode/(nnodes_ie-1))
jinv[inode] = torch.det(ijacobian)
imetric = torch.inverse(ijacobian)
for irow in range(0,3):
for icol in range(0,3):
metrics[irow,icol,inode] = imetric[irow,icol]
# Compute inverse Mass matrix
invmass = torch.empty(nterms_s,nterms_s,nnodes_ie, dtype=torch.float64)
mass = torch.empty(nterms_s,nterms_s,nnodes_ie, dtype=torch.float64)
val_tmp  = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
i = 1
for iterm in range(0,nterms_s):
for inode in range(0,nnodes_ie):
val_tmp[inode,iterm] = val_i[inode,iterm] * weights_e[inode] * jinv[inode]
update_progress("Computing invmass ", i/(nterms_s*nnodes_ie))
i += 1
mass = torch.mm(torch.t(val_tmp),val_i)
invmass = torch.inverse(mass)
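# The two lines above build mass = val_tmp^T @ val_i with
# val_tmp[inode,iterm] = val_i[inode,iterm] * weights_e[inode] * jinv[inode],
# i.e. the Galerkin mass matrix M_ij = sum_n w_n |J_n| phi_i(x_n) phi_j(x_n)
# on the mapped element, and invmass is its inverse.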
# Compute BR2_VOL for each face
br2_vol_face1 = torch.mm(val_i,torch.mm(invmass,torch.t(val_1)))
br2_vol_face2 = torch.mm(val_i,torch.mm(invmass,torch.t(val_2)))
br2_vol_face3 = torch.mm(val_i,torch.mm(invmass,torch.t(val_3)))
br2_vol_face4 = torch.mm(val_i,torch.mm(invmass,torch.t(val_4)))
br2_vol_face5 = torch.mm(val_i,torch.mm(invmass,torch.t(val_5)))
br2_vol_face6 = torch.mm(val_i,torch.mm(invmass,torch.t(val_6)))
update_progress("Computing br2_vol ", 1)
# Compute BR2_FACE for each face
br2_face_face1 = torch.mm(val_1,torch.mm(invmass,torch.t(val_1)))
br2_face_face2 = torch.mm(val_2,torch.mm(invmass,torch.t(val_2)))
br2_face_face3 = torch.mm(val_3,torch.mm(invmass,torch.t(val_3)))
br2_face_face4 = torch.mm(val_4,torch.mm(invmass,torch.t(val_4)))
br2_face_face5 = torch.mm(val_5,torch.mm(invmass,torch.t(val_5)))
br2_face_face6 = torch.mm(val_6,torch.mm(invmass,torch.t(val_6)))
update_progress("Computing br2_face ", 1)
# Grad1, Grad2, and Grad3
grad1 = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
grad2 = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
grad3 = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
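# The loop below applies the chain rule to convert reference-space derivatives
# into physical-space derivative operators at each interpolation node:
#   grad1 = d/dx, grad2 = d/dy, grad3 = d/dz, with
#   gradk[inode,iterm] = sum_j metrics[j,k-1,inode] * d(basis_iterm)/d(ref_j)
# where ref = (xi, eta, zeta) and metrics is the inverse jacobian.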
i = 1
for iterm in range(0,nterms_s):
for inode in range(0,nnodes_ie):
grad1[inode,iterm] = metrics[0,0,inode] * ddxi_i[inode,iterm] + metrics[1,0,inode] * ddeta_i[inode,iterm] + metrics[2,0,inode] * ddzeta_i[inode,iterm]
grad2[inode,iterm] = metrics[0,1,inode] * ddxi_i[inode,iterm] + metrics[1,1,inode] * ddeta_i[inode,iterm] + metrics[2,1,inode] * ddzeta_i[inode,iterm]
grad3[inode,iterm] = metrics[0,2,inode] * ddxi_i[inode,iterm] + metrics[1,2,inode] * ddeta_i[inode,iterm] + metrics[2,2,inode] * ddzeta_i[inode,iterm]
update_progress("Computing grad1, grad2, grad3 ", i/(nnodes_ie*nterms_s))
i += 1
#WRITE_____________________
#
# Metrics
#
f = open("metrics.txt","w")
i = 1
for inode in range (0,nnodes_ie):
f.write("Metric interpolation node %d \n" % (inode+1))
array = np.zeros([3, 3])
for irow in range(0,3):
for icol in range(0,3):
array[irow,icol] = metrics[irow,icol,inode].item()
update_progress("Writing metrics to file ", i/(nnodes_ie*9))
i += 1
np.savetxt(f,array)
f.close()
#
# jinv
#
f = open("jinv.txt","w")
array = np.zeros([1])
i = 1
for inode in range (0,nnodes_ie):
f.write("Jinv interpolation node %d \n" % (inode+1))
array[0] = jinv[inode].item()
np.savetxt(f,array)
update_progress("Writing jinv to file ", i/(nnodes_ie))
i += 1
f.close()
#
# Grad1
#
f = open("grad1.txt","w")
f.write("Grad1 \n")
array = np.zeros([nnodes_ie,nterms_s])
i = 1
for inode in range (0,nnodes_ie):
for iterm in range(0,nterms_s):
array[inode,iterm] = grad1[inode,iterm].item()
update_progress("Writing grad1 to file ", i/(nnodes_ie*nterms_s))
i += 1
np.savetxt(f,array)
f.close()
#
# Grad2
#
f = open("grad2.txt","w")
f.write("Grad2 \n")
array = np.zeros([nnodes_ie,nterms_s])
i = 1
for inode in range (0,nnodes_ie):
for iterm in range(0,nterms_s):
array[inode,iterm] = grad2[inode,iterm].item()
update_progress("Writing grad2 to file ", i/(nnodes_ie*nterms_s))
i += 1
np.savetxt(f,array)
f.close()
#
# Grad3
#
f = open("grad3.txt","w")
f.write("Grad3 \n")
array = np.zeros([nnodes_ie,nterms_s])
i = 1
for inode in range (0,nnodes_ie):
for iterm in range(0,nterms_s):
array[inode,iterm] = grad3[inode,iterm].item()
update_progress("Writing grad3 to file ", i/(nnodes_ie*nterms_s))
i += 1
np.savetxt(f,array)
f.close()
#
# dmetric_dx
#
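# Pattern used by this and all following d*_dx sections: each scalar quantity of
# interest is back-propagated to the coords tensor with data.backward(retain_graph=True);
# coords.grad then holds d(quantity)/d(coords) for every support node and direction,
# the wanted component is copied into the output array, and the gradient buffer is
# zeroed before the next backward call. retain_graph=True keeps the computation graph
# alive so it can be reused for repeated backward passes.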
f = open("dmetric_dx.txt","w")
i = 1
for inode in range (0,nnodes_ie):
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
array = np.zeros([3,3])
f.write("dmetric_dx interpolation node %s, diff_node %s, diff_dir %s \n" % (inode+1,inode_diff+1,idir+1))
            for irow in range(0,3):
                for icol in range(0,3):
                    data = metrics[irow,icol,inode]
                    data.backward(retain_graph=True)
                    ddata = coords.grad
                    ddata_np = ddata.numpy()
                    array[irow,icol] = ddata_np[inode_diff,idir]
                    update_progress("Writing dmetric_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*3*3))
                    # Zero the gradient buffer after every backward call so derivatives do not accumulate
                    dummy = coords.grad.data.zero_()
                    i += 1
            np.savetxt(f,array)
f.close()
#
# interp_coords_dx
#
f = open("dinterp_xcoords_dx.txt","w")
i = 1
f.write("xcoord interpolation, coord 1, row=node, col=nnodes_r*dir \n")
array = np.zeros([nnodes_ie,nnodes_r*ndirs])
for inode in range (0,nnodes_ie):
data = interp_coords[inode,0]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
if idir == 0:
index = inode_diff
elif idir == 1:
index = nnodes_r + inode_diff
elif idir == 2:
index = 2*nnodes_r + inode_diff
array[inode,index] = ddata_np[inode_diff,idir]
update_progress("Writing interp_xcoords_dx to file ", i/(nnodes_ie*nnodes_r*3))
i += 1
    # Zero the gradient buffer so derivatives do not accumulate across backward calls
dummy = coords.grad.data.zero_()
np.savetxt(f,array)
f.close()
f = open("dinterp_ycoords_dx.txt","w")
i = 1
f.write("ycoord interpolation, coord 2, row=node, col=nnodes_r*dir \n")
array = np.zeros([nnodes_ie,nnodes_r*ndirs])
for inode in range (0,nnodes_ie):
data = interp_coords[inode,1]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
if idir == 0:
index = inode_diff
elif idir == 1:
index = nnodes_r + inode_diff
elif idir == 2:
index = 2*nnodes_r + inode_diff
array[inode,index] = ddata_np[inode_diff,idir]
update_progress("Writing interp_ycoords_dx to file ", i/(nnodes_ie*nnodes_r*3))
i += 1
    # Zero the gradient buffer so derivatives do not accumulate across backward calls
dummy = coords.grad.data.zero_()
np.savetxt(f,array)
f.close()
f = open("dinterp_zcoords_dx.txt","w")
i = 1
f.write("zcoord interpolation, coord 3, row=node, col=nnodes_r*dir \n")
array = np.zeros([nnodes_ie,nnodes_r*ndirs])
for inode in range (0,nnodes_ie):
data = interp_coords[inode,2]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
if idir == 0:
index = inode_diff
elif idir == 1:
index = nnodes_r + inode_diff
elif idir == 2:
index = 2*nnodes_r + inode_diff
array[inode,index] = ddata_np[inode_diff,idir]
update_progress("Writing interp_zcoords_dx to file ", i/(nnodes_ie*nnodes_r*3))
i += 1
    # Zero the gradient buffer so derivatives do not accumulate across backward calls
dummy = coords.grad.data.zero_()
np.savetxt(f,array)
f.close()
#
# djinv_dx
#
f = open("djinv_dx.txt","w")
i = 1
for inode in range (0,nnodes_ie):
array = np.zeros([nnodes_r,ndirs])
f.write("djinv_dx interpolation node %s, row=inode_diff, col=dir \n" % (inode+1))
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
data = jinv[inode]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[inode_diff,idir] = ddata_np[inode_diff,idir]
update_progress("Writing djinv_dx to file ", i/(nnodes_ie*nnodes_r*ndirs))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dmass_dx
#
f = open("dmass_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dmass_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nterms_s,nterms_s])
for irow in range(0,nterms_s):
for icol in range(0,nterms_s):
data = mass[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dmass_dx to file ", i/(nterms_s*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dinvmass_dx
#
f = open("dinvmass_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dinvmass_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nterms_s,nterms_s])
for irow in range(0,nterms_s):
for icol in range(0,nterms_s):
data = invmass[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dinvmass_dx to file ", i/(nterms_s*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dbr2_vol_dx
#
#
f = open("dbr2_vol_face1_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face1_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face1[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face1_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face2_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face2_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face2[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face2_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face3_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face3_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face3[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face3_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face4_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face4_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face4[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face4_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face5_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face5_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face5[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face5_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face6_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face6_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face6[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face6_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dbr2_face_dx
#
#
f = open("dbr2_face_face1_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face1_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face1[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face1_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face2_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face2_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face2[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face2_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face3_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face3_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face3[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face3_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face4_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face4_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face4[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face4_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face5_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face5_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face5[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face5_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face6_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face6_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face6[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face6_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dgrad1_dx
#
f = open("dgrad1_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dgrad1_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nterms_s])
for irow in range(0,nnodes_ie):
for icol in range(0,nterms_s):
data = grad1[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dgrad1_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dgrad2_dx
#
f = open("dgrad2_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dgrad2_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nterms_s])
for irow in range(0,nnodes_ie):
for icol in range(0,nterms_s):
data = grad2[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dgrad2_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dgrad3_dx
#
f = open("dgrad3_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dgrad3_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nterms_s])
for irow in range(0,nnodes_ie):
for icol in range(0,nterms_s):
data = grad3[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dgrad3_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
| 2.453125 | 2 |
osr_stat_generator/generator.py | brian-thomas/osr_stat_generator | 0 | 9617 |
"""
OSR (LOTFP) stat generator.
"""
import random
def d(num_sides):
"""
Represents rolling a die of size 'num_sides'.
Returns random number from that size die
"""
return random.randint(1, num_sides)
def xdy(num_dice, num_sides):
""" represents rolling num_dice of size num_sides.
Returns random number from that many dice being 'rolled'.
"""
return sum(d(num_sides) for i in range(num_dice))
class LotFP_Stat (object):
    @staticmethod
    def _get_bonus(attribute):
if attribute <= 3:
return -3
if attribute >= 4 and attribute <= 5:
return -2
if attribute >= 6 and attribute <= 8:
return -1
if attribute >= 13 and attribute <= 15:
return 1
if attribute >= 16 and attribute <= 17:
return 2
if attribute >= 18:
return 3
# the default
return 0
@property
def bonus(self): return self._bonus
@property
def name(self): return self._name
@property
def value(self): return self._value
def __str__(self):
return (f"%s : %s(%s)" % (self.name, self.value, self.bonus))
def __init__(self, name, value):
self._name = name
self._value = value
self._bonus = LotFP_Stat._get_bonus(value)
class Stat_Set(object):
"""
Define a package of OSR/DnD stats
"""
_Stat_Name = ["CON", "DEX", "INT", "WIS", "STR", "CHA"]
@property
def stats(self)->list:
return self._stats
def sum(self)->int:
# get a summed value for all stats in this set
ssum = 0
for s in self.stats:
ssum += s.value
return ssum
@property
def is_hopeless(self)->bool:
""" Determine if the character is so poorly stated they have
bonus sum less than 1.
"""
bonuses = [s.bonus for s in self._stats]
if sum(bonuses) < 1:
return True
return False
def __str__(self)->str:
string = ""
        for stat in self.stats:
string = string + " " + str(stat.value) + " ("+str(stat.bonus) + ")"
return string
def __init__(self, stats):
self._stats = []
for i in range(0,len(stats)):
self._stats.append(LotFP_Stat(Stat_Set._Stat_Name[i], stats[i]))
def generate_stats (nrof_sets:int=1, no_hopeless_char:bool=True)->list:
""" Generate stats for a character.
"""
stat_sets = []
while (nrof_sets > 0):
stats = []
for i in range (0, 6):
stats.append(xdy(3,6))
stat_set = Stat_Set(stats)
# no "hopeless" characters
if no_hopeless_char and stat_set.is_hopeless:
continue
stat_sets.append(stat_set)
nrof_sets -= 1
return stat_sets
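# Illustrative driver, not part of the original module: roll a few stat sets and
# print each attribute with its bonus (uses only the helpers defined above).
if __name__ == "__main__":
    for stat_set in generate_stats(nrof_sets=3):
        line = ", ".join(str(stat) for stat in stat_set.stats)
        print("%s | sum = %d" % (line, stat_set.sum()))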
| 3.515625 | 4 |
cohesity_management_sdk/models/health_tile.py | nick6655/management-sdk-python | 18 | 9618 | # -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
import cohesity_management_sdk.models.alert
class HealthTile(object):
"""Implementation of the 'HealthTile' model.
Health for Dashboard.
Attributes:
capacity_bytes (long|int): Raw Cluster Capacity in Bytes. This is not
usable capacity and does not take replication factor into
account.
cluster_cloud_usage_bytes (long|int): Usage in Bytes on the cloud.
last_day_alerts (list of Alert): Alerts in last 24 hours.
last_day_num_criticals (long|int): Number of Critical Alerts.
last_day_num_warnings (long|int): Number of Warning Alerts.
num_nodes (int): Number of nodes in the cluster.
num_nodes_with_issues (int): Number of nodes in the cluster that are
unhealthy.
percent_full (float): Percent the cluster is full.
raw_used_bytes (long|int): Raw Bytes used in the cluster.
"""
# Create a mapping from Model property names to API property names
_names = {
"capacity_bytes":'capacityBytes',
"cluster_cloud_usage_bytes":'clusterCloudUsageBytes',
"last_day_alerts":'lastDayAlerts',
"last_day_num_criticals":'lastDayNumCriticals',
"last_day_num_warnings":'lastDayNumWarnings',
"num_nodes":'numNodes',
"num_nodes_with_issues":'numNodesWithIssues',
"percent_full":'percentFull',
"raw_used_bytes":'rawUsedBytes'
}
def __init__(self,
capacity_bytes=None,
cluster_cloud_usage_bytes=None,
last_day_alerts=None,
last_day_num_criticals=None,
last_day_num_warnings=None,
num_nodes=None,
num_nodes_with_issues=None,
percent_full=None,
raw_used_bytes=None):
"""Constructor for the HealthTile class"""
# Initialize members of the class
self.capacity_bytes = capacity_bytes
self.cluster_cloud_usage_bytes = cluster_cloud_usage_bytes
self.last_day_alerts = last_day_alerts
self.last_day_num_criticals = last_day_num_criticals
self.last_day_num_warnings = last_day_num_warnings
self.num_nodes = num_nodes
self.num_nodes_with_issues = num_nodes_with_issues
self.percent_full = percent_full
self.raw_used_bytes = raw_used_bytes
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
capacity_bytes = dictionary.get('capacityBytes')
cluster_cloud_usage_bytes = dictionary.get('clusterCloudUsageBytes')
last_day_alerts = None
if dictionary.get('lastDayAlerts') != None:
last_day_alerts = list()
for structure in dictionary.get('lastDayAlerts'):
last_day_alerts.append(cohesity_management_sdk.models.alert.Alert.from_dictionary(structure))
last_day_num_criticals = dictionary.get('lastDayNumCriticals')
last_day_num_warnings = dictionary.get('lastDayNumWarnings')
num_nodes = dictionary.get('numNodes')
num_nodes_with_issues = dictionary.get('numNodesWithIssues')
percent_full = dictionary.get('percentFull')
raw_used_bytes = dictionary.get('rawUsedBytes')
# Return an object of this model
return cls(capacity_bytes,
cluster_cloud_usage_bytes,
last_day_alerts,
last_day_num_criticals,
last_day_num_warnings,
num_nodes,
num_nodes_with_issues,
percent_full,
raw_used_bytes)
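# Illustrative usage, not part of the generated SDK file (field values are made up):
#   tile = HealthTile.from_dictionary({'capacityBytes': 10**12, 'numNodes': 4,
#                                      'percentFull': 37.5, 'lastDayAlerts': []})
#   print(tile.num_nodes, tile.percent_full)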
| 2.296875 | 2 |
TextRank/textrank.py | nihanjali/PageRank | 0 | 9619 | <reponame>nihanjali/PageRank<gh_stars>0
import os
import sys
import copy
import collections
import nltk
import nltk.tokenize
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import pagerank
'''
textrank.py
-----------
This module implements TextRank, an unsupervised keyword
significance scoring algorithm. TextRank builds a weighted
graph representation of a document using words as nodes
and cooccurrence frequencies between pairs of words as edge
weights. It then applies PageRank to this graph, and
treats the PageRank score of each word as its significance.
The original research paper proposing this algorithm is
available here:
https://web.eecs.umich.edu/~mihalcea/papers/mihalcea.emnlp04.pdf
'''
## TextRank #####################################################################################
def __preprocessDocument(document, relevantPosTags):
'''
This function accepts a string representation
of a document as input, and returns a tokenized
list of words corresponding to that document.
'''
words = __tokenizeWords(document)
posTags = __tagPartsOfSpeech(words)
# Filter out words with irrelevant POS tags
filteredWords = []
for index, word in enumerate(words):
word = word.lower()
tag = posTags[index]
if not __isPunctuation(word) and tag in relevantPosTags:
filteredWords.append(word)
return filteredWords
def textrank(document, windowSize=2, rsp=0.15, relevantPosTags=["NN", "ADJ"]):
'''
This function accepts a string representation
    of a document and three hyperparameters as input.
    It returns a Pandas data structure (which can be treated
    as a dictionary) that maps words in the
document to their associated TextRank significance
scores. Note that only words that are classified
as having relevant POS tags are present in the
map.
'''
# Tokenize document:
words = __preprocessDocument(document, relevantPosTags)
# Build a weighted graph where nodes are words and
# edge weights are the number of times words cooccur
# within a window of predetermined size. In doing so
    # we double count each cooccurrence, but that will not
# alter relative weights which ultimately determine
# TextRank scores.
edgeWeights = collections.defaultdict(lambda: collections.Counter())
for index, word in enumerate(words):
for otherIndex in range(index - windowSize, index + windowSize + 1):
if otherIndex >= 0 and otherIndex < len(words) and otherIndex != index:
otherWord = words[otherIndex]
edgeWeights[word][otherWord] += 1.0
# Apply PageRank to the weighted graph:
wordProbabilities = pagerank.powerIteration(edgeWeights, rsp=rsp)
    wordProbabilities = wordProbabilities.sort_values(ascending=False)
return wordProbabilities
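# Minimal usage sketch (assumes the sibling pagerank module imported above is on the
# path and that powerIteration returns a pandas Series, as the sort_values call implies):
#   scores = textrank("Compatibility of systems of linear constraints over the set of natural numbers.")
#   print(scores.head(10))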
## NLP utilities ################################################################################
def __asciiOnly(string):
return "".join([char if ord(char) < 128 else "" for char in string])
def __isPunctuation(word):
return word in [".", "?", "!", ",", "\"", ":", ";", "'", "-"]
def __tagPartsOfSpeech(words):
return [pair[1] for pair in nltk.pos_tag(words)]
def __tokenizeWords(sentence):
return nltk.tokenize.word_tokenize(sentence)
## tests ########################################################################################
def applyTextRank(fileName, title="a document"):
    print()
    print("Reading \"%s\" ..." % title)
    filePath = os.path.join(os.path.dirname(__file__), fileName)
    document = open(filePath).read()
    document = __asciiOnly(document)
    print("Applying TextRank to \"%s\" ..." % title)
    keywordScores = textrank(document)
    print()
    header = "Keyword Significance Scores for \"%s\":" % title
    print(header)
    print("-" * len(header))
    print(keywordScores)
    print()
def main():
applyTextRank("Cinderalla.txt", "Cinderalla")
applyTextRank("Beauty_and_the_Beast.txt", "Beauty and the Beast")
applyTextRank("Rapunzel.txt", "Rapunzel")
if __name__ == "__main__":
main()
| 3.078125 | 3 |
tests/test_exploration.py | lionelkusch/neurolib | 0 | 9620 | import logging
import os
import random
import string
import time
import unittest
import neurolib.utils.paths as paths
import neurolib.utils.pypetUtils as pu
import numpy as np
import pytest
import xarray as xr
from neurolib.models.aln import ALNModel
from neurolib.models.fhn import FHNModel
from neurolib.models.multimodel import MultiModel
from neurolib.models.multimodel.builder.fitzhugh_nagumo import FitzHughNagumoNetwork
from neurolib.optimize.exploration import BoxSearch
from neurolib.utils.loadData import Dataset
from neurolib.utils.parameterSpace import ParameterSpace
def randomString(stringLength=10):
"""Generate a random string of fixed length"""
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(stringLength))
class TestBoxSearch(unittest.TestCase):
"""
Basic tests.
"""
def test_assertions(self):
parameters = ParameterSpace(
{"mue_ext_mean": np.linspace(0, 3, 2), "mui_ext_mean": np.linspace(0, 3, 2)}, kind="sequence"
)
with pytest.raises(AssertionError):
_ = BoxSearch(model=None, parameterSpace=parameters)
with pytest.raises(AssertionError):
_ = BoxSearch(model=None, parameterSpace=None)
with pytest.raises(AssertionError):
_ = BoxSearch(model=None, parameterSpace=parameters, evalFunction=None)
def test_fillin_default_parameters_for_sequential(self):
in_dict = {"a": [None, None, 1, 2], "b": [4, 5, None, None]}
SHOULD_BE = {"a": [0, 0, 1, 2], "b": [4, 5, 12, 12]}
model_params = {"a": 0, "b": 12}
parameters = ParameterSpace({"mue_ext_mean": [1.0, 2.0]})
search = BoxSearch(model=ALNModel(), parameterSpace=parameters)
out_dict = search._fillin_default_parameters_for_sequential(in_dict, model_params)
self.assertDictEqual(out_dict, SHOULD_BE)
class TestExplorationSingleNode(unittest.TestCase):
"""
ALN single node exploration.
"""
def test_single_node(self):
start = time.time()
model = ALNModel()
parameters = ParameterSpace({"mue_ext_mean": np.linspace(0, 3, 2), "mui_ext_mean": np.linspace(0, 3, 2)})
search = BoxSearch(model, parameters, filename="test_single_nodes.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertFalse(dataarray.attrs)
for i in search.dfResults.index:
search.dfResults.loc[i, "max_r"] = np.max(
search.results[i]["rates_exc"][:, -int(1000 / model.params["dt"]) :]
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
class TestExplorationBrainNetwork(unittest.TestCase):
"""
FHN brain network simulation with BOLD simulation.
"""
def test_fhn_brain_network_exploration(self):
ds = Dataset("hcp")
model = FHNModel(Cmat=ds.Cmat, Dmat=ds.Dmat)
model.params.duration = 10 * 1000 # ms
model.params.dt = 0.2
model.params.bold = True
parameters = ParameterSpace(
{
"x_ext": [np.ones((model.params["N"],)) * a for a in np.linspace(0, 2, 2)],
"K_gl": np.linspace(0, 2, 2),
"coupling": ["additive", "diffusive"],
},
kind="grid",
)
search = BoxSearch(model=model, parameterSpace=parameters, filename="test_fhn_brain_network_exploration.hdf")
search.run(chunkwise=True, bold=True)
pu.getTrajectorynamesInFile(os.path.join(paths.HDF_DIR, "test_fhn_brain_network_exploration.hdf"))
search.loadDfResults()
search.getRun(0, pypetShortNames=True)
search.getRun(0, pypetShortNames=False)
search.loadResults()
# firing rate xr
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertFalse(dataarray.attrs)
# bold xr
dataarray = search.xr(bold=True)
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertFalse(dataarray.attrs)
search.info()
class TestExplorationBrainNetworkPostprocessing(unittest.TestCase):
"""
ALN brain network simulation with custom evaluation function.
"""
@classmethod
def setUpClass(cls):
# def test_brain_network_postprocessing(self):
ds = Dataset("hcp")
model = ALNModel(Cmat=ds.Cmat, Dmat=ds.Dmat)
# Resting state fits
model.params["mue_ext_mean"] = 1.57
model.params["mui_ext_mean"] = 1.6
model.params["sigma_ou"] = 0.09
model.params["b"] = 5.0
model.params["signalV"] = 2
model.params["dt"] = 0.2
model.params["duration"] = 0.2 * 60 * 1000
# multi stage evaluation function
def evaluateSimulation(traj):
model = search.getModelFromTraj(traj)
model.randomICs()
model.params["dt"] = 0.2
model.params["duration"] = 4 * 1000.0
model.run(bold=True)
result_dict = {"outputs": model.outputs}
search.saveToPypet(result_dict, traj)
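            # A custom evaluation function like this one receives the pypet trajectory
            # for a single run: it rebuilds the model with that run's parameters via
            # getModelFromTraj(), simulates it, and stores whatever should be kept with
            # saveToPypet().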
# define and run exploration
parameters = ParameterSpace({"mue_ext_mean": np.linspace(0, 3, 2), "mui_ext_mean": np.linspace(0, 3, 2)})
search = BoxSearch(
evalFunction=evaluateSimulation,
model=model,
parameterSpace=parameters,
filename=f"test_brain_postprocessing_{randomString(20)}.hdf",
)
search.run()
cls.model = model
cls.search = search
cls.ds = ds
def test_getRun(self):
self.search.getRun(0)
def test_loadResults(self):
self.search.loadResults()
def test_loadResults_all_False(self):
self.search.loadResults(all=False)
class TestCustomParameterExploration(unittest.TestCase):
"""Exploration with custom function"""
def test_circle_exploration(self):
def explore_me(traj):
pars = search.getParametersFromTraj(traj)
# let's calculate the distance to a circle
computation_result = abs((pars["x"] ** 2 + pars["y"] ** 2) - 1)
result_dict = {"scalar_result": computation_result, "list_result": [1, 2, 3, 4], "array_result": np.ones(3)}
search.saveToPypet(result_dict, traj)
parameters = ParameterSpace({"x": np.linspace(-2, 2, 2), "y": np.linspace(-2, 2, 2)})
search = BoxSearch(evalFunction=explore_me, parameterSpace=parameters, filename="test_circle_exploration.hdf")
search.run()
search.loadResults(pypetShortNames=False)
# call the result dataframe
search.dfResults
# test integrity of dataframe
for i in search.dfResults.index:
self.assertEqual(search.dfResults.loc[i, "scalar_result"], search.results[i]["scalar_result"])
self.assertListEqual(search.dfResults.loc[i, "list_result"], search.results[i]["list_result"])
np.testing.assert_array_equal(search.dfResults.loc[i, "array_result"], search.results[i]["array_result"])
class TestExplorationMultiModel(unittest.TestCase):
"""
MultiModel exploration test - uses FHN network.
"""
def test_multimodel_explore(self):
start = time.time()
DELAY = 13.0
fhn_net = FitzHughNagumoNetwork(np.random.rand(2, 2), np.array([[0.0, DELAY], [DELAY, 0.0]]))
model = MultiModel(fhn_net)
parameters = ParameterSpace({"*input*sigma": [0.0, 0.05], "*epsilon*": [0.5, 0.6]}, allow_star_notation=True)
search = BoxSearch(model, parameters, filename="test_multimodel.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertTrue(isinstance(dataarray.attrs, dict))
self.assertListEqual(
list(dataarray.attrs.keys()),
[k.replace("*", "_").replace(".", "_").replace("|", "_") for k in parameters.dict().keys()],
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
class TestExplorationMultiModelSequential(unittest.TestCase):
"""
MultiModel exploration test with sequential exploration - uses FHN network.
"""
def test_multimodel_explore(self):
start = time.time()
DELAY = 13.0
fhn_net = FitzHughNagumoNetwork(np.random.rand(2, 2), np.array([[0.0, DELAY], [DELAY, 0.0]]))
model = MultiModel(fhn_net)
parameters = ParameterSpace(
{"*input*sigma": [0.0, 0.05], "*epsilon*": [0.5, 0.6, 0.7]}, allow_star_notation=True, kind="sequence"
)
search = BoxSearch(model, parameters, filename="test_multimodel.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertTrue("run_no" in dataarray.dims)
self.assertEqual(len(dataarray["run_no"]), 5)
self.assertTrue(isinstance(dataarray.attrs, dict))
self.assertListEqual(
list(dataarray.attrs.keys()),
[k.replace("*", "_").replace(".", "_").replace("|", "_") for k in parameters.dict().keys()],
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
class TestExplorationSingleNodeSequential(unittest.TestCase):
"""
ALN single node test with sequential exploration.
"""
def test_single_node(self):
start = time.time()
model = ALNModel()
parameters = ParameterSpace({"mue_ext_mean": [0.0, 1.5, 3.0], "mui_ext_mean": [1.5, 3.0]}, kind="sequence")
search = BoxSearch(model, parameters, filename="test_single_nodes.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertTrue("run_no" in dataarray.dims)
self.assertEqual(len(dataarray["run_no"]), 5)
self.assertFalse(dataarray.attrs)
for i in search.dfResults.index:
search.dfResults.loc[i, "max_r"] = np.max(
search.results[i]["rates_exc"][:, -int(1000 / model.params["dt"]) :]
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
if __name__ == "__main__":
unittest.main()
| 2.1875 | 2 |
irc3/tags.py | belst/irc3 | 0 | 9621 | <reponame>belst/irc3
# -*- coding: utf-8 -*-
'''
Module offering 2 functions, encode() and decode(), to transcode between
IRCv3.2 tags and python dictionaries.
'''
import re
import random
import string
_escapes = (
("\\", "\\\\"),
(";", r"\:"),
(" ", r"\s"),
("\r", r"\r"),
("\n", r"\n"),
)
# make the possibility of the substitute actually appearing in the text
# negligible. Even for targeted attacks
_substitute = (";TEMP:%s;" %
''.join(random.choice(string.ascii_letters) for i in range(20)))
_unescapes = (
("\\\\", _substitute),
(r"\:", ";"),
(r"\s", " "),
(r"\r", "\r"),
(r"\n", "\n"),
(_substitute, "\\"),
)
# valid tag-keys must consist of alphanumerics and hyphens only.
# for vendor-tagnames: TLD with slash appended
_valid_key = re.compile(r"^([\w.-]+/)?[\w-]+$")
# valid escaped tag-values must not contain
# NUL, CR, LF, semicolons or spaces
_valid_escaped_value = re.compile("^[^ ;\n\r\0]*$")
def _unescape(string):
for a, b in _unescapes:
string = string.replace(a, b)
return string
def _escape(string):
for a, b in _escapes:
string = string.replace(a, b)
return string
def encode(tags):
'''Encodes a dictionary of tags to fit into an IRC-message.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from collections import OrderedDict
>>> encode({'key': 'value'})
'key=value'
>>> d = {'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> encode(d_ordered)
'aaa=bbb;ccc;example.com/ddd=eee'
>>> d = {'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> print(encode(d_ordered))
key=value\\:with\\sspecial\\\characters;key2=with=equals
>>> print(encode({'key': r'\\something'}))
key=\\\\something
'''
tagstrings = []
for key, value in tags.items():
if not _valid_key.match(key):
raise ValueError("dictionary key is invalid as tag key: " + key)
# if no value, just append the key
if value:
tagstrings.append(key + "=" + _escape(value))
else:
tagstrings.append(key)
return ";".join(tagstrings)
def decode(tagstring):
'''Decodes a tag-string from an IRC-message into a python dictionary.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from pprint import pprint
>>> pprint(decode('key=value'))
{'key': 'value'}
>>> pprint(decode('aaa=bbb;ccc;example.com/ddd=eee'))
{'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> s = r'key=value\\:with\\sspecial\\\\characters;key2=with=equals'
>>> pprint(decode(s))
{'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> print(decode(s)['key'])
value;with special\\characters
>>> print(decode(r'key=\\\\something')['key'])
\\something
'''
if not tagstring:
# None/empty = no tags
return {}
tags = {}
for tag in tagstring.split(";"):
# value is either everything after "=", or None
key, value = (tag.split("=", 1) + [None])[:2]
if not _valid_key.match(key):
raise ValueError("invalid tag key: " + key)
if value:
if not _valid_escaped_value.match(value):
raise ValueError("invalid escaped tag value: " + value)
value = _unescape(value)
tags[key] = value
return tags
| 2.890625 | 3 |
app/forms.py | Rahmatullina/FinalYearProject | 0 | 9622 | from django import forms
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
# from .models import RegionModel
# from .models import SERVICE_CHOICES, REGION_CHOICES
from django.contrib.auth import authenticate
# from django.contrib.auth.forms import UserCreationForm, UserChangeForm
# from .models import CustomUser
class LoginForm(forms.Form):
username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}), max_length=100)
password = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control','type':'password'}),max_length=100)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
user = authenticate(username=username, password=password)
if not user or not user.is_active:
raise forms.ValidationError("Sorry, that login was invalid or user is inactive. Please try again.")
return self.cleaned_data
def login(self, request):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
user = authenticate(username=username, password=password)
return user
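    # Illustrative view-side usage (the surrounding view code is an assumption, not
    # part of this file):
    #   form = LoginForm(request.POST)
    #   if form.is_valid():
    #       user = form.login(request)
    #       if user:
    #           django.contrib.auth.login(request, user)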
# class PassResetForm(PasswordResetForm):
# email = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Enter email',
# 'type':'email'}), max_length=100)
#
#
# class PassResetConfirmForm(SetPasswordForm):
# new_password1 = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control',
# 'placeholder':'Enter new password',
# 'type':'password'}), max_length=100)
# new_password2 = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control',
# 'placeholder': 'Enter new password again',
# 'type': 'password'}), max_length=100)
# class CustomUserCreationForm(UserCreationForm):
#
# class Meta(UserCreationForm):
# model = CustomUser
# fields = UserCreationForm.Meta.fields + ('region_name',)
#
#
# class CustomUserChangeForm(UserChangeForm):
# email = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}), max_length=100)
# username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}), max_length=254)
#
# class Meta:
# model = CustomUser
# fields = ('email','username')
| 2.453125 | 2 |
src/fedavg_trainer.py | MrZhang1994/mobile-federated-learning | 0 | 9623 | # newly added libraries
import copy
import wandb
import time
import math
import csv
import shutil
from tqdm import tqdm
import torch
import numpy as np
import pandas as pd
from client import Client
from config import *
import scheduler as sch
class FedAvgTrainer(object):
def __init__(self, dataset, model, device, args):
self.device = device
self.args = args
[client_num, _, _, train_data_global, _, train_data_local_num_dict, train_data_local_dict, test_data_local_dict, class_num] = dataset
# record the client number of the dataset
self.client_num = client_num
self.class_num = class_num
# setup dataset
self.data_shape = list(train_data_global[0][0].size())
self.train_data_local_num_dict = train_data_local_num_dict
self.test_data_local_dict = test_data_local_dict
self.train_data_local_dict = train_data_local_dict
if args.partition_method == "noniid":
logger.info("-----------non-i.i.d transform----------")
# generate the non i.i.d dataset
self.gene_non_iid_dataset(train_data_global, "tmp")
# read the non i.i.d dataset
self.read_non_iid_dataset("tmp")
# rm the tmp directory
shutil.rmtree(os.path.join('.', 'tmp'))
self.client_list = []
self.setup_clients(train_data_local_num_dict, train_data_local_dict, test_data_local_dict)
# initialize the recorder of invalid dataset
self.invalid_datasets = dict()
# time counter starts from the first line
self.time_counter = channel_data['Time'][0]
# initialize the cycle_num here
self.cycle_num = 0
# initialize the scheduler function
if self.args.method == "sch_pn_method_1" or self.args.method == "sch_pn_method_1_empty":
for _ in range(100):
self.scheduler = sch.Scheduler_PN_method_1()
client_indexes, _ = self.scheduler.sch_pn_test(1, 2002)
if len(client_indexes) > 5:
break
elif self.args.method == "sch_pn_method_2" or self.args.method == "sch_pn_method_2_empty":
for _ in range(100):
self.scheduler = sch.Scheduler_PN_method_2()
client_indexes, _ = self.scheduler.sch_pn_test(1, 2002)
if len(client_indexes) > 5:
break
elif self.args.method == "sch_pn_method_3" or self.args.method == "sch_pn_method_3_empty":
for _ in range(100):
self.scheduler = sch.Scheduler_PN_method_3()
client_indexes, _ = self.scheduler.sch_pn_test(1, 2002)
if len(client_indexes) > 5:
break
elif self.args.method == "sch_random":
self.scheduler = sch.sch_random
elif self.args.method == "sch_channel":
self.scheduler = sch.sch_channel
elif self.args.method == "sch_rrobin":
self.scheduler = sch.sch_rrobin
elif self.args.method == "sch_loss":
self.scheduler = sch.sch_loss
else:
self.scheduler = sch.sch_random
self.model = model
self.model_global = model(self.args, model_name=self.args.model, output_dim=self.class_num)
self.model_global.train()
def setup_clients(self, train_data_local_num_dict, train_data_local_dict, test_data_local_dict):
logger.debug("############setup_clients (START)#############")
for client_idx in range(client_num_per_round):
c = Client(client_idx, train_data_local_dict[client_idx], test_data_local_dict[client_idx],
train_data_local_num_dict[client_idx], self.args, self.device)
self.client_list.append(c)
logger.debug("############setup_clients (END)#############")
def train(self):
"""
Global initialized values
"""
# maintain a lst for local losses
local_loss_lst = np.zeros((1, client_num_in_total))
# maintain a lst for local acc
_, dataset_acc_lst = self.local_test_on_all_clients(self.model_global, 0, True, False)
local_acc_lst = dataset_acc_lst[np.arange(client_num_in_total) % self.client_num]
# counting days
counting_days, reward = 0, 0
# initialize values for calculating iteration num
delta, rho, beta, rho_flag, beta_flag = np.random.rand(1)[0], np.random.rand(1)[0], np.random.rand(1)[0], True, True
# Initialize values for calculating FPF2 index
local_itr_lst = torch.zeros(self.args.comm_round, int(client_num_in_total)).to(self.device) # historical local iterations.
G_mat = torch.zeros(int(client_num_in_total)).to(self.device) # initial the value of G with zero
# if weight size is larger than THRESHOLD_WEIGHT_SIZE we will use a simpler method to calculate FPF
weight_size = sum([self.model_global.cpu().state_dict()[para].numpy().ravel().shape[0] for para in self.model_global.state_dict().keys()])
if weight_size < THRESHOLD_WEIGHT_SIZE:
A_mat = torch.ones(weight_size).to(self.device) # initial the value of A with ones.
local_w_diffs = torch.zeros((int(client_num_in_total), weight_size)).to(self.device)
else:
logger.warning("The weight size of the model {} is too large. Thus, we turn to use a more simple method to calculate FPF.".format(self.args.model))
LRU_itr_lst = torch.zeros(int(client_num_in_total)).to(self.device) # store the iteration gap for each client.
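        # Two bookkeeping schemes feed the FPF index computed later in this loop: for
        # small models, per-client weight-difference vectors (local_w_diffs, weighted by
        # A_mat, initialised to ones) are kept; for large models only the iteration gap
        # since each client was last scheduled (LRU_itr_lst) is tracked, which is far
        # cheaper in memory.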
# show weight size for the model.
logger.debug("weight size: {}".format(weight_size))
"""
starts training, entering the loop of command round.
"""
Inform = {}
traffic = 0
for round_idx in range(self.args.comm_round):
logger.info("################Communication round : {}".format(round_idx))
# set the time_counter
self.time_counter = np.array(channel_data['Time'][channel_data['Time'] >= self.time_counter])[0]
logger.info("time_counter: {}".format(self.time_counter))
self.model_global.train()
# get client_indexes from scheduler
reward, loss_a, loss_c = 0, 0, 0
if (self.args.method)[:6] == "sch_pn":
if self.args.method[-5:] == "empty" or round_idx == 0:
client_indexes, local_itr = self.scheduler.sch_pn_empty(round_idx, self.time_counter)
else:
client_indexes, local_itr, (reward, loss_a, loss_c) = self.scheduler.sch_pn(round_idx, self.time_counter, loss_locals, FPF2_idx_lst, local_loss_lst, )
else:
if self.args.method == "sch_loss":
if round_idx == 0:
loss_locals = []
client_indexes, local_itr = self.scheduler(round_idx, self.time_counter, loss_locals)
else:
client_indexes, local_itr = self.scheduler(round_idx, self.time_counter)
# write to the scheduler csv
with open(scheduler_csv, mode = "a+", encoding='utf-8', newline='') as file:
csv_writer = csv.writer(file)
if round_idx == 0:
csv_writer.writerow(['time counter', 'client index', 'iteration'])
csv_writer.writerow([self.time_counter, str(client_indexes), local_itr])
file.flush()
logger.info("client_indexes = " + str(client_indexes))
traffic += len(client_indexes)
# write one line to trainer_csv
trainer_csv_line = [round_idx, self.time_counter, str(client_indexes), traffic]
# contribute to time counter
self.tx_time(list(client_indexes)) # transmit time
# store the last model's training parameters.
last_w = copy.deepcopy(self.model_global.cpu().state_dict())
# local Initialization
w_locals, loss_locals, beta_locals, rho_locals, cycle_locals = [], [], [], [], []
"""
for scalability: following the original FedAvg algorithm, we uniformly sample a fraction of clients in each round.
Instead of changing the 'Client' instances, our implementation keeps the 'Client' instances and then updates their local dataset
"""
for idx in range(len(client_indexes)):
# update dataset
client = self.client_list[idx]
client_idx = client_indexes[idx]
dataset_idx = client_idx % self.client_num
if dataset_idx in self.invalid_datasets.keys():
current_idx = self.invalid_datasets[dataset_idx]
else:
current_idx = dataset_idx
while True:
client.update_local_dataset(current_idx, self.train_data_local_dict[current_idx],
self.test_data_local_dict[current_idx],
self.train_data_local_num_dict[current_idx])
# train on new dataset
# add a new parameter "local_itr" to the funciton "client.train()"
# add a new return value "time_interval" which is the time consumed for training model in client.
w, loss, local_beta, local_rho, local_acc, local_cycle = client.train(net=copy.deepcopy(self.model_global).to(self.device), local_iteration = local_itr)
if loss != None and local_beta != None and local_rho != None and local_acc != None:
if dataset_idx != current_idx:
self.invalid_datasets[dataset_idx] = current_idx
break
current_idx = np.random.randint(self.class_num)
logger.warning("changing dataset for {} to {}".format(client_idx, current_idx))
# record current cycle
cycle_locals.append([client.get_sample_number(), local_cycle])
# record current w into w_locals
w_locals.append((client.get_sample_number(), copy.deepcopy(w)))
# record current loss into loss_locals
loss_locals.append(loss)
# record local beta into beta_locals
beta_locals.append(local_beta)
# record local beta into rho_locals
rho_locals.append(local_rho)
# update the local_loss_lst
local_loss_lst[0, client_idx] = loss
# update local_w_diffs
if weight_size < THRESHOLD_WEIGHT_SIZE:
local_w_diffs[client_idx, :] = torch.cat([w[para].reshape((-1, )) - last_w[para].reshape((-1, )) for para in self.model_global.state_dict().keys()]).to(self.device)
# update local_acc_lst
local_acc_lst[client_idx] = local_acc
# loss
logger.info('Client {:3d}, loss {:.3f}'.format(client_idx, loss))
# update global weights
w_glob = self.aggregate(w_locals)
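            # self.aggregate (presumably defined later in this class) is expected to
            # return the FedAvg-style sample-weighted average of the collected local
            # weights.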
# copy weight to net_glob
self.model_global.load_state_dict(w_glob)
# update the time counter
if list(client_indexes):
self.time_counter += math.ceil(LOCAL_TRAINING_TIME)
logger.debug("time_counter after training: {}".format(self.time_counter))
trainer_csv_line += [self.time_counter-trainer_csv_line[1], np.var(local_loss_lst), str(loss_locals), np.var(loss_locals), np.var(local_acc_lst)]
# print loss
if not loss_locals:
logger.info('Round {:3d}, Average loss None'.format(round_idx))
trainer_csv_line.append('None')
else:
loss_avg = sum(loss_locals) / len(loss_locals)
logger.info('Round {:3d}, Average loss {:.3f}'.format(round_idx, loss_avg))
trainer_csv_line.append(loss_avg)
if cycle_locals:
cycle_locals = np.asarray(cycle_locals)
logger.info('Elapsed cycles {:.3f}'.format(np.sum(cycle_locals[:, 0] * cycle_locals[:, 1]) / np.sum(cycle_locals[:, 0])))
# local test on all client.
if round_idx % self.args.frequency_of_the_test == 0 or round_idx == self.args.comm_round - 1:
test_acc, _ = self.local_test_on_all_clients(self.model_global, round_idx, EVAL_ON_TRAIN, True)
trainer_csv_line.append(test_acc)
# write headers for csv
with open(trainer_csv, mode = "a+", encoding='utf-8', newline='') as file:
csv_writer = csv.writer(file)
if round_idx == 0:
csv_writer.writerow(['round index', 'time counter', 'client index', 'traffic', 'train time', 'fairness',
'local loss', "local loss var", "local acc var", 'global loss', 'test accuracy'])
csv_writer.writerow(trainer_csv_line)
file.flush()
# log on wandb
Inform["reward"] = reward
wandb.log(Inform)
Inform = {
"reward": reward, "loss_a": loss_a,
"loss_c": loss_c, "round": round_idx,
"traffic": traffic,
"beta": beta, "rho": rho, "delta": delta,
"cum_time": trainer_csv_line[1]+self.cycle_num*59361,
"local_itr": local_itr,
"client_num": len(client_indexes),
"C3": (rho*delta)/beta,
"local_loss_var": np.var(loss_locals),
"local_acc_var": np.var(local_acc_lst)
}
# update FPF index list
if weight_size < THRESHOLD_WEIGHT_SIZE:
FPF2_idx_lst = torch.norm(local_w_diffs * A_mat, dim = 1) / G_mat
else:
FPF2_idx_lst = LRU_itr_lst / G_mat
FPF2_idx_lst = FPF2_idx_lst.cpu().numpy()
FPF2_idx_lst[np.bitwise_or(np.isnan(FPF2_idx_lst), np.isinf(FPF2_idx_lst))] = 0
# FPF2_idx_lst = FPF2_idx_lst / max(FPF2_idx_lst)
FPF2_idx_lst[np.bitwise_or(np.isnan(FPF2_idx_lst), np.isinf(FPF2_idx_lst))] = 0
# write FPF index list to csv
with open(FPF_csv, mode = "a+", encoding='utf-8', newline='') as file:
csv_writer = csv.writer(file)
if round_idx == 0:
csv_writer.writerow(['time counter'] + ["car_"+str(i) for i in range(client_num_in_total)])
csv_writer.writerow([trainer_csv_line[1]]+FPF2_idx_lst.tolist())
file.flush()
# update beta & delta & rho
if w_locals and loss_locals:
sample_nums = np.array([sample_num for sample_num, _ in w_locals])
local_w_diff_norms = np.array([torch.norm(torch.cat([w[para].reshape((-1, )) - w_glob[para].reshape((-1, )) for para in self.model_global.state_dict().keys()])).item() for _, w in w_locals])
# calculate delta
delta_tmp = np.sum(sample_nums * local_w_diff_norms) / np.sum(sample_nums) / self.args.lr
if (not np.isnan(delta_tmp) and not np.isinf(delta_tmp)):
delta = delta_tmp
# update rho
rho_tmp = np.sum(sample_nums * np.array(rho_locals)) / np.sum(sample_nums)
if rho_tmp > rho or rho_flag:
if (not np.isnan(rho_tmp) and not np.isinf(rho_tmp)) and rho_tmp < THRESHOLD_RHO:
rho, rho_flag = rho_tmp, False
# update beta
beta_tmp = np.sum(sample_nums * np.array(beta_locals)) / np.sum(sample_nums)
if beta_tmp > beta or beta_flag:
if (not np.isnan(beta_tmp) and not np.isinf(beta_tmp)) and beta_tmp < THRESHOLD_BETA:
beta, beta_flag = beta_tmp, False
if self.args.method == "sch_pn_method_1" or self.args.method == "sch_pn_method_1_empty":
self.scheduler.calculate_itr_method_1(delta)
elif self.args.method == "sch_pn_method_2" or self.args.method == "sch_pn_method_2_empty":
self.scheduler.calculate_itr_method_2(rho, beta, delta)
elif self.args.method == "sch_pn_method_3" or self.args.method == "sch_pn_method_3_empty":
self.scheduler.calculate_itr_method_3(rho, beta, delta)
if weight_size < THRESHOLD_WEIGHT_SIZE:
# update local_w_diffs
global_w_diff = torch.cat([w_glob[para].reshape((-1, )) - last_w[para].reshape((-1, )) for para in self.model_global.state_dict().keys()]).to(self.device)
local_w_diffs[list(set(list(range(client_num_in_total))) - set(list(client_indexes))), :] -= global_w_diff
# update A_mat
A_mat = A_mat * (1 - 1/G2) + (global_w_diff) / G2 / global_w_diff.mean()
# Update local_itr_lst
if list(client_indexes) and local_itr > 0: # only if client_idx is not empty and local_iter > 0, then I will update following values
local_itr_lst[round_idx, list(client_indexes)] = float(local_itr)
if weight_size >= THRESHOLD_WEIGHT_SIZE:
LRU_itr_lst += float(local_itr)
LRU_itr_lst[list(client_indexes)] = 0
# update G_mat
G_mat = G_mat * (1 - 1 / G1) + local_itr_lst[round_idx, :] / G1
            # if the current time_counter has exceeded the channel table, simply stop early
if self.time_counter >= time_cnt_max[counting_days]:
counting_days += 1
if counting_days % RESTART_DAYS == 0:
if self.args.method == "find_constant" and loss_locals:
w_optimal, loss_optimal = self.central_train()
w = torch.cat([param.view(-1) for param in self.model_global.parameters()])
w_diff_optimal = torch.norm(w.cpu() - w_optimal.cpu())
logger.info("The norm of difference between w_optmal & w: {}".format(w_diff_optimal.item()))
logger.info("The norm of difference between loss & loss_optimal: {}".format(loss_avg - loss_optimal))
break
logger.info("################reinitialize model")
self.model_global = self.model(self.args, model_name=self.args.model, output_dim=self.class_num)
delta, rho, beta, rho_flag, beta_flag = np.random.rand(1)[0], np.random.rand(1)[0], np.random.rand(1)[0], True, True
traffic = 0
if counting_days >= DATE_LENGTH:
logger.info("################training restarts")
counting_days = 0
self.time_counter = 0
self.cycle_num = self.cycle_num+1
def central_train(self):
logger.info("################global optimal weights calculation")
model = self.model(self.args, model_name=self.args.model, output_dim=self.class_num)
criterion = torch.nn.CrossEntropyLoss().to(self.device)
model.to(self.device)
if self.args.client_optimizer == "sgd":
optimizer = torch.optim.SGD(model.parameters(), lr=self.args.lr)
else:
optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=self.args.lr,
weight_decay=self.args.wd, amsgrad=True)
for _ in tqdm(range(self.args.central_round)):
for client_idx in range(self.client_num):
x, labels = next(iter(self.train_data_local_dict[client_idx]))
x, labels = x.to(self.device), labels.to(self.device)
model.train()
model.zero_grad()
log_probs = model(x)
loss = criterion(log_probs, labels)
loss.backward()
loss = loss.item()
optimizer.step()
wandb.log({"central_training/loss": loss})
w_optimal = torch.cat([param.view(-1) for param in model.parameters()])
loss_optimal = loss
return w_optimal, loss_optimal
def gene_non_iid_dataset(self, train_global, directory):
"""
changing self.train_data_local_dict to non-i.i.d. dataset.
And change self.train_data_local_num_dict correspondingly.
"""
data, labels = train_global[0][0], train_global[0][1] # read the tensor from train_global.
# transform shape
data = data.view(data.shape[0], -1)
labels = labels.view(labels.shape[0], -1)
# get full_df
full_df = pd.DataFrame(np.concatenate((data.numpy(), labels.numpy()), axis=1)).sample(frac=1, random_state=self.args.seed)
# temporary store the data in dir
save_dir = os.path.join(".", directory)
if not os.path.exists(save_dir):
os.mkdir(save_dir)
for client_idx in tqdm(range(self.client_num)):
# get selected classes
try:
selected_classes = set(list(np.random.choice(list(set(full_df.iloc[:, -1])), CLASS_NUM)))
except:
selected_classes = set(full_df.iloc[:, -1])
# got valid data
valid_data = full_df[full_df.iloc[:, -1].isin(selected_classes)]
# get number of data on the local client
local_num = self.train_data_local_num_dict[client_idx]
# got selected data # remember to shuffle the data
try:
selected_data = valid_data[0:local_num]
except:
selected_data = valid_data
            self.train_data_local_num_dict[client_idx] = len(selected_data)  # keep the per-client sample count in sync (the selected data itself is saved to .npy below)
# update the local client data
np.save(os.path.join(save_dir, "client_{}_data.npy".format(client_idx)), selected_data.iloc[:, 0:-1].values)
np.save(os.path.join(save_dir, "client_{}_labels.npy".format(client_idx)), selected_data.iloc[:, -1].values)
# remove the data from the full_df
full_df = full_df.drop(index=selected_data.index)
def read_non_iid_dataset(self, directory):
for client_idx in tqdm(range(self.client_num)):
data_shape = [self.train_data_local_num_dict[client_idx]] + self.data_shape[1:]
data_path = os.path.join(".", directory, "client_{}_data.npy".format(client_idx))
labels_path = os.path.join(".", directory, "client_{}_labels.npy".format(client_idx))
self.train_data_local_dict[client_idx] = [(torch.from_numpy(np.load(data_path)).view(tuple(data_shape)).float(), torch.from_numpy(np.load(labels_path)).long())]
def tx_time(self, client_indexes):
if not client_indexes:
self.time_counter += 1
return
# read the channel condition for corresponding cars.
channel_res = np.reshape(np.array(channel_data[channel_data['Time'] == self.time_counter * channel_data['Car'].isin(client_indexes)]["Distance to BS(4982,905)"]), (1, -1))
logger.debug("channel_res: {}".format(channel_res))
        # solve the optimization problem by a linear search
tmp_t = 1
if self.args.radio_alloc == "optimal":
while np.sum(RES_WEIGHT * channel_res * RES_RATIO / tmp_t) > 1:
tmp_t += 1
elif self.args.radio_alloc == "uniform":
while np.max(channel_res) * RES_WEIGHT * RES_RATIO * len(channel_res) / tmp_t > 1:
tmp_t += 1
self.time_counter += math.ceil(TIME_COMPRESSION_RATIO*tmp_t)
logger.debug("time_counter after tx_time: {}".format(self.time_counter))
def aggregate(self, w_locals):
if not w_locals:
return copy.deepcopy(self.model_global.cpu().state_dict())
training_num = 0
for idx in range(len(w_locals)):
(sample_num, averaged_params) = w_locals[idx]
training_num += sample_num
(sample_num, averaged_params) = w_locals[0]
for k in averaged_params.keys():
for i in range(0, len(w_locals)):
local_sample_number, local_model_params = w_locals[i]
w = local_sample_number / training_num
if i == 0:
averaged_params[k] = local_model_params[k] * w
else:
averaged_params[k] += local_model_params[k] * w
return averaged_params
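    # Minimal sketch of what aggregate() computes (added note; a plain-Python analogue,
    # not part of the original trainer). Each parameter tensor is averaged with weights
    # proportional to the clients' sample counts, as in FedAvg:
    #
    #   w_locals = [(10, {"w": torch.tensor([1.0])}), (30, {"w": torch.tensor([3.0])})]
    #   total = 10 + 30
    #   averaged = {"w": (10 / total) * w_locals[0][1]["w"] + (30 / total) * w_locals[1][1]["w"]}
    #   # averaged["w"] == tensor([2.5])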
def local_test_on_all_clients(self, model_global, round_idx, eval_on_train=False, if_log=True):
logger.info("################local_test_on_all_clients : {}".format(round_idx))
train_metrics = {
'num_samples': [],
'num_correct': [],
'losses': []
}
test_metrics = {
'num_samples': [],
'num_correct': [],
'losses': []
}
client = self.client_list[0]
for client_idx in tqdm(range(min(int(client_num_in_total), self.client_num))):
"""
            Note: for datasets like "fed_CIFAR100" and "fed_shakespeare",
the training client number is larger than the testing client number
"""
if self.test_data_local_dict[client_idx] is None or client_idx in self.invalid_datasets.keys():
continue
client.update_local_dataset(client_idx, self.train_data_local_dict[client_idx],
self.test_data_local_dict[client_idx],
self.train_data_local_num_dict[client_idx])
# test data
test_local_metrics = client.local_test(model_global, True)
test_metrics['num_samples'].append(copy.deepcopy(test_local_metrics['test_total']))
test_metrics['num_correct'].append(copy.deepcopy(test_local_metrics['test_correct']))
test_metrics['losses'].append(copy.deepcopy(test_local_metrics['test_loss']))
# train data
if eval_on_train:
train_local_metrics = client.local_test(model_global, False)
train_metrics['num_samples'].append(copy.deepcopy(train_local_metrics['test_total']))
train_metrics['num_correct'].append(copy.deepcopy(train_local_metrics['test_correct']))
train_metrics['losses'].append(copy.deepcopy(train_local_metrics['test_loss']))
# test on test dataset
test_acc = sum(test_metrics['num_correct']) / sum(test_metrics['num_samples'])
test_loss = sum(test_metrics['losses']) / sum(test_metrics['num_samples'])
stats = {
"Test/Acc": test_acc,
"Test/Loss": test_loss,
"round": round_idx,
"cum_time": self.time_counter+self.cycle_num*59361,
}
# test on training dataset
if eval_on_train:
train_acc = sum(train_metrics['num_correct']) / sum(train_metrics['num_samples'])
train_loss = sum(train_metrics['losses']) / sum(train_metrics['num_samples'])
stats.update({
'Train/Acc': train_acc,
'Train/Loss': train_loss,
"round": round_idx,
"cum_time": self.time_counter+self.cycle_num*59361,
})
if if_log:
logger.info(stats)
wandb.log(stats)
return test_acc, np.array(train_metrics['num_correct']) / np.array(train_metrics['num_samples'])
if if_log:
logger.info(stats)
wandb.log(stats)
return test_acc, None | 2.1875 | 2 |
src/test.py | jfparentledartech/DEFT | 0 | 9624
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import cv2
import matplotlib.pyplot as plt
import numpy as np
from progress.bar import Bar
import torch
import pickle
import motmetrics as mm
from lib.opts import opts
from lib.logger import Logger
from lib.utils.utils import AverageMeter
from lib.dataset.dataset_factory import dataset_factory
from lib.utils.pixset_metrics import compute_metrics
pixset_categories = [
'car',
'truck',
'bus',
'pedestrian',
'motorcyclist',
'cyclist',
'van'
]
opt = opts().parse()
filename = '../options/test_opt_pixset.txt'
with open(filename, 'wb') as f:
pickle.dump(opt, f)
# # print('dataset -> ', opt.dataset)
# print('lstm -> ', opt.lstm)
# print(f'saved {filename}')
# with open(filename, 'rb') as f:
# opt = pickle.load(f)
# print('use pixell ->', opt.use_pixell)
from lib.detector import Detector
from lib.utils.image import plot_tracking, plot_tracking_ddd
import json
min_box_area = 20
_vehicles = ["car", "truck", "bus", "van"]
_cycles = ["motorcyclist", "cyclist"]
_pedestrians = ["pedestrian"]
attribute_to_id = {
"": 0,
"cycle.with_rider": 1,
"cycle.without_rider": 2,
"pedestrian.moving": 3,
"pedestrian.standing": 4,
"pedestrian.sitting_lying_down": 5,
"vehicle.moving": 6,
"vehicle.parked": 7,
"vehicle.stopped": 8,
}
id_to_attribute = {v: k for k, v in attribute_to_id.items()}
nuscenes_att = np.zeros(8, np.float32)
class PrefetchDataset(torch.utils.data.Dataset):
def __init__(self, opt, dataset, pre_process_func):
self.images = dataset.images
self.load_image_func = dataset.coco.loadImgs
self.get_ann_ids = dataset.coco.getAnnIds
self.load_annotations = dataset.coco.loadAnns
self.img_dir = dataset.img_dir
self.pre_process_func = pre_process_func
self.get_default_calib = dataset.get_default_calib
self.opt = opt
def __getitem__(self, index):
self.images.sort() # TODO remove
img_id = self.images[index]
img_info = self.load_image_func(ids=[img_id])[0]
img_path = os.path.join(self.img_dir, img_info["file_name"])
image = cv2.imread(img_path)
annotation_ids = self.get_ann_ids(imgIds=[img_id])
annotations = self.load_annotations(ids=annotation_ids)
images, meta = {}, {}
for scale in opt.test_scales:
input_meta = {}
calib = (
img_info["calib"]
if "calib" in img_info
else self.get_default_calib(image.shape[1], image.shape[0])
)
input_meta["calib"] = calib
images[scale], meta[scale] = self.pre_process_func(image, scale, input_meta)
ret = {
"images": images,
"image": image,
"meta": meta,
"frame_id": img_info["frame_id"],
"annotations": annotations
}
if "frame_id" in img_info and img_info["frame_id"] == 1:
ret["is_first_frame"] = 1
ret["video_id"] = img_info["video_id"]
return img_id, ret, img_info
def __len__(self):
return len(self.images)
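# Hedged usage sketch (added note, not part of the original DEFT source): PrefetchDataset
# pairs each image id with its pre-processed tensors and COCO-style annotations, so it is
# normally consumed through a single-worker DataLoader exactly as prefetch_test() does below:
#
#   data_loader = torch.utils.data.DataLoader(
#       PrefetchDataset(opt, dataset, detector.pre_process),
#       batch_size=1, shuffle=False, num_workers=0, pin_memory=True)
#   for img_id, batch, img_info in data_loader:
#       ...  # batch["images"][scale] holds the pre-processed input per test scale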
def prefetch_test(opt):
start_time = time.time()
show_image = True
if not opt.not_set_cuda_env:
os.environ["CUDA_VISIBLE_DEVICES"] = opt.gpus_str
Dataset = dataset_factory[opt.test_dataset]
opt = opts().update_dataset_info_and_set_heads(opt, Dataset)
# split = "val" if not opt.trainval else "test"
split = "test"
# split = "val"
dataset = Dataset(opt, split)
detector = Detector(opt)
if opt.load_results != "":
load_results = json.load(open(opt.load_results, "r"))
for img_id in load_results:
for k in range(len(load_results[img_id])):
if load_results[img_id][k]["class"] - 1 in opt.ignore_loaded_cats:
load_results[img_id][k]["score"] = -1
else:
load_results = {}
data_loader = torch.utils.data.DataLoader(
PrefetchDataset(opt, dataset, detector.pre_process),
batch_size=1,
shuffle=False,
num_workers=0,
pin_memory=True,
)
results = {}
num_iters = len(data_loader) if opt.num_iters < 0 else opt.num_iters
bar = Bar("{}".format(opt.exp_id), max=num_iters)
time_stats = ["tot", "load", "pre", "net", "dec", "post", "merge", "track"]
avg_time_stats = {t: AverageMeter() for t in time_stats}
if opt.use_loaded_results:
for img_id in data_loader.dataset.images:
results[img_id] = load_results["{}".format(img_id)]
num_iters = 0
final_results = []
out_path = ""
if opt.dataset in ["nuscenes", "pixset"]:
ret = {
"meta": {
"use_camera": True,
"use_lidar": False,
"use_radar": False,
"use_map": False,
"use_external": False,
},
"results": {},
}
accumulators = [mm.MOTAccumulator(auto_id=True) for _ in pixset_categories]
for ind, (img_id, pre_processed_images, img_info) in enumerate(data_loader):
bar.next()
if ind >= num_iters:
break
if opt.dataset == "nuscenes":
sample_token = img_info["sample_token"][0]
sensor_id = img_info["sensor_id"].numpy().tolist()[0]
if opt.dataset == "pixset":
sample_token = img_info["sample_token"][0]
sensor_id = img_info["sensor_id"].numpy().tolist()[0]
if opt.tracking and ("is_first_frame" in pre_processed_images):
if "{}".format(int(img_id.numpy().astype(np.int32)[0])) in load_results:
pre_processed_images["meta"]["pre_dets"] = load_results[
"{}".format(int(img_id.numpy().astype(np.int32)[0]))
]
else:
print(
"No pre_dets for",
int(img_id.numpy().astype(np.int32)[0]),
". Use empty initialization.",
)
pre_processed_images["meta"]["pre_dets"] = []
if final_results and opt.dataset not in ["nuscenes", "pixset"]:
write_results(out_path, final_results, opt.dataset)
final_results = []
img0 = pre_processed_images["image"][0].numpy()
h, w, _ = img0.shape
detector.img_height = h
detector.img_width = w
if opt.dataset in ["nuscenes", "pixset"]:
save_video_name = os.path.join(
opt.dataset + "_videos/",
"MOT"
+ str(int(pre_processed_images["video_id"]))
+ "_"
+ str(int(img_info["sensor_id"]))
+ str(int(img_info["video_id"]))
+ ".avi",
)
elif opt.dataset == "kitti_tracking":
save_video_name = os.path.join(
opt.dataset + "_videos/",
"KITTI_" + str(int(pre_processed_images["video_id"])) + ".avi",
)
else:
save_video_name = os.path.join(
opt.dataset + "_videos/",
"MOT" + str(int(pre_processed_images["video_id"])) + ".avi",
)
results_dir = opt.dataset + "_results"
if not os.path.exists(opt.dataset + "_videos/"):
os.mkdir(opt.dataset + "_videos/")
if not os.path.exists(results_dir):
os.mkdir(results_dir)
for video in dataset.coco.dataset["videos"]:
video_id = video["id"]
file_name = video["file_name"]
if pre_processed_images[
"video_id"
] == video_id and opt.dataset not in ["nuscenes", "pixset"]:
out_path = os.path.join(results_dir, "{}.txt".format(file_name))
break
detector.reset_tracking(opt)
vw = cv2.VideoWriter(
save_video_name, cv2.VideoWriter_fourcc("M", "J", "P", "G"), 10, (w, h)
)
print("Start tracking video", int(pre_processed_images["video_id"]))
if opt.public_det:
if "{}".format(int(img_id.numpy().astype(np.int32)[0])) in load_results:
pre_processed_images["meta"]["cur_dets"] = load_results[
"{}".format(int(img_id.numpy().astype(np.int32)[0]))
]
else:
print("No cur_dets for", int(img_id.numpy().astype(np.int32)[0]))
pre_processed_images["meta"]["cur_dets"] = []
online_targets = detector.run(pre_processed_images, image_info=img_info)
online_tlwhs = []
online_ids = []
online_ddd_boxes = []
sample_results = []
classes = []
image = pre_processed_images["image"][0].numpy()
for acc_i in range(len(accumulators)):
gt_list, hyp_list, distances = compute_metrics(pre_processed_images['annotations'],
online_targets, eval_type='distance',
im=image, category=pixset_categories[acc_i])
accumulators[acc_i].update(gt_list, hyp_list, distances)
idx = 0
print(ind)
print(accumulators[idx].mot_events.loc[ind])
mh = mm.metrics.create()
summary = mh.compute(accumulators[idx], metrics=['num_frames', 'mota', 'precision', 'recall'], name=f'acc {pixset_categories[idx]}')
print(summary)
print('-----------------------------------------')
for t in online_targets:
tlwh = t.tlwh
tid = t.track_id
if tlwh[2] * tlwh[3] > min_box_area:
online_tlwhs.append(tlwh)
online_ids.append(tid)
classes.append(t.classe)
if opt.dataset in ["nuscenes", "pixset"]:
online_ddd_boxes.append(t.org_ddd_box)
class_name = t.classe
if class_name in _cycles:
att = id_to_attribute[np.argmax(nuscenes_att[0:2]) + 1]
elif class_name in _pedestrians:
att = id_to_attribute[np.argmax(nuscenes_att[2:5]) + 3]
elif class_name in _vehicles:
att = id_to_attribute[np.argmax(nuscenes_att[5:8]) + 6]
ddd_box = t.ddd_bbox.copy()
ddd_box_submission = t.ddd_submission.tolist()
translation, size, rotation = (
ddd_box_submission[:3],
ddd_box_submission[3:6],
ddd_box_submission[6:],
)
result = {
"sample_token": sample_token,
"translation": translation,
"size": size,
"rotation": rotation,
"velocity": [0, 0],
"detection_name": t.classe,
# "attribute_name": att,
"attribute_name": None,
"detection_score": t.score,
"tracking_name": t.classe,
"tracking_score": t.score,
"tracking_id": tid,
"sensor_id": sensor_id,
"det_id": -1,
}
sample_results.append(result.copy())
if opt.dataset in ["nuscenes", "pixset"]:
if sample_token in ret["results"]:
ret["results"][sample_token] = (
ret["results"][sample_token] + sample_results
)
else:
ret["results"][sample_token] = sample_results
final_results.append(
(pre_processed_images["frame_id"].cpu().item(), online_tlwhs, online_ids)
)
if show_image:
img0 = pre_processed_images["image"][0].numpy()
if opt.dataset in ["nuscenes", "pixset"]:
online_im = plot_tracking_ddd(
img0,
online_tlwhs,
online_ddd_boxes,
online_ids,
frame_id=pre_processed_images["frame_id"],
calib=img_info["calib"],
trans_matrix=img_info["trans_matrix"],
camera_matrix=img_info["camera_matrix"],
distortion_coeffs=img_info["distortion_coefficients"],
classes=classes,
)
else:
online_im = plot_tracking(
img0,
online_tlwhs,
online_ids,
frame_id=pre_processed_images["frame_id"],
)
vw.write(online_im)
if opt.dataset not in ["nuscenes", "pixset"] and final_results:
write_results(out_path, final_results, opt.dataset)
final_results = []
if opt.dataset in ["nuscenes", "pixset"]:
for sample_token in ret["results"].keys():
confs = sorted(
[
(-d["detection_score"], ind)
for ind, d in enumerate(ret["results"][sample_token])
]
)
ret["results"][sample_token] = [
ret["results"][sample_token][ind]
for _, ind in confs[: min(500, len(confs))]
]
mh = mm.metrics.create()
metrics = ['num_frames', 'mota', 'motp', 'precision', 'recall']
summary = mh.compute_many(
accumulators, names=pixset_categories, metrics=metrics, generate_overall=True
)
print(summary)
save_summary(summary, 'overall')
print('total test time', time.time() - start_time)
def save_summary(summary, acc_name):
with open(f"../pixset_results/test/{acc_name}.txt", "w") as text_file:
text_file.write(summary.to_string())
def _to_list(results):
for img_id in results:
for t in range(len(results[img_id])):
for k in results[img_id][t]:
if isinstance(results[img_id][t][k], (np.ndarray, np.float32)):
results[img_id][t][k] = results[img_id][t][k].tolist()
return results
def write_results(filename, results, data_type):
if data_type == "mot":
save_format = "{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n"
elif data_type == "kitti_tracking":
save_format = "{frame} {id} Car 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n"
else:
raise ValueError(data_type)
with open(filename, "w") as f:
for frame_id, tlwhs, track_ids in results:
if data_type == "kitti_tracking":
frame_id -= 1
for tlwh, track_id in zip(tlwhs, track_ids):
if track_id < 0:
continue
x1, y1, w, h = tlwh
x2, y2 = x1 + w, y1 + h
line = save_format.format(
frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h
)
f.write(line)
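# Example of the lines write_results() emits (added note; values are illustrative):
#   mot format:            "1,3,100.0,200.0,50.0,80.0,1,-1,-1,-1"
#   kitti_tracking format: "0 3 Car 0 0 -10 100.0 200.0 150.0 280.0 -10 -10 -10 -1000 -1000 -1000 -10"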
if __name__ == "__main__":
# opt = opts().parse()
prefetch_test(opt)
| 1.859375 | 2 |
compiler_gym/envs/gcc/datasets/csmith.py | AkillesAILimited/CompilerGym | 0 | 9625
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import shutil
import subprocess
import tempfile
from pathlib import Path
from threading import Lock
from typing import Iterable, Optional, Union
import numpy as np
from fasteners import InterProcessLock
from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset
from compiler_gym.datasets.benchmark import BenchmarkWithSource
from compiler_gym.envs.gcc.gcc import Gcc
from compiler_gym.util.decorators import memoized_property
from compiler_gym.util.runfiles_path import runfiles_path
from compiler_gym.util.shell_format import plural
from compiler_gym.util.truncate import truncate
# The maximum value for the --seed argument to csmith.
UINT_MAX = (2 ** 32) - 1
_CSMITH_BIN = runfiles_path("compiler_gym/third_party/csmith/csmith/bin/csmith")
_CSMITH_INCLUDES = runfiles_path(
"compiler_gym/third_party/csmith/csmith/include/csmith-2.3.0"
)
_CSMITH_INSTALL_LOCK = Lock()
# TODO(github.com/facebookresearch/CompilerGym/issues/325): This can be merged
# with the LLVM implementation.
class CsmithBenchmark(BenchmarkWithSource):
"""A CSmith benchmark."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._src = None
@classmethod
def create(cls, uri: str, bitcode: bytes, src: bytes) -> Benchmark:
"""Create a benchmark from paths."""
benchmark = cls.from_file_contents(uri, bitcode)
benchmark._src = src # pylint: disable=protected-access
return benchmark
@memoized_property
def sources(self) -> Iterable[BenchmarkSource]:
return [
BenchmarkSource(filename="source.c", contents=self._src),
]
@property
def source(self) -> str:
"""Return the single source file contents as a string."""
return self._src.decode("utf-8")
class CsmithDataset(Dataset):
"""A dataset which uses Csmith to generate programs.
Csmith is a tool that can generate random conformant C99 programs. It is
described in the publication:
<NAME>, <NAME>, <NAME>, and <NAME>. "Finding and
understanding bugs in C compilers." In Proceedings of the 32nd ACM
SIGPLAN conference on Programming Language Design and Implementation
(PLDI), pp. 283-294. 2011.
For up-to-date information about Csmith, see:
https://embed.cs.utah.edu/csmith/
Note that Csmith is a tool that is used to find errors in compilers. As
such, there is a higher likelihood that the benchmark cannot be used for an
environment and that :meth:`env.reset()
<compiler_gym.envs.CompilerEnv.reset>` will raise :class:`BenchmarkInitError
<compiler_gym.datasets.BenchmarkInitError>`.
"""
def __init__(
self,
gcc_bin: Union[Path, str],
site_data_base: Path,
sort_order: int = 0,
csmith_bin: Optional[Path] = None,
csmith_includes: Optional[Path] = None,
):
"""Constructor.
:param site_data_base: The base path of a directory that will be used to
store installed files.
:param sort_order: An optional numeric value that should be used to
order this dataset relative to others. Lowest value sorts first.
:param csmith_bin: The path of the Csmith binary to use. If not
provided, the version of Csmith shipped with CompilerGym is used.
:param csmith_includes: The path of the Csmith includes directory. If
not provided, the includes of the Csmith shipped with CompilerGym is
used.
"""
super().__init__(
name="generator://csmith-v0",
description="Random conformant C99 programs",
references={
"Paper": "http://web.cse.ohio-state.edu/~rountev.1/5343/pdf/pldi11.pdf",
"Homepage": "https://embed.cs.utah.edu/csmith/",
},
license="BSD",
site_data_base=site_data_base,
sort_order=sort_order,
benchmark_class=CsmithBenchmark,
)
self.gcc_bin = gcc_bin
self.csmith_bin_path = csmith_bin or _CSMITH_BIN
self.csmith_includes_path = csmith_includes or _CSMITH_INCLUDES
self._install_lockfile = self.site_data_path / ".install.LOCK"
@property
def size(self) -> int:
# Actually 2^32 - 1, but practically infinite for all intents and
# purposes.
return 0
@memoized_property
def gcc(self):
# Defer instantiation of Gcc from the constructor as it will fail if the
# given Gcc is not available. Memoize the result as initialization is
# expensive.
return Gcc(bin=self.gcc_bin)
def benchmark_uris(self) -> Iterable[str]:
return (f"{self.name}/{i}" for i in range(UINT_MAX))
def benchmark(self, uri: str) -> CsmithBenchmark:
return self.benchmark_from_seed(int(uri.split("/")[-1]))
def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark:
seed = random_state.integers(UINT_MAX)
return self.benchmark_from_seed(seed)
@property
def installed(self) -> bool:
return super().installed and (self.site_data_path / "includes").is_dir()
def install(self) -> None:
super().install()
if self.installed:
return
with _CSMITH_INSTALL_LOCK, InterProcessLock(self._install_lockfile):
if (self.site_data_path / "includes").is_dir():
return
# Copy the Csmith headers into the dataset's site directory path because
# in bazel builds this includes directory is a symlink, and we need
# actual files that we can use in a docker volume.
shutil.copytree(
self.csmith_includes_path,
self.site_data_path / "includes.tmp",
)
# Atomic directory rename to prevent race on install().
(self.site_data_path / "includes.tmp").rename(
self.site_data_path / "includes"
)
def benchmark_from_seed(
self, seed: int, max_retries: int = 3, retry_count: int = 0
) -> CsmithBenchmark:
"""Get a benchmark from a uint32 seed.
:param seed: A number in the range 0 <= n < 2^32.
:return: A benchmark instance.
:raises OSError: If Csmith fails.
:raises BenchmarkInitError: If the C program generated by Csmith cannot
be lowered to LLVM-IR.
"""
if retry_count >= max_retries:
raise OSError(
f"Csmith failed after {retry_count} {plural(retry_count, 'attempt', 'attempts')} "
f"with seed {seed}"
)
self.install()
# Run csmith with the given seed and pipe the output to clang to
# assemble a bitcode.
self.logger.debug("Exec csmith --seed %d", seed)
csmith = subprocess.Popen(
[str(self.csmith_bin_path), "--seed", str(seed)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# Generate the C source.
src, stderr = csmith.communicate(timeout=300)
if csmith.returncode:
try:
stderr = "\n".join(
truncate(stderr.decode("utf-8"), max_line_len=200, max_lines=20)
)
logging.warning("Csmith failed with seed %d: %s", seed, stderr)
except UnicodeDecodeError:
# Failed to interpret the stderr output, generate a generic
# error message.
logging.warning("Csmith failed with seed %d", seed)
return self.benchmark_from_seed(
seed, max_retries=max_retries, retry_count=retry_count + 1
)
# Pre-process the source.
with tempfile.TemporaryDirectory() as tmpdir:
src_file = f"{tmpdir}/src.c"
with open(src_file, "wb") as f:
f.write(src)
preprocessed_src = self.gcc(
"-E",
"-I",
str(self.site_data_path / "includes"),
"-o",
"-",
src_file,
cwd=tmpdir,
timeout=60,
volumes={
str(self.site_data_path / "includes"): {
"bind": str(self.site_data_path / "includes"),
"mode": "ro",
}
},
)
return self.benchmark_class.create(
f"{self.name}/{seed}", preprocessed_src.encode("utf-8"), src
)
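# Hedged usage sketch (added note, not part of the upstream CompilerGym file). The paths
# below are placeholders: a working `gcc` and a writable site-data directory are assumed.
#
#   dataset = CsmithDataset(gcc_bin="gcc", site_data_base=Path("/tmp/csmith_site_data"))
#   print(next(iter(dataset.benchmark_uris())))   # -> "generator://csmith-v0/0"
#   benchmark = dataset.benchmark_from_seed(42)   # runs csmith, then gcc -E; may raise OSError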
| 1.765625 | 2 |
dans_pymodules/power_of_two.py | DanielWinklehner/dans_pymodules | 0 | 9626
__author__ = "<NAME>"
__doc__ = "Find out if a number is a power of two"
def power_of_two(number):
"""
    Function that checks if the input value (number) is a power of 2
(i.e. 2, 4, 8, 16, 32, ...)
"""
res = 0
while res == 0:
res = number % 2
number /= 2.0
print("res: {}, data: {}".format(res, number))
if number == 1 and res == 0:
return True
return False
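
if __name__ == "__main__":
    # Small usage demo added for illustration (not in the original module). Only positive
    # integers are exercised: the loop above never terminates for number == 0.
    for n in (2, 12, 1024):
        print("{} -> {}".format(n, power_of_two(n)))
    # Common constant-time alternative for positive ints (assumption, not used above):
    #   n > 0 and (n & (n - 1)) == 0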
| 3.828125 | 4 |
examples/index/context.py | rmorshea/viewdom | 0 | 9627 | from viewdom import html, render, use_context, Context
expected = '<h1>My Todos</h1><ul><li>Item: first</li></ul>'
# start-after
title = 'My Todos'
todos = ['first']
def Todo(label):
prefix = use_context('prefix')
return html('<li>{prefix}{label}</li>')
def TodoList(todos):
return html('<ul>{[Todo(label) for label in todos]}</ul>')
result = render(html('''
<{Context} prefix="Item: ">
<h1>{title}</h1>
<{TodoList} todos={todos} />
<//>
'''))
# '<h1>My Todos</h1><ul><li>Item: first</li></ul>'
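# Added self-check (hedged): it only restates the expected string this example already defines.
assert result == expected, result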
| 2.546875 | 3 |
biblioteca/views.py | Dagmoores/ProjetoIntegradorIUnivesp | 0 | 9628 | from django.views.generic import DetailView, ListView, TemplateView
from .models import Books
class BooksListView(ListView):
model = Books
class BooksDeitalView(DetailView):
model = Books
class Home(TemplateView):
template_name = './biblioteca/index.html'
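# Illustrative routing for these views (added note; a sketch of a companion urls.py with
# hypothetical route names -- not part of this file):
#
#   from django.urls import path
#   from .views import Home, BooksListView, BooksDeitalView, TermsOfService
#
#   urlpatterns = [
#       path('', Home.as_view(), name='home'),
#       path('books/', BooksListView.as_view(), name='books'),
#       path('books/<int:pk>/', BooksDeitalView.as_view(), name='book-detail'),
#       path('terms/', TermsOfService.as_view(), name='terms'),
#   ]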
class TermsOfService(TemplateView):
template_name = './biblioteca/termsOfService.html' | 2.078125 | 2 |
choir/evaluation/__init__.py | scwangdyd/large_vocabulary_hoi_detection | 9 | 9629
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .evaluator import DatasetEvaluator, DatasetEvaluators, inference_context, inference_on_dataset
from .testing import print_csv_format, verify_results
from .hico_evaluation import HICOEvaluator
from .swig_evaluation import SWIGEvaluator
# from .doh_evaluation import DOHDetectionEvaluator
__all__ = [k for k in globals().keys() if not k.startswith("_")]
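# Hedged usage sketch (added note): these evaluators follow the detectron2-style protocol
# re-exported above, roughly
#
#   evaluator = HICOEvaluator(...)                                  # see hico_evaluation.py for the real signature
#   results = inference_on_dataset(model, data_loader, evaluator)   # runs the model, drives evaluator.process()/evaluate()
#   print_csv_format(results)
#
# The constructor arguments are intentionally elided here; they are dataset-specific.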
| 1.304688 | 1 |
api_yamdb/reviews/models.py | LHLHLHE/api_yamdb | 0 | 9630
import datetime as dt
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from django.core.exceptions import ValidationError
from users.models import CustomUser
def validate_year(value):
"""
    The release year of a title cannot be later than the current year.
"""
if value > dt.datetime.now().year:
raise ValidationError(
'Год выпуска превышает текущий!')
return value
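# Behaviour sketch (added note): validate_year(1999) returns 1999 unchanged, while
# validate_year(dt.datetime.now().year + 1) raises ValidationError('Год выпуска превышает текущий!').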
class Category(models.Model):
"""Модель категорий."""
name = models.CharField(max_length=256, verbose_name='Название')
slug = models.SlugField(
max_length=50,
unique=True,
verbose_name='Идентификатор')
class Meta:
ordering = ('name',)
verbose_name = 'Категория'
verbose_name_plural = 'Категории'
def __str__(self):
return self.slug
class Genre(models.Model):
"""Модель жанров."""
name = models.CharField(max_length=256, verbose_name='Название')
slug = models.SlugField(
max_length=50,
unique=True,
verbose_name='Идентификатор')
class Meta:
ordering = ('name',)
verbose_name = 'Жанр'
verbose_name_plural = 'Жанры'
def __str__(self):
return self.slug
class Title(models.Model):
"""Модель произведений."""
name = models.TextField(verbose_name='Название')
year = models.IntegerField(
validators=[validate_year],
verbose_name='Год выпуска')
description = models.TextField(
blank=True,
verbose_name='Описание')
genre = models.ManyToManyField(
Genre,
through='GenreTitle',
verbose_name='Жанры')
category = models.ForeignKey(
Category,
on_delete=models.SET_NULL,
blank=True,
null=True,
related_name='titles',
verbose_name='Категория')
class Meta:
ordering = ('name',)
verbose_name = 'Произведение'
verbose_name_plural = 'Произведения'
def __str__(self):
return (
f'name: {self.name}, '
f'year: {self.year}, '
)
class GenreTitle(models.Model):
"""Модель для связи произведений и жанров отношением многие ко многим."""
genre = models.ForeignKey(
Genre,
on_delete=models.SET_NULL,
blank=True,
null=True)
title = models.ForeignKey(Title, on_delete=models.CASCADE)
def __str__(self):
return f'{self.genre} --- {self.title}'
class Review(models.Model):
title = models.ForeignKey(
Title,
on_delete=models.CASCADE,
verbose_name='Произведение',
)
text = models.TextField(
verbose_name='текст',
)
author = models.ForeignKey(
CustomUser,
on_delete=models.CASCADE,
verbose_name='Автор'
)
score = models.IntegerField(
validators=[
MinValueValidator(1),
MaxValueValidator(10)
],
verbose_name='Оценка'
)
pub_date = models.DateTimeField(
auto_now_add=True,
verbose_name='Дата публикации'
)
class Meta:
constraints = [
models.UniqueConstraint(
fields=['author', 'title'],
name='unique review'
)
]
verbose_name = 'Отзыв'
verbose_name_plural = 'Отзывы'
default_related_name = 'reviews'
def __str__(self):
return self.text[:60]
class Comment(models.Model):
review = models.ForeignKey(
Review,
on_delete=models.CASCADE,
related_name='comments',
verbose_name='Отзыв',
)
text = models.TextField(verbose_name='Текст')
author = models.ForeignKey(
CustomUser,
on_delete=models.CASCADE,
related_name='comments',
verbose_name='Автор'
)
pub_date = models.DateTimeField(
auto_now_add=True,
verbose_name='Дата публикации'
)
class Meta:
verbose_name = 'Комментарий'
verbose_name_plural = 'Комментарии'
def __str__(self):
return self.text
| 2.40625 | 2 |
angr/engines/pcode/arch/ArchPcode_PowerPC_LE_32_QUICC.py | matthewpruett/angr | 6,132 | 9631 | ###
### This file was automatically generated
###
from archinfo.arch import register_arch, Endness, Register
from .common import ArchPcode
class ArchPcode_PowerPC_LE_32_QUICC(ArchPcode):
name = 'PowerPC:LE:32:QUICC'
pcode_arch = 'PowerPC:LE:32:QUICC'
description = 'PowerQUICC-III 32-bit little endian family'
bits = 32
ip_offset = 0x780
sp_offset = 0x4
bp_offset = sp_offset
instruction_endness = Endness.LE
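    # Added note: each Register entry below appears to follow archinfo's
    # Register(name, size_in_bytes, offset) positional layout used by the other
    # auto-generated ArchPcode_* files; alias_names (e.g. 'ip' for pc) are kept as generated.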
register_list = [
Register('r0', 4, 0x0),
Register('r1', 4, 0x4),
Register('r2', 4, 0x8),
Register('r3', 4, 0xc),
Register('r4', 4, 0x10),
Register('r5', 4, 0x14),
Register('r6', 4, 0x18),
Register('r7', 4, 0x1c),
Register('r8', 4, 0x20),
Register('r9', 4, 0x24),
Register('r10', 4, 0x28),
Register('r11', 4, 0x2c),
Register('r12', 4, 0x30),
Register('r13', 4, 0x34),
Register('r14', 4, 0x38),
Register('r15', 4, 0x3c),
Register('r16', 4, 0x40),
Register('r17', 4, 0x44),
Register('r18', 4, 0x48),
Register('r19', 4, 0x4c),
Register('r20', 4, 0x50),
Register('r21', 4, 0x54),
Register('r22', 4, 0x58),
Register('r23', 4, 0x5c),
Register('r24', 4, 0x60),
Register('r25', 4, 0x64),
Register('r26', 4, 0x68),
Register('r27', 4, 0x6c),
Register('r28', 4, 0x70),
Register('r29', 4, 0x74),
Register('r30', 4, 0x78),
Register('r31', 4, 0x7c),
Register('xer_so', 1, 0x400),
Register('xer_ov', 1, 0x401),
Register('xer_ov32', 1, 0x402),
Register('xer_ca', 1, 0x403),
Register('xer_ca32', 1, 0x404),
Register('xer_count', 1, 0x405),
Register('fp_fx', 1, 0x500),
Register('fp_fex', 1, 0x501),
Register('fp_vx', 1, 0x502),
Register('fp_ox', 1, 0x503),
Register('fp_ux', 1, 0x504),
Register('fp_zx', 1, 0x505),
Register('fp_xx', 1, 0x506),
Register('fp_vxsnan', 1, 0x507),
Register('fp_vxisi', 1, 0x508),
Register('fp_vxidi', 1, 0x509),
Register('fp_vxzdz', 1, 0x50a),
Register('fp_vximz', 1, 0x50b),
Register('fp_vxvc', 1, 0x50c),
Register('fp_fr', 1, 0x50d),
Register('fp_fi', 1, 0x50e),
Register('fp_c', 1, 0x50f),
Register('fp_cc0', 1, 0x510),
Register('fp_cc1', 1, 0x511),
Register('fp_cc2', 1, 0x512),
Register('fp_cc3', 1, 0x513),
Register('fp_reserve1', 1, 0x514),
Register('fp_vxsoft', 1, 0x515),
Register('fp_vxsqrt', 1, 0x516),
Register('fp_vxcvi', 1, 0x517),
Register('fp_ve', 1, 0x518),
Register('fp_oe', 1, 0x519),
Register('fp_ue', 1, 0x51a),
Register('fp_ze', 1, 0x51b),
Register('fp_xe', 1, 0x51c),
Register('fp_ni', 1, 0x51d),
Register('fp_rn0', 1, 0x51e),
Register('fp_rn1', 1, 0x51f),
Register('msr', 4, 0x700),
Register('reserve_address', 4, 0x720),
Register('reserve', 1, 0x728),
Register('reserve_length', 1, 0x730),
Register('pc', 4, 0x780, alias_names=('ip',)),
Register('sr0', 4, 0x800),
Register('sr1', 4, 0x804),
Register('sr2', 4, 0x808),
Register('sr3', 4, 0x80c),
Register('sr4', 4, 0x810),
Register('sr5', 4, 0x814),
Register('sr6', 4, 0x818),
Register('sr7', 4, 0x81c),
Register('sr8', 4, 0x820),
Register('sr9', 4, 0x824),
Register('sr10', 4, 0x828),
Register('sr11', 4, 0x82c),
Register('sr12', 4, 0x830),
Register('sr13', 4, 0x834),
Register('sr14', 4, 0x838),
Register('sr15', 4, 0x83c),
Register('crall', 8, 0x900),
Register('cr0', 1, 0x900),
Register('cr1', 1, 0x901),
Register('cr2', 1, 0x902),
Register('cr3', 1, 0x903),
Register('cr4', 1, 0x904),
Register('cr5', 1, 0x905),
Register('cr6', 1, 0x906),
Register('cr7', 1, 0x907),
Register('tea', 4, 0x980),
Register('r2save', 4, 0x988),
Register('spr000', 4, 0x1000),
Register('xer', 4, 0x1004),
Register('spr002', 4, 0x1008),
Register('spr003', 4, 0x100c),
Register('spr004', 4, 0x1010),
Register('spr005', 4, 0x1014),
Register('spr006', 4, 0x1018),
Register('spr007', 4, 0x101c),
Register('lr', 4, 0x1020),
Register('ctr', 4, 0x1024),
Register('spr00a', 4, 0x1028),
Register('spr00b', 4, 0x102c),
Register('spr00c', 4, 0x1030),
Register('spr00d', 4, 0x1034),
Register('spr00e', 4, 0x1038),
Register('spr00f', 4, 0x103c),
Register('spr010', 4, 0x1040),
Register('spr011', 4, 0x1044),
Register('spr012', 4, 0x1048),
Register('spr013', 4, 0x104c),
Register('spr014', 4, 0x1050),
Register('spr015', 4, 0x1054),
Register('spr016', 4, 0x1058),
Register('spr017', 4, 0x105c),
Register('spr018', 4, 0x1060),
Register('spr019', 4, 0x1064),
Register('srr0', 4, 0x1068),
Register('srr1', 4, 0x106c),
Register('spr01c', 4, 0x1070),
Register('spr01d', 4, 0x1074),
Register('spr01e', 4, 0x1078),
Register('spr01f', 4, 0x107c),
Register('spr020', 4, 0x1080),
Register('spr021', 4, 0x1084),
Register('spr022', 4, 0x1088),
Register('spr023', 4, 0x108c),
Register('spr024', 4, 0x1090),
Register('spr025', 4, 0x1094),
Register('spr026', 4, 0x1098),
Register('spr027', 4, 0x109c),
Register('spr028', 4, 0x10a0),
Register('spr029', 4, 0x10a4),
Register('spr02a', 4, 0x10a8),
Register('spr02b', 4, 0x10ac),
Register('spr02c', 4, 0x10b0),
Register('spr02d', 4, 0x10b4),
Register('spr02e', 4, 0x10b8),
Register('spr02f', 4, 0x10bc),
Register('spr030', 4, 0x10c0),
Register('spr031', 4, 0x10c4),
Register('spr032', 4, 0x10c8),
Register('spr033', 4, 0x10cc),
Register('spr034', 4, 0x10d0),
Register('spr035', 4, 0x10d4),
Register('spr036', 4, 0x10d8),
Register('spr037', 4, 0x10dc),
Register('spr038', 4, 0x10e0),
Register('spr039', 4, 0x10e4),
Register('spr03a', 4, 0x10e8),
Register('spr03b', 4, 0x10ec),
Register('spr03c', 4, 0x10f0),
Register('spr03d', 4, 0x10f4),
Register('spr03e', 4, 0x10f8),
Register('spr03f', 4, 0x10fc),
Register('spr040', 4, 0x1100),
Register('spr041', 4, 0x1104),
Register('spr042', 4, 0x1108),
Register('spr043', 4, 0x110c),
Register('spr044', 4, 0x1110),
Register('spr045', 4, 0x1114),
Register('spr046', 4, 0x1118),
Register('spr047', 4, 0x111c),
Register('spr048', 4, 0x1120),
Register('spr049', 4, 0x1124),
Register('spr04a', 4, 0x1128),
Register('spr04b', 4, 0x112c),
Register('spr04c', 4, 0x1130),
Register('spr04d', 4, 0x1134),
Register('spr04e', 4, 0x1138),
Register('spr04f', 4, 0x113c),
Register('spr050', 4, 0x1140),
Register('spr051', 4, 0x1144),
Register('spr052', 4, 0x1148),
Register('spr053', 4, 0x114c),
Register('spr054', 4, 0x1150),
Register('spr055', 4, 0x1154),
Register('spr056', 4, 0x1158),
Register('spr057', 4, 0x115c),
Register('spr058', 4, 0x1160),
Register('spr059', 4, 0x1164),
Register('spr05a', 4, 0x1168),
Register('spr05b', 4, 0x116c),
Register('spr05c', 4, 0x1170),
Register('spr05d', 4, 0x1174),
Register('spr05e', 4, 0x1178),
Register('spr05f', 4, 0x117c),
Register('spr060', 4, 0x1180),
Register('spr061', 4, 0x1184),
Register('spr062', 4, 0x1188),
Register('spr063', 4, 0x118c),
Register('spr064', 4, 0x1190),
Register('spr065', 4, 0x1194),
Register('spr066', 4, 0x1198),
Register('spr067', 4, 0x119c),
Register('spr068', 4, 0x11a0),
Register('spr069', 4, 0x11a4),
Register('spr06a', 4, 0x11a8),
Register('spr06b', 4, 0x11ac),
Register('spr06c', 4, 0x11b0),
Register('spr06d', 4, 0x11b4),
Register('spr06e', 4, 0x11b8),
Register('spr06f', 4, 0x11bc),
Register('spr070', 4, 0x11c0),
Register('spr071', 4, 0x11c4),
Register('spr072', 4, 0x11c8),
Register('spr073', 4, 0x11cc),
Register('spr074', 4, 0x11d0),
Register('spr075', 4, 0x11d4),
Register('spr076', 4, 0x11d8),
Register('spr077', 4, 0x11dc),
Register('spr078', 4, 0x11e0),
Register('spr079', 4, 0x11e4),
Register('spr07a', 4, 0x11e8),
Register('spr07b', 4, 0x11ec),
Register('spr07c', 4, 0x11f0),
Register('spr07d', 4, 0x11f4),
Register('spr07e', 4, 0x11f8),
Register('spr07f', 4, 0x11fc),
Register('spr080', 4, 0x1200),
Register('spr081', 4, 0x1204),
Register('spr082', 4, 0x1208),
Register('spr083', 4, 0x120c),
Register('spr084', 4, 0x1210),
Register('spr085', 4, 0x1214),
Register('spr086', 4, 0x1218),
Register('spr087', 4, 0x121c),
Register('spr088', 4, 0x1220),
Register('spr089', 4, 0x1224),
Register('spr08a', 4, 0x1228),
Register('spr08b', 4, 0x122c),
Register('spr08c', 4, 0x1230),
Register('spr08d', 4, 0x1234),
Register('spr08e', 4, 0x1238),
Register('spr08f', 4, 0x123c),
Register('spr090', 4, 0x1240),
Register('spr091', 4, 0x1244),
Register('spr092', 4, 0x1248),
Register('spr093', 4, 0x124c),
Register('spr094', 4, 0x1250),
Register('spr095', 4, 0x1254),
Register('spr096', 4, 0x1258),
Register('spr097', 4, 0x125c),
Register('spr098', 4, 0x1260),
Register('spr099', 4, 0x1264),
Register('spr09a', 4, 0x1268),
Register('spr09b', 4, 0x126c),
Register('spr09c', 4, 0x1270),
Register('spr09d', 4, 0x1274),
Register('spr09e', 4, 0x1278),
Register('spr09f', 4, 0x127c),
Register('spr0a0', 4, 0x1280),
Register('spr0a1', 4, 0x1284),
Register('spr0a2', 4, 0x1288),
Register('spr0a3', 4, 0x128c),
Register('spr0a4', 4, 0x1290),
Register('spr0a5', 4, 0x1294),
Register('spr0a6', 4, 0x1298),
Register('spr0a7', 4, 0x129c),
Register('spr0a8', 4, 0x12a0),
Register('spr0a9', 4, 0x12a4),
Register('spr0aa', 4, 0x12a8),
Register('spr0ab', 4, 0x12ac),
Register('spr0ac', 4, 0x12b0),
Register('spr0ad', 4, 0x12b4),
Register('spr0ae', 4, 0x12b8),
Register('spr0af', 4, 0x12bc),
Register('spr0b0', 4, 0x12c0),
Register('spr0b1', 4, 0x12c4),
Register('spr0b2', 4, 0x12c8),
Register('spr0b3', 4, 0x12cc),
Register('spr0b4', 4, 0x12d0),
Register('spr0b5', 4, 0x12d4),
Register('spr0b6', 4, 0x12d8),
Register('spr0b7', 4, 0x12dc),
Register('spr0b8', 4, 0x12e0),
Register('spr0b9', 4, 0x12e4),
Register('spr0ba', 4, 0x12e8),
Register('spr0bb', 4, 0x12ec),
Register('spr0bc', 4, 0x12f0),
Register('spr0bd', 4, 0x12f4),
Register('spr0be', 4, 0x12f8),
Register('spr0bf', 4, 0x12fc),
Register('spr0c0', 4, 0x1300),
Register('spr0c1', 4, 0x1304),
Register('spr0c2', 4, 0x1308),
Register('spr0c3', 4, 0x130c),
Register('spr0c4', 4, 0x1310),
Register('spr0c5', 4, 0x1314),
Register('spr0c6', 4, 0x1318),
Register('spr0c7', 4, 0x131c),
Register('spr0c8', 4, 0x1320),
Register('spr0c9', 4, 0x1324),
Register('spr0ca', 4, 0x1328),
Register('spr0cb', 4, 0x132c),
Register('spr0cc', 4, 0x1330),
Register('spr0cd', 4, 0x1334),
Register('spr0ce', 4, 0x1338),
Register('spr0cf', 4, 0x133c),
Register('spr0d0', 4, 0x1340),
Register('spr0d1', 4, 0x1344),
Register('spr0d2', 4, 0x1348),
Register('spr0d3', 4, 0x134c),
Register('spr0d4', 4, 0x1350),
Register('spr0d5', 4, 0x1354),
Register('spr0d6', 4, 0x1358),
Register('spr0d7', 4, 0x135c),
Register('spr0d8', 4, 0x1360),
Register('spr0d9', 4, 0x1364),
Register('spr0da', 4, 0x1368),
Register('spr0db', 4, 0x136c),
Register('spr0dc', 4, 0x1370),
Register('spr0dd', 4, 0x1374),
Register('spr0de', 4, 0x1378),
Register('spr0df', 4, 0x137c),
Register('spr0e0', 4, 0x1380),
Register('spr0e1', 4, 0x1384),
Register('spr0e2', 4, 0x1388),
Register('spr0e3', 4, 0x138c),
Register('spr0e4', 4, 0x1390),
Register('spr0e5', 4, 0x1394),
Register('spr0e6', 4, 0x1398),
Register('spr0e7', 4, 0x139c),
Register('spr0e8', 4, 0x13a0),
Register('spr0e9', 4, 0x13a4),
Register('spr0ea', 4, 0x13a8),
Register('spr0eb', 4, 0x13ac),
Register('spr0ec', 4, 0x13b0),
Register('spr0ed', 4, 0x13b4),
Register('spr0ee', 4, 0x13b8),
Register('spr0ef', 4, 0x13bc),
Register('spr0f0', 4, 0x13c0),
Register('spr0f1', 4, 0x13c4),
Register('spr0f2', 4, 0x13c8),
Register('spr0f3', 4, 0x13cc),
Register('spr0f4', 4, 0x13d0),
Register('spr0f5', 4, 0x13d4),
Register('spr0f6', 4, 0x13d8),
Register('spr0f7', 4, 0x13dc),
Register('spr0f8', 4, 0x13e0),
Register('spr0f9', 4, 0x13e4),
Register('spr0fa', 4, 0x13e8),
Register('spr0fb', 4, 0x13ec),
Register('spr0fc', 4, 0x13f0),
Register('spr0fd', 4, 0x13f4),
Register('spr0fe', 4, 0x13f8),
Register('spr0ff', 4, 0x13fc),
Register('spr100', 4, 0x1400),
Register('spr101', 4, 0x1404),
Register('spr102', 4, 0x1408),
Register('spr103', 4, 0x140c),
Register('spr104', 4, 0x1410),
Register('spr105', 4, 0x1414),
Register('spr106', 4, 0x1418),
Register('spr107', 4, 0x141c),
Register('spr108', 4, 0x1420),
Register('spr109', 4, 0x1424),
Register('spr10a', 4, 0x1428),
Register('spr10b', 4, 0x142c),
Register('tblr', 4, 0x1430),
Register('tbur', 4, 0x1434),
Register('spr10e', 4, 0x1438),
Register('spr10f', 4, 0x143c),
Register('spr110', 4, 0x1440),
Register('spr111', 4, 0x1444),
Register('spr112', 4, 0x1448),
Register('spr113', 4, 0x144c),
Register('spr114', 4, 0x1450),
Register('spr115', 4, 0x1454),
Register('spr116', 4, 0x1458),
Register('spr117', 4, 0x145c),
Register('spr118', 4, 0x1460),
Register('spr119', 4, 0x1464),
Register('spr11a', 4, 0x1468),
Register('spr11b', 4, 0x146c),
Register('tblw', 4, 0x1470),
Register('tbuw', 4, 0x1474),
Register('spr11e', 4, 0x1478),
Register('spr11f', 4, 0x147c),
Register('spr120', 4, 0x1480),
Register('spr121', 4, 0x1484),
Register('spr122', 4, 0x1488),
Register('spr123', 4, 0x148c),
Register('spr124', 4, 0x1490),
Register('spr125', 4, 0x1494),
Register('spr126', 4, 0x1498),
Register('spr127', 4, 0x149c),
Register('spr128', 4, 0x14a0),
Register('spr129', 4, 0x14a4),
Register('spr12a', 4, 0x14a8),
Register('spr12b', 4, 0x14ac),
Register('spr12c', 4, 0x14b0),
Register('spr12d', 4, 0x14b4),
Register('spr12e', 4, 0x14b8),
Register('spr12f', 4, 0x14bc),
Register('spr130', 4, 0x14c0),
Register('spr131', 4, 0x14c4),
Register('spr132', 4, 0x14c8),
Register('spr133', 4, 0x14cc),
Register('spr134', 4, 0x14d0),
Register('spr135', 4, 0x14d4),
Register('spr136', 4, 0x14d8),
Register('spr137', 4, 0x14dc),
Register('spr138', 4, 0x14e0),
Register('spr139', 4, 0x14e4),
Register('spr13a', 4, 0x14e8),
Register('spr13b', 4, 0x14ec),
Register('spr13c', 4, 0x14f0),
Register('spr13d', 4, 0x14f4),
Register('spr13e', 4, 0x14f8),
Register('spr13f', 4, 0x14fc),
Register('spr140', 4, 0x1500),
Register('spr141', 4, 0x1504),
Register('spr142', 4, 0x1508),
Register('spr143', 4, 0x150c),
Register('spr144', 4, 0x1510),
Register('spr145', 4, 0x1514),
Register('spr146', 4, 0x1518),
Register('spr147', 4, 0x151c),
Register('spr148', 4, 0x1520),
Register('spr149', 4, 0x1524),
Register('spr14a', 4, 0x1528),
Register('spr14b', 4, 0x152c),
Register('spr14c', 4, 0x1530),
Register('spr14d', 4, 0x1534),
Register('spr14e', 4, 0x1538),
Register('spr14f', 4, 0x153c),
Register('spr150', 4, 0x1540),
Register('spr151', 4, 0x1544),
Register('spr152', 4, 0x1548),
Register('spr153', 4, 0x154c),
Register('spr154', 4, 0x1550),
Register('spr155', 4, 0x1554),
Register('spr156', 4, 0x1558),
Register('spr157', 4, 0x155c),
Register('spr158', 4, 0x1560),
Register('spr159', 4, 0x1564),
Register('spr15a', 4, 0x1568),
Register('spr15b', 4, 0x156c),
Register('spr15c', 4, 0x1570),
Register('spr15d', 4, 0x1574),
Register('spr15e', 4, 0x1578),
Register('spr15f', 4, 0x157c),
Register('spr160', 4, 0x1580),
Register('spr161', 4, 0x1584),
Register('spr162', 4, 0x1588),
Register('spr163', 4, 0x158c),
Register('spr164', 4, 0x1590),
Register('spr165', 4, 0x1594),
Register('spr166', 4, 0x1598),
Register('spr167', 4, 0x159c),
Register('spr168', 4, 0x15a0),
Register('spr169', 4, 0x15a4),
Register('spr16a', 4, 0x15a8),
Register('spr16b', 4, 0x15ac),
Register('spr16c', 4, 0x15b0),
Register('spr16d', 4, 0x15b4),
Register('spr16e', 4, 0x15b8),
Register('spr16f', 4, 0x15bc),
Register('spr170', 4, 0x15c0),
Register('spr171', 4, 0x15c4),
Register('spr172', 4, 0x15c8),
Register('spr173', 4, 0x15cc),
Register('spr174', 4, 0x15d0),
Register('spr175', 4, 0x15d4),
Register('spr176', 4, 0x15d8),
Register('spr177', 4, 0x15dc),
Register('spr178', 4, 0x15e0),
Register('spr179', 4, 0x15e4),
Register('spr17a', 4, 0x15e8),
Register('spr17b', 4, 0x15ec),
Register('spr17c', 4, 0x15f0),
Register('spr17d', 4, 0x15f4),
Register('spr17e', 4, 0x15f8),
Register('spr17f', 4, 0x15fc),
Register('spr180', 4, 0x1600),
Register('spr181', 4, 0x1604),
Register('spr182', 4, 0x1608),
Register('spr183', 4, 0x160c),
Register('spr184', 4, 0x1610),
Register('spr185', 4, 0x1614),
Register('spr186', 4, 0x1618),
Register('spr187', 4, 0x161c),
Register('spr188', 4, 0x1620),
Register('spr189', 4, 0x1624),
Register('spr18a', 4, 0x1628),
Register('spr18b', 4, 0x162c),
Register('spr18c', 4, 0x1630),
Register('spr18d', 4, 0x1634),
Register('spr18e', 4, 0x1638),
Register('spr18f', 4, 0x163c),
Register('spr190', 4, 0x1640),
Register('spr191', 4, 0x1644),
Register('spr192', 4, 0x1648),
Register('spr193', 4, 0x164c),
Register('spr194', 4, 0x1650),
Register('spr195', 4, 0x1654),
Register('spr196', 4, 0x1658),
Register('spr197', 4, 0x165c),
Register('spr198', 4, 0x1660),
Register('spr199', 4, 0x1664),
Register('spr19a', 4, 0x1668),
Register('spr19b', 4, 0x166c),
Register('spr19c', 4, 0x1670),
Register('spr19d', 4, 0x1674),
Register('spr19e', 4, 0x1678),
Register('spr19f', 4, 0x167c),
Register('spr1a0', 4, 0x1680),
Register('spr1a1', 4, 0x1684),
Register('spr1a2', 4, 0x1688),
Register('spr1a3', 4, 0x168c),
Register('spr1a4', 4, 0x1690),
Register('spr1a5', 4, 0x1694),
Register('spr1a6', 4, 0x1698),
Register('spr1a7', 4, 0x169c),
Register('spr1a8', 4, 0x16a0),
Register('spr1a9', 4, 0x16a4),
Register('spr1aa', 4, 0x16a8),
Register('spr1ab', 4, 0x16ac),
Register('spr1ac', 4, 0x16b0),
Register('spr1ad', 4, 0x16b4),
Register('spr1ae', 4, 0x16b8),
Register('spr1af', 4, 0x16bc),
Register('spr1b0', 4, 0x16c0),
Register('spr1b1', 4, 0x16c4),
Register('spr1b2', 4, 0x16c8),
Register('spr1b3', 4, 0x16cc),
Register('spr1b4', 4, 0x16d0),
Register('spr1b5', 4, 0x16d4),
Register('spr1b6', 4, 0x16d8),
Register('spr1b7', 4, 0x16dc),
Register('spr1b8', 4, 0x16e0),
Register('spr1b9', 4, 0x16e4),
Register('spr1ba', 4, 0x16e8),
Register('spr1bb', 4, 0x16ec),
Register('spr1bc', 4, 0x16f0),
Register('spr1bd', 4, 0x16f4),
Register('spr1be', 4, 0x16f8),
Register('spr1bf', 4, 0x16fc),
Register('spr1c0', 4, 0x1700),
Register('spr1c1', 4, 0x1704),
Register('spr1c2', 4, 0x1708),
Register('spr1c3', 4, 0x170c),
Register('spr1c4', 4, 0x1710),
Register('spr1c5', 4, 0x1714),
Register('spr1c6', 4, 0x1718),
Register('spr1c7', 4, 0x171c),
Register('spr1c8', 4, 0x1720),
Register('spr1c9', 4, 0x1724),
Register('spr1ca', 4, 0x1728),
Register('spr1cb', 4, 0x172c),
Register('spr1cc', 4, 0x1730),
Register('spr1cd', 4, 0x1734),
Register('spr1ce', 4, 0x1738),
Register('spr1cf', 4, 0x173c),
Register('spr1d0', 4, 0x1740),
Register('spr1d1', 4, 0x1744),
Register('spr1d2', 4, 0x1748),
Register('spr1d3', 4, 0x174c),
Register('spr1d4', 4, 0x1750),
Register('spr1d5', 4, 0x1754),
Register('spr1d6', 4, 0x1758),
Register('spr1d7', 4, 0x175c),
Register('spr1d8', 4, 0x1760),
Register('spr1d9', 4, 0x1764),
Register('spr1da', 4, 0x1768),
Register('spr1db', 4, 0x176c),
Register('spr1dc', 4, 0x1770),
Register('spr1dd', 4, 0x1774),
Register('spr1de', 4, 0x1778),
Register('spr1df', 4, 0x177c),
Register('spr1e0', 4, 0x1780),
Register('spr1e1', 4, 0x1784),
Register('spr1e2', 4, 0x1788),
Register('spr1e3', 4, 0x178c),
Register('spr1e4', 4, 0x1790),
Register('spr1e5', 4, 0x1794),
Register('spr1e6', 4, 0x1798),
Register('spr1e7', 4, 0x179c),
Register('spr1e8', 4, 0x17a0),
Register('spr1e9', 4, 0x17a4),
Register('spr1ea', 4, 0x17a8),
Register('spr1eb', 4, 0x17ac),
Register('spr1ec', 4, 0x17b0),
Register('spr1ed', 4, 0x17b4),
Register('spr1ee', 4, 0x17b8),
Register('spr1ef', 4, 0x17bc),
Register('spr1f0', 4, 0x17c0),
Register('spr1f1', 4, 0x17c4),
Register('spr1f2', 4, 0x17c8),
Register('spr1f3', 4, 0x17cc),
Register('spr1f4', 4, 0x17d0),
Register('spr1f5', 4, 0x17d4),
Register('spr1f6', 4, 0x17d8),
Register('spr1f7', 4, 0x17dc),
Register('spr1f8', 4, 0x17e0),
Register('spr1f9', 4, 0x17e4),
Register('spr1fa', 4, 0x17e8),
Register('spr1fb', 4, 0x17ec),
Register('spr1fc', 4, 0x17f0),
Register('spr1fd', 4, 0x17f4),
Register('spr1fe', 4, 0x17f8),
Register('spr1ff', 4, 0x17fc),
Register('spr200', 4, 0x1800),
Register('spr201', 4, 0x1804),
Register('spr202', 4, 0x1808),
Register('spr203', 4, 0x180c),
Register('spr204', 4, 0x1810),
Register('spr205', 4, 0x1814),
Register('spr206', 4, 0x1818),
Register('spr207', 4, 0x181c),
Register('spr208', 4, 0x1820),
Register('spr209', 4, 0x1824),
Register('spr20a', 4, 0x1828),
Register('spr20b', 4, 0x182c),
Register('spr20c', 4, 0x1830),
Register('spr20d', 4, 0x1834),
Register('spr20e', 4, 0x1838),
Register('spr20f', 4, 0x183c),
Register('spr210', 4, 0x1840),
Register('spr211', 4, 0x1844),
Register('spr212', 4, 0x1848),
Register('spr213', 4, 0x184c),
Register('spr214', 4, 0x1850),
Register('spr215', 4, 0x1854),
Register('spr216', 4, 0x1858),
Register('spr217', 4, 0x185c),
Register('spr218', 4, 0x1860),
Register('spr219', 4, 0x1864),
Register('spr21a', 4, 0x1868),
Register('spr21b', 4, 0x186c),
Register('spr21c', 4, 0x1870),
Register('spr21d', 4, 0x1874),
Register('spr21e', 4, 0x1878),
Register('spr21f', 4, 0x187c),
Register('spr220', 4, 0x1880),
Register('spr221', 4, 0x1884),
Register('spr222', 4, 0x1888),
Register('spr223', 4, 0x188c),
Register('spr224', 4, 0x1890),
Register('spr225', 4, 0x1894),
Register('spr226', 4, 0x1898),
Register('spr227', 4, 0x189c),
Register('spr228', 4, 0x18a0),
Register('spr229', 4, 0x18a4),
Register('spr22a', 4, 0x18a8),
Register('spr22b', 4, 0x18ac),
Register('spr22c', 4, 0x18b0),
Register('spr22d', 4, 0x18b4),
Register('spr22e', 4, 0x18b8),
Register('spr22f', 4, 0x18bc),
Register('spr230', 4, 0x18c0),
Register('spr231', 4, 0x18c4),
Register('spr232', 4, 0x18c8),
Register('spr233', 4, 0x18cc),
Register('spr234', 4, 0x18d0),
Register('spr235', 4, 0x18d4),
Register('spr236', 4, 0x18d8),
Register('spr237', 4, 0x18dc),
Register('spr238', 4, 0x18e0),
Register('spr239', 4, 0x18e4),
Register('spr23a', 4, 0x18e8),
Register('spr23b', 4, 0x18ec),
Register('spr23c', 4, 0x18f0),
Register('spr23d', 4, 0x18f4),
Register('spr23e', 4, 0x18f8),
Register('spr23f', 4, 0x18fc),
Register('spr240', 4, 0x1900),
Register('spr241', 4, 0x1904),
Register('spr242', 4, 0x1908),
Register('spr243', 4, 0x190c),
Register('spr244', 4, 0x1910),
Register('spr245', 4, 0x1914),
Register('spr246', 4, 0x1918),
Register('spr247', 4, 0x191c),
Register('spr248', 4, 0x1920),
Register('spr249', 4, 0x1924),
Register('spr24a', 4, 0x1928),
Register('spr24b', 4, 0x192c),
Register('spr24c', 4, 0x1930),
Register('spr24d', 4, 0x1934),
Register('spr24e', 4, 0x1938),
Register('spr24f', 4, 0x193c),
Register('spr250', 4, 0x1940),
Register('spr251', 4, 0x1944),
Register('spr252', 4, 0x1948),
Register('spr253', 4, 0x194c),
Register('spr254', 4, 0x1950),
Register('spr255', 4, 0x1954),
Register('spr256', 4, 0x1958),
Register('spr257', 4, 0x195c),
Register('spr258', 4, 0x1960),
Register('spr259', 4, 0x1964),
Register('spr25a', 4, 0x1968),
Register('spr25b', 4, 0x196c),
Register('spr25c', 4, 0x1970),
Register('spr25d', 4, 0x1974),
Register('spr25e', 4, 0x1978),
Register('spr25f', 4, 0x197c),
Register('spr260', 4, 0x1980),
Register('spr261', 4, 0x1984),
Register('spr262', 4, 0x1988),
Register('spr263', 4, 0x198c),
Register('spr264', 4, 0x1990),
Register('spr265', 4, 0x1994),
Register('spr266', 4, 0x1998),
Register('spr267', 4, 0x199c),
Register('spr268', 4, 0x19a0),
Register('spr269', 4, 0x19a4),
Register('spr26a', 4, 0x19a8),
Register('spr26b', 4, 0x19ac),
Register('spr26c', 4, 0x19b0),
Register('spr26d', 4, 0x19b4),
Register('spr26e', 4, 0x19b8),
Register('spr26f', 4, 0x19bc),
Register('spr270', 4, 0x19c0),
Register('spr271', 4, 0x19c4),
Register('spr272', 4, 0x19c8),
Register('spr273', 4, 0x19cc),
Register('spr274', 4, 0x19d0),
Register('spr275', 4, 0x19d4),
Register('spr276', 4, 0x19d8),
Register('spr277', 4, 0x19dc),
Register('spr278', 4, 0x19e0),
Register('spr279', 4, 0x19e4),
Register('spr27a', 4, 0x19e8),
Register('spr27b', 4, 0x19ec),
Register('spr27c', 4, 0x19f0),
Register('spr27d', 4, 0x19f4),
Register('spr27e', 4, 0x19f8),
Register('spr27f', 4, 0x19fc),
Register('spr280', 4, 0x1a00),
Register('spr281', 4, 0x1a04),
Register('spr282', 4, 0x1a08),
Register('spr283', 4, 0x1a0c),
Register('spr284', 4, 0x1a10),
Register('spr285', 4, 0x1a14),
Register('spr286', 4, 0x1a18),
Register('spr287', 4, 0x1a1c),
Register('spr288', 4, 0x1a20),
Register('spr289', 4, 0x1a24),
Register('spr28a', 4, 0x1a28),
Register('spr28b', 4, 0x1a2c),
Register('spr28c', 4, 0x1a30),
Register('spr28d', 4, 0x1a34),
Register('spr28e', 4, 0x1a38),
Register('spr28f', 4, 0x1a3c),
Register('spr290', 4, 0x1a40),
Register('spr291', 4, 0x1a44),
Register('spr292', 4, 0x1a48),
Register('spr293', 4, 0x1a4c),
Register('spr294', 4, 0x1a50),
Register('spr295', 4, 0x1a54),
Register('spr296', 4, 0x1a58),
Register('spr297', 4, 0x1a5c),
Register('spr298', 4, 0x1a60),
Register('spr299', 4, 0x1a64),
Register('spr29a', 4, 0x1a68),
Register('spr29b', 4, 0x1a6c),
Register('spr29c', 4, 0x1a70),
Register('spr29d', 4, 0x1a74),
Register('spr29e', 4, 0x1a78),
Register('spr29f', 4, 0x1a7c),
Register('spr2a0', 4, 0x1a80),
Register('spr2a1', 4, 0x1a84),
Register('spr2a2', 4, 0x1a88),
Register('spr2a3', 4, 0x1a8c),
Register('spr2a4', 4, 0x1a90),
Register('spr2a5', 4, 0x1a94),
Register('spr2a6', 4, 0x1a98),
Register('spr2a7', 4, 0x1a9c),
Register('spr2a8', 4, 0x1aa0),
Register('spr2a9', 4, 0x1aa4),
Register('spr2aa', 4, 0x1aa8),
Register('spr2ab', 4, 0x1aac),
Register('spr2ac', 4, 0x1ab0),
Register('spr2ad', 4, 0x1ab4),
Register('spr2ae', 4, 0x1ab8),
Register('spr2af', 4, 0x1abc),
Register('spr2b0', 4, 0x1ac0),
Register('spr2b1', 4, 0x1ac4),
Register('spr2b2', 4, 0x1ac8),
Register('spr2b3', 4, 0x1acc),
Register('spr2b4', 4, 0x1ad0),
Register('spr2b5', 4, 0x1ad4),
Register('spr2b6', 4, 0x1ad8),
Register('spr2b7', 4, 0x1adc),
Register('spr2b8', 4, 0x1ae0),
Register('spr2b9', 4, 0x1ae4),
Register('spr2ba', 4, 0x1ae8),
Register('spr2bb', 4, 0x1aec),
Register('spr2bc', 4, 0x1af0),
Register('spr2bd', 4, 0x1af4),
Register('spr2be', 4, 0x1af8),
Register('spr2bf', 4, 0x1afc),
Register('spr2c0', 4, 0x1b00),
Register('spr2c1', 4, 0x1b04),
Register('spr2c2', 4, 0x1b08),
Register('spr2c3', 4, 0x1b0c),
Register('spr2c4', 4, 0x1b10),
Register('spr2c5', 4, 0x1b14),
Register('spr2c6', 4, 0x1b18),
Register('spr2c7', 4, 0x1b1c),
Register('spr2c8', 4, 0x1b20),
Register('spr2c9', 4, 0x1b24),
Register('spr2ca', 4, 0x1b28),
Register('spr2cb', 4, 0x1b2c),
Register('spr2cc', 4, 0x1b30),
Register('spr2cd', 4, 0x1b34),
Register('spr2ce', 4, 0x1b38),
Register('spr2cf', 4, 0x1b3c),
Register('spr2d0', 4, 0x1b40),
Register('spr2d1', 4, 0x1b44),
Register('spr2d2', 4, 0x1b48),
Register('spr2d3', 4, 0x1b4c),
Register('spr2d4', 4, 0x1b50),
Register('spr2d5', 4, 0x1b54),
Register('spr2d6', 4, 0x1b58),
Register('spr2d7', 4, 0x1b5c),
Register('spr2d8', 4, 0x1b60),
Register('spr2d9', 4, 0x1b64),
Register('spr2da', 4, 0x1b68),
Register('spr2db', 4, 0x1b6c),
Register('spr2dc', 4, 0x1b70),
Register('spr2dd', 4, 0x1b74),
Register('spr2de', 4, 0x1b78),
Register('spr2df', 4, 0x1b7c),
Register('spr2e0', 4, 0x1b80),
Register('spr2e1', 4, 0x1b84),
Register('spr2e2', 4, 0x1b88),
Register('spr2e3', 4, 0x1b8c),
Register('spr2e4', 4, 0x1b90),
Register('spr2e5', 4, 0x1b94),
Register('spr2e6', 4, 0x1b98),
Register('spr2e7', 4, 0x1b9c),
Register('spr2e8', 4, 0x1ba0),
Register('spr2e9', 4, 0x1ba4),
Register('spr2ea', 4, 0x1ba8),
Register('spr2eb', 4, 0x1bac),
Register('spr2ec', 4, 0x1bb0),
Register('spr2ed', 4, 0x1bb4),
Register('spr2ee', 4, 0x1bb8),
Register('spr2ef', 4, 0x1bbc),
Register('spr2f0', 4, 0x1bc0),
Register('spr2f1', 4, 0x1bc4),
Register('spr2f2', 4, 0x1bc8),
Register('spr2f3', 4, 0x1bcc),
Register('spr2f4', 4, 0x1bd0),
Register('spr2f5', 4, 0x1bd4),
Register('spr2f6', 4, 0x1bd8),
Register('spr2f7', 4, 0x1bdc),
Register('spr2f8', 4, 0x1be0),
Register('spr2f9', 4, 0x1be4),
Register('spr2fa', 4, 0x1be8),
Register('spr2fb', 4, 0x1bec),
Register('spr2fc', 4, 0x1bf0),
Register('spr2fd', 4, 0x1bf4),
Register('spr2fe', 4, 0x1bf8),
Register('spr2ff', 4, 0x1bfc),
Register('spr300', 4, 0x1c00),
Register('spr301', 4, 0x1c04),
Register('spr302', 4, 0x1c08),
Register('spr303', 4, 0x1c0c),
Register('spr304', 4, 0x1c10),
Register('spr305', 4, 0x1c14),
Register('spr306', 4, 0x1c18),
Register('spr307', 4, 0x1c1c),
Register('spr308', 4, 0x1c20),
Register('spr309', 4, 0x1c24),
Register('spr30a', 4, 0x1c28),
Register('spr30b', 4, 0x1c2c),
Register('spr30c', 4, 0x1c30),
Register('spr30d', 4, 0x1c34),
Register('spr30e', 4, 0x1c38),
Register('spr30f', 4, 0x1c3c),
Register('spr310', 4, 0x1c40),
Register('spr311', 4, 0x1c44),
Register('spr312', 4, 0x1c48),
Register('spr313', 4, 0x1c4c),
Register('spr314', 4, 0x1c50),
Register('spr315', 4, 0x1c54),
Register('spr316', 4, 0x1c58),
Register('spr317', 4, 0x1c5c),
Register('spr318', 4, 0x1c60),
Register('spr319', 4, 0x1c64),
Register('spr31a', 4, 0x1c68),
Register('spr31b', 4, 0x1c6c),
Register('spr31c', 4, 0x1c70),
Register('spr31d', 4, 0x1c74),
Register('spr31e', 4, 0x1c78),
Register('spr31f', 4, 0x1c7c),
Register('spr320', 4, 0x1c80),
Register('spr321', 4, 0x1c84),
Register('spr322', 4, 0x1c88),
Register('spr323', 4, 0x1c8c),
Register('spr324', 4, 0x1c90),
Register('spr325', 4, 0x1c94),
Register('spr326', 4, 0x1c98),
Register('spr327', 4, 0x1c9c),
Register('spr328', 4, 0x1ca0),
Register('spr329', 4, 0x1ca4),
Register('spr32a', 4, 0x1ca8),
Register('spr32b', 4, 0x1cac),
Register('spr32c', 4, 0x1cb0),
Register('spr32d', 4, 0x1cb4),
Register('spr32e', 4, 0x1cb8),
Register('tar', 4, 0x1cbc),
Register('spr330', 4, 0x1cc0),
Register('spr331', 4, 0x1cc4),
Register('spr332', 4, 0x1cc8),
Register('spr333', 4, 0x1ccc),
Register('spr334', 4, 0x1cd0),
Register('spr335', 4, 0x1cd4),
Register('spr336', 4, 0x1cd8),
Register('spr337', 4, 0x1cdc),
Register('spr338', 4, 0x1ce0),
Register('spr339', 4, 0x1ce4),
Register('spr33a', 4, 0x1ce8),
Register('spr33b', 4, 0x1cec),
Register('spr33c', 4, 0x1cf0),
Register('spr33d', 4, 0x1cf4),
Register('spr33e', 4, 0x1cf8),
Register('spr33f', 4, 0x1cfc),
Register('spr340', 4, 0x1d00),
Register('spr341', 4, 0x1d04),
Register('spr342', 4, 0x1d08),
Register('spr343', 4, 0x1d0c),
Register('spr344', 4, 0x1d10),
Register('spr345', 4, 0x1d14),
Register('spr346', 4, 0x1d18),
Register('spr347', 4, 0x1d1c),
Register('spr348', 4, 0x1d20),
Register('spr349', 4, 0x1d24),
Register('spr34a', 4, 0x1d28),
Register('spr34b', 4, 0x1d2c),
Register('spr34c', 4, 0x1d30),
Register('spr34d', 4, 0x1d34),
Register('spr34e', 4, 0x1d38),
Register('spr34f', 4, 0x1d3c),
Register('spr350', 4, 0x1d40),
Register('spr351', 4, 0x1d44),
Register('spr352', 4, 0x1d48),
Register('spr353', 4, 0x1d4c),
Register('spr354', 4, 0x1d50),
Register('spr355', 4, 0x1d54),
Register('spr356', 4, 0x1d58),
Register('spr357', 4, 0x1d5c),
Register('spr358', 4, 0x1d60),
Register('spr359', 4, 0x1d64),
Register('spr35a', 4, 0x1d68),
Register('spr35b', 4, 0x1d6c),
Register('spr35c', 4, 0x1d70),
Register('spr35d', 4, 0x1d74),
Register('spr35e', 4, 0x1d78),
Register('spr35f', 4, 0x1d7c),
Register('spr360', 4, 0x1d80),
Register('spr361', 4, 0x1d84),
Register('spr362', 4, 0x1d88),
Register('spr363', 4, 0x1d8c),
Register('spr364', 4, 0x1d90),
Register('spr365', 4, 0x1d94),
Register('spr366', 4, 0x1d98),
Register('spr367', 4, 0x1d9c),
Register('spr368', 4, 0x1da0),
Register('spr369', 4, 0x1da4),
Register('spr36a', 4, 0x1da8),
Register('spr36b', 4, 0x1dac),
Register('spr36c', 4, 0x1db0),
Register('spr36d', 4, 0x1db4),
Register('spr36e', 4, 0x1db8),
Register('spr36f', 4, 0x1dbc),
Register('spr370', 4, 0x1dc0),
Register('spr371', 4, 0x1dc4),
Register('spr372', 4, 0x1dc8),
Register('spr373', 4, 0x1dcc),
Register('spr374', 4, 0x1dd0),
Register('spr375', 4, 0x1dd4),
Register('spr376', 4, 0x1dd8),
Register('spr377', 4, 0x1ddc),
Register('spr378', 4, 0x1de0),
Register('spr379', 4, 0x1de4),
Register('spr37a', 4, 0x1de8),
Register('spr37b', 4, 0x1dec),
Register('spr37c', 4, 0x1df0),
Register('spr37d', 4, 0x1df4),
Register('spr37e', 4, 0x1df8),
Register('spr37f', 4, 0x1dfc),
Register('spr380', 4, 0x1e00),
Register('spr381', 4, 0x1e04),
Register('spr382', 4, 0x1e08),
Register('spr383', 4, 0x1e0c),
Register('spr384', 4, 0x1e10),
Register('spr385', 4, 0x1e14),
Register('spr386', 4, 0x1e18),
Register('spr387', 4, 0x1e1c),
Register('spr388', 4, 0x1e20),
Register('spr389', 4, 0x1e24),
Register('spr38a', 4, 0x1e28),
Register('spr38b', 4, 0x1e2c),
Register('spr38c', 4, 0x1e30),
Register('spr38d', 4, 0x1e34),
Register('spr38e', 4, 0x1e38),
Register('spr38f', 4, 0x1e3c),
Register('spr390', 4, 0x1e40),
Register('spr391', 4, 0x1e44),
Register('spr392', 4, 0x1e48),
Register('spr393', 4, 0x1e4c),
Register('spr394', 4, 0x1e50),
Register('spr395', 4, 0x1e54),
Register('spr396', 4, 0x1e58),
Register('spr397', 4, 0x1e5c),
Register('spr398', 4, 0x1e60),
Register('spr399', 4, 0x1e64),
Register('spr39a', 4, 0x1e68),
Register('spr39b', 4, 0x1e6c),
Register('spr39c', 4, 0x1e70),
Register('spr39d', 4, 0x1e74),
Register('spr39e', 4, 0x1e78),
Register('spr39f', 4, 0x1e7c),
Register('spr3a0', 4, 0x1e80),
Register('spr3a1', 4, 0x1e84),
Register('spr3a2', 4, 0x1e88),
Register('spr3a3', 4, 0x1e8c),
Register('spr3a4', 4, 0x1e90),
Register('spr3a5', 4, 0x1e94),
Register('spr3a6', 4, 0x1e98),
Register('spr3a7', 4, 0x1e9c),
Register('spr3a8', 4, 0x1ea0),
Register('spr3a9', 4, 0x1ea4),
Register('spr3aa', 4, 0x1ea8),
Register('spr3ab', 4, 0x1eac),
Register('spr3ac', 4, 0x1eb0),
Register('spr3ad', 4, 0x1eb4),
Register('spr3ae', 4, 0x1eb8),
Register('spr3af', 4, 0x1ebc),
Register('spr3b0', 4, 0x1ec0),
Register('spr3b1', 4, 0x1ec4),
Register('spr3b2', 4, 0x1ec8),
Register('spr3b3', 4, 0x1ecc),
Register('spr3b4', 4, 0x1ed0),
Register('spr3b5', 4, 0x1ed4),
Register('spr3b6', 4, 0x1ed8),
Register('spr3b7', 4, 0x1edc),
Register('spr3b8', 4, 0x1ee0),
Register('spr3b9', 4, 0x1ee4),
Register('spr3ba', 4, 0x1ee8),
Register('spr3bb', 4, 0x1eec),
Register('spr3bc', 4, 0x1ef0),
Register('spr3bd', 4, 0x1ef4),
Register('spr3be', 4, 0x1ef8),
Register('spr3bf', 4, 0x1efc),
Register('spr3c0', 4, 0x1f00),
Register('spr3c1', 4, 0x1f04),
Register('spr3c2', 4, 0x1f08),
Register('spr3c3', 4, 0x1f0c),
Register('spr3c4', 4, 0x1f10),
Register('spr3c5', 4, 0x1f14),
Register('spr3c6', 4, 0x1f18),
Register('spr3c7', 4, 0x1f1c),
Register('spr3c8', 4, 0x1f20),
Register('spr3c9', 4, 0x1f24),
Register('spr3ca', 4, 0x1f28),
Register('spr3cb', 4, 0x1f2c),
Register('spr3cc', 4, 0x1f30),
Register('spr3cd', 4, 0x1f34),
Register('spr3ce', 4, 0x1f38),
Register('spr3cf', 4, 0x1f3c),
Register('spr3d0', 4, 0x1f40),
Register('spr3d1', 4, 0x1f44),
Register('spr3d2', 4, 0x1f48),
Register('spr3d3', 4, 0x1f4c),
Register('spr3d4', 4, 0x1f50),
Register('spr3d5', 4, 0x1f54),
Register('spr3d6', 4, 0x1f58),
Register('spr3d7', 4, 0x1f5c),
Register('spr3d8', 4, 0x1f60),
Register('spr3d9', 4, 0x1f64),
Register('spr3da', 4, 0x1f68),
Register('spr3db', 4, 0x1f6c),
Register('spr3dc', 4, 0x1f70),
Register('spr3dd', 4, 0x1f74),
Register('spr3de', 4, 0x1f78),
Register('spr3df', 4, 0x1f7c),
Register('spr3e0', 4, 0x1f80),
Register('spr3e1', 4, 0x1f84),
Register('spr3e2', 4, 0x1f88),
Register('spr3e3', 4, 0x1f8c),
Register('spr3e4', 4, 0x1f90),
Register('spr3e5', 4, 0x1f94),
Register('spr3e6', 4, 0x1f98),
Register('spr3e7', 4, 0x1f9c),
Register('spr3e8', 4, 0x1fa0),
Register('spr3e9', 4, 0x1fa4),
Register('spr3ea', 4, 0x1fa8),
Register('spr3eb', 4, 0x1fac),
Register('spr3ec', 4, 0x1fb0),
Register('spr3ed', 4, 0x1fb4),
Register('spr3ee', 4, 0x1fb8),
Register('spr3ef', 4, 0x1fbc),
Register('spr3f0', 4, 0x1fc0),
Register('spr3f1', 4, 0x1fc4),
Register('spr3f2', 4, 0x1fc8),
Register('spr3f3', 4, 0x1fcc),
Register('spr3f4', 4, 0x1fd0),
Register('spr3f5', 4, 0x1fd4),
Register('spr3f6', 4, 0x1fd8),
Register('spr3f7', 4, 0x1fdc),
Register('spr3f8', 4, 0x1fe0),
Register('spr3f9', 4, 0x1fe4),
Register('spr3fa', 4, 0x1fe8),
Register('spr3fb', 4, 0x1fec),
Register('spr3fc', 4, 0x1ff0),
Register('spr3fd', 4, 0x1ff4),
Register('spr3fe', 4, 0x1ff8),
Register('spr3ff', 4, 0x1ffc),
Register('vs0', 16, 0x4000),
Register('f0', 8, 0x4008),
Register('vs1', 16, 0x4010),
Register('f1', 8, 0x4018),
Register('vs2', 16, 0x4020),
Register('f2', 8, 0x4028),
Register('vs3', 16, 0x4030),
Register('f3', 8, 0x4038),
Register('vs4', 16, 0x4040),
Register('f4', 8, 0x4048),
Register('vs5', 16, 0x4050),
Register('f5', 8, 0x4058),
Register('vs6', 16, 0x4060),
Register('f6', 8, 0x4068),
Register('vs7', 16, 0x4070),
Register('f7', 8, 0x4078),
Register('vs8', 16, 0x4080),
Register('f8', 8, 0x4088),
Register('vs9', 16, 0x4090),
Register('f9', 8, 0x4098),
Register('vs10', 16, 0x40a0),
Register('f10', 8, 0x40a8),
Register('vs11', 16, 0x40b0),
Register('f11', 8, 0x40b8),
Register('vs12', 16, 0x40c0),
Register('f12', 8, 0x40c8),
Register('vs13', 16, 0x40d0),
Register('f13', 8, 0x40d8),
Register('vs14', 16, 0x40e0),
Register('f14', 8, 0x40e8),
Register('vs15', 16, 0x40f0),
Register('f15', 8, 0x40f8),
Register('vs16', 16, 0x4100),
Register('f16', 8, 0x4108),
Register('vs17', 16, 0x4110),
Register('f17', 8, 0x4118),
Register('vs18', 16, 0x4120),
Register('f18', 8, 0x4128),
Register('vs19', 16, 0x4130),
Register('f19', 8, 0x4138),
Register('vs20', 16, 0x4140),
Register('f20', 8, 0x4148),
Register('vs21', 16, 0x4150),
Register('f21', 8, 0x4158),
Register('vs22', 16, 0x4160),
Register('f22', 8, 0x4168),
Register('vs23', 16, 0x4170),
Register('f23', 8, 0x4178),
Register('vs24', 16, 0x4180),
Register('f24', 8, 0x4188),
Register('vs25', 16, 0x4190),
Register('f25', 8, 0x4198),
Register('vs26', 16, 0x41a0),
Register('f26', 8, 0x41a8),
Register('vs27', 16, 0x41b0),
Register('f27', 8, 0x41b8),
Register('vs28', 16, 0x41c0),
Register('f28', 8, 0x41c8),
Register('vs29', 16, 0x41d0),
Register('f29', 8, 0x41d8),
Register('vs30', 16, 0x41e0),
Register('f30', 8, 0x41e8),
Register('vs31', 16, 0x41f0),
Register('f31', 8, 0x41f8),
Register('vs32', 16, 0x4200),
Register('vr0_64_1', 8, 0x4200),
Register('vr0_32_3', 4, 0x4200),
Register('vr0_16_7', 2, 0x4200),
Register('vr0_8_15', 1, 0x4200),
Register('vr0_8_14', 1, 0x4201),
Register('vr0_16_6', 2, 0x4202),
Register('vr0_8_13', 1, 0x4202),
Register('vr0_8_12', 1, 0x4203),
Register('vr0_32_2', 4, 0x4204),
Register('vr0_16_5', 2, 0x4204),
Register('vr0_8_11', 1, 0x4204),
Register('vr0_8_10', 1, 0x4205),
Register('vr0_16_4', 2, 0x4206),
Register('vr0_8_9', 1, 0x4206),
Register('vr0_8_8', 1, 0x4207),
Register('vr0_64_0', 8, 0x4208),
Register('vr0_32_1', 4, 0x4208),
Register('vr0_16_3', 2, 0x4208),
Register('vr0_8_7', 1, 0x4208),
Register('vr0_8_6', 1, 0x4209),
Register('vr0_16_2', 2, 0x420a),
Register('vr0_8_5', 1, 0x420a),
Register('vr0_8_4', 1, 0x420b),
Register('vr0_32_0', 4, 0x420c),
Register('vr0_16_1', 2, 0x420c),
Register('vr0_8_3', 1, 0x420c),
Register('vr0_8_2', 1, 0x420d),
Register('vr0_16_0', 2, 0x420e),
Register('vr0_8_1', 1, 0x420e),
Register('vr0_8_0', 1, 0x420f),
Register('vs33', 16, 0x4210),
Register('vr1_64_1', 8, 0x4210),
Register('vr1_32_3', 4, 0x4210),
Register('vr1_16_7', 2, 0x4210),
Register('vr1_8_15', 1, 0x4210),
Register('vr1_8_14', 1, 0x4211),
Register('vr1_16_6', 2, 0x4212),
Register('vr1_8_13', 1, 0x4212),
Register('vr1_8_12', 1, 0x4213),
Register('vr1_32_2', 4, 0x4214),
Register('vr1_16_5', 2, 0x4214),
Register('vr1_8_11', 1, 0x4214),
Register('vr1_8_10', 1, 0x4215),
Register('vr1_16_4', 2, 0x4216),
Register('vr1_8_9', 1, 0x4216),
Register('vr1_8_8', 1, 0x4217),
Register('vr1_64_0', 8, 0x4218),
Register('vr1_32_1', 4, 0x4218),
Register('vr1_16_3', 2, 0x4218),
Register('vr1_8_7', 1, 0x4218),
Register('vr1_8_6', 1, 0x4219),
Register('vr1_16_2', 2, 0x421a),
Register('vr1_8_5', 1, 0x421a),
Register('vr1_8_4', 1, 0x421b),
Register('vr1_32_0', 4, 0x421c),
Register('vr1_16_1', 2, 0x421c),
Register('vr1_8_3', 1, 0x421c),
Register('vr1_8_2', 1, 0x421d),
Register('vr1_16_0', 2, 0x421e),
Register('vr1_8_1', 1, 0x421e),
Register('vr1_8_0', 1, 0x421f),
Register('vs34', 16, 0x4220),
Register('vr2_64_1', 8, 0x4220),
Register('vr2_32_3', 4, 0x4220),
Register('vr2_16_7', 2, 0x4220),
Register('vr2_8_15', 1, 0x4220),
Register('vr2_8_14', 1, 0x4221),
Register('vr2_16_6', 2, 0x4222),
Register('vr2_8_13', 1, 0x4222),
Register('vr2_8_12', 1, 0x4223),
Register('vr2_32_2', 4, 0x4224),
Register('vr2_16_5', 2, 0x4224),
Register('vr2_8_11', 1, 0x4224),
Register('vr2_8_10', 1, 0x4225),
Register('vr2_16_4', 2, 0x4226),
Register('vr2_8_9', 1, 0x4226),
Register('vr2_8_8', 1, 0x4227),
Register('vr2_64_0', 8, 0x4228),
Register('vr2_32_1', 4, 0x4228),
Register('vr2_16_3', 2, 0x4228),
Register('vr2_8_7', 1, 0x4228),
Register('vr2_8_6', 1, 0x4229),
Register('vr2_16_2', 2, 0x422a),
Register('vr2_8_5', 1, 0x422a),
Register('vr2_8_4', 1, 0x422b),
Register('vr2_32_0', 4, 0x422c),
Register('vr2_16_1', 2, 0x422c),
Register('vr2_8_3', 1, 0x422c),
Register('vr2_8_2', 1, 0x422d),
Register('vr2_16_0', 2, 0x422e),
Register('vr2_8_1', 1, 0x422e),
Register('vr2_8_0', 1, 0x422f),
Register('vs35', 16, 0x4230),
Register('vr3_64_1', 8, 0x4230),
Register('vr3_32_3', 4, 0x4230),
Register('vr3_16_7', 2, 0x4230),
Register('vr3_8_15', 1, 0x4230),
Register('vr3_8_14', 1, 0x4231),
Register('vr3_16_6', 2, 0x4232),
Register('vr3_8_13', 1, 0x4232),
Register('vr3_8_12', 1, 0x4233),
Register('vr3_32_2', 4, 0x4234),
Register('vr3_16_5', 2, 0x4234),
Register('vr3_8_11', 1, 0x4234),
Register('vr3_8_10', 1, 0x4235),
Register('vr3_16_4', 2, 0x4236),
Register('vr3_8_9', 1, 0x4236),
Register('vr3_8_8', 1, 0x4237),
Register('vr3_64_0', 8, 0x4238),
Register('vr3_32_1', 4, 0x4238),
Register('vr3_16_3', 2, 0x4238),
Register('vr3_8_7', 1, 0x4238),
Register('vr3_8_6', 1, 0x4239),
Register('vr3_16_2', 2, 0x423a),
Register('vr3_8_5', 1, 0x423a),
Register('vr3_8_4', 1, 0x423b),
Register('vr3_32_0', 4, 0x423c),
Register('vr3_16_1', 2, 0x423c),
Register('vr3_8_3', 1, 0x423c),
Register('vr3_8_2', 1, 0x423d),
Register('vr3_16_0', 2, 0x423e),
Register('vr3_8_1', 1, 0x423e),
Register('vr3_8_0', 1, 0x423f),
Register('vs36', 16, 0x4240),
Register('vr4_64_1', 8, 0x4240),
Register('vr4_32_3', 4, 0x4240),
Register('vr4_16_7', 2, 0x4240),
Register('vr4_8_15', 1, 0x4240),
Register('vr4_8_14', 1, 0x4241),
Register('vr4_16_6', 2, 0x4242),
Register('vr4_8_13', 1, 0x4242),
Register('vr4_8_12', 1, 0x4243),
Register('vr4_32_2', 4, 0x4244),
Register('vr4_16_5', 2, 0x4244),
Register('vr4_8_11', 1, 0x4244),
Register('vr4_8_10', 1, 0x4245),
Register('vr4_16_4', 2, 0x4246),
Register('vr4_8_9', 1, 0x4246),
Register('vr4_8_8', 1, 0x4247),
Register('vr4_64_0', 8, 0x4248),
Register('vr4_32_1', 4, 0x4248),
Register('vr4_16_3', 2, 0x4248),
Register('vr4_8_7', 1, 0x4248),
Register('vr4_8_6', 1, 0x4249),
Register('vr4_16_2', 2, 0x424a),
Register('vr4_8_5', 1, 0x424a),
Register('vr4_8_4', 1, 0x424b),
Register('vr4_32_0', 4, 0x424c),
Register('vr4_16_1', 2, 0x424c),
Register('vr4_8_3', 1, 0x424c),
Register('vr4_8_2', 1, 0x424d),
Register('vr4_16_0', 2, 0x424e),
Register('vr4_8_1', 1, 0x424e),
Register('vr4_8_0', 1, 0x424f),
Register('vs37', 16, 0x4250),
Register('vr5_64_1', 8, 0x4250),
Register('vr5_32_3', 4, 0x4250),
Register('vr5_16_7', 2, 0x4250),
Register('vr5_8_15', 1, 0x4250),
Register('vr5_8_14', 1, 0x4251),
Register('vr5_16_6', 2, 0x4252),
Register('vr5_8_13', 1, 0x4252),
Register('vr5_8_12', 1, 0x4253),
Register('vr5_32_2', 4, 0x4254),
Register('vr5_16_5', 2, 0x4254),
Register('vr5_8_11', 1, 0x4254),
Register('vr5_8_10', 1, 0x4255),
Register('vr5_16_4', 2, 0x4256),
Register('vr5_8_9', 1, 0x4256),
Register('vr5_8_8', 1, 0x4257),
Register('vr5_64_0', 8, 0x4258),
Register('vr5_32_1', 4, 0x4258),
Register('vr5_16_3', 2, 0x4258),
Register('vr5_8_7', 1, 0x4258),
Register('vr5_8_6', 1, 0x4259),
Register('vr5_16_2', 2, 0x425a),
Register('vr5_8_5', 1, 0x425a),
Register('vr5_8_4', 1, 0x425b),
Register('vr5_32_0', 4, 0x425c),
Register('vr5_16_1', 2, 0x425c),
Register('vr5_8_3', 1, 0x425c),
Register('vr5_8_2', 1, 0x425d),
Register('vr5_16_0', 2, 0x425e),
Register('vr5_8_1', 1, 0x425e),
Register('vr5_8_0', 1, 0x425f),
Register('vs38', 16, 0x4260),
Register('vr6_64_1', 8, 0x4260),
Register('vr6_32_3', 4, 0x4260),
Register('vr6_16_7', 2, 0x4260),
Register('vr6_8_15', 1, 0x4260),
Register('vr6_8_14', 1, 0x4261),
Register('vr6_16_6', 2, 0x4262),
Register('vr6_8_13', 1, 0x4262),
Register('vr6_8_12', 1, 0x4263),
Register('vr6_32_2', 4, 0x4264),
Register('vr6_16_5', 2, 0x4264),
Register('vr6_8_11', 1, 0x4264),
Register('vr6_8_10', 1, 0x4265),
Register('vr6_16_4', 2, 0x4266),
Register('vr6_8_9', 1, 0x4266),
Register('vr6_8_8', 1, 0x4267),
Register('vr6_64_0', 8, 0x4268),
Register('vr6_32_1', 4, 0x4268),
Register('vr6_16_3', 2, 0x4268),
Register('vr6_8_7', 1, 0x4268),
Register('vr6_8_6', 1, 0x4269),
Register('vr6_16_2', 2, 0x426a),
Register('vr6_8_5', 1, 0x426a),
Register('vr6_8_4', 1, 0x426b),
Register('vr6_32_0', 4, 0x426c),
Register('vr6_16_1', 2, 0x426c),
Register('vr6_8_3', 1, 0x426c),
Register('vr6_8_2', 1, 0x426d),
Register('vr6_16_0', 2, 0x426e),
Register('vr6_8_1', 1, 0x426e),
Register('vr6_8_0', 1, 0x426f),
Register('vs39', 16, 0x4270),
Register('vr7_64_1', 8, 0x4270),
Register('vr7_32_3', 4, 0x4270),
Register('vr7_16_7', 2, 0x4270),
Register('vr7_8_15', 1, 0x4270),
Register('vr7_8_14', 1, 0x4271),
Register('vr7_16_6', 2, 0x4272),
Register('vr7_8_13', 1, 0x4272),
Register('vr7_8_12', 1, 0x4273),
Register('vr7_32_2', 4, 0x4274),
Register('vr7_16_5', 2, 0x4274),
Register('vr7_8_11', 1, 0x4274),
Register('vr7_8_10', 1, 0x4275),
Register('vr7_16_4', 2, 0x4276),
Register('vr7_8_9', 1, 0x4276),
Register('vr7_8_8', 1, 0x4277),
Register('vr7_64_0', 8, 0x4278),
Register('vr7_32_1', 4, 0x4278),
Register('vr7_16_3', 2, 0x4278),
Register('vr7_8_7', 1, 0x4278),
Register('vr7_8_6', 1, 0x4279),
Register('vr7_16_2', 2, 0x427a),
Register('vr7_8_5', 1, 0x427a),
Register('vr7_8_4', 1, 0x427b),
Register('vr7_32_0', 4, 0x427c),
Register('vr7_16_1', 2, 0x427c),
Register('vr7_8_3', 1, 0x427c),
Register('vr7_8_2', 1, 0x427d),
Register('vr7_16_0', 2, 0x427e),
Register('vr7_8_1', 1, 0x427e),
Register('vr7_8_0', 1, 0x427f),
Register('vs40', 16, 0x4280),
Register('vr8_64_1', 8, 0x4280),
Register('vr8_32_3', 4, 0x4280),
Register('vr8_16_7', 2, 0x4280),
Register('vr8_8_15', 1, 0x4280),
Register('vr8_8_14', 1, 0x4281),
Register('vr8_16_6', 2, 0x4282),
Register('vr8_8_13', 1, 0x4282),
Register('vr8_8_12', 1, 0x4283),
Register('vr8_32_2', 4, 0x4284),
Register('vr8_16_5', 2, 0x4284),
Register('vr8_8_11', 1, 0x4284),
Register('vr8_8_10', 1, 0x4285),
Register('vr8_16_4', 2, 0x4286),
Register('vr8_8_9', 1, 0x4286),
Register('vr8_8_8', 1, 0x4287),
Register('vr8_64_0', 8, 0x4288),
Register('vr8_32_1', 4, 0x4288),
Register('vr8_16_3', 2, 0x4288),
Register('vr8_8_7', 1, 0x4288),
Register('vr8_8_6', 1, 0x4289),
Register('vr8_16_2', 2, 0x428a),
Register('vr8_8_5', 1, 0x428a),
Register('vr8_8_4', 1, 0x428b),
Register('vr8_32_0', 4, 0x428c),
Register('vr8_16_1', 2, 0x428c),
Register('vr8_8_3', 1, 0x428c),
Register('vr8_8_2', 1, 0x428d),
Register('vr8_16_0', 2, 0x428e),
Register('vr8_8_1', 1, 0x428e),
Register('vr8_8_0', 1, 0x428f),
Register('vs41', 16, 0x4290),
Register('vr9_64_1', 8, 0x4290),
Register('vr9_32_3', 4, 0x4290),
Register('vr9_16_7', 2, 0x4290),
Register('vr9_8_15', 1, 0x4290),
Register('vr9_8_14', 1, 0x4291),
Register('vr9_16_6', 2, 0x4292),
Register('vr9_8_13', 1, 0x4292),
Register('vr9_8_12', 1, 0x4293),
Register('vr9_32_2', 4, 0x4294),
Register('vr9_16_5', 2, 0x4294),
Register('vr9_8_11', 1, 0x4294),
Register('vr9_8_10', 1, 0x4295),
Register('vr9_16_4', 2, 0x4296),
Register('vr9_8_9', 1, 0x4296),
Register('vr9_8_8', 1, 0x4297),
Register('vr9_64_0', 8, 0x4298),
Register('vr9_32_1', 4, 0x4298),
Register('vr9_16_3', 2, 0x4298),
Register('vr9_8_7', 1, 0x4298),
Register('vr9_8_6', 1, 0x4299),
Register('vr9_16_2', 2, 0x429a),
Register('vr9_8_5', 1, 0x429a),
Register('vr9_8_4', 1, 0x429b),
Register('vr9_32_0', 4, 0x429c),
Register('vr9_16_1', 2, 0x429c),
Register('vr9_8_3', 1, 0x429c),
Register('vr9_8_2', 1, 0x429d),
Register('vr9_16_0', 2, 0x429e),
Register('vr9_8_1', 1, 0x429e),
Register('vr9_8_0', 1, 0x429f),
Register('vs42', 16, 0x42a0),
Register('vr10_64_1', 8, 0x42a0),
Register('vr10_32_3', 4, 0x42a0),
Register('vr10_16_7', 2, 0x42a0),
Register('vr10_8_15', 1, 0x42a0),
Register('vr10_8_14', 1, 0x42a1),
Register('vr10_16_6', 2, 0x42a2),
Register('vr10_8_13', 1, 0x42a2),
Register('vr10_8_12', 1, 0x42a3),
Register('vr10_32_2', 4, 0x42a4),
Register('vr10_16_5', 2, 0x42a4),
Register('vr10_8_11', 1, 0x42a4),
Register('vr10_8_10', 1, 0x42a5),
Register('vr10_16_4', 2, 0x42a6),
Register('vr10_8_9', 1, 0x42a6),
Register('vr10_8_8', 1, 0x42a7),
Register('vr10_64_0', 8, 0x42a8),
Register('vr10_32_1', 4, 0x42a8),
Register('vr10_16_3', 2, 0x42a8),
Register('vr10_8_7', 1, 0x42a8),
Register('vr10_8_6', 1, 0x42a9),
Register('vr10_16_2', 2, 0x42aa),
Register('vr10_8_5', 1, 0x42aa),
Register('vr10_8_4', 1, 0x42ab),
Register('vr10_32_0', 4, 0x42ac),
Register('vr10_16_1', 2, 0x42ac),
Register('vr10_8_3', 1, 0x42ac),
Register('vr10_8_2', 1, 0x42ad),
Register('vr10_16_0', 2, 0x42ae),
Register('vr10_8_1', 1, 0x42ae),
Register('vr10_8_0', 1, 0x42af),
Register('vs43', 16, 0x42b0),
Register('vr11_64_1', 8, 0x42b0),
Register('vr11_32_3', 4, 0x42b0),
Register('vr11_16_7', 2, 0x42b0),
Register('vr11_8_15', 1, 0x42b0),
Register('vr11_8_14', 1, 0x42b1),
Register('vr11_16_6', 2, 0x42b2),
Register('vr11_8_13', 1, 0x42b2),
Register('vr11_8_12', 1, 0x42b3),
Register('vr11_32_2', 4, 0x42b4),
Register('vr11_16_5', 2, 0x42b4),
Register('vr11_8_11', 1, 0x42b4),
Register('vr11_8_10', 1, 0x42b5),
Register('vr11_16_4', 2, 0x42b6),
Register('vr11_8_9', 1, 0x42b6),
Register('vr11_8_8', 1, 0x42b7),
Register('vr11_64_0', 8, 0x42b8),
Register('vr11_32_1', 4, 0x42b8),
Register('vr11_16_3', 2, 0x42b8),
Register('vr11_8_7', 1, 0x42b8),
Register('vr11_8_6', 1, 0x42b9),
Register('vr11_16_2', 2, 0x42ba),
Register('vr11_8_5', 1, 0x42ba),
Register('vr11_8_4', 1, 0x42bb),
Register('vr11_32_0', 4, 0x42bc),
Register('vr11_16_1', 2, 0x42bc),
Register('vr11_8_3', 1, 0x42bc),
Register('vr11_8_2', 1, 0x42bd),
Register('vr11_16_0', 2, 0x42be),
Register('vr11_8_1', 1, 0x42be),
Register('vr11_8_0', 1, 0x42bf),
Register('vs44', 16, 0x42c0),
Register('vr12_64_1', 8, 0x42c0),
Register('vr12_32_3', 4, 0x42c0),
Register('vr12_16_7', 2, 0x42c0),
Register('vr12_8_15', 1, 0x42c0),
Register('vr12_8_14', 1, 0x42c1),
Register('vr12_16_6', 2, 0x42c2),
Register('vr12_8_13', 1, 0x42c2),
Register('vr12_8_12', 1, 0x42c3),
Register('vr12_32_2', 4, 0x42c4),
Register('vr12_16_5', 2, 0x42c4),
Register('vr12_8_11', 1, 0x42c4),
Register('vr12_8_10', 1, 0x42c5),
Register('vr12_16_4', 2, 0x42c6),
Register('vr12_8_9', 1, 0x42c6),
Register('vr12_8_8', 1, 0x42c7),
Register('vr12_64_0', 8, 0x42c8),
Register('vr12_32_1', 4, 0x42c8),
Register('vr12_16_3', 2, 0x42c8),
Register('vr12_8_7', 1, 0x42c8),
Register('vr12_8_6', 1, 0x42c9),
Register('vr12_16_2', 2, 0x42ca),
Register('vr12_8_5', 1, 0x42ca),
Register('vr12_8_4', 1, 0x42cb),
Register('vr12_32_0', 4, 0x42cc),
Register('vr12_16_1', 2, 0x42cc),
Register('vr12_8_3', 1, 0x42cc),
Register('vr12_8_2', 1, 0x42cd),
Register('vr12_16_0', 2, 0x42ce),
Register('vr12_8_1', 1, 0x42ce),
Register('vr12_8_0', 1, 0x42cf),
Register('vs45', 16, 0x42d0),
Register('vr13_64_1', 8, 0x42d0),
Register('vr13_32_3', 4, 0x42d0),
Register('vr13_16_7', 2, 0x42d0),
Register('vr13_8_15', 1, 0x42d0),
Register('vr13_8_14', 1, 0x42d1),
Register('vr13_16_6', 2, 0x42d2),
Register('vr13_8_13', 1, 0x42d2),
Register('vr13_8_12', 1, 0x42d3),
Register('vr13_32_2', 4, 0x42d4),
Register('vr13_16_5', 2, 0x42d4),
Register('vr13_8_11', 1, 0x42d4),
Register('vr13_8_10', 1, 0x42d5),
Register('vr13_16_4', 2, 0x42d6),
Register('vr13_8_9', 1, 0x42d6),
Register('vr13_8_8', 1, 0x42d7),
Register('vr13_64_0', 8, 0x42d8),
Register('vr13_32_1', 4, 0x42d8),
Register('vr13_16_3', 2, 0x42d8),
Register('vr13_8_7', 1, 0x42d8),
Register('vr13_8_6', 1, 0x42d9),
Register('vr13_16_2', 2, 0x42da),
Register('vr13_8_5', 1, 0x42da),
Register('vr13_8_4', 1, 0x42db),
Register('vr13_32_0', 4, 0x42dc),
Register('vr13_16_1', 2, 0x42dc),
Register('vr13_8_3', 1, 0x42dc),
Register('vr13_8_2', 1, 0x42dd),
Register('vr13_16_0', 2, 0x42de),
Register('vr13_8_1', 1, 0x42de),
Register('vr13_8_0', 1, 0x42df),
Register('vs46', 16, 0x42e0),
Register('vr14_64_1', 8, 0x42e0),
Register('vr14_32_3', 4, 0x42e0),
Register('vr14_16_7', 2, 0x42e0),
Register('vr14_8_15', 1, 0x42e0),
Register('vr14_8_14', 1, 0x42e1),
Register('vr14_16_6', 2, 0x42e2),
Register('vr14_8_13', 1, 0x42e2),
Register('vr14_8_12', 1, 0x42e3),
Register('vr14_32_2', 4, 0x42e4),
Register('vr14_16_5', 2, 0x42e4),
Register('vr14_8_11', 1, 0x42e4),
Register('vr14_8_10', 1, 0x42e5),
Register('vr14_16_4', 2, 0x42e6),
Register('vr14_8_9', 1, 0x42e6),
Register('vr14_8_8', 1, 0x42e7),
Register('vr14_64_0', 8, 0x42e8),
Register('vr14_32_1', 4, 0x42e8),
Register('vr14_16_3', 2, 0x42e8),
Register('vr14_8_7', 1, 0x42e8),
Register('vr14_8_6', 1, 0x42e9),
Register('vr14_16_2', 2, 0x42ea),
Register('vr14_8_5', 1, 0x42ea),
Register('vr14_8_4', 1, 0x42eb),
Register('vr14_32_0', 4, 0x42ec),
Register('vr14_16_1', 2, 0x42ec),
Register('vr14_8_3', 1, 0x42ec),
Register('vr14_8_2', 1, 0x42ed),
Register('vr14_16_0', 2, 0x42ee),
Register('vr14_8_1', 1, 0x42ee),
Register('vr14_8_0', 1, 0x42ef),
Register('vs47', 16, 0x42f0),
Register('vr15_64_1', 8, 0x42f0),
Register('vr15_32_3', 4, 0x42f0),
Register('vr15_16_7', 2, 0x42f0),
Register('vr15_8_15', 1, 0x42f0),
Register('vr15_8_14', 1, 0x42f1),
Register('vr15_16_6', 2, 0x42f2),
Register('vr15_8_13', 1, 0x42f2),
Register('vr15_8_12', 1, 0x42f3),
Register('vr15_32_2', 4, 0x42f4),
Register('vr15_16_5', 2, 0x42f4),
Register('vr15_8_11', 1, 0x42f4),
Register('vr15_8_10', 1, 0x42f5),
Register('vr15_16_4', 2, 0x42f6),
Register('vr15_8_9', 1, 0x42f6),
Register('vr15_8_8', 1, 0x42f7),
Register('vr15_64_0', 8, 0x42f8),
Register('vr15_32_1', 4, 0x42f8),
Register('vr15_16_3', 2, 0x42f8),
Register('vr15_8_7', 1, 0x42f8),
Register('vr15_8_6', 1, 0x42f9),
Register('vr15_16_2', 2, 0x42fa),
Register('vr15_8_5', 1, 0x42fa),
Register('vr15_8_4', 1, 0x42fb),
Register('vr15_32_0', 4, 0x42fc),
Register('vr15_16_1', 2, 0x42fc),
Register('vr15_8_3', 1, 0x42fc),
Register('vr15_8_2', 1, 0x42fd),
Register('vr15_16_0', 2, 0x42fe),
Register('vr15_8_1', 1, 0x42fe),
Register('vr15_8_0', 1, 0x42ff),
Register('vs48', 16, 0x4300),
Register('vr16_64_1', 8, 0x4300),
Register('vr16_32_3', 4, 0x4300),
Register('vr16_16_7', 2, 0x4300),
Register('vr16_8_15', 1, 0x4300),
Register('vr16_8_14', 1, 0x4301),
Register('vr16_16_6', 2, 0x4302),
Register('vr16_8_13', 1, 0x4302),
Register('vr16_8_12', 1, 0x4303),
Register('vr16_32_2', 4, 0x4304),
Register('vr16_16_5', 2, 0x4304),
Register('vr16_8_11', 1, 0x4304),
Register('vr16_8_10', 1, 0x4305),
Register('vr16_16_4', 2, 0x4306),
Register('vr16_8_9', 1, 0x4306),
Register('vr16_8_8', 1, 0x4307),
Register('vr16_64_0', 8, 0x4308),
Register('vr16_32_1', 4, 0x4308),
Register('vr16_16_3', 2, 0x4308),
Register('vr16_8_7', 1, 0x4308),
Register('vr16_8_6', 1, 0x4309),
Register('vr16_16_2', 2, 0x430a),
Register('vr16_8_5', 1, 0x430a),
Register('vr16_8_4', 1, 0x430b),
Register('vr16_32_0', 4, 0x430c),
Register('vr16_16_1', 2, 0x430c),
Register('vr16_8_3', 1, 0x430c),
Register('vr16_8_2', 1, 0x430d),
Register('vr16_16_0', 2, 0x430e),
Register('vr16_8_1', 1, 0x430e),
Register('vr16_8_0', 1, 0x430f),
Register('vs49', 16, 0x4310),
Register('vr17_64_1', 8, 0x4310),
Register('vr17_32_3', 4, 0x4310),
Register('vr17_16_7', 2, 0x4310),
Register('vr17_8_15', 1, 0x4310),
Register('vr17_8_14', 1, 0x4311),
Register('vr17_16_6', 2, 0x4312),
Register('vr17_8_13', 1, 0x4312),
Register('vr17_8_12', 1, 0x4313),
Register('vr17_32_2', 4, 0x4314),
Register('vr17_16_5', 2, 0x4314),
Register('vr17_8_11', 1, 0x4314),
Register('vr17_8_10', 1, 0x4315),
Register('vr17_16_4', 2, 0x4316),
Register('vr17_8_9', 1, 0x4316),
Register('vr17_8_8', 1, 0x4317),
Register('vr17_64_0', 8, 0x4318),
Register('vr17_32_1', 4, 0x4318),
Register('vr17_16_3', 2, 0x4318),
Register('vr17_8_7', 1, 0x4318),
Register('vr17_8_6', 1, 0x4319),
Register('vr17_16_2', 2, 0x431a),
Register('vr17_8_5', 1, 0x431a),
Register('vr17_8_4', 1, 0x431b),
Register('vr17_32_0', 4, 0x431c),
Register('vr17_16_1', 2, 0x431c),
Register('vr17_8_3', 1, 0x431c),
Register('vr17_8_2', 1, 0x431d),
Register('vr17_16_0', 2, 0x431e),
Register('vr17_8_1', 1, 0x431e),
Register('vr17_8_0', 1, 0x431f),
Register('vs50', 16, 0x4320),
Register('vr18_64_1', 8, 0x4320),
Register('vr18_32_3', 4, 0x4320),
Register('vr18_16_7', 2, 0x4320),
Register('vr18_8_15', 1, 0x4320),
Register('vr18_8_14', 1, 0x4321),
Register('vr18_16_6', 2, 0x4322),
Register('vr18_8_13', 1, 0x4322),
Register('vr18_8_12', 1, 0x4323),
Register('vr18_32_2', 4, 0x4324),
Register('vr18_16_5', 2, 0x4324),
Register('vr18_8_11', 1, 0x4324),
Register('vr18_8_10', 1, 0x4325),
Register('vr18_16_4', 2, 0x4326),
Register('vr18_8_9', 1, 0x4326),
Register('vr18_8_8', 1, 0x4327),
Register('vr18_64_0', 8, 0x4328),
Register('vr18_32_1', 4, 0x4328),
Register('vr18_16_3', 2, 0x4328),
Register('vr18_8_7', 1, 0x4328),
Register('vr18_8_6', 1, 0x4329),
Register('vr18_16_2', 2, 0x432a),
Register('vr18_8_5', 1, 0x432a),
Register('vr18_8_4', 1, 0x432b),
Register('vr18_32_0', 4, 0x432c),
Register('vr18_16_1', 2, 0x432c),
Register('vr18_8_3', 1, 0x432c),
Register('vr18_8_2', 1, 0x432d),
Register('vr18_16_0', 2, 0x432e),
Register('vr18_8_1', 1, 0x432e),
Register('vr18_8_0', 1, 0x432f),
Register('vs51', 16, 0x4330),
Register('vr19_64_1', 8, 0x4330),
Register('vr19_32_3', 4, 0x4330),
Register('vr19_16_7', 2, 0x4330),
Register('vr19_8_15', 1, 0x4330),
Register('vr19_8_14', 1, 0x4331),
Register('vr19_16_6', 2, 0x4332),
Register('vr19_8_13', 1, 0x4332),
Register('vr19_8_12', 1, 0x4333),
Register('vr19_32_2', 4, 0x4334),
Register('vr19_16_5', 2, 0x4334),
Register('vr19_8_11', 1, 0x4334),
Register('vr19_8_10', 1, 0x4335),
Register('vr19_16_4', 2, 0x4336),
Register('vr19_8_9', 1, 0x4336),
Register('vr19_8_8', 1, 0x4337),
Register('vr19_64_0', 8, 0x4338),
Register('vr19_32_1', 4, 0x4338),
Register('vr19_16_3', 2, 0x4338),
Register('vr19_8_7', 1, 0x4338),
Register('vr19_8_6', 1, 0x4339),
Register('vr19_16_2', 2, 0x433a),
Register('vr19_8_5', 1, 0x433a),
Register('vr19_8_4', 1, 0x433b),
Register('vr19_32_0', 4, 0x433c),
Register('vr19_16_1', 2, 0x433c),
Register('vr19_8_3', 1, 0x433c),
Register('vr19_8_2', 1, 0x433d),
Register('vr19_16_0', 2, 0x433e),
Register('vr19_8_1', 1, 0x433e),
Register('vr19_8_0', 1, 0x433f),
Register('vs52', 16, 0x4340),
Register('vr20_64_1', 8, 0x4340),
Register('vr20_32_3', 4, 0x4340),
Register('vr20_16_7', 2, 0x4340),
Register('vr20_8_15', 1, 0x4340),
Register('vr20_8_14', 1, 0x4341),
Register('vr20_16_6', 2, 0x4342),
Register('vr20_8_13', 1, 0x4342),
Register('vr20_8_12', 1, 0x4343),
Register('vr20_32_2', 4, 0x4344),
Register('vr20_16_5', 2, 0x4344),
Register('vr20_8_11', 1, 0x4344),
Register('vr20_8_10', 1, 0x4345),
Register('vr20_16_4', 2, 0x4346),
Register('vr20_8_9', 1, 0x4346),
Register('vr20_8_8', 1, 0x4347),
Register('vr20_64_0', 8, 0x4348),
Register('vr20_32_1', 4, 0x4348),
Register('vr20_16_3', 2, 0x4348),
Register('vr20_8_7', 1, 0x4348),
Register('vr20_8_6', 1, 0x4349),
Register('vr20_16_2', 2, 0x434a),
Register('vr20_8_5', 1, 0x434a),
Register('vr20_8_4', 1, 0x434b),
Register('vr20_32_0', 4, 0x434c),
Register('vr20_16_1', 2, 0x434c),
Register('vr20_8_3', 1, 0x434c),
Register('vr20_8_2', 1, 0x434d),
Register('vr20_16_0', 2, 0x434e),
Register('vr20_8_1', 1, 0x434e),
Register('vr20_8_0', 1, 0x434f),
Register('vs53', 16, 0x4350),
Register('vr21_64_1', 8, 0x4350),
Register('vr21_32_3', 4, 0x4350),
Register('vr21_16_7', 2, 0x4350),
Register('vr21_8_15', 1, 0x4350),
Register('vr21_8_14', 1, 0x4351),
Register('vr21_16_6', 2, 0x4352),
Register('vr21_8_13', 1, 0x4352),
Register('vr21_8_12', 1, 0x4353),
Register('vr21_32_2', 4, 0x4354),
Register('vr21_16_5', 2, 0x4354),
Register('vr21_8_11', 1, 0x4354),
Register('vr21_8_10', 1, 0x4355),
Register('vr21_16_4', 2, 0x4356),
Register('vr21_8_9', 1, 0x4356),
Register('vr21_8_8', 1, 0x4357),
Register('vr21_64_0', 8, 0x4358),
Register('vr21_32_1', 4, 0x4358),
Register('vr21_16_3', 2, 0x4358),
Register('vr21_8_7', 1, 0x4358),
Register('vr21_8_6', 1, 0x4359),
Register('vr21_16_2', 2, 0x435a),
Register('vr21_8_5', 1, 0x435a),
Register('vr21_8_4', 1, 0x435b),
Register('vr21_32_0', 4, 0x435c),
Register('vr21_16_1', 2, 0x435c),
Register('vr21_8_3', 1, 0x435c),
Register('vr21_8_2', 1, 0x435d),
Register('vr21_16_0', 2, 0x435e),
Register('vr21_8_1', 1, 0x435e),
Register('vr21_8_0', 1, 0x435f),
Register('vs54', 16, 0x4360),
Register('vr22_64_1', 8, 0x4360),
Register('vr22_32_3', 4, 0x4360),
Register('vr22_16_7', 2, 0x4360),
Register('vr22_8_15', 1, 0x4360),
Register('vr22_8_14', 1, 0x4361),
Register('vr22_16_6', 2, 0x4362),
Register('vr22_8_13', 1, 0x4362),
Register('vr22_8_12', 1, 0x4363),
Register('vr22_32_2', 4, 0x4364),
Register('vr22_16_5', 2, 0x4364),
Register('vr22_8_11', 1, 0x4364),
Register('vr22_8_10', 1, 0x4365),
Register('vr22_16_4', 2, 0x4366),
Register('vr22_8_9', 1, 0x4366),
Register('vr22_8_8', 1, 0x4367),
Register('vr22_64_0', 8, 0x4368),
Register('vr22_32_1', 4, 0x4368),
Register('vr22_16_3', 2, 0x4368),
Register('vr22_8_7', 1, 0x4368),
Register('vr22_8_6', 1, 0x4369),
Register('vr22_16_2', 2, 0x436a),
Register('vr22_8_5', 1, 0x436a),
Register('vr22_8_4', 1, 0x436b),
Register('vr22_32_0', 4, 0x436c),
Register('vr22_16_1', 2, 0x436c),
Register('vr22_8_3', 1, 0x436c),
Register('vr22_8_2', 1, 0x436d),
Register('vr22_16_0', 2, 0x436e),
Register('vr22_8_1', 1, 0x436e),
Register('vr22_8_0', 1, 0x436f),
Register('vs55', 16, 0x4370),
Register('vr23_64_1', 8, 0x4370),
Register('vr23_32_3', 4, 0x4370),
Register('vr23_16_7', 2, 0x4370),
Register('vr23_8_15', 1, 0x4370),
Register('vr23_8_14', 1, 0x4371),
Register('vr23_16_6', 2, 0x4372),
Register('vr23_8_13', 1, 0x4372),
Register('vr23_8_12', 1, 0x4373),
Register('vr23_32_2', 4, 0x4374),
Register('vr23_16_5', 2, 0x4374),
Register('vr23_8_11', 1, 0x4374),
Register('vr23_8_10', 1, 0x4375),
Register('vr23_16_4', 2, 0x4376),
Register('vr23_8_9', 1, 0x4376),
Register('vr23_8_8', 1, 0x4377),
Register('vr23_64_0', 8, 0x4378),
Register('vr23_32_1', 4, 0x4378),
Register('vr23_16_3', 2, 0x4378),
Register('vr23_8_7', 1, 0x4378),
Register('vr23_8_6', 1, 0x4379),
Register('vr23_16_2', 2, 0x437a),
Register('vr23_8_5', 1, 0x437a),
Register('vr23_8_4', 1, 0x437b),
Register('vr23_32_0', 4, 0x437c),
Register('vr23_16_1', 2, 0x437c),
Register('vr23_8_3', 1, 0x437c),
Register('vr23_8_2', 1, 0x437d),
Register('vr23_16_0', 2, 0x437e),
Register('vr23_8_1', 1, 0x437e),
Register('vr23_8_0', 1, 0x437f),
Register('vs56', 16, 0x4380),
Register('vr24_64_1', 8, 0x4380),
Register('vr24_32_3', 4, 0x4380),
Register('vr24_16_7', 2, 0x4380),
Register('vr24_8_15', 1, 0x4380),
Register('vr24_8_14', 1, 0x4381),
Register('vr24_16_6', 2, 0x4382),
Register('vr24_8_13', 1, 0x4382),
Register('vr24_8_12', 1, 0x4383),
Register('vr24_32_2', 4, 0x4384),
Register('vr24_16_5', 2, 0x4384),
Register('vr24_8_11', 1, 0x4384),
Register('vr24_8_10', 1, 0x4385),
Register('vr24_16_4', 2, 0x4386),
Register('vr24_8_9', 1, 0x4386),
Register('vr24_8_8', 1, 0x4387),
Register('vr24_64_0', 8, 0x4388),
Register('vr24_32_1', 4, 0x4388),
Register('vr24_16_3', 2, 0x4388),
Register('vr24_8_7', 1, 0x4388),
Register('vr24_8_6', 1, 0x4389),
Register('vr24_16_2', 2, 0x438a),
Register('vr24_8_5', 1, 0x438a),
Register('vr24_8_4', 1, 0x438b),
Register('vr24_32_0', 4, 0x438c),
Register('vr24_16_1', 2, 0x438c),
Register('vr24_8_3', 1, 0x438c),
Register('vr24_8_2', 1, 0x438d),
Register('vr24_16_0', 2, 0x438e),
Register('vr24_8_1', 1, 0x438e),
Register('vr24_8_0', 1, 0x438f),
Register('vs57', 16, 0x4390),
Register('vr25_64_1', 8, 0x4390),
Register('vr25_32_3', 4, 0x4390),
Register('vr25_16_7', 2, 0x4390),
Register('vr25_8_15', 1, 0x4390),
Register('vr25_8_14', 1, 0x4391),
Register('vr25_16_6', 2, 0x4392),
Register('vr25_8_13', 1, 0x4392),
Register('vr25_8_12', 1, 0x4393),
Register('vr25_32_2', 4, 0x4394),
Register('vr25_16_5', 2, 0x4394),
Register('vr25_8_11', 1, 0x4394),
Register('vr25_8_10', 1, 0x4395),
Register('vr25_16_4', 2, 0x4396),
Register('vr25_8_9', 1, 0x4396),
Register('vr25_8_8', 1, 0x4397),
Register('vr25_64_0', 8, 0x4398),
Register('vr25_32_1', 4, 0x4398),
Register('vr25_16_3', 2, 0x4398),
Register('vr25_8_7', 1, 0x4398),
Register('vr25_8_6', 1, 0x4399),
Register('vr25_16_2', 2, 0x439a),
Register('vr25_8_5', 1, 0x439a),
Register('vr25_8_4', 1, 0x439b),
Register('vr25_32_0', 4, 0x439c),
Register('vr25_16_1', 2, 0x439c),
Register('vr25_8_3', 1, 0x439c),
Register('vr25_8_2', 1, 0x439d),
Register('vr25_16_0', 2, 0x439e),
Register('vr25_8_1', 1, 0x439e),
Register('vr25_8_0', 1, 0x439f),
Register('vs58', 16, 0x43a0),
Register('vr26_64_1', 8, 0x43a0),
Register('vr26_32_3', 4, 0x43a0),
Register('vr26_16_7', 2, 0x43a0),
Register('vr26_8_15', 1, 0x43a0),
Register('vr26_8_14', 1, 0x43a1),
Register('vr26_16_6', 2, 0x43a2),
Register('vr26_8_13', 1, 0x43a2),
Register('vr26_8_12', 1, 0x43a3),
Register('vr26_32_2', 4, 0x43a4),
Register('vr26_16_5', 2, 0x43a4),
Register('vr26_8_11', 1, 0x43a4),
Register('vr26_8_10', 1, 0x43a5),
Register('vr26_16_4', 2, 0x43a6),
Register('vr26_8_9', 1, 0x43a6),
Register('vr26_8_8', 1, 0x43a7),
Register('vr26_64_0', 8, 0x43a8),
Register('vr26_32_1', 4, 0x43a8),
Register('vr26_16_3', 2, 0x43a8),
Register('vr26_8_7', 1, 0x43a8),
Register('vr26_8_6', 1, 0x43a9),
Register('vr26_16_2', 2, 0x43aa),
Register('vr26_8_5', 1, 0x43aa),
Register('vr26_8_4', 1, 0x43ab),
Register('vr26_32_0', 4, 0x43ac),
Register('vr26_16_1', 2, 0x43ac),
Register('vr26_8_3', 1, 0x43ac),
Register('vr26_8_2', 1, 0x43ad),
Register('vr26_16_0', 2, 0x43ae),
Register('vr26_8_1', 1, 0x43ae),
Register('vr26_8_0', 1, 0x43af),
Register('vs59', 16, 0x43b0),
Register('vr27_64_1', 8, 0x43b0),
Register('vr27_32_3', 4, 0x43b0),
Register('vr27_16_7', 2, 0x43b0),
Register('vr27_8_15', 1, 0x43b0),
Register('vr27_8_14', 1, 0x43b1),
Register('vr27_16_6', 2, 0x43b2),
Register('vr27_8_13', 1, 0x43b2),
Register('vr27_8_12', 1, 0x43b3),
Register('vr27_32_2', 4, 0x43b4),
Register('vr27_16_5', 2, 0x43b4),
Register('vr27_8_11', 1, 0x43b4),
Register('vr27_8_10', 1, 0x43b5),
Register('vr27_16_4', 2, 0x43b6),
Register('vr27_8_9', 1, 0x43b6),
Register('vr27_8_8', 1, 0x43b7),
Register('vr27_64_0', 8, 0x43b8),
Register('vr27_32_1', 4, 0x43b8),
Register('vr27_16_3', 2, 0x43b8),
Register('vr27_8_7', 1, 0x43b8),
Register('vr27_8_6', 1, 0x43b9),
Register('vr27_16_2', 2, 0x43ba),
Register('vr27_8_5', 1, 0x43ba),
Register('vr27_8_4', 1, 0x43bb),
Register('vr27_32_0', 4, 0x43bc),
Register('vr27_16_1', 2, 0x43bc),
Register('vr27_8_3', 1, 0x43bc),
Register('vr27_8_2', 1, 0x43bd),
Register('vr27_16_0', 2, 0x43be),
Register('vr27_8_1', 1, 0x43be),
Register('vr27_8_0', 1, 0x43bf),
Register('vs60', 16, 0x43c0),
Register('vr28_64_1', 8, 0x43c0),
Register('vr28_32_3', 4, 0x43c0),
Register('vr28_16_7', 2, 0x43c0),
Register('vr28_8_15', 1, 0x43c0),
Register('vr28_8_14', 1, 0x43c1),
Register('vr28_16_6', 2, 0x43c2),
Register('vr28_8_13', 1, 0x43c2),
Register('vr28_8_12', 1, 0x43c3),
Register('vr28_32_2', 4, 0x43c4),
Register('vr28_16_5', 2, 0x43c4),
Register('vr28_8_11', 1, 0x43c4),
Register('vr28_8_10', 1, 0x43c5),
Register('vr28_16_4', 2, 0x43c6),
Register('vr28_8_9', 1, 0x43c6),
Register('vr28_8_8', 1, 0x43c7),
Register('vr28_64_0', 8, 0x43c8),
Register('vr28_32_1', 4, 0x43c8),
Register('vr28_16_3', 2, 0x43c8),
Register('vr28_8_7', 1, 0x43c8),
Register('vr28_8_6', 1, 0x43c9),
Register('vr28_16_2', 2, 0x43ca),
Register('vr28_8_5', 1, 0x43ca),
Register('vr28_8_4', 1, 0x43cb),
Register('vr28_32_0', 4, 0x43cc),
Register('vr28_16_1', 2, 0x43cc),
Register('vr28_8_3', 1, 0x43cc),
Register('vr28_8_2', 1, 0x43cd),
Register('vr28_16_0', 2, 0x43ce),
Register('vr28_8_1', 1, 0x43ce),
Register('vr28_8_0', 1, 0x43cf),
Register('vs61', 16, 0x43d0),
Register('vr29_64_1', 8, 0x43d0),
Register('vr29_32_3', 4, 0x43d0),
Register('vr29_16_7', 2, 0x43d0),
Register('vr29_8_15', 1, 0x43d0),
Register('vr29_8_14', 1, 0x43d1),
Register('vr29_16_6', 2, 0x43d2),
Register('vr29_8_13', 1, 0x43d2),
Register('vr29_8_12', 1, 0x43d3),
Register('vr29_32_2', 4, 0x43d4),
Register('vr29_16_5', 2, 0x43d4),
Register('vr29_8_11', 1, 0x43d4),
Register('vr29_8_10', 1, 0x43d5),
Register('vr29_16_4', 2, 0x43d6),
Register('vr29_8_9', 1, 0x43d6),
Register('vr29_8_8', 1, 0x43d7),
Register('vr29_64_0', 8, 0x43d8),
Register('vr29_32_1', 4, 0x43d8),
Register('vr29_16_3', 2, 0x43d8),
Register('vr29_8_7', 1, 0x43d8),
Register('vr29_8_6', 1, 0x43d9),
Register('vr29_16_2', 2, 0x43da),
Register('vr29_8_5', 1, 0x43da),
Register('vr29_8_4', 1, 0x43db),
Register('vr29_32_0', 4, 0x43dc),
Register('vr29_16_1', 2, 0x43dc),
Register('vr29_8_3', 1, 0x43dc),
Register('vr29_8_2', 1, 0x43dd),
Register('vr29_16_0', 2, 0x43de),
Register('vr29_8_1', 1, 0x43de),
Register('vr29_8_0', 1, 0x43df),
Register('vs62', 16, 0x43e0),
Register('vr30_64_1', 8, 0x43e0),
Register('vr30_32_3', 4, 0x43e0),
Register('vr30_16_7', 2, 0x43e0),
Register('vr30_8_15', 1, 0x43e0),
Register('vr30_8_14', 1, 0x43e1),
Register('vr30_16_6', 2, 0x43e2),
Register('vr30_8_13', 1, 0x43e2),
Register('vr30_8_12', 1, 0x43e3),
Register('vr30_32_2', 4, 0x43e4),
Register('vr30_16_5', 2, 0x43e4),
Register('vr30_8_11', 1, 0x43e4),
Register('vr30_8_10', 1, 0x43e5),
Register('vr30_16_4', 2, 0x43e6),
Register('vr30_8_9', 1, 0x43e6),
Register('vr30_8_8', 1, 0x43e7),
Register('vr30_64_0', 8, 0x43e8),
Register('vr30_32_1', 4, 0x43e8),
Register('vr30_16_3', 2, 0x43e8),
Register('vr30_8_7', 1, 0x43e8),
Register('vr30_8_6', 1, 0x43e9),
Register('vr30_16_2', 2, 0x43ea),
Register('vr30_8_5', 1, 0x43ea),
Register('vr30_8_4', 1, 0x43eb),
Register('vr30_32_0', 4, 0x43ec),
Register('vr30_16_1', 2, 0x43ec),
Register('vr30_8_3', 1, 0x43ec),
Register('vr30_8_2', 1, 0x43ed),
Register('vr30_16_0', 2, 0x43ee),
Register('vr30_8_1', 1, 0x43ee),
Register('vr30_8_0', 1, 0x43ef),
Register('vs63', 16, 0x43f0),
Register('vr31_64_1', 8, 0x43f0),
Register('vr31_32_3', 4, 0x43f0),
Register('vr31_16_7', 2, 0x43f0),
Register('vr31_8_15', 1, 0x43f0),
Register('vr31_8_14', 1, 0x43f1),
Register('vr31_16_6', 2, 0x43f2),
Register('vr31_8_13', 1, 0x43f2),
Register('vr31_8_12', 1, 0x43f3),
Register('vr31_32_2', 4, 0x43f4),
Register('vr31_16_5', 2, 0x43f4),
Register('vr31_8_11', 1, 0x43f4),
Register('vr31_8_10', 1, 0x43f5),
Register('vr31_16_4', 2, 0x43f6),
Register('vr31_8_9', 1, 0x43f6),
Register('vr31_8_8', 1, 0x43f7),
Register('vr31_64_0', 8, 0x43f8),
Register('vr31_32_1', 4, 0x43f8),
Register('vr31_16_3', 2, 0x43f8),
Register('vr31_8_7', 1, 0x43f8),
Register('vr31_8_6', 1, 0x43f9),
Register('vr31_16_2', 2, 0x43fa),
Register('vr31_8_5', 1, 0x43fa),
Register('vr31_8_4', 1, 0x43fb),
Register('vr31_32_0', 4, 0x43fc),
Register('vr31_16_1', 2, 0x43fc),
Register('vr31_8_3', 1, 0x43fc),
Register('vr31_8_2', 1, 0x43fd),
Register('vr31_16_0', 2, 0x43fe),
Register('vr31_8_1', 1, 0x43fe),
Register('vr31_8_0', 1, 0x43ff),
Register('contextreg', 4, 0x6000),
Register('dcr000', 4, 0x7000),
Register('dcr001', 4, 0x7004),
Register('dcr002', 4, 0x7008),
Register('dcr003', 4, 0x700c),
Register('dcr004', 4, 0x7010),
Register('dcr005', 4, 0x7014),
Register('dcr006', 4, 0x7018),
Register('dcr007', 4, 0x701c),
Register('dcr008', 4, 0x7020),
Register('dcr009', 4, 0x7024),
Register('dcr00a', 4, 0x7028),
Register('dcr00b', 4, 0x702c),
Register('dcr00c', 4, 0x7030),
Register('dcr00d', 4, 0x7034),
Register('dcr00e', 4, 0x7038),
Register('dcr00f', 4, 0x703c),
Register('dcr010', 4, 0x7040),
Register('dcr011', 4, 0x7044),
Register('dcr012', 4, 0x7048),
Register('dcr013', 4, 0x704c),
Register('dcr014', 4, 0x7050),
Register('dcr015', 4, 0x7054),
Register('dcr016', 4, 0x7058),
Register('dcr017', 4, 0x705c),
Register('dcr018', 4, 0x7060),
Register('dcr019', 4, 0x7064),
Register('dcr01a', 4, 0x7068),
Register('dcr01b', 4, 0x706c),
Register('dcr01c', 4, 0x7070),
Register('dcr01d', 4, 0x7074),
Register('dcr01e', 4, 0x7078),
Register('dcr01f', 4, 0x707c),
Register('dcr020', 4, 0x7080),
Register('dcr021', 4, 0x7084),
Register('dcr022', 4, 0x7088),
Register('dcr023', 4, 0x708c),
Register('dcr024', 4, 0x7090),
Register('dcr025', 4, 0x7094),
Register('dcr026', 4, 0x7098),
Register('dcr027', 4, 0x709c),
Register('dcr028', 4, 0x70a0),
Register('dcr029', 4, 0x70a4),
Register('dcr02a', 4, 0x70a8),
Register('dcr02b', 4, 0x70ac),
Register('dcr02c', 4, 0x70b0),
Register('dcr02d', 4, 0x70b4),
Register('dcr02e', 4, 0x70b8),
Register('dcr02f', 4, 0x70bc),
Register('dcr030', 4, 0x70c0),
Register('dcr031', 4, 0x70c4),
Register('dcr032', 4, 0x70c8),
Register('dcr033', 4, 0x70cc),
Register('dcr034', 4, 0x70d0),
Register('dcr035', 4, 0x70d4),
Register('dcr036', 4, 0x70d8),
Register('dcr037', 4, 0x70dc),
Register('dcr038', 4, 0x70e0),
Register('dcr039', 4, 0x70e4),
Register('dcr03a', 4, 0x70e8),
Register('dcr03b', 4, 0x70ec),
Register('dcr03c', 4, 0x70f0),
Register('dcr03d', 4, 0x70f4),
Register('dcr03e', 4, 0x70f8),
Register('dcr03f', 4, 0x70fc),
Register('dcr040', 4, 0x7100),
Register('dcr041', 4, 0x7104),
Register('dcr042', 4, 0x7108),
Register('dcr043', 4, 0x710c),
Register('dcr044', 4, 0x7110),
Register('dcr045', 4, 0x7114),
Register('dcr046', 4, 0x7118),
Register('dcr047', 4, 0x711c),
Register('dcr048', 4, 0x7120),
Register('dcr049', 4, 0x7124),
Register('dcr04a', 4, 0x7128),
Register('dcr04b', 4, 0x712c),
Register('dcr04c', 4, 0x7130),
Register('dcr04d', 4, 0x7134),
Register('dcr04e', 4, 0x7138),
Register('dcr04f', 4, 0x713c),
Register('dcr050', 4, 0x7140),
Register('dcr051', 4, 0x7144),
Register('dcr052', 4, 0x7148),
Register('dcr053', 4, 0x714c),
Register('dcr054', 4, 0x7150),
Register('dcr055', 4, 0x7154),
Register('dcr056', 4, 0x7158),
Register('dcr057', 4, 0x715c),
Register('dcr058', 4, 0x7160),
Register('dcr059', 4, 0x7164),
Register('dcr05a', 4, 0x7168),
Register('dcr05b', 4, 0x716c),
Register('dcr05c', 4, 0x7170),
Register('dcr05d', 4, 0x7174),
Register('dcr05e', 4, 0x7178),
Register('dcr05f', 4, 0x717c),
Register('dcr060', 4, 0x7180),
Register('dcr061', 4, 0x7184),
Register('dcr062', 4, 0x7188),
Register('dcr063', 4, 0x718c),
Register('dcr064', 4, 0x7190),
Register('dcr065', 4, 0x7194),
Register('dcr066', 4, 0x7198),
Register('dcr067', 4, 0x719c),
Register('dcr068', 4, 0x71a0),
Register('dcr069', 4, 0x71a4),
Register('dcr06a', 4, 0x71a8),
Register('dcr06b', 4, 0x71ac),
Register('dcr06c', 4, 0x71b0),
Register('dcr06d', 4, 0x71b4),
Register('dcr06e', 4, 0x71b8),
Register('dcr06f', 4, 0x71bc),
Register('dcr070', 4, 0x71c0),
Register('dcr071', 4, 0x71c4),
Register('dcr072', 4, 0x71c8),
Register('dcr073', 4, 0x71cc),
Register('dcr074', 4, 0x71d0),
Register('dcr075', 4, 0x71d4),
Register('dcr076', 4, 0x71d8),
Register('dcr077', 4, 0x71dc),
Register('dcr078', 4, 0x71e0),
Register('dcr079', 4, 0x71e4),
Register('dcr07a', 4, 0x71e8),
Register('dcr07b', 4, 0x71ec),
Register('dcr07c', 4, 0x71f0),
Register('dcr07d', 4, 0x71f4),
Register('dcr07e', 4, 0x71f8),
Register('dcr07f', 4, 0x71fc),
Register('dcr080', 4, 0x7200),
Register('dcr081', 4, 0x7204),
Register('dcr082', 4, 0x7208),
Register('dcr083', 4, 0x720c),
Register('dcr084', 4, 0x7210),
Register('dcr085', 4, 0x7214),
Register('dcr086', 4, 0x7218),
Register('dcr087', 4, 0x721c),
Register('dcr088', 4, 0x7220),
Register('dcr089', 4, 0x7224),
Register('dcr08a', 4, 0x7228),
Register('dcr08b', 4, 0x722c),
Register('dcr08c', 4, 0x7230),
Register('dcr08d', 4, 0x7234),
Register('dcr08e', 4, 0x7238),
Register('dcr08f', 4, 0x723c),
Register('dcr090', 4, 0x7240),
Register('dcr091', 4, 0x7244),
Register('dcr092', 4, 0x7248),
Register('dcr093', 4, 0x724c),
Register('dcr094', 4, 0x7250),
Register('dcr095', 4, 0x7254),
Register('dcr096', 4, 0x7258),
Register('dcr097', 4, 0x725c),
Register('dcr098', 4, 0x7260),
Register('dcr099', 4, 0x7264),
Register('dcr09a', 4, 0x7268),
Register('dcr09b', 4, 0x726c),
Register('dcr09c', 4, 0x7270),
Register('dcr09d', 4, 0x7274),
Register('dcr09e', 4, 0x7278),
Register('dcr09f', 4, 0x727c),
Register('dcr0a0', 4, 0x7280),
Register('dcr0a1', 4, 0x7284),
Register('dcr0a2', 4, 0x7288),
Register('dcr0a3', 4, 0x728c),
Register('dcr0a4', 4, 0x7290),
Register('dcr0a5', 4, 0x7294),
Register('dcr0a6', 4, 0x7298),
Register('dcr0a7', 4, 0x729c),
Register('dcr0a8', 4, 0x72a0),
Register('dcr0a9', 4, 0x72a4),
Register('dcr0aa', 4, 0x72a8),
Register('dcr0ab', 4, 0x72ac),
Register('dcr0ac', 4, 0x72b0),
Register('dcr0ad', 4, 0x72b4),
Register('dcr0ae', 4, 0x72b8),
Register('dcr0af', 4, 0x72bc),
Register('dcr0b0', 4, 0x72c0),
Register('dcr0b1', 4, 0x72c4),
Register('dcr0b2', 4, 0x72c8),
Register('dcr0b3', 4, 0x72cc),
Register('dcr0b4', 4, 0x72d0),
Register('dcr0b5', 4, 0x72d4),
Register('dcr0b6', 4, 0x72d8),
Register('dcr0b7', 4, 0x72dc),
Register('dcr0b8', 4, 0x72e0),
Register('dcr0b9', 4, 0x72e4),
Register('dcr0ba', 4, 0x72e8),
Register('dcr0bb', 4, 0x72ec),
Register('dcr0bc', 4, 0x72f0),
Register('dcr0bd', 4, 0x72f4),
Register('dcr0be', 4, 0x72f8),
Register('dcr0bf', 4, 0x72fc),
Register('dcr0c0', 4, 0x7300),
Register('dcr0c1', 4, 0x7304),
Register('dcr0c2', 4, 0x7308),
Register('dcr0c3', 4, 0x730c),
Register('dcr0c4', 4, 0x7310),
Register('dcr0c5', 4, 0x7314),
Register('dcr0c6', 4, 0x7318),
Register('dcr0c7', 4, 0x731c),
Register('dcr0c8', 4, 0x7320),
Register('dcr0c9', 4, 0x7324),
Register('dcr0ca', 4, 0x7328),
Register('dcr0cb', 4, 0x732c),
Register('dcr0cc', 4, 0x7330),
Register('dcr0cd', 4, 0x7334),
Register('dcr0ce', 4, 0x7338),
Register('dcr0cf', 4, 0x733c),
Register('dcr0d0', 4, 0x7340),
Register('dcr0d1', 4, 0x7344),
Register('dcr0d2', 4, 0x7348),
Register('dcr0d3', 4, 0x734c),
Register('dcr0d4', 4, 0x7350),
Register('dcr0d5', 4, 0x7354),
Register('dcr0d6', 4, 0x7358),
Register('dcr0d7', 4, 0x735c),
Register('dcr0d8', 4, 0x7360),
Register('dcr0d9', 4, 0x7364),
Register('dcr0da', 4, 0x7368),
Register('dcr0db', 4, 0x736c),
Register('dcr0dc', 4, 0x7370),
Register('dcr0dd', 4, 0x7374),
Register('dcr0de', 4, 0x7378),
Register('dcr0df', 4, 0x737c),
Register('dcr0e0', 4, 0x7380),
Register('dcr0e1', 4, 0x7384),
Register('dcr0e2', 4, 0x7388),
Register('dcr0e3', 4, 0x738c),
Register('dcr0e4', 4, 0x7390),
Register('dcr0e5', 4, 0x7394),
Register('dcr0e6', 4, 0x7398),
Register('dcr0e7', 4, 0x739c),
Register('dcr0e8', 4, 0x73a0),
Register('dcr0e9', 4, 0x73a4),
Register('dcr0ea', 4, 0x73a8),
Register('dcr0eb', 4, 0x73ac),
Register('dcr0ec', 4, 0x73b0),
Register('dcr0ed', 4, 0x73b4),
Register('dcr0ee', 4, 0x73b8),
Register('dcr0ef', 4, 0x73bc),
Register('dcr0f0', 4, 0x73c0),
Register('dcr0f1', 4, 0x73c4),
Register('dcr0f2', 4, 0x73c8),
Register('dcr0f3', 4, 0x73cc),
Register('dcr0f4', 4, 0x73d0),
Register('dcr0f5', 4, 0x73d4),
Register('dcr0f6', 4, 0x73d8),
Register('dcr0f7', 4, 0x73dc),
Register('dcr0f8', 4, 0x73e0),
Register('dcr0f9', 4, 0x73e4),
Register('dcr0fa', 4, 0x73e8),
Register('dcr0fb', 4, 0x73ec),
Register('dcr0fc', 4, 0x73f0),
Register('dcr0fd', 4, 0x73f4),
Register('dcr0fe', 4, 0x73f8),
Register('dcr0ff', 4, 0x73fc),
Register('dcr100', 4, 0x7400),
Register('dcr101', 4, 0x7404),
Register('dcr102', 4, 0x7408),
Register('dcr103', 4, 0x740c),
Register('dcr104', 4, 0x7410),
Register('dcr105', 4, 0x7414),
Register('dcr106', 4, 0x7418),
Register('dcr107', 4, 0x741c),
Register('dcr108', 4, 0x7420),
Register('dcr109', 4, 0x7424),
Register('dcr10a', 4, 0x7428),
Register('dcr10b', 4, 0x742c),
Register('dcr10c', 4, 0x7430),
Register('dcr10d', 4, 0x7434),
Register('dcr10e', 4, 0x7438),
Register('dcr10f', 4, 0x743c),
Register('dcr110', 4, 0x7440),
Register('dcr111', 4, 0x7444),
Register('dcr112', 4, 0x7448),
Register('dcr113', 4, 0x744c),
Register('dcr114', 4, 0x7450),
Register('dcr115', 4, 0x7454),
Register('dcr116', 4, 0x7458),
Register('dcr117', 4, 0x745c),
Register('dcr118', 4, 0x7460),
Register('dcr119', 4, 0x7464),
Register('dcr11a', 4, 0x7468),
Register('dcr11b', 4, 0x746c),
Register('dcr11c', 4, 0x7470),
Register('dcr11d', 4, 0x7474),
Register('dcr11e', 4, 0x7478),
Register('dcr11f', 4, 0x747c),
Register('dcr120', 4, 0x7480),
Register('dcr121', 4, 0x7484),
Register('dcr122', 4, 0x7488),
Register('dcr123', 4, 0x748c),
Register('dcr124', 4, 0x7490),
Register('dcr125', 4, 0x7494),
Register('dcr126', 4, 0x7498),
Register('dcr127', 4, 0x749c),
Register('dcr128', 4, 0x74a0),
Register('dcr129', 4, 0x74a4),
Register('dcr12a', 4, 0x74a8),
Register('dcr12b', 4, 0x74ac),
Register('dcr12c', 4, 0x74b0),
Register('dcr12d', 4, 0x74b4),
Register('dcr12e', 4, 0x74b8),
Register('dcr12f', 4, 0x74bc),
Register('dcr130', 4, 0x74c0),
Register('dcr131', 4, 0x74c4),
Register('dcr132', 4, 0x74c8),
Register('dcr133', 4, 0x74cc),
Register('dcr134', 4, 0x74d0),
Register('dcr135', 4, 0x74d4),
Register('dcr136', 4, 0x74d8),
Register('dcr137', 4, 0x74dc),
Register('dcr138', 4, 0x74e0),
Register('dcr139', 4, 0x74e4),
Register('dcr13a', 4, 0x74e8),
Register('dcr13b', 4, 0x74ec),
Register('dcr13c', 4, 0x74f0),
Register('dcr13d', 4, 0x74f4),
Register('dcr13e', 4, 0x74f8),
Register('dcr13f', 4, 0x74fc),
Register('dcr140', 4, 0x7500),
Register('dcr141', 4, 0x7504),
Register('dcr142', 4, 0x7508),
Register('dcr143', 4, 0x750c),
Register('dcr144', 4, 0x7510),
Register('dcr145', 4, 0x7514),
Register('dcr146', 4, 0x7518),
Register('dcr147', 4, 0x751c),
Register('dcr148', 4, 0x7520),
Register('dcr149', 4, 0x7524),
Register('dcr14a', 4, 0x7528),
Register('dcr14b', 4, 0x752c),
Register('dcr14c', 4, 0x7530),
Register('dcr14d', 4, 0x7534),
Register('dcr14e', 4, 0x7538),
Register('dcr14f', 4, 0x753c),
Register('dcr150', 4, 0x7540),
Register('dcr151', 4, 0x7544),
Register('dcr152', 4, 0x7548),
Register('dcr153', 4, 0x754c),
Register('dcr154', 4, 0x7550),
Register('dcr155', 4, 0x7554),
Register('dcr156', 4, 0x7558),
Register('dcr157', 4, 0x755c),
Register('dcr158', 4, 0x7560),
Register('dcr159', 4, 0x7564),
Register('dcr15a', 4, 0x7568),
Register('dcr15b', 4, 0x756c),
Register('dcr15c', 4, 0x7570),
Register('dcr15d', 4, 0x7574),
Register('dcr15e', 4, 0x7578),
Register('dcr15f', 4, 0x757c),
Register('dcr160', 4, 0x7580),
Register('dcr161', 4, 0x7584),
Register('dcr162', 4, 0x7588),
Register('dcr163', 4, 0x758c),
Register('dcr164', 4, 0x7590),
Register('dcr165', 4, 0x7594),
Register('dcr166', 4, 0x7598),
Register('dcr167', 4, 0x759c),
Register('dcr168', 4, 0x75a0),
Register('dcr169', 4, 0x75a4),
Register('dcr16a', 4, 0x75a8),
Register('dcr16b', 4, 0x75ac),
Register('dcr16c', 4, 0x75b0),
Register('dcr16d', 4, 0x75b4),
Register('dcr16e', 4, 0x75b8),
Register('dcr16f', 4, 0x75bc),
Register('dcr170', 4, 0x75c0),
Register('dcr171', 4, 0x75c4),
Register('dcr172', 4, 0x75c8),
Register('dcr173', 4, 0x75cc),
Register('dcr174', 4, 0x75d0),
Register('dcr175', 4, 0x75d4),
Register('dcr176', 4, 0x75d8),
Register('dcr177', 4, 0x75dc),
Register('dcr178', 4, 0x75e0),
Register('dcr179', 4, 0x75e4),
Register('dcr17a', 4, 0x75e8),
Register('dcr17b', 4, 0x75ec),
Register('dcr17c', 4, 0x75f0),
Register('dcr17d', 4, 0x75f4),
Register('dcr17e', 4, 0x75f8),
Register('dcr17f', 4, 0x75fc),
Register('dcr180', 4, 0x7600),
Register('dcr181', 4, 0x7604),
Register('dcr182', 4, 0x7608),
Register('dcr183', 4, 0x760c),
Register('dcr184', 4, 0x7610),
Register('dcr185', 4, 0x7614),
Register('dcr186', 4, 0x7618),
Register('dcr187', 4, 0x761c),
Register('dcr188', 4, 0x7620),
Register('dcr189', 4, 0x7624),
Register('dcr18a', 4, 0x7628),
Register('dcr18b', 4, 0x762c),
Register('dcr18c', 4, 0x7630),
Register('dcr18d', 4, 0x7634),
Register('dcr18e', 4, 0x7638),
Register('dcr18f', 4, 0x763c),
Register('dcr190', 4, 0x7640),
Register('dcr191', 4, 0x7644),
Register('dcr192', 4, 0x7648),
Register('dcr193', 4, 0x764c),
Register('dcr194', 4, 0x7650),
Register('dcr195', 4, 0x7654),
Register('dcr196', 4, 0x7658),
Register('dcr197', 4, 0x765c),
Register('dcr198', 4, 0x7660),
Register('dcr199', 4, 0x7664),
Register('dcr19a', 4, 0x7668),
Register('dcr19b', 4, 0x766c),
Register('dcr19c', 4, 0x7670),
Register('dcr19d', 4, 0x7674),
Register('dcr19e', 4, 0x7678),
Register('dcr19f', 4, 0x767c),
Register('dcr1a0', 4, 0x7680),
Register('dcr1a1', 4, 0x7684),
Register('dcr1a2', 4, 0x7688),
Register('dcr1a3', 4, 0x768c),
Register('dcr1a4', 4, 0x7690),
Register('dcr1a5', 4, 0x7694),
Register('dcr1a6', 4, 0x7698),
Register('dcr1a7', 4, 0x769c),
Register('dcr1a8', 4, 0x76a0),
Register('dcr1a9', 4, 0x76a4),
Register('dcr1aa', 4, 0x76a8),
Register('dcr1ab', 4, 0x76ac),
Register('dcr1ac', 4, 0x76b0),
Register('dcr1ad', 4, 0x76b4),
Register('dcr1ae', 4, 0x76b8),
Register('dcr1af', 4, 0x76bc),
Register('dcr1b0', 4, 0x76c0),
Register('dcr1b1', 4, 0x76c4),
Register('dcr1b2', 4, 0x76c8),
Register('dcr1b3', 4, 0x76cc),
Register('dcr1b4', 4, 0x76d0),
Register('dcr1b5', 4, 0x76d4),
Register('dcr1b6', 4, 0x76d8),
Register('dcr1b7', 4, 0x76dc),
Register('dcr1b8', 4, 0x76e0),
Register('dcr1b9', 4, 0x76e4),
Register('dcr1ba', 4, 0x76e8),
Register('dcr1bb', 4, 0x76ec),
Register('dcr1bc', 4, 0x76f0),
Register('dcr1bd', 4, 0x76f4),
Register('dcr1be', 4, 0x76f8),
Register('dcr1bf', 4, 0x76fc),
Register('dcr1c0', 4, 0x7700),
Register('dcr1c1', 4, 0x7704),
Register('dcr1c2', 4, 0x7708),
Register('dcr1c3', 4, 0x770c),
Register('dcr1c4', 4, 0x7710),
Register('dcr1c5', 4, 0x7714),
Register('dcr1c6', 4, 0x7718),
Register('dcr1c7', 4, 0x771c),
Register('dcr1c8', 4, 0x7720),
Register('dcr1c9', 4, 0x7724),
Register('dcr1ca', 4, 0x7728),
Register('dcr1cb', 4, 0x772c),
Register('dcr1cc', 4, 0x7730),
Register('dcr1cd', 4, 0x7734),
Register('dcr1ce', 4, 0x7738),
Register('dcr1cf', 4, 0x773c),
Register('dcr1d0', 4, 0x7740),
Register('dcr1d1', 4, 0x7744),
Register('dcr1d2', 4, 0x7748),
Register('dcr1d3', 4, 0x774c),
Register('dcr1d4', 4, 0x7750),
Register('dcr1d5', 4, 0x7754),
Register('dcr1d6', 4, 0x7758),
Register('dcr1d7', 4, 0x775c),
Register('dcr1d8', 4, 0x7760),
Register('dcr1d9', 4, 0x7764),
Register('dcr1da', 4, 0x7768),
Register('dcr1db', 4, 0x776c),
Register('dcr1dc', 4, 0x7770),
Register('dcr1dd', 4, 0x7774),
Register('dcr1de', 4, 0x7778),
Register('dcr1df', 4, 0x777c),
Register('dcr1e0', 4, 0x7780),
Register('dcr1e1', 4, 0x7784),
Register('dcr1e2', 4, 0x7788),
Register('dcr1e3', 4, 0x778c),
Register('dcr1e4', 4, 0x7790),
Register('dcr1e5', 4, 0x7794),
Register('dcr1e6', 4, 0x7798),
Register('dcr1e7', 4, 0x779c),
Register('dcr1e8', 4, 0x77a0),
Register('dcr1e9', 4, 0x77a4),
Register('dcr1ea', 4, 0x77a8),
Register('dcr1eb', 4, 0x77ac),
Register('dcr1ec', 4, 0x77b0),
Register('dcr1ed', 4, 0x77b4),
Register('dcr1ee', 4, 0x77b8),
Register('dcr1ef', 4, 0x77bc),
Register('dcr1f0', 4, 0x77c0),
Register('dcr1f1', 4, 0x77c4),
Register('dcr1f2', 4, 0x77c8),
Register('dcr1f3', 4, 0x77cc),
Register('dcr1f4', 4, 0x77d0),
Register('dcr1f5', 4, 0x77d4),
Register('dcr1f6', 4, 0x77d8),
Register('dcr1f7', 4, 0x77dc),
Register('dcr1f8', 4, 0x77e0),
Register('dcr1f9', 4, 0x77e4),
Register('dcr1fa', 4, 0x77e8),
Register('dcr1fb', 4, 0x77ec),
Register('dcr1fc', 4, 0x77f0),
Register('dcr1fd', 4, 0x77f4),
Register('dcr1fe', 4, 0x77f8),
Register('dcr1ff', 4, 0x77fc),
Register('dcr200', 4, 0x7800),
Register('dcr201', 4, 0x7804),
Register('dcr202', 4, 0x7808),
Register('dcr203', 4, 0x780c),
Register('dcr204', 4, 0x7810),
Register('dcr205', 4, 0x7814),
Register('dcr206', 4, 0x7818),
Register('dcr207', 4, 0x781c),
Register('dcr208', 4, 0x7820),
Register('dcr209', 4, 0x7824),
Register('dcr20a', 4, 0x7828),
Register('dcr20b', 4, 0x782c),
Register('dcr20c', 4, 0x7830),
Register('dcr20d', 4, 0x7834),
Register('dcr20e', 4, 0x7838),
Register('dcr20f', 4, 0x783c),
Register('dcr210', 4, 0x7840),
Register('dcr211', 4, 0x7844),
Register('dcr212', 4, 0x7848),
Register('dcr213', 4, 0x784c),
Register('dcr214', 4, 0x7850),
Register('dcr215', 4, 0x7854),
Register('dcr216', 4, 0x7858),
Register('dcr217', 4, 0x785c),
Register('dcr218', 4, 0x7860),
Register('dcr219', 4, 0x7864),
Register('dcr21a', 4, 0x7868),
Register('dcr21b', 4, 0x786c),
Register('dcr21c', 4, 0x7870),
Register('dcr21d', 4, 0x7874),
Register('dcr21e', 4, 0x7878),
Register('dcr21f', 4, 0x787c),
Register('dcr220', 4, 0x7880),
Register('dcr221', 4, 0x7884),
Register('dcr222', 4, 0x7888),
Register('dcr223', 4, 0x788c),
Register('dcr224', 4, 0x7890),
Register('dcr225', 4, 0x7894),
Register('dcr226', 4, 0x7898),
Register('dcr227', 4, 0x789c),
Register('dcr228', 4, 0x78a0),
Register('dcr229', 4, 0x78a4),
Register('dcr22a', 4, 0x78a8),
Register('dcr22b', 4, 0x78ac),
Register('dcr22c', 4, 0x78b0),
Register('dcr22d', 4, 0x78b4),
Register('dcr22e', 4, 0x78b8),
Register('dcr22f', 4, 0x78bc),
Register('dcr230', 4, 0x78c0),
Register('dcr231', 4, 0x78c4),
Register('dcr232', 4, 0x78c8),
Register('dcr233', 4, 0x78cc),
Register('dcr234', 4, 0x78d0),
Register('dcr235', 4, 0x78d4),
Register('dcr236', 4, 0x78d8),
Register('dcr237', 4, 0x78dc),
Register('dcr238', 4, 0x78e0),
Register('dcr239', 4, 0x78e4),
Register('dcr23a', 4, 0x78e8),
Register('dcr23b', 4, 0x78ec),
Register('dcr23c', 4, 0x78f0),
Register('dcr23d', 4, 0x78f4),
Register('dcr23e', 4, 0x78f8),
Register('dcr23f', 4, 0x78fc),
Register('dcr240', 4, 0x7900),
Register('dcr241', 4, 0x7904),
Register('dcr242', 4, 0x7908),
Register('dcr243', 4, 0x790c),
Register('dcr244', 4, 0x7910),
Register('dcr245', 4, 0x7914),
Register('dcr246', 4, 0x7918),
Register('dcr247', 4, 0x791c),
Register('dcr248', 4, 0x7920),
Register('dcr249', 4, 0x7924),
Register('dcr24a', 4, 0x7928),
Register('dcr24b', 4, 0x792c),
Register('dcr24c', 4, 0x7930),
Register('dcr24d', 4, 0x7934),
Register('dcr24e', 4, 0x7938),
Register('dcr24f', 4, 0x793c),
Register('dcr250', 4, 0x7940),
Register('dcr251', 4, 0x7944),
Register('dcr252', 4, 0x7948),
Register('dcr253', 4, 0x794c),
Register('dcr254', 4, 0x7950),
Register('dcr255', 4, 0x7954),
Register('dcr256', 4, 0x7958),
Register('dcr257', 4, 0x795c),
Register('dcr258', 4, 0x7960),
Register('dcr259', 4, 0x7964),
Register('dcr25a', 4, 0x7968),
Register('dcr25b', 4, 0x796c),
Register('dcr25c', 4, 0x7970),
Register('dcr25d', 4, 0x7974),
Register('dcr25e', 4, 0x7978),
Register('dcr25f', 4, 0x797c),
Register('dcr260', 4, 0x7980),
Register('dcr261', 4, 0x7984),
Register('dcr262', 4, 0x7988),
Register('dcr263', 4, 0x798c),
Register('dcr264', 4, 0x7990),
Register('dcr265', 4, 0x7994),
Register('dcr266', 4, 0x7998),
Register('dcr267', 4, 0x799c),
Register('dcr268', 4, 0x79a0),
Register('dcr269', 4, 0x79a4),
Register('dcr26a', 4, 0x79a8),
Register('dcr26b', 4, 0x79ac),
Register('dcr26c', 4, 0x79b0),
Register('dcr26d', 4, 0x79b4),
Register('dcr26e', 4, 0x79b8),
Register('dcr26f', 4, 0x79bc),
Register('dcr270', 4, 0x79c0),
Register('dcr271', 4, 0x79c4),
Register('dcr272', 4, 0x79c8),
Register('dcr273', 4, 0x79cc),
Register('dcr274', 4, 0x79d0),
Register('dcr275', 4, 0x79d4),
Register('dcr276', 4, 0x79d8),
Register('dcr277', 4, 0x79dc),
Register('dcr278', 4, 0x79e0),
Register('dcr279', 4, 0x79e4),
Register('dcr27a', 4, 0x79e8),
Register('dcr27b', 4, 0x79ec),
Register('dcr27c', 4, 0x79f0),
Register('dcr27d', 4, 0x79f4),
Register('dcr27e', 4, 0x79f8),
Register('dcr27f', 4, 0x79fc),
Register('dcr280', 4, 0x7a00),
Register('dcr281', 4, 0x7a04),
Register('dcr282', 4, 0x7a08),
Register('dcr283', 4, 0x7a0c),
Register('dcr284', 4, 0x7a10),
Register('dcr285', 4, 0x7a14),
Register('dcr286', 4, 0x7a18),
Register('dcr287', 4, 0x7a1c),
Register('dcr288', 4, 0x7a20),
Register('dcr289', 4, 0x7a24),
Register('dcr28a', 4, 0x7a28),
Register('dcr28b', 4, 0x7a2c),
Register('dcr28c', 4, 0x7a30),
Register('dcr28d', 4, 0x7a34),
Register('dcr28e', 4, 0x7a38),
Register('dcr28f', 4, 0x7a3c),
Register('dcr290', 4, 0x7a40),
Register('dcr291', 4, 0x7a44),
Register('dcr292', 4, 0x7a48),
Register('dcr293', 4, 0x7a4c),
Register('dcr294', 4, 0x7a50),
Register('dcr295', 4, 0x7a54),
Register('dcr296', 4, 0x7a58),
Register('dcr297', 4, 0x7a5c),
Register('dcr298', 4, 0x7a60),
Register('dcr299', 4, 0x7a64),
Register('dcr29a', 4, 0x7a68),
Register('dcr29b', 4, 0x7a6c),
Register('dcr29c', 4, 0x7a70),
Register('dcr29d', 4, 0x7a74),
Register('dcr29e', 4, 0x7a78),
Register('dcr29f', 4, 0x7a7c),
Register('dcr2a0', 4, 0x7a80),
Register('dcr2a1', 4, 0x7a84),
Register('dcr2a2', 4, 0x7a88),
Register('dcr2a3', 4, 0x7a8c),
Register('dcr2a4', 4, 0x7a90),
Register('dcr2a5', 4, 0x7a94),
Register('dcr2a6', 4, 0x7a98),
Register('dcr2a7', 4, 0x7a9c),
Register('dcr2a8', 4, 0x7aa0),
Register('dcr2a9', 4, 0x7aa4),
Register('dcr2aa', 4, 0x7aa8),
Register('dcr2ab', 4, 0x7aac),
Register('dcr2ac', 4, 0x7ab0),
Register('dcr2ad', 4, 0x7ab4),
Register('dcr2ae', 4, 0x7ab8),
Register('dcr2af', 4, 0x7abc),
Register('dcr2b0', 4, 0x7ac0),
Register('dcr2b1', 4, 0x7ac4),
Register('dcr2b2', 4, 0x7ac8),
Register('dcr2b3', 4, 0x7acc),
Register('dcr2b4', 4, 0x7ad0),
Register('dcr2b5', 4, 0x7ad4),
Register('dcr2b6', 4, 0x7ad8),
Register('dcr2b7', 4, 0x7adc),
Register('dcr2b8', 4, 0x7ae0),
Register('dcr2b9', 4, 0x7ae4),
Register('dcr2ba', 4, 0x7ae8),
Register('dcr2bb', 4, 0x7aec),
Register('dcr2bc', 4, 0x7af0),
Register('dcr2bd', 4, 0x7af4),
Register('dcr2be', 4, 0x7af8),
Register('dcr2bf', 4, 0x7afc),
Register('dcr2c0', 4, 0x7b00),
Register('dcr2c1', 4, 0x7b04),
Register('dcr2c2', 4, 0x7b08),
Register('dcr2c3', 4, 0x7b0c),
Register('dcr2c4', 4, 0x7b10),
Register('dcr2c5', 4, 0x7b14),
Register('dcr2c6', 4, 0x7b18),
Register('dcr2c7', 4, 0x7b1c),
Register('dcr2c8', 4, 0x7b20),
Register('dcr2c9', 4, 0x7b24),
Register('dcr2ca', 4, 0x7b28),
Register('dcr2cb', 4, 0x7b2c),
Register('dcr2cc', 4, 0x7b30),
Register('dcr2cd', 4, 0x7b34),
Register('dcr2ce', 4, 0x7b38),
Register('dcr2cf', 4, 0x7b3c),
Register('dcr2d0', 4, 0x7b40),
Register('dcr2d1', 4, 0x7b44),
Register('dcr2d2', 4, 0x7b48),
Register('dcr2d3', 4, 0x7b4c),
Register('dcr2d4', 4, 0x7b50),
Register('dcr2d5', 4, 0x7b54),
Register('dcr2d6', 4, 0x7b58),
Register('dcr2d7', 4, 0x7b5c),
Register('dcr2d8', 4, 0x7b60),
Register('dcr2d9', 4, 0x7b64),
Register('dcr2da', 4, 0x7b68),
Register('dcr2db', 4, 0x7b6c),
Register('dcr2dc', 4, 0x7b70),
Register('dcr2dd', 4, 0x7b74),
Register('dcr2de', 4, 0x7b78),
Register('dcr2df', 4, 0x7b7c),
Register('dcr2e0', 4, 0x7b80),
Register('dcr2e1', 4, 0x7b84),
Register('dcr2e2', 4, 0x7b88),
Register('dcr2e3', 4, 0x7b8c),
Register('dcr2e4', 4, 0x7b90),
Register('dcr2e5', 4, 0x7b94),
Register('dcr2e6', 4, 0x7b98),
Register('dcr2e7', 4, 0x7b9c),
Register('dcr2e8', 4, 0x7ba0),
Register('dcr2e9', 4, 0x7ba4),
Register('dcr2ea', 4, 0x7ba8),
Register('dcr2eb', 4, 0x7bac),
Register('dcr2ec', 4, 0x7bb0),
Register('dcr2ed', 4, 0x7bb4),
Register('dcr2ee', 4, 0x7bb8),
Register('dcr2ef', 4, 0x7bbc),
Register('dcr2f0', 4, 0x7bc0),
Register('dcr2f1', 4, 0x7bc4),
Register('dcr2f2', 4, 0x7bc8),
Register('dcr2f3', 4, 0x7bcc),
Register('dcr2f4', 4, 0x7bd0),
Register('dcr2f5', 4, 0x7bd4),
Register('dcr2f6', 4, 0x7bd8),
Register('dcr2f7', 4, 0x7bdc),
Register('dcr2f8', 4, 0x7be0),
Register('dcr2f9', 4, 0x7be4),
Register('dcr2fa', 4, 0x7be8),
Register('dcr2fb', 4, 0x7bec),
Register('dcr2fc', 4, 0x7bf0),
Register('dcr2fd', 4, 0x7bf4),
Register('dcr2fe', 4, 0x7bf8),
Register('dcr2ff', 4, 0x7bfc),
Register('dcr300', 4, 0x7c00),
Register('dcr301', 4, 0x7c04),
Register('dcr302', 4, 0x7c08),
Register('dcr303', 4, 0x7c0c),
Register('dcr304', 4, 0x7c10),
Register('dcr305', 4, 0x7c14),
Register('dcr306', 4, 0x7c18),
Register('dcr307', 4, 0x7c1c),
Register('dcr308', 4, 0x7c20),
Register('dcr309', 4, 0x7c24),
Register('dcr30a', 4, 0x7c28),
Register('dcr30b', 4, 0x7c2c),
Register('dcr30c', 4, 0x7c30),
Register('dcr30d', 4, 0x7c34),
Register('dcr30e', 4, 0x7c38),
Register('dcr30f', 4, 0x7c3c),
Register('dcr310', 4, 0x7c40),
Register('dcr311', 4, 0x7c44),
Register('dcr312', 4, 0x7c48),
Register('dcr313', 4, 0x7c4c),
Register('dcr314', 4, 0x7c50),
Register('dcr315', 4, 0x7c54),
Register('dcr316', 4, 0x7c58),
Register('dcr317', 4, 0x7c5c),
Register('dcr318', 4, 0x7c60),
Register('dcr319', 4, 0x7c64),
Register('dcr31a', 4, 0x7c68),
Register('dcr31b', 4, 0x7c6c),
Register('dcr31c', 4, 0x7c70),
Register('dcr31d', 4, 0x7c74),
Register('dcr31e', 4, 0x7c78),
Register('dcr31f', 4, 0x7c7c),
Register('dcr320', 4, 0x7c80),
Register('dcr321', 4, 0x7c84),
Register('dcr322', 4, 0x7c88),
Register('dcr323', 4, 0x7c8c),
Register('dcr324', 4, 0x7c90),
Register('dcr325', 4, 0x7c94),
Register('dcr326', 4, 0x7c98),
Register('dcr327', 4, 0x7c9c),
Register('dcr328', 4, 0x7ca0),
Register('dcr329', 4, 0x7ca4),
Register('dcr32a', 4, 0x7ca8),
Register('dcr32b', 4, 0x7cac),
Register('dcr32c', 4, 0x7cb0),
Register('dcr32d', 4, 0x7cb4),
Register('dcr32e', 4, 0x7cb8),
Register('dcr32f', 4, 0x7cbc),
Register('dcr330', 4, 0x7cc0),
Register('dcr331', 4, 0x7cc4),
Register('dcr332', 4, 0x7cc8),
Register('dcr333', 4, 0x7ccc),
Register('dcr334', 4, 0x7cd0),
Register('dcr335', 4, 0x7cd4),
Register('dcr336', 4, 0x7cd8),
Register('dcr337', 4, 0x7cdc),
Register('dcr338', 4, 0x7ce0),
Register('dcr339', 4, 0x7ce4),
Register('dcr33a', 4, 0x7ce8),
Register('dcr33b', 4, 0x7cec),
Register('dcr33c', 4, 0x7cf0),
Register('dcr33d', 4, 0x7cf4),
Register('dcr33e', 4, 0x7cf8),
Register('dcr33f', 4, 0x7cfc),
Register('dcr340', 4, 0x7d00),
Register('dcr341', 4, 0x7d04),
Register('dcr342', 4, 0x7d08),
Register('dcr343', 4, 0x7d0c),
Register('dcr344', 4, 0x7d10),
Register('dcr345', 4, 0x7d14),
Register('dcr346', 4, 0x7d18),
Register('dcr347', 4, 0x7d1c),
Register('dcr348', 4, 0x7d20),
Register('dcr349', 4, 0x7d24),
Register('dcr34a', 4, 0x7d28),
Register('dcr34b', 4, 0x7d2c),
Register('dcr34c', 4, 0x7d30),
Register('dcr34d', 4, 0x7d34),
Register('dcr34e', 4, 0x7d38),
Register('dcr34f', 4, 0x7d3c),
Register('dcr350', 4, 0x7d40),
Register('dcr351', 4, 0x7d44),
Register('dcr352', 4, 0x7d48),
Register('dcr353', 4, 0x7d4c),
Register('dcr354', 4, 0x7d50),
Register('dcr355', 4, 0x7d54),
Register('dcr356', 4, 0x7d58),
Register('dcr357', 4, 0x7d5c),
Register('dcr358', 4, 0x7d60),
Register('dcr359', 4, 0x7d64),
Register('dcr35a', 4, 0x7d68),
Register('dcr35b', 4, 0x7d6c),
Register('dcr35c', 4, 0x7d70),
Register('dcr35d', 4, 0x7d74),
Register('dcr35e', 4, 0x7d78),
Register('dcr35f', 4, 0x7d7c),
Register('dcr360', 4, 0x7d80),
Register('dcr361', 4, 0x7d84),
Register('dcr362', 4, 0x7d88),
Register('dcr363', 4, 0x7d8c),
Register('dcr364', 4, 0x7d90),
Register('dcr365', 4, 0x7d94),
Register('dcr366', 4, 0x7d98),
Register('dcr367', 4, 0x7d9c),
Register('dcr368', 4, 0x7da0),
Register('dcr369', 4, 0x7da4),
Register('dcr36a', 4, 0x7da8),
Register('dcr36b', 4, 0x7dac),
Register('dcr36c', 4, 0x7db0),
Register('dcr36d', 4, 0x7db4),
Register('dcr36e', 4, 0x7db8),
Register('dcr36f', 4, 0x7dbc),
Register('dcr370', 4, 0x7dc0),
Register('dcr371', 4, 0x7dc4),
Register('dcr372', 4, 0x7dc8),
Register('dcr373', 4, 0x7dcc),
Register('dcr374', 4, 0x7dd0),
Register('dcr375', 4, 0x7dd4),
Register('dcr376', 4, 0x7dd8),
Register('dcr377', 4, 0x7ddc),
Register('dcr378', 4, 0x7de0),
Register('dcr379', 4, 0x7de4),
Register('dcr37a', 4, 0x7de8),
Register('dcr37b', 4, 0x7dec),
Register('dcr37c', 4, 0x7df0),
Register('dcr37d', 4, 0x7df4),
Register('dcr37e', 4, 0x7df8),
Register('dcr37f', 4, 0x7dfc),
Register('dcr380', 4, 0x7e00),
Register('dcr381', 4, 0x7e04),
Register('dcr382', 4, 0x7e08),
Register('dcr383', 4, 0x7e0c),
Register('dcr384', 4, 0x7e10),
Register('dcr385', 4, 0x7e14),
Register('dcr386', 4, 0x7e18),
Register('dcr387', 4, 0x7e1c),
Register('dcr388', 4, 0x7e20),
Register('dcr389', 4, 0x7e24),
Register('dcr38a', 4, 0x7e28),
Register('dcr38b', 4, 0x7e2c),
Register('dcr38c', 4, 0x7e30),
Register('dcr38d', 4, 0x7e34),
Register('dcr38e', 4, 0x7e38),
Register('dcr38f', 4, 0x7e3c),
Register('dcr390', 4, 0x7e40),
Register('dcr391', 4, 0x7e44),
Register('dcr392', 4, 0x7e48),
Register('dcr393', 4, 0x7e4c),
Register('dcr394', 4, 0x7e50),
Register('dcr395', 4, 0x7e54),
Register('dcr396', 4, 0x7e58),
Register('dcr397', 4, 0x7e5c),
Register('dcr398', 4, 0x7e60),
Register('dcr399', 4, 0x7e64),
Register('dcr39a', 4, 0x7e68),
Register('dcr39b', 4, 0x7e6c),
Register('dcr39c', 4, 0x7e70),
Register('dcr39d', 4, 0x7e74),
Register('dcr39e', 4, 0x7e78),
Register('dcr39f', 4, 0x7e7c),
Register('dcr3a0', 4, 0x7e80),
Register('dcr3a1', 4, 0x7e84),
Register('dcr3a2', 4, 0x7e88),
Register('dcr3a3', 4, 0x7e8c),
Register('dcr3a4', 4, 0x7e90),
Register('dcr3a5', 4, 0x7e94),
Register('dcr3a6', 4, 0x7e98),
Register('dcr3a7', 4, 0x7e9c),
Register('dcr3a8', 4, 0x7ea0),
Register('dcr3a9', 4, 0x7ea4),
Register('dcr3aa', 4, 0x7ea8),
Register('dcr3ab', 4, 0x7eac),
Register('dcr3ac', 4, 0x7eb0),
Register('dcr3ad', 4, 0x7eb4),
Register('dcr3ae', 4, 0x7eb8),
Register('dcr3af', 4, 0x7ebc),
Register('dcr3b0', 4, 0x7ec0),
Register('dcr3b1', 4, 0x7ec4),
Register('dcr3b2', 4, 0x7ec8),
Register('dcr3b3', 4, 0x7ecc),
Register('dcr3b4', 4, 0x7ed0),
Register('dcr3b5', 4, 0x7ed4),
Register('dcr3b6', 4, 0x7ed8),
Register('dcr3b7', 4, 0x7edc),
Register('dcr3b8', 4, 0x7ee0),
Register('dcr3b9', 4, 0x7ee4),
Register('dcr3ba', 4, 0x7ee8),
Register('dcr3bb', 4, 0x7eec),
Register('dcr3bc', 4, 0x7ef0),
Register('dcr3bd', 4, 0x7ef4),
Register('dcr3be', 4, 0x7ef8),
Register('dcr3bf', 4, 0x7efc),
Register('dcr3c0', 4, 0x7f00),
Register('dcr3c1', 4, 0x7f04),
Register('dcr3c2', 4, 0x7f08),
Register('dcr3c3', 4, 0x7f0c),
Register('dcr3c4', 4, 0x7f10),
Register('dcr3c5', 4, 0x7f14),
Register('dcr3c6', 4, 0x7f18),
Register('dcr3c7', 4, 0x7f1c),
Register('dcr3c8', 4, 0x7f20),
Register('dcr3c9', 4, 0x7f24),
Register('dcr3ca', 4, 0x7f28),
Register('dcr3cb', 4, 0x7f2c),
Register('dcr3cc', 4, 0x7f30),
Register('dcr3cd', 4, 0x7f34),
Register('dcr3ce', 4, 0x7f38),
Register('dcr3cf', 4, 0x7f3c),
Register('dcr3d0', 4, 0x7f40),
Register('dcr3d1', 4, 0x7f44),
Register('dcr3d2', 4, 0x7f48),
Register('dcr3d3', 4, 0x7f4c),
Register('dcr3d4', 4, 0x7f50),
Register('dcr3d5', 4, 0x7f54),
Register('dcr3d6', 4, 0x7f58),
Register('dcr3d7', 4, 0x7f5c),
Register('dcr3d8', 4, 0x7f60),
Register('dcr3d9', 4, 0x7f64),
Register('dcr3da', 4, 0x7f68),
Register('dcr3db', 4, 0x7f6c),
Register('dcr3dc', 4, 0x7f70),
Register('dcr3dd', 4, 0x7f74),
Register('dcr3de', 4, 0x7f78),
Register('dcr3df', 4, 0x7f7c),
Register('dcr3e0', 4, 0x7f80),
Register('dcr3e1', 4, 0x7f84),
Register('dcr3e2', 4, 0x7f88),
Register('dcr3e3', 4, 0x7f8c),
Register('dcr3e4', 4, 0x7f90),
Register('dcr3e5', 4, 0x7f94),
Register('dcr3e6', 4, 0x7f98),
Register('dcr3e7', 4, 0x7f9c),
Register('dcr3e8', 4, 0x7fa0),
Register('dcr3e9', 4, 0x7fa4),
Register('dcr3ea', 4, 0x7fa8),
Register('dcr3eb', 4, 0x7fac),
Register('dcr3ec', 4, 0x7fb0),
Register('dcr3ed', 4, 0x7fb4),
Register('dcr3ee', 4, 0x7fb8),
Register('dcr3ef', 4, 0x7fbc),
Register('dcr3f0', 4, 0x7fc0),
Register('dcr3f1', 4, 0x7fc4),
Register('dcr3f2', 4, 0x7fc8),
Register('dcr3f3', 4, 0x7fcc),
Register('dcr3f4', 4, 0x7fd0),
Register('dcr3f5', 4, 0x7fd4),
Register('dcr3f6', 4, 0x7fd8),
Register('dcr3f7', 4, 0x7fdc),
Register('dcr3f8', 4, 0x7fe0),
Register('dcr3f9', 4, 0x7fe4),
Register('dcr3fa', 4, 0x7fe8),
Register('dcr3fb', 4, 0x7fec),
Register('dcr3fc', 4, 0x7ff0),
Register('dcr3fd', 4, 0x7ff4),
Register('dcr3fe', 4, 0x7ff8),
Register('dcr3ff', 4, 0x7ffc),
Register('acc', 8, 0x10000)
]
register_arch(['powerpc:le:32:quicc'], 32, Endness.LE, ArchPcode_PowerPC_LE_32_QUICC)
| 1.617188 | 2 |
makesense/graph.py | sieben/makesense | 5 | 9632 | # -*- coding: utf-8 -*-
import itertools
import json
import os
from csv import DictReader
from os.path import join as pj
from os.path import join as PJ  # some plotting helpers below use the upper-case alias
import matplotlib.pyplot as plt
import networkx as nx
import pandas as pd
from networkx import wheel_graph
from networkx.readwrite import json_graph
from networkx.readwrite.json_graph import node_link_data
def chain():
g = nx.Graph()
# Horizontal
for i in range(11, 15):
g.add_edge(i, i + 1)
for i in range(7, 10):
g.add_edge(i, i + 1)
for i in range(4, 6):
g.add_edge(i, i + 1)
for i in range(2, 3):
g.add_edge(i, i + 1)
g.add_node(1)
# Trans height
g.add_edge(1, 2)
g.add_edge(1, 3)
g.add_edge(2, 4)
g.add_edge(2, 5)
g.add_edge(3, 5)
g.add_edge(3, 6)
g.add_edge(4, 7)
g.add_edge(4, 8)
g.add_edge(5, 8)
g.add_edge(5, 9)
g.add_edge(6, 9)
g.add_edge(6, 10)
g.add_edge(7, 11)
g.add_edge(7, 12)
g.add_edge(8, 12)
g.add_edge(8, 13)
g.add_edge(9, 13)
g.add_edge(9, 14)
g.add_edge(10, 14)
g.add_edge(10, 15)
def tree():
with open("graph_radio.json", "w") as f:
f.write(json_graph.dumps(g,sort_keys=True,
indent=4, separators=(',', ': ') ))
# Drawing
pos = nx.spectral_layout(g)
nx.draw(g, pos, node_color="g")
nx.draw_networkx_nodes(g, pos, nodelist=[1], node_color="b")
plt.savefig("topology_tree.pdf", format="pdf")
plt.show()
def plot_graph_chain(folder):
g = nx.DiGraph()
N = 7
for i in range(1, N):
g.add_edge(i + 1, i)
g.add_node(1, root=True)
with open("radio_tree.json", "w") as f:
f.write(json_graph.dumps(g, sort_keys=True,
indent=4, separators=(',', ': ')))
pos = nx.circular_layout(g)
nx.draw(g, pos=pos)
nx.draw_networkx_nodes(g, pos, node_color='g')
nx.draw_networkx_nodes(g, pos, nodelist=[1], node_color='b')
nx.draw_networkx_edges(g, pos, edge_color="r", arrows=True)
plt.savefig(pj(folder, "topology_chain.pdf"), format="pdf")
def flower():
g = wheel_graph(7)
g.add_edge(6, 1)
g.add_edge(7, 6)
g.add_edge(8, 7)
with open("radio_graph.json", "w") as f:
f.write(json_graph.dumps(g, sort_keys=True,
indent=4, separators=(',', ': ')))
pos = nx.spring_layout(g)
nx.draw(g, pos=pos)
nx.draw_networkx_nodes(g,pos,
node_color='g')
nx.draw_networkx_nodes(g,pos,
nodelist=[8],
node_color='b')
#nx.draw_networkx_edges(g, pos, edge_color="r", arrows=True)
plt.savefig("topology_fleur.pdf", format="pdf")
plt.show()
def plot_graph(self):
"""
Plot the transmission graph of the simulation.
TODO: Draw arrows and have a directed graph.
http://goo.gl/Z697dH
TODO: Graph with big nodes for big transmissions
"""
fig = plt.figure()
ax1 = fig.add_subplot(111)
ax1.set_title("Transmission / RPL tree")
ax1.axis("off")
val_color = {"udp_server": 0.5714285714285714}
pos = {node: data["pos"]
for node, data in self.radio_tree.nodes(data=True)}
# color for all nodes
node_color = [val_color.get(data["mote_type"], 0.25)
for node, data in self.radio_tree.nodes(data=True)]
# Drawing the nodes
nx.draw_networkx_nodes(self.radio_tree, pos, node_color=node_color, ax=ax1)
nx.draw_networkx_labels(self.radio_tree, pos, ax=ax1)
# Drawing radio edges
nx.draw_networkx_edges(self.radio_tree, pos, edgelist=self.radio_tree.edges(),
width=8, alpha=0.5, ax=ax1)
# Adding the depth of each node.
with open(PJ(self.result_dir, "depth.csv")) as depth_f:
reader = DictReader(depth_f)
for row in reader:
node = int(row["node"])
depth = row["depth"]
ax1.text(pos[node][0] + 5, pos[node][1] + 5, depth,
bbox=dict(facecolor='red', alpha=0.5),
horizontalalignment='center')
# Drawing RPL edges
nx.draw_networkx_edges(
self.rpl_tree, pos, edge_color='r', nodelist=[], arrows=True, ax=ax1)
img_path = PJ(self.img_dir, "graph.pdf")
fig.savefig(img_path, format="pdf")
update_report(self.result_dir, "plot_graph", {
"img_src": "img/graph.pdf",
"comment": """
When the edge is thick it means edges are in an RPL instance.
Otherwise it means that the two nodes can see each others.
""",
"text": """
We generate a random geometric graph then use information coming
to the RPL root to construct the gateway representation of the RPL
tree. We add into this tree representation the traffic generated.
"""})
def transmission_graph(self):
"""
Plot the transmission graph of the simulation.
"""
settings = self.settings["transmission_graph"]
output_path = pj(self.result_folder_path, *settings["output_path"])
fig_rplinfo, ax_transmission_graph = plt.subplots()
net = nx.Graph()
# nodes
mote_types = self.settings["mote_types"]
motes = self.settings["motes"]
position = {}
for mote in motes:
mote_type = mote["mote_type"]
mote_id = mote["mote_id"]
position[mote_id] = (mote["x"], mote["y"])
mote_types[mote_type] \
.setdefault("nodes", []) \
.append(mote["mote_id"])
# edges
transmitting_range = self.settings["transmitting_range"]
for couple in itertools.product(motes, motes):
if 0 < distance(couple) <= transmitting_range:
net.add_edge(couple[0]["mote_id"],
couple[1]["mote_id"])
for mote_type in mote_types:
color = mote_types[mote_type]["color"]
nodelist = mote_types[mote_type]["nodes"]
nx.draw_networkx_nodes(net, position,
nodelist=nodelist,
node_color=color,
ax=ax_transmission_graph)
nx.draw_networkx_edges(net, pos=position, ax=ax_transmission_graph)
# labels
nx.draw_networkx_labels(net, position, ax=ax_transmission_graph)
plt.axis('off')
plt.savefig(output_path) # save as PNG
return ax_transmission_graph
def rpl_graph(folder):
"""
Build up the RPL representation at the gateway
"""
output_folder = pj(folder, "results", "graph")
if not os.path.exists(output_folder):
os.makedirs(output_folder)
df = pd.read_csv(pj(folder, "results", "messages.csv"))
parent_df = df[df.message_type == "parent"]
rpl_graph = nx.DiGraph()
for c, p in parent_df.iterrows():
rpl_graph.add_edge(p["mote_id"], p["node"])
with open(pj(output_folder, "rpl_graph.json"), "w") as f:
f.write(json.dumps(node_link_data(rpl_graph),
sort_keys=True, indent=4))
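

# --- Usage sketch (added for illustration; not part of the original module) ---
# rpl_graph() expects <folder>/results/messages.csv with "message_type",
# "mote_id" and "node" columns, and writes <folder>/results/graph/rpl_graph.json.
# The folder name below is an assumed example, not a path from the project.
def _rpl_graph_example(folder="example_experiment"):
    rpl_graph(folder)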
| 2.78125 | 3 |
recipes/Python/52228_Remote_control_with_telnetlib/recipe-52228.py | tdiprima/code | 2,023 | 9633 | <reponame>tdiprima/code<filename>recipes/Python/52228_Remote_control_with_telnetlib/recipe-52228.py
# auto_telnet.py - remote control via telnet
import os, sys, string, telnetlib
from getpass import getpass
class AutoTelnet:
def __init__(self, user_list, cmd_list, **kw):
self.host = kw.get('host', 'localhost')
self.timeout = kw.get('timeout', 600)
self.command_prompt = kw.get('command_prompt', "$ ")
self.passwd = {}
for user in user_list:
self.passwd[user] = getpass("Enter user '%s' password: " % user)
self.telnet = telnetlib.Telnet()
for user in user_list:
self.telnet.open(self.host)
ok = self.action(user, cmd_list)
if not ok:
print "Unable to process:", user
self.telnet.close()
def action(self, user, cmd_list):
t = self.telnet
t.write("\n")
login_prompt = "login: "
response = t.read_until(login_prompt, 5)
if string.count(response, login_prompt):
print response
else:
return 0
password_prompt = "Password:"
t.write("%s\n" % user)
response = t.read_until(password_prompt, 3)
if string.count(response, password_prompt):
print response
else:
return 0
t.write("%s\n" % self.passwd[user])
response = t.read_until(self.command_prompt, 5)
if not string.count(response, self.command_prompt):
return 0
for cmd in cmd_list:
t.write("%s\n" % cmd)
response = t.read_until(self.command_prompt, self.timeout)
if not string.count(response, self.command_prompt):
return 0
print response
return 1
if __name__ == '__main__':
basename = os.path.splitext(os.path.basename(sys.argv[0]))[0]
logname = os.environ.get("LOGNAME", os.environ.get("USERNAME"))
host = 'localhost'
import getopt
optlist, user_list = getopt.getopt(sys.argv[1:], 'c:f:h:')
usage = """
usage: %s [-h host] [-f cmdfile] [-c "command"] user1 user2 ...
-c command
-f command file
-h host (default: '%s')
Example: %s -c "echo $HOME" %s
""" % (basename, host, basename, logname)
if len(sys.argv) < 2:
print usage
sys.exit(1)
cmd_list = []
for (opt, optarg) in optlist:
if opt == '-f':
for r in open(optarg).readlines():
if string.rstrip(r):
cmd_list.append(r)
elif opt == '-c':
command = optarg
if command[0] == '"' and command[-1] == '"':
command = command[1:-1]
cmd_list.append(command)
elif opt == '-h':
host = optarg
autoTelnet = AutoTelnet(user_list, cmd_list, host=host)
| 2.890625 | 3 |
FFTNet_dilconv.py | mimbres/FFTNet | 0 | 9634 | <filename>FFTNet_dilconv.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 7 09:46:10 2018
@author: sungkyun
FFTNet model using 2x1 dil-conv
"""
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
# Models with Preset (for convenience)
'''
input_dim: dimension of input (256 for 8-bit mu-law input)
num_layer: number of layers (11 in paper). receptive field = 2^11 (2,048)
io_ch: number of input(=output) channels in each fft layers
skip_ch: number of skip-channels, only required for fft-residual net.
Annotations:
B: batch dimension
C: channel dimension
L: length dimension
'''
def fftnet_base(input_dim=256, num_layer=11, io_ch=256):
return FFTNet(input_dim=input_dim, num_layer=num_layer, io_ch=io_ch, skip_ch=0, bias=True)
def fftnet_residual(input_dim=256, num_layer=11, io_ch=256, skip_ch=256):
    return FFTNet(input_dim=input_dim, num_layer=num_layer, io_ch=io_ch, skip_ch=skip_ch, bias=True)
# FFT_Block: define a basic FFT Block
'''
FFT_Block:
- using 2x1 dilated-conv, instead of LR split 1x1 conv.
- described in the paper, section 2.2.
- in case of the first layer used in the first FFT_Block,
we use nn.embedding layer for one-hot index(0-255) entries.
'''
class FFT_Block(nn.Module):
def __init__(self, cond_dim=26, io_ch=int, recep_sz=int, bias=True):
super(FFT_Block, self).__init__()
self.cond_dim=cond_dim # Number of dimensions of condition input
self.io_ch = io_ch
self.recep_sz = recep_sz # Size of receptive field: i.e., the 1st layer has receptive field of 2^11(=2,048). 2nd has 2^10.
self.bias = bias # If True, use bias in 1x1 conv.
self.dilation = int(recep_sz / 2)
self.conv_2x1_LR = nn.Conv1d(in_channels=self.io_ch, out_channels=self.io_ch,
kernel_size=2, stride=1, dilation=self.dilation, bias=self.bias)
self.conv_2x1_VLR = nn.Conv1d(in_channels=self.cond_dim, out_channels=self.io_ch,
kernel_size=2, stride=1, dilation=self.dilation, bias=self.bias)
self.conv_1x1_last = nn.Conv1d(in_channels=self.io_ch, out_channels=self.io_ch,
kernel_size=1, stride=1, bias=self.bias)
return None
def forward(self, x, cond):
z = self.conv_2x1_LR(x) # Eq(1), z = w_L*x_L + w_R*x_R
z = z + self.conv_2x1_VLR(cond) # Eq(2), z = (WL ∗ xL + WR ∗ xR) + (VL ∗ hL + VR ∗ hR)
x = F.relu(self.conv_1x1_last(F.relu(z))) # x = ReLU(conv1x1(ReLU(z)))
return x
'''
FFTNet:
- [11 FFT_blocks] --> [FC_layer] --> [softmax]
'''
class FFTNet(nn.Module):
def __init__(self, input_dim=256, cond_dim=26, num_layer=11, io_ch=256, skip_ch=0, bias=True):
super(FFTNet, self).__init__()
self.input_dim = input_dim # 256 (=num_classes)
self.cond_dim = cond_dim # 26
self.num_layer = num_layer # 11
self.io_ch = io_ch # 256 ch. in the paper
self.skip_ch = skip_ch # Not implemented yet (no skip channel in the paper)
self.bias = bias # If True, use bias in 2x1 conv.
self.max_recep_sz = int(pow(2, self.num_layer)) # 2^11, max receptive field size
# Embedding layer: one-hot_index -> embedding -> 256ch output
self.input_embedding_layer = nn.Embedding(num_embeddings=self.input_dim,
embedding_dim=self.io_ch)
# Constructing FFT Blocks:
blocks = nn.ModuleList()
for l in range(self.num_layer):
recep_sz = int(pow(2, self.num_layer-l)) # 1024, 512, ... 2
blocks.append( FFT_Block(cond_dim=self.cond_dim,
io_ch=self.io_ch,
recep_sz=recep_sz,
bias=self.bias) )
self.fft_blocks=blocks
# Final FC layer:
self.fc = nn.Linear(in_features=self.io_ch, out_features=self.io_ch)
return None
def forward(self, x, cond, gen_mod=False):
# Padding x:
zpad_sz = int(self.max_recep_sz)
x = F.pad(x, (zpad_sz, 0), 'constant', 128) # 128? or 0?
# Embedding(x):
x = self.input_embedding_layer(x) # In : BxL, Out: BxLxC
x = x.permute(0,2,1) # Out: BxCxL
# FFT_Blocks:
for l in range(self.num_layer):
# Padding cond:
zpad_sz = int(self.max_recep_sz/pow(2, l))
padded_cond = F.pad(cond, (zpad_sz, 0), 'constant', 0)
x = self.fft_blocks[l](x, padded_cond)
if gen_mod is True:
x = x[:,:,-1] # In generator mode, take the last one sample only.
x = x.reshape(-1, 1, self.io_ch) # (BxC) --> (Bx1xC)
else:
x = x[:,:,:-1] # In training mode, right-omit 1 is required.
x = x.permute(0,2,1) # (BxCxL) --> (BxLxC)
x = self.fc(x) # (BxLxC)
# NOTE: in PyTorch, softmax() is included in CE loss.
return x
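

# --- Usage sketch (added for illustration; not part of the original file) ---
# Minimal training-mode forward pass. The batch size, sequence length and the
# random tensors below are assumptions for illustration only; real training
# feeds mu-law encoded audio indices with per-sample acoustic conditions
# (cond_dim=26 here).
def _fftnet_usage_example():
    model = fftnet_base(input_dim=256, num_layer=11, io_ch=256)
    x = torch.randint(0, 256, (2, 100))      # B x L mu-law class indices
    cond = torch.randn(2, 26, 100)           # B x cond_dim x L conditions
    logits = model(x, cond, gen_mod=False)   # -> B x L x 256
    return logits.shape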
| 2.34375 | 2 |
snewpdag/plugins/Copy.py | SNEWS2/snewpdag | 0 | 9635 | <filename>snewpdag/plugins/Copy.py
"""
Copy - copy fields into other (possibly new) fields
configuration:
on: list of 'alert', 'revoke', 'report', 'reset' (optional: def 'alert' only)
cp: ( (in,out), ... )
Field names take the form of dir1/dir2/dir3,
which in the payload will be data[dir1][dir2][dir3]
"""
import logging
from snewpdag.dag import Node
class Copy(Node):
def __init__(self, cp, **kwargs):
self.cp = []
for op in cp:
src = op[0].split('/')
dst = op[1].split('/')
self.cp.append( [src, dst[:-1], dst[-1]] )
self.on = kwargs.pop('on', [ 'alert' ])
super().__init__(**kwargs)
def copy(self, data):
for op in self.cp:
v = data # should just follow references
for k in op[0]:
if k in v:
v = v[k]
else:
logging.warning('Field {} not found from source {}'.format(k, op[0]))
continue
# v should now hold the value to be copied
d = data
for k in op[1]:
if k not in d:
d[k] = {}
d = d[k]
d[op[2]] = v
return data
def alert(self, data):
return self.copy(data) if 'alert' in self.on else True
def revoke(self, data):
return self.copy(data) if 'revoke' in self.on else True
def reset(self, data):
return self.copy(data) if 'reset' in self.on else True
def report(self, data):
return self.copy(data) if 'report' in self.on else True
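

# --- Usage sketch (added for illustration; not part of the original plugin) ---
# 'cp' maps slash-separated source paths to destination paths: the pair
# ('t/true', 'truth/tdet') copies data['t']['true'] into data['truth']['tdet'],
# creating intermediate dictionaries as needed.  The node name, the payload and
# the assumption that the base Node accepts a 'name' keyword are illustrative;
# real instances are built from the DAG configuration.
def _copy_usage_example():
    node = Copy(cp=[('t/true', 'truth/tdet')], name='Copy0')
    payload = {'action': 'alert', 't': {'true': 3.5}}
    node.alert(payload)
    return payload['truth']['tdet']  # -> 3.5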
| 2.359375 | 2 |
pages/aboutus.py | BuildWeek-AirBnB-Optimal-Price/application | 0 | 9636 | <gh_stars>0
'''
houses each team member's
link to personal GitHub io or
website or blog space
RJProctor
'''
# Imports from 3rd party libraries
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from app import app
# 1 column layout
# https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
column1 = dbc.Col(
[
dcc.Markdown(
"""
## The Team:
Select a link to learn more about each of our
team members.
"""
),
],
)
# create footer
column2 = dbc.Col(
[
dcc.Markdown(
"""
**<NAME>**
https://github.com/dscohen75/dscohen75.github.io
https://medium.com/@debbiecohen_22419
**<NAME>**
---
**<NAME>**
https://medium.com/@eprecendez
---
**<NAME>**
https://jproctor-rebecca.github.io/
https://medium.com/@jproctor.m.ed.tn
---
**Code Review Team Members:**
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
<NAME>,
and
<NAME>
"""
),
],
)
layout = dbc.Row([column1, column2]) | 2.28125 | 2 |
Codes/Python32/Lib/importlib/test/extension/test_path_hook.py | eyantra/FireBird_Swiss_Knife | 319 | 9637 | from importlib import _bootstrap
from . import util
import collections
import imp
import sys
import unittest
class PathHookTests(unittest.TestCase):
"""Test the path hook for extension modules."""
# XXX Should it only succeed for pre-existing directories?
# XXX Should it only work for directories containing an extension module?
def hook(self, entry):
return _bootstrap._file_path_hook(entry)
def test_success(self):
# Path hook should handle a directory where a known extension module
# exists.
self.assertTrue(hasattr(self.hook(util.PATH), 'find_module'))
def test_main():
from test.support import run_unittest
run_unittest(PathHookTests)
if __name__ == '__main__':
test_main()
| 2.515625 | 3 |
3. count_words/solution.py | dcragusa/WeeklyPythonExerciseB2 | 0 | 9638 | import os
from glob import iglob
from concurrent.futures import ThreadPoolExecutor
def count_words_file(path):
if not os.path.isfile(path):
return 0
with open(path) as file:
return sum(len(line.split()) for line in file)
def count_words_sequential(pattern):
return sum(map(count_words_file, iglob(pattern)))
def count_words_threading(pattern):
with ThreadPoolExecutor() as pool:
return sum(pool.map(count_words_file, iglob(pattern)))
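

# --- Usage sketch (added for illustration) ---
# Both entry points take a glob pattern; '*.txt' below is an assumed example.
# The threaded and sequential variants should return the same total.
if __name__ == '__main__':
    print(count_words_sequential('*.txt'))
    print(count_words_threading('*.txt'))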
| 3.078125 | 3 |
kafka-connect-azblob/docs/autoreload.py | cirobarradov/kafka-connect-hdfs-datalab | 0 | 9639 | <reponame>cirobarradov/kafka-connect-hdfs-datalab<filename>kafka-connect-azblob/docs/autoreload.py
#!/usr/bin/env python
from livereload import Server, shell
server = Server()
server.watch('*.rst', shell('make html'))
server.serve()
| 1.234375 | 1 |
keras_textclassification/conf/path_config.py | atom-zh/Keras-TextClassification | 0 | 9640 | # -*- coding: UTF-8 -*-
# !/usr/bin/python
# @time :2019/6/5 21:04
# @author :Mo
# @function :file of path
import os
import pathlib
import sys
# Project root directory
path_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
path_root = path_root.replace('\\', '/')
path_top = str(pathlib.Path(os.path.abspath(__file__)).parent.parent.parent)
path_top = path_top.replace('\\', '/')
# path of embedding
path_embedding_user_dict = path_root + '/data/embeddings/user_dict.txt'
path_embedding_random_char = path_root + '/data/embeddings/term_char.txt'
path_embedding_random_word = path_root + '/data/embeddings/term_word.txt'
path_embedding_bert = path_root + '/data/embeddings/chinese_L-12_H-768_A-12/'
path_embedding_xlnet = path_root + '/data/embeddings/chinese_xlnet_mid_L-24_H-768_A-12/'
path_embedding_albert = path_root + '/data/embeddings/albert_base_zh'
path_embedding_vector_word2vec_char = path_root + '/data/embeddings/multi_label_char.vec'
path_embedding_vector_word2vec_word = path_root + '/data/embeddings/multi_label_word.vec'
path_embedding_vector_word2vec_char_bin = path_root + '/data/embeddings/multi_label_char.bin'
path_embedding_vector_word2vec_word_bin = path_root + '/data/embeddings/multi_label_word.bin'
# classify data of baidu qa 2019
path_baidu_qa_2019_train = path_root + '/data/baidu_qa_2019/baike_qa_train.csv'
path_baidu_qa_2019_valid = path_root + '/data/baidu_qa_2019/baike_qa_valid.csv'
# Toutiao (ByteDance) news multi-label classification data
path_byte_multi_news_train = path_root + '/data/byte_multi_news/train.csv'
path_byte_multi_news_valid = path_root + '/data/byte_multi_news/valid.csv'
path_byte_multi_news_label = path_root + '/data/byte_multi_news/labels.csv'
# sentence-similarity data of webank
path_sim_webank_train = path_root + '/data/sim_webank/train.csv'
path_sim_webank_valid = path_root + '/data/sim_webank/valid.csv'
path_sim_webank_test = path_root + '/data/sim_webank/test.csv'
# classify multi labels 2021
path_multi_label_train = path_root + '/data/multi_label/train.csv'
path_multi_label_valid = path_root + '/data/multi_label/valid.csv'
path_multi_label_labels = path_root + '/data/multi_label/labels.csv'
path_multi_label_tests = path_root + '/data/multi_label/tests.csv'
# Path abstraction layer
path_label = path_multi_label_labels
path_train = path_multi_label_train
path_valid = path_multi_label_valid
path_tests = path_multi_label_tests
path_edata = path_root + "/../out/error_data.csv"
# fast_text config
path_out = path_top + "/out/"
# Model directory
path_model_dir = path_root + "/data/model/fast_text/"
# Model file path
path_model = path_root + '/data/model/fast_text/model_fast_text.h5'
# Hyper-parameter save path
path_hyper_parameters = path_root + '/data/model/fast_text/hyper_parameters.json'
# Fine-tuned embedding save path
path_fineture = path_root + "/data/model/fast_text/embedding_trainable.h5"
# Save the category-to-label index
path_category = path_root + '/data/multi_label/category2labels.json'
# l2i_i2l
path_l2i_i2l = path_root + '/data/multi_label/l2i_i2l.json'
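

# --- Usage sketch (added for illustration; not part of the original config) ---
# Downstream training code saves the model, hyper-parameters and fine-tuned
# embedding under path_model_dir; creating it up front avoids save-time errors.
# Whether the original project creates it here or in the trainer is an assumption.
if __name__ == "__main__":
    os.makedirs(path_model_dir, exist_ok=True)
    print(path_root, path_model_dir)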
| 2.25 | 2 |
tests/test_apyhgnc.py | robertopreste/apyhgnc | 0 | 9641 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Created by <NAME>
import pytest
import asyncio
from pandas.testing import assert_frame_equal
from apyhgnc import apyhgnc
# apyhgnc.info
def test_info_searchableFields(searchable_fields):
result = apyhgnc.info().searchableFields
assert result == searchable_fields
def test_info_storedFields(stored_fields):
result = apyhgnc.info().storedFields
assert result == stored_fields
def test_info_url():
result = apyhgnc.info().url
assert result == "http://rest.genenames.org/info"
# apyhgnc.fetch
def test_fetch_symbol_znf3(df_fetch_symbol_znf3):
result = apyhgnc.fetch("symbol", "ZNF3")
assert_frame_equal(result, df_fetch_symbol_znf3)
def test_fetch_symbol_znf3_async(df_fetch_symbol_znf3):
loop = asyncio.get_event_loop()
result = loop.run_until_complete(
apyhgnc.afetch("symbol", "ZNF3")
)
assert_frame_equal(result, df_fetch_symbol_znf3)
# apyhgnc.search
def test_search_all_braf(df_search_all_braf):
result = apyhgnc.search("BRAF")
assert_frame_equal(result, df_search_all_braf)
def test_search_all_braf_async(df_search_all_braf):
loop = asyncio.get_event_loop()
result = loop.run_until_complete(
apyhgnc.asearch("BRAF")
)
assert_frame_equal(result, df_search_all_braf)
def test_search_symbol_braf(df_search_symbol_braf):
result = apyhgnc.search("symbol", "BRAF")
assert_frame_equal(result, df_search_symbol_braf)
def test_search_symbol_braf_async(df_search_symbol_braf):
loop = asyncio.get_event_loop()
result = loop.run_until_complete(
apyhgnc.asearch("symbol", "BRAF")
)
assert_frame_equal(result, df_search_symbol_braf)
def test_search_symbols_braf_znf3(df_search_symbols_braf_znf3):
result = apyhgnc.search(symbol=["BRAF", "ZNF3"])
assert_frame_equal(result, df_search_symbols_braf_znf3)
def test_search_symbols_braf_znf3_async(df_search_symbols_braf_znf3):
loop = asyncio.get_event_loop()
result = loop.run_until_complete(
apyhgnc.asearch(symbol=["BRAF", "ZNF3"])
)
assert_frame_equal(result, df_search_symbols_braf_znf3)
def test_search_symbol_and_status(df_search_symbol_and_status):
result = apyhgnc.search(symbol="BRAF", status="Approved")
assert_frame_equal(result, df_search_symbol_and_status)
def test_search_symbol_and_status_async(df_search_symbol_and_status):
loop = asyncio.get_event_loop()
result = loop.run_until_complete(
apyhgnc.asearch(symbol="BRAF", status="Approved")
)
assert_frame_equal(result, df_search_symbol_and_status)
| 2.3125 | 2 |
bdaq/tools/extract_enums.py | magnium/pybdaq | 0 | 9642 | import os.path
import argparse
from xml.etree import ElementTree as ET
class ExtractedEnum(object):
def __init__(self, tag_name, value_names):
self.tag_name = tag_name
self.value_names = value_names
def write_pxd(self, file_):
file_.write("\n ctypedef enum {}:\n".format(self.tag_name))
for name in self.value_names:
file_.write(" " * 8 + "{}\n".format(name))
def write_pyx(self, file_):
file_.write("\nclass {}(enum.Enum):\n".format(self.tag_name))
for name in self.value_names:
file_.write(" " * 4 + "{0} = _c.{0}\n".format(name))
@staticmethod
def from_xml(element, typedefs):
value_names = [v.attrib["name"] for v in element.findall("EnumValue")]
return ExtractedEnum(
typedefs[element.attrib["id"]],
value_names)
def find_enums(file_or_path):
# parse XML
tree = ET.parse(file_or_path)
# extract typedefs
typedefs = {}
for element in tree.findall("Typedef"):
typedefs[element.attrib["type"]] = element.attrib["name"]
# extract enums
enums = []
for element in tree.findall("Enumeration"):
enums.append(ExtractedEnum.from_xml(element, typedefs))
print "Found {} enums to extract.".format(len(enums))
return enums
def write_cython(pyx_file, pxd_file, enums):
# write pxd file header
pxd_file.write("# GENERATED FILE; DO NOT MODIFY\n\n")
pxd_file.write(
'cdef extern from "bdaqctrl.h" namespace "Automation::BDaq":')
# write pyx file header
pyx_file.write("# GENERATED FILE; DO NOT MODIFY\n\n")
pyx_file.write("import enum\n\n")
pyx_file.write("cimport wrapper_enums_c as _c\n\n")
# write enums
for extracted in enums:
print "Extracting definition of {}...".format(extracted.tag_name)
extracted.write_pyx(pyx_file)
extracted.write_pxd(pxd_file)
print "Done extracting definitions."
def main():
# parse script arguments
parser = argparse.ArgumentParser(
description="Extract enum definitions from header.")
parser.add_argument(
"--xml-in",
default="bdaqctrl.h.xml",
help="path to gccxml result")
parser.add_argument(
"--path-out",
default=".",
help="path to output directory")
args = parser.parse_args()
# extract enums
enums = find_enums(args.xml_in)
out_pyx_path = os.path.join(args.path_out, "wrapper_enums.pyx")
out_pxd_path = os.path.join(args.path_out, "wrapper_enums_c.pxd")
with open(out_pyx_path, "wb") as out_pyx_file:
with open(out_pxd_path, "wb") as out_pxd_file:
write_cython(out_pyx_file, out_pxd_file, enums)
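

# --- Usage sketch (added for illustration; not part of the original tool) ---
# Typical invocation: python extract_enums.py --xml-in bdaqctrl.h.xml --path-out .
# The helper below shows what write_cython() emits for a single hypothetical
# enum; "DemoMode" and its values are made-up names, not real BDaq symbols.
def _example_generation():
    import io
    demo = ExtractedEnum("DemoMode", ["DemoOff", "DemoOn"])
    pyx_buf, pxd_buf = io.StringIO(), io.StringIO()
    write_cython(pyx_buf, pxd_buf, [demo])
    return pyx_buf.getvalue(), pxd_buf.getvalue()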
if __name__ == "__main__":
main()
| 2.859375 | 3 |
Objetos/biblioteca.py | SebaB29/Python | 0 | 9643 | <filename>Objetos/biblioteca.py<gh_stars>0
class Libro:
def __init__(self, titulo, autor):
"""..."""
self.titulo = titulo
self.autor = autor
def obtener_titulo(self):
"""..."""
return str(self.titulo)
def obtener_autor(self):
"""..."""
return str(self.autor)
class Biblioteca:
def __init__(self):
"""..."""
self.coleccion = set()
def agregar_libro(self, libro):
"""..."""
self.coleccion.add((libro.titulo, libro.autor))
def sacar_libro(self, titulo, autor):
"""..."""
if not (titulo, autor) in self.coleccion:
raise Exception("El libro no esta en la colección")
self.coleccion.remove((titulo, autor))
return f"Libro: {titulo}, Autor: {autor}"
def contiene_libro(self, titulo, autor):
"""..."""
return (titulo, autor) in self.coleccion
libro = Libro("HyP", "JK")
libro1 = Libro("La Isla M", "JCortazar")
libro2 = Libro("El tunel", "Sabato")
biblio = Biblioteca()
biblio.agregar_libro(libro)
biblio.agregar_libro(libro1)
biblio.agregar_libro(libro2)
print(biblio.contiene_libro("HyP", "JK"))
print(biblio.sacar_libro("HyP", "JK"))
print(biblio.contiene_libro("HyP", "JK")) | 3.40625 | 3 |
parser_tool/tests/test_htmlgenerator.py | Harvard-ATG/visualizing_russian_tools | 2 | 9644 | <filename>parser_tool/tests/test_htmlgenerator.py<gh_stars>1-10
# -*- coding: utf-8 -*-
import unittest
from xml.etree import ElementTree as ET
from parser_tool import tokenizer
from parser_tool import htmlgenerator
class TestHtmlGenerator(unittest.TestCase):
def _maketokendict(self, **kwargs):
token_text = kwargs.get("token", "")
token_dict = {
"token": token_text,
"index": kwargs.get("index", 0),
"offset": kwargs.get("offset", 0),
"tokentype": kwargs.get("tokentype", tokenizer.TOKEN_WORD),
"canonical": kwargs.get("canonical", tokenizer.canonical(token_text)),
"form_ids": kwargs.get("form_ids", []),
"level": kwargs.get("level", ""),
}
return token_dict
def test_render_token_russian_word(self):
token_text = "п<PASSWORD>"
token_dict = self._maketokendict(token=token_text, tokentype=tokenizer.TOKEN_RUS, level="3A", form_ids=["174128"])
rendered = htmlgenerator.render_token(token_dict)
node_type, el = rendered['node_type'], rendered['element']
self.assertEqual(htmlgenerator.ELEMENT_NODE, node_type)
self.assertEqual("span", el.tag)
self.assertEqual({
"class": "word parsed level3",
"data-form-ids": ",".join(token_dict['form_ids']),
"data-level": token_dict['level']
}, el.attrib)
self.assertEqual(token_text, el.text)
def test_render_token_english_word(self):
token_text = "<PASSWORD>"
token_dict = self._maketokendict(token=token_text, tokentype=tokenizer.TOKEN_WORD)
rendered = htmlgenerator.render_token(token_dict)
node_type, el = rendered['node_type'], rendered['element']
self.assertEqual(htmlgenerator.ELEMENT_NODE, node_type)
self.assertEqual("span", el.tag)
self.assertEqual({"class": "word"}, el.attrib)
self.assertEqual(token_text, el.text)
def test_render_token_with_multiple_spaces(self):
token_text = " " * 3
expected_text = token_text.replace(" ", "\u00A0\u00A0")
token_dict = self._maketokendict(token=token_text, tokentype=tokenizer.TOKEN_SPACE)
rendered = htmlgenerator.render_token(token_dict)
self.assertEqual(htmlgenerator.TEXT_NODE, rendered['node_type'])
self.assertEqual(expected_text, rendered['text'])
def test_render_token_with_punctuation(self):
token_text = "')."
expected_text = token_text
token_dict = self._maketokendict(token=token_text, tokentype=tokenizer.TOKEN_SPACE)
rendered = htmlgenerator.render_token(token_dict)
self.assertEqual(htmlgenerator.TEXT_NODE, rendered['node_type'])
self.assertEqual(expected_text, rendered['text'])
def test_tokens_with_leading_punct_to_html(self):
# (собака) dog
tokens = [
self._maketokendict(token="(", tokentype=tokenizer.TOKEN_PUNCT),
self._maketokendict(token="собака", tokentype=tokenizer.TOKEN_RUS, level="1E", form_ids=["7599"]),
self._maketokendict(token=")", tokentype=tokenizer.TOKEN_RUS),
self._maketokendict(token=" ", tokentype=tokenizer.TOKEN_SPACE),
self._maketokendict(token="dog", tokentype=tokenizer.TOKEN_WORD),
]
html = htmlgenerator.tokens2html(tokens)
expected_html = '<pre class="words">(<span data-form-ids="7599" data-level="1E" class="word parsed level1">собака</span><span class="word">)</span> <span class="word">dog</span></pre>'
self.assertEqual(expected_html, html)
def test_tokens2html(self):
tokens = [
self._maketokendict(token="A", tokentype=tokenizer.TOKEN_WORD),
self._maketokendict(token=" ", tokentype=tokenizer.TOKEN_SPACE),
self._maketokendict(token="первоку́рсник", tokentype=tokenizer.TOKEN_RUS, level="3A", form_ids=["174128"]),
self._maketokendict(token=" ", tokentype=tokenizer.TOKEN_SPACE),
self._maketokendict(token="|", tokentype=tokenizer.TOKEN_PUNCT),
self._maketokendict(token="первоку́рсница", tokentype=tokenizer.TOKEN_RUS, level="3A", form_ids=["174128"]),
self._maketokendict(token=" ", tokentype=tokenizer.TOKEN_SPACE),
]
html = htmlgenerator.tokens2html(tokens)
root = ET.fromstring(html)
# Check the root element (e.g. container)
self.assertEqual("pre", root.tag)
self.assertEqual({"class": "words"}, root.attrib)
# Check that we have the expected number of child elements (1 element for each word or russian token)
expected_word_elements = sum([1 for t in tokens if t['tokentype'] in (tokenizer.TOKEN_WORD, tokenizer.TOKEN_RUS)])
self.assertEqual(expected_word_elements, len(root))
# Now check the first few tokens...
# 1) Check that the first child contains the text of the first token
self.assertEqual(tokens[0]['token'], root[0].text)
self.assertEqual("span", root[0].tag)
self.assertEqual({"class": "word"}, root[0].attrib)
# 2) Check that the first child's tail contains the text of the second token since it's a space token
self.assertEqual(tokens[1]['token'], root[0].tail)
# 3) Check that the second child contains the text of the third token
self.assertEqual(tokens[2]['token'], root[1].text)
self.assertEqual("span", root[1].tag)
self.assertEqual({'class': 'word parsed level3', 'data-form-ids': '174128', 'data-level': '3A'}, root[1].attrib)
| 2.671875 | 3 |
taiga/hooks/gitlab/migrations/0002_auto_20150703_1102.py | threefoldtech/Threefold-Circles | 1 | 9645 | <reponame>threefoldtech/Threefold-Circles
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.core.files import File
def update_gitlab_system_user_photo_to_v2(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
User = apps.get_model("users", "User")
db_alias = schema_editor.connection.alias
try:
user = User.objects.using(db_alias).get(username__startswith="gitlab-",
is_active=False,
is_system=True)
f = open("taiga/hooks/gitlab/migrations/logo-v2.png", "rb")
user.photo.save("logo.png", File(f))
user.save()
except User.DoesNotExist:
pass
def update_gitlab_system_user_photo_to_v1(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
User = apps.get_model("users", "User")
db_alias = schema_editor.connection.alias
try:
user = User.objects.using(db_alias).get(username__startswith="gitlab-",
is_active=False,
is_system=True)
f = open("taiga/hooks/gitlab/migrations/logo.png", "rb")
user.photo.save("logo.png", File(f))
user.save()
except User.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('gitlab', '0001_initial'),
('users', '0011_user_theme'),
]
operations = [
migrations.RunPython(update_gitlab_system_user_photo_to_v2,
update_gitlab_system_user_photo_to_v1),
]
| 2.03125 | 2 |
external_plugin_deps.bzl | michalgagat/plugins_oauth | 143 | 9646 | <reponame>michalgagat/plugins_oauth
load("//tools/bzl:maven_jar.bzl", "maven_jar")
def external_plugin_deps(omit_commons_codec = True):
JACKSON_VERS = "2.10.2"
maven_jar(
name = "scribejava-core",
artifact = "com.github.scribejava:scribejava-core:6.9.0",
sha1 = "ed761f450d8382f75787e8fee9ae52e7ec768747",
)
maven_jar(
name = "jackson-annotations",
artifact = "com.fasterxml.jackson.core:jackson-annotations:" + JACKSON_VERS,
sha1 = "3a13b6105946541b8d4181a0506355b5fae63260",
)
maven_jar(
name = "jackson-databind",
artifact = "com.fasterxml.jackson.core:jackson-databind:" + JACKSON_VERS,
sha1 = "0528de95f198afafbcfb0c09d2e43b6e0ea663ec",
deps = [
"@jackson-annotations//jar",
],
)
if not omit_commons_codec:
maven_jar(
name = "commons-codec",
artifact = "commons-codec:commons-codec:1.4",
sha1 = "4216af16d38465bbab0f3dff8efa14204f7a399a",
)
| 1.578125 | 2 |
11.-Operaciones_entero_con_float_python.py | emiliocarcanobringas/11.-Operaciones_entero_con_float_python | 0 | 9647 | <filename>11.-Operaciones_entero_con_float_python.py
# This program shows the sum of two variables, of type int and float
print("This program shows the sum of two variables, of type int and float")
print("It also shows that the variable holding the result is of type float")
numero1 = 7
numero2 = 3.1416
sumadeambos = numero1 + numero2
print("The result of the sum is: ")
print(sumadeambos)
print(type(sumadeambos))
# This program was written by <NAME>
| 3.90625 | 4 |
main_gat.py | basiralab/RG-Select | 1 | 9648 | # -*- coding: utf-8 -*-
from sklearn import preprocessing
from torch.autograd import Variable
from models_gat import GAT
import os
import torch
import numpy as np
import argparse
import pickle
import sklearn.metrics as metrics
import cross_val
import time
import random
torch.manual_seed(0)
np.random.seed(0)
random.seed(0)
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
def evaluate(dataset, model_GAT, args, threshold_value, model_name):
"""
Parameters
----------
dataset : dataloader (dataloader for the validation/test dataset).
    model_GAT : nn model (GAT model).
args : arguments
threshold_value : float (threshold for adjacency matrices).
Description
----------
    This method performs the evaluation of the model on the test/validation dataset.
Returns
-------
test accuracy.
"""
model_GAT.eval()
labels = []
preds = []
for batch_idx, data in enumerate(dataset):
adj = Variable(data['adj'].float(), requires_grad=False).to(device)
labels.append(data['label'].long().numpy())
adj = torch.squeeze(adj)
features = np.identity(adj.shape[0])
features = Variable(torch.from_numpy(features).float(), requires_grad=False).cpu()
if args.threshold in ["median", "mean"]:
adj = torch.where(adj > threshold_value, torch.tensor([1.0]), torch.tensor([0.0]))
ypred = model_GAT(features, adj)
_, indices = torch.max(ypred, 1)
preds.append(indices.cpu().data.numpy())
labels = np.hstack(labels)
preds = np.hstack(preds)
simple_r = {'labels':labels,'preds':preds}
with open("./gat/Labels_and_preds/"+model_name+".pickle", 'wb') as f:
pickle.dump(simple_r, f)
result = {'prec': metrics.precision_score(labels, preds, average='macro'),
'recall': metrics.recall_score(labels, preds, average='macro'),
'acc': metrics.accuracy_score(labels, preds),
'F1': metrics.f1_score(labels, preds, average="micro")}
if args.evaluation_method == 'model assessment':
name = 'Test'
if args.evaluation_method == 'model selection':
name = 'Validation'
print(name, " accuracy:", result['acc'])
return result['acc']
def minmax_sc(x):
min_max_scaler = preprocessing.MinMaxScaler()
x = min_max_scaler.fit_transform(x)
return x
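# Worked example (illustrative only, not part of the original script): the
# MinMaxScaler used above rescales each column of an adjacency matrix
# independently to the [0, 1] range, so the largest entry of a column becomes
# 1.0 and the smallest becomes 0.0.
def _minmax_sc_example():
    a = np.array([[0.0, 2.0],
                  [5.0, 4.0]])
    assert np.allclose(minmax_sc(a), [[0.0, 0.0], [1.0, 1.0]])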
def train(args, train_dataset, val_dataset, model_GAT, threshold_value, model_name):
"""
Parameters
----------
args : arguments
train_dataset : dataloader (dataloader for the validation/test dataset).
val_dataset : dataloader (dataloader for the validation/test dataset).
model_GAT : nn model (GAT model).
threshold_value : float (threshold for adjacency matrices).
Description
----------
    This method performs the training of the model on the train dataset and calls evaluate() for evaluation.
Returns
-------
test accuracy.
"""
params = list(model_GAT.parameters())
optimizer = torch.optim.Adam(params, lr=args.lr, weight_decay=args.weight_decay)
test_accs = []
train_loss=[]
val_acc=[]
for epoch in range(args.num_epochs):
print("Epoch ",epoch)
print("Size of Training Set:" + str(len(train_dataset)))
print("Size of Validation Set:" + str(len(val_dataset)))
model_GAT.train()
total_time = 0
avg_loss = 0.0
preds = []
labels = []
for batch_idx, data in enumerate(train_dataset):
begin_time = time.time()
adj = Variable(data['adj'].float(), requires_grad=False).to(device)
label = Variable(data['label'].long()).to(device)
#adj_id = Variable(data['id'].int()).to(device)
adj = torch.squeeze(adj)
features = np.identity(adj.shape[0])
features = Variable(torch.from_numpy(features).float(), requires_grad=False).cpu()
if args.threshold in ["median", "mean"]:
adj = torch.where(adj > threshold_value, torch.tensor([1.0]), torch.tensor([0.0]))
ypred = model_GAT(features, adj)
_, indices = torch.max(ypred, 1)
preds.append(indices.cpu().data.numpy())
labels.append(data['label'].long().numpy())
loss = model_GAT.loss(ypred, label)
model_GAT.zero_grad()
loss.backward()
#nn.utils.clip_grad_norm_(model_DIFFPOOL.parameters(), args.clip)
optimizer.step()
avg_loss += loss
elapsed = time.time() - begin_time
total_time += elapsed
if epoch == args.num_epochs-1:
model_GAT.is_trained = True
preds = np.hstack(preds)
labels = np.hstack(labels)
print("Train accuracy : ", np.mean( preds == labels ))
test_acc = evaluate(val_dataset, model_GAT, args, threshold_value, model_name)
print('Avg loss: ', avg_loss, '; epoch time: ', total_time)
test_accs.append(test_acc)
train_loss.append(avg_loss)
val_acc.append(test_acc)
path = './gat/weights/W_'+model_name+'.pickle'
if os.path.exists(path):
os.remove(path)
os.rename('GAT_W.pickle',path)
los_p = {'loss':train_loss}
with open("./gat/training_loss/Training_loss_"+model_name+".pickle", 'wb') as f:
pickle.dump(los_p, f)
torch.save(model_GAT,"./gat/models/GAT_"+model_name+".pt")
return test_acc
def load_data(args):
"""
Parameters
----------
args : arguments
Description
----------
    This method loads the adjacency matrices representing the args.view-th view of the dataset
    Returns
    -------
    List of dictionaries {adj, label, id}
"""
#Load graphs and labels
with open('data/'+args.dataset+'/'+args.dataset+'_edges','rb') as f:
multigraphs = pickle.load(f)
with open('data/'+args.dataset+'/'+args.dataset+'_labels','rb') as f:
labels = pickle.load(f)
adjacencies = [multigraphs[i][:,:,args.view] for i in range(len(multigraphs))]
#Normalize inputs
if args.NormalizeInputGraphs==True:
for subject in range(len(adjacencies)):
adjacencies[subject] = minmax_sc(adjacencies[subject])
#Create List of Dictionaries
G_list=[]
for i in range(len(labels)):
G_element = {"adj": adjacencies[i],"label": labels[i],"id": i,}
G_list.append(G_element)
return G_list
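# Illustrative sketch of the assumed on-disk layout (the pickled files are not
# shipped with this script): each dataset is expected to hold a list of
# multigraph tensors of shape (n_nodes, n_nodes, n_views) plus a parallel list
# of labels, from which load_data() slices out a single view per subject.
def _make_toy_dataset(n_subjects=4, n_nodes=5, n_views=2):
    multigraphs = [np.random.rand(n_nodes, n_nodes, n_views) for _ in range(n_subjects)]
    labels = [i % 2 for i in range(n_subjects)]
    return multigraphs, labels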
def arg_parse(dataset, view, num_shots=2, cv_number=5):
"""
arguments definition method
"""
parser = argparse.ArgumentParser(description='Graph Classification')
parser.add_argument('--mode', type=str, default='train', choices=['train', 'test'])
parser.add_argument('--v', type=str, default=1)
parser.add_argument('--data', type=str, default='Sample_dataset', choices = [ f.path[5:] for f in os.scandir("data") if f.is_dir() ])
parser.add_argument('--dataset', type=str, default=dataset,
help='Dataset')
parser.add_argument('--view', type=int, default=view,
help = 'view index in the dataset')
parser.add_argument('--num_epochs', type=int, default=1, #50
help='Training Epochs')
parser.add_argument('--num_shots', type=int, default=num_shots, #100
help='number of shots')
parser.add_argument('--cv_number', type=int, default=cv_number,
help='number of validation folds.')
parser.add_argument('--NormalizeInputGraphs', default=False, action='store_true',
help='Normalize Input adjacency matrices of graphs')
parser.add_argument('--evaluation_method', type=str, default='model assessment',
help='evaluation method, possible values : model selection, model assessment')
parser.add_argument('--threshold', dest='threshold', default='mean',
help='threshold the graph adjacency matrix. Possible values: no_threshold, median, mean')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='Disables CUDA training.')
parser.add_argument('--num-classes', dest='num_classes', type=int, default=2,
help='Number of label classes')
parser.add_argument('--lr', type=float, default=0.001,
help='Initial learning rate.')
parser.add_argument('--weight_decay', type=float, default=5e-4,
help='Weight decay (L2 loss on parameters).')
parser.add_argument('--hidden', type=int, default=8,
help='Number of hidden units.')
parser.add_argument('--nb_heads', type=int, default=8,
help='Number of head attentions.')
parser.add_argument('--dropout', type=float, default=0.8,
help='Dropout rate (1 - keep probability).')
parser.add_argument('--alpha', type=float, default=0.2,
help='Alpha for the leaky_relu.')
return parser.parse_args()
def benchmark_task(args, model_name):
"""
Parameters
----------
args : Arguments
Description
----------
Initiates the model and performs train/test or train/validation splits and calls train() to execute training and evaluation.
Returns
-------
test_accs : test accuracies (list)
"""
G_list = load_data(args)
num_nodes = G_list[0]['adj'].shape[0]
test_accs = []
folds = cross_val.stratify_splits(G_list,args)
[random.shuffle(folds[i]) for i in range(len(folds))]
for i in range(args.cv_number):
train_set, validation_set, test_set = cross_val.datasets_splits(folds, args, i)
if args.evaluation_method =='model selection':
train_dataset, val_dataset, threshold_value = cross_val.model_selection_split(train_set, validation_set, args)
if args.evaluation_method =='model assessment':
train_dataset, val_dataset, threshold_value = cross_val.model_assessment_split(train_set, validation_set, test_set, args)
print("CV : ",i)
model_GAT = GAT(nfeat=num_nodes,
nhid=args.hidden,
nclass=args.num_classes,
dropout=args.dropout,
nheads=args.nb_heads,
alpha=args.alpha)
test_acc = train(args, train_dataset, val_dataset, model_GAT, threshold_value, model_name+"_CV_"+str(i)+"_view_"+str(args.view))
test_accs.append(test_acc)
return test_accs
def test_scores(dataset, view, model_name, cv_number):
args = arg_parse(dataset, view, cv_number=cv_number)
print("Main : ",args)
test_accs = benchmark_task(args, model_name)
print("test accuracies ",test_accs)
return test_accs
def two_shot_trainer(dataset, view, num_shots):
args = arg_parse(dataset, view, num_shots=num_shots)
torch.manual_seed(0)
np.random.seed(0)
random.seed(0)
start = time.time()
for i in range(args.num_shots):
model = "gat"
model_name = "Few_Shot_"+dataset+"_"+model + str(i)
print("Shot : ",i)
with open('./Two_shot_samples_views/'+dataset+'_view_'+str(view)+'_shot_'+str(i)+'_train','rb') as f:
train_set = pickle.load(f)
with open('./Two_shot_samples_views/'+dataset+'_view_'+str(view)+'_shot_'+str(i)+'_test','rb') as f:
test_set = pickle.load(f)
num_nodes = train_set[0]['adj'].shape[0]
model_GAT = GAT(nfeat=num_nodes,
nhid=args.hidden,
nclass=args.num_classes,
dropout=args.dropout,
nheads=args.nb_heads,
alpha=args.alpha)
train_dataset, val_dataset, threshold_value = cross_val.two_shot_loader(train_set, test_set, args)
test_acc = train(args, train_dataset, val_dataset, model_GAT, threshold_value, model_name+"_view_"+str(view))
print("Test accuracy:"+str(test_acc))
    print('load data using ------>', time.time()-start)
| 2.5 | 2 |
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Python/piexif/_dump.py | jeikabu/lumberyard | 8 | 9649 | import copy
import numbers
import struct
from ._common import *
from ._exif import *
TIFF_HEADER_LENGTH = 8
def dump(exif_dict_original):
"""
    py:function:: piexif.dump(exif_dict)
    Return exif as bytes.
    :param dict exif_dict: Exif data({"0th":dict, "Exif":dict, "GPS":dict, "Interop":dict, "1st":dict, "thumbnail":bytes})
:return: Exif
:rtype: bytes
"""
exif_dict = copy.deepcopy(exif_dict_original)
header = b"Exif\x00\x00\x4d\x4d\x00\x2a\x00\x00\x00\x08"
exif_is = False
gps_is = False
interop_is = False
first_is = False
if "0th" in exif_dict:
zeroth_ifd = exif_dict["0th"]
else:
zeroth_ifd = {}
if (("Exif" in exif_dict) and len(exif_dict["Exif"]) or
("Interop" in exif_dict) and len(exif_dict["Interop"]) ):
zeroth_ifd[ImageIFD.ExifTag] = 1
exif_is = True
exif_ifd = exif_dict["Exif"]
if ("Interop" in exif_dict) and len(exif_dict["Interop"]):
            exif_ifd[ExifIFD.InteroperabilityTag] = 1
interop_is = True
interop_ifd = exif_dict["Interop"]
        elif ExifIFD.InteroperabilityTag in exif_ifd:
exif_ifd.pop(ExifIFD.InteroperabilityTag)
elif ImageIFD.ExifTag in zeroth_ifd:
zeroth_ifd.pop(ImageIFD.ExifTag)
if ("GPS" in exif_dict) and len(exif_dict["GPS"]):
zeroth_ifd[ImageIFD.GPSTag] = 1
gps_is = True
gps_ifd = exif_dict["GPS"]
elif ImageIFD.GPSTag in zeroth_ifd:
zeroth_ifd.pop(ImageIFD.GPSTag)
if (("1st" in exif_dict) and
("thumbnail" in exif_dict) and
(exif_dict["thumbnail"] is not None)):
first_is = True
exif_dict["1st"][ImageIFD.JPEGInterchangeFormat] = 1
exif_dict["1st"][ImageIFD.JPEGInterchangeFormatLength] = 1
first_ifd = exif_dict["1st"]
zeroth_set = _dict_to_bytes(zeroth_ifd, "0th", 0)
zeroth_length = (len(zeroth_set[0]) + exif_is * 12 + gps_is * 12 + 4 +
len(zeroth_set[1]))
if exif_is:
exif_set = _dict_to_bytes(exif_ifd, "Exif", zeroth_length)
exif_length = len(exif_set[0]) + interop_is * 12 + len(exif_set[1])
else:
exif_bytes = b""
exif_length = 0
if gps_is:
gps_set = _dict_to_bytes(gps_ifd, "GPS", zeroth_length + exif_length)
gps_bytes = b"".join(gps_set)
gps_length = len(gps_bytes)
else:
gps_bytes = b""
gps_length = 0
if interop_is:
offset = zeroth_length + exif_length + gps_length
interop_set = _dict_to_bytes(interop_ifd, "Interop", offset)
interop_bytes = b"".join(interop_set)
interop_length = len(interop_bytes)
else:
interop_bytes = b""
interop_length = 0
if first_is:
offset = zeroth_length + exif_length + gps_length + interop_length
first_set = _dict_to_bytes(first_ifd, "1st", offset)
thumbnail = _get_thumbnail(exif_dict["thumbnail"])
thumbnail_max_size = 64000
if len(thumbnail) > thumbnail_max_size:
raise ValueError("Given thumbnail is too large. max 64kB")
else:
first_bytes = b""
if exif_is:
pointer_value = TIFF_HEADER_LENGTH + zeroth_length
pointer_str = struct.pack(">I", pointer_value)
key = ImageIFD.ExifTag
key_str = struct.pack(">H", key)
type_str = struct.pack(">H", TYPES.Long)
length_str = struct.pack(">I", 1)
exif_pointer = key_str + type_str + length_str + pointer_str
else:
exif_pointer = b""
if gps_is:
pointer_value = TIFF_HEADER_LENGTH + zeroth_length + exif_length
pointer_str = struct.pack(">I", pointer_value)
key = ImageIFD.GPSTag
key_str = struct.pack(">H", key)
type_str = struct.pack(">H", TYPES.Long)
length_str = struct.pack(">I", 1)
gps_pointer = key_str + type_str + length_str + pointer_str
else:
gps_pointer = b""
if interop_is:
pointer_value = (TIFF_HEADER_LENGTH +
zeroth_length + exif_length + gps_length)
pointer_str = struct.pack(">I", pointer_value)
key = ExifIFD.InteroperabilityTag
key_str = struct.pack(">H", key)
type_str = struct.pack(">H", TYPES.Long)
length_str = struct.pack(">I", 1)
interop_pointer = key_str + type_str + length_str + pointer_str
else:
interop_pointer = b""
if first_is:
pointer_value = (TIFF_HEADER_LENGTH + zeroth_length +
exif_length + gps_length + interop_length)
first_ifd_pointer = struct.pack(">L", pointer_value)
thumbnail_pointer = (pointer_value + len(first_set[0]) + 24 +
4 + len(first_set[1]))
thumbnail_p_bytes = (b"\x02\x01\x00\x04\x00\x00\x00\x01" +
struct.pack(">L", thumbnail_pointer))
thumbnail_length_bytes = (b"\x02\x02\x00\x04\x00\x00\x00\x01" +
struct.pack(">L", len(thumbnail)))
first_bytes = (first_set[0] + thumbnail_p_bytes +
thumbnail_length_bytes + b"\x00\x00\x00\x00" +
first_set[1] + thumbnail)
else:
first_ifd_pointer = b"\x00\x00\x00\x00"
zeroth_bytes = (zeroth_set[0] + exif_pointer + gps_pointer +
first_ifd_pointer + zeroth_set[1])
if exif_is:
exif_bytes = exif_set[0] + interop_pointer + exif_set[1]
return (header + zeroth_bytes + exif_bytes + gps_bytes +
interop_bytes + first_bytes)
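# Hedged usage sketch (illustrative only, not part of the library): dump()
# expects a dict keyed by IFD name; leaving an IFD empty simply omits the
# corresponding block.  The tag constant ImageIFD.Make (an Ascii tag) comes
# from the star import of ._exif above.
def _dump_example():
    exif_dict = {
        "0th": {ImageIFD.Make: "ExampleCam"},
        "Exif": {},
        "GPS": {},
        "1st": {},
        "thumbnail": None,
    }
    exif_bytes = dump(exif_dict)
    assert exif_bytes.startswith(b"Exif\x00\x00")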
def _get_thumbnail(jpeg):
segments = split_into_segments(jpeg)
while (b"\xff\xe0" <= segments[1][0:2] <= b"\xff\xef"):
segments.pop(1)
thumbnail = b"".join(segments)
return thumbnail
def _pack_byte(*args):
return struct.pack("B" * len(args), *args)
def _pack_signed_byte(*args):
return struct.pack("b" * len(args), *args)
def _pack_short(*args):
return struct.pack(">" + "H" * len(args), *args)
def _pack_signed_short(*args):
return struct.pack(">" + "h" * len(args), *args)
def _pack_long(*args):
return struct.pack(">" + "L" * len(args), *args)
def _pack_slong(*args):
return struct.pack(">" + "l" * len(args), *args)
def _pack_float(*args):
return struct.pack(">" + "f" * len(args), *args)
def _pack_double(*args):
return struct.pack(">" + "d" * len(args), *args)
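" + "d"">
# Worked example (added for illustration): the helpers above serialise values
# in big-endian TIFF byte order, e.g. a Short of 256 packs to b"\x01\x00" and
# a Long of 1 packs to b"\x00\x00\x00\x01".
def _pack_example():
    assert _pack_short(256) == b"\x01\x00"
    assert _pack_long(1) == b"\x00\x00\x00\x01"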
def _value_to_bytes(raw_value, value_type, offset):
four_bytes_over = b""
value_str = b""
if value_type == TYPES.Byte:
length = len(raw_value)
if length <= 4:
value_str = (_pack_byte(*raw_value) +
b"\x00" * (4 - length))
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_byte(*raw_value)
elif value_type == TYPES.Short:
length = len(raw_value)
if length <= 2:
value_str = (_pack_short(*raw_value) +
b"\x00\x00" * (2 - length))
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_short(*raw_value)
elif value_type == TYPES.Long:
length = len(raw_value)
if length <= 1:
value_str = _pack_long(*raw_value)
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_long(*raw_value)
elif value_type == TYPES.SLong:
length = len(raw_value)
if length <= 1:
value_str = _pack_slong(*raw_value)
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_slong(*raw_value)
elif value_type == TYPES.Ascii:
try:
new_value = raw_value.encode("latin1") + b"\x00"
except:
try:
new_value = raw_value + b"\x00"
except TypeError:
raise ValueError("Got invalid type to convert.")
length = len(new_value)
if length > 4:
value_str = struct.pack(">I", offset)
four_bytes_over = new_value
else:
value_str = new_value + b"\x00" * (4 - length)
elif value_type == TYPES.Rational:
if isinstance(raw_value[0], numbers.Integral):
length = 1
num, den = raw_value
new_value = struct.pack(">L", num) + struct.pack(">L", den)
elif isinstance(raw_value[0], tuple):
length = len(raw_value)
new_value = b""
for n, val in enumerate(raw_value):
num, den = val
new_value += (struct.pack(">L", num) +
struct.pack(">L", den))
value_str = struct.pack(">I", offset)
four_bytes_over = new_value
elif value_type == TYPES.SRational:
if isinstance(raw_value[0], numbers.Integral):
length = 1
num, den = raw_value
new_value = struct.pack(">l", num) + struct.pack(">l", den)
elif isinstance(raw_value[0], tuple):
length = len(raw_value)
new_value = b""
for n, val in enumerate(raw_value):
num, den = val
new_value += (struct.pack(">l", num) +
struct.pack(">l", den))
value_str = struct.pack(">I", offset)
four_bytes_over = new_value
elif value_type == TYPES.Undefined:
length = len(raw_value)
if length > 4:
value_str = struct.pack(">I", offset)
try:
four_bytes_over = b"" + raw_value
except TypeError:
raise ValueError("Got invalid type to convert.")
else:
try:
value_str = raw_value + b"\x00" * (4 - length)
except TypeError:
raise ValueError("Got invalid type to convert.")
elif value_type == TYPES.SByte: # Signed Byte
length = len(raw_value)
if length <= 4:
value_str = (_pack_signed_byte(*raw_value) +
b"\x00" * (4 - length))
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_signed_byte(*raw_value)
elif value_type == TYPES.SShort: # Signed Short
length = len(raw_value)
if length <= 2:
value_str = (_pack_signed_short(*raw_value) +
b"\x00\x00" * (2 - length))
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_signed_short(*raw_value)
elif value_type == TYPES.Float:
length = len(raw_value)
if length <= 1:
value_str = _pack_float(*raw_value)
else:
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_float(*raw_value)
elif value_type == TYPES.DFloat: # Double
length = len(raw_value)
value_str = struct.pack(">I", offset)
four_bytes_over = _pack_double(*raw_value)
length_str = struct.pack(">I", length)
return length_str, value_str, four_bytes_over
def _dict_to_bytes(ifd_dict, ifd, ifd_offset):
tag_count = len(ifd_dict)
entry_header = struct.pack(">H", tag_count)
if ifd in ("0th", "1st"):
entries_length = 2 + tag_count * 12 + 4
else:
entries_length = 2 + tag_count * 12
entries = b""
values = b""
for n, key in enumerate(sorted(ifd_dict)):
if (ifd == "0th") and (key in (ImageIFD.ExifTag, ImageIFD.GPSTag)):
continue
elif (ifd == "Exif") and (key == ExifIFD.InteroperabilityTag):
continue
elif (ifd == "1st") and (key in (ImageIFD.JPEGInterchangeFormat, ImageIFD.JPEGInterchangeFormatLength)):
continue
raw_value = ifd_dict[key]
key_str = struct.pack(">H", key)
value_type = TAGS[ifd][key]["type"]
type_str = struct.pack(">H", value_type)
four_bytes_over = b""
if isinstance(raw_value, numbers.Integral) or isinstance(raw_value, float):
raw_value = (raw_value,)
offset = TIFF_HEADER_LENGTH + entries_length + ifd_offset + len(values)
try:
length_str, value_str, four_bytes_over = _value_to_bytes(raw_value,
value_type,
offset)
except ValueError:
raise ValueError(
'"dump" got wrong type of exif value.\n' +
'{0} in {1} IFD. Got as {2}.'.format(key, ifd, type(ifd_dict[key]))
)
entries += key_str + type_str + length_str + value_str
values += four_bytes_over
return (entry_header + entries, values)
| 2.46875 | 2 |
portal.py | mrahman4782/portalhoop | 0 | 9650 | import pygame
import random
from pygame import *
pygame.init()
width, height = 740, 500
screen = pygame.display.set_mode((width, height))
player = [pygame.transform.scale(pygame.image.load("Resources/Balljump-1(2).png"), (100,100)), pygame.transform.scale(pygame.image.load("Resources/Balljump-1.png"),(100,100))]
launch = [pygame.transform.scale(pygame.image.load("Resources/Balljump-1.png"), (100,100)), pygame.transform.scale(pygame.image.load("Resources/Balljump-1(2).png"), (100,100)),pygame.transform.scale(pygame.image.load("Resources/Balljump-2.png"), (100,100)),pygame.transform.scale(pygame.image.load("Resources/Balljump-3.png"), (100,100)), pygame.transform.scale(pygame.image.load("Resources/Balljump-4.png"),(100,100))]
shoot = [pygame.transform.scale(pygame.image.load("Resources/Balljump-5.png"), (100, 100)), pygame.transform.scale(pygame.image.load("Resources/Balljump-6.png"), (100, 100))]
ball = pygame.transform.scale(pygame.image.load("Resources/ball.png"), (100,100))
blue = (0, 0, 128)
white = (255, 255, 255)
janimation, danimation, movable, motionactivate, limit_reached, nojump = False, False, False, False, False, False
jumplock = True
ballrelease, ballregain = False, False
fr = pygame.time.Clock()
c = 0
i = 0
p = 0
x, y = 0, 300
score = 0
a, b, rpos = 0, 0, 0
xpos, ypos = 17, 313
# Background image source: https://www.freepik.com/free-vector/floral-ornamental-abstract-background_6189902.htm#page=1&query=black%20background&position=40
background = pygame.image.load("Resources/back.jpg")
gamestart = False
def basketball():
#Draw basketball
global rpos, xpos, ypos, ballregain
if gamestart == True and ballrelease == False:
if nojump == True:
if c % 2 == 0:
screen.blit(ball, (xpos, ypos + 24))
if c % 2 == 1:
screen.blit(ball, (xpos + 2 , ypos ))
if nojump == False and motionactivate == True:
if p // 4 == 0:
screen.blit(ball, (xpos, ypos))
if p // 4 == 1:
screen.blit(ball, (xpos-2, ypos-5))
if p // 4 == 2:
screen.blit(ball, (xpos-2, ypos-7))
if p // 4 == 3:
screen.blit(ball, (xpos-2, ypos-11))
if p// 4 == 4:
screen.blit(ball, (xpos-2, ypos-13))
if janimation == True:
rpos = y -13
screen.blit(ball, (xpos, rpos))
rposNew = 400 - rpos
if gamestart == True and ballrelease == True:
if rpos <= 325:
screen.blit(ball, (xpos, rpos))
if xpos <= 700:
ballregain = False
xpos += (rposNew / 20)
print("rpos is: " + str(rpos) + " xpos is: " + str(xpos))
rpos = (-1*((xpos/600)**2))+((xpos)/150)+rpos
if xpos > 700 or rpos > 325:
xpos = 17
ballregain = True
def player_animations():
# Animations while the user makes no input
global c
global player
global i
if nojump == True:
if c % 2 == 0 and i<= 10:
if i<10:
screen.blit(player[c], (0, 300))
i += 1
if i == 10:
c += 1
i += 1
elif c % 2 == 1 and i<= 20:
if i>10 and i<20:
screen.blit(player[c], (0, 300))
i += 1
if i == 20:
c -= 1
i += 1
elif i>20:
i = 0
screen.blit(player[c], (0, 300))
if nojump == False:
screen.fill(0)
def screen_text():
global score
global nojump
global movable
if nojump == True:
font = pygame.font.Font("Resources/android.ttf", 16)
text2 = font.render("Hold space to throw the ball", True, white)
textRect2 = text2.get_rect()
textRect2.center = (width // 2, height // 2 + 200)
screen.blit(text2, textRect2)
movable = True
font = pygame.font.Font("Resources/android.ttf", 16)
text2 = font.render("Score: "+ str(score), True, white)
textRect2 = text2.get_rect()
textRect2.center = (width // 2 - 300, height // 2 - 200)
screen.blit(text2, textRect2)
def player_jump():
# Initial animations before the player jumps
global p, nojump, movable, x, y, janimation, danimation, a, b, motionactivate, limit_reached
global jumplock, ballrelease, ballregain
if movable == True and keypress[K_SPACE]:
#print(pygame.time.get_ticks())
motionactivate = True
#print(nojump)
#if p >= 19:
# p = 0
if motionactivate == True:
#screen.fill(0)
nojump = False
if p < 21:
screen.blit(launch[p // 4], (0, 300))
p += 1
if p == 20:
a = pygame.time.get_ticks()
janimation = True
p += 1
#elif keypress[K_SPACE]:
# what to do when jump is completed
if janimation == True and limit_reached == False:
if keypress[K_SPACE] and pygame.KEYDOWN and jumplock == True:
b = pygame.time.get_ticks()
if y > 239:
y = ((b - a) / -25) + 310
if y >= 305:
screen.fill(0)
screen.blit(shoot[0], (x, y))
if y < 305 and y > 240:
screen.blit(shoot[1], (x,y))
if y <= 239:
screen.blit(shoot[0], (x, y))
danimation = True
limit_reached = True
#print(danimation)
if event.type == pygame.KEYUP:
if event.key == K_SPACE:
danimation = True
motionactivate = False
ballrelease = True
if danimation == True:
jumplock = False
if danimation == True or limit_reached == True:
#print("poopc "+ str(y))
if y < 310:
screen.blit(shoot[0], (x, y))
y += 2
#
# print("zag")
#print("poop: " + str(pygame.KEYUP) + " key down is: " + str(pygame.KEYDOWN))
if y >= 310:
nojump = True
danimation = False
janimation = False
movable = False
limit_reached = False
p = 0
jumplock = True
if ballregain == True:
ballrelease = False
#print("y value is: "+ str(y)+ " a is: "+ str(a) + " b is: "+ str(b))
while 1:
keypress = pygame.key.get_pressed()
fr.tick(30)
screen.fill(0)
if keypress[K_RETURN]:
gamestart = True
if gamestart == False:
#screen.fill(0)
screen.blit(background, (0,0))
# Draw opening texts
font = pygame.font.Font("Resources/android.ttf", 64)
text = font.render("Portal Hoop", True, white)
textRect = text.get_rect()
textRect.center = (width // 2, height // 2 - 100)
screen.blit(text, textRect)
font = pygame.font.Font("Resources/android.ttf", 18)
text2 = font.render("Press Return to start", True, white)
textRect2 = text2.get_rect()
textRect2.center = (width // 2, height // 2 + 100)
screen.blit(text2, textRect2)
nojump = True
    # Check that the game has started before running the in-game logic
if gamestart == True:
#screen.fill(0)
player_animations()
player_jump()
basketball()
screen_text()
pygame.display.flip()
pygame.display.set_caption("Portal Hoop")
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit(0)
| 2.78125 | 3 |
datadog_cluster_agent/tests/test_datadog_cluster_agent.py | tdimnet/integrations-core | 1 | 9651 | <gh_stars>1-10
# (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from typing import Any, Dict
from datadog_checks.base.stubs.aggregator import AggregatorStub
from datadog_checks.datadog_cluster_agent import DatadogClusterAgentCheck
from datadog_checks.dev.utils import get_metadata_metrics
NAMESPACE = 'datadog.cluster_agent'
METRICS = [
'admission_webhooks.certificate_expiry',
'admission_webhooks.mutation_attempts',
'admission_webhooks.mutation_errors',
'admission_webhooks.reconcile_errors',
'admission_webhooks.reconcile_success',
'admission_webhooks.webhooks_received',
'aggregator.flush',
'aggregator.processed',
'api_requests',
'cluster_checks.busyness',
'cluster_checks.configs_dangling',
'cluster_checks.configs_dispatched',
'cluster_checks.failed_stats_collection',
'cluster_checks.nodes_reporting',
'cluster_checks.rebalancing_decisions',
'cluster_checks.rebalancing_duration_seconds',
'cluster_checks.successful_rebalancing_moves',
'cluster_checks.updating_stats_duration_seconds',
'datadog.rate_limit_queries.limit',
'datadog.rate_limit_queries.period',
'datadog.rate_limit_queries.remaining',
'datadog.rate_limit_queries.reset',
'datadog.requests',
'external_metrics',
'external_metrics.datadog_metrics',
'external_metrics.delay_seconds',
'external_metrics.processed_value',
'secret_backend.elapsed',
'go.goroutines',
'go.memstats.alloc_bytes',
'go.threads',
]
def test_check(aggregator, instance, mock_metrics_endpoint):
# type: (AggregatorStub, Dict[str, Any]) -> None
check = DatadogClusterAgentCheck('datadog_cluster_agent', {}, [instance])
# dry run to build mapping for label joins
check.check(instance)
check.check(instance)
for metric in METRICS:
aggregator.assert_metric(NAMESPACE + '.' + metric)
aggregator.assert_metric_has_tag_prefix(NAMESPACE + '.' + metric, 'is_leader:')
aggregator.assert_all_metrics_covered()
aggregator.assert_metrics_using_metadata(get_metadata_metrics())
| 1.75 | 2 |
prev_ob_models/KaplanLansner2014/plotting_and_analysis/plot_results.py | fameshpatel/olfactorybulb | 5 | 9652 | <gh_stars>1-10
import pylab
import numpy
import sys
if (len(sys.argv) < 2):
fn = raw_input("Please enter data file to be plotted\n")
else:
fn = sys.argv[1]
data = numpy.loadtxt(fn)
# if the first line contains a header, use skiprows=1
#data = numpy.loadtxt(fn, skiprows=1)
fig = pylab.figure()
ax = fig.add_subplot(111)
# if you want to use multiple figures in one, use
#ax1 = fig.add_subplot(211)
#ax2 = fig.add_subplot(212)
# and
if (data.ndim == 1):
x_axis = numpy.arange(data.size)
ax.plot(x_axis, data)
else:
# ax.errorbar(data[:,0], data[:,1], yerr=data[:, 2])
# print 'mean y-value:', data[:, 1].mean()
ax.plot(data[:, 0], data[:, 1], ls='-', lw=3, c='b')
# ax.scatter(data[:,0], data[:,2])
# ax.plot(data[:,3], data[:,6])
# saving:
# fig.savefig('output_figure.png')
# otherwise nothing is shown
pylab.show()
| 2.78125 | 3 |
pyeccodes/defs/grib2/tables/15/3_11_table.py | ecmwf/pyeccodes | 7 | 9653 | def load(h):
return ({'abbr': 0, 'code': 0, 'title': 'There is no appended list'},
{'abbr': 1,
'code': 1,
'title': 'Numbers define number of points corresponding to full coordinate '
'circles (i.e. parallels), coordinate values on each circle are '
'multiple of the circle mesh, and extreme coordinate values given '
'in grid definition (i.e. extreme longitudes) may not be reached in '
'all rows'},
{'abbr': 2,
'code': 2,
'title': 'Numbers define number of points corresponding to coordinate lines '
'delimited by extreme coordinate values given in grid definition '
'(i.e. extreme longitudes) which are present in each row'},
{'abbr': 3,
'code': 3,
'title': 'Numbers define the actual latitudes for each row in the grid. The '
'list of numbers are integer values of the valid latitudes in '
'microdegrees (scaled by 10-6) or in unit equal to the ratio of the '
'basic angle and the subdivisions number for each row, in the same '
'order as specified in the scanning mode flag',
'units': 'bit no. 2'},
{'abbr': None, 'code': 255, 'title': 'Missing'})
| 2.34375 | 2 |
lib/take2/main.py | zacharyfrederick/deep_q_gaf | 0 | 9654 | <filename>lib/take2/main.py<gh_stars>0
from __future__ import division
from lib import env_config
from lib.senior_env import BetterEnvironment
from keras.optimizers import Adam
from rl.agents.dqn import DQNAgent
from rl.policy import LinearAnnealedPolicy, BoltzmannQPolicy, EpsGreedyQPolicy
from rl.memory import SequentialMemory
from lib import models
import random
choices = [0,1,2]
def gen_action():
return random.choice(choices)
if __name__ == '__main__':
config_ = env_config.EnvConfig('config/debug.json')
env = BetterEnvironment(config_)
INPUT_SHAPE = (30, 180)
WINDOW_LENGTH = 4
model = models.build_paper_model()
# Get the environment and extract the number of actions.
nb_actions = 3
# Next, we build our model. We use the same model that was described by Mnih et al. (2015).
input_shape = (WINDOW_LENGTH,) + INPUT_SHAPE
# Finally, we configure and compile our agent. You can use every built-in Keras optimizer and
# even the metrics!
memory = SequentialMemory(limit=10000000, window_length=WINDOW_LENGTH)
# Select a policy. We use eps-greedy action selection, which means that a random action is selected
# with probability eps. We anneal eps from 1.0 to 0.1 over the course of 1M steps. This is done so that
# the agent initially explores the environment (high eps) and then gradually sticks to what it knows
# (low eps). We also set a dedicated eps value that is used during testing. Note that we set it to 0.05
# so that the agent still performs some random actions. This ensures that the agent cannot get stuck.
policy = LinearAnnealedPolicy(EpsGreedyQPolicy(), attr='eps', value_max=1., value_min=.1, value_test=.05,
nb_steps=1000000)
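    # Illustration (added comment): with linear annealing the exploration rate
    # at step t is, in effect,
    #     eps(t) = max(value_min, value_max - (value_max - value_min) * t / nb_steps)
    # so eps falls from 1.0 at step 0 to 0.1 at step 1,000,000 and stays there.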
# The trade-off between exploration and exploitation is difficult and an on-going research topic.
# If you want, you can experiment with the parameters or use a different policy. Another popular one
# is Boltzmann-style exploration:
# policy = BoltzmannQPolicy(tau=1.)
# Feel free to give it a try!
dqn = DQNAgent(model=model, nb_actions=nb_actions, policy=policy, memory=memory,
nb_steps_warmup=50000, gamma=.99, target_model_update=10000,
train_interval=4, delta_clip=1.)
dqn.compile(Adam(lr=.00025), metrics=['mae'])
# Okay, now it's time to learn something! We capture the interrupt exception so that training
# can be prematurely aborted. Notice that now you can use the built-in Keras callbacks!
weights_filename = 'dqn_{}_weights.h5f'.format('god_help_me.weights')
dqn.fit(env, nb_steps=100000, log_interval=10000)
print(env.portfolio.print_portfolio_results())
| 2.34375 | 2 |
src/clcore.py | ShepardPower/PyMCBuilder | 1 | 9655 | <filename>src/clcore.py
# I'm just the one that executes the instructions!
import sys, math, json, operator, time
import mcpi.minecraft as minecraft
from PIL import Image as pillow
from blockid import get_block
import mcpi.block as block
import functions as pymc
from tqdm import tqdm
import tkinter as tk
# Functions
# Main code
mc = minecraft.Minecraft.create()
try:
json_file = open("blocks.json")
json_put = json.load(json_file)
except:
pymc.chat(mc, "blocks.json not found, exiting!", 0)
sys.exit(1)
try:
rim = pillow.open(sys.argv[1])
except:
pymc.chat(mc, "bad image, exiting!", 0)
sys.exit(1)
orders = []
used = []
imwid, imhei = rim.size
if imhei > 200:
maxheight = 200
rim.thumbnail((200, maxheight), pillow.ANTIALIAS)
imwid, imhei = rim.size
pymc.chat(mc, "image is over 200 pixels, reducing height.", 1)
rim.convert('RGB')
im = rim.load()
pbar = tqdm(total=imhei*imwid)
for hei in range(imhei):
for wid in range(imwid):
smal = pymc.comp_pixel((im[wid, hei][0], im[wid, hei][1], im[wid, hei][2]), json_put)
im[wid, hei] = smal[1]
used.append(str(smal[2]))
pbar.update(1)
pbar.close()
rim.save("result.GIF") # The result
json_file.close()
oldPos = mc.player.getPos()
playerPos = [round(oldPos.x), round(oldPos.y), round(oldPos.z)]
pymc.chat(mc, "Ready!")
pbar = tqdm(total=imhei*imwid)
num_temp = imhei*imwid-1
for hei in range(imhei):
for wid in range(imwid):
#print(used[wid + (imhei * hei)])
gblock = get_block(used[num_temp])
mc.setBlock(playerPos[0]+wid, playerPos[1]+hei, playerPos[2], gblock)
num_temp -= 1
pbar.update(1)
pbar.close()
pymc.chat(mc, "Done!!")
pymc.chat(mc, "Please star us on github if you like the result!", 2)
| 2.4375 | 2 |
gaphor/tools/gaphorconvert.py | 987Frogh/project-makehuman | 1 | 9656 | <reponame>987Frogh/project-makehuman<gh_stars>1-10
#!/usr/bin/python
import optparse
import os
import re
import sys
import cairo
from gaphas.painter import Context, ItemPainter
from gaphas.view import View
import gaphor.UML as UML
from gaphor.application import Application
from gaphor.storage import storage
def pkg2dir(package):
"""
Return directory path from UML package class.
"""
name = []
while package:
name.insert(0, package.name)
package = package.package
return "/".join(name)
def paint(view, cr):
view.painter.paint(Context(cairo=cr, items=view.canvas.get_all_items(), area=None))
def main(argv=sys.argv[1:]):
def message(msg):
"""
Print message if user set verbose mode.
"""
if options.verbose:
print(msg, file=sys.stderr)
usage = "usage: %prog [options] file1 file2..."
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"-v", "--verbose", dest="verbose", action="store_true", help="verbose output"
)
parser.add_option(
"-u",
"--use-underscores",
dest="underscores",
action="store_true",
help="use underscores instead of spaces for output filenames",
)
parser.add_option(
"-d", "--dir", dest="dir", metavar="directory", help="output to directory"
)
parser.add_option(
"-f",
"--format",
dest="format",
metavar="format",
help="output file format, default pdf",
default="pdf",
choices=["pdf", "svg", "png"],
)
parser.add_option(
"-r",
"--regex",
dest="regex",
metavar="regex",
help="process diagrams which name matches given regular expresion;"
" name includes package name; regular expressions are case insensitive",
)
(options, args) = parser.parse_args(argv)
if not args:
parser.print_help()
Application.init(
services=["event_manager", "component_registry", "element_factory"]
)
factory = Application.get_service("element_factory")
name_re = None
if options.regex:
name_re = re.compile(options.regex, re.I)
# we should have some gaphor files to be processed at this point
for model in args:
message(f"loading model {model}")
storage.load(model, factory)
message("ready for rendering")
for diagram in factory.select(lambda e: e.isKindOf(UML.Diagram)):
odir = pkg2dir(diagram.package)
# just diagram name
dname = diagram.name
# full diagram name including package path
pname = f"{odir}/{dname}"
if options.underscores:
odir = odir.replace(" ", "_")
dname = dname.replace(" ", "_")
if name_re and not name_re.search(pname):
message(f"skipping {pname}")
continue
if options.dir:
odir = f"{options.dir}/{odir}"
outfilename = f"{odir}/{dname}.{options.format}"
if not os.path.exists(odir):
message(f"creating dir {odir}")
os.makedirs(odir)
message(f"rendering: {pname} -> {outfilename}...")
view = View(diagram.canvas)
view.painter = ItemPainter()
tmpsurface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 0, 0)
tmpcr = cairo.Context(tmpsurface)
view.update_bounding_box(tmpcr)
tmpcr.show_page()
tmpsurface.flush()
w, h = view.bounding_box.width, view.bounding_box.height
if options.format == "pdf":
surface = cairo.PDFSurface(outfilename, w, h)
elif options.format == "svg":
surface = cairo.SVGSurface(outfilename, w, h)
elif options.format == "png":
surface = cairo.ImageSurface(
cairo.FORMAT_ARGB32, int(w + 1), int(h + 1)
)
else:
assert False, f"unknown format {options.format}"
cr = cairo.Context(surface)
view.matrix.translate(-view.bounding_box.x, -view.bounding_box.y)
paint(view, cr)
cr.show_page()
if options.format == "png":
surface.write_to_png(outfilename)
surface.flush()
surface.finish()
| 2.390625 | 2 |
zeta_python_sdk/exceptions.py | prettyirrelevant/zeta-python-sdk | 2 | 9657 | <reponame>prettyirrelevant/zeta-python-sdk
class InvalidSideException(Exception):
"""Invalid side"""
class NotSupportedException(Exception):
"""Not supported by dummy wallet"""
class InvalidProductException(Exception):
"""Invalid product type"""
class OutOfBoundsException(Exception):
"""Attempt to access memory outside buffer bounds"""
| 2.015625 | 2 |
test/test_ID.py | a-buntjer/tsib | 14 | 9658 | <gh_stars>10-100
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 08 11:33:01 2016
@author: <NAME>
"""
import tsib
def test_get_ID():
# parameterize a building
bdgcfg = tsib.BuildingConfiguration(
{
"refurbishment": False,
"nightReduction": False,
"occControl": False,
"capControl": True,
"n_persons": 2,
"roofOrientation": 0.0,
"n_apartments": 1,
"latitude": 49.,
"longitude": 12.,
}
)
bdgObj = tsib.Building(configurator=bdgcfg)
print('ID is : ' + str(bdgObj.ID))
return
def test_set_ID():
# parameterize a building
bdgcfg = tsib.BuildingConfiguration(
{
"buildingYear": 1980,
"n_persons": 2,
"roofOrientation": 0.0,
"n_apartments": 2,
"a_ref": 300.,
"surrounding": "Detached",
"latitude": 52.,
"longitude": 13.,
}
)
bdgObj = tsib.Building(configurator=bdgcfg)
bdgObj.ID = 'custom'
if not bdgObj.ID == 'custom':
raise ValueError()
return
| 2.453125 | 2 |
dislib/model_selection/_search.py | alexbarcelo/dislib | 36 | 9659 | <gh_stars>10-100
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Sequence
from functools import partial
from itertools import product
import numpy as np
from pycompss.api.api import compss_wait_on
from scipy.stats import rankdata
from sklearn import clone
from sklearn.model_selection import ParameterGrid, ParameterSampler
from numpy.ma import MaskedArray
from dislib.model_selection._split import infer_cv
from dislib.model_selection._validation import check_scorer, fit_and_score, \
validate_score, aggregate_score_dicts
class BaseSearchCV(ABC):
"""Abstract base class for hyper parameter search with cross-validation."""
def __init__(self, estimator, scoring=None, cv=None, refit=True):
self.estimator = estimator
self.scoring = scoring
self.cv = cv
self.refit = refit
@abstractmethod
def _run_search(self, evaluate_candidates):
"""Abstract method to perform the search. The parameter
`evaluate_candidates` is a function that evaluates a ParameterGrid at a
time """
pass
def fit(self, x, y=None, **fit_params):
"""Run fit with all sets of parameters.
Parameters
----------
x : ds-array
Training data samples.
y : ds-array, optional (default = None)
Training data labels or values.
**fit_params : dict of string -> object
Parameters passed to the ``fit`` method of the estimator
"""
estimator = self.estimator
cv = infer_cv(self.cv)
scorers, refit_metric = self._infer_scorers()
base_estimator = clone(estimator)
n_splits = None
all_candidate_params = []
all_out = []
def evaluate_candidates(candidate_params):
"""Evaluate some parameters"""
candidate_params = list(candidate_params)
out = [fit_and_score(clone(base_estimator), train, validation,
scorer=scorers, parameters=parameters,
fit_params=fit_params)
for parameters, (train, validation)
in product(candidate_params, cv.split(x, y))]
nonlocal n_splits
n_splits = cv.get_n_splits()
all_candidate_params.extend(candidate_params)
all_out.extend(out)
self._run_search(evaluate_candidates)
for params_result in all_out:
scores = params_result[0]
for scorer_name, score in scores.items():
score = compss_wait_on(score)
scores[scorer_name] = validate_score(score, scorer_name)
results = self._format_results(all_candidate_params, scorers,
n_splits, all_out)
# For multi-metric evaluation, store the best_index_, best_params_ and
# best_score_ iff refit is one of the scorer names
# In single metric evaluation, refit_metric is "score"
if self.refit or not self.multimetric_:
# If callable, refit is expected to return the index of the best
# parameter set.
if callable(self.refit):
self.best_index_ = self.refit(results)
if not isinstance(self.best_index_, (int, np.integer)):
raise TypeError('best_index_ returned is not an integer')
if (self.best_index_ < 0 or
self.best_index_ >= len(results["params"])):
raise IndexError('best_index_ index out of range')
else:
self.best_index_ = results["rank_test_%s"
% refit_metric].argmin()
self.best_score_ = results["mean_test_%s" % refit_metric][
self.best_index_]
self.best_params_ = results["params"][self.best_index_]
if self.refit:
self.best_estimator_ = clone(base_estimator).set_params(
**self.best_params_)
self.best_estimator_.fit(x, y, **fit_params)
# Store the only scorer not as a dict for single metric evaluation
self.scorer_ = scorers if self.multimetric_ else scorers['score']
self.cv_results_ = results
self.n_splits_ = n_splits
return self
@staticmethod
def _format_results(candidate_params, scorers, n_splits, out):
n_candidates = len(candidate_params)
(test_score_dicts,) = zip(*out)
test_scores = aggregate_score_dicts(test_score_dicts)
results = {}
def _store(key_name, array, splits=False, rank=False):
"""A small helper to store the scores/times to the cv_results_"""
array = np.array(array, dtype=np.float64).reshape(n_candidates,
n_splits)
if splits:
for split_i in range(n_splits):
# Uses closure to alter the results
results["split%d_%s"
% (split_i, key_name)] = array[:, split_i]
array_means = np.mean(array, axis=1)
results['mean_%s' % key_name] = array_means
array_stds = np.std(array, axis=1)
results['std_%s' % key_name] = array_stds
if rank:
results["rank_%s" % key_name] = np.asarray(
rankdata(-array_means, method='min'), dtype=np.int32)
# Use one MaskedArray and mask all the places where the param is not
# applicable for that candidate. Use defaultdict as each candidate may
# not contain all the params
param_results = defaultdict(partial(MaskedArray,
np.empty(n_candidates, ),
mask=True,
dtype=object))
for cand_i, params in enumerate(candidate_params):
for name, value in params.items():
# An all masked empty array gets created for the key
# `"param_%s" % name` at the first occurrence of `name`.
# Setting the value at an index also unmasks that index
param_results["param_%s" % name][cand_i] = value
results.update(param_results)
# Store a list of param dicts at the key 'params'
results['params'] = candidate_params
for scorer_name in scorers.keys():
_store('test_%s' % scorer_name, test_scores[scorer_name],
splits=True, rank=True)
return results
def _infer_scorers(self):
estimator = self.estimator
scoring = self.scoring
refit = self.refit
if scoring is None or callable(scoring):
scorers = {"score": check_scorer(estimator, scoring)}
refit_metric = 'score'
self.multimetric_ = False
elif isinstance(scoring, dict):
scorers = {key: check_scorer(estimator, scorer)
for key, scorer in scoring.items()}
if refit is not False and (
not isinstance(refit, str) or
refit not in scorers) and not callable(refit):
raise ValueError("For multi-metric scoring, the parameter "
"refit must be set to a scorer key or a "
"callable to refit an estimator with the "
"best parameter setting on the whole "
"data and make the best_* attributes "
"available for that metric. If this is "
"not needed, refit should be set to "
"False explicitly. %r was passed."
% refit)
refit_metric = refit
self.multimetric_ = True
else:
raise ValueError('scoring is not valid')
return scorers, refit_metric
class GridSearchCV(BaseSearchCV):
"""Exhaustive search over specified parameter values for an estimator.
GridSearchCV implements a "fit" and a "score" method.
The parameters of the estimator used to apply these methods are optimized
by cross-validated grid-search over a parameter grid.
Parameters
----------
estimator : estimator object.
This is assumed to implement the scikit-learn estimator interface.
Either estimator needs to provide a ``score`` function,
or ``scoring`` must be passed.
param_grid : dict or list of dictionaries
Dictionary with parameters names (string) as keys and lists of
parameter settings to try as values, or a list of such
dictionaries, in which case the grids spanned by each dictionary
in the list are explored. This enables searching over any sequence
of parameter settings.
scoring : callable, dict or None, optional (default=None)
A callable to evaluate the predictions on the test set. It should take
3 parameters, estimator, x and y, and return a score (higher meaning
better). For evaluating multiple metrics, give a dict with names as
keys and callables as values. If None, the estimator's score method is
used.
cv : int or cv generator, optional (default=None)
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 5-fold cross validation,
- integer, to specify the number of folds in a `KFold`,
- custom cv generator.
refit : boolean, string, or callable, optional (default=True)
Refit an estimator using the best found parameters on the whole
dataset.
For multiple metric evaluation, this needs to be a string denoting the
scorer that would be used to find the best parameters for refitting
the estimator at the end.
Where there are considerations other than maximum score in
choosing a best estimator, ``refit`` can be set to a function which
returns the selected ``best_index_`` given ``cv_results_``.
The refitted estimator is made available at the ``best_estimator_``
attribute and permits using ``predict`` directly on this
``GridSearchCV`` instance.
Also for multiple metric evaluation, the attributes ``best_index_``,
``best_score_`` and ``best_params_`` will only be available if
``refit`` is set and all of them will be determined w.r.t this specific
scorer. ``best_score_`` is not returned if refit is callable.
See ``scoring`` parameter to know more about multiple metric
evaluation.
Examples
--------
>>> import dislib as ds
>>> from dislib.model_selection import GridSearchCV
>>> from dislib.classification import RandomForestClassifier
>>> import numpy as np
>>> from sklearn import datasets
>>>
>>>
>>> if __name__ == '__main__':
>>> x_np, y_np = datasets.load_iris(return_X_y=True)
>>> x = ds.array(x_np, (30, 4))
>>> y = ds.array(y_np[:, np.newaxis], (30, 1))
>>> param_grid = {'n_estimators': (2, 4), 'max_depth': range(3, 5)}
>>> rf = RandomForestClassifier()
>>> searcher = GridSearchCV(rf, param_grid)
>>> searcher.fit(x, y)
>>> searcher.cv_results_
Attributes
----------
cv_results_ : dict of numpy (masked) ndarrays
A dict with keys as column headers and values as columns, that can be
imported into a pandas ``DataFrame``.
For instance the below given table:
+------------+------------+-----------------+---+---------+
|param_kernel|param_degree|split0_test_score|...|rank_t...|
+============+============+=================+===+=========+
| 'poly' | 2 | 0.80 |...| 2 |
+------------+------------+-----------------+---+---------+
| 'poly' | 3 | 0.70 |...| 4 |
+------------+------------+-----------------+---+---------+
| 'rbf' | -- | 0.80 |...| 3 |
+------------+------------+-----------------+---+---------+
| 'rbf' | -- | 0.93 |...| 1 |
+------------+------------+-----------------+---+---------+
will be represented by a ``cv_results_`` dict of::
{
'param_kernel': masked_array(data = ['poly', 'poly', 'rbf', 'rbf'],
mask = [False False False False]...),
'param_degree': masked_array(data = [2.0 3.0 -- --],
mask = [False False True True]...),
'split0_test_score' : [0.80, 0.70, 0.80, 0.93],
'split1_test_score' : [0.82, 0.50, 0.68, 0.78],
'split2_test_score' : [0.79, 0.55, 0.71, 0.93],
...
'mean_test_score' : [0.81, 0.60, 0.75, 0.85],
'std_test_score' : [0.01, 0.10, 0.05, 0.08],
'rank_test_score' : [2, 4, 3, 1],
'params' : [{'kernel': 'poly', 'degree': 2}, ...],
}
NOTES:
The key ``'params'`` is used to store a list of parameter
settings dicts for all the parameter candidates.
The ``mean_fit_time``, ``std_fit_time``, ``mean_score_time`` and
``std_score_time`` are all in seconds.
For multi-metric evaluation, the scores for all the scorers are
available in the ``cv_results_`` dict at the keys ending with that
scorer's name (``'_<scorer_name>'``) instead of ``'_score'`` shown
above ('split0_test_precision', 'mean_train_precision' etc.).
best_estimator_ : estimator or dict
Estimator that was chosen by the search, i.e. estimator
which gave highest score (or smallest loss if specified)
on the left out data. Not available if ``refit=False``.
See ``refit`` parameter for more information on allowed values.
best_score_ : float
Mean cross-validated score of the best_estimator
For multi-metric evaluation, this is present only if ``refit`` is
specified.
best_params_ : dict
Parameter setting that gave the best results on the hold out data.
For multi-metric evaluation, this is present only if ``refit`` is
specified.
best_index_ : int
The index (of the ``cv_results_`` arrays) which corresponds to the best
candidate parameter setting.
The dict at ``search.cv_results_['params'][search.best_index_]`` gives
the parameter setting for the best model, that gives the highest
mean score (``search.best_score_``).
For multi-metric evaluation, this is present only if ``refit`` is
specified.
scorer_ : function or a dict
Scorer function used on the held out data to choose the best
parameters for the model.
For multi-metric evaluation, this attribute holds the validated
``scoring`` dict which maps the scorer key to the scorer callable.
n_splits_ : int
The number of cross-validation splits (folds/iterations).
"""
def __init__(self, estimator, param_grid, scoring=None, cv=None,
refit=True):
super().__init__(estimator=estimator, scoring=scoring, cv=cv,
refit=refit)
self.param_grid = param_grid
self._check_param_grid(param_grid)
def _run_search(self, evaluate_candidates):
evaluate_candidates(ParameterGrid(self.param_grid))
@staticmethod
def _check_param_grid(param_grid):
if hasattr(param_grid, 'items'):
param_grid = [param_grid]
for p in param_grid:
for name, v in p.items():
if isinstance(v, np.ndarray) and v.ndim > 1:
raise ValueError("Parameter array should be "
"one-dimensional.")
if (isinstance(v, str) or
not isinstance(v, (np.ndarray, Sequence))):
raise ValueError(
"Parameter values for parameter ({0}) need "
"to be a sequence (but not a string) or"
" np.ndarray.".format(name))
if len(v) == 0:
raise ValueError(
"Parameter values for parameter ({0}) need "
"to be a non-empty sequence.".format(name))
class RandomizedSearchCV(BaseSearchCV):
"""Randomized search on hyper parameters.
RandomizedSearchCV implements a "fit" and a "score" method.
The parameters of the estimator used to apply these methods are optimized
by cross-validated search over parameter settings.
In contrast to GridSearchCV, not all parameter values are tried out, but
rather a fixed number of parameter settings is sampled from the specified
distributions. The number of parameter settings that are tried is
given by n_iter.
If all parameters are presented as a list,
sampling without replacement is performed. If at least one parameter
is given as a distribution, sampling with replacement is used.
Parameters
----------
estimator : estimator object.
This is assumed to implement the scikit-learn estimator interface.
Either estimator needs to provide a ``score`` function,
or ``scoring`` must be passed.
param_distributions : dict
Dictionary with parameters names (string) as keys and distributions
or lists of parameters to try. Distributions must provide a ``rvs``
method for sampling (such as those from scipy.stats.distributions).
If a list is given, it is sampled uniformly.
n_iter : int, optional (default=10)
Number of parameter settings that are sampled.
scoring : callable, dict or None, optional (default=None)
A callable to evaluate the predictions on the test set. It should take
3 parameters, estimator, x and y, and return a score (higher meaning
better). For evaluating multiple metrics, give a dict with names as
keys and callables as values. If None, the estimator's score method is
used.
cv : int or cv generator, optional (default=None)
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 5-fold cross validation,
- integer, to specify the number of folds in a `KFold`,
- custom cv generator.
refit : boolean, string, or callable, optional (default=True)
Refit an estimator using the best found parameters on the whole
dataset.
For multiple metric evaluation, this needs to be a string denoting the
scorer that would be used to find the best parameters for refitting
the estimator at the end.
Where there are considerations other than maximum score in
choosing a best estimator, ``refit`` can be set to a function which
returns the selected ``best_index_`` given ``cv_results_``.
The refitted estimator is made available at the ``best_estimator_``
attribute and permits using ``predict`` directly on this
``GridSearchCV`` instance.
Also for multiple metric evaluation, the attributes ``best_index_``,
``best_score_`` and ``best_params_`` will only be available if
``refit`` is set and all of them will be determined w.r.t this specific
scorer. ``best_score_`` is not returned if refit is callable.
See ``scoring`` parameter to know more about multiple metric
evaluation.
random_state : int, RandomState instance or None, optional, default=None
Pseudo random number generator state used for random sampling of params
in param_distributions. This is not passed to each estimator.
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Examples
--------
>>> import dislib as ds
>>> from dislib.model_selection import RandomizedSearchCV
>>> from dislib.classification import CascadeSVM
>>> import numpy as np
>>> import scipy.stats as stats
>>> from sklearn import datasets
>>>
>>>
>>> if __name__ == '__main__':
>>> x_np, y_np = datasets.load_iris(return_X_y=True)
>>> # Pre-shuffling required for CSVM
>>> p = np.random.permutation(len(x_np))
>>> x = ds.array(x_np[p], (30, 4))
>>> y = ds.array((y_np[p] == 0)[:, np.newaxis], (30, 1))
>>> param_distributions = {'c': stats.expon(scale=0.5),
>>> 'gamma': stats.expon(scale=10)}
>>> csvm = CascadeSVM()
>>> searcher = RandomizedSearchCV(csvm, param_distributions, n_iter=10)
>>> searcher.fit(x, y)
>>> searcher.cv_results_
Attributes
----------
cv_results_ : dict of numpy (masked) ndarrays
A dict with keys as column headers and values as columns, that can be
imported into a pandas ``DataFrame``.
    For instance, the table below
+---------+-------------+-------------------+---+---------------+
| param_c | param_gamma | split0_test_score |...|rank_test_score|
+=========+=============+===================+===+===============+
| 0.193 | 1.883 | 0.82 |...| 3 |
+---------+-------------+-------------------+---+---------------+
| 1.452 | 0.327 | 0.81 |...| 2 |
+---------+-------------+-------------------+---+---------------+
| 0.926 | 3.452 | 0.94 |...| 1 |
+---------+-------------+-------------------+---+---------------+
will be represented by a ``cv_results_`` dict of::
{
    'param_c' : masked_array(data = [0.193, 1.452, 0.926],
                             mask = False),
    'param_gamma' : masked_array(data = [1.883, 0.327, 3.452], mask = False),
'split0_test_score' : [0.82, 0.81, 0.94],
'split1_test_score' : [0.66, 0.75, 0.79],
'split2_test_score' : [0.82, 0.87, 0.84],
...
'mean_test_score' : [0.76, 0.84, 0.86],
'std_test_score' : [0.01, 0.20, 0.04],
'rank_test_score' : [3, 2, 1],
'params' : [{'c' : 0.193, 'gamma' : 1.883}, ...],
}
NOTE
The key ``'params'`` is used to store a list of parameter
settings dicts for all the parameter candidates.
The ``mean_fit_time``, ``std_fit_time``, ``mean_score_time`` and
``std_score_time`` are all in seconds.
For multi-metric evaluation, the scores for all the scorers are
available in the ``cv_results_`` dict at the keys ending with that
scorer's name (``'_<scorer_name>'``) instead of ``'_score'`` shown
above. ('split0_test_precision', 'mean_train_precision' etc.)
best_estimator_ : estimator or dict
Estimator that was chosen by the search, i.e. estimator
which gave highest score (or smallest loss if specified)
on the left out data. Not available if ``refit=False``.
For multi-metric evaluation, this attribute is present only if
``refit`` is specified.
See ``refit`` parameter for more information on allowed values.
best_score_ : float
Mean cross-validated score of the best_estimator.
For multi-metric evaluation, this is not available if ``refit`` is
``False``. See ``refit`` parameter for more information.
best_params_ : dict
Parameter setting that gave the best results on the hold out data.
For multi-metric evaluation, this is not available if ``refit`` is
``False``. See ``refit`` parameter for more information.
best_index_ : int
The index (of the ``cv_results_`` arrays) which corresponds to the best
candidate parameter setting.
The dict at ``search.cv_results_['params'][search.best_index_]`` gives
the parameter setting for the best model, that gives the highest
mean score (``search.best_score_``).
For multi-metric evaluation, this is not available if ``refit`` is
``False``. See ``refit`` parameter for more information.
scorer_ : function or a dict
Scorer function used on the held out data to choose the best
parameters for the model.
For multi-metric evaluation, this attribute holds the validated
``scoring`` dict which maps the scorer key to the scorer callable.
n_splits_ : int
The number of cross-validation splits (folds/iterations).
"""
def __init__(self, estimator, param_distributions, n_iter=10, scoring=None,
cv=None, refit=True, random_state=None):
super().__init__(estimator=estimator, scoring=scoring, cv=cv,
refit=refit)
self.param_distributions = param_distributions
self.n_iter = n_iter
self.random_state = random_state
def _run_search(self, evaluate_candidates):
"""Search n_iter candidates from param_distributions"""
ps = ParameterSampler(self.param_distributions, self.n_iter,
random_state=self.random_state)
evaluate_candidates(ps)
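# Illustrative sketch -- not part of the original module. The docstring above
# says list-valued parameters are sampled uniformly and distribution-valued
# parameters are sampled with replacement via ParameterSampler. The sketch
# uses scikit-learn's ParameterSampler, which this module's sampler is assumed
# to mirror; the 'c' and 'gamma' names come from the docstring example.
if __name__ == "__main__":
    import scipy.stats as stats
    from sklearn.model_selection import ParameterSampler as SkParameterSampler

    sampler = SkParameterSampler({'c': stats.expon(scale=0.5),
                                  'gamma': stats.expon(scale=10)},
                                 n_iter=3, random_state=0)
    for candidate in sampler:
        # Each candidate is a plain dict, e.g. {'c': 0.39..., 'gamma': 7.9...}
        print(candidate)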
| 2.734375 | 3 |
webapp/ui/tests/test_parse_search_results.py | robseed/botanist | 0 | 9660 | <filename>webapp/ui/tests/test_parse_search_results.py
import os
from django.test import TestCase
from mock import patch
from ui.views import parse_search_results
FIXTURES_ROOT = os.path.join(os.path.dirname(__file__), 'fixtures')
FX = lambda *relpath: os.path.join(FIXTURES_ROOT, *relpath)
@patch('ui.views.get_repo_type')
@patch('ui.views.CODE_ROOT', '/opt/botanist/repos')
class ParseSearchResults(TestCase):
def test_duplicate_repositories_in_github_and_bitbucket(self, get_repo_type):
def se(filepath):
if 'bitbucket' in filepath:
return 'hg'
elif 'github' in filepath:
return 'git'
else:
raise Exception('thats odd')
get_repo_type.side_effect = se
with open(FX('duplicate_repositories_in_github_and_bitbucket.results.txt')) as f:
output = f.read()
results, count = parse_search_results(output, 'AbstractSendTimeJob', True)
self.assertEqual(2, count)
self.assertListEqual(['bitbucket', 'github'], results['sproutjobs'].keys())
self.assertEqual('public abstract class AbstractJob implements Job {', results['sproutjobs']['bitbucket']['files']['src/main/java/com/sproutsocial/AbstractJob.java'][0]['srcline'])
        self.assertEqual('public abstract class AbstractJob implements Job {', results['sproutjobs']['github']['files']['src/main/java/com/sproutsocial/AbstractJob.java'][0]['srcline'])
| 2.28125 | 2 |
minos/api_gateway/common/exceptions.py | Clariteia/api_gateway_common | 3 | 9661 | """
Copyright (C) 2021 Clariteia SL
This file is part of minos framework.
Minos framework can not be copied and/or distributed without the express permission of Clariteia SL.
"""
from typing import (
Any,
Type,
)
class MinosException(Exception):
"""Exception class for import packages or modules"""
__slots__ = "_message"
def __init__(self, error_message: str):
self._message = error_message
def __repr__(self):
return f"{type(self).__name__}(message={repr(self._message)})"
def __str__(self) -> str:
"""represent in a string format the error message passed during the instantiation"""
return self._message
class MinosImportException(MinosException):
pass
class MinosProtocolException(MinosException):
pass
class MinosMessageException(MinosException):
pass
class MinosConfigException(MinosException):
"""Base config exception."""
class MinosConfigDefaultAlreadySetException(MinosConfigException):
"""Exception to be raised when some config is already set as default."""
class MinosRepositoryException(MinosException):
"""Base repository exception."""
class MinosRepositoryAggregateNotFoundException(MinosRepositoryException):
"""Exception to be raised when some aggregate is not found on the repository."""
class MinosRepositoryDeletedAggregateException(MinosRepositoryException):
"""Exception to be raised when some aggregate is already deleted from the repository."""
class MinosRepositoryManuallySetAggregateIdException(MinosRepositoryException):
"""Exception to be raised when some aggregate is trying to be created with a manually set id."""
class MinosRepositoryManuallySetAggregateVersionException(MinosRepositoryException):
"""Exception to be raised when some aggregate is trying to be created with a manually set version."""
class MinosRepositoryUnknownActionException(MinosRepositoryException):
"""Exception to be raised when some entry tries to perform an unknown action."""
class MinosRepositoryNonProvidedException(MinosRepositoryException):
"""Exception to be raised when a repository is needed but none is set."""
class MinosModelException(MinosException):
"""Exception to be raised when some mandatory condition is not satisfied by a model."""
pass
class EmptyMinosModelSequenceException(MinosModelException):
"""Exception to be raised when a sequence must be not empty, but it is empty."""
pass
class MultiTypeMinosModelSequenceException(MinosModelException):
"""Exception to be raised when a sequence doesn't satisfy the condition to have the same type for each item."""
pass
class MinosModelAttributeException(MinosException):
"""Base model attributes exception."""
pass
class MinosReqAttributeException(MinosModelAttributeException):
"""Exception to be raised when some required attributes are not provided."""
pass
class MinosTypeAttributeException(MinosModelAttributeException):
"""Exception to be raised when there are any mismatching between the expected and observed attribute type."""
def __init__(self, name: str, target_type: Type, value: Any):
self.name = name
self.target_type = target_type
self.value = value
super().__init__(
f"The {repr(target_type)} expected type for {repr(name)} does not match with "
f"the given data type: {type(value)}"
)
class MinosMalformedAttributeException(MinosModelAttributeException):
"""Exception to be raised when there are any kind of problems with the type definition."""
pass
class MinosParseAttributeException(MinosModelAttributeException):
"""Exception to be raised when there are any kind of problems with the parsing logic."""
def __init__(self, name: str, value: Any, exception: Exception):
self.name = name
self.value = value
self.exception = exception
super().__init__(f"{repr(exception)} was raised while parsing {repr(name)} field with {repr(value)} value.")
class MinosAttributeValidationException(MinosModelAttributeException):
"""Exception to be raised when some fields are not valid."""
def __init__(self, name: str, value: Any):
self.name = name
self.value = value
super().__init__(f"{repr(value)} value does not pass the {repr(name)} field validation.")
| 2.25 | 2 |
tsdl/tools/extensions.py | burgerdev/hostload | 0 | 9662 | <reponame>burgerdev/hostload
"""
Extensions for pylearn2 training algorithms. Those are either reimplemented to
suit the execution model of this package, or new ones for recording metrics.
"""
import os
import cPickle as pkl
import numpy as np
from pylearn2.train_extensions import TrainExtension
from .abcs import Buildable
class BuildableTrainExtension(TrainExtension, Buildable):
"""
makes a pylearn2 TrainExtension buildable
"""
@classmethod
def build(cls, config, parent=None, graph=None, workingdir=None):
"""
build an instance of this class with given configuration dict
"""
config_copy = config.copy()
if "wd" not in config_copy:
config_copy["wd"] = workingdir
obj = super(BuildableTrainExtension, cls).build(config_copy)
return obj
def __init__(self, **kwargs):
if "workingdir" in kwargs:
self._wd = kwargs["workingdir"]
super(BuildableTrainExtension, self).__init__()
@classmethod
def get_default_config(cls):
"""
override to provide your own default configuration
"""
conf = super(BuildableTrainExtension, cls).get_default_config()
conf["wd"] = None
return conf
class PersistentTrainExtension(BuildableTrainExtension):
"""
abstract extension that can store its results (on disk, probably)
"""
def store(self):
"""
store the findings of this extension
"""
pass
class WeightKeeper(PersistentTrainExtension):
"""
keeps track of the model's weights at each monitor step
    This extension stores weights *per monitor step* - the list grows large pretty
quickly.
"""
_weights = []
def on_monitor(self, model, dataset, algorithm):
"""
save the model's weights
"""
self._weights.append(model.get_param_values())
def setup(self, model, dataset, algorithm):
"""
initialize the weight list
"""
self._weights = []
def get_weights(self):
"""
get weights history
"""
return self._weights
def store(self):
path = os.path.join(self._wd, "weightkeeper.pkl")
with open(path, "w") as file_:
pkl.dump(self._weights, file_)
class ProgressMonitor(PersistentTrainExtension):
"""
Makes the monitor channel's history accessible to us.
"""
_progress = np.NaN
@classmethod
def get_default_config(cls):
config = super(ProgressMonitor, cls).get_default_config()
config["channel"] = "valid_objective"
return config
def on_monitor(self, model, dataset, algorithm):
"""
save the desired channel
"""
monitor = model.monitor
channels = monitor.channels
channel = channels[self._channel]
self._progress = channel.val_record
def get_progress(self):
"""
get the value's history
"""
return self._progress
def store(self):
filename = "progress_{}.pkl".format(self._channel)
path = os.path.join(self._wd, filename)
with open(path, "w") as file_:
pkl.dump(self._progress, file_)
class MonitorBasedSaveBest(BuildableTrainExtension):
"""
similar to pylearn2's MonitorBasedSaveBest, but avoids memory hogging
(see https://github.com/lisa-lab/pylearn2/issues/1567)
"""
best_cost = np.inf
best_params = None
@classmethod
def get_default_config(cls):
config = super(MonitorBasedSaveBest, cls).get_default_config()
config["channel"] = "valid_objective"
return config
def setup(self, model, dataset, algorithm):
self.best_cost = np.inf
self.best_params = model.get_param_values()
def on_monitor(self, model, dataset, algorithm):
"""
Looks whether the model performs better than earlier. If it's the
case, saves the model.
Parameters
----------
model : pylearn2.models.model.Model
model.monitor must contain a channel with name given by
self.channel_name
dataset : pylearn2.datasets.dataset.Dataset
Not used
algorithm : TrainingAlgorithm
Not used
"""
monitor = model.monitor
channels = monitor.channels
channel = channels[self._channel]
val_record = channel.val_record
new_cost = val_record[-1]
if new_cost < self.best_cost:
self.best_cost = new_cost
self.best_params = model.get_param_values()
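# Illustrative sketch -- not part of the original module. MonitorBasedSaveBest
# keeps only the best parameter values instead of pickling whole model
# snapshots (see the linked pylearn2 issue). The same bookkeeping in
# isolation, with a plain dict standing in for model.get_param_values():
if __name__ == "__main__":
    best_cost, best_params = np.inf, None
    for cost, params in [(0.9, {"w": 1}), (0.4, {"w": 2}), (0.6, {"w": 3})]:
        if cost < best_cost:
            best_cost, best_params = cost, params
    print(best_cost, best_params)  # -> 0.4 {'w': 2}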
| 2.25 | 2 |
nogi/utils/post_extractor.py | Cooomma/nogi-backup-blog | 0 | 9663 | import asyncio
from io import BytesIO
import logging
import os
import random
import time
from typing import List
from urllib.parse import urlparse
import aiohttp
from aiohttp import ClientSession, TCPConnector
import requests
from requests import Response
from tqdm import tqdm
from nogi import REQUEST_HEADERS
from nogi.db.nogi_blog_content import NogiBlogContent
from nogi.db.nogi_blog_summary import NogiBlogSummary
from nogi.storages.gcs import GCS
from nogi.utils.parsers import PostParser, generate_post_key
logger = logging.getLogger()
HEADERS = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36'
}
class PostExecutor:
def __init__(self, member: dict, summary_db: NogiBlogSummary, content_db: NogiBlogContent, gcs_client: GCS, bucket: str, concurrent: int = 4):
self._waiting_limit = concurrent
self.member = member
# DB
self.summary_db = summary_db
self.content_db = content_db
# GCS Storage
self.bucket = bucket
self.storage = gcs_client
self.storage_blog_post_prefix = os.path.join(member['roma_name'], 'post')
self.storage_blog_image_prefix = os.path.join(member['roma_name'], 'img')
# Tasks
self.todos = self.summary_db.get_missing_blog_url(member['id'])
@staticmethod
def db_transform(post_url: str, obj: dict, **kwargs) -> dict:
return dict(
member_id=kwargs.get('member_id'),
blog_key=generate_post_key(post_url),
url=post_url,
title=obj['title'],
content=obj['content'],
image_gcs_paths=kwargs.get('image_gcs_paths'),
post_gcs_path=kwargs.get('post_gcs_path'),
blog_created_at=int(obj['created_at'].timestamp()))
@staticmethod
def _get_hd_image(url: str) -> BytesIO:
first_layer_response: Response = requests.get(url, headers=HEADERS)
logger.debug(first_layer_response.cookies)
resp = requests.get(
url=url.replace('http://dcimg.awalker.jp/v/', 'http://dcimg.awalker.jp/i/'),
cookies=first_layer_response.cookies)
logger.debug(resp.status_code)
logger.debug(resp.headers)
        return BytesIO(resp.content) if resp.status_code == 200 else BytesIO(b'')
def backup_images(self, image_urls: List[dict]) -> List[str]:
downloaded_image_urls = list()
for url in image_urls:
image_gcs_path = os.path.join(self.storage_blog_image_prefix,
'/'.join(urlparse(url['image_url']).path.split('/')[-5:]))
if url['high_resolution_url'] != url['image_url']:
hd_image = self._get_hd_image(url['high_resolution_url'])
if hd_image:
self.storage.upload_stream(
bucket=self.bucket,
blob_name=image_gcs_path,
content=hd_image.read(),
content_type='image/jpeg'
)
else:
image = requests.get(url=url['image_url'])
if image.status_code != 200:
logger.warning('Image Request Fail: %s', url)
continue
self.storage.upload_stream(
bucket=self.bucket,
blob_name=image_gcs_path,
content=image.content,
content_type='image/jpeg'
)
downloaded_image_urls.append(url)
return downloaded_image_urls
async def backup_content(self, session: ClientSession, post_url: str) -> str:
post_gcs_path = os.path.join(self.storage_blog_post_prefix, '/'.join(urlparse(post_url).path.split('/')[-3:]))
try:
async with session.get(url=post_url, headers=REQUEST_HEADERS) as response:
self.storage.upload_stream(
bucket=self.bucket, blob_name=post_gcs_path,
content=await response.read(), content_type='text/html')
return post_gcs_path
except aiohttp.client_exceptions.InvalidURL:
print('Invalid URL: %s' % post_url)
except aiohttp.client_exceptions.ClientConnectorError:
print('Client Connector Error: %s' % post_url)
@staticmethod
def crawl_post(url: str) -> None:
return PostParser(requests.get(url, headers=REQUEST_HEADERS).text).to_dict()
async def _run(self, url: str):
try:
async with aiohttp.ClientSession(connector=TCPConnector(verify_ssl=False)) as session:
post_gcs_path = await self.backup_content(session, url)
post = self.crawl_post(url)
images_gcs_paths = self.backup_images(post['image_urls'])
result = self.db_transform(
post_url=url, obj=post, member_id=self.member['id'], image_gcs_paths=images_gcs_paths, post_gcs_path=post_gcs_path)
self.content_db.upsert_crawled_post(result)
self.summary_db.update_crawled_result(result)
except aiohttp.client_exceptions.InvalidURL:
print('Invalid URL: %s' % url)
except aiohttp.client_exceptions.ClientConnectorError:
print('Client Connector Error: %s' % url)
except Exception:
import traceback
print('Error URL: %s' % url)
print(traceback.format_exc())
def run(self):
loop = asyncio.get_event_loop()
if self.todos:
tasks = []
for url in tqdm(self.todos, desc='Current Member: {}'.format(self.member['kanji_name']), ncols=120):
tasks.append(asyncio.ensure_future(self._run(url)))
if len(tasks) > self._waiting_limit:
loop.run_until_complete(asyncio.gather(*tasks))
tasks = []
if tasks:
loop.run_until_complete(asyncio.gather(*tasks))
        sleep_second = random.randint(1, 15)
        print('Sleep for %s seconds' % sleep_second)
        time.sleep(sleep_second)
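# Illustrative sketch -- not part of the original module. PostExecutor.run()
# drains its task list with asyncio.gather() whenever it grows past
# self._waiting_limit, which caps how many posts are fetched concurrently.
# The same batching pattern in isolation (pre-3.10 asyncio style, matching
# the module above; names prefixed with "_" are hypothetical):
if __name__ == "__main__":
    async def _fake_fetch(n):
        await asyncio.sleep(0)
        return n

    _loop = asyncio.get_event_loop()
    _tasks, _limit = [], 4
    for _n in range(10):
        _tasks.append(asyncio.ensure_future(_fake_fetch(_n)))
        if len(_tasks) > _limit:
            _loop.run_until_complete(asyncio.gather(*_tasks))
            _tasks = []
    if _tasks:
        _loop.run_until_complete(asyncio.gather(*_tasks))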
| 2.015625 | 2 |
sandbox/lib/jumpscale/JumpscaleLibsExtra/sal_zos/gateway/dhcp.py | threefoldtech/threebot_prebuilt | 1 | 9664 | from Jumpscale import j
import signal
from .. import templates
DNSMASQ = "/bin/dnsmasq --conf-file=/etc/dnsmasq.conf -d"
class DHCP:
def __init__(self, container, domain, networks):
self.container = container
self.domain = domain
self.networks = networks
def apply_config(self):
dnsmasq = templates.render("dnsmasq.conf", domain=self.domain, networks=self.networks)
self.container.upload_content("/etc/dnsmasq.conf", dnsmasq)
dhcp = templates.render("dhcp", networks=self.networks)
self.container.upload_content("/etc/dhcp", dhcp)
self.stop()
self.container.client.system(DNSMASQ, id="dhcp.{}".format(self.container.name))
# check if command is listening for dhcp
if not j.tools.timer.execute_until(self.is_running, 10):
raise j.exceptions.Base("Failed to run dnsmasq")
def is_running(self):
for port in self.container.client.info.port():
if port["network"] == "udp" and port["port"] == 53:
return True
def stop(self):
for process in self.container.client.process.list():
if "dnsmasq" in process["cmdline"]:
self.container.client.process.kill(process["pid"], signal.SIGKILL)
if not j.tools.timer.execute_until(lambda: not self.is_running(), 10):
raise j.exceptions.Base("Failed to stop DNSMASQ")
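# Illustrative sketch -- not part of the original module. The calls to
# j.tools.timer.execute_until(...) above poll until dnsmasq is (or is no
# longer) listening, giving up after a timeout. A generic version of that
# poll-until pattern looks roughly like this (the name _execute_until is
# hypothetical, not Jumpscale API):
def _execute_until(check, timeout, interval=0.5):
    import time
    deadline = time.time() + timeout
    while time.time() < deadline:
        if check():
            return True
        time.sleep(interval)
    return False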
| 2.234375 | 2 |
answer/a4_type.py | breeze-shared-inc/python_training_01 | 0 | 9665 | hensu_int = 17 #数字
hensu_float = 1.7 #小数点(浮動小数点)
hensu_str = "HelloWorld" #文字列
hensu_bool = True #真偽
hensu_list = [] #リスト
hensu_tuple = () #タプル
hensu_dict = {} #辞書(ディクト)型
print(type(hensu_int))
print(type(hensu_float))
print(type(hensu_str))
print(type(hensu_bool))
print(type(hensu_list))
print(type(hensu_tuple))
print(type(hensu_dict))
| 3.375 | 3 |
LEDdebug/examples/led-demo.py | UrsaLeo/LEDdebug | 0 | 9666 | #!/usr/bin/env python3
"""UrsaLeo LEDdebug board LED demo
Turn the LED's on one at a time, then all off"""
import time
ON = 1
OFF = 0
DELAY = 0.5 # seconds
try:
from LEDdebug import LEDdebug
except ImportError:
try:
import sys
import os
sys.path.append("..")
sys.path.append(os.path.join(os.path.dirname(__file__), '..',
'LEDdebug'))
from LEDdebug import LEDdebug
except ImportError:
print('LEDdebug import failed')
exit(0)
def main():
# Create device
device = LEDdebug()
# Turn on each LED in succession
for led in range(1, 7):
device.set_led(led, ON)
print(f'Turning LED{led} on')
time.sleep(DELAY)
print('Turning all LEDs off')
    # Turn all the lights off before leaving!
device.set_leds(OFF)
if __name__ == '__main__':
main()
| 2.9375 | 3 |
modules/server.py | Nitin-Mane/SARS-CoV-2-xDNN-Classifier | 0 | 9667 | <reponame>Nitin-Mane/SARS-CoV-2-xDNN-Classifier
#!/usr/bin/env python
###################################################################################
##
## Project: COVID -19 xDNN Classifier 2020
## Version: 1.0.0
## Module: Server
## Desription: The COVID -19 xDNN Classifier 2020 server.
## License: MIT
## Copyright: 2021, Asociacion De Investigacion En Inteligencia Artificial Para
## La Leucemia Peter Moss.
## Author: <NAME>
## Maintainer: <NAME>
##
## Modified: 2021-2-19
##
###################################################################################
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files(the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in all
## copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
## SOFTWARE.
##
###################################################################################
import cv2
import json
import jsonpickle
import os
import requests
import time
import numpy as np
import tensorflow as tf
from modules.AbstractServer import AbstractServer
from flask import Flask, request, Response
from io import BytesIO
from PIL import Image
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.vgg16 import preprocess_input
class server(AbstractServer):
""" COVID 19 xDNN Classifier 2020 Server.
This object represents the COVID 19 xDNN Classifier 2020 Server.
"""
def predict(self, req):
""" Classifies an image sent via HTTP. """
if len(req.files) != 0:
img = Image.open(req.files['file'].stream).convert('RGB')
else:
img = Image.open(BytesIO(req.data)).convert('RGB')
img = img.resize((224, 224), Image.ANTIALIAS)
np_img = tf.keras.preprocessing.image.img_to_array(img)
np_img.transpose(1, 2, 0)
#img = keras.preprocessing.image.img_to_array(img)
#img = np.array([img]) # Convert single image to a batch.
img = np.expand_dims(np_img, axis=0)
img = preprocess_input(img)
#prediction = self.predict(img)
#img = img.resize((224, 224), Image.ANTIALIAS)
#img = image.img_to_array(img)
#img = np.expand_dims(img, axis=0)
#img = preprocess_input(img)
#img = img.reshape((1,224,224,3))
return self.model.predict(img)
def request(self, img_path):
""" Sends image to the inference API endpoint. """
self.helpers.logger.info("Sending request for: " + img_path)
_, img_encoded = cv2.imencode('.png', cv2.imread(img_path))
response = requests.post(
self.addr, data=img_encoded.tostring(), headers=self.headers)
response = json.loads(response.text)
return response
def start(self):
""" Starts the server. """
app = Flask(self.helpers.credentials["iotJumpWay"]["name"])
@app.route('/Inference', methods=['POST'])
def Inference():
""" Responds to HTTP POST requests. """
self.mqtt.publish("States", {
"Type": "Prediction",
"Name": self.helpers.credentials["iotJumpWay"]["name"],
"State": "Processing",
"Message": "Processing data"
})
message = ""
prediction = self.predict(request)
print(prediction)
if prediction == 1:
message = "Acute Lymphoblastic Leukemia detected!"
diagnosis = "Positive"
elif prediction == 0:
message = "Acute Lymphoblastic Leukemia not detected!"
diagnosis = "Negative"
self.mqtt.publish("States", {
"Type": "Prediction",
"Name": self.helpers.credentials["iotJumpWay"]["name"],
"State": diagnosis,
"Message": message
})
resp = jsonpickle.encode({
'Response': 'OK',
'Message': message,
'Diagnosis': diagnosis
})
return Response(response=resp, status=200, mimetype="application/json")
app.run(host=self.helpers.credentials["server"]["ip"],
port=self.helpers.credentials["server"]["port"])
def test(self):
""" Tests the trained model via HTTP. """
totaltime = 0
files = 0
tp = 0
fp = 0
tn = 0
fn = 0
self.addr = "http://" + self.helpers.credentials["server"]["ip"] + \
':'+str(self.helpers.credentials["server"]["port"]) + '/Inference'
self.headers = {'content-type': 'image/jpeg'}
for testFile in os.listdir(self.model.testing_dir):
if os.path.splitext(testFile)[1] in self.model.valid:
start = time.time()
prediction = self.request(self.model.testing_dir + "/" + testFile)
print(prediction)
end = time.time()
benchmark = end - start
totaltime += benchmark
msg = ""
status = ""
outcome = ""
if prediction["Diagnosis"] == "Positive" and "Non-Covid" in testFile:
fp += 1
status = "incorrectly"
outcome = "(False Positive)"
elif prediction["Diagnosis"] == "Negative" and "Non-Covid" in testFile:
tn += 1
status = "correctly"
outcome = "(True Negative)"
elif prediction["Diagnosis"] == "Positive" and "Covid" in testFile:
tp += 1
status = "correctly"
outcome = "(True Positive)"
elif prediction["Diagnosis"] == "Negative" and "Covid" in testFile:
fn += 1
status = "incorrectly"
outcome = "(False Negative)"
files += 1
self.helpers.logger.info("COVID-19 xDNN Classifier " + status +
" detected " + outcome + " in " + str(benchmark) + " seconds.")
self.helpers.logger.info("Images Classified: " + str(files))
self.helpers.logger.info("True Positives: " + str(tp))
self.helpers.logger.info("False Positives: " + str(fp))
self.helpers.logger.info("True Negatives: " + str(tn))
self.helpers.logger.info("False Negatives: " + str(fn))
self.helpers.logger.info("Total Time Taken: " + str(totaltime))
| 1.265625 | 1 |
rdr_service/lib_fhir/fhirclient_3_0_0/models/allergyintolerance_tests.py | all-of-us/raw-data-repository | 39 | 9668 | <reponame>all-of-us/raw-data-repository
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 on 2017-03-22.
# 2017, SMART Health IT.
import io
import json
import os
import unittest
from . import allergyintolerance
from .fhirdate import FHIRDate
class AllergyIntoleranceTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("AllergyIntolerance", js["resourceType"])
return allergyintolerance.AllergyIntolerance(js)
def testAllergyIntolerance1(self):
inst = self.instantiate_from("allergyintolerance-example.json")
self.assertIsNotNone(inst, "Must have instantiated a AllergyIntolerance instance")
self.implAllergyIntolerance1(inst)
js = inst.as_json()
self.assertEqual("AllergyIntolerance", js["resourceType"])
inst2 = allergyintolerance.AllergyIntolerance(js)
self.implAllergyIntolerance1(inst2)
def implAllergyIntolerance1(self, inst):
self.assertEqual(inst.assertedDate.date, FHIRDate("2014-10-09T14:58:00+11:00").date)
self.assertEqual(inst.assertedDate.as_json(), "2014-10-09T14:58:00+11:00")
self.assertEqual(inst.category[0], "food")
self.assertEqual(inst.clinicalStatus, "active")
self.assertEqual(inst.code.coding[0].code, "227493005")
self.assertEqual(inst.code.coding[0].display, "Cashew nuts")
self.assertEqual(inst.code.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.criticality, "high")
self.assertEqual(inst.id, "example")
self.assertEqual(inst.identifier[0].system, "http://acme.com/ids/patients/risks")
self.assertEqual(inst.identifier[0].value, "49476534")
self.assertEqual(inst.lastOccurrence.date, FHIRDate("2012-06").date)
self.assertEqual(inst.lastOccurrence.as_json(), "2012-06")
self.assertEqual(inst.note[0].text, "The criticality is high becasue of the observed anaphylactic reaction when challenged with cashew extract.")
self.assertEqual(inst.onsetDateTime.date, FHIRDate("2004").date)
self.assertEqual(inst.onsetDateTime.as_json(), "2004")
self.assertEqual(inst.reaction[0].description, "Challenge Protocol. Severe reaction to subcutaneous cashew extract. Epinephrine administered")
self.assertEqual(inst.reaction[0].exposureRoute.coding[0].code, "34206005")
self.assertEqual(inst.reaction[0].exposureRoute.coding[0].display, "Subcutaneous route")
self.assertEqual(inst.reaction[0].exposureRoute.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.reaction[0].manifestation[0].coding[0].code, "39579001")
self.assertEqual(inst.reaction[0].manifestation[0].coding[0].display, "Anaphylactic reaction")
self.assertEqual(inst.reaction[0].manifestation[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.reaction[0].onset.date, FHIRDate("2012-06-12").date)
self.assertEqual(inst.reaction[0].onset.as_json(), "2012-06-12")
self.assertEqual(inst.reaction[0].severity, "severe")
self.assertEqual(inst.reaction[0].substance.coding[0].code, "1160593")
self.assertEqual(inst.reaction[0].substance.coding[0].display, "cashew nut allergenic extract Injectable Product")
self.assertEqual(inst.reaction[0].substance.coding[0].system, "http://www.nlm.nih.gov/research/umls/rxnorm")
self.assertEqual(inst.reaction[1].manifestation[0].coding[0].code, "64305001")
self.assertEqual(inst.reaction[1].manifestation[0].coding[0].display, "Urticaria")
self.assertEqual(inst.reaction[1].manifestation[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.reaction[1].note[0].text, "The patient reports that the onset of urticaria was within 15 minutes of eating cashews.")
self.assertEqual(inst.reaction[1].onset.date, FHIRDate("2004").date)
self.assertEqual(inst.reaction[1].onset.as_json(), "2004")
self.assertEqual(inst.reaction[1].severity, "moderate")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.type, "allergy")
self.assertEqual(inst.verificationStatus, "confirmed")
| 2.375 | 2 |
jsparse/meijiexia/meijiexia.py | PyDee/Spiders | 6 | 9669 | import time
import random
import requests
from lxml import etree
import pymongo
from .url_file import mjx_weibo, mjx_dy, mjx_ks, mjx_xhs
class DBMongo:
def __init__(self):
self.my_client = pymongo.MongoClient("mongodb://localhost:27017/")
        # connect to the database
self.db = self.my_client["mcn"]
def insert_2_xt(self, success_item, collection_name):
try:
collection = self.db[collection_name]
            collection.insert_one(success_item)  # write the data into MongoDB
print('success!!!')
except:
            print('Failed to write data')
class MJX:
def __init__(self):
self.db = DBMongo()
self.headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Cache-Control': 'max-age=0',
'Connection': 'keep-alive',
'Cookie': 'accessId=defba4d0-9ab2-11e8-b156-7b8f577687be; qimo_seokeywords_defba4d0-9ab2-11e8-b156-7b8f577687be=; href=https%3A%2F%2Fwww.meijiexia.com%2Fmedias-118.html; ci_session=ccb97bb846cd5e0ce6538c2cc8f11ca7abc296ee; Hm_lvt_c96abf7da979015953d1d22702db6de8=1591685037,1592274339,1592278224; qimo_seosource_defba4d0-9ab2-11e8-b156-7b8f577687be=%E7%AB%99%E5%86%85; Hm_lpvt_c96abf7da979015953d1d22702db6de8=1592278238; pageViewNum=34',
'Host': 'www.meijiexia.com',
'Referer': 'https://www.meijiexia.com/medias-118.html',
'Sec-Fetch-Dest': 'document',
'Sec-Fetch-Mode': 'navigate',
'Sec-Fetch-Site': 'same-origin',
'Sec-Fetch-User': '?1',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36',
}
def get_response(self, url, collection):
proxy = self.get_proxy()
proxies = {
"http": "http://{}:{}".format(proxy.get('IP'), proxy.get('Port')),
"https": "http://{}:{}".format(proxy.get('IP'), proxy.get('Port')),
}
ret = requests.get(url, headers=self.headers, proxies=proxies)
response = etree.HTML(ret.text)
tr_list = response.xpath('//tbody[@id="qu-con"]/tr')
for tr in tr_list:
item = dict()
user_id = tr.xpath('./td[@class="td1"]/input/@value')[0]
nick_name = tr.xpath('./td[@class="td2"]/div[@class="itemMsg"]//a/text()')[0]
place = tr.xpath('./td[@class="td3"]/text()')[0]
fans_num = tr.xpath('./td[@class="td6"]/p[@class="num"]/text()')[0]
price_list = tr.xpath('./td[@class="td4"]/p')
for price_element in price_list:
classify = price_element.xpath(
'./span[@class="money"]/preceding-sibling::span[1]/text()')[0]
price = price_element.xpath('./span[@class="money"]/text()')[0]
item[classify.strip()] = price.strip()
item['fans_num'] = fans_num.strip()
item['user_id'] = user_id.strip()
item['nick_name'] = nick_name.strip()
item['place'] = place.strip()
item['plant'] = collection.split('mjx_')[1]
self.db.insert_2_xt(item, collection)
@staticmethod
def get_proxy():
proxy = [{"IP": "192.168.3.11", "Port": 21730}]
return random.choice(proxy)
def run(self):
urls = ''
for item in {'mjx_weibo': mjx_weibo, 'mjx_dy': mjx_dy, 'mjx_ks': mjx_ks, 'mjx_xhs': mjx_xhs}.keys():
if item == 'mjx_weibo':
urls = mjx_weibo
if item == 'mjx_dy':
urls = mjx_dy
if item == 'mjx_ks':
urls = mjx_ks
if item == 'mjx_xhs':
urls = mjx_xhs
for url in urls:
time.sleep(3)
print(url)
self.get_response(url, item)
if __name__ == '__main__':
mjx = MJX()
mjx.run()
| 2.515625 | 3 |
MLModules/ABD/B_PCAQDA.py | jamster112233/ICS_IDS | 0 | 9670 | import numpy as np
from keras.utils import np_utils
import pandas as pd
import sys
from sklearn.preprocessing import LabelEncoder
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis as QDA
from sklearn.decomposition import PCA
import os
from sklearn.externals import joblib
from sklearn.metrics import f1_score
trainName = sys.argv[1]
testName = sys.argv[2]
# Create an object called iris with the iris Data
dftrain = pd.read_csv(filepath_or_buffer=trainName, header=None, sep=',')
dftest = pd.read_csv(filepath_or_buffer=testName, header=None, sep=',')
cols = ['Proto']
for i in range(1,dftrain.shape[1]):
cols.append('Byte' + str(i))
dftrain.columns=cols
dftrain.dropna(how="all", inplace=True)
dftrain.tail()
dftest.columns=cols
dftest.dropna(how="all", inplace=True)
dftest.tail()
Xtrain = dftrain.ix[:,1:dftrain.shape[1]].values
Ytrain = dftrain.ix[:,0].values
Xtest = dftest.ix[:,1:dftrain.shape[1]].values
Ytest = dftest.ix[:,0].values
encoder = LabelEncoder()
encoder.fit(Ytrain)
encYtrain = encoder.transform(Ytrain)
encoder = LabelEncoder()
encoder.fit(Ytest)
encYtest = encoder.transform(Ytest)
directory = "models/ABD/QDA/"
if not os.path.exists(directory):
os.makedirs(directory)
logfile = directory + "log-0.csv"
with open(logfile, "w") as file:
file.write("PCAlevel,acc,val_acc,f1\n")
fscores = []
accs = []
for q in xrange(1,151):
pca = PCA(n_components=q)
Xtrain_pca = pca.fit_transform(Xtrain)
Xtest_pca = pca.transform(Xtest)
clf = QDA(priors=None, reg_param=0.0)
clf.fit(Xtrain_pca, encYtrain)
trainPred = clf.predict(Xtrain_pca)
testPred = clf.predict(Xtest_pca)
score = 0.0
for i in xrange(0, len(trainPred)):
if trainPred[i] == encYtrain[i]:
score += 1
trainAcc = float(score) / len(trainPred)
score = 0.0
for i in xrange(0, len(testPred)):
if testPred[i] == encYtest[i]:
score += 1
testAcc = float(score) / len(testPred)
f1 = f1_score(encYtest, testPred)
accs.append(testAcc)
fscores.append(f1)
print("Train " + str(trainAcc))
print("Test " + str(testAcc))
print("F1 " + str(f1))
with open(logfile, "a") as file:
file.write(str(q) + "," + str(trainAcc) + "," + str(testAcc) + "," + str(f1) + "\n")
if q == 2:
joblib.dump(clf, 'QDA2.pkl')
print("Val Acc max" + str(max(accs)))
print("FMAX " + str(max(fscores)))
# print(str(q) + ":" + str((float(score)/len(classesPred)*100)) + "%")
#
# preds = classesPred
# if(len(preds) > 0):
# preds = np.array(list(encoder.inverse_transform(preds)))
#
# df = pd.crosstab(dftest['Proto'], preds, rownames=['Actual Protocol'], colnames=['Predicted Protocol'])
# df.to_csv('ConfusionMatrixLDA.csv')
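# Illustrative sketch -- not part of the original script. The loop above chains
# PCA and QDA by hand for each component count; the same search can be phrased
# as a scikit-learn Pipeline plus GridSearchCV. Synthetic data keeps the sketch
# self-contained.
if __name__ == "__main__":
    from sklearn.pipeline import Pipeline
    from sklearn.model_selection import GridSearchCV
    from sklearn.datasets import make_classification

    X_demo, y_demo = make_classification(n_samples=200, n_features=20, random_state=0)
    pipe = Pipeline([("pca", PCA()), ("qda", QDA())])
    search = GridSearchCV(pipe, {"pca__n_components": [2, 5, 10]}, cv=3)
    search.fit(X_demo, y_demo)
    print(search.best_params_, search.best_score_)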
| 2.328125 | 2 |
GR2-Save-Loader.py | 203Null/Gravity-Rush-2-Save-Loader | 2 | 9671 | <reponame>203Null/Gravity-Rush-2-Save-Loader
import struct
import json
from collections import OrderedDict
file_path = "data0002.bin"
show_offset = True
show_hash = False
loaded_data = 0
def unpack(upstream_data_set):
global loaded_data
loaded_data = loaded_data + 1
currentCursor = file.tell()
print(hex(file.tell()))
file.seek(int.from_bytes(file.read(4), byteorder='little'), 0)
variable_name = file.read(200).split(b'\x00')[0].decode('UTF8') #Use UTF8 because some strings are in Japanese
print(hex(file.tell()))
print(variable_name)
file.seek(currentCursor + 4, 0)
type = int.from_bytes(file.read(4), byteorder='little')
data_location = file.tell()
if type == 0x08: # List
list_length = int.from_bytes(file.read(4), byteorder='little')
name_hash = file.read(4).hex()
data_location = file.tell()
value = {}
for i in range(0, list_length):
unpack(value)
value = OrderedDict(sorted(value.items()))
else:
if type % 0x10 == 0x0b: # String
string_length = int.from_bytes(file.read(4), byteorder='little') - 1
data_location = type // 0x10
file.seek(data_location, 0)
try:
value = file.read(string_length).decode('UTF8')
except:
value = "ERROR EXTRACTING STRING"
file.seek(currentCursor + 0x0c, 0)
elif type == 0x09: # Float
value = struct.unpack('f', file.read(4))[0]
elif type == 0x0C: # Boolean
value = int.from_bytes(file.read(1), byteorder='little') > 0
file.seek(3, 1)
else:
value = file.read(4).hex()
print("Warring!!! Unknow type!!! %s at %s with value %s" % (hex(type), hex(file.tell()-8), value))
print()
name_hash = file.read(4).hex()
if variable_name == None:
variable_name = hex(data_location)
else:
if show_hash:
            variable_name = "%s %s" % (variable_name, name_hash)
if show_offset:
            variable_name = "%s %s" % (variable_name, hex(data_location))
print(value)
upstream_data_set[variable_name] = value
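# Illustrative note -- not part of the original script. unpack() decodes
# little-endian integers and IEEE-754 single-precision floats, for example:
assert int.from_bytes(b'\x08\x00\x00\x00', byteorder='little') == 8  # type tag 0x08 (list)
assert struct.unpack('<f', b'\x00\x00\x80\x3f')[0] == 1.0            # 4-byte float -> 1.0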
file = open(file_path, mode='rb')
data = file.read()
data_set = OrderedDict()
if len(data) > 0x40 and data[0:4] == b'ggdL':
file.seek(0x0c, 0)
numOfData = int.from_bytes(file.read(4), byteorder='little')
while loaded_data < numOfData:
unpack(data_set)
print()
print(data_set)
print()
print("Complete with %i/%i data" % (loaded_data, numOfData))
with open(r"%s.txt" % (file_path.split('.')[0]), 'w', encoding='utf-8') as json_file:
json.dump(data_set, json_file, indent=4, ensure_ascii=False)
else:
print("File Incorrect") | 2.546875 | 3 |
python/Recursion.py | itzsoumyadip/vs | 1 | 9672 | ## to change recursion limit
import sys
print(sys.getrecursionlimit()) #Return the current value of the recursion limit
#1000
## change the limit
sys.setrecursionlimit(2000) # change value of the recursion limit
#2000
i=0
def greet():
global i
i+=1
    print('hello', i)
greet()
greet() # prints hello roughly 1996 times, then a RecursionError is raised
| 3.765625 | 4 |
pages/tests/test_views.py | andywar65/starter-fullstack | 0 | 9673 | from django.test import TestCase, override_settings
from django.urls import reverse
from pages.models import Article, HomePage
@override_settings(USE_I18N=False)
class PageViewTest(TestCase):
@classmethod
def setUpTestData(cls):
print("\nTest page views")
# Set up non-modified objects used by all test methods
HomePage.objects.create(title="Title")
Article.objects.create(title="First", date="2022-04-09")
def test_homepage_view(self):
response = self.client.get(reverse("home"))
self.assertEqual(response.status_code, 200)
print("\n-Test Homepage status 200")
self.assertTemplateUsed(response, "pages/home.html")
print("\n-Test Homepage template")
def test_no_homepage(self):
HomePage.objects.all().delete()
response = self.client.get(reverse("home"))
self.assertEqual(response.status_code, 404)
print("\n-Test Homepage status 404")
def test_article_template(self):
response = self.client.get(
reverse(
"pages:article_detail",
kwargs={"year": 2022, "month": 4, "day": 9, "slug": "first"},
)
)
self.assertEqual(response.status_code, 200)
print("\n-Test Article status 200")
self.assertTemplateUsed(response, "pages/article_detail.html")
print("\n-Test Article template")
| 2.25 | 2 |
poco/services/batch/server.py | sunliwen/poco | 0 | 9674 | <gh_stars>0
#!/usr/bin/env python
import logging
import sys
sys.path.append("../../")
sys.path.append("pylib")
import time
import datetime
import pymongo
import uuid
import os
import subprocess
import os.path
import settings
from common.utils import getSiteDBCollection
sys.path.insert(0, "../../")
class LoggingManager:
def __init__(self):
self.h_console = None
self.h_file = None
logging.getLogger('').setLevel(logging.INFO)
def reconfig_h_console(self, site_id, calculation_id):
if self.h_console is not None:
self.h_console.flush()
logging.getLogger('').removeHandler(self.h_console)
self.h_console = logging.StreamHandler()
self.h_console.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s|" + calculation_id +
"|%(levelname)s|%(name)s|%(message)s", datefmt="%Y-%m-%d %H:%M:%S")
self.h_console.setFormatter(formatter)
logging.getLogger('').addHandler(self.h_console)
def getLogFilePath(self, site_id, calculation_id):
site_log_dir = os.path.join(settings.log_dir, site_id)
if not os.path.isdir(site_log_dir):
os.makedirs(site_log_dir)
formatted_date_time = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
log_file_name = "%s_%s.log" % (formatted_date_time, calculation_id)
log_file_path = os.path.join(site_log_dir, log_file_name)
return log_file_path
def reconfig_h_file(self, site_id, calculation_id):
if self.h_file is not None:
self.h_file.flush()
self.h_file.close()
logging.getLogger('').removeHandler(self.h_file)
self.h_file = logging.FileHandler(
self.getLogFilePath(site_id, calculation_id))
self.h_file.setLevel(logging.INFO)
formatter = logging.Formatter(
"%(asctime)s|%(levelname)s|%(name)s|%(message)s", datefmt="%Y-%m-%d %H:%M:%S")
self.h_file.setFormatter(formatter)
logging.getLogger('').addHandler(self.h_file)
def reconfig(self, site_id, calculation_id):
self.reconfig_h_console(site_id, calculation_id)
self.reconfig_h_file(site_id, calculation_id)
logging_manager = LoggingManager()
def getLogger():
return logging.getLogger("Batch Server")
def getBaseWorkDir(site_id, calculation_id):
site_work_dir = os.path.join(settings.work_dir, site_id)
formatted_date_time = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
calculation_work_dir_name = "%s_%s" % (formatted_date_time, calculation_id)
calculation_work_dir_path = os.path.join(
site_work_dir, calculation_work_dir_name)
os.makedirs(calculation_work_dir_path)
return calculation_work_dir_path
def getConnection():
if(settings.replica_set):
return pymongo.MongoReplicaSetClient(settings.mongodb_host, replicaSet=settings.replica_set)
else:
return pymongo.Connection(settings.mongodb_host)
connection = getConnection()
class ShellExecutionError(Exception):
pass
class BaseFlow:
def __init__(self, name):
self.name = name
self.jobs = []
self.dependencies = []
def dependOn(self, flow):
self.parent = flow
flow.dependencies.append(self)
def getWorkDir(self):
work_dir = os.path.join(BASE_WORK_DIR, self.name)
if not os.path.exists(work_dir):
os.makedirs(work_dir)
return work_dir
def getWorkFile(self, file_name):
return os.path.join(self.getWorkDir(), file_name)
def __call__(self):
global CALC_SUCC
writeFlowBegin(SITE_ID, self.__class__.__name__)
if self.__class__.__name__ in DISABLEDFLOWS:
getLogger().info("Flow Skipped: %s" % self.__class__.__name__)
writeFlowEnd(SITE_ID, self.__class__.__name__,
is_successful=True, is_skipped=True)
return True
else:
for job_callable in self.jobs:
if not self._execJob(job_callable):
writeFlowEnd(
SITE_ID, self.__class__.__name__, is_successful=False, is_skipped=False,
err_msg="SOME_JOBS_FAILED")
CALC_SUCC = False
return False
writeFlowEnd(SITE_ID, self.__class__.__name__,
is_successful=True, is_skipped=False)
# execute downlines
for dependency in self.dependencies:
dependency()
return True
def _exec_shell(self, command):
getLogger().info("Execute %s" % command)
#ret_code = os.system(command)
# if ret_code != 0:
# raise ShellExecutionError("Shell Execution Failed, ret_code=%s" % ret_code)
ret_code = subprocess.call(command, shell=True)
if ret_code != 0:
getLogger().error("Failed %s" % sys.stderr)
raise ShellExecutionError(
"Shell Execution Failed, ret_code=%s" % ret_code)
def _execJob(self, callable):
try:
getLogger().info("Start Job: %s.%s" %
(self.__class__.__name__, callable.__name__))
callable()
getLogger().info("Job Succ: %s.%s" %
(self.__class__.__name__, callable.__name__))
return True
except:
            getLogger().critical(
                "An Exception happened while running Job: %s" % callable,
                exc_info=True)
# TODO: send message (email, sms)
# TODO: record exception info.
writeFailedJob(SITE_ID, self.__class__.__name__, callable.__name__)
return False
class PreprocessingFlow(BaseFlow):
def __init__(self):
BaseFlow.__init__(self, "preprocessing")
self.jobs += [self.do_backfill,
self.do_reverse_reversed_backfilled_raw_logs]
def do_backfill(self):
from preprocessing import backfiller
last_ts = None # FIXME: load correct last_ts from somewhere
bf = backfiller.BackFiller(connection, SITE_ID, last_ts,
self.getWorkFile("reversed_backfilled_raw_logs"))
last_ts = bf.start() # FIXME: save last_ts somewhere
def do_reverse_reversed_backfilled_raw_logs(self):
input_path = self.getWorkFile("reversed_backfilled_raw_logs")
output_path = self.getWorkFile("backfilled_raw_logs")
self._exec_shell("%s <%s >%s" %
(settings.tac_command, input_path, output_path))
class HiveBasedStatisticsFlow(BaseFlow):
def __init__(self):
BaseFlow.__init__(self, "hive-based-statistics")
self.jobs += [self.do_hive_based_calculations]
# Begin Hive Based Calculations
def do_hive_based_calculations(self):
from statistics.hive_based_calculations import hive_based_calculations
backfilled_raw_logs_path = self.parent.getWorkFile(
"backfilled_raw_logs")
hive_based_calculations(
connection, SITE_ID, self.getWorkDir(), backfilled_raw_logs_path)
#
# End Hive Based Calculations
class BaseSimilarityCalcFlow(BaseFlow):
def __init__(self, type):
BaseFlow.__init__(self, "similarities-calc:%s" % type)
self.type = type
self.jobs += self.getExtractUserItemMatrixJobs(
) + [self.do_sort_user_item_matrix,
self.do_calc_item_prefer_count,
self.do_calc_user_count,
self.do_emit_cooccurances,
self.do_sort_cooccurances,
self.do_count_cooccurances,
self.do_format_cooccurances_counts,
self.do_calc_item_similarities,
self.do_make_item_similarities_bi_directional,
self.do_sort_item_similarities_bi_directional,
self.do_extract_top_n,
self.do_upload_item_similarities_result]
def do_sort_user_item_matrix(self):
input_path = self.getWorkFile("user_item_matrix")
output_path = self.getWorkFile("user_item_matrix_sorted")
self._exec_shell("sort -T /cube/services/batch/temp %s > %s" %
(input_path, output_path))
def do_calc_item_prefer_count(self):
if SITE["algorithm_type"] == "llh":
input_path = self.getWorkFile("user_item_matrix_sorted")
output_path = self.getWorkFile("item_prefer_count")
self._exec_shell(
"cut -d , -f 2 %s | sort -T /cube/services/batch/temp | uniq -c > %s" %
(input_path, output_path))
def do_calc_user_count(self):
if SITE["algorithm_type"] == "llh":
input_path = self.getWorkFile("user_item_matrix_sorted")
output_path = self.getWorkFile("user_count")
self._exec_shell("cut -d , -f 1 %s | uniq | wc -l > %s" %
(input_path, output_path))
def do_emit_cooccurances(self):
from similarity_calculation.amazon.emit_cooccurances import emit_cooccurances
input_path = self.getWorkFile("user_item_matrix_sorted")
output_path = self.getWorkFile("cooccurances_not_sorted")
emit_cooccurances(input_path, output_path)
def do_sort_cooccurances(self):
input_path = self.getWorkFile("cooccurances_not_sorted")
output_path = self.getWorkFile("cooccurances_sorted")
self._exec_shell("sort -T /cube/services/batch/temp %s > %s" %
(input_path, output_path))
def do_count_cooccurances(self):
input_path = self.getWorkFile("cooccurances_sorted")
output_path = self.getWorkFile("cooccurances_counts_raw")
self._exec_shell("uniq -c %s > %s" % (input_path, output_path))
def do_format_cooccurances_counts(self):
from similarity_calculation.amazon.format_item_similarities import format_item_similarities
input_path = self.getWorkFile("cooccurances_counts_raw")
output_path = self.getWorkFile("cooccurances_counts_formatted")
format_item_similarities(input_path, output_path)
def do_calc_item_similarities(self):
if SITE["algorithm_type"] == "llh":
from similarity_calculation.loglikelihood.calc_loglikelihood import calc_loglikelihood
cooccurances_counts_path = self.getWorkFile(
"cooccurances_counts_formatted")
user_counts_path = self.getWorkFile("user_count")
item_prefer_count_path = self.getWorkFile("item_prefer_count")
output_path = self.getWorkFile("item_similarities_formatted")
calc_loglikelihood(cooccurances_counts_path,
user_counts_path, item_prefer_count_path, output_path)
else:
input_path = self.getWorkFile("cooccurances_counts_formatted")
output_path = self.getWorkFile("item_similarities_formatted")
self._exec_shell("mv %s %s" % (input_path, output_path))
def do_make_item_similarities_bi_directional(self):
from similarity_calculation.make_similarities_bidirectional import make_similarities_bidirectional
input_path = self.getWorkFile("item_similarities_formatted")
output_path = self.getWorkFile("item_similarities_bi_directional")
make_similarities_bidirectional(input_path, output_path)
def do_sort_item_similarities_bi_directional(self):
input_path = self.getWorkFile("item_similarities_bi_directional")
output_path = self.getWorkFile(
"item_similarities_bi_directional_sorted")
self._exec_shell("sort -T /cube/services/batch/temp %s > %s" %
(input_path, output_path))
def do_extract_top_n(self):
from similarity_calculation.extract_top_n import extract_top_n
input_path = self.getWorkFile(
"item_similarities_bi_directional_sorted")
output_path = self.getWorkFile("item_similarities_top_n")
n = 20
extract_top_n(input_path, output_path, n)
def do_upload_item_similarities_result(self):
from common.utils import UploadItemSimilarities
input_path = self.getWorkFile("item_similarities_top_n")
uis = UploadItemSimilarities(connection, SITE_ID, self.type)
uis(input_path)
class VSimiliarityCalcFlow(BaseSimilarityCalcFlow):
def __init__(self):
BaseSimilarityCalcFlow.__init__(self, "V")
def getExtractUserItemMatrixJobs(self):
return [self.do_extract_user_item_matrix,
self.do_de_duplicate_user_item_matrix]
def do_extract_user_item_matrix(self):
from preprocessing.extract_user_item_matrix import v_extract_user_item_matrix
input_path = self.parent.getWorkFile("backfilled_raw_logs")
output_path = self.getWorkFile("user_item_matrix_maybe_dup")
v_extract_user_item_matrix(input_path, output_path)
def do_de_duplicate_user_item_matrix(self):
input_path = self.getWorkFile("user_item_matrix_maybe_dup")
output_path = self.getWorkFile("user_item_matrix")
self._exec_shell("sort -T /cube/services/batch/temp < %s | uniq > %s" %
(input_path, output_path))
class PLOSimilarityCalcFlow(BaseSimilarityCalcFlow):
def __init__(self):
BaseSimilarityCalcFlow.__init__(self, "PLO")
def getExtractUserItemMatrixJobs(self):
return [self.do_extract_user_item_matrix,
self.do_de_duplicate_user_item_matrix]
def do_extract_user_item_matrix(self):
from preprocessing.extract_user_item_matrix import plo_extract_user_item_matrix
input_path = self.parent.getWorkFile("backfilled_raw_logs")
output_path = self.getWorkFile("user_item_matrix_maybe_dup")
plo_extract_user_item_matrix(input_path, output_path)
def do_de_duplicate_user_item_matrix(self):
input_path = self.getWorkFile("user_item_matrix_maybe_dup")
output_path = self.getWorkFile("user_item_matrix")
self._exec_shell("sort -T /cube/services/batch/temp < %s | uniq > %s" %
(input_path, output_path))
class BuyTogetherSimilarityFlow(BaseSimilarityCalcFlow):
def __init__(self):
BaseSimilarityCalcFlow.__init__(self, "BuyTogether")
def getExtractUserItemMatrixJobs(self):
return [self.do_extract_user_item_matrix,
self.do_de_duplicate_user_item_matrix]
def do_extract_user_item_matrix(self):
from preprocessing.extract_user_item_matrix import buytogether_extract_user_item_matrix
input_path = self.parent.getWorkFile("backfilled_raw_logs")
output_path = self.getWorkFile("user_item_matrix_maybe_dup")
buytogether_extract_user_item_matrix(input_path, output_path)
def do_de_duplicate_user_item_matrix(self):
input_path = self.getWorkFile("user_item_matrix_maybe_dup")
output_path = self.getWorkFile("user_item_matrix")
self._exec_shell("sort -T /cube/services/batch/temp < %s | uniq > %s" %
(input_path, output_path))
class ViewedUltimatelyBuyFlow(BaseFlow):
def __init__(self):
BaseFlow.__init__(self, "ViewedUltimatelyBuy")
self.jobs += [self.do_extract_user_view_buy_logs,
self.do_sort_user_view_buy_logs,
self.do_pair_view_buy,
self.count_pairs,
self.do_extract_user_item_matrix,
self.do_de_duplicate_user_item_matrix,
self.count_item_view,
self.upload_viewed_ultimately_buy]
def do_extract_user_view_buy_logs(self):
from viewed_ultimately_buy.extract_user_view_buy_logs import extract_user_view_buy_logs
input_path = self.parent.getWorkFile("backfilled_raw_logs")
output_path = self.getWorkFile("user_view_buy_logs")
extract_user_view_buy_logs(input_path, output_path)
def do_sort_user_view_buy_logs(self):
input_path = self.getWorkFile("user_view_buy_logs")
output_path = self.getWorkFile("user_view_buy_logs_sorted")
self._exec_shell("sort -T /cube/services/batch/temp <%s >%s" %
(input_path, output_path))
def do_pair_view_buy(self):
from viewed_ultimately_buy.pair_view_buy import pair_view_buy
input_path = self.getWorkFile("user_view_buy_logs_sorted")
output_path = self.getWorkFile("view_buy_pairs")
pair_view_buy(input_path, output_path)
def count_pairs(self):
input_path = self.getWorkFile("view_buy_pairs")
output_path = self.getWorkFile("view_buy_pairs_counted")
self._exec_shell("sort -T /cube/services/batch/temp <%s | uniq -c >%s" %
(input_path, output_path))
def do_extract_user_item_matrix(self):
from preprocessing.extract_user_item_matrix import v_extract_user_item_matrix
input_path = self.parent.getWorkFile("backfilled_raw_logs")
output_path = self.getWorkFile("user_item_matrix_maybe_dup")
v_extract_user_item_matrix(input_path, output_path)
def do_de_duplicate_user_item_matrix(self):
input_path = self.getWorkFile("user_item_matrix_maybe_dup")
output_path = self.getWorkFile("user_item_matrix")
self._exec_shell("sort -T /cube/services/batch/temp < %s | uniq > %s" %
(input_path, output_path))
def count_item_view(self):
# FIXME a hack
input_path = self.getWorkFile("user_item_matrix")
output_path = self.getWorkFile("item_view_times")
self._exec_shell(
"cut -d , -f 2 <%s | sort -T /cube/services/batch/temp | uniq -c >%s" %
(input_path, output_path))
def upload_viewed_ultimately_buy(self):
from viewed_ultimately_buy.upload_viewed_ultimately_buy import upload_viewed_ultimately_buy
item_view_times_path = self.getWorkFile("item_view_times")
view_buy_pairs_counted_path = self.getWorkFile(
"view_buy_pairs_counted")
upload_viewed_ultimately_buy(
connection, SITE_ID, item_view_times_path, view_buy_pairs_counted_path)
class EDMRelatedPreprocessingFlow(BaseFlow):
def __init__(self):
BaseFlow.__init__(self, "ViewedUltimatelyBuy")
self.jobs += [self.do_update_user_orders_collection,
self.do_generate_edm_emailing_list]
def do_update_user_orders_collection(self):
from edm_calculations import doUpdateUserOrdersCollection
doUpdateUserOrdersCollection(connection, SITE_ID)
def do_generate_edm_emailing_list(self):
from edm_calculations import generateEdmEmailingList
generateEdmEmailingList(connection, SITE_ID)
class BeginFlow(BaseFlow):
def __init__(self):
BaseFlow.__init__(self, "Root")
self.jobs += [self.begin]
def begin(self):
pass
# TODO: removed items' similarities should also be removed.
begin_flow = BeginFlow()
preprocessing_flow = PreprocessingFlow()
preprocessing_flow.dependOn(begin_flow)
hive_based_statistics_flow = HiveBasedStatisticsFlow()
hive_based_statistics_flow.dependOn(preprocessing_flow)
v_similarity_calc_flow = VSimiliarityCalcFlow()
v_similarity_calc_flow.dependOn(preprocessing_flow)
plo_similarity_calc_flow = PLOSimilarityCalcFlow()
plo_similarity_calc_flow.dependOn(preprocessing_flow)
buy_together_similarity_flow = BuyTogetherSimilarityFlow()
buy_together_similarity_flow.dependOn(preprocessing_flow)
viewed_ultimately_buy_flow = ViewedUltimatelyBuyFlow()
viewed_ultimately_buy_flow.dependOn(preprocessing_flow)
#edm_related_preprocessing_flow = EDMRelatedPreprocessingFlow()
# edm_related_preprocessing_flow.dependOn(preprocessing_flow)
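# The helpers below persist per-run progress (per-flow begin/end, failures,
# overall result) in each site's "calculation_records" collection.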
def createCalculationRecord(site_id):
calculation_id = str(uuid.uuid4())
record = {
"calculation_id": calculation_id, "begin_datetime": datetime.datetime.now(),
"flows": {}}
calculation_records = getSiteDBCollection(
connection, site_id, "calculation_records")
calculation_records.save(record)
return calculation_id
def getCalculationRecord(site_id, calculation_id):
calculation_records = getSiteDBCollection(
connection, site_id, "calculation_records")
return calculation_records.find_one({"calculation_id": calculation_id})
def updateCalculationRecord(site_id, record):
calculation_records = getSiteDBCollection(
connection, site_id, "calculation_records")
calculation_records.save(record)
def writeFailedJob(site_id, flow_name, failed_job_name):
record = getCalculationRecord(SITE_ID, CALCULATION_ID)
flow_record = record["flows"][flow_name]
flow_record["failed_job_name"] = failed_job_name
updateCalculationRecord(SITE_ID, record)
def writeFlowBegin(site_id, flow_name):
record = getCalculationRecord(SITE_ID, CALCULATION_ID)
logging.info("FlowBegin: %s" % (flow_name, ))
record["flows"][flow_name] = {"begin_datetime": datetime.datetime.now()}
updateCalculationRecord(SITE_ID, record)
def writeFlowEnd(site_id, flow_name, is_successful, is_skipped, err_msg=None):
record = getCalculationRecord(SITE_ID, CALCULATION_ID)
logging.info("FlowEnd: %s" % (flow_name, ))
flow_record = record["flows"][flow_name]
flow_record["end_datetime"] = datetime.datetime.now()
flow_record["is_successful"] = is_successful
flow_record["is_skipped"] = is_skipped
if not is_successful:
flow_record["err_msg"] = err_msg
updateCalculationRecord(SITE_ID, record)
def writeCalculationEnd(site_id, is_successful, err_msg=None):
record = getCalculationRecord(SITE_ID, CALCULATION_ID)
record["end_datetime"] = datetime.datetime.now()
record["is_successful"] = is_successful
if not is_successful:
record["err_msg"] = err_msg
updateCalculationRecord(SITE_ID, record)
def getManualCalculationSites():
result = []
for site in loadSites(connection):
manual_calculation_list = connection[
"tjb-db"]["manual_calculation_list"]
record_in_db = manual_calculation_list.find_one(
{"site_id": site["site_id"]})
if record_in_db is not None:
result.append(site)
return result
def updateSiteLastUpdateTs(site_id):
sites = connection["tjb-db"]["sites"]
sites.update({"site_id": site_id},
{"$set": {"last_update_ts": time.time()}})
def is_time_okay_for_automatic_calculation():
now = datetime.datetime.now()
return now.hour >= 0 and now.hour < 6
def loadSites(connection, site_ids=None):
c_sites = connection["tjb-db"]["sites"]
if site_ids:
return [site for site in c_sites.find({'available': 'on'}) if site["site_id"] in site_ids]
else:
return [site for site in c_sites.find({'available': 'on'})]
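# Run one calculation round for a site: manually requested calculations are
# always processed, while automatic runs additionally require the allowed time
# window and the site's calc_interval to have elapsed since the last update.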
def workOnSite(site, is_manual_calculation=False):
calculation_result = None
# Pop a job
manual_calculation_list = connection["tjb-db"]["manual_calculation_list"]
record_in_db = manual_calculation_list.find_one(
{"site_id": site["site_id"]})
if record_in_db is not None:
manual_calculation_list.remove(record_in_db)
# Proceed the job
now = time.time()
is_time_interval_okay_for_auto = (site.get("last_update_ts", None) is None
or now - site.get("last_update_ts") > site["calc_interval"])
# print site["site_id"], is_time_interval_okay_for_auto,
# is_time_okay_for_automatic_calculation()
is_automatic_calculation_okay = is_time_okay_for_automatic_calculation(
) and is_time_interval_okay_for_auto
if is_manual_calculation or is_automatic_calculation_okay:
global SITE
global SITE_ID
global DISABLEDFLOWS
global CALCULATION_ID
global CALC_SUCC
global BASE_WORK_DIR
SITE = site
SITE_ID = site["site_id"]
DISABLEDFLOWS = site.get("disabledFlows", [])
CALC_SUCC = True
CALCULATION_ID = createCalculationRecord(SITE_ID)
logging_manager.reconfig(SITE_ID, CALCULATION_ID)
BASE_WORK_DIR = getBaseWorkDir(SITE_ID, CALCULATION_ID)
try:
try:
getLogger().info("BEGIN CALCULATION ON:%s, CALCULATION_ID:%s" %
(SITE_ID, CALCULATION_ID))
# Begin workflow to do calculations
begin_flow()
writeCalculationEnd(
SITE_ID, CALC_SUCC, err_msg="SOME_FLOWS_FAILED")
if CALC_SUCC:
calculation_result = "SUCC"
else:
calculation_result = "FAIL"
except:
getLogger().critical("Unexpected Exception:", exc_info=True)
writeCalculationEnd(SITE_ID, False, "UNEXPECTED_EXCEPTION")
calculation_result = "FAIL"
finally:
getLogger(
).info("END CALCULATION ON:%s, RESULT:%s, CALCULATION_ID:%s" %
(SITE_ID, calculation_result, CALCULATION_ID))
# FIXME: save last_update_ts
updateSiteLastUpdateTs(site["site_id"])
return calculation_result
def workOnSiteWithRetries(site, is_manual_calculation=False, max_attempts=2):
current_attempts = 0
while current_attempts < max_attempts:
calculation_result = workOnSite(site, is_manual_calculation)
if calculation_result != "FAIL":
break
current_attempts += 1
if __name__ == "__main__":
os.environ["PATH"] = "%s:%s" % (getattr(settings, "extra_shell_path", ""), os.environ["PATH"])
while True:
#site_ids = ["test_with_gdian_data"]
for site in loadSites(connection):
            for manual_site in getManualCalculationSites():
                workOnSiteWithRetries(manual_site, is_manual_calculation=True)
workOnSiteWithRetries(site)
sleep_seconds = 1
time.sleep(sleep_seconds)
| 2.15625 | 2 |
tests/integration/basket/model_tests.py | makielab/django-oscar | 0 | 9675 | from decimal import Decimal as D
from django.test import TestCase
from oscar.apps.basket.models import Basket
from oscar.apps.partner import strategy
from oscar.test import factories
from oscar.apps.catalogue.models import Option
class TestAddingAProductToABasket(TestCase):
def setUp(self):
self.basket = Basket()
self.basket.strategy = strategy.Default()
self.product = factories.create_product()
self.record = factories.create_stockrecord(
currency='GBP',
product=self.product, price_excl_tax=D('10.00'))
self.stockinfo = factories.create_stockinfo(self.record)
self.basket.add(self.product)
def test_creates_a_line(self):
self.assertEqual(1, self.basket.num_lines)
def test_sets_line_prices(self):
line = self.basket.lines.all()[0]
self.assertEqual(line.price_incl_tax, self.stockinfo.price.incl_tax)
self.assertEqual(line.price_excl_tax, self.stockinfo.price.excl_tax)
def test_means_another_currency_product_cannot_be_added(self):
product = factories.create_product()
factories.create_stockrecord(
currency='USD', product=product, price_excl_tax=D('20.00'))
with self.assertRaises(ValueError):
self.basket.add(product)
class TestANonEmptyBasket(TestCase):
def setUp(self):
self.basket = Basket()
self.basket.strategy = strategy.Default()
self.product = factories.create_product()
self.record = factories.create_stockrecord(
self.product, price_excl_tax=D('10.00'))
self.stockinfo = factories.create_stockinfo(self.record)
self.basket.add(self.product, 10)
def test_can_be_flushed(self):
self.basket.flush()
self.assertEqual(self.basket.num_items, 0)
def test_returns_correct_product_quantity(self):
self.assertEqual(10, self.basket.product_quantity(
self.product))
def test_returns_correct_line_quantity_for_existing_product_and_stockrecord(self):
self.assertEqual(10, self.basket.line_quantity(
self.product, self.record))
def test_returns_zero_line_quantity_for_alternative_stockrecord(self):
record = factories.create_stockrecord(
self.product, price_excl_tax=D('5.00'))
self.assertEqual(0, self.basket.line_quantity(
self.product, record))
def test_returns_zero_line_quantity_for_missing_product_and_stockrecord(self):
product = factories.create_product()
record = factories.create_stockrecord(
product, price_excl_tax=D('5.00'))
self.assertEqual(0, self.basket.line_quantity(
product, record))
def test_returns_correct_quantity_for_existing_product_and_stockrecord_and_options(self):
product = factories.create_product()
record = factories.create_stockrecord(
product, price_excl_tax=D('5.00'))
option = Option.objects.create(name="Message")
options = [{"option": option, "value": "2"}]
self.basket.add(product, options=options)
self.assertEqual(0, self.basket.line_quantity(
product, record))
self.assertEqual(1, self.basket.line_quantity(
product, record, options))
class TestMergingTwoBaskets(TestCase):
def setUp(self):
self.product = factories.create_product()
self.record = factories.create_stockrecord(
self.product, price_excl_tax=D('10.00'))
self.stockinfo = factories.create_stockinfo(self.record)
self.main_basket = Basket()
self.main_basket.strategy = strategy.Default()
self.main_basket.add(self.product, quantity=2)
self.merge_basket = Basket()
self.merge_basket.strategy = strategy.Default()
self.merge_basket.add(self.product, quantity=1)
self.main_basket.merge(self.merge_basket)
def test_doesnt_sum_quantities(self):
self.assertEquals(1, self.main_basket.num_lines)
def test_changes_status_of_merge_basket(self):
self.assertEquals(Basket.MERGED, self.merge_basket.status)
class TestASubmittedBasket(TestCase):
def setUp(self):
self.basket = Basket()
self.basket.strategy = strategy.Default()
self.basket.submit()
def test_has_correct_status(self):
self.assertTrue(self.basket.is_submitted)
def test_can_be_edited(self):
self.assertFalse(self.basket.can_be_edited)
| 2.3125 | 2 |
tests/fixtures/db/sqlite.py | code-watch/meltano | 8 | 9676 | import pytest
import os
import sqlalchemy
import contextlib
@pytest.fixture(scope="session")
def engine_uri(test_dir):
database_path = test_dir.joinpath("pytest_meltano.db")
try:
database_path.unlink()
except FileNotFoundError:
pass
return f"sqlite:///{database_path}"
| 1.859375 | 2 |
experiments/render-tests-avg.py | piotr-karon/realworld-starter-kit | 0 | 9677 | <reponame>piotr-karon/realworld-starter-kit
#!/usr/bin/env python3
import json
import os
from pathlib import Path
import numpy as np
from natsort import natsorted
try:
from docopt import docopt
from marko.ext.gfm import gfm
import pygal
from pygal.style import Style, DefaultStyle
except ImportError as e:
raise Exception('Some external dependencies not found, install them using: pip install -r requirements.txt') from e
def render():
suffix = '.avg.checks.bench.json'
suites = {}
for filepath in Path('').glob(f'*{suffix}'):
name = filepath.name[:-len(suffix)]
print(f'Loading {filepath} as {name}.')
with open(filepath) as fp:
suites[name] = json.load(fp)
names = natsorted(suites.keys())
figure_filenames = render_figures(names, suites)
out_filename = Path('bench-results.md')
with open(out_filename, 'w') as out:
cwd = os.getcwd().split(os.sep)[-2:]
print(f'# Benchmark of {", ".join(names)} in {cwd}', file=out)
notes_file = Path('notes.md')
if notes_file.exists():
print(f'Including {notes_file} in resulting Markdown.')
with notes_file.open() as fp:
out.write(fp.read())
else:
print(f'File {notes_file} does not exist, create it to include it in resulting Markdown.')
# print('## General Info & Checks', file=out)
# render_checks(names, suites, out)
print('## Graphs', file=out)
print('*The graphs are interactive, view the rendered HTML locally to enjoy it.*\n', file=out)
for filename in figure_filenames:
# Use HTML instead of Markdown image to specify the width
print(f'<img type="image/svg+xml" src="{filename}" alt="{filename}" width="49%"/>', file=out)
print(f'Markdown output written to {out_filename}.')
render_html(out_filename, Path('bench-results.html'))
def render_checks(names, suites, out):
print(f'|Check|{"|".join(names)}|', file=out)
print(f'|{"|".join(["---"] * (len(names) + 1))}|', file=out)
per_impl_checks = {name: suite['checks'] for name, suite in suites.items()}
check_names = sorted(set().union(*(checks.keys() for checks in per_impl_checks.values())))
def sanitize(value):
if type(value) is float:
value = float(f'{value:.3g}') # round to 3 significant figures
return str(int(value) if value >= 100 else value)
return str(value)
for check_name in check_names:
values = [sanitize(per_impl_checks[name].get(check_name)) for name in names]
if len(values) > 1 and len(set(values)) > 1:
values = [f'**{value}**' for value in values]
print(f'|{check_name}|{"|".join(values)}|', file=out)
FIGURE_FUNCS = []
def figure(func):
"""Simple decorator to mark a function as a figure generator."""
FIGURE_FUNCS.append(func)
return func
def render_figures(names, suites):
filenames = []
config = pygal.Config(legend_at_bottom=True, style=DefaultStyle)
for figure_func in FIGURE_FUNCS:
chart = figure_func(names, suites, config.copy())
filename = f'bench-results.{figure_func.__name__}.svg'
chart.render_to_file(filename)
filenames.append(filename)
return filenames
@figure
def startup_time_figure(names, suites, config):
all_vals = [suites[name]['startup_max'] for name in names]
mx = np.max(all_vals)
config.range = (0, mx + 0.1)
chart = pygal.Bar(config, value_formatter=lambda x: "{:0.2f}s".format(x))
    chart.title = 'Startup time (s)'
for name in names:
vals = [{'value': suites[name]['startup_avg'],
'ci': {'low': suites[name]['startup_min'], 'high': suites[name]['startup_max']}}]
# print(vals)
chart.add(name, vals)
return chart
@figure
def errors_vs_connections_figure(names, suites, config):
all_vals = [suites[name]['stats'] for name in names]
flat = [item for sublist in all_vals for item in sublist]
print(flat)
all_rates = [
div_or_none(s['request_errors_new_avg'], s['request_errors_new_avg'] + s['requests_new_avg'], scale=100) for s
in flat]
mx = np.max(all_rates)
config.range = (0, mx + mx * 0.1)
chart = pygal.Line(config, value_formatter=lambda x: "{:0.2f}%".format(x))
    chart.title = 'Error rate vs. number of connections (%)'
connections_x_labels(chart, suites, skip=0)
for name in names:
chart.add(name, [
div_or_none(s['request_errors_new_avg'], s['request_errors_new_avg'] + s['requests_new_avg'], scale=100)
for s in suites[name]['stats'][0:]])
return chart
@figure
def requests_vs_connections_figure(names, suites, config):
vals = [[x['requests_per_s_avg'] for x in suites[name]['stats']] for name in names]
print(vals)
mx = np.max(vals)
config.range = (0, mx + mx * 0.1)
config.min_scale = 6
chart = pygal.Line(config, value_formatter=lambda x: "{:0.0f}".format(x))
    chart.title = 'Successful requests per second vs. number of connections (requests/s)'
connections_x_labels(chart, suites, skip=0)
for name in names:
# print(suites[name]['stats'])
# vals = [{'value': x['requests_per_s_avg'], 'ci': {'low': x['requests_per_s_min'], 'high': x['requests_per_s_max']}} for x in suites[name]['stats']]
vals = [{'value': x['requests_per_s_avg']} for x in suites[name]['stats']]
chart.add(name, vals)
return chart
@figure
def latency_vs_connections_50_figure(names, suites, config):
return latency_vs_connections_figure(50, names, suites, config)
@figure
def latency_vs_connections_90_figure(names, suites, config):
return latency_vs_connections_figure(90, names, suites, config)
@figure
def latency_vs_connections_99_figure(names, suites, config):
return latency_vs_connections_figure(99, names, suites, config)
def latency_vs_connections_figure(percentile, names, suites, config):
all_vals = [[s[f'latency_{percentile}p_ms_avg'] for s in suites[name]['stats'][0:]] for name in names]
mx = np.max(all_vals)
mn = np.min(all_vals)
config.range = (mn - mn * .5, mx + mx * .5)
chart = pygal.Line(config, logarithmic=True, value_formatter=lambda x: "{:0.0f}".format(x))
    chart.title = f'{percentile}th percentile of response time vs. number of connections (ms)'
connections_x_labels(chart, suites, skip=0)
for name in names:
chart.add(name, [s[f'latency_{percentile}p_ms_avg']
for s in suites[name]['stats'][0:]])
return chart
@figure
def max_mem_usage_figure(names, suites, config):
all_vals = [[s['mem_usage_mb_avg'] for s in suites[name]['stats']] for name in names]
mx = np.max(all_vals)
config.range = (0, mx + .1 * mx)
chart = pygal.Line(config, value_formatter=lambda x: "{:0.0f}".format(x))
    chart.title = 'Maximum memory usage vs. number of connections (MiB)'
connections_x_labels(chart, suites)
for name in names:
chart.add(name, [s['mem_usage_mb_avg'] for s in suites[name]['stats']])
return chart
@figure
def max_mem_usage_per_requests_figure(names, suites, config):
all_vals = [[div_or_none(s['mem_usage_mb_avg'], s['requests_per_s_avg']) for s in suites[name]['stats'][0:]] for name in names]
mx = np.max(all_vals)
config.range = (0, mx + .1 * mx)
config.min_scale = 6
chart = pygal.Line(config, value_formatter=lambda x: "{:0.3f}".format(x))
    chart.title = 'Maximum memory usage per successful requests per second (MiB-seconds/request)'
connections_x_labels(chart, suites, skip=0)
for name in names:
chart.add(name,
[div_or_none(s['mem_usage_mb_avg'], s['requests_per_s_avg']) for s in suites[name]['stats'][0:]])
return chart
@figure
def cpu_figure(names, suites, config):
mx = np.max([[s['cpu_new_s_avg'] for s in suites[name]['stats'][0:]] for name in names])
config.range = (0, mx + mx * 0.1)
chart = pygal.Line(config, value_formatter=lambda x: "{:0.3f}".format(x))
    chart.title = 'CPU time used during a test round (CPU seconds)'
connections_x_labels(chart, suites, skip=0)
for name in names:
chart.add(name, [s['cpu_new_s_avg'] for s in suites[name]['stats'][0:]])
return chart
@figure
def cpu_per_request_figure(names, suites, config):
mx = np.max([[div_or_none(s['cpu_new_s_avg'], s['requests_new_avg'], scale=1000) for s in
suites[name]['stats'][0:]] for name in names])
config.range = (0, mx + mx * 0.1)
chart = pygal.Line(config, value_formatter=lambda x: "{:0.3f}".format(x))
    chart.title = 'CPU time per successful response (CPU milliseconds/request)'
connections_x_labels(chart, suites, skip=0)
for name in names:
chart.add(name, [div_or_none(s['cpu_new_s_avg'], s['requests_new_avg'], scale=1000) for s in
suites[name]['stats'][0:]])
return chart
@figure
def cpu_vs_requests_figure(names, suites, config):
all_vls = [[s['requests_total_avg'] for s in suites[name]['stats']] for name in names]
mx = np.max(all_vls)
config.range = (0, mx + mx * 0.1)
config.min_scale = 6
chart = pygal.XY(config, value_formatter=lambda x: "{:0.0f}".format(x), series_formatter=lambda x: "{:0.2f}".format(x))
    chart.title = 'Cumulative successful responses vs. cumulative CPU time'
    chart.x_title = 'CPU seconds'
    chart.y_title = 'cumulative successful responses'
for name in names:
chart.add(name, [
{'value': (s['cpu_total_s_avg'], s['requests_total_avg']),
'label': f'After {s["connections"]} connections round.'}
for s in suites[name]['stats']
])
return chart
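# Label the x axis with each benchmark round's connection count (or its message
# when no connection count is present), skipping the first `skip` rounds.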
def connections_x_labels(chart, suites, skip=0):
chart.x_labels = [f"{s['connections']} conn's" if s['connections'] else s['message']
for s in next(iter(suites.values()))['stats']][skip:]
chart.x_label_rotation = -30
def div_or_none(numerator, denominator, scale=1):
if not denominator:
return None
return scale * numerator / denominator
HTML_PREFIX = '''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Benchmark Report</title>
</head>
<body>
'''
HTML_SUFFIX = ''' </body>
</html>
'''
def render_html(md_file, html_file):
with open(md_file) as in_fp, open(html_file, 'w') as out_fp:
rs = in_fp.read()
html = gfm(rs)
# Replace <img> by <embed> for pygal interactivity, http://www.pygal.org/en/latest/documentation/web.html
html = html.replace('<img', '<embed')
# Replace link to md with link to .html for better browsability at HTML level.
html = html.replace('/README.md">full benchmark', '/README.html">full benchmark')
out_fp.write(HTML_PREFIX)
out_fp.write(html)
out_fp.write(HTML_SUFFIX)
print(f'HTML output written to {html_file.resolve().as_uri()}.')
if __name__ == '__main__':
# args = docopt(__doc__)
render()
| 2.25 | 2 |
litex/build/altera/quartus.py | osterwood/litex | 1,501 | 9678 | #
# This file is part of LiteX.
#
# Copyright (c) 2014-2019 <NAME> <<EMAIL>>
# Copyright (c) 2019 msloniewski <<EMAIL>>
# Copyright (c) 2019 vytautasb <<EMAIL>>
# SPDX-License-Identifier: BSD-2-Clause
import os
import subprocess
import sys
import math
from shutil import which
from migen.fhdl.structure import _Fragment
from litex.build.generic_platform import Pins, IOStandard, Misc
from litex.build import tools
# IO/Placement Constraints (.qsf) ------------------------------------------------------------------
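# Each constraint is emitted as one Quartus .qsf assignment line; a Pins
# constraint, for instance, expands roughly to
#   set_location_assignment -comment "<resource>" -to <signal> Pin_<pin>
# with the placeholders filled in from the platform's resource definitions.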
def _format_constraint(c, signame, fmt_r):
# IO location constraints
if isinstance(c, Pins):
tpl = "set_location_assignment -comment \"{name}\" -to {signame} Pin_{pin}"
return tpl.format(signame=signame, name=fmt_r, pin=c.identifiers[0])
# IO standard constraints
elif isinstance(c, IOStandard):
tpl = "set_instance_assignment -name io_standard -comment \"{name}\" \"{std}\" -to {signame}"
return tpl.format(signame=signame, name=fmt_r, std=c.name)
# Others constraints
elif isinstance(c, Misc):
if not isinstance(c.misc, str) and len(c.misc) == 2:
tpl = "set_instance_assignment -comment \"{name}\" -name {misc[0]} \"{misc[1]}\" -to {signame}"
return tpl.format(signame=signame, name=fmt_r, misc=c.misc)
else:
tpl = "set_instance_assignment -comment \"{name}\" -name {misc} -to {signame}"
return tpl.format(signame=signame, name=fmt_r, misc=c.misc)
def _format_qsf_constraint(signame, pin, others, resname):
fmt_r = "{}:{}".format(*resname[:2])
if resname[2] is not None:
fmt_r += "." + resname[2]
fmt_c = [_format_constraint(c, signame, fmt_r) for c in ([Pins(pin)] + others)]
return '\n'.join(fmt_c)
def _build_qsf_constraints(named_sc, named_pc):
qsf = []
for sig, pins, others, resname in named_sc:
if len(pins) > 1:
for i, p in enumerate(pins):
qsf.append(_format_qsf_constraint("{}[{}]".format(sig, i), p, others, resname))
else:
qsf.append(_format_qsf_constraint(sig, pins[0], others, resname))
if named_pc:
qsf.append("\n\n".join(named_pc))
return "\n".join(qsf)
# Timing Constraints (.sdc) ------------------------------------------------------------------------
def _build_sdc(clocks, false_paths, vns, named_sc, build_name, additional_sdc_commands):
sdc = []
# Clock constraints
for clk, period in sorted(clocks.items(), key=lambda x: x[0].duid):
is_port = False
for sig, pins, others, resname in named_sc:
if sig == vns.get_name(clk):
is_port = True
if is_port:
tpl = "create_clock -name {clk} -period {period} [get_ports {{{clk}}}]"
sdc.append(tpl.format(clk=vns.get_name(clk), period=str(period)))
else:
tpl = "create_clock -name {clk} -period {period} [get_nets {{{clk}}}]"
sdc.append(tpl.format(clk=vns.get_name(clk), period=str(period)))
# False path constraints
for from_, to in sorted(false_paths, key=lambda x: (x[0].duid, x[1].duid)):
tpl = "set_false_path -from [get_clocks {{{from_}}}] -to [get_clocks {{{to}}}]"
sdc.append(tpl.format(from_=vns.get_name(from_), to=vns.get_name(to)))
# Add additional commands
sdc += additional_sdc_commands
# Generate .sdc
tools.write_to_file("{}.sdc".format(build_name), "\n".join(sdc))
# Project (.qsf) -----------------------------------------------------------------------------------
def _build_qsf(device, ips, sources, vincpaths, named_sc, named_pc, build_name, additional_qsf_commands):
qsf = []
# Set device
qsf.append("set_global_assignment -name DEVICE {}".format(device))
# Add sources
for filename, language, library in sources:
if language == "verilog": language = "systemverilog" # Enforce use of SystemVerilog
tpl = "set_global_assignment -name {lang}_FILE {path} -library {lib}"
# Do not add None type files
if language is not None:
qsf.append(tpl.format(lang=language.upper(), path=filename.replace("\\", "/"), lib=library))
# Check if the file is a header. Those should not be explicitly added to qsf,
# but rather included in include search_path
else:
if filename.endswith(".svh") or filename.endswith(".vh"):
fpath = os.path.dirname(filename)
if fpath not in vincpaths:
vincpaths.append(fpath)
# Add ips
for filename in ips:
tpl = "set_global_assignment -name QSYS_FILE {filename}"
        qsf.append(tpl.format(filename=filename.replace("\\", "/")))
# Add include paths
for path in vincpaths:
qsf.append("set_global_assignment -name SEARCH_PATH {}".format(path.replace("\\", "/")))
# Set top level
qsf.append("set_global_assignment -name top_level_entity " + build_name)
# Add io, placement constraints
qsf.append(_build_qsf_constraints(named_sc, named_pc))
# Set timing constraints
qsf.append("set_global_assignment -name SDC_FILE {}.sdc".format(build_name))
# Add additional commands
qsf += additional_qsf_commands
# Generate .qsf
tools.write_to_file("{}.qsf".format(build_name), "\n".join(qsf))
# Script -------------------------------------------------------------------------------------------
def _build_script(build_name, create_rbf):
if sys.platform in ["win32", "cygwin"]:
script_contents = "REM Autogenerated by LiteX / git: " + tools.get_litex_git_revision()
script_file = "build_" + build_name + ".bat"
else:
script_contents = "# Autogenerated by LiteX / git: " + tools.get_litex_git_revision()
script_file = "build_" + build_name + ".sh"
script_contents += """
quartus_map --read_settings_files=on --write_settings_files=off {build_name} -c {build_name}
quartus_fit --read_settings_files=off --write_settings_files=off {build_name} -c {build_name}
quartus_asm --read_settings_files=off --write_settings_files=off {build_name} -c {build_name}
quartus_sta {build_name} -c {build_name}"""
if create_rbf:
script_contents += """
if [ -f "{build_name}.sof" ]
then
quartus_cpf -c {build_name}.sof {build_name}.rbf
fi
"""
script_contents = script_contents.format(build_name=build_name)
tools.write_to_file(script_file, script_contents, force_unix=True)
return script_file
def _run_script(script):
if sys.platform in ["win32", "cygwin"]:
shell = ["cmd", "/c"]
else:
shell = ["bash"]
if which("quartus_map") is None:
msg = "Unable to find Quartus toolchain, please:\n"
msg += "- Add Quartus toolchain to your $PATH."
raise OSError(msg)
if subprocess.call(shell + [script]) != 0:
raise OSError("Error occured during Quartus's script execution.")
# AlteraQuartusToolchain ---------------------------------------------------------------------------
class AlteraQuartusToolchain:
attr_translate = {}
def __init__(self):
self.clocks = dict()
self.false_paths = set()
self.additional_sdc_commands = []
self.additional_qsf_commands = []
def build(self, platform, fragment,
build_dir = "build",
build_name = "top",
run = True,
**kwargs):
# Create build directory
cwd = os.getcwd()
os.makedirs(build_dir, exist_ok=True)
os.chdir(build_dir)
# Finalize design
if not isinstance(fragment, _Fragment):
fragment = fragment.get_fragment()
platform.finalize(fragment)
# Generate verilog
v_output = platform.get_verilog(fragment, name=build_name, **kwargs)
named_sc, named_pc = platform.resolve_signals(v_output.ns)
v_file = build_name + ".v"
v_output.write(v_file)
platform.add_source(v_file)
# Generate design timing constraints file (.sdc)
_build_sdc(
clocks = self.clocks,
false_paths = self.false_paths,
vns = v_output.ns,
named_sc = named_sc,
build_name = build_name,
additional_sdc_commands = self.additional_sdc_commands)
# Generate design project and location constraints file (.qsf)
_build_qsf(
device = platform.device,
ips = platform.ips,
sources = platform.sources,
vincpaths = platform.verilog_include_paths,
named_sc = named_sc,
named_pc = named_pc,
build_name = build_name,
additional_qsf_commands = self.additional_qsf_commands)
# Generate build script
script = _build_script(build_name, platform.create_rbf)
# Run
if run:
_run_script(script)
os.chdir(cwd)
return v_output.ns
def add_period_constraint(self, platform, clk, period):
clk.attr.add("keep")
period = math.floor(period*1e3)/1e3 # round to lowest picosecond
if clk in self.clocks:
if period != self.clocks[clk]:
raise ValueError("Clock already constrained to {:.2f}ns, new constraint to {:.2f}ns"
.format(self.clocks[clk], period))
self.clocks[clk] = period
def add_false_path_constraint(self, platform, from_, to):
from_.attr.add("keep")
to.attr.add("keep")
if (to, from_) not in self.false_paths:
self.false_paths.add((from_, to))
| 2.046875 | 2 |
arxiv/canonical/util.py | arXiv/arxiv-canonical | 5 | 9679 | <gh_stars>1-10
"""Various helpers and utilities that don't belong anywhere else."""
from typing import Dict, Generic, TypeVar
KeyType = TypeVar('KeyType')
ValueType = TypeVar('ValueType')
class GenericMonoDict(Dict[KeyType, ValueType]):
"""A dict with specific key and value types."""
def __getitem__(self, key: KeyType) -> ValueType: ... | 2.609375 | 3 |
records/urls.py | Glucemy/Glucemy-back | 0 | 9680 | from rest_framework.routers import DefaultRouter
from records.views import RecordViewSet
router = DefaultRouter()
router.register('', RecordViewSet, basename='records')
urlpatterns = router.urls
| 1.679688 | 2 |
polystores/stores/azure_store.py | polyaxon/polystores | 50 | 9681 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
from rhea import RheaError
from rhea import parser as rhea_parser
from azure.common import AzureHttpError
from azure.storage.blob.models import BlobPrefix
from polystores.clients.azure_client import get_blob_service_connection
from polystores.exceptions import PolyaxonStoresException
from polystores.stores.base_store import BaseStore
from polystores.utils import append_basename, check_dirname_exists, get_files_in_current_directory
# pylint:disable=arguments-differ
class AzureStore(BaseStore):
"""
Azure store Service.
"""
STORE_TYPE = BaseStore._AZURE_STORE # pylint:disable=protected-access
def __init__(self, connection=None, **kwargs):
self._connection = connection
self._account_name = kwargs.get('account_name') or kwargs.get('AZURE_ACCOUNT_NAME')
self._account_key = kwargs.get('account_key') or kwargs.get('AZURE_ACCOUNT_KEY')
self._connection_string = (
kwargs.get('connection_string') or kwargs.get('AZURE_CONNECTION_STRING'))
@property
def connection(self):
if self._connection is None:
self.set_connection(account_name=self._account_name,
account_key=self._account_key,
connection_string=self._connection_string)
return self._connection
def set_connection(self, account_name=None, account_key=None, connection_string=None):
"""
Sets a new Blob service connection.
Args:
account_name: `str`. The storage account name.
account_key: `str`. The storage account key.
connection_string: `str`. If specified, this will override all other parameters besides
request session.
Returns:
BlockBlobService instance
"""
self._connection = get_blob_service_connection(account_name=account_name,
account_key=account_key,
connection_string=connection_string)
def set_env_vars(self):
if self._account_name:
os.environ['AZURE_ACCOUNT_NAME'] = self._account_name
if self._account_key:
os.environ['AZURE_ACCOUNT_KEY'] = self._account_key
if self._connection_string:
os.environ['AZURE_CONNECTION_STRING'] = self._connection_string
@staticmethod
def parse_wasbs_url(wasbs_url):
"""
Parses and validates a wasbs url.
Returns:
tuple(container, storage_account, path).
"""
try:
spec = rhea_parser.parse_wasbs_path(wasbs_url)
return spec.container, spec.storage_account, spec.path
except RheaError as e:
raise PolyaxonStoresException(e)
def check_blob(self, blob, container_name=None):
"""
Checks if a blob exists.
Args:
blob: `str`. Name of existing blob.
container_name: `str`. Name of existing container.
Returns:
bool
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
try:
return self.connection.get_blob_properties(
container_name,
blob
)
except AzureHttpError:
return None
def ls(self, path):
results = self.list(key=path)
return {'files': results['blobs'], 'dirs': results['prefixes']}
def list(self, key, container_name=None, path=None, delimiter='/', marker=None):
"""
Checks if a blob exists.
Args:
key: `str`. key prefix.
container_name: `str`. Name of existing container.
path: `str`. an extra path to append to the key.
delimiter: `str`. the delimiter marks key hierarchy.
marker: `str`. An opaque continuation token.
"""
if not container_name:
container_name, _, key = self.parse_wasbs_url(key)
if key and not key.endswith('/'):
key += '/'
prefix = key
if path:
prefix = os.path.join(prefix, path)
if prefix and not prefix.endswith('/'):
prefix += '/'
list_blobs = []
list_prefixes = []
while True:
results = self.connection.list_blobs(container_name,
prefix=prefix,
delimiter=delimiter,
marker=marker)
for r in results:
if isinstance(r, BlobPrefix):
name = r.name[len(key):]
list_prefixes.append(name)
else:
name = r.name[len(key):]
list_blobs.append((name, r.properties.content_length))
if results.next_marker:
marker = results.next_marker
else:
break
return {
'blobs': list_blobs,
'prefixes': list_prefixes
}
def upload_file(self, filename, blob, container_name=None, use_basename=True):
"""
        Uploads a local file to Azure Blob Storage.
Args:
filename: `str`. the file to upload.
blob: `str`. blob to upload to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the filename.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
if use_basename:
blob = append_basename(blob, filename)
self.connection.create_blob_from_path(container_name, blob, filename)
def upload_dir(self, dirname, blob, container_name=None, use_basename=True):
"""
        Uploads a local directory to Azure Blob Storage.
Args:
dirname: `str`. name of the directory to upload.
blob: `str`. blob to upload to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the directory.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
if use_basename:
blob = append_basename(blob, dirname)
# Turn the path to absolute paths
dirname = os.path.abspath(dirname)
with get_files_in_current_directory(dirname) as files:
for f in files:
file_blob = os.path.join(blob, os.path.relpath(f, dirname))
self.upload_file(filename=f,
blob=file_blob,
container_name=container_name,
use_basename=False)
def download_file(self, blob, local_path, container_name=None, use_basename=True):
"""
        Downloads a file from Azure Blob Storage.
Args:
blob: `str`. blob to download.
local_path: `str`. the path to download to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the blob.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
local_path = os.path.abspath(local_path)
if use_basename:
local_path = append_basename(local_path, blob)
check_dirname_exists(local_path)
try:
self.connection.get_blob_to_path(container_name, blob, local_path)
except AzureHttpError as e:
raise PolyaxonStoresException(e)
def download_dir(self, blob, local_path, container_name=None, use_basename=True):
"""
        Downloads a directory from Azure Blob Storage.
Args:
blob: `str`. blob to download.
local_path: `str`. the path to download to.
container_name: `str`. the name of the container.
use_basename: `bool`. whether or not to use the basename of the key.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
local_path = os.path.abspath(local_path)
if use_basename:
local_path = append_basename(local_path, blob)
try:
check_dirname_exists(local_path, is_dir=True)
except PolyaxonStoresException:
os.makedirs(local_path)
results = self.list(container_name=container_name, key=blob, delimiter='/')
# Create directories
for prefix in sorted(results['prefixes']):
direname = os.path.join(local_path, prefix)
prefix = os.path.join(blob, prefix)
# Download files under
self.download_dir(blob=prefix,
local_path=direname,
container_name=container_name,
use_basename=False)
# Download files
for file_key in results['blobs']:
file_key = file_key[0]
filename = os.path.join(local_path, file_key)
file_key = os.path.join(blob, file_key)
self.download_file(blob=file_key,
local_path=filename,
container_name=container_name,
use_basename=False)
def delete(self, blob, container_name=None):
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
results = self.list(container_name=container_name, key=blob, delimiter='/')
if not any([results['prefixes'], results['blobs']]):
self.delete_file(blob=blob, container_name=container_name)
# Delete directories
for prefix in sorted(results['prefixes']):
prefix = os.path.join(blob, prefix)
# Download files under
self.delete(blob=prefix, container_name=container_name)
# Delete files
for file_key in results['blobs']:
file_key = file_key[0]
file_key = os.path.join(blob, file_key)
self.delete_file(blob=file_key, container_name=container_name)
def delete_file(self, blob, container_name=None):
"""
Deletes if a blob exists.
Args:
blob: `str`. Name of existing blob.
container_name: `str`. Name of existing container.
"""
if not container_name:
container_name, _, blob = self.parse_wasbs_url(blob)
try:
self.connection.delete_blob(container_name, blob)
except AzureHttpError:
pass
| 2.078125 | 2 |
analysis/webservice/NexusHandler.py | dataplumber/nexus | 23 | 9682 | """
Copyright (c) 2016 Jet Propulsion Laboratory,
California Institute of Technology. All rights reserved
"""
import sys
import numpy as np
import logging
import time
import types
from datetime import datetime
from netCDF4 import Dataset
from nexustiles.nexustiles import NexusTileService
from webservice.webmodel import NexusProcessingException
AVAILABLE_HANDLERS = []
AVAILABLE_INITIALIZERS = []
def nexus_initializer(clazz):
log = logging.getLogger(__name__)
try:
wrapper = NexusInitializerWrapper(clazz)
log.info("Adding initializer '%s'" % wrapper.clazz())
AVAILABLE_INITIALIZERS.append(wrapper)
except Exception as ex:
log.warn("Initializer '%s' failed to load (reason: %s)" % (clazz, ex.message), exc_info=True)
return clazz
def nexus_handler(clazz):
log = logging.getLogger(__name__)
try:
wrapper = AlgorithmModuleWrapper(clazz)
log.info("Adding algorithm module '%s' with path '%s' (%s)" % (wrapper.name(), wrapper.path(), wrapper.clazz()))
AVAILABLE_HANDLERS.append(wrapper)
except Exception as ex:
log.warn("Handler '%s' is invalid and will be skipped (reason: %s)" % (clazz, ex.message), exc_info=True)
return clazz
DEFAULT_PARAMETERS_SPEC = {
"ds": {
"name": "Dataset",
"type": "string",
"description": "One or more comma-separated dataset shortnames"
},
"minLat": {
"name": "Minimum Latitude",
"type": "float",
"description": "Minimum (Southern) bounding box Latitude"
},
"maxLat": {
"name": "Maximum Latitude",
"type": "float",
"description": "Maximum (Northern) bounding box Latitude"
},
"minLon": {
"name": "Minimum Longitude",
"type": "float",
"description": "Minimum (Western) bounding box Longitude"
},
"maxLon": {
"name": "Maximum Longitude",
"type": "float",
"description": "Maximum (Eastern) bounding box Longitude"
},
"startTime": {
"name": "Start Time",
"type": "long integer",
"description": "Starting time in milliseconds since midnight Jan. 1st, 1970 UTC"
},
"endTime": {
"name": "End Time",
"type": "long integer",
"description": "Ending time in milliseconds since midnight Jan. 1st, 1970 UTC"
},
"lowPassFilter": {
"name": "Apply Low Pass Filter",
"type": "boolean",
"description": "Specifies whether to apply a low pass filter on the analytics results"
},
"seasonalFilter": {
"name": "Apply Seasonal Filter",
"type": "boolean",
"description": "Specified whether to apply a seasonal cycle filter on the analytics results"
}
}
class NexusInitializerWrapper:
def __init__(self, clazz):
self.__log = logging.getLogger(__name__)
self.__hasBeenRun = False
self.__clazz = clazz
self.validate()
def validate(self):
if "init" not in self.__clazz.__dict__ or not type(self.__clazz.__dict__["init"]) == types.FunctionType:
raise Exception("Method 'init' has not been declared")
def clazz(self):
return self.__clazz
def hasBeenRun(self):
return self.__hasBeenRun
def init(self, config):
if not self.__hasBeenRun:
self.__hasBeenRun = True
instance = self.__clazz()
instance.init(config)
else:
self.log("Initializer '%s' has already been run" % self.__clazz)
class AlgorithmModuleWrapper:
def __init__(self, clazz):
self.__instance = None
self.__clazz = clazz
self.validate()
def validate(self):
if "calc" not in self.__clazz.__dict__ or not type(self.__clazz.__dict__["calc"]) == types.FunctionType:
raise Exception("Method 'calc' has not been declared")
if "path" not in self.__clazz.__dict__:
raise Exception("Property 'path' has not been defined")
if "name" not in self.__clazz.__dict__:
raise Exception("Property 'name' has not been defined")
if "description" not in self.__clazz.__dict__:
raise Exception("Property 'description' has not been defined")
if "params" not in self.__clazz.__dict__:
raise Exception("Property 'params' has not been defined")
def clazz(self):
return self.__clazz
def name(self):
return self.__clazz.name
def path(self):
return self.__clazz.path
def description(self):
return self.__clazz.description
def params(self):
return self.__clazz.params
def instance(self, algorithm_config=None, sc=None):
if "singleton" in self.__clazz.__dict__ and self.__clazz.__dict__["singleton"] is True:
if self.__instance is None:
self.__instance = self.__clazz()
try:
self.__instance.set_config(algorithm_config)
except AttributeError:
pass
try:
self.__instance.set_spark_context(sc)
except AttributeError:
pass
return self.__instance
else:
instance = self.__clazz()
try:
instance.set_config(algorithm_config)
except AttributeError:
pass
try:
                instance.set_spark_context(sc)
except AttributeError:
pass
return instance
def isValid(self):
try:
self.validate()
return True
except Exception as ex:
return False
class CalcHandler(object):
def calc(self, computeOptions, **args):
raise Exception("calc() not yet implemented")
class NexusHandler(CalcHandler):
def __init__(self, skipCassandra=False, skipSolr=False):
CalcHandler.__init__(self)
self.algorithm_config = None
self._tile_service = NexusTileService(skipCassandra, skipSolr)
def set_config(self, algorithm_config):
self.algorithm_config = algorithm_config
def _mergeDicts(self, x, y):
z = x.copy()
z.update(y)
return z
def _now(self):
millis = int(round(time.time() * 1000))
return millis
def _mergeDataSeries(self, resultsData, dataNum, resultsMap):
for entry in resultsData:
#frmtdTime = datetime.fromtimestamp(entry["time"] ).strftime("%Y-%m")
frmtdTime = entry["time"]
if not frmtdTime in resultsMap:
resultsMap[frmtdTime] = []
entry["ds"] = dataNum
resultsMap[frmtdTime].append(entry)
def _resultsMapToList(self, resultsMap):
resultsList = []
for key, value in resultsMap.iteritems():
resultsList.append(value)
resultsList = sorted(resultsList, key=lambda entry: entry[0]["time"])
return resultsList
def _mergeResults(self, resultsRaw):
resultsMap = {}
for i in range(0, len(resultsRaw)):
resultsSeries = resultsRaw[i]
resultsData = resultsSeries[0]
self._mergeDataSeries(resultsData, i, resultsMap)
resultsList = self._resultsMapToList(resultsMap)
return resultsList
class SparkHandler(NexusHandler):
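    # Caps the number of concurrently running Spark jobs: each request must pop
    # a job name from a fixed-size stack, and an empty stack raises
    # MaxConcurrentJobsReached (reported to clients as HTTP 503).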
class SparkJobContext(object):
class MaxConcurrentJobsReached(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
def __init__(self, job_stack):
self.spark_job_stack = job_stack
self.job_name = None
self.log = logging.getLogger(__name__)
def __enter__(self):
try:
self.job_name = self.spark_job_stack.pop()
self.log.debug("Using %s" % self.job_name)
except IndexError:
raise SparkHandler.SparkJobContext.MaxConcurrentJobsReached()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.job_name is not None:
self.log.debug("Returning %s" % self.job_name)
self.spark_job_stack.append(self.job_name)
def __init__(self, **kwargs):
import inspect
NexusHandler.__init__(self, **kwargs)
self._sc = None
self.spark_job_stack = []
def with_spark_job_context(calc_func):
from functools import wraps
@wraps(calc_func)
def wrapped(*args, **kwargs1):
try:
with SparkHandler.SparkJobContext(self.spark_job_stack) as job_context:
# TODO Pool and Job are forced to a 1-to-1 relationship
calc_func.im_self._sc.setLocalProperty("spark.scheduler.pool", job_context.job_name)
calc_func.im_self._sc.setJobGroup(job_context.job_name, "a spark job")
return calc_func(*args, **kwargs1)
except SparkHandler.SparkJobContext.MaxConcurrentJobsReached:
raise NexusProcessingException(code=503,
reason="Max concurrent requests reached. Please try again later.")
return wrapped
for member in inspect.getmembers(self, predicate=inspect.ismethod):
if member[0] == "calc":
setattr(self, member[0], with_spark_job_context(member[1]))
def set_spark_context(self, sc):
self._sc = sc
def set_config(self, algorithm_config):
max_concurrent_jobs = algorithm_config.getint("spark", "maxconcurrentjobs") if algorithm_config.has_section(
"spark") and algorithm_config.has_option("spark", "maxconcurrentjobs") else 10
self.spark_job_stack = list(["Job %s" % x for x in xrange(1, max_concurrent_jobs + 1)])
self.algorithm_config = algorithm_config
def _setQueryParams(self, ds, bounds, start_time=None, end_time=None,
start_year=None, end_year=None, clim_month=None,
fill=-9999., spark_master=None, spark_nexecs=None,
spark_nparts=None):
self._ds = ds
self._minLat, self._maxLat, self._minLon, self._maxLon = bounds
self._startTime = start_time
self._endTime = end_time
self._startYear = start_year
self._endYear = end_year
self._climMonth = clim_month
self._fill = fill
self._spark_master = spark_master
self._spark_nexecs = spark_nexecs
self._spark_nparts = spark_nparts
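    # Steps backwards in time, one temporal increment at a time, until a query
    # returns tiles, then uses those tiles to infer the grid resolution and the
    # lat/lon extent of the dataset.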
def _find_global_tile_set(self):
if type(self._ds) in (list,tuple):
ds = self._ds[0]
else:
ds = self._ds
ntiles = 0
##################################################################
# Temporary workaround until we have dataset metadata to indicate
# temporal resolution.
if "monthly" in ds.lower():
t_incr = 2592000 # 30 days
else:
t_incr = 86400 # 1 day
##################################################################
t = self._endTime
self._latRes = None
self._lonRes = None
while ntiles == 0:
nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon, self._maxLon, ds=ds, start_time=t-t_incr, end_time=t)
ntiles = len(nexus_tiles)
self.log.debug('find_global_tile_set got {0} tiles'.format(ntiles))
if ntiles > 0:
for tile in nexus_tiles:
self.log.debug('tile coords:')
self.log.debug('tile lats: {0}'.format(tile.latitudes))
self.log.debug('tile lons: {0}'.format(tile.longitudes))
if self._latRes is None:
lats = tile.latitudes.data
if (len(lats) > 1):
self._latRes = abs(lats[1]-lats[0])
if self._lonRes is None:
lons = tile.longitudes.data
if (len(lons) > 1):
self._lonRes = abs(lons[1]-lons[0])
if ((self._latRes is not None) and
(self._lonRes is not None)):
break
if (self._latRes is None) or (self._lonRes is None):
ntiles = 0
else:
lats_agg = np.concatenate([tile.latitudes.compressed()
for tile in nexus_tiles])
lons_agg = np.concatenate([tile.longitudes.compressed()
for tile in nexus_tiles])
self._minLatCent = np.min(lats_agg)
self._maxLatCent = np.max(lats_agg)
self._minLonCent = np.min(lons_agg)
self._maxLonCent = np.max(lons_agg)
t -= t_incr
return nexus_tiles
def _find_tile_bounds(self, t):
lats = t.latitudes
lons = t.longitudes
if (len(lats.compressed()) > 0) and (len(lons.compressed()) > 0):
min_lat = np.ma.min(lats)
max_lat = np.ma.max(lats)
min_lon = np.ma.min(lons)
max_lon = np.ma.max(lons)
good_inds_lat = np.where(lats.mask == False)[0]
good_inds_lon = np.where(lons.mask == False)[0]
min_y = np.min(good_inds_lat)
max_y = np.max(good_inds_lat)
min_x = np.min(good_inds_lon)
max_x = np.max(good_inds_lon)
bounds = (min_lat, max_lat, min_lon, max_lon,
min_y, max_y, min_x, max_x)
else:
self.log.warn('Nothing in this tile!')
bounds = None
return bounds
@staticmethod
def query_by_parts(tile_service, min_lat, max_lat, min_lon, max_lon,
dataset, start_time, end_time, part_dim=0):
nexus_max_tiles_per_query = 100
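        # If the requested box/time range matches more tiles than a single query
        # may return, recursively bisect it along latitude (part_dim=0),
        # longitude (part_dim=1) or time (part_dim=2) and merge the results.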
#print 'trying query: ',min_lat, max_lat, min_lon, max_lon, \
# dataset, start_time, end_time
try:
tiles = \
tile_service.find_tiles_in_box(min_lat, max_lat,
min_lon, max_lon,
dataset,
start_time=start_time,
end_time=end_time,
fetch_data=False)
assert(len(tiles) <= nexus_max_tiles_per_query)
except:
#print 'failed query: ',min_lat, max_lat, min_lon, max_lon, \
# dataset, start_time, end_time
if part_dim == 0:
# Partition by latitude.
mid_lat = (min_lat + max_lat) / 2
nexus_tiles = SparkHandler.query_by_parts(tile_service,
min_lat, mid_lat,
min_lon, max_lon,
dataset,
start_time, end_time,
part_dim=part_dim)
nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
mid_lat,
max_lat,
min_lon,
max_lon,
dataset,
start_time,
end_time,
part_dim=part_dim))
elif part_dim == 1:
# Partition by longitude.
mid_lon = (min_lon + max_lon) / 2
nexus_tiles = SparkHandler.query_by_parts(tile_service,
min_lat, max_lat,
min_lon, mid_lon,
dataset,
start_time, end_time,
part_dim=part_dim)
nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
min_lat,
max_lat,
mid_lon,
max_lon,
dataset,
start_time,
end_time,
part_dim=part_dim))
elif part_dim == 2:
# Partition by time.
mid_time = (start_time + end_time) / 2
nexus_tiles = SparkHandler.query_by_parts(tile_service,
min_lat, max_lat,
min_lon, max_lon,
dataset,
start_time, mid_time,
part_dim=part_dim)
nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
min_lat,
max_lat,
min_lon,
max_lon,
dataset,
mid_time,
end_time,
part_dim=part_dim))
else:
# No exception, so query Cassandra for the tile data.
#print 'Making NEXUS query to Cassandra for %d tiles...' % \
# len(tiles)
#t1 = time.time()
#print 'NEXUS call start at time %f' % t1
#sys.stdout.flush()
nexus_tiles = list(tile_service.fetch_data_for_tiles(*tiles))
nexus_tiles = list(tile_service.mask_tiles_to_bbox(min_lat, max_lat,
min_lon, max_lon,
nexus_tiles))
#t2 = time.time()
#print 'NEXUS call end at time %f' % t2
#print 'Seconds in NEXUS call: ', t2-t1
#sys.stdout.flush()
#print 'Returning %d tiles' % len(nexus_tiles)
return nexus_tiles
@staticmethod
def _prune_tiles(nexus_tiles):
del_ind = np.where([np.all(tile.data.mask) for tile in nexus_tiles])[0]
for i in np.flipud(del_ind):
del nexus_tiles[i]
def _lat2ind(self,lat):
return int((lat-self._minLatCent)/self._latRes)
def _lon2ind(self,lon):
return int((lon-self._minLonCent)/self._lonRes)
def _ind2lat(self,y):
return self._minLatCent+y*self._latRes
def _ind2lon(self,x):
return self._minLonCent+x*self._lonRes
def _create_nc_file_time1d(self, a, fname, varname, varunits=None,
fill=None):
self.log.debug('a={0}'.format(a))
self.log.debug('shape a = {0}'.format(a.shape))
assert len(a.shape) == 1
time_dim = len(a)
rootgrp = Dataset(fname, "w", format="NETCDF4")
rootgrp.createDimension("time", time_dim)
vals = rootgrp.createVariable(varname, "f4", dimensions=("time",),
fill_value=fill)
times = rootgrp.createVariable("time", "f4", dimensions=("time",))
vals[:] = [d['mean'] for d in a]
times[:] = [d['time'] for d in a]
if varunits is not None:
vals.units = varunits
times.units = 'seconds since 1970-01-01 00:00:00'
rootgrp.close()
def _create_nc_file_latlon2d(self, a, fname, varname, varunits=None,
fill=None):
self.log.debug('a={0}'.format(a))
self.log.debug('shape a = {0}'.format(a.shape))
assert len(a.shape) == 2
lat_dim, lon_dim = a.shape
rootgrp = Dataset(fname, "w", format="NETCDF4")
rootgrp.createDimension("lat", lat_dim)
rootgrp.createDimension("lon", lon_dim)
vals = rootgrp.createVariable(varname, "f4",
dimensions=("lat","lon",),
fill_value=fill)
lats = rootgrp.createVariable("lat", "f4", dimensions=("lat",))
lons = rootgrp.createVariable("lon", "f4", dimensions=("lon",))
vals[:,:] = a
lats[:] = np.linspace(self._minLatCent,
self._maxLatCent, lat_dim)
lons[:] = np.linspace(self._minLonCent,
self._maxLonCent, lon_dim)
if varunits is not None:
vals.units = varunits
lats.units = "degrees north"
lons.units = "degrees east"
rootgrp.close()
def _create_nc_file(self, a, fname, varname, **kwargs):
self._create_nc_file_latlon2d(a, fname, varname, **kwargs)
def executeInitializers(config):
[wrapper.init(config) for wrapper in AVAILABLE_INITIALIZERS]
| 2.328125 | 2 |
utils/box/metric.py | ming71/SLA | 9 | 9683 | import numpy as np
from collections import defaultdict, Counter
from .rbbox_np import rbbox_iou
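# Area under the interpolated precision-recall curve (PASCAL VOC 2010+ style),
# returned as a percentage.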
def get_ap(recall, precision):
recall = [0] + list(recall) + [1]
precision = [0] + list(precision) + [0]
for i in range(len(precision) - 1, 0, -1):
precision[i - 1] = max(precision[i - 1], precision[i])
ap = sum((recall[i] - recall[i - 1]) * precision[i] for i in range(1, len(recall)) if recall[i] != recall[i - 1])
return ap * 100
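# 11-point interpolated AP (PASCAL VOC 2007 style): average the maximum
# precision reached at recall thresholds 0.0, 0.1, ..., 1.0.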
def get_ap_07(recall, precision):
ap = 0.
for t in np.linspace(0, 1, 11, endpoint=True):
mask = recall >= t
if np.any(mask):
ap += np.max(precision[mask]) / 11
return ap * 100
def get_det_aps(detect, target, num_classes, iou_thresh=0.5, use_07_metric=False):
# [[index, bbox, score, label], ...]
aps = []
for c in range(num_classes):
target_c = list(filter(lambda x: x[3] == c, target))
detect_c = filter(lambda x: x[3] == c, detect)
detect_c = sorted(detect_c, key=lambda x: x[2], reverse=True)
tp = np.zeros(len(detect_c))
fp = np.zeros(len(detect_c))
target_count = Counter([x[0] for x in target_c])
target_count = {index: np.zeros(count) for index, count in target_count.items()}
target_lut = defaultdict(list)
for index, bbox, conf, label in target_c:
target_lut[index].append(bbox)
detect_lut = defaultdict(list)
for index, bbox, conf, label in detect_c:
detect_lut[index].append(bbox)
iou_lut = dict()
for index, bboxes in detect_lut.items():
if index in target_lut:
iou_lut[index] = rbbox_iou(np.stack(bboxes), np.stack(target_lut[index]))
counter = defaultdict(int)
for i, (index, bbox, conf, label) in enumerate(detect_c):
count = counter[index]
counter[index] += 1
iou_max = -np.inf
hit_j = 0
if index in iou_lut:
for j, iou in enumerate(iou_lut[index][count]):
if iou > iou_max:
iou_max = iou
hit_j = j
if iou_max > iou_thresh and target_count[index][hit_j] == 0:
tp[i] = 1
target_count[index][hit_j] = 1
else:
fp[i] = 1
tp_sum = np.cumsum(tp)
fp_sum = np.cumsum(fp)
npos = len(target_c)
recall = tp_sum / npos
precision = tp_sum / (tp_sum + fp_sum)
aps.append((get_ap_07 if use_07_metric else get_ap)(recall, precision))
return aps
| 1.953125 | 2 |
app.py | winstonschroeder/setlistmanager | 0 | 9684 | <filename>app.py
import logging
import pygame
from app import *
from pygame.locals import *
from werkzeug.serving import run_simple
from web import webapp as w
import data_access as da
logging.basicConfig(filename='setlistmanager.log', level=logging.DEBUG)
SCREEN_WIDTH = 160
SCREEN_HEIGHT = 128
class Button:
pass
class Text():
"""Create a text object."""
def __init__(self, surface, text, pos, **options):
self.text = text
self.surface = surface
self.pos = pos
self.bold = True
self.italic = False
self.underline = False
self.background = None # Color('white')
self.font = pygame.font.SysFont('Arial', 64)
self.fontname = None # 'Free Sans'
self.fontsize = 40
self.fontcolor = Color('black')
self.set_font()
da.connect_db('db.db')
songs = da.get_all_songs_as_json()
print (songs)
# self.words = [word.split(' ') for word in self.text.splitlines()] # 2D array where each row is a list of words.
# self.space = self.font.size(' ')[0] # The width of a space.
# max_width, max_height = self.surface.get_size()
# x, y = self.pos
# for line in self.words:
# for word in line:
# word_surface = self.font.render(word, 0, self.fontcolor)
# # print(word)
# word_width, word_height = word_surface.get_size()
# if x + word_width >= max_width:
# x = pos[0] # Reset the x.
# y += word_height # Start on new row.
# surface.blit(word_surface, (x, y))
# x += word_width + self.space
# x = pos[0] # Reset the x.
# y += word_height # Start on new row.
self.render()
def set_font(self):
"""Set the font from its name and size."""
self.font = pygame.font.Font(self.fontname, self.fontsize)
self.font.set_bold(self.bold)
self.font.set_italic(self.italic)
self.font.set_underline(self.underline)
def render(self):
"""Render the text into an image."""
self.img = self.font.render(self.text, True, self.fontcolor, self.background)
self.rect = self.img.get_rect()
self.rect.size = self.img.get_size()
self.rect.topleft = self.pos
def draw(self):
"""Draw the text image to the screen."""
# Put the center of surf at the center of the display
surf_center = (
(SCREEN_WIDTH - self.rect.width)/2,
(SCREEN_HEIGHT - self.rect.height)/2
)
App.screen.blit(self.img, surf_center)
# App.screen.blit(self.img, self.rect)
class App:
"""Create a single-window app with multiple scenes."""
def __init__(self):
"""Initialize pygame and the application."""
logging.debug('Initializing App')
pygame.init()
pygame.mouse.set_cursor((8, 8), (0, 0), (0, 0, 0, 0, 0, 0, 0, 0), (0, 0, 0, 0, 0, 0, 0, 0))
self.shortcuts = {
(K_x, KMOD_LMETA): 'print("cmd+X")',
(K_x, KMOD_LALT): 'print("alt+X")',
(K_x, KMOD_LCTRL): 'print("ctrl+X")',
(K_x, KMOD_LMETA + KMOD_LSHIFT): 'print("cmd+shift+X")',
(K_x, KMOD_LMETA + KMOD_LALT): 'print("cmd+alt+X")',
(K_x, KMOD_LMETA + KMOD_LALT + KMOD_LSHIFT): 'print("cmd+alt+shift+X")',
}
self.color = Color('green')
self.flags = RESIZABLE
self.rect = Rect(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT)
App.screen = pygame.display.set_mode(self.rect.size, self.flags)
App.t = Text(App.screen, 'Chorus', pos=(0, 0))
App.running = True
def run(self):
"""Run the main event loop."""
logging.debug('entering method run')
app = w.create_app()
        # run_simple() blocks, so serve the web UI from a daemon thread; the
        # werkzeug reloader only works in the main thread, hence use_reloader=False.
        threading.Thread(target=run_simple, args=('127.0.0.1', 5000, app),
                         kwargs={'use_debugger': True, 'use_reloader': False},
                         daemon=True).start()
        logging.debug('flask dev server started in background thread')
while App.running:
logging.debug('.')
for event in pygame.event.get():
if event.type == QUIT:
App.running = False
if event.type == KEYDOWN:
self.do_shortcut(event)
App.screen.fill(self.color)
App.t.draw()
pygame.display.update()
logging.debug('exiting setlistmanager')
pygame.quit()
def do_shortcut(self, event):
"""Find the the key/mod combination in the dictionary and execute the cmd."""
k = event.key
m = event.mod
if (k, m) in self.shortcuts:
exec(self.shortcuts[k, m])
def toggle_fullscreen(self):
"""Toggle between full screen and windowed screen."""
self.flags ^= FULLSCREEN
pygame.display.set_mode((0, 0), self.flags)
def toggle_resizable(self):
"""Toggle between resizable and fixed-size window."""
self.flags ^= RESIZABLE
pygame.display.set_mode(self.rect.size, self.flags)
def toggle_frame(self):
"""Toggle between frame and noframe window."""
self.flags ^= NOFRAME
pygame.display.set_mode(self.rect.size, self.flags)
| 2.96875 | 3 |
sim_keypoints.py | Praznat/annotationmodeling | 8 | 9685 | import json
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import simulation
from eval_functions import oks_score_multi
import utils
def alter_location(points, x_offset, y_offset):
x, y = points.T
return np.array([x + x_offset, y + y_offset]).T
def alter_rotation(points, radians):
centroid = np.mean(points, axis=0)
return utils.rotate_via_numpy((points - centroid).T, radians) + centroid
def alter_magnitude(points, percent_diff):
centroid = np.mean(points, axis=0)
return (points - centroid) * np.exp(percent_diff) + centroid
def alter_normal_jump(points, scale):
return points + np.random.normal(0, scale, points.shape)
def alter_cauchy_jump(points, scale, abs_bound):
return points + utils.bounded_cauchy(scale, points.shape, abs_bound)
def disappear(points, p_disappear):
return None if np.random.uniform() < p_disappear else points
def shift_by_uerr(annotation, uerr):
shifts = [
alter_rotation(annotation, np.random.normal(0, 0.5 * uerr) * np.pi / 8),
alter_magnitude(annotation, np.random.normal(0, 0.3 * uerr)),
alter_normal_jump(annotation, 30 * uerr),
alter_cauchy_jump(annotation, 30 * uerr, 100),
]
return np.mean(shifts, axis=0) * np.abs(np.sign(annotation))
def create_user_data(uid, df, pct_items, u_err, difficulty_dict=None, extraarg=None):
items = df["item"].unique()
n_items_labeled = int(np.round(pct_items * len(items)))
items_labeled = sorted(np.random.choice(items, n_items_labeled, replace=False))
labels = []
for item in items_labeled:
gold = df[df["item"] == item]["gold"].values[0]
shifted_kpobjs = [shift_by_uerr(kpobj, u_err) for kpobj in gold]
kpobjs = [shifted_kpobjs[0]] + [disappear(kp, u_err / 2) for kp in shifted_kpobjs[1:]]
kpobjs = [kp for kp in kpobjs if kp is not None]
labels.append(kpobjs)
dfdict = {
"uid": [uid] * len(items_labeled),
"item": items_labeled,
"annotation": labels,
}
return pd.DataFrame(dfdict)
class KeypointSimulator(simulation.Simulator):
def __init__(self, rawdata_dir='data/coco/person_keypoints_train2017.json', max_items=500, minlabelsperitem=4):
with open(rawdata_dir) as f:
dataset = json.load(f)
self.category_id_skeletons = {c["id"]: np.array(c["skeleton"])-1 for c in iter(dataset["categories"])}
img_label = {}
for dataset_annotation in iter(dataset["annotations"]):
v = img_label.setdefault(dataset_annotation["image_id"], [])
v.append(dataset_annotation)
img_label_minlen = {k: v for k, v in img_label.items() if len(v) >= minlabelsperitem}
i = 0
rows = []
item = []
annotation = []
category = []
for dataset_annotations in iter(img_label_minlen.values()):
for dataset_annotation in dataset_annotations:
kp = np.reshape(dataset_annotation["keypoints"], (-1,3))
kp = kp[kp[:,2]>-90][:,:2]
if len(kp) == 0:
continue
item.append(dataset_annotation["image_id"])
annotation.append(kp)
category.append(dataset_annotation["category_id"])
i += 1
if i > max_items:
break
kp_df = pd.DataFrame({"item":item, "gold":annotation, "category":category})
self.df = kp_df.groupby("item")["gold"].apply(list).reset_index()
self.itemdict = utils.make_categorical(self.df, "item")
def create_stan_data(self, n_users, pct_items, err_rates, difficulty_dict):
self.err_rates = err_rates
self.difficulty_dict = difficulty_dict
self.sim_df = simulation.create_sim_df(create_user_data, self.df, n_users, pct_items, err_rates, difficulty_dict)
stan_data = utils.calc_distances(self.sim_df, (lambda x,y: 1 - oks_score_multi(x, y)), label_colname="annotation", item_colname="item")
return stan_data
def sim_uerr_fn(self, uerr_a, uerr_b, n_users):
z = np.abs(np.random.normal(uerr_a, uerr_b, 10000))
return np.quantile(z, np.linspace(0,1,n_users+2)[1:-1])
def sim_diff_fn(self, difficulty_a, difficulty_b):
z = 1 * np.random.beta(difficulty_a, difficulty_b, 10000)
n_items = len(self.df["item"].unique())
return dict(zip(np.arange(n_items), np.quantile(z, np.linspace(0,1,n_items+2)[1:-1]))) | 2.40625 | 2 |
local/controller.py | Loptt/home-automation-system | 0 | 9686 | <reponame>Loptt/home-automation-system
import requests
import time
import os
import sys
import json
import threading
from getpass import getpass
import schedule
import event as e
import configuration as c
import RPi.GPIO as GPIO
#SERVER_URL = "https://home-automation-289621.uc.r.appspot.com"
#SERVER_URL = "http://127.0.0.1:4747"
SERVER_URL = "http://192.168.11.117:4747"
pins = [2, 3, 4, 7, 8, 9, 10, 11, 14, 15, 17, 18, 22, 23, 24, 27]
def calculate_max_duration(time):
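    # Minutes remaining until midnight, e.g. 21:15 -> (23 - 21) * 60 + (60 - 15) = 165.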
hours = 23 - time.hour
minutes = 60 - time.minute
return hours * 60 + minutes
def turn_on(pin):
print("Turn on " + str(pin))
GPIO.output(pin, GPIO.HIGH)
def turn_off(pin):
print("Turn off " + str(pin))
GPIO.output(pin, GPIO.LOW)
def schedule_off(time, day, duration, pin):
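    # Worked example: an event starting 23:30 on Monday (day 1) with a 60-minute
    # duration spills past midnight; calculate_max_duration(time) returns 30, so
    # off_duration is 30 and the turn-off job lands on Tuesday at 00:30.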
new_day = day
end_time = e.Time(0, 0)
if duration > calculate_max_duration(time):
# Next day calculation
new_day = day + 1
off_duration = duration - calculate_max_duration(time)
end_time.hour = off_duration // 60
end_time.minute = off_duration % 60
else:
# Same day calculation
end_time.hour = time.hour + \
(duration // 60) + (time.minute + (duration % 60)) // 60
end_time.minute = (time.minute + duration % 60) % 60
if new_day > 7:
new_day = 1
if new_day == 1:
schedule.every().monday.at(str(end_time)).do(turn_off, pin)
elif new_day == 2:
schedule.every().tuesday.at(str(end_time)).do(turn_off, pin)
elif new_day == 3:
schedule.every().wednesday.at(str(end_time)).do(turn_off, pin)
elif new_day == 4:
schedule.every().thursday.at(str(end_time)).do(turn_off, pin)
elif new_day == 5:
schedule.every().friday.at(str(end_time)).do(turn_off, pin)
elif new_day == 6:
schedule.every().saturday.at(str(end_time)).do(turn_off, pin)
elif new_day == 7:
schedule.every().sunday.at(str(end_time)).do(turn_off, pin)
def schedule_job(event):
GPIO.setup(event.pin, GPIO.OUT)
    if len(event.days) == 0 or len(event.days) == 7:
        schedule.every().day.at(str(event.time)).do(turn_on, event.pin)
        for day in range(1, 8):  # mirror the per-day branches: also schedule the turn-off
            schedule_off(event.time, day, event.duration, event.pin)
else:
if 1 in event.days:
schedule.every().monday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 1, event.duration, event.pin)
if 2 in event.days:
schedule.every().tuesday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 2, event.duration, event.pin)
if 3 in event.days:
schedule.every().wednesday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 3, event.duration, event.pin)
if 4 in event.days:
schedule.every().thursday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 4, event.duration, event.pin)
if 5 in event.days:
schedule.every().friday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 5, event.duration, event.pin)
if 6 in event.days:
schedule.every().saturday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 6, event.duration, event.pin)
if 7 in event.days:
schedule.every().sunday.at(str(event.time)).do(turn_on, event.pin)
schedule_off(event.time, 7, event.duration, event.pin)
def run_scheduling():
while True:
schedule.run_pending()
time.sleep(1)
def initial_setup():
username = input("Enter your username: ")
password = getpass("Enter your password: ")
pload = json.dumps({"username": username, "password": password})
r = requests.post(SERVER_URL + "/login", data=pload,
headers={'Content-type': 'application/json'})
r_dict = r.json()
if not r_dict["valid"]:
print("Invalid username/password")
print("Run program again to try again")
sys.exit()
print("Successful login...")
print("Saving configuration...")
f = open("config.txt", "w")
f.write(r_dict["user"])
f.close()
return r_dict["user"]
def get_user():
f = open("config.txt", "r")
user = f.readline()
r = requests.get(SERVER_URL + "/users/" + user)
if r.status_code == 200:
print("Successful login...")
return user
else:
print("Invalid user... Reinitializing configuration")
return initial_setup()
def get_configuration(user):
r = requests.get(SERVER_URL + "/configurations/by-user/" + user)
if r.status_code != 200:
print("Error retrieving configuration, check internet connection.")
sys.exit()
r_dict = r.json()
return c.Configuration(
r_dict["systemStatus"], r_dict["rainPercentage"], r_dict["defaultDuration"], r_dict["update"], r_dict["_id"])
def set_update_off(configuration):
r = requests.put(
SERVER_URL + "/configurations/set-off-update/" + configuration.id)
if r.status_code >= 400:
print(
"Error updating configuration status... Possible reconfiguration on next cycle")
else:
print("Update set off.")
def update_schedules(user):
r = requests.get(
SERVER_URL + "/devices/by-user-with-events/" + user)
devices = r.json()
schedule.clear()
GPIO.cleanup()
for device in devices:
for event in device["events"]:
print(event)
schedule_job(e.Event(
device["pin"], event["days"], e.Time(event["time"]["hour"], event["time"]["minute"]), e.Repetition(event["repetition"]["times"], event["repetition"]["date"], event["repetition"]["current"]), event["duration"]))
def setup():
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
for pin in pins:
GPIO.setup(pin, GPIO.OUT, initial=GPIO.LOW)
GPIO.cleanup()
def main():
setup()
user = ""
if not os.path.isfile("./config.txt"):
print("No configuration found... Initializing configuration")
user = initial_setup()
else:
print("Validating user...")
user = get_user()
print("Initializing routine...")
# Initialize separate thread to run scheduling jobs
thread = threading.Thread(None, run_scheduling, "Schedule")
thread.start()
print("Schedule running.")
while True:
configuration = get_configuration(user)
if configuration.update:
print("Updating schedule...")
update_schedules(user)
set_update_off(configuration)
time.sleep(1)
thread.join()
if __name__ == "__main__":
main()
| 3.125 | 3 |
src/graphql_sqlalchemy/graphql_types.py | gzzo/graphql-sqlalchemy | 12 | 9687 | from typing import Dict, Union
from graphql import (
GraphQLBoolean,
GraphQLFloat,
GraphQLInputField,
GraphQLInt,
GraphQLList,
GraphQLNonNull,
GraphQLScalarType,
GraphQLString,
)
from sqlalchemy import ARRAY, Boolean, Float, Integer
from sqlalchemy.dialects.postgresql import ARRAY as PGARRAY
from sqlalchemy.types import TypeEngine
def get_graphql_type_from_column(column_type: TypeEngine) -> Union[GraphQLScalarType, GraphQLList]:
if isinstance(column_type, Integer):
return GraphQLInt
if isinstance(column_type, Float):
return GraphQLFloat
if isinstance(column_type, Boolean):
return GraphQLBoolean
if isinstance(column_type, (ARRAY, PGARRAY)):
return GraphQLList(get_graphql_type_from_column(column_type.item_type))
return GraphQLString
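# Illustrative mapping (arguments are SQLAlchemy type instances):
#   get_graphql_type_from_column(Integer())      -> GraphQLInt
#   get_graphql_type_from_column(ARRAY(Float())) -> GraphQLList(GraphQLFloat)
#   anything unrecognised falls back to GraphQLString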
def get_base_comparison_fields(graphql_type: Union[GraphQLScalarType, GraphQLList]) -> Dict[str, GraphQLInputField]:
return {
"_eq": GraphQLInputField(graphql_type),
"_neq": GraphQLInputField(graphql_type),
"_in": GraphQLInputField(GraphQLList(GraphQLNonNull(graphql_type))),
"_nin": GraphQLInputField(GraphQLList(GraphQLNonNull(graphql_type))),
"_lt": GraphQLInputField(graphql_type),
"_gt": GraphQLInputField(graphql_type),
"_gte": GraphQLInputField(graphql_type),
"_lte": GraphQLInputField(graphql_type),
"_is_null": GraphQLInputField(GraphQLBoolean),
}
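# These operators end up in generated filter inputs, enabling queries such as
# `where: { id: { _gt: 10, _lt: 100 } }` (illustrative GraphQL; the actual field
# names depend on the mapped model).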
def get_string_comparison_fields() -> Dict[str, GraphQLInputField]:
return {"_like": GraphQLInputField(GraphQLString), "_nlike": GraphQLInputField(GraphQLString)}
| 2.3125 | 2 |
Knapsack.py | byterubpay/mininero1 | 182 | 9688 | <reponame>byterubpay/mininero1
import Crypto.Random.random as rand
import itertools
import math #for log
import sys
def decomposition(i):
#from stack exchange, don't think it's uniform
while i > 0:
n = rand.randint(1, i)
yield n
i -= n
def Decomposition(i):
while True:
l = list(decomposition(i))
if len(set(l)) == len(l):
return l
def decomposition2(n, s, d, k):
#home-brewed, returns no duplicates, includes the number d
s = s - 1
n = n
while True:
a = [d]
nn = n
#a.append(d)
for i in range(0, s):
a.append(rand.randint(0, n))
a.sort()
#print("a", a)
b = []
c = []
while len(a) > 0:
t = a.pop()
#print(t, a)
if t >= d:
b.append(nn - t)
else:
c.append(nn - t)
nn = t
c.append(nn)
tot = b[:] + c[:]
#print("b", b)
if sum(set(tot)) == n and len(c) > int(k):
return sorted(c), sorted(b)
def decomposition3(n, s, d, k):
#a combination of both methods, designed to get some smaller values
send, change = decomposition2(n, s, d, k)
for i in send:
if i > n / s:
send.remove(i)
send = send + list(Decomposition(i))
for i in change:
if i > n / (s - 1):
change.remove(i)
change = change + list(Decomposition(i))
return send, change
def divv(l, m):
    return [a / float(m) for a in l]
def frexp10(x):
exp = int(math.log10(x))
return x / 10**exp, exp
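# e.g. frexp10(1234.5) == (1.2345, 3): the base-10 mantissa and exponent.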
def decideAmounts(totalInputs, toSend, Partitions, k, fuzz):
#fuzz is an optional amount to fuzz the transaction by
#so if you start with a big obvious number like 2000, it might be fuzzed by up to "fuzz" amount
fz = rand.randint(0, int(fuzz * 1000) ) / 1000.0
toSend += fz
g, ii =frexp10(totalInputs)
ii = 10 ** (-1 * min(ii - 2, 0))
print("ii", ii)
M = 10 ** (int(math.log(2 ** Partitions) / math.log(10))) * ii
#M = 10 ** M
print("multiplier:", M)
totalInputs = int(totalInputs * M)
toSend = int(toSend * M)
change = totalInputs - toSend
send_amounts, change_amounts = decomposition3(totalInputs, Partitions, toSend, k)
all_amounts = send_amounts[:] + change_amounts[:]
rand.shuffle(all_amounts)
print("")
print("change amounts:", divv(change_amounts, M))
print("send amounts:", divv(send_amounts, M))
print("now from the following, how much is sent?")
print("all amounts:", sorted(divv(all_amounts, M)))
print("possible sent amounts:")
amounts = []
for L in range(0, len(all_amounts)+1):
for subset in itertools.combinations(all_amounts, L):
amounts.append(sum(subset))
print("number of possible sent amounts:")
print(len(amounts))
print("2^N:", 2 ** len(all_amounts))
print("number of possible sent amounts duplicates removed:")
print(len(list(set(amounts))))
if len(sys.argv) > 2:
    parts = 7
    kk = rand.randint(1, int(parts / 4)) # how many sends to demand
fuzz = 1
decideAmounts(float(sys.argv[1]), float(sys.argv[2]), parts, kk, fuzz)
| 2.796875 | 3 |
drought_impact_forecasting/models/model_parts/Conv_Transformer.py | rudolfwilliam/satellite_image_forecasting | 4 | 9689 | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from einops import rearrange
from .shared import Conv_Block
from ..utils.utils import zeros, mean_cube, last_frame, ENS
class Residual(nn.Module):
def __init__(self, fn):
super().__init__()
self.fn = fn
def forward(self, x, **kwargs):
return self.fn(x, **kwargs) + x
class PreNorm(nn.Module):
def __init__(self, dim, fn):
super().__init__()
self.norm = nn.LayerNorm(dim)
self.fn = fn
def forward(self, x, **kwargs):
return self.fn(torch.stack([self.norm(x[..., i]) for i in range(x.size()[-1])], dim=-1), **kwargs)
class FeedForward(nn.Module):
def __init__(self, kernel_size, num_hidden, dilation_rate, num_conv_layers):
super().__init__()
self.kernel_size = kernel_size
self.num_hidden = num_hidden
self.num_conv_layers = num_conv_layers
self.dilation_rate = dilation_rate
self.conv = Conv_Block(self.num_hidden, self.num_hidden, kernel_size=self.kernel_size,
dilation_rate=self.dilation_rate, num_conv_layers=self.num_conv_layers)
def forward(self, x):
return torch.stack([self.conv(x[..., i]) for i in range(x.size()[-1])], dim=-1)
class ConvAttention(nn.Module):
def __init__(self, num_hidden, kernel_size, enc=True, mask=False):
super(ConvAttention, self).__init__()
self.enc = enc
self.mask = mask
self.kernel_size = kernel_size
self.num_hidden = num_hidden
# important note: shared convolution is intentional here
if self.enc:
# 3 times num_hidden for out_channels due to queries, keys & values
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=self.num_hidden, out_channels=3*self.num_hidden, kernel_size=1, padding="same", padding_mode="reflect")
)
else:
# only 2 times num_hidden for keys & values
self.conv1 = nn.Sequential(
nn.Conv2d(in_channels=self.num_hidden, out_channels=2*self.num_hidden, kernel_size=1, padding="same", padding_mode="reflect")
)
self.conv2 = nn.Sequential(
nn.Conv2d(in_channels=self.num_hidden*2, out_channels=1, kernel_size=self.kernel_size, padding="same", padding_mode="reflect")
)
def forward(self, x, enc_out=None):
# s is num queries, t is num keys/values
b, _, _, _, s = x.shape
if self.enc:
t = s
qkv_set = torch.stack([self.conv1(x[..., i]) for i in range(t)], dim=-1)
Q, K, V = torch.split(qkv_set, self.num_hidden, dim=1)
else:
# x correspond to queries
t = enc_out.size()[-1]
kv_set = torch.stack([self.conv1(enc_out[..., i]) for i in range(t)], dim=-1)
K, V = torch.split(kv_set, self.num_hidden, dim=1)
Q = x
K_rep = torch.stack([K] * s, dim=-2)
V_rep = torch.stack([V] * s, dim=-1)
Q_rep = torch.stack([Q] * t, dim=-1)
# concatenate queries and keys for cross-channel convolution
Q_K = torch.concat((Q_rep, K_rep), dim=1)
if self.mask:
# only feed in 'previous' keys & values for computing softmax
V_out = []
# for each query
for i in range(t):
Q_K_temp = rearrange(Q_K[..., :i+1, i], 'b c h w t -> (b t) c h w')
extr_feat = rearrange(torch.squeeze(self.conv2(Q_K_temp), dim=1), '(b t) h w -> b h w t', b=b, t=i+1)
attn_mask = F.softmax(extr_feat, dim=-1)
# convex combination over values using weights from attention mask, per channel c
V_out.append(torch.stack([torch.sum(torch.mul(attn_mask, V_rep[:, c, :, :, i, :i+1]), dim=-1) for c in range(V_rep.size()[1])], dim=1))
V_out = torch.stack(V_out, dim=-1)
else:
Q_K = rearrange(Q_K, 'b c h w s t -> (b s t) c h w') # no convolution across time dim!
extr_feat = rearrange(torch.squeeze(self.conv2(Q_K), dim=1), '(b s t) h w -> b h w t s', b=b, t=t)
attn_mask = F.softmax(extr_feat, dim=-2)
V_out = torch.stack([torch.sum(torch.mul(attn_mask, V_rep[:, c, ...]), dim=-2) for c in range(V_rep.size()[1])], dim=1)
return V_out
class PositionalEncoding(nn.Module):
def __init__(self, num_hidden, img_width):
# no differentiation should happen with respect to the params in here!
super(PositionalEncoding, self).__init__()
self.num_hidden = num_hidden
self.img_width = img_width
def _get_sinusoid_encoding_table(self, t, device):
''' Sinusoid position encoding table '''
sinusoid_table = torch.stack([self._get_position_angle_vec(pos_i) for pos_i in range(t)], dim=0)
sinusoid_table[:, :, 0::2] = torch.sin(sinusoid_table[:, :, 0::2]) # even dim
sinusoid_table[:, :, 1::2] = torch.cos(sinusoid_table[:, :, 1::2]) # odd dim
return torch.moveaxis(sinusoid_table, 0, -1)
def _get_position_angle_vec(self, position):
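        # Transformer-style angles: position / 10000^(2*(j//2) / d_model); the
        # caller applies sin to even channels and cos to odd channels.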
return_list = [torch.ones((1,
self.img_width,
self.img_width),
device=torch.device("cuda:0" if torch.cuda.is_available() else "cpu")) *
(position / np.power(10000, 2 * (hid_j // 2) / self.num_hidden[-1])) for hid_j in range(self.num_hidden[-1])]
return torch.stack(return_list, dim=1)
def forward(self, x, t, single=False):
"""Returns entire positional encoding until step T if not single, otherwise only encoding of time step T."""
if not single:
self.register_buffer('pos_table', self._get_sinusoid_encoding_table(t, x.get_device()))
return torch.squeeze(x + self.pos_table.clone().detach(), dim=0)
else:
if t % 2 == 0:
return x + torch.unsqueeze(torch.sin(self._get_position_angle_vec(t)), dim=-1).clone().detach()
else:
return x + torch.unsqueeze(torch.cos(self._get_position_angle_vec(t)), dim=-1).clone().detach()
class Encoder(nn.Module):
def __init__(self, num_hidden, depth, dilation_rate, num_conv_layers, kernel_size, img_width):
super().__init__()
self.num_hidden = num_hidden
self.depth = depth
self.dilation_rate = dilation_rate
self.num_conv_layers = num_conv_layers
self.kernel_size = kernel_size
self.img_width = img_width
self.layers = nn.ModuleList([])
self.num_hidden = self.num_hidden
for _ in range(self.depth):
self.layers.append(nn.ModuleList([
Residual(PreNorm([self.num_hidden[-1], self.img_width, self.img_width],
ConvAttention(kernel_size=self.kernel_size, num_hidden=self.num_hidden[-1], enc=True))),
Residual(PreNorm([self.num_hidden[-1], self.img_width, self.img_width],
FeedForward(kernel_size=self.kernel_size, num_hidden=self.num_hidden[-1],
dilation_rate=self.dilation_rate, num_conv_layers=self.num_conv_layers)))
]))
def forward(self, x):
for attn, ff in self.layers:
x = attn(x)
x = ff(x)
return x
class Decoder(nn.Module):
def __init__(self, num_hidden, depth, dilation_rate, num_conv_layers, kernel_size, img_width, non_pred_channels):
super().__init__()
self.layers = nn.ModuleList([])
self.dilation_rate = dilation_rate
self.num_conv_layers = num_conv_layers
self.depth = depth
self.kernel_size = kernel_size
self.img_width = img_width
self.num_hidden = num_hidden
self.num_non_pred_feat = non_pred_channels
for _ in range(self.depth):
self.layers.append(nn.ModuleList([
# (masked) query self-attention
Residual(PreNorm([self.num_hidden[-1], self.img_width, self.img_width],
ConvAttention(num_hidden=self.num_hidden[-1], kernel_size=self.kernel_size, mask=True))),
# encoder-decoder attention
Residual(PreNorm([self.num_hidden[-1], self.img_width, self.img_width],
ConvAttention(num_hidden=self.num_hidden[-1], kernel_size=self.kernel_size, enc=False))),
# feed forward
Residual(PreNorm([self.num_hidden[-1], self.img_width, self.img_width],
FeedForward(num_hidden=self.num_hidden[-1], kernel_size=self.kernel_size, dilation_rate=self.dilation_rate, num_conv_layers=self.num_conv_layers)))
]))
def forward(self, queries, enc_out):
for query_attn, attn, ff in self.layers:
queries = query_attn(queries)
x = attn(queries, enc_out=enc_out)
x = ff(x)
return x
class Conv_Transformer(nn.Module):
"""Standard, single-headed ConvTransformer like in https://arxiv.org/pdf/2011.10185.pdf"""
def __init__(self, num_hidden, depth, dilation_rate, num_conv_layers, kernel_size, img_width, non_pred_channels, num_layers_query_feat, in_channels):
super(Conv_Transformer, self).__init__()
self.num_hidden = num_hidden
self.depth = depth
self.num_layers_query_feat = num_layers_query_feat
self.dilation_rate = dilation_rate
self.num_conv_layers = num_conv_layers
self.kernel_size = kernel_size
self.img_width = img_width
self.in_channels = in_channels
self.non_pred_channels = non_pred_channels
self.pos_embedding = PositionalEncoding(self.num_hidden, self.img_width)
self.Encoder = Encoder(num_hidden=self.num_hidden, depth=self.depth, dilation_rate=self.dilation_rate,
num_conv_layers=self.num_conv_layers, kernel_size=self.kernel_size, img_width=self.img_width)
self.Decoder = Decoder(num_hidden=self.num_hidden, depth=self.depth, dilation_rate=self.dilation_rate,
num_conv_layers=self.num_conv_layers, kernel_size=self.kernel_size, img_width=self.img_width, non_pred_channels=self.non_pred_channels)
self.input_feat_gen = Conv_Block(self.in_channels, self.num_hidden[-1], num_conv_layers=self.num_conv_layers, kernel_size=self.kernel_size)
# TODO (optionally): replace this by SFFN
self.back_to_pixel = nn.Sequential(
nn.Conv2d(self.num_hidden[-1], 4, kernel_size=1)
)
def forward(self, frames, n_predictions):
_, _, _, _, T = frames.size()
feature_map = self.feature_embedding(img=frames, network=self.input_feat_gen)
enc_in = self.pos_embedding(feature_map, T)
# encode all input values
enc_out = torch.concat(self.Encoder(enc_in), dim=-1)
out_list = []
queries = self.feature_embedding(img=feature_map[..., -1], network=self.query_feat_gen)
for _ in range(n_predictions):
dec_out = self.Decoder(queries, enc_out)
pred = self.feature_embedding(dec_out)
out_list.append(pred)
queries = torch.concat((queries, pred), dim=-1)
x = torch.stack(out_list, dim=-1)
return x
def feature_embedding(self, img, network):
generator = network
gen_img = []
for i in range(img.shape[-1]):
gen_img.append(generator(img[..., i]))
gen_img = torch.stack(gen_img, dim=-1)
return gen_img
class ENS_Conv_Transformer(Conv_Transformer):
"""ConvTransformer that employs delta model and can read in non-pred future features, hence taylored to the ENS challenge."""
def __init__(self, num_hidden, output_dim, depth, dilation_rate, num_conv_layers, kernel_size, img_width, non_pred_channels, num_layers_query_feat, in_channels, baseline):
super(ENS_Conv_Transformer, self).__init__(num_hidden, depth, dilation_rate, num_conv_layers, kernel_size, img_width, non_pred_channels, num_layers_query_feat, in_channels - 1)
# remove cloud mask
self.in_channels = self.in_channels - 1
self.baseline = baseline
self.output_dim = output_dim
def forward(self, input_tensor, non_pred_feat=None, prediction_count=1):
baseline = eval(self.baseline + "(input_tensor[:, 0:5, :, :, :], 4)")
b, _, width, height, T = input_tensor.size()
pred_deltas = torch.zeros((b, self.output_dim, height, width, prediction_count), device = self._get_device())
preds = torch.zeros((b, self.output_dim, height, width, prediction_count), device = self._get_device())
baselines = torch.zeros((b, self.output_dim, height, width, prediction_count), device = self._get_device())
# remove cloud mask channel for feature embedding
feature_map = torch.concat((input_tensor[:, :4, ...], input_tensor[:, 5:, ...]), dim=1)
features = self.feature_embedding(img=feature_map, network=self.input_feat_gen)
enc_in = torch.stack([self.pos_embedding(features[i, ...], T) for i in range(b)], dim=0)
enc_out = self.Encoder(enc_in)
# first query stems from last input frame
queries = features[..., -1:]
baselines[..., 0] = baseline
pred_deltas[..., 0] = self.back_to_pixel(self.Decoder(queries, enc_out)[..., 0])
preds[..., 0] = pred_deltas[..., 0] + baselines[..., 0]
for t in range(1, prediction_count):
if self.baseline == "mean_cube":
baselines[..., t] = (preds[..., t - 1] + (baselines[..., t - 1] * (T + t)))/(T + t + 1)
if self.baseline == "zeros":
pass
else:
baselines[..., t] = preds[..., t - 1]
# concatenate with non-pred features & feature embedding & do positional encoding
query = self.pos_embedding(self.feature_embedding(torch.concat((preds[..., t-1:t], non_pred_feat[..., t-1:t]), dim=1), network=self.input_feat_gen), t, single=True)
queries = torch.concat((queries, query), dim=-1)
pred_deltas[..., :t] = torch.stack([self.back_to_pixel(self.Decoder(queries, enc_out)[..., i]) for i in range(t)], dim=-1)
preds[..., t] = pred_deltas[..., t] + baselines[..., t]
return preds, pred_deltas, baselines
def _get_device(self):
return next(self.parameters()).device | 2.234375 | 2 |
tests/test_clients.py | rodrigoapereira/python-hydra-sdk | 0 | 9690 | # Copyright (C) 2017 O.S. Systems Software LTDA.
# This software is released under the MIT License
import unittest
from hydra import Hydra, Client
class ClientsTestCase(unittest.TestCase):
def setUp(self):
self.hydra = Hydra('http://localhost:4444', 'client', 'secret')
self.client = Client(
name='new-client',
secret='client-secret',
scopes=['devices', 'products'],
redirect_uris=['http://localhost/callback'],
)
def test_can_create_client(self):
client = self.hydra.clients.create(self.client)
self.addCleanup(self.hydra.clients.delete, client_id=client.id)
self.assertEqual(client.name, 'new-client')
self.assertEqual(client.secret, 'client-secret')
self.assertEqual(client.scopes, ['devices', 'products'])
self.assertEqual(client.redirect_uris, ['http://localhost/callback'])
def test_can_get_client(self):
client_id = self.hydra.clients.create(self.client).id
self.addCleanup(self.hydra.clients.delete, client_id=client_id)
client = self.hydra.clients.get(client_id)
self.assertEqual(client.id, client_id)
def test_can_update_client(self):
client = self.hydra.clients.create(self.client)
self.addCleanup(self.hydra.clients.delete, client_id=client.id)
self.assertEqual(client.name, 'new-client')
client.name = 'new-client-name'
self.hydra.clients.update(client)
self.assertEqual(client.name, 'new-client-name')
def test_can_delete_client(self):
client = self.hydra.clients.create(self.client)
self.addCleanup(self.hydra.clients.delete, client_id=client.id)
self.assertIsNotNone(self.hydra.clients.get(client.id))
self.hydra.clients.delete(client.id)
self.assertIsNone(self.hydra.clients.get(client.id))
def test_can_list_all_clients(self):
client1 = self.hydra.clients.create(self.client)
self.addCleanup(self.hydra.clients.delete, client_id=client1.id)
client2 = self.hydra.clients.create(self.client)
self.addCleanup(self.hydra.clients.delete, client_id=client2.id)
clients = [c.id for c in self.hydra.clients.all()]
self.assertIn(client1.id, clients)
self.assertIn(client2.id, clients)
| 2.5625 | 3 |
test/PR_test/unit_test/backend/test_binary_crossentropy.py | Phillistan16/fastestimator | 0 | 9691 | <gh_stars>0
# Copyright 2020 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import unittest
import numpy as np
import tensorflow as tf
import torch
import fastestimator as fe
class TestBinaryCrossEntropy(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.tf_true = tf.constant([[1], [0], [1], [0]])
cls.tf_pred = tf.constant([[0.9], [0.3], [0.8], [0.1]])
cls.torch_true = torch.tensor([[1], [0], [1], [0]])
cls.torch_pred = torch.tensor([[0.9], [0.3], [0.8], [0.1]])
    def test_binary_crossentropy_average_loss_true_tf(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.tf_pred, y_true=self.tf_true).numpy()
obj2 = 0.19763474
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_loss_false_tf(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.tf_pred, y_true=self.tf_true, average_loss=False).numpy()
obj2 = np.array([0.10536041, 0.3566748, 0.22314338, 0.10536041])
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_from_logit_average_loss_true_tf(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.tf_pred,
y_true=self.tf_true,
from_logits=True,
average_loss=True).numpy()
obj2 = 0.57775164
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_from_logit_average_loss_false_tf(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.tf_pred,
y_true=self.tf_true,
from_logits=True,
average_loss=False).numpy()
obj2 = np.array([0.34115386, 0.8543553, 0.37110066, 0.7443967])
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_loss_true_torch(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.torch_pred, y_true=self.torch_true).numpy()
obj2 = 0.19763474
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_loss_false_torch(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.torch_pred, y_true=self.torch_true,
average_loss=False).numpy()
obj2 = np.array([0.10536041, 0.3566748, 0.22314338, 0.10536041])
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_from_logit_average_loss_true_torch(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.torch_pred,
y_true=self.torch_true,
from_logits=True,
average_loss=True).numpy()
obj2 = 0.57775164
self.assertTrue(np.allclose(obj1, obj2))
    def test_binary_crossentropy_average_from_logit_average_loss_false_torch(self):
obj1 = fe.backend.binary_crossentropy(y_pred=self.torch_pred,
y_true=self.torch_true,
from_logits=True,
average_loss=False).numpy()
obj2 = np.array([0.34115386, 0.8543553, 0.37110066, 0.7443967])
self.assertTrue(np.allclose(obj1, obj2))
| 2.125 | 2 |
ats_hex.py | kyeser/scTools | 0 | 9692 | <reponame>kyeser/scTools
#!/usr/bin/env python
from scTools import interval, primeForm
from scTools.rowData import ats
from scTools.scData import *
count = 1
for w in ats:
prime = primeForm(w[0:6])
    print('%3d\t' % count, end=' ')
    for x in w:
        print('%X' % x, end=' ')
    print(' ', end=' ')
    intervals = interval(w)
    for y in intervals:
        print('%X' % y, end=' ')
    print('\t%2d\t' % sc6.index(prime), end=' ')
if prime == sc6[1] or prime == sc6[7] or prime == sc6[8] or \
prime == sc6[20] or prime == sc6[32] or prime == sc6[35]:
        print('AC')
    elif prime == sc6[17]:
        print('AT')
    else:
        print()
count += 1
| 3.3125 | 3 |
src/precon/commands.py | Albert-91/precon | 0 | 9693 | import asyncio
import click
from precon.devices_handlers.distance_sensor import show_distance as show_distance_func
from precon.remote_control import steer_vehicle, Screen
try:
import RPi.GPIO as GPIO
except (RuntimeError, ModuleNotFoundError):
import fake_rpi
GPIO = fake_rpi.RPi.GPIO
@click.command(name="rc")
def remote_control() -> None:
loop = asyncio.get_event_loop()
try:
with Screen() as screen:
loop.run_until_complete(steer_vehicle(screen))
except KeyboardInterrupt:
print("Finishing remote control...")
except Exception as e:
print("Raised unexpected error: %s" % e)
finally:
GPIO.cleanup()
@click.command(name="show-distance")
def show_distance() -> None:
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(show_distance_func())
except KeyboardInterrupt:
print("Finishing measuring distance...")
except Exception as e:
print("Raised unexpected error: %s" % e)
finally:
GPIO.cleanup()
| 2.75 | 3 |
midway.py | sjtichenor/midway-ford | 0 | 9694 | <reponame>sjtichenor/midway-ford
import csv
import string
import ftplib
import math
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import sqlite3
from lxml import html
import requests
import sys
import midwords
import facebook
import hd_images
import adwords_feeds
import sheets
import random
import sales_specials
import scrape
from pprint import pprint
from pyvirtualdisplay import Display
import locale
locale.setlocale(locale.LC_ALL, 'en_US.utf8')
# Misc stuff
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def start_chromedriver():
display = Display(visible=0, size=(800, 800))
display.start()
path_to_chromedriver = 'chromedriver'
browser = webdriver.Chrome(executable_path=path_to_chromedriver)
return browser
# FMC Dealer Scrapes
def randomInterval(): #returns random float roughly between 1.5 and 2.75
return 1.75+1*random.random()-.25*random.random()
def switchDefaultSearch(browser) : # Switch between MyLot/States
#Switch default search back to Dealership Proximity
print('Switching default search...')
browser.get('https://www.vlplus.dealerconnection.com/Search?&searchType=quicksearch')
time.sleep(3)
browser.find_element_by_xpath('//a[@id="ActivateSettings"]').click()
time.sleep(3)
browser.find_element_by_xpath('//a[text()="Search Settings"]').click()
time.sleep(3)
# Check what default is currently set to
tree = html.fromstring(browser.page_source)
currentSetting = tree.xpath('//option[@selected]/text()')
print('Setting Before:', currentSetting)
if 'My Lot' in currentSetting:
print('Switching default search from My Lot to Proximity')
browser.find_element_by_xpath('//select[@id="searchSettingsDefaultSearchMode"]').click()
time.sleep(2)
browser.find_element_by_xpath('//option[@value="6"]').click()
time.sleep(2)
elif 'States' in currentSetting :
print('Switching default search from States to My Lot')
browser.find_element_by_xpath('//select[@id="searchSettingsDefaultSearchMode"]').click()
time.sleep(2)
browser.find_element_by_xpath('//option[@value="1"]').click()
time.sleep(2)
currentSetting = tree.xpath('//option[@selected]/text()')
#print('Setting After:', currentSetting) This doesn't work..
browser.find_element_by_xpath('//a[@id="saveSearchSettings"]').click()
time.sleep(2)
browser.get('https://www.vlplus.dealerconnection.com/Search?&searchType=quicksearch')
time.sleep(2)
print('Finished switching default search...')
return browser
def getVinList() :
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
vinList = []
c.execute('SELECT vin FROM masterInventory where invType = ?', ('New',))
vinTupleList = c.fetchall()
for vinTuple in vinTupleList :
vin = vinTuple[0]
vinList.append(vin)
numVehicles = len(vinList)
conn.commit()
conn.close()
return vinList
def fmcLogin(browser) : #Logs into fmcdealer and returns browser
    # Fire up ChromeDriver
# path_to_chromedriver = '/Users/spencertichenor/PycharmProjects/midway/chromedriver'
# browser = webdriver.Chrome(executable_path = path_to_chromedriver)
# Log into FMC Dealer
url = 'https://fmcdealer.com'
browser.get(url)
username = browser.find_element_by_id('DEALER-WSLXloginUserIdInput')
password = browser.find_element_by_id('DEALER-WSLXloginPasswordInput')
username.send_keys('t-spen29')
password.send_keys('<PASSWORD>')
browser.find_element_by_xpath('//div[@id="DEALER-WSLXloginWSLSubmitButton"]/input').click()
time.sleep(5)
return browser
def navigateToVincent(browser, vin) :
print('\nNavigating to Vincent page for VIN: ' + vin + '...\n\n')
#print('\nSearching for rebate info for vehicle ' + str(k+1) + '/' + str(len(vinList)) + '...')
#print('\n\tVIN: ' + vin + '\n')
browser.get('https://www.vlplus.dealerconnection.com/Search?&searchType=quicksearch')
time.sleep(3)
try :
vinField = browser.find_element_by_id('txtVIN')
vinField.send_keys(vin)
browser.find_element_by_xpath('//input[@value="Search"]').click()
time.sleep(2)
except :
print('VIN FIELD ERROR:')
print(sys.exc_info()[0])
#errorList.append(vin)
#pass this was pass but i think it should be return
return browser
source = browser.page_source
if 'Please broaden your search.' not in source : # Check if vehicle was not found in dealership proximity search
        # Click on Vincent button
        tree = html.fromstring(source)
        vincentUrl = tree.xpath('//a[@title="Smart Vincent"]/@href')
        try :
            vincentUrl = vincentUrl[0]
browser.get(vincentUrl)
time.sleep(4)
except :
print('Vincent Url Error:')
print(sys.exc_info()[0])
#errorList.append(vin)
#pass
return browser
source = browser.page_source
tree = html.fromstring(source)
if 'Please click the "Close" button to continue with the Sales Process.' in source : # Check for recall warning
browser.find_element_by_xpath('//input[@value="Close"]').click()
time.sleep(2)
if 'value="Certificate Inquiry"' not in source : # Check if vehicle already sold
# Enter ZIP code and click next
try :
zipField = browser.find_element_by_xpath('//div/input[@name="customerZip"]')
zipField.send_keys('55113')
browser.find_element_by_id('primaryButtonId').click()
time.sleep(2)
except :
print('ZIP FIELD ERROR:')
print(sys.exc_info()[0])
#errorList.append(vin)
pass
# Get rebate info
#rebateInfo = scrapeRebateInfo(browser)
else :
#soldList.append(vin)
print('\tIt looks like this vehicle has already been sold.\n\n')
else : # Vehicle not found in Dealership Proximity search
print('\tVehicle not found after searching Dealership Proximity.')
#Switch default search to My Lot
browser = switchDefaultSearch(browser)
try :
vinField = browser.find_element_by_id('txtVIN')
vinField.send_keys(vin)
browser.find_element_by_xpath('//input[@value="Search"]').click()
time.sleep(2)
except :
#errorList.append(vin)
print('VIN FIELD ERROR:')
print(sys.exc_info()[0])
#switchToProximity(browser)
return browser
# Click on Vincent button
source = browser.page_source
tree = html.fromstring(source)
vincentUrl = tree.xpath('//a[@title="Smart Vincent"]/@href')
try :
vincentUrl = vincentUrl[0]
browser.get(vincentUrl)
time.sleep(4)
except :
#errorList.append(vin)
print('Vincent Url Error:')
print(sys.exc_info()[0])
#switchToProximity(browser)
#return browser
source = browser.page_source
tree = html.fromstring(source)
if 'Please click the "Close" button to continue with the Sales Process.' in source : # Check for recall warning
browser.find_element_by_xpath('//input[@value="Close"]').click()
time.sleep(2)
if 'value="Certificate Inquiry"' not in source : # Check if vehicle already sold
# Enter ZIP code and click next
try :
zipField = browser.find_element_by_xpath('//div/input[@name="customerZip"]')
zipField.send_keys('55113')
browser.find_element_by_id('primaryButtonId').click()
time.sleep(2)
except :
#errorList.append(vin)
print('ZIP FIELD ERROR:')
print(sys.exc_info()[0])
#switchToProximity(browser)
#return browser
# Get rebate info
#rebateInfo = scrapeRebateInfo(browser)
else :
#soldList.append(vin)
print('\tIt looks like this vehicle has already been sold.\n\n')
#Switch default search back to Dealership Proximity
#switchToProximity(browser)
#pass
return browser
# print('\nNumber of vehicles appear to have been sold: ' + str(len(soldList)))
# print('Sold List:')
# print(soldList)
# print('\nNumber of vehicles that ran into errors: ' + str(len(errorList)))
# print('Error List:')
# print(errorList)
#print('\n\nFinished getting rebate information.')
def scrapeRebateInfo(page_source) : #input browser of vincent page, return tuple with unconditional rebate info
# Get rebate info
#source = browser.page_source
tree = html.fromstring(page_source)
vin = tree.xpath('//dt[.="VIN:"]/following-sibling::dd/text()')
vin = vin[0].replace('\xa0', ' ').replace('\t', '').replace('\n', '')
rowspans = tree.xpath('//table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textC altRow"]/@rowspan | //table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textC "]/@rowspan')
conditions = tree.xpath('//table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr[@class="programTableHeader"]/td[@style="{border-right:none;}"]/text()')
nums = tree.xpath('//table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textL txtCol "]/a/text() | //table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textL txtCol altRow "]/a/text()')
names = tree.xpath('//table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textL txtCol "]/text() | //table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textL txtCol altRow "]/text()')
amounts = tree.xpath('//table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textR "]/text() | //table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textR altRow"]/text()')
expirations = tree.xpath('//table[@summary="This table displays and lets you choose public program bundles."]/tbody/tr/td[@class="textC highlight noWrap"]/text()')
to_db = (vin,)
if rowspans == [] : # No unconditional rebates
print('No rebates found for this vehicle.\n')
print('Updating rebate info...')
while len(to_db) < 43 :
to_db += (None,)
else : # Yah, it has unconditional rebates
# Clean up Condition info
condition = conditions[0]
condition = condition.replace('\n', '').replace('\t', '').replace(' ', '').replace(':C', ': C')
condition = condition[1:]
condition = removeWeirdChars(condition)
if 'Cash Payment' in condition :
print('\tUnconditional Rebates:\n')
i=0
for i in range(i, int(rowspans[0])) :
num = nums[i].replace('\n', '').replace('\t', '').replace(' ', '')
name = names[i*2+1].replace('\n', '').replace('\t', '').replace(' - ', '').replace('s C', 's C').replace(' ', '').replace('"', '')
amount = amounts[i].replace('\n', '').replace('\t', '').replace(' ', '')
expiration = expirations[i].replace('\n', '').replace('\t', '').replace(' ', '')
if 'SIRIUS' in name : #Fix for the stupid 6-month extra Sirius incentive
amount = '$0'
                if ' - ' not in amount and 'Amount Not Available' not in amount : # stupid fix for Oct 2016 rebate and another fix for Dec 2016 rebate
print('\t\tProgram: #' + num)
print('\t\tName: ' + name)
print('\t\tAmount: ' + amount)
print('\t\tExpiration: ' + expiration + '\n')
to_db += (num,) + (name,) + (condition,) + (amount,) + (expiration,) + (condition,) #fix double header
while len(to_db) < 43 :
to_db += (None,)
return to_db
time.sleep(2)
def scrapeLeaseInfo(page_source) :
# Connect to database
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
to_db = ()
# Get rebate info
tree = html.fromstring(page_source)
vin = tree.xpath('//dt[.="VIN:"]/following-sibling::dd/text()')
vin = vin[0].replace('\xa0', ' ').replace('\t', '').replace('\n', '')
vehDesc = tree.xpath('//dt[.="Description:"]/following-sibling::dd/text()')
residualTable = tree.xpath('//table[@class="rateTable"]/tbody/tr/td/text() | //table[@class="rateTable"]/thead/tr/th/text()')
#rclRebateRow = tree.xpath('//tr[td[contains(., "RCL Customer Cash")]]/td/text()')
rclFactorsRow = tree.xpath('//tr[td[contains(., "RCL Factors")]]/td/text()')
rclTermLengths = tree.xpath('//tr[td[contains(., "RCL Factors")]]//th/text()')
rclFactors = tree.xpath('//tr[td[contains(., "RCL Factors")]]//td/text()')
rebateCells = tree.xpath('//tr[td[contains(., "LEASE")]]/following-sibling::*/td/text()')
#print('rebateCells:', rebateCells)
#print('length of rebateCells:', len(rebateCells))
if rebateCells != [] :
print('Lease Rebates:')
rebateDict = {}
for i, cell in enumerate(rebateCells) :
if 'Cash' in cell and 'Fast Cash Certificate' not in cell:
rebateName = cell.replace('\t', '').replace('\n', '').replace(' - ', '')
if '$' in rebateCells[i+2] :
rebateAmount = int(rebateCells[i+2].replace('\t', '').replace('\n', '').replace(' ', '').replace('$', '').replace(',', ''))
rebateExpiration = rebateCells[i+3].replace('\t', '').replace('\n', '').replace(' ', '')
elif '$' in rebateCells[i+3] :
rebateAmount = int(rebateCells[i+3].replace('\t', '').replace('\n', '').replace(' ', '').replace('$', '').replace(',', ''))
rebateExpiration = rebateCells[i+4].replace('\t', '').replace('\n', '').replace(' ', '')
rebateDict[rebateName] = [rebateAmount, rebateExpiration]
print('\tRebate Name:', rebateName)
print('\tRebate Amount:', rebateAmount)
print('\tRebate Expiration:', rebateExpiration)
print('\n')
print('rebateDict:', rebateDict)
totalRebates = 0
for rebateName in rebateDict :
totalRebates += rebateDict[rebateName][0]
vehDesc = vehDesc[0].replace('\xa0', ' ').replace('\t', '').replace('\n', '')
rclResiduals = {}
for i, leaseTerm in enumerate(residualTable[0:4]) :
rclResiduals[leaseTerm + ' Month'] = float(residualTable[i+5])/100
#rclRebateName = rclRebateRow[5].replace('\t', '').replace('\n', '').replace(' - ', '')
#rclRebateAmount = rclRebateRow[8].replace('\t', '').replace('\n', '').replace(' ', '').replace('$', '').replace(',', '')
#rclRebateExpiration = rclRebateRow[9].replace('\t', '').replace('\n', '').replace(' ', '')
rclTermLengths = rclTermLengths[:-1]
for i, termLength in enumerate(rclTermLengths) :
rclTermLengths[i] = int(termLength)
rclFactorsExpiration = rclFactorsRow[8].replace('\t', '').replace('\n', '').replace(' ', '')
factors = {}
for e in rclFactors :
if 'Tier' in e :
tierIndex = rclFactors.index(e)
tier = rclFactors[tierIndex]
tierFactors = rclFactors[tierIndex+1:tierIndex+5]
for i, factor in enumerate(tierFactors) :
tierFactors[i] = float(factor)
factors[tier] = tierFactors
print('VIN:', vin)
print('Vehicle Description:', vehDesc)
#print('RCL Rebate Name:', rclRebateName)
print('Total Rebates:', totalRebates)
#print('RCL Rebate Expiration:', rclRebateExpiration)
print('RCL Lengths:', rclTermLengths)
print('RCL Factors: ', factors) #used to be factors but too hard to deal with everything
print('RCL Factors Expiration:', rclFactorsExpiration)
print('RCL Residual:', rclResiduals)
c.execute('SELECT stock, year, model, vehTrim FROM masterInventory WHERE vin = ?', (vin,))
vehInfo = c.fetchall()
vehInfo = vehInfo[0]
print('vehInfo:', vehInfo)
to_db = (vin,) + vehInfo + (str(rebateDict), totalRebates, str(rclTermLengths), str(factors), rclFactorsExpiration, str(rclResiduals))
#to_db = (vin, str(rebateDict), totalRebates, str(rclTermLengths), str(factors), rclFactorsExpiration, str(rclResiduals))
else :
print('No lease info found.')
to_db = (vin, None, None, None, None, None, None, None, None, None, None)
# Close connection to database
conn.commit()
conn.close()
return to_db
time.sleep(2)
def calculateLeasePayment(vin, termLength, mileage, tier) : # outputs monthly payments. input example: ('1FAHP1231', 36, 15000, 'Tier 0-1')
print('Calculating lease payments for VIN: ' + vin)
leaseParameters = getLeaseParameters(vin)
#print('leaseParameters:', leaseParameters)
if leaseParameters[5] == None : # if there are no lease deals
paymentOptions = (None, None, None, None, None, vin)
else :
msrp = leaseParameters[0]
dealerDiscount = leaseParameters[1]
rebateAmount = leaseParameters[2]
termLengths = leaseParameters[3]
interestRates = leaseParameters[4]
residuals = leaseParameters[5]
termLengthIndex = termLengths.index(termLength)
apr = interestRates[tier][termLengthIndex]
apr += 1 # Juicing the apr by 1%
residual = residuals[str(termLength) + ' Month']
# Adjust residual for mileage
residual += (15000 - mileage)/1500 * .01
residual = round(residual, 2)
taxRate = .07125 # plus any local taxes i guess
aquisitionFee = 645 # need to figure out better way
moneyFactor = apr/2400
salesTax = round(msrp * taxRate, 2) # dunno if this should be here
salesTax = 0
signAndDrive = 0 - aquisitionFee - salesTax
downPayments = [signAndDrive, 0, 1000, 2000, 3000]
print('MSRP:', msrp)
print('Dealer Discount:', dealerDiscount)
print('Rebate Amount:', rebateAmount)
print('Term Length:', str(termLength) + ' Month')
print('APR:', apr)
print('Money Factor:', moneyFactor)
print('Residual:', residual)
print('\n\n')
paymentOptions = ()
for downPayment in downPayments :
sellingPrice = msrp - dealerDiscount - rebateAmount
#taxableAmount = sellingPrice - residualValue - downPayment + rentCharge # not accurate
#salesTax = msrp * taxRate
#salesTax = 0
grossCapCost = msrp - dealerDiscount + aquisitionFee + salesTax
capCostReduction = rebateAmount + downPayment
netCapCost = round(grossCapCost - capCostReduction, 2)
residualValue = round(msrp * residual, 2)
depreciation = round(netCapCost - residualValue, 2)
basePayment = round(depreciation/termLength, 2)
rentPayment = round((netCapCost + residualValue) * moneyFactor, 2)
rentCharge = rentPayment*termLength
totalPayment = round(basePayment + rentPayment, 2)
print('Down Payment:', downPayment)
print('\n')
print('Gross Cap. Cost:', grossCapCost)
print('Cap. Cost Reduction:', capCostReduction)
print('Net Cap. Cost:', netCapCost)
print('Residual Value:', residualValue)
print('Depreciation:', depreciation)
print('Base Payment:', basePayment)
print('Rent Payment:', rentPayment)
print('Total Monthly Payment:', totalPayment)
print('\n\n\n')
paymentOptions += (totalPayment,)
paymentOptions += (vin,)
#print('Payment Options:', paymentOptions)
return paymentOptions
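# Example call (VIN taken from the docstring comment above, purely illustrative):
#   calculateLeasePayment('1FAHP1231', 36, 15000, 'Tier 0-1')
# returns five monthly payments (sign-and-drive, $0, $1k, $2k and $3k down)
# followed by the VIN, or five None values plus the VIN when no lease program
# is on file for the vehicle.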
def scrapeFMC(): # Gets rebate and lease info from FMC Dealer
vinList = getVinList()
#vinList = ['3FA6P0VP1HR195216', '3FA6P0H77HR187150']
#path_to_chromedriver = 'chromedriver'
#browser = webdriver.Chrome(executable_path=path_to_chromedriver)
browser = start_chromedriver()
browser = fmcLogin(browser)
errorList = []
for i, vin in enumerate(vinList) :
print('Vehicle ' + str(i+1) + '/' + str(len(vinList)) + ':\n')
browser = navigateToVincent(browser, vin)
try :
to_db = scrapeRebateInfo(browser.page_source)
updateRebateTable(to_db)
to_db = scrapeLeaseInfo(browser.page_source)
updateLeaseTable(to_db)
#to_db = calculateLeasePayment(vin, 36, 10500, 'Tier 0-1')
#updateLeaseTable(to_db)
except Exception as e:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(e).__name__, e.args)
message += '\nError on line {}'.format(sys.exc_info()[-1].tb_lineno)
print(message)
errorList.append(vin)
continue
print('Error List:', errorList)
print('Number of Errors:', len(errorList))
doubleErrorList = []
for i, vin in enumerate(errorList) : # Re-run all VINs that had errors
print('Vehicle ' + str(i+1) + '/' + str(len(errorList)) + ':\n')
browser = navigateToVincent(browser, vin)
try :
to_db = scrapeRebateInfo(browser.page_source)
updateRebateTable(to_db)
to_db = scrapeLeaseInfo(browser.page_source)
updateLeaseTable(to_db)
except Exception as e:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(e).__name__, e.args)
message += '\nError on line {}'.format(sys.exc_info()[-1].tb_lineno)
print(message)
doubleErrorList.append(vin)
continue
    print('Double Error List:', doubleErrorList)
    print('Number of Double Errors:', len(doubleErrorList))
print(20*'\n')
def updateVLPlusInventoryTable():
print('Scraping Vehicle Locator..')
# Open connection to database
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
# Delete old data
query = 'DELETE FROM VLPlusInventory'
c.execute(query)
# all the xpath that we're gonna need
vin_list_xpath = '//tr[contains(@class, "vehiclerow")]/@vin'
msrp_list_xpath = '//td[contains(@class, "price")]/a[@class="pdfWindowSticker"]/span/text()'
invoice_list_xpath = '//tr[contains(@class, "vehiclerow")]/td[11]/a/span/text()'
pep_list_xpath = '//tr[contains(@class, "vehiclerow")]/td[7]/span[3]/text()'
order_type_list_xpath = '//a[@onclick="showOrderTypeInfo();"]/span/text()'
engine_list_xpath = '//tr[contains(@class, "vehiclerow")]/td[8]/span[1]/text()'
status_list_xpath = '//tr[contains(@class, "vehiclerow")]/td[1]/@class'
# Log into FMC Dealer
#path_to_chromedriver = 'chromedriver'
#browser = webdriver.Chrome(executable_path=path_to_chromedriver)
browser = start_chromedriver()
browser = fmcLogin(browser)
wait = WebDriverWait(browser, 10)
browser.get('https://www.vlplus.dealerconnection.com/InvMgt/')
time.sleep(randomInterval() * 2)
source = browser.page_source
tree = html.fromstring(source)
vehicle_count = tree.xpath('//th[@class="resultcount"]/text()')
print(vehicle_count)
vehicle_count = vehicle_count[1].split(' ')
vehicle_count_index = vehicle_count.index('vehicles') - 1
vehicle_count = vehicle_count[vehicle_count_index]
vehicle_count = int(vehicle_count)
page_count = math.ceil(vehicle_count/25)
print('Total pages:', page_count)
for j in range(0, page_count-1):
tree = html.fromstring(browser.page_source)
vin_list = tree.xpath(vin_list_xpath)
ugly_msrp_list = tree.xpath(msrp_list_xpath)
ugly_invoice_list = tree.xpath(invoice_list_xpath)
ugly_pep_list = tree.xpath(pep_list_xpath)
ugly_order_type_list = tree.xpath(order_type_list_xpath)
ugly_engine_list = tree.xpath(engine_list_xpath)
ugly_status_list = tree.xpath(status_list_xpath)
# Clean up PEP Codes
msrp_list = []
invoice_list = []
pep_list = []
order_type_list = []
engine_list = []
status_list = []
for k in range(0, len(vin_list)):
msrp_list.append(ugly_msrp_list[k].replace('$', '').replace(',', ''))
if msrp_list[k] != 'n/a':
msrp_list[k] = int(msrp_list[k])
else:
msrp_list[k] = ''
invoice_list.append(ugly_invoice_list[k].replace('$', '').replace(',', ''))
if invoice_list[k] != 'n/a':
invoice_list[k] = int(invoice_list[k])
else:
invoice_list[k] = ''
for pep_code in ugly_pep_list:
pep_list.append(pep_code)
for order_type in ugly_order_type_list:
order_type_list.append(order_type)
for engine in ugly_engine_list:
engine = engine.split('<br>')[0].replace(' ', '').replace('\n', '')
if 'L ' in engine and 'SPD' not in engine and 'SPEED' not in engine:
engine_list.append(engine)
for status in ugly_status_list:
if 'transit' in status:
status_list.append('In Transit')
elif 'plant' in status:
status_list.append('In Plant')
else:
status_list.append('In Stock')
if len(msrp_list) != len(invoice_list):
print('len msrp != invoice')
raise ValueError
if len(pep_list) != len(msrp_list):
print('len pep != msrp')
print(msrp_list)
print(ugly_pep_list)
raise ValueError
print('msrp_list len: ', len(msrp_list))
print('msrp_list: ', msrp_list)
print('invoice_list: ', invoice_list)
print('pep_list: ', pep_list)
print('order_type_list: ', order_type_list)
print('engine_list: ', engine_list)
print('status_list: ', status_list)
to_db = []
for k, vin in enumerate(vin_list):
print('VIN: ', vin)
print('msrp: ', msrp_list[k])
print('invoice: ', invoice_list[k])
print('pep: ', pep_list[k])
print('order_type: ', order_type_list[k])
print('engine: ', engine_list[k], '\n')
            if msrp_list[k] != '' and invoice_list[k] != '' and msrp_list[k] < invoice_list[k]:
raise ValueError
to_db.append((vin, msrp_list[k], invoice_list[k], pep_list[k], order_type_list[k], engine_list[k], status_list[k]))
query = 'INSERT OR REPLACE INTO VLPlusInventory (vin, msrp, invoice, pepCode, orderType, engine, status) VALUES (?, ?, ?, ?, ?, ?, ?)'
c.executemany(query, to_db)
conn.commit()
time.sleep(randomInterval())
        if j < page_count - 1:  # no "next page" link to click after the last page
            next_page_xpath = '//a[@page="{}"]'.format(str(j+2))
            next_page_link = wait.until(EC.element_to_be_clickable((By.XPATH, next_page_xpath)))
            next_page_link.click()
#browser.find_element_by_xpath(next_page_xpath).click()
time.sleep(randomInterval()*2)
conn.close()
def updateMasterInventoryStockStatus():
# Open connection to database
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
# Get all vin in master inv
query = 'SELECT vin FROM masterInventory'
c.execute(query)
master_results = c.fetchall()
master_vin_list = []
for r in master_results:
master_vin_list.append(r[0])
# Get all retail veh in vlplus inv
query = 'SELECT vin, status FROM VLPlusInventory WHERE orderType = ? OR orderType = ?'
to_db = ('1', '2')
c.execute(query, to_db)
vlplus_results = c.fetchall()
for r in vlplus_results:
vin = r[0]
vlpus_status = r[1]
print('\n', vin, ':\n\n')
if vin in master_vin_list:
query = 'SELECT status, dateInStock FROM masterInventory WHERE vin = ?'
to_db = (vin,)
c.execute(query, to_db)
result = c.fetchall()
master_status = result[0][0]
date_in_stock = result[0][1]
print(master_status)
if date_in_stock and master_status == 'In Stock':
print('Stock status already set')
continue
elif date_in_stock and master_status != 'In Stock':
print('Updating stock status')
query = 'UPDATE masterInventory SET status = ? WHERE vin = ?'
to_db = ('In Stock', vin)
c.execute(query, to_db)
else:
print('Adding veh to master')
query = 'INSERT OR REPLACE INTO masterInventory (vin, status, invType) VALUES (?, ?, ?)'
to_db = (vin, vlpus_status, 'New')
c.execute(query, to_db)
conn.commit()
conn.close()
# Data stuff
def get_incoming_homenet_file(): # Logs into Homenet FTP server and downloads inventory file
print('Getting CSV file from Homenet feed...')
#autouplinkFilePath = 'spencertichenor.com/home/sjtichenor/public_ftp/incoming/RosevilleMidwayFord' + YEAR + MO + DAY
ftp = ftplib.FTP('spencertichenor.com')
ftp.login(user='<EMAIL>', passwd='<PASSWORD>')
homenetFileName = 'homenet_feed.csv'
localFilePath = 'data/local_homenet_file.csv'
localFile = open(localFilePath, 'wb')
ftp.retrbinary('RETR ' + homenetFileName, localFile.write, 1024)
print('CSV file from Homenet feed saved at: data/local_homenet_file.csv')
ftp.quit()
localFile.close()
def update_incoming_homenet_table(): # Gets data from local_homenet_file.csv then updates homenetInventory and masterInventory tables
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
print('Updating homenetInventory table with data sent from Homenet FTP feed...')
with open('data/local_homenet_file.csv', 'r') as homenetFile:
# csv.DictReader uses first line in file for column headings by default
dr = csv.DictReader(homenetFile) # comma is default delimiter
to_db = []
homenetVinList = []
## Clean out weird characters
valid_chars = string.ascii_letters + string.digits + ' ' + ':' + '-' + ',' + '&' + '$' + '/' + '.' + '_' + '!'
for i in dr:
for key in i.keys():
s = i[key]
clean = ''.join(c for c in s if c in valid_chars)
i[key] = clean
#print(key + ': ' + i[key])
#print('\n' + 50*'*' + '\n')
to_db.append((
i['VIN'],
i['Stock'],
i['Type'],
i['Year'],
i['Make'],
i['Model'],
i['Trim'],
i['Body'],
i['MSRP'],
i['SellingPrice'],
i['InternetPrice'],
i['Invoice'],
i['BookValue'],
i['Certified'],
i['ModelNumber'],
i['Doors'],
i['ExteriorColor'],
i['InteriorColor'],
i['EngineCylinders'],
i['EngineDisplacement'],
i['Transmission'],
i['Miles'],
i['DateInStock'],
i['Description'],
i['Options'],
i['Categorized Options'],
i['ImageList'],
i['Style Description'],
i['Drive type'],
i['Wheelbase Code'],
i['Engine Description'],
i['Market Class'],
i['Factory_Codes']
))
            homenetVinList.append(i['VIN']) # used later to delete vehicles that aren't in stock anymore
query = ("""
INSERT OR REPLACE INTO homenetInventory (vin, stock, invType, year, make, model, vehTrim, cabStyle, intMSRP, intPrice, intInternetPrice, intInvoice, intGeneralLedger, cpo, modelNumber, doors, exteriorColor, interiorColor, engineCylinders, engineDisplacement, transmission, miles, dateInStock, description, options, optionsCategorized, imageUrls, style, drive, wheelbase, engine, marketClass, factCodes)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""")
c.executemany(query, to_db)
#c.executemany("INSERT OR REPLACE INTO masterInventory (vin, stock, invType, year, make, model, vehTrim, cabStyle, intMSRP, intPrice, intInternetPrice, intInvoice, intGeneralLedger, cpo, modelNumber, doors, exteriorColor, interiorColor, engineCylinders, engineDisplacement, transmission, miles, dateInStock, description, options, optionsCategorized, imageUrls, style, drive, wheelbase, engine, marketClass, factCodes) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", to_db)
    # the duplicate insert above is redundant; masterInventory is updated separately below
# Delete vehicles that aren't in stock anymore from Homenet table
currentVinList = []
c.execute('SELECT vin FROM homenetInventory')
tupleVinList = c.fetchall()
for tupleVin in tupleVinList: # Convert tuples to strings in order to compare later
vin = tupleVin[0]
currentVinList.append(vin)
for vin in currentVinList:
if vin not in homenetVinList:
c.execute('DELETE FROM homenetInventory WHERE vin = ?', (vin,))
print('Deleted VIN ' + vin + ' from Homenet Inventory Table.')
conn.commit()
print('Finished updating homenetInventory table.\n')
# Update masterInventory table
print('Updating masterInventory table with data from homenetInventory table...')
query = 'INSERT OR REPLACE INTO masterInventory (vin, stock, invType, year, make, model, vehTrim, cabStyle, intMSRP, intPrice, intInternetPrice, intInvoice, intGeneralLedger, cpo, modelNumber, doors, exteriorColor, interiorColor, engineCylinders, engineDisplacement, transmission, miles, dateInStock, description, options, optionsCategorized, imageUrls, style, drive, wheelbase, engine, marketClass, factCodes) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
c.executemany(query, to_db)
c.execute('SELECT vin from masterInventory')
masterVinTupleList = c.fetchall()
for vinTuple in masterVinTupleList:
vin = vinTuple[0]
if vin not in homenetVinList:
c.execute('DELETE FROM masterInventory WHERE vin = ?', (vin,))
print('Deleted VIN ' + vin + ' from Master Inventory Table.')
conn.commit()
conn.close()
def updateMasterTable() :
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
    # select columns explicitly so their order matches the INSERT below
    c.execute('SELECT vin, stock, invType, year, make, model, vehTrim, cabStyle, intMSRP, intPrice, intInternetPrice, intInvoice, intGeneralLedger, cpo, modelNumber, doors, exteriorColor, interiorColor, engineCylinders, engineDisplacement, transmission, miles, dateInStock, description, options, optionsCategorized, imageUrls, style, drive, wheelbase, engine, marketClass, factCodes FROM homenetInventory')
vehTupleList = c.fetchall()
to_db = vehTupleList
print(to_db)
print(len(to_db))
for i in to_db:
print(i)
print(len(i))
c.executemany("INSERT OR REPLACE INTO masterInventory (vin, stock, invType, year, make, model, vehTrim, bodyStyle, intMSRP, intPrice, intInternetPrice, intInvoice, intGeneralLedger, cpo, modelNumber, doors, exteriorColor, interiorColor, engineCylinders, engineDisplacement, transmission, miles, dateinStock, description, options, optionsCategorized, imageUrls) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);", to_db)
conn.commit()
# Delete vehicles that are no longer in stock from masterInventory
homenetVinList = []
c.execute('SELECT vin from homenetInventory')
homenetVinTupleList = c.fetchall()
for homenetVinTuple in homenetVinTupleList :
homenetVin = homenetVinTuple[0]
homenetVinList.append(homenetVin)
c.execute('SELECT vin from masterInventory')
masterVinTupleList = c.fetchall()
for vinTuple in masterVinTupleList :
vin = vinTuple[0]
if vin not in homenetVinList :
c.execute('DELETE FROM masterInventory WHERE vin = ?', (vin,))
print('Deleted VIN ' + vin + ' from Master Inventory Table.')
conn.commit()
conn.close()
def removeOldVins(table): # removes VINs that are no longer in masterInventory from the supplied table
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
masterVinList = []
c.execute('SELECT vin FROM masterInventory')
masterVinTupleList = c.fetchall()
for masterVinTuple in masterVinTupleList :
vin = masterVinTuple[0]
masterVinList.append(vin)
    c.execute('SELECT vin FROM {}'.format(table))  # table names cannot be bound as sqlite3 parameters
    tableVinTupleList = c.fetchall()
    for tableVinTuple in tableVinTupleList :
        vin = tableVinTuple[0]
        if vin not in masterVinList :
            c.execute('DELETE FROM {} WHERE vin = ?'.format(table), (vin,))
            print('\t' + vin + ' deleted from ' + table + ' table.')
conn.commit()
conn.close()
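# Example call (the table name must be one of this database's own tables, e.g. the rebateInfo table used elsewhere):
#     removeOldVins('rebateInfo')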
def compute_highlights(): # Gets masterInventory 'options' field for each veh then finds highlights then adds them to highlights column separated by commas
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
c.execute('SELECT vin, options, year, invType, description, cpo, engine, drive, stock, make, model, marketClass FROM masterInventory')
optionsTupleList = c.fetchall()
for optionsTuple in optionsTupleList:
highlightList = []
highlightStr = ''
vin = optionsTuple[0]
options = optionsTuple[1].lower()
year = optionsTuple[2]
invType = optionsTuple[3]
description = optionsTuple[4].lower()
cpo = optionsTuple[5]
engine = optionsTuple[6]
drive = optionsTuple[7]
stock = optionsTuple[8]
make = optionsTuple[9]
model = optionsTuple[10]
marketClass = optionsTuple[11]
# Get coolest options
if cpo == 'True':
highlightList.append('Certified Pre-Owned')
highlightList.append('100,000-Mile Warranty')
#if year == 2017 and invType == 'New' :
#highlightList.append('Apple CarPlay')
#highlightList.append('Android Auto')
        # Highlight Indicators - list of dictionaries where the key is the highlight name and the value is a list of indicator phrases
indicatorList = [
{'One-Owner': ['one owner', 'one-owner']},
{'Low Miles': ['low mile']},
{'Remote Start': ['remote start', 'remote engine start', 'remote auto start']},
{'Technology Package': ['technology package', 'technology pkg']},
{'Cold Weather Package': ['cold weather package']},
{'Appearance Package': ['appearance package']},
{'Moonroof': ['vista roof', 'moonroof', 'glass roof', 'panoramic roof']},
{'Rear Camera': ['rear view camera', 'back-up camera', 'rear-view camera']},
{'Rear Camera w/ Hitch Assist': ['rear view camera w/dynamic hitch assist']},
{'Heated Seats': ['heated leather', 'heated front seats', 'heated bucket']},
{'Heated/Cooled Seats': ['heated & cooled', 'heated and cooled', 'heated/cooled']},
{'Heated Steering Wheel': ['heated steering wheel']},
{'Heated Mirrors': ['heated mirrors']},
{'Tow Package': ['tow package', 'Towing', 'Trailer Hitch']},
{'Trailer Brake Controller': ['trailer brake controller']},
{'Premium Audio System': ['premium audio system', 'premium 9 speaker']},
{'Leather Interior': ['leather seats', 'leather-trimmed', 'leather trimmed']},
{'Bluetooth': ['bluetooth']},
{'USB Connectivity': ['usb']},
{'Apple CarPlay': ['apple carplay']},
{'Android Auto': ['android auto']},
{'Snow Plow Package': ['snow plow package']},
{'Lane-Keeping System': ['lane-keeping system']},
{'Rain-Sensing Wipers': ['rain-sensing wipers']},
{'Park Assist System': ['park assist system']},
{'Sirius': ['sirius', 'satellite radio']},
{'Power Liftgate': ['pwr liftgate', 'power liftgate']},
{'Remote Tailgate': ['remote tailgate']},
{'Push Button Start': ['push button start']},
{'Navigation': ['navigation']},
{'Bedliner': ['bedliner']},
{'Extended Range Fuel Tank': ['extended range']},
{'2nd Row Bucket Seats': ['2nd row bucket seats']},
{'3rd Row Seat': ['3rd row seat', '3rd seat']},
{'Touchscreen': ['touchscreen', 'touch-screen', 'myford touch', 'sync 3']},
{'Keyless Entry': ['keyless', 'keypad entry']},
{'Cruise Control': ['cruise control']},
{'Auto Start-Stop Technology': ['auto start-stop technology']},
{'LED Box Lighting': ['led box lighting']},
]
for i in indicatorList:
highlight = list(i.keys())[0]
phraseList = list(i.values())[0]
for phrase in phraseList:
if phrase in options or phrase in description:
highlightList.append(highlight)
break
highlightList.append(engine)
highlightList.append(drive)
# Remove redundant highlights
redundantList = [
['Heated Seats', 'Heated/Cooled Seats'],
['Rear Camera', 'Rear Camera w/ Hitch Assist'],
['USB Connectivity', 'Bluetooth'],
['Bluetooth', 'Apple CarPlay'],
['Tow Package', 'Trailer Brake Controller']
]
for i in redundantList:
if i[0] in highlightList and i[1] in highlightList:
highlightList.remove(i[0])
for highlight in highlightList:
highlightStr += highlight + ','
if len(highlightStr) > 0: # Get rid of unnecessary comma on end of string
highlightStr = highlightStr[:-1]
        # Set Body Style (not really a highlight) - switched to the explicit checks below because iterating through a dict is unordered and vans were getting marked as cars
# indicatorDict = {
# 'Car': ['Car'],
# 'Truck': ['Truck'],
# 'Van': ['Van', 'van'],
# 'SUV': ['Sport Utility Vehicles']
# }
# bodyStyles = indicatorDict.keys()
# for bodyStyle in bodyStyles :
# for indicator in indicatorDict[bodyStyle] :
# if indicator in marketClass :
# style = bodyStyle
        style = ''  # default in case marketClass matches none of the checks below
        if 'Car' in marketClass: # has to come first so cargo van gets listed as Van
style = 'Car'
if 'Truck' in marketClass:
style = 'Truck'
if 'Van' in marketClass or 'van' in marketClass :
style = 'Van'
if 'Sport Utility Vehicles' in marketClass :
style = 'SUV'
# Clean up Model
model = model.replace(' Commercial Cutaway', '').replace(' Sport Fleet', '').replace(' Cutaway', '')
# Clean up Engine
engine = engine.replace(' L', 'L')
print('Vehicle: ' + stock + ' ' + make + ' ' + model)
print('Highlights:', highlightList)
print('BodyStyle:', style)
print('\n')
# Set Status to In Stock
status = 'In Stock'
# Update database
c.execute('UPDATE masterInventory SET highlights = ?, bodyStyle = ?, model = ?, engine = ?, status = ? WHERE vin = ?', (highlightStr, style, model, engine, status, vin,))
conn.commit()
conn.close()
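# Illustrative trace of the highlight matching above (hypothetical vehicle, not real data):
#   description = 'one owner, heated leather, navigation', options = '', cpo = 'False'
#   -> the indicator phrases match 'One-Owner', 'Heated Seats' and 'Navigation' (in indicatorList order),
#      then engine and drive are appended, no redundant pairs apply, and the column is stored as
#      'One-Owner,Heated Seats,Navigation,<engine>,<drive>'.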
def calculate_pricing():
print('Calculating max discount for each vehicle...\n')
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
# Set dealer discount and total discount
query = ('SELECT vin, intMSRP, intInternetPrice, intTotalRebates, totalConditionalRebates '
'FROM masterInventory '
'WHERE invType = "New" AND intMSRP != 0')
c.execute(query)
results = c.fetchall()
for r in results:
print('r:', r)
vin = r[0]
msrp = r[1]
price_before_rebates = r[2]
unconditional_rebates = r[3]
conditional_rebates = r[4]
dealer_discount = msrp - price_before_rebates
        if unconditional_rebates:
            best_discount = dealer_discount + unconditional_rebates + (conditional_rebates or 0)
else:
best_discount = dealer_discount
# Print results
print('\t\tVIN:', vin)
print('\t\tMSRP:', msrp)
print('\t\tPrice before rebates:', price_before_rebates)
print('\t\tDealer Discount:', dealer_discount)
print('\t\tUnconditional Rebates:', unconditional_rebates)
print('\t\tConditional Rebates:', conditional_rebates)
print('\t\tBest Discount:', best_discount, '\n\n')
# Update database
query = 'UPDATE masterInventory SET intTotalDiscount = ? WHERE vin = ?'
to_db = (best_discount, vin)
c.execute(query, to_db)
conn.commit()
conn.close()
print('Finished calculating max discount for each vehicle.\n')
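# Worked example of the discount math above (hypothetical numbers, not from the database):
#   MSRP = 45,000 and internet price = 42,500  ->  dealer_discount = 45,000 - 42,500 = 2,500
#   unconditional_rebates = 1,500 and conditional_rebates = 750
#   best_discount = 2,500 + 1,500 + 750 = 4,750 (stored in intTotalDiscount)
#   With no unconditional rebates, best_discount falls back to the 2,500 dealer discount.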
def create_outgoing_homenet_table():
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
query = ("""
CREATE TABLE IF NOT EXISTS outgoingHomenet
(VIN TEXT UNIQUE, comment1 TEXT, misc_price1 INTEGER, comment2 TEXT, misc_price2 INTEGER, comment3 TEXT, misc_price3 INTEGER, comment5 TEXT)
""")
c.execute(query)
conn.commit()
conn.close()
def update_outgoing_homenet_table():
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
c.execute('DELETE FROM outgoingHomenet')
to_db = []
c.execute('SELECT vin, highlights, intTotalRebates, totalConditionalRebates FROM masterInventory')
results = c.fetchall()
for r in results:
vin = r[0]
highlights = r[1]
unconditional_rebates = r[2]
conditional_rebates = r[3]
if not unconditional_rebates:
unconditional_rebates = 0
if not conditional_rebates:
conditional_rebates = 0
to_db.append((vin, highlights, 0, None, unconditional_rebates, None, conditional_rebates, ''))
print('\n\nVIN:', vin)
print('Highlights:', highlights)
print('Unconditional Rebates:', unconditional_rebates)
print('Conditional Rebates:', conditional_rebates)
query = ("""
INSERT OR REPLACE INTO outgoingHomenet (vin, comment1, misc_price1, comment2, misc_price2, comment3, misc_price3, comment5)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
""")
c.executemany(query, to_db)
conn.commit()
conn.close()
def update_outgoing_homenet_file():
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
c.execute('SELECT vin, comment1, misc_price1, comment2, misc_price2, comment3, misc_price3, comment5 FROM outgoingHomenet')
with open('data/homenet-incentive-feed.csv', 'w') as csv_file:
csv_writer = csv.writer(csv_file, dialect='excel')
csv_writer.writerow([i[0] for i in c.description]) # write headers
csv_writer.writerows(c)
conn.commit()
conn.close()
def upload_outgoing_homenet_file():
print('\nUploading inventory to FTP server for Homenet...')
file_path = 'data/homenet-incentive-feed.csv'
file_name = file_path.split('/')
file_name = file_name[-1]
print('Uploading ' + file_name + ' to FTP server...\n')
file = open(file_path, 'rb')
ftp = ftplib.FTP('iol.homenetinc.com')
ftp.login('hndatafeed', 'gx8m6')
ftp.storbinary('STOR ' + file_name, file, 1024)
file.close()
ftp.quit()
print('Successfully uploaded ' + file_name + ' to homenet folder on FTP server.\n')
def send_feeds_from_homenet():
print('Navigating to Homenet.com and send out feeds to cars.com, cargurus, etc..')
# Fire up ChromeDriver
browser = start_chromedriver()
wait = WebDriverWait(browser, 10)
# Log into Homenet
#url = 'https://www.homenetiol.com/marketplace/overview'
url = 'https://www.homenetiol.com/login?RedirectUrl=%2fmarketplace%2foverview'
browser.get(url)
username = browser.find_element_by_xpath('//input[@class="username text-value"]')
password = browser.find_element_by_xpath('//input[@class="password text-value"]')
username.send_keys('<EMAIL>')
password.send_keys('<PASSWORD>')
wait.until(EC.element_to_be_clickable((By.XPATH, '//a[@class="login-action button"]'))).click()
wait.until(EC.element_to_be_clickable((By.XPATH, '//a[@class="run-all-button button"]'))).click()
time.sleep(10)
print('Finished sending out feeds.')
def vacuum_db():
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
c.execute("VACUUM")
conn.close()
def figureManagerSpecials():
# Open connection to database
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
url = 'http://www.rosevillemidwayford.com/new-car-sales-roseville-mn'
page = requests.get(url)
tree = html.fromstring(page.content)
stockResults = tree.xpath('//span[contains(@class, "spec-value-stocknumber")]/text()')
specialStockList = []
for specialStock in stockResults :
specialStock = specialStock.replace('#', '')
specialStockList.append(specialStock)
print(specialStockList)
c.execute('SELECT stock FROM masterInventory')
results = c.fetchall()
for r in results:
stock = r[0]
if stock in specialStockList :
print('looks like stock #' + stock + ' is a special!')
query = 'UPDATE masterInventory SET managerSpecial = ? WHERE stock = ?'
to_db = ('True', stock)
c.execute(query, to_db)
else :
print('looks like stock #' + stock + ' is NOT a special!')
query = 'UPDATE masterInventory SET managerSpecial = ? WHERE stock = ?'
to_db = ('False', stock)
c.execute(query, to_db)
conn.commit()
conn.close()
def figureLeaseSpecials():
# Open connection to database
conn = sqlite3.connect('data/inventory.db')
c = conn.cursor()
lease_specials = []
c.execute('SELECT DISTINCT year FROM masterInventory')
year_results = c.fetchall()
for y in year_results:
year = y[0]
# print(year)
query = 'SELECT DISTINCT model FROM masterInventory WHERE year = ? AND leasePayment != ?'
to_db = (year, '')
c.execute(query, to_db)
model_results = c.fetchall()
for m in model_results:
model = m[0]
query = 'SELECT min(leasePayment) FROM masterInventory WHERE year = ? AND model = ?'
to_db = (year, model)
c.execute(query, to_db)
payment_results = c.fetchall()
            minPayment = payment_results[0][0]
            # NOTE: the lease columns (residual, downPayment, totalLeaseRebates, dueAtSigning) are assumed to exist in masterInventory so the unpacking below lines up
            query = 'SELECT vin, stock, vehTrim, intMSRP, intPrice, residual, downPayment, totalLeaseRebates, dueAtSigning, leaseRebateExpiration FROM masterInventory WHERE year = ? AND model = ? AND leasePayment = ?'
to_db = (year, model, minPayment)
c.execute(query, to_db)
veh_results = c.fetchall()
v = veh_results[0] # Just get first vehicle even if there are many
print(v)
vin = v[0]
stock = v[1]
vehTrim = v[2]
msrp = v[3]
price = v[4]
term = 36
residual = v[5]
downPayment = v[6]
totalLeaseRebates = v[7]
dueAtSigning = v[8]
expiration = v[9]
# Get data from masterInventory table for rest of required info
c.execute('SELECT bodyStyle, imageUrls, imageUrlsHD, vdp_url, drive FROM masterInventory WHERE vin = ?', (vin,)) # add option codes to this later
master_results = c.fetchall()
if not master_results:
continue
            bodyStyle = master_results[0][0] # just take the first match; could refine later to prefer one with a photo
imageUrls = master_results[0][1]
imageUrlsHD = master_results[0][2]
vdp = master_results[0][3]
drive = master_results[0][4]
# option_codes = masterResults[0][4]
# Set image to HD version if available
if imageUrlsHD:
imageUrl = imageUrlsHD
elif imageUrls:
imageUrl = imageUrls.split(',')[0]
else:
continue
minPayment = locale.currency(minPayment, grouping=True).replace('.00', '')
#downPayment = locale.currency(downPayment, grouping=True).replace('.00', '')
#dueAtSigning = locale.currency(dueAtSigning, grouping=True).replace('.00', '')
msrp = locale.currency(msrp, grouping=True).replace('.00', '')
price = locale.currency(price, grouping=True).replace('.00', '')
# offer = '<p>' + minPayment + '/month with ' + downPayment + ' down payment.<br><br>Just ' + dueAtSigning + ' due at signing.<br><br>Based on MSRP of ' + msrp + '.</p>'
# title = minPayment + '/month with {} down.'.format(downPayment)
# description = 'Lease term of {} months. Based on MSRP of {} and selling price of {}. Requires {} due at signing.'.format(term, msrp, price, dueAtSigning)
# disclaimer = 'Must take new retail delivery from dealer stock by {}. Requires {} due at signing. Based on MSRP of {} and selling price of {}. See Subject to credit approval. Assumes 10,500 miles/year and Tier 0-1 credit. Tax, title, and license not included. Some restrictions apply. See sales representative for details.'.format(expiration, minPayment, msrp, price)
lease_specials.append({
'vin': vin,
'stock': stock,
'year': year,
'model': model,
'vehTrim': vehTrim,
# 'title': title,
# 'description': description,
'expiration': expiration,
'monthlyPayment': minPayment,
'dueAtSigning': dueAtSigning,
'vdp': vdp,
'imageUrl': imageUrl,
'bodyStyle': bodyStyle,
'msrp': msrp,
'price': price,
# 'disclaimer': disclaimer,
'drive': drive,
# 'option_codes': option_codes
})
print('\nFresh Specials:')
for s in lease_specials:
print('\n')
# print('\n\n', s, '\n')
for k in s.keys():
print(k + ': ' + str(s[k]))
print('\n\n')
# Close connection to database
conn.close()
return lease_specials
def wait_for_next_run(minutes_to_wait):
    print('Finished running program. Waiting {} minutes to rerun.'.format(minutes_to_wait))
minutes_to_wait = int(minutes_to_wait)
    for i in range(minutes_to_wait, 0, -1):
        print('Waiting {} minutes until next run.'.format(i))
        time.sleep(60)
def main():
while True:
get_incoming_homenet_file()
update_incoming_homenet_table()
scrape.scrape_cdk()
calculate_pricing()
compute_highlights()
create_outgoing_homenet_table()
update_outgoing_homenet_table()
update_outgoing_homenet_file()
upload_outgoing_homenet_file()
send_feeds_from_homenet()
sales_specials.main()
midwords.main()
hd_images.main()
facebook.main()
#adwords_feeds.main()
sheets.main()
# maybe add something to check if any dealer discounts are negative then re run (and if model isnt raptor)
vacuum_db()
wait_for_next_run(30)
if __name__ == '__main__':
main()
| 2.421875 | 2 |
monolithe/generators/sdkgenerator.py | edwinfeener/monolithe | 18 | 9695 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import unicode_literals
import os
import shutil
from monolithe.lib import Printer
from monolithe.generators.lib import Generator
from monolithe.generators.managers import MainManager, CLIManager, VanillaManager
from .sdkapiversiongenerator import SDKAPIVersionGenerator
class SDKGenerator(Generator):
def cleanup(self):
output = self.config.get_option("output", "transformer")
language = self.config.language
overrides_path = "%s/%s/__overrides" % (output, language)
if os.path.exists(overrides_path):
shutil.rmtree(overrides_path)
attrs_defaults_path = "%s/%s/__attributes_defaults" % (output, language)
if os.path.exists(attrs_defaults_path):
shutil.rmtree(attrs_defaults_path)
code_header_path = "%s/%s/__code_header" % (output, language)
if os.path.exists(code_header_path):
os.remove(code_header_path)
def generate(self, specification_info):
user_vanilla = self.config.get_option("user_vanilla", "transformer")
output = self.config.get_option("output", "transformer")
name = self.config.get_option("name", "transformer")
lang = self.config.language
if not os.path.exists(os.path.join(output, lang)):
os.makedirs(os.path.join(output, lang))
vanilla_manager = VanillaManager(monolithe_config=self.config)
vanilla_manager.execute(output_path="%s/%s" % (output, lang))
self.install_user_vanilla(user_vanilla_path=user_vanilla, output_path="%s/%s" % (output, lang))
version_generator = SDKAPIVersionGenerator(self.config)
apiversions = []
for info in specification_info:
Printer.log("transforming specifications into %s for version %s..." % (lang, info["api"]["version"]))
apiversions.append(info["api"]["version"])
version_generator.generate(specification_info=specification_info)
Printer.log("assembling...")
manager = MainManager(monolithe_config=self.config)
manager.execute(apiversions=apiversions)
cli_manager = CLIManager(monolithe_config=self.config)
cli_manager.execute()
self.cleanup()
Printer.success("%s generation complete and available in \"%s/%s\"" % (name, output, self.config.language))
| 1.234375 | 1 |
rllab-taewoo/rllab/plotter/plotter.py | kyuhoJeong11/GrewRL | 0 | 9696 | import atexit
import sys
if sys.version_info[0] == 2:
from Queue import Empty
else:
from queue import Empty
from multiprocessing import Process, Queue
from rllab.sampler.utils import rollout
import numpy as np
__all__ = [
'init_worker',
'init_plot',
'update_plot'
]
process = None
queue = None
def _worker_start():
env = None
policy = None
max_length = None
try:
while True:
msgs = {}
# Only fetch the last message of each type
while True:
try:
msg = queue.get_nowait()
msgs[msg[0]] = msg[1:]
except Empty:
break
if 'stop' in msgs:
break
elif 'update' in msgs:
env, policy = msgs['update']
# env.start_viewer()
elif 'demo' in msgs:
param_values, max_length = msgs['demo']
policy.set_param_values(param_values)
rollout(env, policy, max_path_length=max_length, animated=True, speedup=5)
else:
if max_length:
rollout(env, policy, max_path_length=max_length, animated=True, speedup=5)
except KeyboardInterrupt:
pass
def _shutdown_worker():
if process:
queue.put(['stop'])
queue.close()
process.join()
def init_worker():
print("####################init_worker")
global process, queue
queue = Queue()
process = Process(target=_worker_start)
process.start()
atexit.register(_shutdown_worker)
def init_plot(env, policy):
queue.put(['update', env, policy])
def update_plot(policy, max_length=np.inf):
queue.put(['demo', policy.get_param_values(), max_length])
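# Minimal usage sketch (assumes an rllab `env` and `policy` are already constructed elsewhere,
# and that the import path follows this rllab/plotter/plotter.py layout):
#     from rllab.plotter import plotter
#     plotter.init_worker()                        # spawn the background plotting process
#     plotter.init_plot(env, policy)               # hand the env/policy to the worker
#     plotter.update_plot(policy, max_length=500)  # replay a rollout with the current params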
| 2.390625 | 2 |
OpenCV/bookIntroCV_008_binarizacao.py | fotavio16/PycharmProjects | 0 | 9697 | <gh_stars>0
'''
Livro-Introdução-a-Visão-Computacional-com-Python-e-OpenCV-3
Image repository:
https://github.com/opencv/opencv/tree/master/samples/data
'''
import cv2
import numpy as np
from matplotlib import pyplot as plt
#import mahotas
VERMELHO = (0, 0, 255)
VERDE = (0, 255, 0)
AZUL = (255, 0, 0)
AMARELO = (0, 255, 255)
BRANCO = (255,255,255)
CIANO = (255, 255, 0)
PRETO = (0, 0, 0)
img = cv2.imread('ponte2.jpg') # Flag 1 = Color, 0 = Gray, -1 = Unchanged
img = img[::2,::2] # Shrink the image (keep every other pixel)
# Binarization with a fixed threshold
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
suave = cv2.GaussianBlur(img, (7, 7), 0) # apply Gaussian blur
(T, bin) = cv2.threshold(suave, 160, 255, cv2.THRESH_BINARY)
(T, binI) = cv2.threshold(suave, 160, 255, cv2.THRESH_BINARY_INV)
'''
resultado = np.vstack([
np.hstack([suave, bin]),
np.hstack([binI, cv2.bitwise_and(img, img, mask = binI)])
])
'''
resultado = np.vstack([
np.hstack([img, suave]),
np.hstack([bin, binI])
])
cv2.imshow("Binarização da imagem", resultado)
cv2.waitKey(0)
# Adaptive thresholding
bin1 = cv2.adaptiveThreshold(suave, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY_INV, 21, 5)
bin2 = cv2.adaptiveThreshold(suave, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 21, 5)
resultado = np.vstack([
np.hstack([img, suave]),
np.hstack([bin1, bin2])
])
cv2.imshow("Binarização adaptativa da imagem", resultado)
cv2.waitKey(0)
# Thresholding with Otsu and Riddler-Calvard (requires mahotas; left commented out)
'''
T = mahotas.thresholding.otsu(suave)
temp = img.copy()
temp[temp > T] = 255
temp[temp < 255] = 0
temp = cv2.bitwise_not(temp)
T = mahotas.thresholding.rc(suave)
temp2 = img.copy()
temp2[temp2 > T] = 255
temp2[temp2 < 255] = 0
temp2 = cv2.bitwise_not(temp2)
resultado = np.vstack([
np.hstack([img, suave]),
np.hstack([temp, temp2])
])
cv2.imshow("Binarização com método Otsu e Riddler-Calvard", resultado)
cv2.waitKey(0)
'''
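# Since mahotas is commented out above, here is a minimal sketch of the same idea using only
# OpenCV's built-in Otsu threshold (OpenCV has no Riddler-Calvard equivalent, so only Otsu is shown):
(T_otsu, otsu) = cv2.threshold(suave, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
print('Otsu threshold:', T_otsu)
resultado = np.vstack([
    np.hstack([img, suave]),
    np.hstack([otsu, cv2.bitwise_not(otsu)])
])
cv2.imshow("Binarizacao com metodo Otsu (OpenCV)", resultado)
cv2.waitKey(0)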
| 2.703125 | 3 |
djangito/backends.py | mechanicbuddy/djangito | 0 | 9698 | <reponame>mechanicbuddy/djangito
import base64
import json
import jwt
import requests
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
USER_MODEL = get_user_model()
class ALBAuth(ModelBackend):
def authenticate(self, request, **kwargs):
if request:
self.encoded_jwt = request.META.get('HTTP_X_AMZN_OIDC_DATA')
if self.encoded_jwt:
self.payload = self.decode_alb_jwt()
return self.get_or_create_for_alb()
def decode_alb_jwt(self):
# Step 1: Get the key id from JWT headers (the kid field)
jwt_headers = self.encoded_jwt.split('.')[0]
decoded_jwt_headers = base64.b64decode(jwt_headers)
decoded_jwt_headers = decoded_jwt_headers.decode("utf-8")
decoded_json = json.loads(decoded_jwt_headers)
kid = decoded_json['kid']
# Step 2: Get the public key from regional endpoint
url = f'https://public-keys.auth.elb.us-east-1.amazonaws.com/{kid}'
req = requests.get(url)
pub_key = req.text
# Step 3: Get the payload
return jwt.decode(
self.encoded_jwt,
pub_key,
algorithms=['ES256']
)
def get_or_create_for_alb(self):
user_info = {'username': self.payload['sub'][:150]}
if 'given_name' in self.payload:
user_info['first_name'] = self.payload['given_name'][:30]
elif 'name' in self.payload:
user_info['first_name'] = self.payload['name'][:30]
if 'family_name' in self.payload:
user_info['last_name'] = self.payload['family_name'][:30]
self.user, created = USER_MODEL.objects.get_or_create(
email=self.payload['email'],
defaults=user_info
)
if created:
self.setup_user_profile()
return self.user
def setup_user_profile(self):
pass
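# Minimal wiring sketch (assumption: this module lives at djangito/backends.py in the project).
# To activate the backend, settings.py would include something like:
#
#     AUTHENTICATION_BACKENDS = [
#         'djangito.backends.ALBAuth',
#         'django.contrib.auth.backends.ModelBackend',
#     ]
#
# Django then tries each backend in order whenever django.contrib.auth.authenticate() runs,
# and ALBAuth reads the ALB-injected 'x-amzn-oidc-data' JWT from request.META.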
| 2.375 | 2 |
data_profiler/labelers/regex_model.py | gme5078/data-profiler | 0 | 9699 | <reponame>gme5078/data-profiler<gh_stars>0
import json
import os
import sys
import re
import copy
import numpy as np
from data_profiler.labelers.base_model import BaseModel
from data_profiler.labelers.base_model import AutoSubRegistrationMeta
_file_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(_file_dir)
class RegexModel(BaseModel, metaclass=AutoSubRegistrationMeta):
def __init__(self, label_mapping=None, parameters=None):
"""
Regex Model Initializer.
Example regex_patterns:
regex_patterns = {
"LABEL_1": [
"LABEL_1_pattern_1",
"LABEL_1_pattern_2",
...
],
"LABEL_2": [
"LABEL_2_pattern_1",
"LABEL_2_pattern_2",
...
],
...
}
Example encapsulators:
encapsulators = {
'start': r'(?<![\w.\$\%\-])',
'end': r'(?:(?=(\b|[ ]))|(?=[^\w\%\$]([^\w]|$))|$)',
}
:param label_mapping: maps labels to their encoded integers
:type label_mapping: dict
:param parameters: Contains all the appropriate parameters for the model.
Possible parameters are:
                regex_patterns, encapsulators, ignore_case, default_label
:type parameters: dict
:return: None
"""
# parameter initialization
if not parameters:
parameters = {}
parameters.setdefault('regex_patterns', {})
parameters.setdefault('encapsulators', {'start': '', 'end': ''})
parameters.setdefault('ignore_case', True)
parameters.setdefault('default_label', 'BACKGROUND')
self._epoch_id = 0
# initialize class
self.set_label_mapping(label_mapping)
self._validate_parameters(parameters)
self._parameters = parameters
def _validate_parameters(self, parameters):
"""
Validate the parameters sent in. Raise error if invalid parameters are
present.
:param parameters: parameter dict containing the following parameters:
regex_patterns: patterns associated with each label_mapping
Example regex_patterns:
regex_patterns = {
"LABEL_1": [
"LABEL_1_pattern_1",
"LABEL_1_pattern_2",
...
],
"LABEL_2": [
"LABEL_2_pattern_1",
"LABEL_2_pattern_2",
...
],
...
}
encapsulators: regex to add to start and end of each regex
(used to capture entities inside of text).
Example encapsulators:
encapsulators = {
'start': r'(?<![\w.\$\%\-])',
'end': r'(?:(?=(\b|[ ]))|(?=[^\w\%\$]([^\w]|$))|$)',
}
ignore_case: whether or not to set the regex ignore case flag
default_label: default label to assign when no regex found
:type parameters: dict
:return: None
"""
_retype = type(re.compile('pattern for py 3.6 & 3.7'))
errors = []
list_of_necessary_params = ['encapsulators', 'regex_patterns',
'ignore_case', 'default_label']
# Make sure the necessary parameters are present and valid.
for param in parameters:
value = parameters[param]
if param == 'encapsulators' and (
not isinstance(value, dict)
or 'start' not in value
or 'end' not in value):
errors.append(
"`{}` must be a dict with keys 'start' and 'end'".format(
param
))
elif param == 'regex_patterns':
if not isinstance(value, dict):
errors.append('`{}` must be a dict of regex pattern lists.'.
format(param))
continue
for key in value:
if key not in self.label_mapping:
errors.append(
"`{}` was a regex pattern not found in the "
"label_mapping".format(key))
elif not isinstance(value[key], list):
errors.append(
"`{}` must be a list of regex patterns, i.e."
"[pattern_1, pattern_2, ...]".format(key))
else:
for i in range(len(value[key])):
if not isinstance(value[key][i], (_retype, str)):
errors.append(
"`{}`, pattern `{}' was not a valid regex "
"pattern (re.Pattern, str)".format(key, i))
elif isinstance(value[key][i], str):
try:
re.compile(value[key][i])
except re.error as e:
errors.append(
"`{}`, pattern {} was not a valid regex"
" pattern: {}".format(key, i, str(e)))
elif param == 'ignore_case' \
and not isinstance(parameters[param], bool):
errors.append("`{}` must be a bool.".format(param))
elif param == 'default_label' \
and not isinstance(parameters[param], str):
errors.append("`{}` must be a string.".format(param))
elif param not in list_of_necessary_params:
errors.append("`{}` is not an accepted parameter.".format(
param))
if errors:
raise ValueError('\n'.join(errors))
def _construct_model(self):
pass
def _reconstruct_model(self):
pass
def _need_to_reconstruct_model(self):
pass
def reset_weights(self):
pass
def predict(self, data, batch_size=None, show_confidences=False,
verbose=True):
"""
Applies the regex patterns (within regex_model) to the input_string,
create predictions for all matching patterns. Each pattern has an
associated entity and the predictions of each character within the
string are given a True or False identification for each entity. All
characters not identified by ANY of the regex patterns in the
pattern_dict are considered background characters, and are replaced with
the default_label value.
:param data: list of strings to predict upon
:type data: iterator
:param batch_size: does not impact this model and should be fixed to not
be required.
:type batch_size: N/A
:param show_confidences: whether user wants prediction confidences
        :type show_confidences: bool
:param verbose: Flag to determine whether to print status or not
:type verbose: bool
:return: char level predictions and confidences
:rtype: dict
"""
start_pattern = ''
end_pattern = ''
regex_patterns = self._parameters['regex_patterns']
default_ind = self.label_mapping[self._parameters['default_label']]
encapsulators = self._parameters['encapsulators']
re_flags = re.IGNORECASE if self._parameters['ignore_case'] else 0
if encapsulators:
start_pattern = encapsulators['start']
end_pattern = encapsulators['end']
pre_compiled_patterns = copy.deepcopy(regex_patterns)
for entity_label, entity_patterns in pre_compiled_patterns.items():
for i in range(len(entity_patterns)):
pattern = (start_pattern
+ pre_compiled_patterns[entity_label][i]
+ end_pattern)
pre_compiled_patterns[entity_label][i] = re.compile(
pattern, flags=re_flags)
# Construct array initial regex predictions where background is
# predicted.
predictions = [np.empty((0,))] * 100
i = 0
for i, input_string in enumerate(data):
# Double array size
if len(predictions) <= i:
predictions.extend([np.empty((0,))] * len(predictions))
pred = np.zeros((len(input_string), self.num_labels), dtype=int)
pred[:, default_ind] = 1
for entity_label, entity_patterns in pre_compiled_patterns.items():
entity_id = self.label_mapping[entity_label]
for re_pattern in entity_patterns:
for each_find in re_pattern.finditer(input_string):
indices = each_find.span(0)
pred[indices[0]:indices[1], default_ind] = 0
pred[indices[0]:indices[1], entity_id] = 1
if verbose:
sys.stdout.flush()
sys.stdout.write(
"\rData Samples Processed: {:d} ".format(i))
predictions[i] = pred
if verbose:
print()
# Trim array size to number of samples
if len(predictions) > i+1:
del predictions[i+1:]
if show_confidences:
conf = copy.deepcopy(predictions)
for i in range(len(conf)):
conf[i] = conf[i] / \
np.linalg.norm(conf[i], axis=1, ord=1, keepdims=True)
return {"pred": predictions, 'conf': conf}
return {"pred": predictions}
@classmethod
def load_from_disk(cls, dirpath):
"""
Loads whole model from disk with weights
:param dirpath: directory path where you want to load the model from
:type dirpath: str
:return: None
"""
# load parameters
model_param_dirpath = os.path.join(dirpath, "model_parameters.json")
with open(model_param_dirpath, 'r') as fp:
parameters = json.load(fp)
# load label_mapping
labels_dirpath = os.path.join(dirpath, "label_mapping.json")
with open(labels_dirpath, 'r') as fp:
label_mapping = json.load(fp)
loaded_model = cls(label_mapping, parameters)
return loaded_model
def save_to_disk(self, dirpath):
"""
Saves whole model to disk with weights.
:param dirpath: directory path where you want to save the model to
:type dirpath: str
:return: None
"""
if not os.path.isdir(dirpath):
os.makedirs(dirpath)
model_param_dirpath = os.path.join(dirpath, "model_parameters.json")
with open(model_param_dirpath, 'w') as fp:
json.dump(self._parameters, fp)
labels_dirpath = os.path.join(dirpath, "label_mapping.json")
with open(labels_dirpath, 'w') as fp:
json.dump(self.label_mapping, fp)
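# Minimal usage sketch (hypothetical labels/patterns, not part of the library's shipped defaults):
#
#     label_mapping = {'BACKGROUND': 0, 'SSN': 1}
#     parameters = {'regex_patterns': {'SSN': [r'\d{3}-\d{2}-\d{4}']}}
#     model = RegexModel(label_mapping=label_mapping, parameters=parameters)
#     output = model.predict(['my ssn is 123-45-6789'])
#     # output['pred'] holds one (len(string) x num_labels) array of 0/1 votes per character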
| 2.390625 | 2 |