content (stringlengths 0-894k) | type (stringclasses: 2 values) |
---|---|
import pandas as pd
from numpy import datetime64
from pandas_datareader import data
from pandas.core.series import Series
from pandas.core.frame import DataFrame
from yahoofinancials import YahooFinancials
# holding period return in percents
def get_holding_period_return(df: DataFrame, start, end, col) -> float:
start_price = df.at[pd.to_datetime(start), col]
end_price = df.at[pd.to_datetime(end), col]
return round((end_price - start_price) * 100 / start_price, 2)
# calculates return for the last x days in percents
def get_rolling_x_day_return(df: DataFrame, col: str, days: int) -> DataFrame:
def f(x: Series):
return round((x.iloc[-1] - x.iloc[0]) * 100 / x.iloc[0], 2)
df['Rolling Return'] = df[col].rolling(days).apply(f)
return df
# Rolling return rank is based on the
# percentile the stock's rolling returns fall into.
def get_market_timing(df: DataFrame, col: str) -> DataFrame:
df['{} Rank'.format(col)] = round(df[col].rank(pct=True) * 100, 2)
return df
def format_quater(dt: datetime64) -> str:
return '{}Q{}'.format(int((dt.month - 1) / 3) + 1, dt.year)
# read stock data from Yahoo Finance and return adjusted close prices
def read_stock_dataframe(start: str, end: str, symbol: str) -> DataFrame:
df = data.get_data_yahoo(symbol, start, end)
df['Quarter'] = df.index
df['Quarter'] = df['Quarter'].apply(format_quater)
df = df[['Quarter', 'Adj Close']]
return df
# return df with the following columns
# Quarter, Shares Outstanding, Net Income, Total Sales, Book Value
def read_quarter_res(symbol: str) -> DataFrame:
df = {
'Quarter': [],
'Book Value': [],
'Net Income': [],
'Total Sales': [],
'Shares Outstanding': [],
}
stock = YahooFinancials(symbol)
res = stock.get_financial_stmts('quarterly', ['income', 'balance'])
income = res['incomeStatementHistoryQuarterly'][symbol]
balance = res['balanceSheetHistoryQuarterly'][symbol]
for dt in [list(d.items())[0][0] for d in income]:
dt_income = [x[dt] for x in income if dt in x.keys()][0]
dt_balance = [x[dt] for x in balance if dt in x.keys()][0]
df['Quarter'].append(format_quater(pd.to_datetime(dt)))
df['Book Value'].append(dt_balance['netTangibleAssets'])
df['Net Income'].append(dt_income['netIncome'])
df['Total Sales'].append(dt_income['totalRevenue'])
df['Shares Outstanding'].append(dt_balance['commonStock'])
return pd.DataFrame.from_dict(df)
def join_stock_data(df_stock: DataFrame, df_financial: DataFrame) -> DataFrame:
df_stock = get_rolling_x_day_return(df_stock, 'Adj Close', 30)
df_stock = get_market_timing(df_stock, 'Rolling Return')
return pd.merge(df_stock, df_financial, on='Quarter', how='inner')
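# --- Hypothetical usage sketch (not part of the original module) ---
# Requires network access to Yahoo Finance; the symbol and dates below are
# illustrative placeholders only.
# df_prices = read_stock_dataframe('2019-01-01', '2020-01-01', 'AAPL')
# df_financials = read_quarter_res('AAPL')
# combined = join_stock_data(df_prices, df_financials)
# print(combined.head())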
|
python
|
class JobLookupError(KeyError):
"""Raised when the job store cannot find a job for update or removal."""
def __init__(self, job_id):
super().__init__(u'No job by the id of %s was found' % job_id)
class ConflictingIdError(KeyError):
"""Raised when the uniqueness of job IDs is being violated."""
def __init__(self, job_id):
super().__init__(
u'Job identifier (%s) conflicts with an existing job' % job_id)
class TransientJobError(ValueError):
"""
Raised when an attempt to add transient (with no func_ref) job to a persistent job store is
detected.
"""
def __init__(self, job_id):
super().__init__(
f'Job ({job_id}) cannot be added to this job store because a reference to the '
f'callable could not be determined.')
class SerializationError(Exception):
"""Raised when a serializer fails to serialize the given object."""
class DeserializationError(Exception):
"""Raised when a serializer fails to deserialize the given object."""
class MaxIterationsReached(Exception):
"""
Raised when a trigger has reached its maximum number of allowed computation iterations when
trying to calculate the next fire time.
"""
|
python
|
import pytest
from src.dataToCode.dataClasses.classData import ClassData
from src.dataToCode.dataClasses.interface import Interface
from src.dataToCode.languages.toPython.fileNameToPython import FileNameToPython
data = [
("Orc", "orc.py"),
("HighOrc", "high_orc.py"),
("PrettyLongClassName", "pretty_long_class_name.py")
]
@pytest.mark.parametrize("data_name, expected", data)
def test_file_names_with_class(data_name, expected):
assert FileNameToPython(ClassData(data_name)).get_file_name() == expected
@pytest.mark.parametrize("data_name, expected", data)
def test_file_names_with_interface(data_name, expected):
assert FileNameToPython(Interface(data_name, [])).get_file_name() == expected
|
python
|
# -*- coding: utf-8 -*-
"""
# @Time : 29/06/18 12:23 PM
# @Author : ZHIMIN HOU
# @FileName: run_Control.py
# @Software: PyCharm
# @Github : https://github.com/hzm2016
"""
import numpy as np
np.random.seed(1)
import time
import gym
import gym_puddle
import gym.spaces
import pickle
from algorithms import *
from Tile_coding import *
import argparse
"""Superparameters"""
OUTPUT_GRAPH = True
MAX_EPISODE = 5000
DISPLAY_REWARD_THRESHOLD = 4001
MAX_EP_STEPS = 5000
"""Environments Informations :: Puddle world"""
env = gym.make('PuddleWorld-v0')
env.seed(1)
env = env.unwrapped
# print("Environments information:")
# print(env.action_space.n)
# print(env.observation_space.shape[0])
# print(env.observation_space.high)
# print(env.observation_space.low)
"""Tile coding"""
NumOfTilings = 10
MaxSize = 100000
HashTable = IHT(MaxSize)
"""position and velocity needs scaling to satisfy the tile software"""
PositionScale = NumOfTilings / (env.observation_space.high[0] - env.observation_space.low[0])
VelocityScale = NumOfTilings / (env.observation_space.high[1] - env.observation_space.low[1])
def getQvalueFeature(obv, action):
activeTiles = tiles(HashTable, NumOfTilings, [PositionScale * obv[0], VelocityScale * obv[1]], [action])
return activeTiles
def getValueFeature(obv):
activeTiles = tiles(HashTable, NumOfTilings, [PositionScale * obv[0], VelocityScale * obv[1]])
return activeTiles
"""Parameters"""
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--dir', default='../control_data')
parser.add_argument('alpha', type=float) # default=np.array([1e-4, 5e-4, 1e-3, 1e-2, 0.5, 1.])
parser.add_argument('--alpha_h', type=float, default=np.array([0.0001]))
parser.add_argument('--eta', type=float, default=0.0)
parser.add_argument('lambda', type=float) # default=np.array([0., 0.2, 0.4, 0.6, 0.8, 0.99])
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--decay', type=float, default=0.99)
parser.add_argument('--ISW', type=int, default=0)
parser.add_argument('--left_probability', type=float, dest='left_probability', default=0.05)
parser.add_argument('--left_probability_end', type=float, dest='left_probability_end', default=0.75)
parser.add_argument('--num_seeds', type=int, dest='num_seeds', default=100)
parser.add_argument('--num_runs', type=int, dest='num_runs', default=30)
parser.add_argument('--num_states', type=int, dest='num_states', default=5)
parser.add_argument('--num_actions', type=int, dest='num_actions', default=2)
parser.add_argument('--num_episodes', type=int, dest='num_episodes', default=4000)
parser.add_argument('--num_frequency', type=int, dest='num_frequency', default=1000)
parser.add_argument('--num_change', type=int, dest='num_change', default=1000)
parser.add_argument('--all_algorithms', type=str, dest='all_algorithms', default=['OffPAC'])
parser.add_argument('--behavior_policy', type=float, dest='behavior_policy',
default=np.array([0.2, 0.2, 0.2, 0.2, 0.2]))
parser.add_argument('--target_policy', type=float, dest='target_policy',
default=np.array([0., 0., 0.5, 0., 0.5]))
parser.add_argument('--test_name', default='puddle_control')
args = vars(parser.parse_args())
if 'num_steps' not in args:
args['num_steps'] = args['num_states'] * 100
return args
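# Example invocation (hypothetical values; 'alpha' and 'lambda' are the two
# positional arguments defined above):
#   python run_Control.py 0.001 0.9 --num_runs 5 --num_episodes 1000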
def control_performance(off_policy, behavior_policy):
average_reward = []
for j in range(100):
t = 0
track_r = []
observation = env.reset()
action_test = off_policy.start(getValueFeature(observation), behavior_policy)
while True:
observation_, reward, done, info = env.step(action_test)
track_r.append(reward)
action_test = off_policy.choose_action(getValueFeature(observation))
observation = observation_
t += 1
if done or t > MAX_EP_STEPS:
average_reward.append(sum(track_r))
break
return np.mean(average_reward)
def play_control(learner, behavior_policy):
t = 0
observation = env.reset()
action = learner.start(getValueFeature(observation), behavior_policy)
while True:
observation_, reward, done, info = env.step(action)
action, delta = learner.step(reward, getValueFeature(observation), behavior_policy)
observation = observation_
t += 1
if done or t > MAX_EP_STEPS:
break
"""
################################offPolicyControl#########################
utilized target policy to generate a trajectory
sampled 2000 states from one trajectory
and run 500 Monte Carlo rollouts to compute an estimate true value
"""
if __name__ == '__main__':
args = parse_args()
"""Run all the parameters"""
print('#############################offPolicyControl::Env_puddle#########################')
rewards = np.zeros((len(args['all_algorithms']), args['num_runs'], int(args['num_episodes']/50)))
for agentInd, agent in enumerate(args['all_algorithms']):
for run in range(args['num_runs']):
if agent == 'OffPAC':
learner = OffPAC(MaxSize, env.action_space.n, args['gamma'], args['eta'], \
args['alpha']*10, args['alpha'], args['alpha_h'], args['lambda'], args['lambda'])
else:
print('Please give the right agent!!')
continue
for ep in range(args['num_episodes']):
play_control(learner, args['behavior_policy'])
if ep > 0 and ep % 50 == 0:
cum_reward = control_performance(learner, args['behavior_policy'])
rewards[agentInd, run, int(ep/50)] = cum_reward
print('agent %s, run %d, episode %d, rewards%d' % (agent, run, ep, cum_reward))
with open('{}/rmse_{}_alpha_{}_lambda_{}.npy'.format(args['dir'], args['test_name'], args['alpha'], args['lambda']), 'wb') as outfile:
np.save(outfile, rewards)
# off_policy = OffActorCritic(MaxSize, env.action_space.n, \
# gamma, eta, alphas[0]*10, alphas[0], lams[0], lams[0])
# behavior_policy = np.array([0.2, 0.2, 0.2, 0.2, 0.2])
# for i_espisode in range(MAX_EPISODE):
#
# t = 0
# track_r = []
# observation = env.reset()
# action = off_policy.start(getValueFeature(observation), behavior_policy)
# while True:
#
# observation_, reward, done, info = env.step(action)
# track_r.append(reward)
# optimal_action, delta = off_policy.step(reward, getValueFeature(observation), behavior_policy)
# observation = observation_
# action = np.random.choice(env.action_space.n, p=behavior_policy)
# t += 1
#
# if done or t > MAX_EP_STEPS:
# break
#
# if i_espisode % 100 == 0:
# cum_reward = test(off_policy)
# print('num_espisode %d, cumulative_reward %f' % (i_espisode, cum_reward))
#
# LinearAC = DiscreteActorCritic(MaxSize, env.action_space.n, 0.99, 0., 1e-4, 1e-5, 0.3, 0.3)
# espisode_reward = []
# observation = env.reset()
# action = LinearAC.start(getValueFeature(observation))
#
# for i_espisode in range(MAX_EPISODE):
#
# t = 0
# track_r = []
# while True:
#
# observation_, reward, done, info = env.step(action)
# track_r.append(reward)
# action, delta = LinearAC.step(reward, getValueFeature(observation))
# observation = observation_
# t += 1
# if done or t > MAX_EP_STEPS:
#
# observation = env.reset()
# ep_rs_sum = sum(track_r)
# if 'running_reward' not in globals():
# running_reward = ep_rs_sum
# else:
# running_reward = running_reward * 0.99 + ep_rs_sum * 0.01
# print("episode:", i_espisode, "reward:", int(running_reward))
# espisode_reward.append(int(running_reward))
# break
|
python
|
from .black_scholes_process import BlackScholesMertonProcess
|
python
|
from datetime import datetime
from os.path import dirname, join
import pytest
from city_scrapers_core.constants import COMMISSION
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from city_scrapers.spiders.chi_ssa_8 import ChiSsa8Spider
test_response = file_response(
join(dirname(__file__), "files", "chi_ssa_8.html"),
url="https://lakevieweast.com/ssa-8/",
)
spider = ChiSsa8Spider()
freezer = freeze_time("2020-07-29")
freezer.start()
parsed_items = [item for item in spider.parse(test_response)]
freezer.stop()
def test_title():
assert parsed_items[0]["title"] == "Special Meeting"
assert parsed_items[3]["title"] == "2021 budget meeting"
def test_description():
assert parsed_items[0]["description"] == "Special Meeting"
assert parsed_items[3]["description"] == "2021 budget meeting"
def test_start():
assert parsed_items[0]["start"] == datetime(2020, 2, 25, 0, 0)
assert parsed_items[3]["start"] == datetime(2020, 7, 23, 0, 0)
def test_end():
assert parsed_items[0]["end"] is None
def test_time_notes():
assert parsed_items[0]["time_notes"] == ""
def test_id():
assert parsed_items[0]["id"] == "chi_ssa_8/202002250000/x/special_meeting"
assert parsed_items[3]["id"] == "chi_ssa_8/202007230000/x/2021_budget_meeting"
def test_status():
assert parsed_items[0]["status"] == "passed"
def test_location():
assert parsed_items[0]["location"] == {
"name": "LVECC office",
"address": "3138 N. Broadway, Chicago, IL",
}
def test_source():
assert parsed_items[0]["source"] == "https://lakevieweast.com/ssa-8/"
def test_links():
assert parsed_items[0]["links"] == []
def test_classification():
assert parsed_items[0]["classification"] == COMMISSION
@pytest.mark.parametrize("item", parsed_items)
def test_all_day(item):
assert item["all_day"] is False
|
python
|
import os, sys
sys.path.insert(0, os.path.join("..",".."))
from nodebox.graphics.context import *
from nodebox.graphics import *
# Generate compositions using random text.
font('Arial Black')
def rndText():
"""Returns a random string of up to 9 characters."""
t = u""
for i in range(random(10)):
t += chr(random(10,120))
return t
def draw(canvas):
canvas.clear()
# Define some colors.
#colormode(HSB)
white = color(1,1,1,0.8)
black = color(0,0,0,0.8)
red = color(random(),0,0.2,0.8)
translate(0,-200)
for i in range(100):
# This translation is not reset every time, so it is
# appended to previous translations. This gives
# interesting effects.
translate(random(-100,100),random(-100,100))
# Save the current transformation. It's a good idea
# to do this in the beginning of a loop. End the
# loop with a pop.
push()
# Rotate in increments of 45 degrees.
rotate(random(5)*45)
fontsize(random(800))
fill(choice((white,black,red)))
someText = rndText()
text(someText, 0,0)
pop()
canvas.size = 500,500
canvas.run(draw)
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python 2/3 compatibility
from __future__ import print_function
import numpy as np
import cv2
import Queue
# local modules
from video import create_capture
from common import clock, draw_str
Sample_Num = 128
xx1 = lambda x1, x2: int((x1+x2)/2-(x2-x1)*0.2)
xx2 = lambda x1, x2: int((x1+x2)/2+(x2-x1)*0.2)
yy1 = lambda y1, y2: int(y1+(y2-y1)*0.1)
yy2 = lambda y1, y2: int(y1+(y2-y1)*0.2)
def detect(img, cascade):
rects = cascade.detectMultiScale(img, scaleFactor=1.3, minNeighbors=4, minSize=(30, 30),
flags=cv2.CASCADE_SCALE_IMAGE)
if len(rects) == 0:
return []
rects[:, 2:] += rects[:, :2]
return rects
def draw_rects(img, rects, color):
for x1, y1, x2, y2 in rects:
cv2.rectangle(img, (x1, y1), (x2, y2), color, 2)
cv2.rectangle(img, (xx1(x1,x2), yy1(y1,y2)),
(xx2(x1,x2), yy2(y1,y2)),
(0, 0, 255), 2)
if __name__ == '__main__':
import sys, getopt
q_data = Queue.Queue(maxsize=Sample_Num)
q_heart = Queue.Queue(maxsize=10)
q_samplefreq = Queue.Queue(maxsize=10)
args, video_src = getopt.getopt(sys.argv[1:], '', ['cascade=', 'nested-cascade='])
try:
video_src = video_src[0]
except:
video_src = 0
args = dict(args)
cascade_fn = args.get('--cascade', "haarcascade_frontalface_alt.xml")
cascade = cv2.CascadeClassifier(cascade_fn)
cam = create_capture(video_src)
while True:
ret, img = cam.read()
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray = cv2.equalizeHist(gray)
t = clock()
rects = detect(gray, cascade)
vis = img.copy()
if len(rects) > 0:
x1, y1, x2, y2 = rects[0]
xxx1, yyy1, xxx2, yyy2 = xx1(x1,x2), yy1(y1,y2), \
xx2(x1,x2), yy2(y1,y2)
gg = img[xxx1:xxx2, yyy1:yyy2, 1]
if q_data.full():
q_data.get()
q_data.put(gg)
else:
q_data.put(gg)
if len(rects) == 0 and not q_data.empty():
q_data.get()
zz = map(lambda x: np.sum(x.ravel()), np.array(q_data.queue))
draw_rects(vis, rects, (0, 255, 0))
dt = clock() - t
tf = 0
ft = 1000.0 / (dt * 1000 + 10+5)
if q_samplefreq.full():
q_samplefreq.get(); q_samplefreq.put(ft)
else:
q_samplefreq.put(ft)
ft = np.average(np.array(q_samplefreq.queue))
if q_data.full():
frez = np.abs(np.fft.fft(zz, Sample_Num))
frez[0:20] = 0
tf = frez[0:len(frez)/2]
tf = np.where(tf == max(tf))
tf = tf[0]*ft/Sample_Num*10
if q_heart.full():
q_heart.get()
q_heart.put(tf)
else:
q_heart.put(tf)
tf = np.average(np.array(q_heart.queue))
draw_str(vis, (20, 20), 'Sample Freq: %.0f Heartbeat Freq: %.0f Sample Num: %d '%(ft,tf, len(zz)))
cv2.imshow('Heartbeat frequency', vis)
if 0xFF & cv2.waitKey(5) == 27:
break
cv2.destroyAllWindows()
|
python
|
#!/usr/bin/env python
from __future__ import print_function, division
from multiprocessing import Pool
import os
from shutil import copy
import glob
from sqlalchemy import or_
from pyql.database.ql_database_interface import Master
from pyql.database.ql_database_interface import session
from pyql.database.ql_database_interface import UVIS_flt_0
"""This script queries the Quicklook database for images potentially
affected by dragons breath and copies them to
/grp/hst/wfc3t/sasp/data.
Authors
-------
Larissa Markwardt
Use
---
This program is intended to be executed via the command line as
such:
>>> python Dragons_Breath_Query.py
Dependencies
------------
This module depends on sqlalchemy and pyql.
"""
def copy_to_sasp(file_path):
"""This function copies the file located at file_path to
/grp/hst/wfc3t/sasp/data/.
Parameters
----------
file_path: string
Full path to the flt file.
"""
files_in_dir = glob.glob('/grp/hst/wfc3t/sasp/data/*_flt.fits')
basenames_in_dir = [os.path.basename(file) for file in files_in_dir]
if os.path.basename(file_path) not in basenames_in_dir:
try:
print('Copying from ' + file_path)
copy(file_path, '/grp/hst/wfc3t/sasp/data/')
print(file_path + ' is done!')
except IOError:
print(file_path + ' does not exist!')
else:
print(file_path + ' is already in the directory.')
def main():
"""Main function which queries for all of the images that might be
affected by Dragon's Breath and copies them to
/grp/hst/wfc3t/sasp/data.
The images that are to be included were taken with either the F606W
or F814W filters and had an exposure time > 300 seconds.
"""
results = session.query(Master.dir, Master.rootname).\
join(UVIS_flt_0).filter(UVIS_flt_0.exptime > 300).\
filter(or_(UVIS_flt_0.filter == 'F606W', UVIS_flt_0.filter == 'F814W')).\
all()
origin_paths = ['{}_flt.fits'.format(os.path.join(item.dir, item.rootname)) for item in results]
print('Starting to copy images...')
p = Pool(4) # for linux server
p.map(copy_to_sasp, origin_paths)
print('Complete. All files copied over.')
if __name__ == "__main__":
main()
|
python
|
"""This module contains loosely related utility functions used by the Gemicai"""
from string import Template
import os
from tabulate import tabulate
from collections import Counter
from math import log
from matplotlib import pyplot as plt
import numpy as np
import itertools
from sklearn.metrics import confusion_matrix
import datetime
def strfdelta(tdelta, fmt='%H:%M:%S'):
"""Similar to strftime, but this one is for a datetime.timedelta object.
:param tdelta: datetime object containing some time difference
:type tdelta: datetime.timedelta
:param fmt: string with a format
:type fmt: str
:return: string containing a time in a given format
"""
class DeltaTemplate(Template):
delimiter = "%"
d = {"D": tdelta.days}
hours, rem = divmod(tdelta.seconds, 3600)
minutes, seconds = divmod(rem, 60)
d["H"] = '{:02d}'.format(hours)
d["M"] = '{:02d}'.format(minutes)
d["S"] = '{:02d}'.format(seconds)
t = DeltaTemplate(fmt)
return t.substitute(**d)
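# Quick illustrative check (not part of the original module):
# >>> strfdelta(datetime.timedelta(hours=3, minutes=5, seconds=7))
# '03:05:07'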
def format_byte_size(num):
"""Returns a given number as a formatted binary unit string
:param num: number to format, e.g. 1048576
:type num: int
:return: a binary unit formatted string, e.g. '10.0 MB'
"""
unit_list = list(zip(['bytes', 'kB', 'MB', 'GB', 'TB', 'PB'], [0, 0, 1, 2, 2, 2]))
if num > 1:
exponent = min(int(log(num, 1024)), len(unit_list) - 1)
quotient = float(num) / 1024 ** exponent
unit, num_decimals = unit_list[exponent]
format_string = '{:.%sf} {}' % num_decimals
return format_string.format(quotient, unit)
if num == 0:
return '0 bytes'
if num == 1:
return '1 byte'
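# Illustrative examples (not part of the original module):
# >>> format_byte_size(1)
# '1 byte'
# >>> format_byte_size(2048)
# '2 kB'
# >>> format_byte_size(3 * 1024 * 1024)
# '3.0 MB'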
def dir_info(directory):
"""Prints extensions, file names and sizes of all files contained inside of a specified directory.
:param directory: directory to iterate over
:type directory: Union[str, os.path]
"""
if not os.path.isdir(directory):
raise NotADirectoryError('{} isn\'t a directory'.format(directory))
cnt_ext, sum_size = Counter(), {}
for root, dirs, files in os.walk(directory):
for f in files:
fp = os.path.join(root, f)
if not os.path.islink(fp):
ext = f[f.find('.'):]
cnt_ext.update({ext: 1})
if ext in sum_size:
sum_size[ext] += os.path.getsize(fp)
else:
sum_size[ext] = os.path.getsize(fp)
data = []
for k in sum_size:
data.append([k, cnt_ext[k], format_byte_size(sum_size[k])])
if len(data) > 1:
data.append(['TOTAL', sum(cnt_ext.values()), format_byte_size(sum(sum_size.values()))])
print(tabulate(data, headers=['Extension', 'Files', 'Size'], tablefmt='orgtbl'), '\n')
def table_print(template, data, is_header=False):
"""Prints a row of a table using a specified template and data
:param template: list of strings containing a table row template
:type template: list
:param data: list of strings containing a table row data
:type data: list
:param is_header: whether the passed data should be formatted and printed as a header
:type is_header: bool
"""
assert len(template) == len(data), 'Template length and data length should be equal!'
for i, d in enumerate(data):
data[i] = template[i].format(str(d))
s = '| '
for d in data:
s += d + ' | '
print(s)
if is_header:
line = '|'
for d in data:
line += '-' * len(d) + '--+'
line = line[:-1]
print(line+'|')
# if is_header:
# print(tabulate([[]], headers=data, tablefmt='orgtbl'))
# else:
# print(tabulate(data, tablefmt='orgtbl'))
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
# Normalize before plotting so the displayed image matches the chosen mode
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
# else:
# print('Confusion matrix, without normalization')
# print(cm)
plt.figure(figsize=(12, 12))
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
# plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j],
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
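# Hypothetical usage sketch; the labels and predictions below are made up.
# sklearn's confusion_matrix (imported above) builds the matrix to plot:
# y_true = ['cat', 'dog', 'dog', 'cat']
# y_pred = ['cat', 'dog', 'cat', 'cat']
# cm = confusion_matrix(y_true, y_pred, labels=['cat', 'dog'])
# plot_confusion_matrix(cm, classes=['cat', 'dog'], normalize=True)
# plt.show()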
|
python
|
# CRINGE FILE NAME ALERT(PC culture is not even rational)
import sys
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'autogger',
'USER': 'postgres_user',
'PASSWORD': 'postgres_user_password',
'HOST': 'localhost',
'PORT': '5432',
}
}
if 'test' in sys.argv:
DATABASES['default'] = {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_autogger',
'USER': 'postgres_user',
'PASSWORD': 'postgres_user_password',
'HOST': 'localhost',
}
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--with-coverage',
'--cover-package=core,jira,github,user',
'--cover-html',
'--cover-erase'
]
ALLOWED_HOSTS = ['localhost', ]
ROOT_SITE_DOMAIN = 'http://localhost:8090/'
DJANGO_ADMIN_SUPER_USER_EMAIL = '[email protected]'
DJANGO_ADMIN_SUPER_USER_PASSWORD = 'EXAMPLE_PASSWORD'
|
python
|
# --------------
# Importing header files
import numpy as np
import warnings
warnings.filterwarnings('ignore')
#New record
new_record=[[50, 9, 4, 1, 0, 0, 40, 0]]
#Reading file
data = np.genfromtxt(path, delimiter=",", skip_header=1)
#print(data)
#Code starts here
#Task -1 : In this first task, we will load the data to a numpy array and add a new record to it.
census=np.concatenate((data,new_record))
print(census.shape)
age=census[:,0].astype('int32')
print(age)
#Task -2 We often associate the potential of a country based on the age distribution of the people residing there. We too want to do a simple analysis of the age distribution
max_age=np.max(age)
min_age=np.min(age)
age_mean=age.mean()
age_std=np.std(age)
print(max_age)
print(min_age)
print(age_mean)
print(age_std)
#Task -3 The constitution of the country tries its best to ensure that people of all races are able to live harmoniously. Let's check the country's race distribution to identify the minorities so that the government can help them.
race_0=np.array([race for race in census[:,2] if race == 0])
race_1=np.array([race for race in census[:,2] if race == 1])
race_2=np.array([race for race in census[:,2] if race == 2])
race_3=np.array([race for race in census[:,2] if race == 3])
race_4=np.array([race for race in census[:,2] if race == 4])
len_0=len(race_0)
len_1=len(race_1)
len_2=len(race_2)
len_3=len(race_3)
len_4=len(race_4)
min_race=np.min([len_0,len_1,len_2,len_3,len_4])
if len_0==min_race:
minority_race=0
elif len_1==min_race:
minority_race=1
elif len_2==min_race:
minority_race=2
elif len_3==min_race:
minority_race=3
elif len_4==min_race:
minority_race=4
print(minority_race)
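# Aside (not in the original): assuming race codes are the integers 0-4, the
# same minority race can be found more concisely with numpy:
# minority_race_alt = int(np.argmin(np.bincount(census[:, 2].astype(int), minlength=5)))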
# Task - 4 As per the new govt. policy, all citizens above age 60 should not be made to work more than 25 hours per week. Let us look at the data and see if that policy is followed.
senior_citizens=[i for i in census.astype('int32') if i[0]>60]
senior_citizens_len=len(senior_citizens)
print(senior_citizens_len)
#working_hours_sum=[j.size for i,j in zip(senior_citizens,census[:,6].astype('int32'))]
#print(working_hours_sum)
working_hours_sum=0
for i in senior_citizens:
working_hours_sum+=i[6]
print(working_hours_sum)
avg_working_hours=working_hours_sum/senior_citizens_len
print(avg_working_hours)
#Task -6 Our parents have repeatedly told us that we need to study well in order to get a good(read: higher-paying) job. Let's see whether the higher educated people have better pay in general.
high=np.array([i for i in census.astype('int32') if i[1]>10])
low=np.array([i for i in census.astype('int32') if i[1]<=10])
sum=0
for i in high[:,7]:
sum+=i
avg_pay_high=(sum/len(high))
print(round(avg_pay_high,2))
sum=0
for i in low[:,7]:
sum+=i
avg_pay_low=(sum/len(low))
print(round(avg_pay_low,2))
|
python
|
import pygame
from pygame import color
import os
letter_x = pygame.image.load(os.path.join('res', 'letter_x.png'))
letter_0 = pygame.image.load(os.path.join('res', 'letter_o.png'))
class Grip:
def __init__(self) :
self.grip_line = [((0, 200), (600, 200)), #1st horizontal line
((0, 400), (600, 400)), #2nd horizontal line
((200, 0), (200, 600)), #1st vertical line
((400, 0), (400, 600))] #2nd vertical line
self.grid = [[0 for x in range (3)] for y in range (3)]
self.switch_player = True
#search direction around the current input
#direction NE E SE S SW W NW N
self.search_direction = [(1, -1), (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1), (0, -1)]
self.game_over = False
def print_grid(self):
for row in self.grid :
print(row)
def get_cell_value(self, x, y):
return self.grid[y][x]
def set_cell_value(self, x, y, value):
self.grid[y][x] = value
#set 'x' or 'o' at a position in the grid
def get_mouse(self, x, y, player):
if self.get_cell_value(x, y) == 0:
self.switch_player = True
if player == 'x' :
self.set_cell_value(x, y, 'x')
elif player == 'o':
self.set_cell_value(x, y, 'o')
else:
self.switch_player = False
def draw(self, surface):
#draw the line for game
for lines in self.grip_line:
pygame.draw.line(surface, (200,200,200), lines[0], lines[1], 2)
#draw the 'x' and 'o' every time the player presses
for y in range( len(self.grid)):
for x in range (len(self.grid[y])):
#check if the cell is empty before input
if self.get_cell_value(x, y) == 'x':
surface.blit(letter_x, (x*200, y*200))
elif self.get_cell_value(x, y) == 'o':
surface.blit(letter_0, (x*200, y*200))
#check whether the cell being examined is inside the playing area
def is_in_bound(self, x, y):
return x >= 0 and x < 3 and y >= 0 and y <3
def is_grip_full(self):
for row in self.grid:
for value in row:
if value == 0:
return False
return True
#check for winner
#check around the current cell x, y coord
def check_grip(self, x, y, player) :
#for loop go to each of the direction to check
#reset count to 1 when move to the new direction
for (dir_x, dir_y) in (self.search_direction):
count = 1
#move to the next cell in current direction
xx = x + dir_x
yy = y + dir_y
#while loop to continue checking in that direction
while self.is_in_bound(xx, yy) and player == self.get_cell_value(xx, yy):
count += 1
xx += dir_x
yy += dir_y
#after checking everything in the current direction, if count == 3 the current player wins
if count == 3:
print(f'Player : {player} win' )
self.game_over = True
break
elif self.is_grip_full() == True:
self.game_over = True
def clear_grip(self):
self.grid = [[0 for x in range (3)] for y in range (3)]
self.game_over = False
|
python
|
from dbconnect import session, User, Offer, Skills, Languages
from tweetparse import tweetParse
def addUserToDB(mentor, twit_uid, tweet_id, screenname):
userVar = User(mentor_mentee=mentor, twitter_uid=twit_uid, original_tweet_id=tweet_id, scrn_name=screenname)
session.add(userVar)
session.commit()
return userVar
def addSkills(user, skillist):
for idx, skill in enumerate(skillist):
if idx == 0:
skillVar = Skills(skills_1=skill, skillset=user)
session.add(skillVar)
session.commit()
elif idx == 1:
skillVar = Skills(skills_2=skill, skillset=user)
session.add(skillVar)
session.commit()
elif idx == 2:
skillVar = Skills(skills_3=skill, skillset=user)
session.add(skillVar)
session.commit()
elif idx == 3:
skillVar = Skills(skills_4=skill, skillset=user)
session.add(skillVar)
session.commit()
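# Aside (not in the original code): the four nearly identical branches above
# could be collapsed with setattr, assuming Skills really exposes the columns
# skills_1 ... skills_4 implied by the branches; behavior is kept the same
# (one Skills row per skill):
# def addSkillsCompact(user, skillist):
#     for idx, skill in enumerate(skillist[:4]):
#         skillVar = Skills(skillset=user)
#         setattr(skillVar, 'skills_{}'.format(idx + 1), skill)
#         session.add(skillVar)
#     session.commit()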
def addLangs(user, langlist):
for idx, lang in enumerate(langlist):
if idx == 0:
langVar = Languages(languages_1=lang, polyglot=user)
session.add(langVar)
session.commit()
elif idx == 1:
langVar = Languages(languages_2=lang, polyglot=user)
session.add(langVar)
session.commit()
elif idx == 2:
langVar = Languages(languages_3=lang, polyglot=user)
session.add(langVar)
session.commit()
elif idx == 3:
langVar = Languages(languages_4=lang, polyglot=user)
session.add(langVar)
session.commit()
def addOffer(user, offlist):
for idx, off in enumerate(offlist):
if idx == 0:
offVar = Offer(offer_1=off, useroffer=user)
session.add(offVar)
session.commit()
elif idx == 1:
offVar = Offer(offer_2=off, useroffer=user)
session.add(offVar)
session.commit()
elif idx == 2:
offVar = Offer(offer_3=off, useroffer=user)
session.add(offVar)
session.commit()
def dbCheck(user_id_str, mentor_mentee):
dbCheckBool = False
userList = session.query(User).filter_by(twitter_uid=user_id_str).all()
for user in userList:
if user.mentor_mentee == mentor_mentee:
dbCheckBool = True
return dbCheckBool
# mentor = session.query(User).filter_by(="mentor").first()
# mentor_status, langs, skill, offer = tweetParse('#WomenToTech #Mentor -langs: javascript, python, haskell -skill: node.js, d3.js, jinja2 -offering: help getting started')
#
# user_one = addUserToDB(mentor_status, 'tragiccabbage')
# addSkills(user_one, skill)
# addLangs(user_one, langs)
# addOffer(user_one, offer)
|
python
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from unittest import mock
from sushy.resources import constants as res_cons
from sushy.resources.system import ethernet_interface
from sushy.tests.unit import base
class EthernetInterfaceTestCase(base.TestCase):
def setUp(self):
super(EthernetInterfaceTestCase, self).setUp()
self.conn = mock.Mock()
with open('sushy/tests/unit/json_samples/'
'ethernet_interfaces.json') as f:
self.json_doc = json.load(f)
self.conn.get.return_value.json.return_value = self.json_doc
eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/"
"12446A3B0411")
self.sys_eth = ethernet_interface.EthernetInterface(
self.conn, eth_path, redfish_version='1.0.2')
def test__parse_attributes(self):
self.sys_eth._parse_attributes(self.json_doc)
self.assertEqual('1.0.2', self.sys_eth.redfish_version)
self.assertEqual('1', self.sys_eth.identity)
self.assertEqual('Ethernet Interface', self.sys_eth.name)
self.assertEqual('System NIC 1', self.sys_eth.description)
self.assertEqual(
'12:44:6A:3B:04:11', self.sys_eth.permanent_mac_address)
self.assertEqual('12:44:6A:3B:04:11', self.sys_eth.mac_address)
self.assertEqual(res_cons.STATE_ENABLED, self.sys_eth.status.state)
self.assertEqual(res_cons.HEALTH_OK, self.sys_eth.status.health)
self.assertEqual(1000, self.sys_eth.speed_mbps)
class EthernetInterfaceCollectionTestCase(base.TestCase):
def setUp(self):
super(EthernetInterfaceCollectionTestCase, self).setUp()
self.conn = mock.Mock()
with open('sushy/tests/unit/json_samples/'
'ethernet_interfaces_collection.json') as f:
self.json_doc = json.load(f)
self.conn.get.return_value.json.return_value = self.json_doc
self.sys_eth_col = ethernet_interface.EthernetInterfaceCollection(
self.conn, '/redfish/v1/Systems/437XR1138R2/EthernetInterfaces',
redfish_version='1.0.2')
def test__parse_attributes(self):
self.sys_eth_col._parse_attributes(self.json_doc)
self.assertEqual('1.0.2', self.sys_eth_col.redfish_version)
self.assertEqual('Ethernet Interface Collection',
self.sys_eth_col.name)
eth_path = ('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/'
'12446A3B0411',)
self.assertEqual(eth_path, self.sys_eth_col.members_identities)
@mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True)
def test_get_member(self, mock_eth):
self.sys_eth_col.get_member(
'/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/'
'12446A3B0411')
mock_eth.assert_called_once_with(
self.sys_eth_col._conn,
('/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/'
'12446A3B0411'),
redfish_version=self.sys_eth_col.redfish_version, registries=None,
root=self.sys_eth_col.root)
@mock.patch.object(ethernet_interface, 'EthernetInterface', autospec=True)
def test_get_members(self, mock_eth):
members = self.sys_eth_col.get_members()
eth_path = ("/redfish/v1/Systems/437XR1138R2/EthernetInterfaces/"
"12446A3B0411")
calls = [
mock.call(self.sys_eth_col._conn, eth_path,
redfish_version=self.sys_eth_col.redfish_version,
registries=None,
root=self.sys_eth_col.root),
]
mock_eth.assert_has_calls(calls)
self.assertIsInstance(members, list)
self.assertEqual(1, len(members))
def test_summary(self):
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
'ethernet_interfaces.json') as f:
self.conn.get.return_value.json.return_value = json.load(f)
expected_summary = {'12:44:6A:3B:04:11': res_cons.STATE_ENABLED}
actual_summary = self.sys_eth_col.summary
self.assertEqual(expected_summary, actual_summary)
|
python
|
# Interface 2 - Test
from tkinter import *
from tkinter import messagebox
# Still under construction - lesson 2
janela = Tk()
janela.title('Systems OS')
janela.geometry('500x400')
w= Spinbox(janela, values=("Python", "HTML", "Java", "Javascript"))
w.pack()
e = Spinbox(janela, values=("carnes", "Verduras", "Legumes", "Frutas"))
e.pack()
janela.mainloop()
|
python
|
#!/usr/bin/env python3
import SimpleITK as sITK
import nibabel as nib
import numpy as np
class nibabelToSimpleITK(object):
'''
Collection of methods to convert from nibabel to simpleITK and vice versa. Note: Only applicable for 3d images for
now.
'''
@staticmethod
def sitkImageFromNib( nibImageIn ):
'''
Convert a nibabel image into an SITK object.
@param nibImageIn: Image object read with nibabel library
'''
# Currently only 3d images are supported
if ( nibImageIn.header['dim'][0] != 3):
print("WARNING: This class is currently only intended for 3D images")
# Generate an sitk image object from the nibabel image array,
# Note that the order of the axes is reverted
sitkImage = sITK.GetImageFromArray(np.transpose( nibImageIn.get_fdata(), [2, 1, 0]))
# Set the image geometry
# - origin
sitkImage.SetOrigin(nibImageIn.affine[:3, 3] * np.array([-1, -1, 1]))
# - spacing
sitkImage.SetSpacing( nibImageIn.header['pixdim'][1:4].astype(np.double) )
# - direction
dirMatrix = nibImageIn.affine[:3, :3].copy()
dirMatrix[:, 0] = dirMatrix[:, 0] / np.linalg.norm(dirMatrix[:, 0])
dirMatrix[:, 1] = dirMatrix[:, 1] / np.linalg.norm(dirMatrix[:, 1])
dirMatrix[:, 2] = dirMatrix[:, 2] / np.linalg.norm(dirMatrix[:, 2])
dirMatrix[:2, :] = dirMatrix[:2, :] * (-1)
sitkImage.SetDirection(dirMatrix.reshape(-1))
return sitkImage
@staticmethod
def nibImageFromSITK( sITKImageIn ):
'''
Generate a new nifti image from a given SITK image.
@param sITKImageIn: The SimpleITK image object to be converted. Note: only 3D images are supported at the moment.
'''
# Currently only 3D images supported.
if (sITKImageIn.GetDimension() != 3):
print("WARNING: This class is currently only intended for 3D images")
affineMatrix = np.eye(4)
# Create the matrix according to itkSoftware guide
affineMatrix[:3,:3] = np.dot( np.diag(sITKImageIn.GetSpacing()), np.array(sITKImageIn.GetDirection()).reshape([3,-1]) )
affineMatrix[:3,3] = sITKImageIn.GetOrigin()
# Account for change in geometry dicom/ITK vs. nifti
affineMatrix[:2, :] = (-1) * affineMatrix[:2, :]
return nib.Nifti1Image( np.transpose( sITK.GetArrayFromImage( sITKImageIn ), [2,1,0]), affineMatrix )
if __name__ == '__main__':
# Conversion from SITK to nibabel
nImg = nib.load( 'C:/debugData/cidX/resampled_only_follow_up_12.nii' )
sReader = sITK.ImageFileReader()
sReader.SetFileName('C:/debugData/cidX/resampled_only_follow_up_12.nii')
sImg = sReader.Execute()
# Convert nibabel to simple ITK
convertedSITKImg = nibabelToSimpleITK.sitkImageFromNib( nImg )
# Convert simpleITK back to nibabel
recoveredNibImg = nibabelToSimpleITK.nibImageFromSITK( convertedSITKImg )
nib.save(recoveredNibImg, 'C:/debugData/cidX/resampled_only_follow_up_12_recNib.nii.gz')
# Check a simpleITK filter and save the converted nibabel image
cropper = sITK.CropImageFilter()
cropper.SetUpperBoundaryCropSize([1, 2, 3])
cropper.SetLowerBoundaryCropSize([4, 5, 6])
cImg = cropper.Execute( convertedSITKImg )
ncImg = nibabelToSimpleITK.nibImageFromSITK( cImg )
nib.save(ncImg, 'C:/debugData/cidX/resampled_only_follow_up_12_recNib_cropped.nii.gz')
|
python
|
#!/usr/bin/env python3
# Small program implementing the bisection method
# Looks for a zero of f in the interval [a,b]
# tol = prescribed tolerance used to stop the numerical method
# (otherwise it would be an infinite loop)
# Hypotheses: those of Bolzano's theorem
# f(a) and f(b) must have opposite signs
# f must be continuous on [a,b]
# We use a recursive call for didactic reasons.
# (It is easier to understand)
def biseccion(f, a, b, n=1, tol=1e-11):
# The variable n counts the iterations
# tol specifies the tolerance allowed
# for the numerical method
print("n= ",n,":Bisección en el intervalo",
"[", a,",", b, "]")
if abs(b - a) < tol:
return a
if f(a) == 0:
return a
if f(b) == 0:
return b
# Find the midpoint of the interval [a,b]
c = (a + b) / 2
if f(c) == 0:
return c
if f(a) > 0:
if f(b) > 0:
raise Exception ("Error: hipótesis!")
# If we get here, f(b) < 0
if f(c) > 0:
# we know that f(b) < 0 and f(c) > 0
return biseccion(f, c, b, n + 1, tol)
else:
# we know that f(a) > 0 and f(c) < 0
return biseccion(f, a, c, n + 1, tol)
else:
# case where f(a) < 0
if f(b) < 0:
raise Exception ("Error: hipótesis!")
# if we get here, f(b) > 0
if f(c) > 0:
# we know that f(a) < 0 and f(c) > 0
return biseccion(f, a, c, n + 1, tol)
else:
# we know that f(b) > 0 and f(c) < 0
return biseccion(f, c, b, n + 1, tol)
if __name__ == "__main__":
from math import sqrt, exp, log, sin, cos, pi
# Example 1: compute the square root of 2
# by finding a root of a polynomial
def mi_funcion(x):
return x * x - 2
raiz_hallada = biseccion(mi_funcion, 0, 2,tol=1e-3)
print("raiz_hallada=", raiz_hallada)
print("valor exacto=", sqrt(2))
# Another example: compute the logarithm of 2 as the inverse of the
# exponential
def mi_funcion2(x):
return exp(x) - 2
raiz_hallada = biseccion(mi_funcion2, 0, 2, tol=1e-11)
print("raiz_hallada=", raiz_hallada)
print("valor exacto=", log(2))
# Example 3: compute pi/4 by solving the equation
# sin(x) = cos(x)
def mi_funcion3(x):
return sin(x) - cos(x)
raiz_hallada = biseccion(mi_funcion3, 0, 3, tol=1e-12)
print("raiz_hallada=", raiz_hallada)
print("valor exacto=", pi / 4)
|
python
|
__author__ = "Samantha Lawler"
__copyright__ = "Copyright 2020"
__version__ = "1.0.1"
__maintainer__ = "Rabaa"
__email__ = "[email protected]"
import numpy as np
import sys
## Class: TestParticle
# Functions: Default Constructor, DataDissection, IdentifyResonance, PrintData
class TestParticle:
def __init__(self): # Attributes defined
self.Resonant = False
self.ResonanceType = 'n:n'
self.Name = 'N/A'
self.ResonanceCenter = -999
self.ResonanceAmplitude = -999
self.AverageSMA = -999 # Average semimajor axis
self.AverageEccentricity = -999
self.AverageInclination = -999
self.Kozai = False
self.SMAamplitude = -999
self.SMACenter = -999
self.Index = -1
############################################ FUNCTIONS #################################################
############################################ DATA DISSECTION #################################################
# Expects: typeOfData, IndexCount
# Will do: Alter the Resonance & Kozai attributes of the class, given the right orbital elements
def DataDissection(self, typeOfData, IndexCount):
self.Index = IndexCount
TestParticleSample = sys.argv[1] # User to choose a test sample using terminal
with open('tp' + TestParticleSample + ".out") as f: # Counting number of lines
for line, l in enumerate(f):
pass
NumberOfLines = line
# Taking the test point's data from the .out file sequentially
TestParticleTime, Index, SemiMajorAxis, Eccentricity, Inclination, Omega, omega, AngularPosition, LongitudeTP = np.genfromtxt(
'tp' + TestParticleSample + ".out", unpack=True)
Longitude = np.genfromtxt(
"LN.out", usecols= 8, unpack=True)
NumberOfLines = (NumberOfLines / (max(Index)+1)) -1 # Dividing the total number of lines by number of test particles, to get steps of one test particle.
# Matching the orbitals with the index we need
TestParticleTime = TestParticleTime[Index == IndexCount]
SemiMajorAxis = SemiMajorAxis[Index == IndexCount]
Eccentricity = Eccentricity[Index == IndexCount]
Inclination = Inclination[Index == IndexCount]
Omega = Omega[Index == IndexCount]
omega = omega[Index == IndexCount]
AngularPosition = AngularPosition[Index == IndexCount]
# Calculating Lambda, Pomega
Lambda = (Omega + omega + AngularPosition) % 360 # The Lambda for test particles
Pomega = (Omega + omega) % 360 # The longitude if pericenter in degrees
# Flags "Specific ones"
IsItResonant = False # Is it in resonance?
ResonanceAmplitude = -999 # The Resonance Amplitude
ResonanceCenter = -999 # The Resonance Center
ResonanceName = -999 # The Resonance name "Ration"
IsItKozai = False # Is it Kozai resonance?
SMAAmplitude = -999 # SemiMajor amplitude
SMACenter = -999 # SemiMajor center
# Flags "General ones"
IsIt = False # Resonance / Kozai ?
Amplitude = -999 # Phi / SMA
Center = -999 # Phi / SMA
Name = -999 # Name of the test particle
# General flags will be used in the coming loop, Specific flags will then be set at the end, to distinguish Kozai / Resonance
# list of resonances to check: pp and qq for pp:qq resonance
pp = [2, 3, 3, 4, 4, 5, 5, 5, 5, 6, 7, 7, 7, 7, 8, 8, 9, 9, 9, 10]
qq = [1, 1, 2, 1, 3, 1, 2, 3, 4, 1, 1, 2, 3, 4, 1, 3, 1, 2, 4, 1]
for jj in np.arange(0, len(pp)): # First Loop
ResSemiMajorAxis = 30.1 * (float(pp[jj]) / float(qq[jj])) ** (
2. / 3.) # Kepler's Third Law to calculate semimajor axis of the resonance
# Searching within 2 AUs from the resonance center
if IsIt == 0 and (ResSemiMajorAxis + 2) > np.average(SemiMajorAxis) > (ResSemiMajorAxis - 2):
phi = (float(pp[jj]) * Lambda - float(qq[jj]) * Longitude - (float(pp[jj]) - float(qq[jj])) * Pomega) % 360
AngleRange = np.arange(0, 360, 15) # Array of angles 15 degrees increment each step
Window = int(0)
Loop = 0
if typeOfData == 0:
# Dividing the timeline to 10 separate windows Detecting resonance on smaller scales
WindowStep = int(NumberOfLines / 10)
IsItArray = np.zeros(int(len(
phi) / WindowStep)) # Array of 10 binary elements to check for resonance each step '10%' set to zero
CenterArray = np.zeros(int(len(
phi) / WindowStep)) # Array of 10 binary elements to check the res angle each step '10%' set to zero
while Window + WindowStep < len(phi):
# Average of the semi-major axis from Current Window -> Next Window
WindowAverage = np.average(SemiMajorAxis[Window:Window + WindowStep])
if (ResSemiMajorAxis + 2) > WindowAverage > (
ResSemiMajorAxis - 2): # Within 2 AUs of Window Average
WindowPhi = phi[Window:Window + WindowStep] # Phi of next window
AnglePresent = np.zeros(len(AngleRange)) + 1
for step in np.arange(0, len(
AngleRange) - 1): # find out where the res angle doesn't go for 15 degrees, proxy for AnglePresent
if len(WindowPhi[
(WindowPhi > AngleRange[step]) * (WindowPhi < (AngleRange[step + 1]))]) == 0:
AnglePresent[step] = 0
IsItArray[Loop] = np.average(AnglePresent) * 180.
CenterArray[Loop] = np.average(
AnglePresent[AnglePresent != 0] * AngleRange[AnglePresent != 0])
else:
IsItArray[Loop] = 180.
Window += WindowStep # Increment Window
Loop += 1 # Increment Loop
if len(IsItArray[
IsItArray < 180.]) > 8: # If 8 out of 10 Windows classified as Resonant
IsIt = True
Amplitude = np.average(IsItArray)
Center = np.average(CenterArray)
Name = str(pp[jj]) + ':' + str(qq[jj])
MaxCenter = max(CenterArray)
MinCenter = min(CenterArray)
if (MaxCenter - MinCenter) > 210: # If the centers are too large in difference, it is not resonant
IsIt = False
Amplitude = -999
Center = -999
break
else:
Amplitude = -999
Center = -999
else:
# If checking for Kozai, we only want one window
WindowStep = int(NumberOfLines)
IsItArray = np.zeros(int(len(
omega) / WindowStep)) # For Kozai we check SMA
CenterArray = np.zeros(int(len(
omega) / WindowStep))
while Window + WindowStep < len(SemiMajorAxis):
# WindowSMA = SemiMajorAxis[Window:Window + WindowStep] # SMA of next window
AnglePresent = np.zeros(len(AngleRange)) + 1
for step in np.arange(0, len(
AngleRange) - 1): # find out where the res angle doesn't go for 15 degrees, proxy for AnglePresent
if len(omega[
(omega > AngleRange[step]) * (omega < (AngleRange[step + 1]))]) == 0:
AnglePresent[step] = 0
IsItArray[Loop] = np.average(AnglePresent) * 180.
CenterArray[Loop] = np.average(
AnglePresent[AnglePresent != 0] * AngleRange[AnglePresent != 0])
Window += WindowStep # Increment Window
Loop += 1 # Increment Loop
if len(IsItArray[
IsItArray < 180.]) == 1: # If the Window classified as Kozai
IsIt = True
Amplitude = np.average(IsItArray)
Center = np.average(CenterArray)
Name = str(pp[jj]) + ':' + str(qq[jj])
else:
Amplitude = -999
Center = -999
if typeOfData == 0: # Type 0 means we are looking if it was Resonant
IsItResonant = IsIt
ResonanceAmplitude = Amplitude
ResonanceCenter = Center
ResonanceName = Name
self.Resonant = IsItResonant
self.ResonanceAmplitude = ResonanceAmplitude
self.ResonanceCenter = ResonanceCenter
self.ResonanceType = ResonanceName
else: # Else 1 means we are looking if it was Kozai
IsItKozai = IsIt
SMAAmplitude = Amplitude
SMACenter = Center
self.Kozai = IsItKozai
self.SMAamplitude = SMAAmplitude
self.SMACenter = SMACenter
# End Else
self.Name = TestParticleSample
self.AverageEccentricity = np.average(Eccentricity)
self.AverageInclination = np.average(Inclination)
self.AverageSMA = np.average(SemiMajorAxis)
return
############################################ IDENTIFY RESONANCE ##############################################
# Expects: IndexCount
# Will do: First call to function DataDissection to check if resonant, if resonant, will do second call to check for Kozai
def IdentifyResonance(self, IndexCount):
type = 0 # Indicated that the variable Resonant is what we want from DataDissection function
self.DataDissection(type, IndexCount)
if self.Resonant == True:
type = 1 # Indicated that the variable Kozai is what we want from DataDissection function
self.DataDissection(type, IndexCount)
############################################## PRINT DATA ##############################################
# Expects: IndexCount
# Will do: Print Data Into a '.out' file Names tp + 'number you entered' + .out
def PrintData(self, IndexCount ):
TestParticleSample = sys.argv[1]
TestParticleTime, Index, SemiMajorAxis, Eccentricity, Inclination, Omega, omega, AngularPosition, Longitude = np.genfromtxt(
"tp" + TestParticleSample + ".out", unpack=True)
TextFile.write((str(self.Index) + " " +str(SemiMajorAxis[IndexCount]) + " " + str(Eccentricity[IndexCount]) + " " + str(Inclination[IndexCount]) + " " + str(Omega[IndexCount]) + " " + str(omega[IndexCount]) + " " + str(AngularPosition[IndexCount]) + " " + str(self.Name) + " " + str(self.AverageSMA) + " " + str(self.AverageEccentricity) + " " + str(self.AverageInclination) + " " + str(self.ResonanceCenter) + " " + str(self.ResonanceAmplitude) + " " + str(self.SMACenter) + " " + str(self.SMAamplitude) + " " + '\n'))
# Main function
if __name__ == '__main__':
TestParticleSample = sys.argv[1] # User to enter the number indicating the file number
Index = np.genfromtxt('tp' + TestParticleSample + ".out", usecols=1, unpack=True)
NumberOfTPs = max(Index) # Assuming there is more than one Testparticle, all with different timesteps, in the same file
TextFile = open("TestParticleResonance"+ TestParticleSample +".out", "a+")
TextFile.write("# SMA0 Ecc0 Inc0 Node0 ArgPeri0 MeanAnom0 Name AverageSMA AverageEcc AverageInc LibrationCenter LibrationAmp KozaiCenter KozaiAmp" + '\n')
IndexCount = 0
for IndexCount in range(0, int(NumberOfTPs)+1 ):
Tp = TestParticle() # Initialise the test particle
Tp.IdentifyResonance(IndexCount) # Identify its resonant / kozai status
Tp.PrintData(IndexCount) # print the results
print(TestParticleSample) # ensure it is done
|
python
|
# LPS22HB/HH pressure sensor MicroPython driver
# ver: 2.0
# License: MIT
# Author: shaoziyang ([email protected])
# v1.0 2016.4
# v2.0 2019.7
from micropython import const
LPS22_CTRL_REG1 = const(0x10)
LPS22_CTRL_REG2 = const(0x11)
LPS22_STATUS = const(0x27)
LPS22_TEMP_OUT_L = const(0x2B)
LPS22_PRESS_OUT_XL = const(0x28)
LPS22_PRESS_OUT_L = const(0x29)
class LPS22():
def __init__(self, i2c, addr = 0x5D):
self.i2c = i2c
self.addr = addr
self.tb = bytearray(1)
self.rb = bytearray(1)
self.oneshot = False
self.irq_v = [0, 0]
# ODR=1 EN_LPFP=1 BDU=1
self.setreg(LPS22_CTRL_REG1, 0x1A)
self.oneshot_mode(False)
def oneshot_mode(self, oneshot=None):
if oneshot is None:
return self.oneshot
else:
self.getreg(LPS22_CTRL_REG1)
self.oneshot = oneshot
if oneshot: self.rb[0] &= 0x0F
else: self.rb[0] |= 0x10
self.setreg(LPS22_CTRL_REG1, self.rb[0])
def int16(self, d):
return d if d < 0x8000 else d - 0x10000
def setreg(self, reg, dat):
self.tb[0] = dat
self.i2c.writeto_mem(self.addr, reg, self.tb)
def getreg(self, reg):
self.i2c.readfrom_mem_into(self.addr, reg, self.rb)
return self.rb[0]
def get2reg(self, reg):
return self.getreg(reg) + self.getreg(reg+1) * 256
def ONE_SHOT(self, b):
if self.oneshot:
self.setreg(LPS22_CTRL_REG2, self.getreg(LPS22_CTRL_REG2) | 0x01)
self.getreg(0x28 + b*2)
while 1:
if self.getreg(LPS22_STATUS) & b:
return
def temperature(self):
self.ONE_SHOT(2)
try:
return self.int16(self.get2reg(LPS22_TEMP_OUT_L))/100
except MemoryError:
return self.temperature_irq()
def pressure(self):
self.ONE_SHOT(1)
try:
return (self.getreg(LPS22_PRESS_OUT_XL) + self.get2reg(LPS22_PRESS_OUT_L) * 256)/4096
except MemoryError:
return self.pressure_irq()
def get(self):
try:
return self.temperature(), self.pressure()
except MemoryError:
return self.get_irq()
def altitude(self):
return (((1013.25 / self.pressure())**(1/5.257)) - 1.0) * (self.temperature() + 273.15) / 0.0065
def temperature_irq(self):
self.ONE_SHOT(2)
return self.int16(self.get2reg(LPS22_TEMP_OUT_L))//100
def pressure_irq(self):
self.ONE_SHOT(1)
return self.get2reg(LPS22_PRESS_OUT_L) >> 4
def get_irq(self):
self.irq_v[0] = self.temperature_irq()
self.irq_v[1] = self.pressure_irq()
return self.irq_v
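# Hypothetical usage on a generic MicroPython board; the I2C bus id and pin
# numbers are placeholders and board dependent:
# from machine import I2C, Pin
# i2c = I2C(1, scl=Pin(5), sda=Pin(4))
# sensor = LPS22(i2c)  # default I2C address 0x5D
# print(sensor.temperature(), sensor.pressure(), sensor.altitude())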
|
python
|
from anytree import Resolver, ChildResolverError, Walker
from src.tree_node import TreeNode
class AssetInfoTree:
def __init__(self, texture_info_list):
self.walker = Walker()
r = Resolver('name')
id = 0
self.root = TreeNode('Root', id)
id += 1
for info in texture_info_list:
respath = info.respath
cur_parent = self.root
cur_parent.filesize.bytes += info.filesize.bytes
cur_node = None
for chunk in respath.chunks:
try:
cur_node = r.get(cur_parent, chunk)
cur_parent = cur_node
except ChildResolverError:
cur_node = TreeNode(chunk, id, cur_parent)
cur_parent = cur_node
id += 1
if chunk == respath.chunks[-1]:
cur_node.horizontal_desc = str(info)
finally:
cur_node.filesize.bytes += info.filesize.bytes
def build_node_path(self, node):
path = self.walker.walk(self.root, node)
return '/'.join([tn.name for tn in path[2]])
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# https://www.redblobgames.com/articles/visibility/
# https://en.wikipedia.org/wiki/Art_gallery_problem
"Hover mouse to illuminate the visible area of the room"
from vedo import *
import numpy as np
rw = Rectangle((0,0), (1,1)).texture(dataurl+"textures/paper1.jpg")
tx = Text3D("эd", font="Comae").pickable(False)
tx.scale(0.475).pos(0.1, 0.2, 0.0002).c('green4')
d = rw.diagonalSize()
objs = [rw, tx]
mobjs = merge(objs).c('grey4').flat()
allpts = mobjs.points()
walls = mobjs.extrude(0.03, cap=False).z(-0.01).pickable(False).flat()
def func(evt):
p = evt.picked3d
if p is None:
return
pts1 = []
for q1 in allpts:
v = versor(q1-p)
e = cross(v, [0,0,1]) * 0.01 # a small epsilon shift
for k in (v, v+e, v-e):
ipts = walls.intersectWithLine(p, p+k*d, tol=1e-04)
n = len(ipts)
if n==1 or n>1 and mag(q1-ipts[0]) < 1e-04:
pts1.append(ipts[0])
pts2 = []
for pt in pts1:
angle = np.arctan2(pt[1]-p[1], pt[0]-p[0])
pts2.append([pt, angle])
pts = utils.sortByColumn(pts2,1)[:,0].tolist() # sort by angle
line = Line(pts, closed=True).z(0.01).lw(4).c('grey3')
surf = line.clone().triangulate().lw(0).c('yellow4').alpha(0.3)
surf.pickable(False)
area = Assembly(surf, line)
area.name = "Area"
plt.remove("Area").add(area)
plt = Plotter(bg2="light blue")
plt.addCallback("mouse hover", func)
plt.show(objs, walls, __doc__, zoom=1.1, elevation=-20).close()
|
python
|
import py
import random, sys, os
from rpython.jit.backend.ppc.codebuilder import BasicPPCAssembler, PPCBuilder
from rpython.jit.backend.ppc.regname import *
from rpython.jit.backend.ppc.register import *
from rpython.jit.backend.ppc import form
from rpython.jit.backend import detect_cpu
from rpython.jit.backend.ppc.arch import IS_PPC_32, IS_PPC_64, IS_BIG_ENDIAN
from rpython.jit.backend.ppc.arch import WORD
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper.annlowlevel import llhelper
cpu = detect_cpu.autodetect()
class TestDisassemble(object):
def test_match(self):
class A(BasicPPCAssembler):
insts = []
a = A()
a.add(1, 2, 3)
inst = a.insts[-1]
assert A.add.match(inst.assemble())
"""
Creates the boilerplate code for the tests.
- Make a PPCBuilder object
- Let the given test create the machine code
- Create a function and call it
- Compare the return value with the expected result
"""
def asmtest(expected):
def testmaker(test):
def newtest(self):
a = PPCBuilder()
test(self, a)
f = a.get_assembler_function()
assert f() == expected
return newtest
return testmaker
"""
Treats the given bitstring as binary representation
of an integer in two's complement.
"""
def bits_to_signed_int(bits):
assert len(bits) > 0
sign = 1
if bits[0] == "1":
sign = -1
bits = bits[1:].replace("0", "$").replace("1", "0").replace("$", "1")
return sign * (int(bits, 2) + 1)
def hex_to_signed_int(hx):
return bits_to_signed_int(bin(int(hx, 16))[2:])
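# Quick illustrative check: '11110110' is -10 in 8-bit two's complement.
# >>> bits_to_signed_int('11110110')
# -10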
# Testing simple assembler instructions
class TestAssemble(object):
def setup_class(cls):
if cpu not in ["ppc", "ppc64", "ppc-64"]:
py.test.skip("can't test all of ppcgen on non-PPC!")
#py.test.xfail("assemble does not return a function any longer, fix tests")
"""
Tests are build like this:
@asmtest(expected=<EXPECTED RESULT>)
def testX(self, assembler):
<Assembler Code>
This is equivalent to:
def testX(self):
assembler = MyPPCAssembler()
<Assembler Code>
f = assembler.assemble()
assert f() == <EXPECTED RESULT>
"""
@asmtest(expected=200)
def test_li(self, a):
a.li(3, 200)
a.blr()
@asmtest(expected=7)
def test_add_imm(self, a):
a.li(3, 6)
a.addi(3, 3, 1)
a.blr()
@asmtest(expected=12341234)
def test_load_imm(self, a):
a.load_imm(r10, 12341234)
a.mtctr(10)
a.mfctr(11)
a.mr(3, 11)
a.blr()
@asmtest(expected=33333333)
def test_add_reg(self, a):
a.load_imm(r10, 11111111)
a.load_imm(r11, 22222222)
a.add(12, 10, 11)
a.mr(3, 12)
a.blr()
@asmtest(expected=-1000)
def test_add_pos_and_neg(self, a):
a.load_imm(r10, 2000)
a.load_imm(r11, -3000)
a.add(3, 10, 11)
a.blr()
@asmtest(expected=7)
def test_sub_imm(self, a):
a.li(3, 10)
a.subi(3, 3, 3)
a.blr()
@asmtest(expected=(123435 - 76457))
def test_sub_reg(self, a):
a.load_imm(r5, 123435)
a.load_imm(r6, 76457)
a.sub(3, 5, 6)
a.blr()
@asmtest(expected=(10000 * 5000))
def test_mul_imm(self, a):
a.load_imm(r3, 10000)
a.mulli(3, 3, 5000)
a.blr()
# 1000000 * 1000000 = 0b1110100011010100101001010001000000000000
# expect: r3 = -HWORD-|11010100101001010001000000000000
@asmtest(expected=bits_to_signed_int('11010100101001010001000000000000'))
def test_mullw(self, a):
word = 1000000
a.load_imm(r5, word)
a.load_imm(r6, word)
a.mullw(3, 5, 6)
if IS_PPC_64:
a.extsw(3, 3)
a.blr()
# 1000000 * 1000000 = 0b1110100011010100101001010001000000000000
# expect: r3 = 11101000|------------LWORD--------------
@asmtest(expected=int('11101000', 2))
def test_mulhw(self, a):
word = 1000000
a.load_imm(r5, word)
a.load_imm(r6, word)
a.mulhw(3, 5, 6)
if IS_PPC_64:
a.extsw(3, 3)
a.blr()
# 1000000 * 1000000 = 0b1110100011010100101001010001000000000000
# expect: r3 = 11101000|------------LWORD--------------
@asmtest(expected=int('11101000', 2))
def test_mulhwu(self, a):
word = 1000000
a.load_imm(r5, word)
a.load_imm(r6, word)
a.mulhwu(3, 5, 6)
if IS_PPC_64:
a.extsw(3, 3)
a.blr()
@asmtest(expected=10000)
def test_divw(self, a):
        dividend = 1000000
        divisor = 100
        a.load_imm(r10, dividend)
a.load_imm(r11, divisor)
a.divw(3, 10, 11)
a.blr()
def test_call_function(self):
functype = lltype.Ptr(lltype.FuncType([lltype.Signed], lltype.Signed))
call_addr = rffi.cast(lltype.Signed, llhelper(functype, func))
a = PPCBuilder()
# NOW EXPLICITLY:
#
# - Load the address of the function to call into a register x
# - Move the content of this register x into CTR
# - Set the LR manually (or with bctrl)
# - Do jump
a.li(3, 50)
if IS_PPC_32:
a.load_imm(r10, call_addr)
elif IS_BIG_ENDIAN:
# load the 3-words descriptor
a.load_from_addr(r10, SCRATCH2, call_addr)
a.load_from_addr(r2, SCRATCH2, call_addr+WORD)
a.load_from_addr(r11, SCRATCH2, call_addr+2*WORD)
py.test.skip("this test started segfaulting on gcc110, but even "
"reverting to old versions of the code still segfault, "
"so not clue. Maybe something like a ctypes issue")
else:
# no descriptor on little-endian, but the ABI says r12 must
# contain the function pointer
a.load_imm(r10, call_addr)
a.mr(12, 10)
a.mtctr(10)
a.bctr()
a.blr()
f = a.get_assembler_function()
assert f() == 65
@asmtest(expected=0)
def test_and(self, a):
a.load_imm(r10, 8)
a.load_imm(r11, 7)
a.and_(3, 10, 11)
a.blr()
@asmtest(expected=15)
def test_or(self, a):
a.load_imm(r10, 8)
a.load_imm(r11, 7)
a.or_(3, 10, 11)
a.blr()
@asmtest(expected=15)
def test_nand(self, a):
a.load_imm(r10, 8)
a.load_imm(r11, 7)
a.nand(3, 10, 11)
a.load_imm(r12, 0x0000000F) # zero out first 28 bits
a.and_(3, 3, 12) #
a.blr()
@asmtest(expected=1)
def test_nor(self, a):
a.load_imm(r10, 10)
a.load_imm(r11, 6)
a.nor(3, 10, 11)
a.load_imm(r12, 0x0000000F) # zero out first 28 bits
a.and_(3, 3, 12) #
a.blr()
@asmtest(expected=5)
def test_xor(self, a):
a.load_imm(r10, 15)
a.load_imm(r11, 10)
a.xor(3, 10, 11)
a.blr()
@asmtest(expected=0x120)
def test_slw(self, a):
a.load_imm(r10, 9)
a.load_imm(r11, 5)
a.slw(3, 10, 11)
a.blr()
@asmtest(expected=9)
def test_srw(self, a):
a.load_imm(r10, 0x120)
a.load_imm(r11, 5)
a.srw(3, 10, 11)
a.blr()
def test_neg(self):
a = PPCBuilder()
a.load_imm(r10, 0x0000F0F0)
a.neg(3, 10)
a.blr()
f = a.get_assembler_function()
assert f() == hex_to_signed_int("FFFF0F10")
def test_load_and_store(self):
a = PPCBuilder()
word1 = 1000
word2 = 2000
p = lltype.malloc(rffi.CArray(lltype.Signed), 2, flavor="raw")
a.load_imm(r10, word1)
a.load_imm(r11, word2)
a.load_imm(r8, rffi.cast(lltype.Signed, p))
a.load_imm(r9, rffi.cast(lltype.Signed, p) + WORD)
a.stw(10, 8, 0)
a.stw(11, 9, 0)
a.lwz(4, 8, 0)
a.lwz(5, 9, 0)
a.add(3, 4, 5)
a.blr()
f = a.get_assembler_function()
assert f() == word1 + word2
lltype.free(p, flavor="raw")
def test_load_from(self):
a = PPCBuilder()
p = lltype.malloc(rffi.CArray(rffi.LONG), 1, flavor="raw")
addr = rffi.cast(lltype.Signed, p)
p[0] = rffi.cast(rffi.LONG, 200)
a.load_from_addr(r3, SCRATCH2, addr)
a.blr()
f = a.get_assembler_function()
assert f() == 200
p[0] = rffi.cast(rffi.LONG, 300)
assert f() == 300
lltype.free(p, flavor="raw")
def func(arg):
return arg + 15
def is_64_bit_arch():
import sys
return sys.maxint == 9223372036854775807
|
python
|
import discord
from utils import DIGITS
from utils.config import Users
from utils.discord import help_me, get_user, DiscordInteractive
from utils.errors import UserNonexistent, NoPlays
from utils.osu.apiTools import get_recent
from utils.osu.embedding import embed_play
from utils.osu.stating import stat_play
from utils.utils import Log
interact = DiscordInteractive.interact
class Command:
command = "recent"
description = "Show recent score or pass."
argsRequired = 0
usage = "[username]"
examples = [
{
"run": "recent nathan_on_osu",
"result": "Returns nathan on osu's most recent score."
},
{
"run": "recent3 respektive",
"result": "Returns respektive's most recent score."
},
{
"run": "recentpass",
"result": "Returns your most recent pass."
}]
synonyms = [r"recent\d+", "rs", "recentpass", "rp"]
async def call(self, package):
message, args, user_data, client = package["message_obj"], package["args"], \
package["user_obj"], package["client"]
if len(args) < 2 and user_data["osu_ign"] == "":
Log.error("No User provided")
await help_me(message, "ign-set")
return
try:
user = get_user(args, user_data["osu_ign"], "osu")
except UserNonexistent:
interact(message.channel.send, "User does not exist")
return
index = DIGITS.match(args[0])
if index is None:
index = 1
else:
index = int(index.captures(1)[0])
try:
recent_play = get_recent(user, index)
except NoPlays as err:
interact(message.channel.send, f"`{err}`")
Log.log(err)
return
try:
play_data = stat_play(recent_play)
except Exception as err:
interact(message.channel.send, err)
Log.error(err)
return
Users().update_last_message(message.author.id, recent_play.beatmap_id, "id",
recent_play.enabled_mods, play_data.completion, recent_play.accuracy, user,
play_data.replay)
embed = embed_play(play_data, client)
graph = discord.File(play_data.strain_bar, "strains_bar.png")
interact(message.channel.send, file=graph, embed=embed)
Log.log(f"Returning recent play #{index} for {user}")
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "Christian Heider Nielsen"
__doc__ = r"""
Created on 08-12-2020
"""
from itertools import tee
from typing import Any, Generator, Iterable
import tqdm
from notus.notification import JobNotificationSession
from warg import drop_unused_kws, passes_kws_to
__all__ = ["progress_bar"]
@drop_unused_kws
@passes_kws_to(tqdm.tqdm)
def progress_bar(
iterable: Iterable,
description: str = None,
*,
leave: bool = False,
notifications: bool = False,
total: int = None,
auto_total_generator: bool = False,
    auto_describe_iterator: bool = True,  # does not work if this function is called through an alias whose name does not match the "alias" argument
alias="progress_bar",
disable: bool = False,
**kwargs,
) -> Any:
"""
hint:
use next first and then later use send instead of next to set a new tqdm description if desired.
"""
if not disable:
if description is None and auto_describe_iterator:
from warg import get_first_arg_name
description = get_first_arg_name(alias)
if total is None and isinstance(iterable, Generator) and auto_total_generator:
iterable, ic = tee(iterable, 2)
total = len(list(ic))
if total == 0:
print(f"WARNING zero length iterable - {description}:{iterable}")
generator = tqdm.tqdm(
iterable,
description,
leave=leave,
total=total,
disable=disable, # redundant
**kwargs,
)
if notifications:
with JobNotificationSession(description):
for val in generator:
a = yield val
if a:
generator.set_description(a)
return
for val in generator:
a = yield val
if a:
generator.set_description(a)
else:
yield from iterable
if __name__ == "__main__":
def dsad3123():
""" """
from time import sleep
for a in progress_bar([2.13, 8921.9123, 923], notifications=False):
sleep(1)
def asd21sa():
""" """
from time import sleep
pb = progress_bar # Aliased!
for a in pb([2.13, 8921.9123, 923], notifications=False):
sleep(1)
def dict_items():
""" """
from time import sleep
class exp_v:
Test_Sets = {v: v for v in range(9)}
for a in progress_bar(exp_v.Test_Sets.items()):
sleep(1)
def send_example():
from itertools import count
pb = progress_bar(count())
next(pb)
for a in range(100000):
pb.send(f"step_{a}")
# dsad3123()
# asd21sa()
# dict_items()
send_example()
|
python
|
from util.tipo import tipo
class S_PARTY_MEMBER_CHANGE_MP(object):
def __init__(self, tracker, time, direction, opcode, data):
print(str(type(self)).split('.')[3]+'('+str(len(data))+'): '+ str(data.get_array_hex(1))[1:-1])
server_id = data.read(tipo.uint32)
player_id = data.read(tipo.uint32)
current_mp = data.read(tipo.int32)
max_mp = data.read(tipo.int32)
unk1 = data.read(tipo.int16)
|
python
|
import os
from jina.peapods.pods.k8slib.kubernetes_tools import _get_yaml
from jina import Flow
def test_custom_resource_dir():
custom_resource_dir = '/test'
flow = Flow(
name='test-flow', port_expose=8080, infrastructure='K8S', protocol='http'
).add(name='test_executor', k8s_custom_resource_dir=custom_resource_dir)
assert (
flow._pod_nodes['test_executor'].args.k8s_custom_resource_dir
== custom_resource_dir
)
def test_no_resource_dir_specified():
flow = Flow(
name='test-flow', port_expose=8080, infrastructure='K8S', protocol='http'
).add(name='test_executor')
assert flow._pod_nodes['test_executor'].args.k8s_custom_resource_dir is None
def test_template_file_read_correctly(test_dir: str):
custom_resource_dir = os.path.join(test_dir, 'custom-resource')
content = _get_yaml('namespace', params={}, custom_resource_dir=custom_resource_dir)
assert 'Test' in content
|
python
|
from models import OrderModel
from flask_sqlalchemy import sqlalchemy
from config import db
import uuid
class OrderActions():
# Table actions:
@classmethod
def create(cls, usastate: str, order_number: int, home_office_code: str, order_status:int):
theUuid = str(uuid.uuid4())
new_order = OrderModel(theUuid, usastate, order_number,home_office_code,order_status)
db.session.add(new_order)
db.session.commit()
return new_order
@ classmethod
def get(cls):
orders = OrderModel.query.all()
return {"orders": [{"order_number": i.order_number, "uuid": i.uuid, "usa_state": i.usa_state, "home_office_code": i.home_office_code}
for i in orders]}
# @ classmethod
# def get_state(cls, state):
# return db.session.query.filter(OrderModel.state == state)
@ classmethod
def get_order_by_order_number(cls, order_number):
order = OrderModel.query.filter(OrderModel.order_number == order_number).first()
return order
@ classmethod
def get_order_by_uuid(cls, uuid):
# Return OrderModel object for use by backend
order = OrderModel.query.filter(OrderModel.uuid == uuid).first()
return order
@ classmethod
def get_home_office_code(cls, home_office_code):
return OrderModel.query.filter(OrderModel.home_office_code == home_office_code)
@ classmethod
def update_order(cls, uuid, usa_state, order_number , home_office_code):
order = cls.get_order_by_uuid(uuid)
order.order_number = order_number
order.usa_state = usa_state
order.home_office_code = home_office_code
db.session.commit()
return order
|
python
|
from io import open
from setuptools import find_packages, setup, Extension
from setuptools.command.build_ext import build_ext
import os
import re
import sys
import shutil
from distutils.version import LooseVersion
import time
import subprocess
from subprocess import check_output
cwd = os.path.dirname(os.path.abspath(__file__))
try:
filepath = './neural_compressor/version.py'
with open( filepath ) as version_file:
__version__ ,= re.findall( '__version__ = "(.*)"', version_file.read() )
except Exception as error:
    assert False, "Error: Could not open '%s' due to %s\n" % (filepath, error)
def which(thefile):
path = os.environ.get("PATH", os.defpath).split(os.pathsep)
for d in path:
fname = os.path.join(d, thefile)
fnames = [fname]
if sys.platform == 'win32':
exts = os.environ.get('PATHEXT', '').split(os.pathsep)
fnames += [fname + ext for ext in exts]
for name in fnames:
if os.access(name, os.F_OK | os.X_OK) and not os.path.isdir(name):
return name
return None
def get_version(cmd):
"Returns cmake version."
try:
for line in check_output([cmd, '--version']).decode('utf-8').split('\n'):
if 'version' in line:
print(line.strip().split(' ')[2])
return LooseVersion(line.strip().split(' ')[2])
except Exception as error:
return LooseVersion('0.0.0')
def get_cmake_command():
"Returns cmake command."
cmake_command = 'cmake'
if sys.platform == 'win32':
return cmake_command
cmake3 = which('cmake3')
cmake = which('cmake')
if cmake3 is not None:
if cmake is not None:
bare_version = get_version('cmake')
if (bare_version < LooseVersion("3.12.0") and
get_version('cmake3') > bare_version):
cmake_command = 'cmake3'
else:
cmake_command = 'cmake3'
elif cmake is None:
raise RuntimeError('no cmake or cmake3 found')
return cmake_command
class build_ext(build_ext):
def build_extension(self, ext):
if not sys.platform.startswith("win"):
import pathlib
cwd = pathlib.Path().absolute()
build_temp = pathlib.Path(self.build_temp)
build_temp.mkdir(parents=True, exist_ok=True)
extdir = pathlib.Path(self.get_ext_fullpath(ext.name))
executable_path = extdir.parent.absolute()
cmake_args = [
'-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + str(extdir.parent.absolute()),
'-DPYTHON_EXECUTABLE={}'.format(sys.executable)
]
build_args = [
'-j'
]
cmake_command = get_cmake_command()
os.chdir(str(build_temp))
self.spawn([cmake_command, ext.sourcedir] + cmake_args)
self.spawn(['make'] + build_args)
if os.path.exists('inferencer'):
shutil.copy('inferencer', executable_path)
os.chdir(str(cwd))
else:
print("Engine is not support windows for now")
class CMakeExtension(Extension):
def __init__(self, name, sourcedir=""):
Extension.__init__(self, name, sources=[])
self.sourcedir = os.path.abspath(sourcedir)
def check_submodules():
def check_for_files(folder, files):
if not any(os.path.exists(os.path.join(folder, f)) for f in files):
report("Could not find any of {} in {}".format(", ".join(files), folder))
report("Did you run 'git submodule update --init --recursive'?")
sys.exit(1)
def not_exists_or_empty(folder):
return not os.path.exists(folder) or (os.path.isdir(folder) and len(os.listdir(folder)) == 0)
git_modules_path = os.path.join(cwd, ".gitmodules")
with open(git_modules_path) as f:
folders = [os.path.join(cwd, line.split("=", 1)[1].strip()) for line in
f.readlines() if line.strip().startswith("path")]
# If none of the submodule folders exists, try to initialize them
if all(not_exists_or_empty(folder) for folder in folders) and not sys.platform.startswith("win"):
try:
print(' --- Trying to initialize submodules')
start = time.time()
subprocess.check_call(["git", "submodule", "update", "--init", "--recursive"], cwd=cwd)
end = time.time()
print(' --- Submodule initialization took {:.2f} sec'.format(end - start))
except Exception:
            print(' --- Submodule initialization failed')
print('Please run:\n\tgit submodule update --init --recursive')
sys.exit(1)
if __name__ == '__main__':
check_submodules()
setup(
name="neural_compressor",
version=__version__,
author="Intel MLP/MLPC Team",
author_email="[email protected], [email protected], [email protected], [email protected], [email protected], [email protected], [email protected]",
description="Repository of Intel® Neural Compressor",
long_description=open("README.md", "r", encoding='utf-8').read(),
long_description_content_type="text/markdown",
keywords='quantization, auto-tuning, post-training static quantization, post-training dynamic quantization, quantization-aware training, tuning strategy',
license='',
url="https://github.com/intel/neural-compressor",
ext_modules=[CMakeExtension("engine_py", str(cwd) + '/engine/executor/')],
packages = find_packages(),
include_package_data = True,
package_dir = {'':'.'},
package_data={
'': ['*.py', '*.yaml'],
'neural_compressor.ux': [
"web/static/*.*",
"web/static/assets/*.*",
"web/static/assets/fonts/*.*",
"utils/configs/*.json",
"utils/configs/predefined_configs/**/*.yaml",
"utils/templates/*.txt",
],
'engine': ['*.py'],
},
cmdclass={
'build_ext': build_ext,
},
install_requires=[
'numpy', 'pyyaml', 'scikit-learn', 'schema', 'py-cpuinfo', 'hyperopt', 'pandas', 'pycocotools', 'opencv-python',
'requests', 'Flask-Cors', 'Flask-SocketIO', 'Flask', 'gevent-websocket', 'gevent', 'psutil', 'Pillow', 'sigopt',
'prettytable', 'cryptography'],
scripts=['neural_compressor/ux/bin/inc_bench', 'engine/bin/inferencer'],
python_requires='>=3.6.0',
classifiers=[
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
)
|
python
|
#
# PySNMP MIB module IBMIROCAUTH-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/IBMIROCAUTH-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:40:06 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, NotificationType, NotificationType, Integer32, Counter32, TimeTicks, Gauge32, ModuleIdentity, enterprises, Unsigned32, IpAddress, iso, MibIdentifier, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "NotificationType", "NotificationType", "Integer32", "Counter32", "TimeTicks", "Gauge32", "ModuleIdentity", "enterprises", "Unsigned32", "IpAddress", "iso", "MibIdentifier", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits")
TruthValue, DisplayString, RowStatus, TestAndIncr, TextualConvention, AutonomousType, PhysAddress = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "DisplayString", "RowStatus", "TestAndIncr", "TextualConvention", "AutonomousType", "PhysAddress")
ibmIROCconfigAuth = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2))
ibm = MibIdentifier((1, 3, 6, 1, 4, 1, 2))
ibmProd = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6))
ibm2210 = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 72))
ibmIROC = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119))
ibmIROCconfig = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7))
ibmAuthTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 0))
ibmAuthMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1))
ibmAuthDomains = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 2))
ibmAuthConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 3))
ibmAuthGeneral = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 1))
authCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 3, 1))
authGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 3, 2))
class RowDefinition(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 3, 4, 5, 6))
namedValues = NamedValues(("active", 1), ("notReady", 3), ("createAndGo", 4), ("createAndWait", 5), ("destroy", 6))
class Enabled(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
namedValues = NamedValues(("disabled", 0), ("enabled", 1))
class DateAndTime2(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 11)
class SecureOctetString(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 65535)
class SecureDisplayString(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 65535)
class SecureRowDefinition(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 65535)
authUserProfileTable = MibTable((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2), )
if mibBuilder.loadTexts: authUserProfileTable.setStatus('mandatory')
authUserProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1), ).setIndexNames((1, "IBMIROCAUTH-MIB", "authUserProfileName"))
if mibBuilder.loadTexts: authUserProfileEntry.setStatus('mandatory')
authUserProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64)))
if mibBuilder.loadTexts: authUserProfileName.setStatus('mandatory')
authUserProfileRowDefinition = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 2), SecureRowDefinition()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileRowDefinition.setStatus('mandatory')
authUserProfilePassword = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 3), SecureDisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfilePassword.setStatus('mandatory')
authUserProfileType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="20")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileType.setStatus('mandatory')
authUserProfileMaxConnectTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileMaxConnectTime.setStatus('mandatory')
authUserProfileCallbackType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("disabled", 0), ("roaming", 1), ("required", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileCallbackType.setStatus('mandatory')
authUserProfileCallbackNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileCallbackNum.setStatus('mandatory')
authUserProfileDialout = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 8), Enabled().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileDialout.setStatus('mandatory')
authUserProfileEncryptionKey = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 9), SecureOctetString().clone(hexValue="")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileEncryptionKey.setStatus('mandatory')
authUserProfileStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2), ("locked", 3))).clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileStatus.setStatus('mandatory')
authUserProfileExpirationDate = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 11), DateAndTime2().clone(hexValue="")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileExpirationDate.setStatus('mandatory')
authUserProfileGLoginAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileGLoginAllowed.setStatus('mandatory')
authUserProfileGLoginsAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: authUserProfileGLoginsAttempts.setStatus('mandatory')
authUserProfileLoginAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: authUserProfileLoginAttempts.setStatus('mandatory')
authUserProfileLoginFails = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: authUserProfileLoginFails.setStatus('mandatory')
authUserProfileLoginLock = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: authUserProfileLoginLock.setStatus('mandatory')
authUserProfileIpType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 3, 4))).clone(namedValues=NamedValues(("disabled", 0), ("single", 1), ("networkDials", 3), ("singleDials", 4))).clone('single')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileIpType.setStatus('mandatory')
authUserProfileIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 18), IpAddress().clone('0.0.0.0')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileIpAddr.setStatus('mandatory')
authUserProfileIpMask = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 19), IpAddress().clone('255.255.255.255')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileIpMask.setStatus('mandatory')
authUserProfileHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 20), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileHostName.setStatus('mandatory')
authUserProfileSharedSecurity = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 21), SecureDisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileSharedSecurity.setStatus('mandatory')
authUserProfileTunneled = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 22), Enabled().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileTunneled.setStatus('mandatory')
authUserProfileTunnelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("l2tp", 3))).clone('l2tp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileTunnelType.setStatus('mandatory')
authUserProfileTunnelMediumType = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("ip", 1))).clone('ip')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileTunnelMediumType.setStatus('mandatory')
authUserProfileTunnelServer = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 25), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileTunnelServer.setStatus('mandatory')
authUserProfileVcEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 26), Enabled().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileVcEnabled.setStatus('mandatory')
authUserProfileVcMaxSuspendTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647)).clone(-1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileVcMaxSuspendTime.setStatus('mandatory')
authUserProfileVcIdleTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 1, 2, 1, 28), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647)).clone(-1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: authUserProfileVcIdleTime.setStatus('mandatory')
authUserProfileGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 3, 2, 1))
authUserProfileCompliance = MibIdentifier((1, 3, 6, 1, 4, 1, 2, 6, 119, 7, 2, 3, 1, 1))
mibBuilder.exportSymbols("IBMIROCAUTH-MIB", authUserProfileEncryptionKey=authUserProfileEncryptionKey, authUserProfileLoginFails=authUserProfileLoginFails, authUserProfileVcEnabled=authUserProfileVcEnabled, authUserProfilePassword=authUserProfilePassword, authUserProfileGLoginAllowed=authUserProfileGLoginAllowed, ibmAuthDomains=ibmAuthDomains, SecureOctetString=SecureOctetString, ibm2210=ibm2210, ibmIROCconfig=ibmIROCconfig, ibmIROC=ibmIROC, ibmAuthGeneral=ibmAuthGeneral, authUserProfileVcMaxSuspendTime=authUserProfileVcMaxSuspendTime, authUserProfileHostName=authUserProfileHostName, authUserProfileLoginLock=authUserProfileLoginLock, authUserProfileTable=authUserProfileTable, authUserProfileEntry=authUserProfileEntry, DateAndTime2=DateAndTime2, ibmProd=ibmProd, authUserProfileSharedSecurity=authUserProfileSharedSecurity, authUserProfileLoginAttempts=authUserProfileLoginAttempts, authUserProfileExpirationDate=authUserProfileExpirationDate, authUserProfileStatus=authUserProfileStatus, ibmAuthMIB=ibmAuthMIB, ibmIROCconfigAuth=ibmIROCconfigAuth, authUserProfileCallbackNum=authUserProfileCallbackNum, authUserProfileIpMask=authUserProfileIpMask, authUserProfileName=authUserProfileName, ibm=ibm, authUserProfileCallbackType=authUserProfileCallbackType, authUserProfileVcIdleTime=authUserProfileVcIdleTime, authUserProfileTunnelServer=authUserProfileTunnelServer, authUserProfileRowDefinition=authUserProfileRowDefinition, authUserProfileType=authUserProfileType, authUserProfileMaxConnectTime=authUserProfileMaxConnectTime, Enabled=Enabled, authCompliances=authCompliances, authGroups=authGroups, RowDefinition=RowDefinition, SecureDisplayString=SecureDisplayString, authUserProfileIpAddr=authUserProfileIpAddr, authUserProfileGroup=authUserProfileGroup, authUserProfileIpType=authUserProfileIpType, authUserProfileTunneled=authUserProfileTunneled, ibmAuthTraps=ibmAuthTraps, SecureRowDefinition=SecureRowDefinition, authUserProfileDialout=authUserProfileDialout, authUserProfileTunnelMediumType=authUserProfileTunnelMediumType, authUserProfileTunnelType=authUserProfileTunnelType, authUserProfileCompliance=authUserProfileCompliance, ibmAuthConformance=ibmAuthConformance, authUserProfileGLoginsAttempts=authUserProfileGLoginsAttempts)
|
python
|
import functools
import numpy as np
from itertools import groupby
import cv2
import torch
from torch import nn
from torch.nn import init
from torch.optim import lr_scheduler
from networks.block import AdaptiveInstanceNorm2d, Identity, AdaptiveInstanceLayerNorm2d, InstanceLayerNorm2d
from lib.alphabet import word_capitalize
from PIL import Image, ImageDraw, ImageFont
def init_weights(net, init_type='normal', init_gain=0.02):
"""Initialize network weights.
Parameters:
net (network) -- network to be initialized
        init_type (str)    -- the name of an initialization method: N02 (normal) | glorot / xavier | kaiming | ortho
init_gain (float) -- scaling factor for normal, xavier and orthogonal.
We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might
work better for some applications. Feel free to try yourself.
"""
def init_func(m): # define the initialization function
if (isinstance(m, nn.Conv2d)
or isinstance(m, nn.Linear)
or isinstance(m, nn.Embedding)):
if init_type == 'N02':
init.normal_(m.weight.data, 0.0, init_gain)
elif init_type in ['glorot', 'xavier']:
init.xavier_normal_(m.weight.data, gain=init_gain)
elif init_type == 'kaiming':
init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
elif init_type == 'ortho':
init.orthogonal_(m.weight.data, gain=init_gain)
else:
raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
if init_type in ['N02', 'glorot', 'xavier', 'kaiming', 'ortho']:
print('initialize network {} with {}'.format(net.__class__.__name__, init_type))
net.apply(init_func) # apply the initialization function <init_func>
return net
def get_norm_layer(norm='in', **kwargs):
"""Return a normalization layer
Parameters:
        norm (str) -- the name of the normalization layer: bn | gn | in | adain | iln | adailn | none
For BatchNorm, we use learnable affine parameters and track running statistics (mean/stddev).
For InstanceNorm, we do not use learnable affine parameters. We do not track running statistics.
"""
if norm == 'bn':
norm_layer = functools.partial(nn.BatchNorm2d)
elif norm == 'gn':
norm_layer = functools.partial(nn.GroupNorm)
elif norm == 'in':
norm_layer = functools.partial(nn.InstanceNorm2d)
elif norm == 'adain':
norm_layer = functools.partial(AdaptiveInstanceNorm2d)
elif norm == 'iln':
norm_layer = functools.partial(InstanceLayerNorm2d)
elif norm == 'adailn':
norm_layer = functools.partial(AdaptiveInstanceLayerNorm2d)
elif norm == 'none':
def norm_layer(x): return Identity()
else:
assert 0, "Unsupported normalization: {}".format(norm)
return norm_layer
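# Illustrative usage (an assumption, not from the original code): get_norm_layer('in')(64)
# builds nn.InstanceNorm2d(64), while get_norm_layer('none')(64) returns an Identity()
# module, so callers can construct layers the same way whichever norm is selected.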
def get_linear_scheduler(optimizer, start_decay_iter, n_iters_decay):
def lambda_rule(iter):
lr_l = 1.0 - max(0, iter - start_decay_iter) / float(n_iters_decay + 1)
return lr_l
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
return scheduler
def get_scheduler(optimizer, opt):
"""Return a learning rate scheduler
Parameters:
optimizer -- the optimizer of the network
opt (option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions.
opt.lr_policy is the name of learning rate policy: linear | step | plateau | cosine
For 'linear', we keep the same learning rate for the first <opt.n_epochs> epochs
and linearly decay the rate to zero over the next <opt.n_epochs_decay> epochs.
For other schedulers (step, plateau, and cosine), we use the default PyTorch schedulers.
See https://pytorch.org/docs/stable/optim.html for more details.
"""
if opt.lr_policy == 'linear':
def lambda_rule(epoch):
lr_l = 1.0 - max(0, epoch - opt.start_decay_epoch) / float(opt.n_epochs_decay + 1)
return lr_l
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
elif opt.lr_policy == 'step':
scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1)
elif opt.lr_policy == 'plateau':
scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
elif opt.lr_policy == 'cosine':
scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.n_epochs, eta_min=0)
else:
return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy)
return scheduler
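# Worked example for the 'linear' policy (numbers are illustrative, not from the code):
# with opt.start_decay_epoch = 100 and opt.n_epochs_decay = 50, lambda_rule keeps the
# factor at 1.0 up to epoch 100, then decays it linearly: epoch 125 -> ~0.51, epoch 150 -> ~0.02.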
def _len2mask(length, max_len, dtype=torch.float32):
assert len(length.shape) == 1, 'Length shape should be 1 dimensional.'
max_len = max_len or length.max().item()
mask = torch.arange(max_len, device=length.device,
dtype=length.dtype).expand(len(length), max_len) < length.unsqueeze(1)
if dtype is not None:
mask = torch.as_tensor(mask, dtype=dtype, device=length.device)
return mask
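# Illustrative example (not part of the original code):
# _len2mask(torch.tensor([1, 3]), 4) returns [[1., 0., 0., 0.], [1., 1., 1., 0.]],
# i.e. one row per sequence with ones up to each sequence length.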
def get_init_state(deepth, batch_size, hidden_dim, device, bidirectional=False):
"""Get cell states and hidden states."""
if bidirectional:
deepth *= 2
hidden_dim //= 2
h0_encoder_bi = torch.zeros(
deepth,
batch_size,
hidden_dim, requires_grad=False)
c0_encoder_bi = torch.zeros(
deepth,
batch_size,
hidden_dim, requires_grad=False)
return h0_encoder_bi.to(device), c0_encoder_bi.to(device)
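# Illustrative call (assumed shapes): get_init_state(2, 4, 256, "cpu", bidirectional=True)
# returns (h0, c0), each of shape (4, 4, 128): the depth doubles and the hidden size halves
# in the bidirectional case.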
def _info(model, detail=False, ret=False):
nParams = sum([p.nelement() for p in model.parameters()])
mSize = nParams * 4.0 / 1024 / 1024
res = "*%-12s param.: %dK Stor.: %.4fMB" % (type(model).__name__, nParams / 1000, mSize)
if detail:
res += '\r\n' + str(model)
if ret:
return res
else:
print(res)
def _info_simple(model, tag=None):
nParams = sum([p.nelement() for p in model.parameters()])
mSize = nParams * 4.0 / 1024 / 1024
if tag is None:
tag = type(model).__name__
res = "%-12s P:%6dK S:%8.4fMB" % (tag, nParams / 1000, mSize)
return res
def set_requires_grad(nets, requires_grad=False):
"""Set requires_grad=False for all the networks to avoid unnecessary computations
Parameters:
nets (network list) -- a list of networks
requires_grad (bool) -- whether the networks require gradients or not
"""
if not isinstance(nets, list):
nets = [nets]
for net in nets:
if net is not None:
for param in net.parameters():
param.requires_grad = requires_grad
def idx_to_words(idx, lexicon, capitize_ratio=0.5):
words = []
for i in idx:
word = lexicon[i]
if np.random.random() < capitize_ratio:
word = word_capitalize(word)
words.append(word)
return words
def pil_text_img(im, text, pos, color=(255, 0, 0), textSize=25):
img_PIL = Image.fromarray(cv2.cvtColor(im, cv2.COLOR_BGR2RGB))
font = ImageFont.truetype('font/arial.ttf', textSize)
fillColor = color # (255,0,0)
position = pos # (100,100)
draw = ImageDraw.Draw(img_PIL)
draw.text(position, text, font=font, fill=fillColor)
img = cv2.cvtColor(np.asarray(img_PIL), cv2.COLOR_RGB2BGR)
return img
def words_to_images(texts, img_h, img_w, n_channel=1):
n_channel = 3
word_imgs = np.zeros((len(texts), img_h, img_w, n_channel)).astype(np.uint8)
for i in range(len(texts)):
# cv2.putText(word_imgs[i], texts[i], (2, 29), cv2.FONT_HERSHEY_SIMPLEX, 0.5, 255, 2)
word_imgs[i] = pil_text_img(word_imgs[i], texts[i], (1, 1), textSize=25)
word_imgs = word_imgs.sum(axis=-1, keepdims=True).astype(np.uint8)
word_imgs = torch.from_numpy(word_imgs).permute([0, 3, 1, 2]).float() / 128 - 1
return word_imgs
def ctc_greedy_decoder(probs_seq, blank_index=0):
"""CTC greedy (best path) decoder.
Path consisting of the most probable tokens are further post-processed to
remove consecutive repetitions and all blanks.
:param probs_seq: 2-D list of probabilities over the vocabulary for each
character. Each element is a list of float probabilities
for one character.
:type probs_seq: list
    :param blank_index: index of the CTC blank token to be removed (default 0).
    :type blank_index: int
    :return: decoded list of token indexes.
    :rtype: list
"""
# argmax to get the best index for each time step
max_index_list = list(np.array(probs_seq).argmax(axis=1))
# remove consecutive duplicate indexes
index_list = [index_group[0] for index_group in groupby(max_index_list)]
# remove blank indexes
# blank_index = len(vocabulary)
index_list = [index for index in index_list if index != blank_index]
# convert index list to string
return index_list
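# Illustrative trace (assumed input): with blank_index=0, a per-step argmax sequence of
# [1, 1, 0, 2, 2, 0, 1] collapses consecutive repeats to [1, 0, 2, 0, 1], then drops the
# blanks, so the decoder returns [1, 2, 1].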
def make_one_hot(labels, len_labels, n_class):
one_hot = torch.zeros((labels.shape[0], labels.shape[1], n_class), dtype=torch.float32)
for i in range(len(labels)):
one_hot[i, np.array(range(len_labels[i])), labels[i,:len_labels[i]]-1]=1
return one_hot
def rand_clip(imgs, img_lens, min_clip_width=64):
device = imgs.device
imgs, img_lens = imgs.cpu().numpy(), img_lens.cpu().numpy()
clip_imgs, clip_img_lens = [], []
for img, img_len in zip(imgs, img_lens):
if img_len <= min_clip_width:
clip_imgs.append(img[:, :, :img_len])
clip_img_lens.append(img_len)
else:
crop_width = np.random.randint(min_clip_width, img_len)
crop_width = crop_width - crop_width % (min_clip_width // 4)
rand_pos = np.random.randint(0, img_len - crop_width)
clip_img = img[:, :, rand_pos: rand_pos + crop_width]
clip_imgs.append(clip_img)
clip_img_lens.append(clip_img.shape[-1])
max_img_len = max(clip_img_lens)
pad_imgs = -np.ones((imgs.shape[0], 1, imgs.shape[2], max_img_len))
for i, (clip_img, clip_img_len) in enumerate(zip(clip_imgs, clip_img_lens)):
pad_imgs[i, 0, :, :clip_img_len] = clip_img
return torch.from_numpy(pad_imgs).float().to(device), torch.Tensor(clip_img_lens).int().to(device)
|
python
|
import logging
import pytest
import csv
from ocs_ci.ocs import constants, scale_noobaa_lib
from ocs_ci.framework.testlib import scale, E2ETest
from ocs_ci.ocs.resources.objectconfigfile import ObjectConfFile
from ocs_ci.utility.utils import ocsci_log_path
from ocs_ci.ocs.utils import oc_get_all_obc_names
log = logging.getLogger(__name__)
@pytest.fixture(autouse=True)
def teardown(request):
def finalizer():
scale_noobaa_lib.cleanup(constants.OPENSHIFT_STORAGE_NAMESPACE)
request.addfinalizer(finalizer)
@scale
class TestScaleOCBCreateDelete(E2ETest):
"""
OBC scale creation and deletion using Multi cloud Object Gateway
* Creating up to max support number of OBCs, capture creation time in sec.
* Deleting OBCs and capture deleting time in sec.
"""
namespace = constants.OPENSHIFT_STORAGE_NAMESPACE
scale_obc_count = 500
num_obc_batch = 50
@pytest.mark.polarion_id("OCS-2667")
def test_scale_obc_create_delete_time(self, tmp_path):
"""
MCG OBC creation and deletion using Noobaa MCG storage class
"""
log.info(
f"Start creating {self.scale_obc_count} "
f"OBCs in a batch of {self.num_obc_batch}"
)
obc_create = dict()
obc_delete = dict()
for i in range(int(self.scale_obc_count / self.num_obc_batch)):
obc_dict_list = (
scale_noobaa_lib.construct_obc_creation_yaml_bulk_for_kube_job(
no_of_obc=self.num_obc_batch,
sc_name=constants.NOOBAA_SC,
namespace=self.namespace,
)
)
# Create job profile
job_file = ObjectConfFile(
name="job_profile",
obj_dict_list=obc_dict_list,
project=self.namespace,
tmp_path=tmp_path,
)
# Create kube_job
job_file.create(namespace=self.namespace)
# Check all the OBCs to reach Bound state
obc_bound_list = (
scale_noobaa_lib.check_all_obc_reached_bound_state_in_kube_job(
kube_job_obj=job_file,
namespace=self.namespace,
no_of_obc=self.num_obc_batch,
)
)
log.info(f"Number of OBCs in Bound state {len(obc_bound_list)}")
# Measure obc creation and deletion time
obc_creation_time = scale_noobaa_lib.measure_obc_creation_time(
obc_name_list=obc_bound_list
)
obc_create.update(obc_creation_time)
# Delete all obcs in a batch
obc_name_list = list(oc_get_all_obc_names())
new_list = [
            obc_name_list[i : i + self.num_obc_batch]
for i in range(0, len(obc_name_list), self.num_obc_batch)
]
for i in range(len(new_list)):
scale_noobaa_lib.cleanup(self.namespace, obc_count=new_list[i])
obc_deletion_time = scale_noobaa_lib.measure_obc_deletion_time(
obc_name_list=new_list[i]
)
obc_delete.update(obc_deletion_time)
# Store obc creation time on csv file
log_path = f"{ocsci_log_path()}/obc-creation"
with open(f"{log_path}-{constants.NOOBAA_SC}.csv", "w") as fd:
csv_obj = csv.writer(fd)
for k, v in obc_create.items():
csv_obj.writerow([k, v])
log.info(f"OBC creation data present in {log_path}-{constants.NOOBAA_SC}.csv")
# Store obc deletion time on csv file
log_path = f"{ocsci_log_path()}/obc-deletion"
with open(f"{log_path}-{constants.NOOBAA_SC}.csv", "w") as fd:
csv_obj = csv.writer(fd)
            for k, v in obc_delete.items():
csv_obj.writerow([k, v])
log.info(f"OBC deletion data present in {log_path}-{constants.NOOBAA_SC}.csv")
|
python
|
import paramUtils
import argparse
import metricsJsonUtils
import os
import json
# Generate a csv file for Design Explorer (DEX)
# from the input parameters and output
# metrics.
# Example output csv file for Design Explorer
"""
in:Inlet Velocity,in:Jet Velocity,in:Pannel Height,out:PMV (person ave),out:PMV (room ave),out:T (person ave),out:T (room ave),out:DR (person ave),out:DR (room ave),out:U (person ave),out:U (room ave),img:T_jetSection.png,img:T_personSection.png,img:U_jetSection.png,img:U_personSection.png
5,0,0.04,-0.74,-0.803,297.381,297.19,72.27,74.74,0.15,0.042,../pngs/0/out_sliceT_jet.png,../pngs/0/out_sliceT_person.png,../pngs/0/out_sliceUMag_jet.png,../pngs/0/out_sliceUMag_person.png
5,0,0.5,-0.693,-0.682,297.258,297.30,67.95,67.45,0.18,0.022,../pngs/1/out_sliceT_jet.png,../pngs/1/out_sliceT_person.png,../pngs/1/out_sliceUMag_jet.png,../pngs/1/out_sliceUMag_person.png
5,20,0.04,-0.7,-0.807,297.437,297.32,71.66,70.22,0.14,0.040,../pngs/2/out_sliceT_jet.png,../pngs/2/out_sliceT_person.png,../pngs/2/out_sliceUMag_jet.png,../pngs/2/out_sliceUMag_person.png
5,20,0.5,0.381,0.326,297.851,297.737,61.59,67.84,0.20,0.024,../pngs/3/out_sliceT_jet.png,../pngs/3/out_sliceT_person.png,../pngs/3/out_sliceUMag_jet.png,../pngs/3/out_sliceUMag_person.png
"""
# Parse inputs
parser = argparse.ArgumentParser(
description='Generate a csv file from result files for Design Explorer (DEX)')
parser.add_argument("caseListFile",
help='The address of a file listing the parameter names and values '
'for each simulation case per line.')
parser.add_argument("kpiFile",
help="The address of a json file used for specifying the output "
"metrics and images for Metrics Extraction (MEX) Python library")
parser.add_argument("basePath", help="The path where the DEX csv and html will be put")
parser.add_argument("DEX_CSVFile", help="The address of the DEX csv file to be generated")
parser.add_argument("--casesList_paramValueDelimiter", default=',',
help='The delimiter between parameter names and parameter values in '
'<caseListFile> (default:",")')
parser.add_argument("--casesList_paramsDelimiter", default=',',
help='The delimiter to separate parameter/value pairs from each other in '
'<caseListFile> (default:",")')
parser.add_argument('--excludeParams', default='',
help='A comma separated list specifying the parameters to exclude '
'from inputs or outputs from in DEX. The default is empty.')
parser.add_argument('--includeOutputParamsFile', default='',
help='A file specifying the desired outputs to include as output '
'parameters in DEX. By default all statistics specified in the '
'kpiFile are included as output parameters in DEX')
parser.add_argument('--imagesDirectory', default='',
help='The path to output image directories relative to basePath. '
'The image path can also be provided in the kpi file (with the '
'"imageName" field). Either way, the case number in the path, '
'if any, should be replaced by {:d}. For example, '
'"outputs/imgs_{:03d}" indicates that the images are in '
'outputs/imgs_000/, outputs/imgs_0001/, ... '
'(default:"")')
parser.add_argument('--MEXCsvPathTemplate', default='',
help='The path of csv files generated by MEX. The case number in the '
'path should be replaced by {:d}. For example, '
'"outputs/case{:03d}/metrics.csv" indicates that the MEX csv '
'files are: outputs/case000/metrics.csv, '
'outputs/case001/metrics.csv, ... '
'(default:"", which only works if no output is required from'
' the MEX csv files)')
args = parser.parse_args()
caseListFile = args.caseListFile
kpiFile = args.kpiFile
basepath = args.basePath
basepath = os.path.join(basepath, '')
deCSVFile = args.DEX_CSVFile
imagesdir = args.imagesDirectory
imagesdir = os.path.join(imagesdir, '')
metricsFilesNameTemplate = args.MEXCsvPathTemplate
outputParamStatsFile = args.includeOutputParamsFile
casesListParamValDelim = args.casesList_paramValueDelimiter
casesListParamPairDelim = args.casesList_paramsDelimiter
ignoreList_default = []
ignoreList_default = ",".join(ignoreList_default)
ignoreList = args.excludeParams
ignoreSet = set(ignoreList.split(","))
# Read the input parameters from the cases.list file (also works with a sweep.run file but
# make sure the order is the same as cases.list files used for running the cases)
cases = paramUtils.readParamsFile(caseListFile, paramValDelim=casesListParamValDelim,
paramPairDelim=casesListParamPairDelim)
# Correct input variable names: Replace commas with "_"
cases = paramUtils.correct_input_variable_names(cases)
print("---> Read " + str(len(cases)) + " cases from " + caseListFile)
# Get the list of input parameters from the first case
inputVarNames = paramUtils.getParamNamesFromCase(cases[0])
inputVarNames = list(set(inputVarNames)-ignoreSet)
print("---> Found these input variable names: ")
for varName in inputVarNames:
print(varName)
# Add the values of input parameters for each case to caselist
caselist = paramUtils.writeInputParamVals2caselist(cases, inputVarNames)
print("---> Found these cases: ")
for case in caselist:
print(case)
# Read the kpihash and set the default values for missing fields
print("---> Reading the kpi.json file...")
[kpihash, orderPreservedKeys] = metricsJsonUtils.readKPIJsonFile(kpiFile)
print("---> Original input kpi.json:")
print(json.dumps(kpihash, indent=4))
print("---> Setting missing fields to defaults:")
for kpi in kpihash:
kpihash[kpi] = metricsJsonUtils.setKPIFieldDefaults(kpihash[kpi], kpi)
print(json.dumps(kpihash, indent=4))
print("---> Reading list of desired output metrics...")
outParamTable = paramUtils.getOutputParamsFromKPI(kpihash, orderPreservedKeys, ignoreSet)
print("---> Found these output parameters: ")
print(outParamTable)
print("---> Adding additional simulation_completed parameter...")
# This is for displaying solution convergence
outParamTable.append(['simulation_completed', -2])
# Read the desired metric from each output file and add them to caselist
print("---> Reading metrics from output files...")
caselist = paramUtils.writeOutParamVals2caselist(cases, metricsFilesNameTemplate,
outParamTable, caselist, kpihash)
# Get the list of desired images
print("---> Getting images...")
imgList, imgNames = paramUtils.getOutImgsFromKPI(kpihash, orderPreservedKeys)
caselist = paramUtils.writeImgs2caselist(cases, imgNames, basepath, imagesdir,
caselist)
# Write the header of the DEX csv file
header = paramUtils.generateHeader(inputVarNames, outParamTable, imgList)
# Write the Design Explorer csv file:
paramUtils.writeDesignExplorerCSVfile(deCSVFile, header, caselist)
print("---> Done writing Design Explorer csv file.")
|
python
|
import logging
import ibmsecurity.utilities.tools
logger = logging.getLogger(__name__)
module_uri = "/isam/felb"
requires_modules = None
requires_version = None
requires_model= "Appliance"
def get(isamAppliance, check_mode=False, force=False):
"""
Retrieving FELB configuration in full
"""
return isamAppliance.invoke_get("Retrieving FELB configuration in full", module_uri,
requires_modules=requires_modules, requires_version=requires_version, requires_model=requires_model)
def get_config(isamAppliance, check_mode=False, force=False):
"""
Retrieving FELB configuration
"""
return isamAppliance.invoke_get("Retrieving FELB configuration", "{0}/configuration".format(module_uri),
requires_modules=requires_modules, requires_version=requires_version, requires_model=requires_model)
def set(isamAppliance, enabled, debug, ha, logging, ssl, services, attributes,
check_mode=False, force=False):
"""
Replacing FELB configuration in full
"""
    # _check is called unconditionally so that json_data and warnings are defined even when force is True
    update_required, json_data, warnings = _check(isamAppliance, enabled, debug, ha, logging, ssl, services, attributes)
if force is True or update_required:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_put("Replacing FELB configuration in full", module_uri, json_data,
requires_modules=requires_modules, requires_version=requires_version, requires_model=requires_model)
return isamAppliance.create_return_object(warnings=warnings)
def _check(isamAppliance, enabled, debug, ha, logging, ssl, services, attributes):
update_required = False
ret_obj = get(isamAppliance)
warnings=ret_obj['warnings']
json_data = {
"enabled": enabled,
"debug": debug,
"ha": ha,
"logging": logging,
"ssl": ssl,
"services": services,
"attributes": attributes
}
sorted_json_data = ibmsecurity.utilities.tools.json_sort(json_data)
logger.debug("Sorted input: {0}".format(sorted_json_data))
sorted_ret_obj = ibmsecurity.utilities.tools.json_sort(ret_obj['data'])
logger.debug("Sorted existing data: {0}".format(sorted_ret_obj))
if sorted_ret_obj != sorted_json_data:
logger.info("Changes detected, update needed.")
update_required = True
return update_required, json_data, warnings
def export_file(isamAppliance, filename, check_mode=False, force=False):
"""
Exporting FELB configuration
"""
import os.path
if force is True or os.path.exists(filename) is False:
if check_mode is False: # No point downloading a file if in check_mode
return isamAppliance.invoke_get_file("Exporting FELB configuration", "{}?export=true".format(module_uri),
filename=filename, requires_modules=requires_modules,
requires_version=requires_version, requires_model=requires_model)
return isamAppliance.create_return_object()
def import_file(isamAppliance, file, check_mode=False, force=False):
"""
Importing FELB configuration
"""
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post_files(description="Importing FELB configuration",
uri=module_uri,
fileinfo=[{
'file_formfield': 'file',
'filename': file,
'mimetype': 'application/octet-stream'
}],
data={},
requires_modules=requires_modules, requires_version=requires_version, requires_model=requires_model)
def compare(isamAppliance1, isamAppliance2):
"""
Compare FELB configuration between 2 appliances
"""
ret_obj1 = get(isamAppliance1)
ret_obj2 = get(isamAppliance2)
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
|
python
|
from app.experimental.views import experimental
|
python
|
# decompyle3 version 3.3.2
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.8.5 (default, Jul 28 2020, 12:59:40)
# [GCC 9.3.0]
# Embedded file name: lib\bl702\efuse_create_do.py
import os, sys, re, binascii, hashlib
import ecdsa
from lib import bflb_utils
from lib import bflb_efuse_boothd_create
ef_sf_aes_mode_list = [
'None', 'AES128', 'AES192', 'AES256']
def verify_hex_num(string):
l = len(string)
i = 0
while 1:
if re.match('\\A[0-9a-fA-F]+\\Z', string[i:i + 1]) == None:
return False
else:
i += 1
if i >= l:
break
return True
def get_eflash_loader(xtal):
xtal_suffix = str(xtal).lower().replace('.', 'p').replace('M', 'm').replace('RC', 'rc')
return 'eflash_loader_' + xtal_suffix + '.bin'
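# Example (illustrative): get_eflash_loader('38.4M') returns 'eflash_loader_38p4m.bin';
# the crystal value is lowercased and '.' is replaced by 'p' to build the file name.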
def str_endian_switch(string):
s = string[6:8] + string[4:6] + string[2:4] + string[0:2]
return s
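# Example (illustrative): str_endian_switch('11223344') returns '44332211', i.e. the four
# bytes of a 32-bit hex word are reversed before being written into the efuse config file.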
def img_create_sha256_data(data_bytearray):
hashfun = hashlib.sha256()
hashfun.update(data_bytearray)
return bflb_utils.hexstr_to_bytearray(hashfun.hexdigest())
def create_key_data_do(values, chip_name, chip_type, cfg_file, efuse_data):
tips = ''
bflb_utils.printf('Create_key_data')
fp = open(cfg_file, 'w+')
fp.write('[EFUSE_CFG]\n')
aes_mode = ef_sf_aes_mode_list.index(values['ef_sf_aes_mode'])
tips += 'AES Mode:' + values['ef_sf_aes_mode'] + '\r\n'
bflb_utils.printf(ef_sf_aes_mode_list[aes_mode])
fp.write('ef_cpu_enc_en = 1\n')
if values['cpu0_pk_simple'] != '':
if os.path.exists(values['cpu0_pk_simple']) == False:
bflb_utils.printf('Error: public key file not found')
return 'Error: public key file not found'
vk = ecdsa.VerifyingKey.from_pem(open(values['cpu0_pk_simple']).read())
pk_data = vk.to_string()
bflb_utils.printf('Public key: ', binascii.hexlify(pk_data))
pk_hash = img_create_sha256_data(pk_data)
bflb_utils.printf('Public key hash=', binascii.hexlify(pk_hash))
fp.write('ef_sboot_sign_mode = 1\n')
fp.write('ef_key_slot_0_w0 = 0x' + str_endian_switch(str(pk_hash.hex())[0:8]) + '\n')
fp.write('ef_key_slot_0_w1 = 0x' + str_endian_switch(str(pk_hash.hex())[8:16]) + '\n')
fp.write('ef_key_slot_0_w2 = 0x' + str_endian_switch(str(pk_hash.hex())[16:24]) + '\n')
fp.write('ef_key_slot_0_w3 = 0x' + str_endian_switch(str(pk_hash.hex())[24:32]) + '\n')
fp.write('ef_key_slot_1_w0 = 0x' + str_endian_switch(str(pk_hash.hex())[32:40]) + '\n')
fp.write('ef_key_slot_1_w1 = 0x' + str_endian_switch(str(pk_hash.hex())[40:48]) + '\n')
fp.write('ef_key_slot_1_w2 = 0x' + str_endian_switch(str(pk_hash.hex())[48:56]) + '\n')
fp.write('ef_key_slot_1_w3 = 0x' + str_endian_switch(str(pk_hash.hex())[56:64]) + '\n')
if values['cpu0_pk_wp_enable'] == True:
fp.write('wr_lock_key_slot_0 = 1\n')
fp.write('wr_lock_key_slot_1 = 1\n')
else:
fp.write('wr_lock_key_slot_0 = 0\n')
fp.write('wr_lock_key_slot_1 = 0\n')
tips += 'public key hash\r\n'
if aes_mode != 0:
if len(values['cpu0_aes_key_simple']) >= 32 and verify_hex_num(values['cpu0_aes_key_simple']) == True:
fp.write('ef_sf_aes_mode = ' + str(ef_sf_aes_mode_list.index(values['ef_sf_aes_mode'])) + '\n')
fp.write('ef_key_slot_2_w0 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][0:8]) + '\n')
fp.write('ef_key_slot_2_w1 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][8:16]) + '\n')
fp.write('ef_key_slot_2_w2 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][16:24]) + '\n')
fp.write('ef_key_slot_2_w3 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][24:32]) + '\n')
if values['cpu0_aes_key_wp_enable'] == True:
fp.write('wr_lock_key_slot_2 = 1\n')
else:
fp.write('wr_lock_key_slot_2 = 0\n')
if values['cpu0_aes_key_rp_enable'] == True:
fp.write('rd_lock_key_slot_2 = 1\n')
else:
fp.write('rd_lock_key_slot_2 = 0\n')
tips += 'AES key\r\n'
else:
bflb_utils.printf('Error: Please check AES key data and len')
return 'Error: Please check AES key data and len'
elif values['cpu0_aes_key_simple'] != '':
bflb_utils.printf('Error: AES mode is None, no need to fill in CPU0 AES key')
return 'Error: AES mode is None, no need to fill in CPU0 AES key'
if aes_mode == 1:
if len(values['cpu0_aes_key_simple']) != 32:
bflb_utils.printf('Error: Please check AES key len')
return 'Error: Please check AES key len'
elif aes_mode == 2:
if len(values['cpu0_aes_key_simple']) == 48:
fp.write('ef_key_slot_3_w0 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][32:40]) + '\n')
fp.write('ef_key_slot_3_w1 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][40:48]) + '\n')
if values['cpu0_aes_key_wp_enable'] == True:
fp.write('wr_lock_key_slot_3 = 1\n')
else:
fp.write('wr_lock_key_slot_3 = 0\n')
if values['cpu0_aes_key_rp_enable'] == True:
fp.write('rd_lock_key_slot_3 = 1\n')
else:
fp.write('rd_lock_key_slot_3 = 0\n')
else:
bflb_utils.printf('Error: Please check AES key len')
return 'Error: Please check AES key len'
elif aes_mode == 3:
if len(values['cpu0_aes_key_simple']) == 64:
fp.write('ef_key_slot_3_w0 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][32:40]) + '\n')
fp.write('ef_key_slot_3_w1 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][40:48]) + '\n')
fp.write('ef_key_slot_3_w2 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][48:56]) + '\n')
fp.write('ef_key_slot_3_w3 = 0x' + str_endian_switch(values['cpu0_aes_key_simple'][56:64]) + '\n')
if values['cpu0_aes_key_wp_enable'] == True:
fp.write('wr_lock_key_slot_3 = 1\n')
else:
fp.write('wr_lock_key_slot_3 = 0\n')
if values['cpu0_aes_key_rp_enable'] == True:
fp.write('rd_lock_key_slot_3 = 1\n')
else:
fp.write('rd_lock_key_slot_3 = 0\n')
else:
bflb_utils.printf('Error: Please check AES key len')
return 'Error: Please check AES key len'
lines = len(tips.split('\r\n')) + 1
bflb_utils.printf('Following will be burned:\r\n' + tips)
fp.close()
bflb_efuse_boothd_create.efuse_create_process(chip_name, chip_type, cfg_file, efuse_data)
# okay decompiling lib.bl702.efuse_create_do.pyc
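# ------------------------------------------------------------------------------
# Illustrative sketch (added for clarity, not part of the decompiled module):
# expected behaviour of the pure helpers above, using made-up sample inputs.
#   verify_hex_num('00112233445566778899aabbccddeeff')  -> True
#   str_endian_switch('00112233')                       -> '33221100'  (byte swap of a 32-bit word)
#   get_eflash_loader('32M')                            -> 'eflash_loader_32m.bin'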
|
python
|
import os
import shutil
def get_filename(file_dir):
filenames=[]
for root, dirs, files in os.walk(file_dir):
for file in files:
filenames.append(os.path.join(root, file))
return filenames
# rerow=True renumbers row indices, recol=True renumbers column indices
def refile(filename, newfilename, rerow=True, recol=True):
    # header line: row count, column count, number of non-zero entries
origin_first = []
    # list of data lines (each a [row, col, value] triple)
origin_data = []
    # original row/column indices as they appear in the file
origin_row = []
origin_col = []
isFirst = True
file = open(filename, 'r')
try:
text_lines = file.readlines()
        # parse and store the file contents
for line in text_lines:
line_list = line.strip('\n').split(" ")
if isFirst:
print(line_list)
origin_first = line_list
isFirst = False
else:
origin_data.append(line_list)
                # originally only rows were handled; extended to cover rows and columns
                # "not in" on a list is too slow, so a set is used below for de-duplication
# if line_list[0] not in origin_row:
origin_row.append(line_list[0])
origin_col.append(line_list[1])
# print(type(line), line)
finally:
file.close()
print("read success!")
    # convert to sets for efficient de-duplication
new_row = list(set(origin_row))
new_row.sort()
new_col = list(set(origin_col))
new_col.sort()
print("compress success!")
    # convert to dicts to make index lookups O(1)
new_row_value = list(range(1, len(new_row)+1 ))
new_row_dict = dict(zip(new_row,new_row_value))
new_col_value = list(range(1, len(new_col)+1 ))
new_col_dict = dict(zip(new_col,new_col_value))
print("zip success!new_row:",len(new_row))
print("zip success!new_col:",len(new_col))
origin_data.sort(key=lambda i:(i[0],i[1]))
print("data order success!")
with open(newfilename, 'w') as f:
f.write('%s %s %s\n' % (len(new_row) if rerow else origin_first[0], len(new_col) if recol else origin_first[1], origin_first[2]))
for dataline in origin_data:
for ind,val in enumerate(dataline):
real_val = val
if ind == 0:
if rerow:
real_val = new_row_dict[val]
# real_val = new_row.index(val)
f.write('%s ' % real_val)
elif ind == 1:
if recol:
real_val = new_col_dict[val]
f.write('%s ' % real_val)
elif ind == 2:
f.write('%s ' % real_val)
f.write('\n')
dir_name = "../tensors/mat1nell"
new_dir_name = dir_name + "py"
if not os.path.exists(new_dir_name):
os.makedirs(new_dir_name)
# if the folder already exists, remove it and recreate it
else:
shutil.rmtree(new_dir_name)
os.makedirs(new_dir_name)
# new_dir_name_r = dir_name + "pyr"
# if not os.path.exists(new_dir_name_r):
# os.makedirs(new_dir_name_r)
# new_dir_name_c = dir_name + "pyc"
# if not os.path.exists(new_dir_name_c):
# os.makedirs(new_dir_name_c)
filenames = get_filename(dir_name)
# iterate over all files, read the data, renumber the indices and write each file back
fi = 0
for filename in filenames:
filename_list = filename.split("/")
    # compress both row and column indices
newfilename = "%s/%s_%s" % (new_dir_name,filename_list[-2],filename_list[-1])
refile(filename, newfilename, rerow=True ,recol=True)
    # # row-only compression
# newfilename = "%s/%s" % (new_dir_name_r,filename_list[-1])
# refile(filename, newfilename, rerow=True ,recol=False)
    # # column-only compression
# newfilename = "%s/%s" % (new_dir_name_c,filename_list[-1])
# refile(filename, newfilename, rerow=False ,recol=True)
# print("filename",filename)
# print("newfilename",newfilename)
fi = fi + 1
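# Illustrative sketch (added for clarity, not part of the original script). refile()
# expects a space-separated file whose first line is "<rows> <cols> <nnz>" followed by
# "row col value" triples; used row/column labels are renumbered into a dense 1..N range.
# For a made-up input file
#   9 9 3
#   2 5 1.0
#   2 9 2.0
#   7 5 3.0
# the rewritten file (rerow=True, recol=True) would contain
#   2 2 3
#   1 1 1.0
#   1 2 2.0
#   2 1 3.0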
|
python
|
#!/usr/bin/env python
'''Given a desired set of parameters, generate all configurations
obtained by enumerating each parameter individually (continuous parameters are discretized).
'''
import sys, re, MySQLdb, argparse, os, json, subprocess
import pandas as pd
import makemodel
import numpy as np
from MySQLdb.cursors import DictCursor
from outputjson import makejson
from populaterequests import addrows
parser = argparse.ArgumentParser(description='Exhaustive grid search along single axes of variation')
parser.add_argument('--host',type=str,help='Database host')
parser.add_argument('-p','--password',type=str,help='Database password')
parser.add_argument('--db',type=str,help='Database name',default='database')
parser.add_argument('-o','--output',type=str,help="Output file",default="rows.txt")
parser.add_argument('--parameters',type=file,help='parameters to enumerate',required=True)
args = parser.parse_args()
#get options
defaults = makemodel.getdefaults()
options = makemodel.getoptions()
opts = sorted(options.items())
#read in list of parameters
params = args.parameters.read().rstrip().split()
outrows = set() #uniq configurations only (e.g., avoid replicating the default over and over again)
for param in params:
if param in options:
choices = options[param]
if isinstance(choices, makemodel.Range):
choices = np.linspace(choices.min,choices.max, 9)
#for each parameter value, create a row
for val in choices:
row = ['P','P'] #spearmint
for (name,_) in opts:
if name == param:
row.append(val)
else:
row.append(defaults[name])
outrows.add(tuple(row))
out = open(args.output,'w')
for row in outrows:
out.write(' '.join(map(str,row))+'\n')
out.close()
if args.host:
addrows(args.output,args.host,args.db,args.password)
|
python
|
# -*- coding: utf-8 -*-
# ToMaTo (Topology management software)
# Copyright (C) 2010 Dennis Schwerdel, University of Kaiserslautern
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
import os, sys, signal, time, thread
os.environ['TOMATO_MODULE'] = "backend"
import monkey
monkey.patch_all()
import config
from mongoengine import connect
database_connection = connect(config.DATABASE, host=config.DATABASE_HOST)
database_obj = getattr(database_connection, config.DATABASE)
def db_migrate():
def getMigration(version):
try:
return __import__("tomato.migrations.migration_%04d" % version, {}, {}, 'migration_%04d' % version).migrate
except ImportError:
return None
from .db import data
version = data.get('db_version', 0)
print >>sys.stderr, "Database version: %04d" % version
if version > 0 and not getMigration(version):
raise Exception("Database is newer than code")
if not version and not getMigration(1):
raise Exception("Failed to migrate to initial version")
while True:
version += 1
migrate = getMigration(version)
if not migrate:
break
print >>sys.stderr, " - migrating to version %04d..." % version
try:
migrate()
except:
import traceback
traceback.print_exc()
raise
data.set('db_version', version)
import threading
_currentUser = threading.local()
def currentUser():
return _currentUser.user if hasattr(_currentUser, "user") else None # fixme
def setCurrentUser(user):
_currentUser.user = user
def login(credentials, sslCert):
user = auth.login(*credentials) if credentials else None
setCurrentUser(user)
return user or not credentials
from lib import logging
def handleError():
logging.logException()
dump.dumpException()
from lib import tasks #@UnresolvedImport
scheduler = tasks.TaskScheduler(maxLateTime=30.0, minWorkers=5, maxWorkers=25)
starttime = time.time()
from . import host, auth, rpcserver #@UnresolvedImport
from lib.cmd import bittorrent, process #@UnresolvedImport
from lib import util, cache #@UnresolvedImport
scheduler.scheduleRepeated(config.BITTORRENT_RESTART, util.wrap_task(bittorrent.restartClient))
stopped = threading.Event()
import dump
import dumpmanager
import models
def start():
logging.openDefault(config.LOG_FILE)
if not os.environ.has_key("TOMATO_NO_MIGRATE"):
db_migrate()
else:
print >>sys.stderr, "Skipping migrations"
auth.init()
global starttime
bittorrent.startTracker(config.TRACKER_PORT, config.TEMPLATE_PATH)
bittorrent.startClient(config.TEMPLATE_PATH)
rpcserver.start()
starttime = time.time()
if not os.environ.has_key("TOMATO_NO_TASKS"):
scheduler.start()
else:
print >>sys.stderr, "Running without tasks"
dump.init()
dumpmanager.init()# important: must be called after dump.init()
cache.init()# this does not depend on anything (except the scheduler variable being initialized), and nothing depends on this. No need to hurry this.
def reload_(*args):
print >>sys.stderr, "Reloading..."
logging.closeDefault()
reload(config)
logging.openDefault(config.LOG_FILE)
#stopRPCserver()
#startRPCserver()
def _printStackTraces():
import traceback
for threadId, stack in sys._current_frames().items():
print >>sys.stderr, "ThreadID: %s" % threadId
for filename, lineno, name, line in traceback.extract_stack(stack):
print >>sys.stderr, '\tFile: "%s", line %d, in %s' % (filename, lineno, name)
if line:
print >>sys.stderr, "\t\t%s" % (line.strip())
def _stopHelper():
stopped.wait(10)
if stopped.isSet():
return
print >>sys.stderr, "Stopping takes long, waiting some more time..."
stopped.wait(10)
if stopped.isSet():
return
print >>sys.stderr, "Ok last chance, killing process in 10 seconds..."
stopped.wait(10)
if stopped.isSet():
return
print >>sys.stderr, "Some threads are still running:"
_printStackTraces()
print >>sys.stderr, "Killing process..."
process.kill(os.getpid(), force=True)
def stop(*args):
print >>sys.stderr, "Shutting down..."
thread.start_new_thread(_stopHelper, ())
rpcserver.stop()
host.stopCaching()
scheduler.stop()
bittorrent.stopTracker()
bittorrent.stopClient()
logging.closeDefault()
stopped.set()
def run():
start()
signal.signal(signal.SIGTERM, stop)
signal.signal(signal.SIGINT, stop)
signal.signal(signal.SIGHUP, reload_)
try:
while not stopped.isSet():
stopped.wait(1.0)
except KeyboardInterrupt:
stop()
|
python
|
# This is a modification of stlehmann/Flask-MQTT
# https://github.com/stlehmann/Flask-MQTT
from logzero import logger
import paho.mqtt.client as mqtt
from paho.mqtt.client import ( # noqa: F401
Client,
MQTT_ERR_SUCCESS,
MQTT_ERR_ACL_DENIED,
MQTT_ERR_AGAIN,
MQTT_ERR_AUTH,
MQTT_ERR_CONN_LOST,
MQTT_ERR_CONN_REFUSED,
MQTT_ERR_ERRNO,
MQTT_ERR_INVAL,
MQTT_ERR_NO_CONN,
MQTT_ERR_NOMEM,
MQTT_ERR_NOT_FOUND,
MQTT_ERR_NOT_SUPPORTED,
MQTT_ERR_PAYLOAD_SIZE,
MQTT_ERR_PROTOCOL,
MQTT_ERR_QUEUE_SIZE,
MQTT_ERR_TLS,
MQTT_ERR_UNKNOWN,
MQTT_LOG_DEBUG,
MQTT_LOG_ERR,
MQTT_LOG_INFO,
MQTT_LOG_NOTICE,
MQTT_LOG_WARNING,
)
from collections import namedtuple

# Topic bookkeeping record used by subscribe()/unsubscribe() below, following the
# upstream stlehmann/Flask-MQTT implementation this module is derived from.
TopicQos = namedtuple('TopicQos', ['topic', 'qos'])
class MqttDecorator():
def _handle_connect(self, client, userdata, flags, rc):
# type: (Client, Any, Dict, int) -> None
if rc == MQTT_ERR_SUCCESS:
self.connected = True
for key, item in self.topics.items():
self.client.subscribe(topic=item.topic, qos=item.qos)
if self._connect_handler is not None:
self._connect_handler(client, userdata, flags, rc)
def _handle_disconnect(self, client, userdata, rc):
        # type: (Client, Any, int) -> None
self.connected = False
if self._disconnect_handler is not None:
self._disconnect_handler()
def on_topic(self, topic):
# type: (str) -> Callable
"""Decorator.
Decorator to add a callback function that is called when a certain
topic has been published. The callback function is expected to have the
following form: `handle_topic(client, userdata, message)`
:parameter topic: a string specifying the subscription topic to
subscribe to
The topic still needs to be subscribed via mqtt.subscribe() before the
callback function can be used to handle a certain topic. This way it is
possible to subscribe and unsubscribe during runtime.
**Example usage:**::
mqtt = EdgeAgent(__name__)
mqtt.subscribe('home/mytopic')
@mqtt.on_topic('home/mytopic')
def handle_mytopic(client, userdata, message):
print('Received message on topic {}: {}'
.format(message.topic, message.payload.decode()))
"""
def decorator(handler):
# type: (Callable[[str], None]) -> Callable[[str], None]
self.client.message_callback_add(topic, handler)
return handler
return decorator
def subscribe(self, topic, qos=0):
# type: (str, int) -> Tuple[int, int]
"""
Subscribe to a certain topic.
:param topic: a string specifying the subscription topic to
subscribe to.
:param qos: the desired quality of service level for the subscription.
Defaults to 0.
:rtype: (int, int)
:result: (result, mid)
A topic is a UTF-8 string, which is used by the broker to filter
messages for each connected client. A topic consists of one or more
topic levels. Each topic level is separated by a forward slash
(topic level separator).
The function returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or (MQTT_ERR_NO_CONN, None) if the
client is not currently connected. mid is the message ID for the
subscribe request. The mid value can be used to track the subscribe
request by checking against the mid argument in the on_subscribe()
callback if it is defined.
**Topic example:** `myhome/groundfloor/livingroom/temperature`
"""
# TODO: add support for list of topics
        # TODO: skip the broker call when the topic is already subscribed
        # try to subscribe
result, mid = self.client.subscribe(topic=topic, qos=qos)
# if successful add to topics
if result == MQTT_ERR_SUCCESS:
self.topics[topic] = TopicQos(topic=topic, qos=qos)
logger.debug('Subscribed to topic: {0}, qos: {1}'
.format(topic, qos))
else:
logger.error('Error {0} subscribing to topic: {1}'
.format(result, topic))
return (result, mid)
def unsubscribe(self, topic):
# type: (str) -> Optional[Tuple[int, int]]
"""
Unsubscribe from a single topic.
:param topic: a single string that is the subscription topic to
unsubscribe from
:rtype: (int, int)
:result: (result, mid)
Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS
to indicate success or (MQTT_ERR_NO_CONN, None) if the client is not
currently connected.
mid is the message ID for the unsubscribe request. The mid value can be
used to track the unsubscribe request by checking against the mid
argument in the on_unsubscribe() callback if it is defined.
"""
# don't unsubscribe if not in topics
if topic in self.topics:
result, mid = self.client.unsubscribe(topic)
if result == MQTT_ERR_SUCCESS:
self.topics.pop(topic)
logger.debug('Unsubscribed from topic: {0}'.format(topic))
else:
logger.debug('Error {0} unsubscribing from topic: {1}'
.format(result, topic))
# if successful remove from topics
return result, mid
return None
def unsubscribe_all(self):
# type: () -> None
"""Unsubscribe from all topics."""
topics = list(self.topics.keys())
for topic in topics:
self.unsubscribe(topic)
def publish(self, topic, payload=None, qos=0, retain=False):
# type: (str, bytes, int, bool) -> Tuple[int, int]
"""
Send a message to the broker.
:param topic: the topic that the message should be published on
:param payload: the actual message to send. If not given, or set to
None a zero length message will be used. Passing an
int or float will result in the payload being
converted to a string representing that number.
If you wish to send a true int/float, use struct.pack()
to create the payload you require.
:param qos: the quality of service level to use
:param retain: if set to True, the message will be set as the
"last known good"/retained message for the topic
:returns: Returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or MQTT_ERR_NO_CONN
if the client is not currently connected. mid is the message
ID for the publish request.
"""
if not self.connected:
self.client.reconnect()
result, mid = self.client.publish(topic, payload, qos, retain)
if result == MQTT_ERR_SUCCESS:
logger.debug('Published topic {0}: {1}'.format(topic, payload))
else:
logger.error('Error {0} publishing topic {1}'
.format(result, topic))
return (result, mid)
def on_connect(self):
# type: () -> Callable
"""Decorator.
Decorator to handle the event when the broker responds to a connection
request. Only the last decorated function will be called.
"""
def decorator(handler):
# type: (Callable) -> Callable
self._connect_handler = handler
return handler
return decorator
def on_disconnect(self):
# type: () -> Callable
"""Decorator.
Decorator to handle the event when client disconnects from broker. Only
the last decorated function will be called.
"""
def decorator(handler):
# type: (Callable) -> Callable
self._disconnect_handler = handler
return handler
return decorator
def on_message(self):
# type: () -> Callable
"""Decorator.
Decorator to handle all messages that have been subscribed and that
are not handled via the `on_message` decorator.
        **Note:** Unlike what is written in the paho mqtt documentation, this
        callback will not be called if there exists a topic-specific callback
added by the `on_topic` decorator.
**Example Usage:**::
@mqtt.on_message()
def handle_messages(client, userdata, message):
print('Received message on topic {}: {}'
.format(message.topic, message.payload.decode()))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_message = handler
return handler
return decorator
def on_publish(self):
# type: () -> Callable
"""Decorator.
Decorator to handle all messages that have been published by the
client.
**Example Usage:**::
@mqtt.on_publish()
def handle_publish(client, userdata, mid):
print('Published message with mid {}.'
.format(mid))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_publish = handler
return handler
return decorator
def on_subscribe(self):
# type: () -> Callable
"""Decorate a callback function to handle subscritions.
**Usage:**::
@mqtt.on_subscribe()
def handle_subscribe(client, userdata, mid, granted_qos):
print('Subscription id {} granted with qos {}.'
.format(mid, granted_qos))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_subscribe = handler
return handler
return decorator
def on_unsubscribe(self):
# type: () -> Callable
"""Decorate a callback funtion to handle unsubscribtions.
**Usage:**::
@mqtt.unsubscribe()
def handle_unsubscribe(client, userdata, mid)
print('Unsubscribed from topic (id: {})'
.format(mid)')
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_unsubscribe = handler
return handler
return decorator
def on_log(self):
# type: () -> Callable
"""Decorate a callback function to handle MQTT logging.
**Example Usage:**
::
@mqtt.on_log()
def handle_logging(client, userdata, level, buf):
print(client, userdata, level, buf)
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_log = handler
return handler
return decorator
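# ------------------------------------------------------------------------------
# Illustrative sketch (added for clarity, not part of the original module): a minimal
# agent wiring the MqttDecorator mixin to a paho client. The attribute names below
# (client, topics, connected, _connect_handler, _disconnect_handler) are exactly the
# ones the mixin references above; the broker host in the usage notes is made up.
class _ExampleAgent(MqttDecorator):
    def __init__(self):
        self.client = mqtt.Client()
        self.topics = {}
        self.connected = False
        self._connect_handler = None
        self._disconnect_handler = None
        # route paho callbacks through the mixin's handlers
        self.client.on_connect = self._handle_connect
        self.client.on_disconnect = self._handle_disconnect

# Usage (requires a reachable broker):
#   agent = _ExampleAgent()
#   agent.client.connect('broker.example.com')
#   agent.client.loop_start()
#   agent.subscribe('home/mytopic')
#
#   @agent.on_topic('home/mytopic')
#   def handle_mytopic(client, userdata, message):
#       print(message.topic, message.payload.decode())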
|
python
|
__author__ = 'petlja'
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst import Directive
def setup(app):
app.connect('html-page-context', html_page_context_handler)
app.add_stylesheet('notes.css')
app.add_javascript('notes.js')
app.add_directive('infonote', InfoNoteDirective)
app.add_directive('questionnote', QuestionNoteDirective)
app.add_directive('level', LevelDirective)
app.add_node(InfoNoteNode, html=(visit_info_note_node, depart_info_note_node))
app.add_node(QuestionNoteNode, html=(visit_question_note_node, depart_question_note_node))
app.add_node(LevelNode, html=(visit_level_node, depart_level_node))
def html_page_context_handler(app, pagename, templatename, context, doctree):
app.builder.env.h_ctx = context
TEMPLATE_START = """
<div class="course-box course-box-info">
<div class="course-content">
<p>
"""
TEMPLATE_END = """
</p></div></div>
"""
class InfoNoteNode(nodes.General, nodes.Element):
def __init__(self, content):
super(InfoNoteNode, self).__init__()
self.note = content
def visit_info_note_node(self, node):
node.delimiter = "_start__{}_".format("info")
self.body.append(node.delimiter)
res = TEMPLATE_START
self.body.append(res)
def depart_info_note_node(self, node):
res = TEMPLATE_END
self.body.append(res)
self.body.remove(node.delimiter)
class InfoNoteDirective(Directive):
"""
.. infonote::
"""
required_arguments = 0
optional_arguments = 0
has_content = True
def run(self):
"""
generate html to include note box.
:param self:
:return:
"""
env = self.state.document.settings.env
self.options['source'] = "\n".join(self.content)
innode = InfoNoteNode(self.options)
self.state.nested_parse(self.content, self.content_offset, innode)
return [innode]
TEMPLATE_START_Q = """
<div class="course-box course-box-special">
<div class="course-content">
<h4 class="carbox-title">
<img class="corner-image float-right" src="%s" />
</h4>
<p>
"""
TEMPLATE_END_Q = """
</p></div></div>
"""
class QuestionNoteNode(nodes.General, nodes.Element):
def __init__(self, content):
super(QuestionNoteNode, self).__init__()
self.note = content
def visit_question_note_node(self, node):
node.delimiter = "_start__{}_".format("info")
self.body.append(node.delimiter)
prefix = '../' * self.builder.current_docname.count('/')
res = TEMPLATE_START_Q % (prefix + "_static/img/question-mark.png")
self.body.append(res)
def depart_question_note_node(self, node):
res = TEMPLATE_END_Q
self.body.append(res)
self.body.remove(node.delimiter)
class QuestionNoteDirective(Directive):
"""
.. questionnote::
"""
required_arguments = 0
optional_arguments = 0
has_content = True
def run(self):
"""
generate html to include note box.
:param self:
:return:
"""
env = self.state.document.settings.env
self.options['source'] = "\n".join(self.content)
qnnode = QuestionNoteNode(self.options)
self.state.nested_parse(self.content, self.content_offset, qnnode)
return [qnnode]
TEMPLATE_START_L_CONTAINER = """
<div class="rst-level rst-level-%(complexity)s">
"""
TEMPLATE_START_L = """
<div data-level="%(complexity)s" style="display:none">
"""
TEMPLATE_END_L = """
</div>
"""
class LevelNode(nodes.General, nodes.Element):
def __init__(self, content):
super(LevelNode, self).__init__()
self.note = content
def visit_level_node(self, node):
node.delimiter = "_start__{}_".format("level")
self.body.append(node.delimiter)
if 'container' in node.note:
res = TEMPLATE_START_L_CONTAINER % node.note
else:
res = TEMPLATE_START_L % node.note
self.body.append(res)
def depart_level_node(self, node):
res = TEMPLATE_END_L
self.body.append(res)
self.body.remove(node.delimiter)
class LevelDirective(Directive):
"""
.. level:: 2
:container:
"""
required_arguments = 1
optional_arguments = 0
has_content = True
option_spec = {
'container':directives.flag,
}
def run(self):
"""
generate html to include level box.
:param self:
:return:
"""
env = self.state.document.settings.env
self.options['source'] = "\n".join(self.content)
self.options['complexity'] = self.arguments[0]
innode = LevelNode(self.options)
self.state.nested_parse(self.content, self.content_offset, innode)
return [innode]
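# Illustrative sketch (added for clarity, not part of the original extension): once the
# extension is registered in conf.py, the directives defined above are used in .rst
# pages like this (content indented under the directive, as usual for docutils):
#
#   .. infonote::
#
#      This paragraph is rendered inside an info box.
#
#   .. level:: 2
#      :container:
#
#      This content is wrapped in a level-2 container.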
|
python
|
import discord
from jinja2 import Environment, FileSystemLoader
from configurations import CONFIG
from game_constants import RARITY_COLORS
from search import _
from util import flatten
class Views:
WHITE = discord.Color.from_rgb(254, 254, 254)
BLACK = discord.Color.from_rgb(0, 0, 0)
RED = discord.Color.from_rgb(255, 0, 0)
def __init__(self, emojis):
self.my_emojis = emojis
self.jinja_env = Environment(loader=FileSystemLoader('templates'))
def banner_colors(self, banner):
return [f'{self.my_emojis.get(d[0], f":{d[0]}:")}{abs(d[1]) * f"{d[1]:+d}"[0]}' for d in banner['colors']]
def render_embed(self, embed, template_name, **kwargs):
self.jinja_env.filters['emoji'] = self.my_emojis.get
self.jinja_env.filters['banner_colors'] = self.banner_colors
self.jinja_env.globals.update({
'emoji': self.my_emojis.get,
'flatten': flatten
})
template = self.jinja_env.get_template(template_name)
content = template.render(**kwargs)
for i, splitted in enumerate(content.split('<T>')):
if i == 0:
embed.description = splitted
else:
title_end = splitted.index('</T>')
inline = splitted.startswith('inline')
embed.add_field(
name=splitted[inline * len('inline'):title_end],
value=splitted[title_end + 4:],
inline=inline)
return embed
def render_help(self, prefix, lang):
title = f'garyatrics.com bot {_("[HELP]", lang)}'
e = discord.Embed(title=title, color=self.WHITE)
return self.render_embed(e, f'help/help-{lang}.jinja', prefix=prefix)
def render_weapon(self, weapon, shortened):
rarity_color = RARITY_COLORS.get(weapon['raw_rarity'], RARITY_COLORS['Mythic'])
color = discord.Color.from_rgb(*rarity_color)
e = discord.Embed(title='Weapon search found one exact match', color=color)
thumbnail_url = f'{CONFIG.get("graphics_url")}/Spells/Cards_{weapon["spell_id"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
if shortened:
return self.render_embed(e, 'weapon_shortened.jinja', weapon=weapon)
if 'release_date' in weapon:
e.set_footer(text='Release date')
e.timestamp = weapon["release_date"]
return self.render_embed(e, 'weapon.jinja', weapon=weapon)
def render_affix(self, affix, shortened):
e = discord.Embed(title='Affix search found one exact match', color=self.WHITE)
affix['weapons'] = [f'{w["name"]} `#{w["id"]}`' for w in affix['weapons']]
thumbnail_url = f'{CONFIG.get("graphics_url")}/Ingots/Ingots_AnvilIcon_full.png'
e.set_thumbnail(url=thumbnail_url)
return self.render_embed(e, 'affix.jinja', affix=affix)
def render_pet(self, pet, shortened):
e = discord.Embed(title='Pet search found one exact match', color=self.WHITE)
thumbnail_url = f'{CONFIG.get("graphics_url")}/Pets/Cards_{pet["filename"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
if shortened:
return self.render_embed(e, 'pet_shortened.jinja', pet=pet)
if 'release_date' in pet:
e.set_footer(text='Release date')
e.timestamp = pet["release_date"]
return self.render_embed(e, 'pet.jinja', pet=pet)
def render_troop(self, troop, shortened):
rarity_color = RARITY_COLORS.get(troop['raw_rarity'], RARITY_COLORS['Mythic'])
if 'Boss' in troop['raw_types']:
rarity_color = RARITY_COLORS['Doomed']
e = discord.Embed(title='Troop search found one exact match', color=discord.Color.from_rgb(*rarity_color))
thumbnail_url = f'{CONFIG.get("graphics_url")}/Troops/Cards_{troop["filename"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
if shortened:
return self.render_embed(e, 'troop_shortened.jinja', troop=troop)
if 'release_date' in troop:
e.set_footer(text='Release date')
e.timestamp = troop["release_date"]
return self.render_embed(e, 'troop.jinja', troop=troop)
def render_traitstone(self, traitstone, shortened):
e = discord.Embed(color=self.WHITE)
e.title = traitstone['name']
thumbnail_url = f'{CONFIG.get("graphics_url")}/Runes_Rune{traitstone["id"]:02d}_full.png'
e.set_thumbnail(url=thumbnail_url)
troop_list = ['{0} ({1})'.format(*troop) for troop in traitstone['troops']]
troops = self.trim_text_to_length(", ".join(sorted(troop_list)), 900, ',', ', ...')
class_list = ['{0} ({1})'.format(*_class) for _class in traitstone['classes']]
classes = self.trim_text_to_length(", ".join(sorted(class_list)), 900, ',', ', ...')
kingdom_list = [k for k in traitstone['kingdoms']]
kingdoms = self.trim_text_to_length(", ".join(sorted(kingdom_list)), 900, ',', ', ...')
return self.render_embed(e, 'traitstone.jinja',
traitstone=traitstone, troops=troops, classes=classes, kingdoms=kingdoms)
def render_talent(self, tree, shortened):
e = discord.Embed(color=self.WHITE)
if shortened:
e.title = tree["name"]
return self.render_embed(e, 'talent_shortened.jinja', tree=tree)
e.title = 'Talent search found one exact match'
return self.render_embed(e, 'talent.jinja', tree=tree)
def render_team(self, team, author, shortened):
color = discord.Color.from_rgb(*RARITY_COLORS['Mythic'])
e = discord.Embed(color=color)
if team['banner']:
thumbnail_url = f'{CONFIG.get("graphics_url")}/Banners/Banners_{team["banner"]["filename"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
if shortened:
troops = [f'{t[1]}' for t in team['troops']]
e.title = ', '.join(troops)
return self.render_embed(e, 'team_shortened.jinja', team=team)
e.title = f"{author} team"
return self.render_embed(e, 'team.jinja', team=team)
def render_kingdom(self, kingdom, shortened):
e = discord.Embed(title='Kingdom search found one exact match', color=self.WHITE)
underworld = 'underworld' if kingdom['underworld'] else ''
thumbnail_url = f'{CONFIG.get("graphics_url")}/Maplocations{underworld}_{kingdom["filename"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
if shortened:
return self.render_embed(e, 'kingdom_shortened.jinja', kingdom=kingdom)
return self.render_embed(e, 'kingdom.jinja', kingdom=kingdom)
def render_trait(self, trait, shortened):
e = discord.Embed(title='Trait search found one exact match', color=self.WHITE)
thumbnail_url = f'{CONFIG.get("graphics_url")}/Troopcardall_Traits/{trait["image"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
trait['thumbnail'] = thumbnail_url
return self.render_embed(e, 'trait.jinja', trait=trait)
def render_class(self, _class, shortened):
e = discord.Embed(title='Class search found one exact match', color=self.WHITE)
thumbnail_url = f'{CONFIG.get("graphics_url")}/Classes_{_class["code"]}_full.png'
e.set_thumbnail(url=thumbnail_url)
if shortened:
return self.render_embed(e, 'class_shortened.jinja', _class=_class)
return self.render_embed(e, 'class.jinja', _class=_class)
@staticmethod
def trim_text_to_length(text, limit, break_character='\n', indicator=''):
input_text = f'{text}{break_character}'
trimmed_text = input_text[:input_text[:limit].rfind(break_character)]
if trimmed_text != text:
trimmed_text += indicator
return trimmed_text
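    # Illustrative example (added for clarity, not from the original module):
    #   Views.trim_text_to_length('alpha, beta, gamma', 12, ',', ', ...')
    #   -> 'alpha, beta, ...'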
@classmethod
def trim_text_lines_to_length(cls, lines, limit, break_character='\n'):
input_text = break_character.join(lines) + break_character
trimmed_text = cls.trim_text_to_length(input_text, limit)
return trimmed_text.split(break_character)
@classmethod
def trim_news_to_length(cls, text, link, max_length=900):
trimmed_text = cls.trim_text_to_length(text, max_length)
if len(trimmed_text) > max_length:
trimmed_text = cls.trim_text_to_length(text, max_length, break_character=' ')
read_more = ''
if len(trimmed_text) != len(text):
read_more = '[...] '
result = f'{trimmed_text}{read_more}\n\n[Read full news article]({link}).'
return result
def render_events(self, events):
e = discord.Embed(title='Upcoming GoW Events', color=self.WHITE)
message_lines = ['```']
last_event_date = events[0]['start']
for event in events:
if event['start'] > last_event_date and event['start'].weekday() == 0:
message_lines.append('')
last_event_date = event['start']
message_lines.append(f'{event["start"].strftime("%b %d")} - '
f'{event["end"].strftime("%b %d")} '
f'{event["type"]}'
f'{":" if event["extra_info"] else ""} '
f'{event["extra_info"]}')
message_lines = self.trim_text_lines_to_length(message_lines, 900)
message_lines.append('```')
e.add_field(name='Spoilers', value='\n'.join(message_lines))
return e
def render_levels(self, levels):
e = discord.Embed(title='Level progression overview', color=self.WHITE)
return self.render_embed(e, 'levels.jinja', levels=levels)
|
python
|
def set_clock(time, buttons):
res=[int(time.split(":")[0]), int(time.split(":")[1])]
for i in buttons:
if i=="M":
res[1]=(res[1]+1)%60
elif i=="H":
res[0]=(res[0]+1)%24
return "{:d}:{:02d}".format(res[0] if res[0] else 24, res[1])
|
python
|
def outer_function(msg):
def inner_function():
print(msg)
return inner_function
hi_func=outer_function('Hi')
bye_func=outer_function('Bye')
hi_func()
bye_func()
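# Expected output of the two calls above (added for clarity): each closure remembers
# the msg it was created with, so this prints
#   Hi
#   Bye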
|
python
|
import discord
from discord.ext import commands
class Build_info(commands.Cog, name='Build_info'):
def __init__(self, bot):
self.bot = bot
@commands.command(description="Displays build info")
    async def build_info(self, ctx, file_override=None):
        # fall back to the default build info file when no override is given
        file = file_override if file_override is not None else 'buildinfo.conf'
        with open(file, 'r') as f:
            await ctx.send(''.join(f.readlines()))
def setup(bot):
bot.add_cog(Build_info(bot))
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 1 23:51:03 2017
@author: Mayur
"""
"""
Grader
A regular polygon has n number of sides. Each side has length s.
The area of a regular polygon is: (0.25∗n∗s^2)/tan(π/n)
The perimeter of a polygon is: length of the boundary of the polygon
Write a function called polysum that takes 2 arguments, n and s. This function
should sum the area and square of the perimeter of the regular polygon. The
function returns the sum, rounded to 4 decimal places.
"""
#code
import math
def polysum(n, s):
"""
n: Number of sides
s: Length of each side
returns: sum of the area and square of the perimeter of the regular polygon
"""
    area = (0.25 * n * s * s) / math.tan(math.pi / n)
    perimeter = n * s
    total = area + perimeter ** 2  # avoid shadowing the built-in sum()
    return round(total, 4)
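# Illustrative examples (added for clarity, not from the original exercise):
#   polysum(4, 1) -> 17.0      (unit square: area 1 plus perimeter 4 squared)
#   polysum(3, 2) -> 37.7321   (equilateral triangle with side length 2)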
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019-07-04 11:04
# @Author : minp
# @contact : [email protected]
# @Site :
# @File : stockdb.py
# @Software: PyCharm
import pymongo
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb = myclient["runoobdb2"]
mycol = mydb["sites"]
mydict = {"name": "RUNOOB", "alexa": "10000", "url": "https://www.runoob.com"}
x = mycol.insert_one(mydict)
print(x)
print(x.inserted_id)  # the _id generated for the inserted document
|
python
|
import os
import unittest
import pawnpy
basedir = os.path.dirname(os.path.realpath(__file__))
class TestCompile(unittest.TestCase):
def test_compile(self):
pawnpy.cc(os.path.join(basedir, '../pawnpy/pawn/examples/hello.p'),
basedir + '/hello.amx',
os.path.join(basedir, '../pawnpy/pawn/include'))
|
python
|
from flask import Flask, url_for
from flask_migrate import Migrate
from flask_security import Security
from flask_security.utils import encrypt_password
from flask_admin import helpers as admin_helpers
from adminlte.admin import AdminLte, admins_store, admin_db
from adminlte.models import Role
from adminlte.views import FaLink
from flask_admin import menu
from models import db
from models.message import Message
from models.device import Device
from views.message import MessageView
from views.device import DeviceView
app = Flask(__name__)
app.config.from_pyfile('config.py')
db.init_app(app)
db.app = app
migrate = Migrate(app, db)
admin_migrate = Migrate(app, admin_db)
security = Security(app, admins_store)
admin = AdminLte(app, skin = 'green', name = 'FlaskCMS', short_name = "<b>F</b>C", long_name = "<b>Flask</b>CMS")
admin.add_view(MessageView(Message, db.session, name = "Messages", menu_icon_value = 'fa-envelope'))
admin.add_view(DeviceView(Device, db.session, name = "Devices", menu_icon_value = 'fa-laptop'))
admin.add_link(FaLink(name = 'Website', category = 'Author', url = 'http://tomasznajda.com',
icon_value = 'fa-globe', target = "_blank"))
admin.add_link(FaLink(name = 'GitHub', category = 'Author', url = 'https://github.com/tomasznajda',
icon_value = 'fa-github', target = "_blank"))
admin.set_category_icon(name = 'Author', icon_value = 'fa-address-card')
@security.context_processor
def security_context_processor():
return dict(
admin_base_template = admin.base_template,
admin_view = admin.index_view,
h = admin_helpers,
get_url = url_for
)
@app.cli.command()
def build_sample_db():
"""
Populate a small db with some example entries.
"""
admin_db.drop_all()
admin_db.create_all()
with app.app_context():
super_admin_role = Role(name = 'superadmin')
admin_role = Role(name = 'admin')
admin_db.session.add(super_admin_role)
admin_db.session.add(admin_role)
admin_db.session.commit()
test_user = admins_store.create_user(
first_name = 'John',
last_name = 'Doe',
email = '[email protected]',
password = encrypt_password('admin'),
roles = [super_admin_role, admin_role]
)
admin_db.session.add(test_user)
admin_db.session.commit()
return
if __name__ == '__main__':
app.run()
|
python
|
import time
from pymongo import MongoClient
from kiroku.config import MONGO_LOGIN
client = MongoClient(MONGO_LOGIN)
db = client['krk']
def insert_update(data, collection_name):
"""
Insert if the item has unique _id else update the item.
:param dict data: Data to be inserted into the database
:param str collection_name: Collection name that the data should be inserted to
:return: True if the insert is unique
"""
col = db[collection_name]
_id = data["_id"]
cur = col.find_one({"_id": _id})
if cur:
log("update", {"id": _id, "previous": cur})
col.replace_one({"_id": _id}, data)
return False
else:
col.insert_one(data)
return True
def find_by_id(_id, collection_name):
"""
    Find *one* item by _id
:param ? _id: The _id of the item
:param str collection_name: The name of the collection that you expect to find the item in.
:return: Data if found, else None
"""
return db[collection_name].find_one({"_id": _id})
def log(event_type, data):
"""
Log into the database
:param str event_type: Type of the event
:param dict data:
:return:
"""
db['log'].insert_one({"eventType": event_type, "data": data, "time": time.time()})
|
python
|
import json
def create_row_w_validated_params(cls, validated_params, rqst_errors):
found_specific_concern_rows = cls.check_for_specific_concern_rows_with_given_question(
validated_params["rqst_specific_concern_question"],
rqst_errors
)
specific_concern_row = None
if not found_specific_concern_rows and not rqst_errors:
specific_concern_row = cls()
specific_concern_row.question = validated_params["rqst_specific_concern_question"]
specific_concern_row.research_weight = validated_params["rqst_specific_concern_research_weight"]
specific_concern_row.save()
if "add_related_general_concerns_objects" in validated_params:
add_related_general_concerns_to_row(specific_concern_row, validated_params, rqst_errors)
if not rqst_errors:
specific_concern_row.save()
return specific_concern_row
def update_row_w_validated_params(cls, validated_params, rqst_errors):
rqst_id = validated_params['rqst_id']
if "rqst_specific_concern_question" in validated_params:
found_specific_concern_rows = cls.check_for_specific_concern_rows_with_given_question(
validated_params["rqst_specific_concern_question"],
rqst_errors,
rqst_id
)
else:
found_specific_concern_rows = None
specific_concern_row = None
if not found_specific_concern_rows and not rqst_errors:
try:
specific_concern_row = cls.objects.get(id=rqst_id)
if "rqst_specific_concern_question" in validated_params:
specific_concern_row.question = validated_params["rqst_specific_concern_question"]
if "rqst_specific_concern_research_weight" in validated_params:
specific_concern_row.research_weight = validated_params["rqst_specific_concern_research_weight"]
if "add_related_general_concerns_objects" in validated_params:
add_related_general_concerns_to_row(specific_concern_row, validated_params, rqst_errors)
elif "remove_related_general_concerns_objects" in validated_params:
remove_related_general_concerns_from_row(specific_concern_row, validated_params, rqst_errors)
if not rqst_errors:
specific_concern_row.save()
except cls.DoesNotExist:
rqst_errors.append("Specific concern does not exist for database id: {}".format(rqst_id))
return specific_concern_row
def delete_row_w_validated_params(cls, validated_params, rqst_errors):
rqst_id = validated_params['rqst_id']
try:
specific_concern_obj = cls.objects.get(id=rqst_id)
specific_concern_obj.delete()
except cls.DoesNotExist:
rqst_errors.append("Specific concern does not exist for database id: {}".format(rqst_id))
def check_for_specific_concern_rows_with_given_question(cls, specific_concern_question, post_errors, current_specific_concern_id=None):
found_specific_concern_obj = False
specific_concern_objs = cls.objects.filter(question__iexact=specific_concern_question)
if specific_concern_objs:
found_specific_concern_obj = True
specific_concern_ids = []
len_of_specific_concerns_qset = len(specific_concern_objs)
for specific_concern_obj in specific_concern_objs:
specific_concern_ids.append(specific_concern_obj.id)
if len_of_specific_concerns_qset > 1:
post_errors.append(
"Multiple specific concerns with question: {} already exist in db. (Hint - Delete all but one and modify the remaining) id's: {}".format(
specific_concern_question, json.dumps(specific_concern_ids)))
else:
if not current_specific_concern_id or current_specific_concern_id not in specific_concern_ids:
post_errors.append(
"Specific concern with question: {} already exists in db. (Hint - Modify that entry) id: {}".format(
specific_concern_question, specific_concern_ids[0]))
else:
found_specific_concern_obj = False
return found_specific_concern_obj
def add_related_general_concerns_to_row(specific_concern_row, validated_params, rqst_errors):
related_general_concerns_rows = validated_params["add_related_general_concerns_objects"]
if related_general_concerns_rows:
check_related_general_concerns_for_given_rows(specific_concern_row, related_general_concerns_rows, rqst_errors)
for related_general_concerns_row in related_general_concerns_rows:
specific_concern_row.related_general_concerns.add(related_general_concerns_row)
def remove_related_general_concerns_from_row(specific_concern_row, validated_params, rqst_errors):
related_general_concerns_rows = validated_params["remove_related_general_concerns_objects"]
check_related_general_concerns_for_not_given_rows(
specific_concern_row,
related_general_concerns_rows,
rqst_errors
)
for related_general_concerns_row in related_general_concerns_rows:
specific_concern_row.related_general_concerns.remove(related_general_concerns_row)
def check_related_general_concerns_for_given_rows(specific_concern_row, related_general_concerns_rows, rqst_errors):
cur_related_general_concerns_qset = specific_concern_row.related_general_concerns.all()
for related_general_concerns_object in related_general_concerns_rows:
if related_general_concerns_object in cur_related_general_concerns_qset:
rqst_errors.append(
"Related general concern with the following name already exists in db id {}'s related_general_concerns list (Hint - remove from parameter 'related_general_concerns' list): {})".format(
specific_concern_row.id, related_general_concerns_object.name
)
)
def check_related_general_concerns_for_not_given_rows(specific_concern_rows, related_general_concerns_rows, rqst_errors):
cur_related_general_concerns_qset = specific_concern_rows.related_general_concerns.all()
for related_general_concerns_object in related_general_concerns_rows:
if related_general_concerns_object not in cur_related_general_concerns_qset:
rqst_errors.append(
"Related general concern with the following name does not exist in db id {}'s related_general_concerns list (Hint - remove from parameter 'related_general_concerns' list): {})".format(
specific_concern_rows.id, related_general_concerns_object.name
)
)
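# Note (added for clarity): the helpers above that take `cls` as their first argument
# rely on cls.objects and cls.DoesNotExist, so they are intended to be attached to a
# Django model (e.g. bound as classmethods) rather than called as free functions.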
|
python
|
# MIT License
#
# Copyright (c) 2020 Tri Minh Cao
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Program to plot vias in the whole layout using DEF and LEF data.
Author: Tri Minh Cao
Email: [email protected]
Date: September 2016
"""
from def_parser import *
from lef_parser import *
from util import *
import plot_cell
import matplotlib.pyplot as plt
import numpy as np
import time
import img_util
import pickle
import random
import os
import time
import shutil
def sort_vias_by_row(layout_area, row_height, vias):
"""
Sort the vias by row
:param layout_area: a list [x, y] that stores the area of the layout
:param vias: a list of vias that need to be sorted
:return: a list of rows, each containing a list of vias in that row.
"""
num_rows = layout_area[1] // row_height + 1
rows = []
for i in range(num_rows):
rows.append([])
for via in vias:
via_y = via[0][1]
row_dest = via_y // row_height
rows[row_dest].append(via)
# sort vias in each row based on x-coordinate
for each_row in rows:
each_row.sort(key = lambda x: x[0][0])
return rows
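# Note (added for clarity): throughout this script a via is represented as a list of
# the form [(x, y), via_name, net_name, in_out_flag]; in_out_flag is filled in by the
# main section below (0 for design inputs, 1 for design outputs).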
def plot_window(left_pt, width, height, vias, lef_data, macro=None, comp=None):
"""
Method to plot a window from the layout with all vias inside it.
:param left_pt: bottom left point (origin) of the window
:param width: width of the window
:param height: height of the window
:param vias: a list containing all vias on a row
:return: void
"""
# get the corners for the window
corners = [left_pt]
corners.append((left_pt[0] + width, left_pt[1] + height))
# compose the output file name
out_folder = './images/'
# current_time = time.strftime('%H%M%d%m%Y')
pos = (str(corners[0][0]) + '_' + str(corners[0][1]) + '_' +
str(corners[1][0]) + '_' + str(corners[1][1]))
# out_file = out_folder + pos
out_file = out_folder
# out_file += str(corners[0][0])
out_file += pos
if macro:
out_file += '_' + macro
if comp:
out_file += '_' + comp
# current_time = time.strftime('%H%M%S%d%m%Y')
# out_file += '_' + current_time
if os.path.exists(out_file + '.png'):
return out_file + '.png'
plt.figure(figsize=(3, 5), dpi=80, frameon=False)
# scale the axis of the subplot
# draw the window boundary
# scaled_pts = rect_to_polygon(corners)
# draw_shape = plt.Polygon(scaled_pts, closed=True, fill=None,
# color="blue")
# plt.gca().add_patch(draw_shape)
# plot the vias inside the windows
# look for the vias
for via in vias:
if (via[0][0] - left_pt[0] > width):
break
via_name = via[1]
via_info = lef_data.via_dict[via_name]
via_loc = via[0]
plot_cell.draw_via(via_loc, via_info)
# scale the axis of the subplot
axis = [corners[0][0], corners[1][0], corners[0][1], corners[1][1]]
# print (test_axis)
plt.axis(axis)
plt.axis('off')
plt.gca().set_aspect('equal', adjustable='box')
plt.savefig(out_file)
# plt.show()
plt.close('all')
return out_file + '.png'
def group_via(via_list, max_number, max_distance):
"""
Method to group the vias together to check if they belong to a cell.
:param via_list: a list of all vias.
:return: a list of groups of vias.
"""
groups = []
length = len(via_list)
for i in range(length):
# one_group = [via_list[i]]
curr_via = via_list[i]
curr_list = []
for j in range(2, max_number + 1):
if i + j - 1 < length:
right_via = via_list[i + j - 1]
dist = right_via[0][0] - curr_via[0][0]
if dist < max_distance:
curr_list.append(via_list[i:i+j])
# only add via group list that is not empty
if len(curr_list) > 0:
groups.append(curr_list)
return groups
def predict_cell(candidates, row, model, lef_data, std_cells):
"""
Use the trained model to choose the most probable cell from via groups.
:param candidates: 2-via and 3-via groups that could make a cell
:return: a tuple (chosen via group, predicted cell name)
"""
margin = 350
img_width = 200
img_height = 400
img_shape = img_width * img_height
possible_candidates = []
for i in range(len(candidates)):
# dataset = np.ndarray(shape=(len(candidates), img_height, img_width),
# dtype=np.float32)
if candidates[i] != -1:
possible_candidates.append(i)
dataset = np.ndarray(shape=(1, img_height, img_width),
dtype=np.float32)
each_group = candidates[i]
left_pt = [each_group[0][0][0] - margin, CELL_HEIGHT * row]
width = each_group[-1][0][0] - left_pt[0] + margin
# print (width)
img_file = plot_window(left_pt, width, CELL_HEIGHT, each_group, lef_data)
# print (img_file)
image_data = img_util.load_image(img_file)
# print (image_data.shape)
dataset[0, :, :] = image_data
X_test = dataset.reshape(dataset.shape[0], img_shape)
result = model.decision_function(X_test)
result = result[0]
# check for result
if result[i] == max(result):
return candidates[i], i
# if we cannot find a solution, randomly select a choice
choice = random.choice(possible_candidates)
return candidates[choice], choice
def sorted_components(layout_area, row_height, comps):
"""
Sort the components by row
:param layout_area: a list [x, y] that stores the area of the layout
:param comps: a list of components that need to be sorted
:return: a list of rows, each containing a list of components in that row.
"""
num_rows = layout_area[1] // row_height + 1
rows = []
for i in range(num_rows):
rows.append([])
for comp in comps:
comp_y = comp.placed[1]
row_dest = comp_y // row_height
rows[row_dest].append(comp)
# sort vias in each row based on x-coordinate
for each_row in rows:
each_row.sort(key = lambda x: x.placed[0])
return rows
def predict_score(predicts, actuals):
"""
Find the number of correct cell predictions.
:param predicts: a list of predictions.
:param actuals: a list of actual cells.
:return: # correct predictions, # cells
"""
len_preds = len(predicts)
len_actuals = len(actuals)
shorter_len = min(len_preds, len_actuals)
gap_predict = 0
gap_actual = 0
num_correct = 0
# print (shorter_len)
for i in range(shorter_len):
# print (i)
# print (gap_predict)
# print (gap_actual)
# print ()
if predicts[i + gap_predict] == actuals[i + gap_actual]:
num_correct += 1
else:
if len_preds < len_actuals:
gap_actual += 1
len_preds += 1
elif len_preds > len_actuals:
gap_predict += 1
len_actuals += 1
return num_correct, len(actuals)
def plot_cell_w_vias():
# process each row, plot all cells
# for i in range(num_rows):
margin = 350
for i in range(1):
via_idx = 0
print (len(components[i]))
print (len(via1_sorted[i]))
for each_comp in components[i]:
comp_name = each_comp.name
macro_name = each_comp.macro
macro_data = lef_parser.macro_dict[macro_name]
num_vias = len(macro_data.pin_dict) - 2 # because of VDD and GND pins
# get the vias
cell_vias = via1_sorted[i][via_idx:via_idx + num_vias]
# update via_idx
via_idx += num_vias
# plot the cell
left_pt = [cell_vias[0][0][0] - margin, CELL_HEIGHT * i]
width = cell_vias[-1][0][0] - left_pt[0] + margin
# print (width)
img_file = plot_window(left_pt, width, CELL_HEIGHT, cell_vias,
lef_parser, macro=macro_name, comp = comp_name)
print (comp_name)
print (macro_name)
print (cell_vias)
print (via_idx)
print('Finished!')
def check_via_group(via_group, source_sink):
"""
Check the validity of each via set in the via group.
:param via_group: the via_group in question.
:return: via_group with all valid candidate(s)
"""
# valid for 2-via cell: 1 source, 1 sink
# valid for 3-via cell: 2 sink, 1 source
valid_group = []
for each_group in via_group:
num_vias = len(each_group)
num_source = 0
num_sink = 0
for each_via in each_group:
# 0 = sink, 1 = source
if source_sink[each_via[2]] == 1:
num_source += 1
elif source_sink[each_via[2]] == 0:
num_sink += 1
if num_source <= 1 and num_sink <=2:
valid_group.append(each_group)
return valid_group
def get_candidates(first_via_idx, via_list, std_cells):
"""
Generate a list of candidates from the first via.
Each standard cell will be considered for candidates.
If the standard cell cannot be placed there, the value is -1,
otherwise, it will be a list of vias.
:param first_via_idx: first via index in the via_list
:param via_list: the list of all vias (in a row)
:param std_cells: a list that stores information of std cells
:return: a list of groups of vias, or -1
"""
# candidates = [-1 for i in range(len(std_cells))]
candidates = []
first_via = via_list[first_via_idx]
# print (first_via)
first_via_x = first_via[0][0]
for i in range(len(std_cells)):
cell_width = std_cells[i][2]
        min_vias = std_cells[i][0]
max_vias = std_cells[i][1]
pin_left_dist = std_cells[i][3]
boundary = first_via_x + cell_width - pin_left_dist
# possible vias contain the vias inside the boundary
possible_vias = [first_via]
for j in range(first_via_idx + 1, len(via_list)):
if via_list[j][0][0] <= boundary:
possible_vias.append(via_list[j])
else:
break
# check the candidate against cell info
if len(possible_vias) > max_vias or len(possible_vias) < min_vias:
candidates.append(-1)
else:
candidates.append(possible_vias)
return candidates
def get_inputs_outputs(def_info):
"""
Method to get all inputs and outputs nets from a DEF file.
:param def_info: def info (already parsed).
:return: inputs and outputs
"""
    pins = def_info.pins.pins
inputs = []
outputs = []
for each_pin in pins:
pin_name = each_pin.name
direction = each_pin.direction.lower()
if direction == 'input':
inputs.append(pin_name)
elif direction == 'output':
outputs.append(pin_name)
return inputs, outputs
def recover_netlist(def_info, inputs, outputs, recovered_cells):
"""
Method to create a netlist from predicted cells
:param def_info: information from the DEF file
:param inputs: input pins of the design
:param outputs: output pins of the design
:param recovered_cells: recovered cells with input nets and output nets
:return: recovered netlist file name
"""
# NOTE: the order of nets is not like that in original netlist
design = def_info.design_name
nets = set(def_info.nets.net_dict.keys())
inputs_set = set(inputs)
outputs_set = set(outputs)
io = inputs_set | outputs_set
wires = nets - io
# print(wires)
# print(len(wires))
    ## mm/dd/yyyy hh:mm:ss format
date = time.strftime("%m/%d/%Y %H:%M:%S")
s = '#############################\n'
s += '# Generated by TMC\n'
s += '# Design: ' + design + '\n'
s += '# Date: ' + date + '\n'
s += '#############################\n\n'
# add module definition
s += 'module ' + design + ' ( '
num_ios = len(io)
idx = 0
for each_pin in io:
s += each_pin
idx += 1
if idx < num_ios:
s += ', '
s += ' );\n'
indent = ' '
# add input
num_in = len(inputs)
idx = 0
s += indent + 'input '
for each_in in inputs:
s += each_in
idx += 1
if idx < num_in:
s += ', '
s += ';\n'
# add output
num_out = len(outputs)
idx = 0
s += indent + 'output '
for each_out in outputs:
s += each_out
idx += 1
if idx < num_out:
s += ', '
s += ';\n'
# add wire
num_wire = len(wires)
idx = 0
s += indent + 'wire '
for each_wire in wires:
s += each_wire
idx += 1
if idx < num_wire:
s += ', '
s += ';\n'
# add cells
s += '\n'
cell_idx = 2
    for each_cell in recovered_cells:
cell_idx += 1
s += indent + each_cell[0] + ' U' + str(cell_idx) + ' ( '
in_nets = each_cell[1]
s += '.A(' + in_nets[0] + ')' + ', '
if len(in_nets) == 2:
s += '.B(' + in_nets[1] + ')' + ', '
out_net = each_cell[2]
s += '.Y(' + out_net + ')'
s += ' );\n'
# write to an output file
folder = './recovered/'
filename = design + '_recovered' + '.v'
print('Writing recovered netlist file...')
f = open(folder + filename, mode="w+")
f.write(s)
f.close()
print('Writing done.')
return filename
# Main Class
if __name__ == '__main__':
start_time = time.time()
def_path = './libraries/layout_yujie/c2670_gscl45nm_tri_routing_layer6.def'
def_parser = DefParser(def_path)
def_parser.parse()
scale = def_parser.scale
lef_file = "./libraries/FreePDK45/gscl45nm.lef"
lef_parser = LefParser(lef_file)
lef_parser.parse()
macro_dict = lef_parser.macro_dict
CELL_HEIGHT = int(float(scale) * lef_parser.cell_height)
# print (CELL_HEIGHT)
print ("Process file:", def_path)
all_via1 = get_all_vias(def_parser, via_type="M2_M1_via")
# build the net_via dictionary
nets = def_parser.nets.nets
# initialize the nets_via_dict
nets_vias_dict = {}
for net in nets:
net_name = net.name
nets_vias_dict[net_name] = []
# add vias to nets_dict
for each_via in all_via1:
net = each_via[2]
nets_vias_dict[net].append(each_via)
# sort the vias by row
via1_sorted = sort_vias_by_row(def_parser.diearea[1], CELL_HEIGHT, all_via1)
# add inputs and outputs from the design to via info
inputs, outputs = get_inputs_outputs(def_parser)
for each_in in inputs:
for each_via in nets_vias_dict[each_in]:
each_via[3] = 0
for each_out in outputs:
for each_via in nets_vias_dict[each_out]:
each_via[3] = 1
MAX_DISTANCE = 2280 # OR2 cell width, can be changed later
components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
def_parser.components.comps)
num_rows = len(components)
###############
# DO PREDICTION
# predict_row()
# We can load the trained model
pickle_filename = "./trained_models/logit_model_100916_2.pickle"
try:
with open(pickle_filename, 'rb') as f:
logit_model = pickle.load(f)
except Exception as e:
print('Unable to read data from', pickle_filename, ':', e)
labels = {0: 'and2', 1: 'invx1', 2: 'invx8', 3: 'nand2', 4: 'nor2',
5: 'or2'}
macro_from_labels = {0: 'AND2X1', 1: 'INVX1', 2: 'INVX8', 3: 'NAND2X1',
4: 'NOR2X1', 5: 'OR2X1'}
cell_labels = {'AND2X1': 'and2', 'INVX1': 'invx1', 'NAND2X1': 'nand2',
'NOR2X1': 'nor2', 'OR2X1': 'or2', 'INVX8': 'invx8'}
##############
# List of standard cells
std_cell_info = {}
# info includes (min num vias, max num vias, width,
# distance from left boundary to first pin)
    # It is unclear whether the true maximum number of vias matters here;
    # since the exact maximum is unknown, min + 1 is used as a reasonable upper bound.
# 0 is and2, 1 is invx1, etc.
std_cell_info[0] = (3, 4, 2280, 295)
std_cell_info[1] = (2, 3, 1140, 315)
std_cell_info[2] = (2, 3, 2660, 695)
std_cell_info[3] = (3, 4, 1520, 90)
std_cell_info[4] = (3, 4, 1520, 315)
std_cell_info[5] = (3, 4, 2280, 695)
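    # Illustrative sketch (not executed): this is how a candidate via group is
    # screened against std_cell_info, mirroring the check at the top of
    # get_candidates(). For an invx1 (index 1), a via group only remains a
    # candidate if its size lies within the min/max via bounds:
    #   min_vias, max_vias, width, first_pin_offset = std_cell_info[1]
    #   is_candidate = min_vias <= len(possible_vias) <= max_vias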
# process
# print the sorted components
components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
def_parser.components.comps)
correct = 0
total_cells = 0
predicts = []
actuals = []
cells_reco = [] # a list of recovered cells
# via_groups is only one row
for i in range(len(via1_sorted)):
# for i in range(0, 1):
print ('Process row', (i + 1))
# each via group in via_groups consist of two candidates
# via_groups = group_via(via1_sorted[i], 3, MAX_DISTANCE)
visited_vias = [] # later, make visited_vias a set to run faster
cells_pred = []
via_idx = 0
while via_idx < len(via1_sorted[i]):
# choosing candidates
candidates = get_candidates(via_idx, via1_sorted[i], std_cell_info)
best_group, prediction = predict_cell(candidates, i, logit_model,
lef_parser, std_cell_info)
# recover the cell information
macro_name = macro_from_labels[prediction]
macro_info = macro_dict[macro_from_labels[prediction]]
num_pins = len(macro_info.info["PIN"]) - 2
# NOTE: we assume inputs are A, B and output is Y
# for each_pin in pins:
# print(each_pin.name)
recover = []
output_net = best_group[-1][2]
input_nets = []
for each_via in best_group:
if each_via[2] != output_net:
input_nets.append(each_via[2])
# NOTE: the following lines only work for 2-pin and 3-pin cell
recover.append(macro_name)
recover.append(input_nets)
recover.append(output_net)
cells_reco.append(recover)
via_idx += len(best_group)
# print (best_group)
# print (labels[prediction])
cells_pred.append(labels[prediction])
for each_via in best_group:
visited_vias.append(each_via)
print (cells_pred)
print (len(cells_pred))
actual_comp = []
actual_macro = []
for each_comp in components[i]:
actual_comp.append(cell_labels[each_comp.macro])
actual_macro.append(each_comp.macro)
print (actual_comp)
print (len(actual_comp))
num_correct, num_cells = predict_score(cells_pred, actual_comp)
correct += num_correct
total_cells += num_cells
predicts.append(cells_pred)
actuals.append(actual_comp)
print ()
print ("\nTotal number of cells: ", total_cells)
print ("Number of correct cells predicted: ", correct)
print ("Accuracy rate (%): ", correct / total_cells * 100)
# print the execution time
print("\n--- Execution time:")
print("--- %s seconds ---" % (time.time() - start_time))
print("\n")
# remove images used
shutil.rmtree("./images")
if not os.path.exists("./images"):
os.makedirs("./images")
# count the time to generate the netlist separately
start_time = time.time()
# write the recovered verilog netlist
recover_netlist(def_parser, inputs, outputs, cells_reco)
print("\n--- Generate netlist time:")
print("--- %s seconds ---" % (time.time() - start_time))
|
python
|
from django.urls import path
from . import views
urlpatterns = [
path("",views.home),
path("posts/",views.PostList.as_view()),
path("posts/api/v1/",views.getDataJson),
path("posts/<int:pk>",views.PostDetails.as_view()),
path("post/search/<int:pk>",views.get_Data),
path('modules/',views.ModuleList.as_view()),
path("modules/<int:pk>",views.ModulePost.as_view())
]
|
python
|
import sys
fname = sys.argv[1]
ofname = fname + '.raw'
idx = 0
with open(ofname, 'w') as fo:
for line in open(fname).readlines():
fo.write("# ::id %d\n"%(idx, ))
fo.write("# ::snt %s\n"%(line.rstrip()))
fo.write("(d / dummy)\n\n")
idx += 1
|
python
|
# Modify print_tree_inorder so that it puts parentheses around every operator and pair of operands.
# Is the output correct and unambiguous? Are the parentheses always necessary?
class Tree:
def __init__(self, cargo, left=None, right=None):
self.cargo = cargo
self.left = left
self.right = right
def __str__(self):
return str(self.cargo)
def total(tree):
if tree is None: return 0
return total(tree.left) + total(tree.right) + tree.cargo
def print_tree(tree):
if tree is None: return
print(tree.cargo, end=" ")
print_tree(tree.left)
print_tree(tree.right)
def print_tree_postorder(tree):
if tree is None: return
print_tree_postorder(tree.left)
print_tree_postorder(tree.right)
print(tree.cargo, end=" ")
def print_tree_inorder(tree):
if tree is None: return
print_tree_inorder(tree.left)
print("({0})".format(tree.cargo), end=" ") # changed format
print_tree_inorder(tree.right)
def print_tree_indented(tree, level=0):
if tree is None: return
print_tree_indented(tree.right, level + 1)
print(" " * level + str(tree.cargo))
print_tree_indented(tree.left, level + 1)
def get_token(token_list, expected):
if token_list[0] == expected:
del token_list[0]
return True
return False
def get_number(token_list):
if get_token(token_list, "("):
x = get_sum(token_list)
if not get_token(token_list, ")"):
raise ValueError("Missing close parenthesis")
return x
else:
x = token_list[0]
if type(x) != type(0): return None
del token_list[0]
        return Tree(x, None, None)
def get_product(token_list):
a = get_number(token_list)
if get_token(token_list, "*"):
b = get_product(token_list)
return Tree("*", a, b)
return a
def get_sum(token_list):
a = get_product(token_list)
if get_token(token_list, "+"):
b = get_sum(token_list)
return Tree("+", a, b)
return a
|
python
|
from flask_login import UserMixin
from sqlalchemy import Column, Integer, String, UniqueConstraint, CheckConstraint
from .base import Base
class User(UserMixin, Base):
"""
Class representing an application user
"""
__tablename__ = "Users"
#: Primary key
id = Column(Integer, primary_key=True)
#: Username
username = Column(String, unique=True, nullable=False)
#: Salt used in password hashing
salt = Column(String, nullable=False)
#: Hashed password
password = Column(String, nullable=False)
__table_args__ = (UniqueConstraint('username', name='USER_NAME_UX'),
CheckConstraint("LENGTH(TRIM(username)) > 0"),
CheckConstraint("LENGTH(TRIM(password)) > 0"),
CheckConstraint("LENGTH(TRIM(salt)) > 0"))
def __repr__(self):
return f"{type(self).__name__}(id={self.id!r}, username={self.username!r}, salt={self.salt!r}," \
f"password={self.password!r})"
|
python
|
import requests
from bs4 import BeautifulSoup
import sqlite3
from sqlite3 import Error
from datetime import date
# get today's day, month and year
today = date.today()
d = today.strftime('%d/%m/%y')
page = 1  # page number on the site being scraped
# create the database where the scraped information will be stored
conn = sqlite3.connect('data.db')
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS laptops ([id] INTEGER PRIMARY KEY, [LAP_NAMES] TEXT, [PRICE] INTEGER, [DATE] TEXT);''')
conn.commit()
# fetch the data from the link and process it (browser headers are set so the site treats the request as coming from a real user)
def url():
url = f'https://alta.ge/notebooks-page-{page}.html'
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
r = requests.get(url, headers=headers)
soup = BeautifulSoup(r.text, 'html.parser')
print(r)
heds = soup.find_all('div', {'class': 'ty-column3'})
return heds
# look for the information, print it and also insert it into the database we created
def data():
info = []
for i in url():
if i.find('a', class_='product-title') is not None:
n = i.find('a', class_='product-title').text
else:
n = 'none'
if i.find('span', class_='ty-price-num') is not None:
m = i.find('span', class_='ty-price-num').text
c.execute('''INSERT INTO laptops (LAP_NAMES, PRICE, DATE) VALUES(?, ?, ?)''', (n, m, d))
conn.commit()
else:
m = 'none'
info.append(n + ': ' + m)
return info
if __name__ == '__main__':
while page <= 7:
print(data())
page += 1
|
python
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import nltk
import numpy as np
from nltk.metrics import *
from nltk.util import ngrams
import enchant # spell checker library pyenchant
from enchant.checker import SpellChecker
from nltk.stem import PorterStemmer
from nltk.corpus import words
import string
import re
from collections import Counter
from nltk.corpus import brown
from nltk.probability import FreqDist
from nltk.metrics import edit_distance
from sympound import sympound
import re
from weighted_levenshtein import lev
class WordSuggestor():
'''
Code obtained from http://norvig.com/spell-correct.html.
'''
def __init__(self):
self.words = Counter(brown.words())
def P(self, word):
"Probability of `word`."
N = sum(self.words.values())
return self.words[word] / N
def correction(self, word):
"Most probable spelling correction for word."
        return max(self.candidates(word), key=self.P)
def candidates(self, word):
"Generate possible spelling corrections for word."
return (self.known([word]) or self.known(self.edits1(word)) or self.known(self.edits2(word)) or [word])
def known(self, words):
"The subset of `words` that appear in the dictionary of WORDS."
return set(w for w in words if w in self.words)
def edits1(self, word):
"All edits that are one edit away from `word`."
letters = 'abcdefghijklmnopqrstuvwxyz'
splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
deletes = [L + R[1:] for L, R in splits if R]
transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R)>1]
replaces = [L + c + R[1:] for L, R in splits if R for c in letters]
inserts = [L + c + R for L, R in splits for c in letters]
return set(deletes + transposes + replaces + inserts)
def edits2(self, word):
"All edits that are two edits away from `word`."
return (e2 for e1 in self.edits1(word) for e2 in self.edits1(e1))
class OcrDistanceMeasure():
# Helper class to obtain a handwriting error weighted edit distance. The weighted edit distance class can be found in
# https://github.com/infoscout/weighted-levenshtein.
# Substitute_costs.txt, insertion_costs and deletion_costs are calculated in
# https://github.com/ThomasDelteil/Gluon_OCR_LSTM_CTC/blob/language_model/model_distance.ipynb
def __init__(self):
self.substitute_costs = self.make_substitute_costs()
self.insertion_costs = self.make_insertion_costs()
self.deletion_costs = self.make_deletion_costs()
def make_substitute_costs(self):
substitute_costs = np.loadtxt('models/substitute_costs.txt', dtype=float)
#substitute_costs = np.ones((128, 128), dtype=np.float64)
return substitute_costs
def make_insertion_costs(self):
insertion_costs = np.loadtxt('models/insertion_costs.txt', dtype=float)
#insertion_costs = np.ones(128, dtype=np.float64)
return insertion_costs
def make_deletion_costs(self):
deletion_costs = np.loadtxt('models/deletion_costs.txt', dtype=float)
#deletion_costs = np.ones(128, dtype=np.float64)
return deletion_costs
def __call__(self, input1, input2):
return lev(input1, input2, substitute_costs=self.substitute_costs,
insert_costs=self.insertion_costs,
delete_costs=self.deletion_costs)
class LexiconSearch:
'''
Lexicon search was based on https://github.com/rameshjesswani/Semantic-Textual-Similarity/blob/master/nlp_basics/nltk/string_similarity.ipynb
'''
def __init__(self):
self.dictionary = enchant.Dict('en')
self.word_suggestor = WordSuggestor()
self.distance_measure = OcrDistanceMeasure()
def suggest_words(self, word):
candidates = list(self.word_suggestor.candidates(word))
output = []
for word in candidates:
if word[0].isupper():
output.append(word)
else:
if self.dictionary.check(word):
output.append(word)
return output
def minimumEditDistance_spell_corrector(self,word):
max_distance = 3
if (self.dictionary.check(word.lower())):
return word
suggested_words = self.suggest_words(word)
num_modified_characters = []
if len(suggested_words) != 0:
for sug_words in suggested_words:
num_modified_characters.append(self.distance_measure(word, sug_words))
minimum_edit_distance = min(num_modified_characters)
best_arg = num_modified_characters.index(minimum_edit_distance)
if max_distance > minimum_edit_distance:
best_suggestion = suggested_words[best_arg]
return best_suggestion
else:
return word
else:
return word
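# Minimal usage sketch (assumes the cost matrices under ./models/ exist, the NLTK
# "brown" corpus is downloaded, and a pyenchant English dictionary is installed;
# the misspelled word below is only an example):
#   searcher = LexiconSearch()
#   corrected = searcher.minimumEditDistance_spell_corrector("recieve")
#   print(corrected)  # expected to be a nearby dictionary word such as "receive"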
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 03 16:32:53 2016
@author: Peter
A collection of functions that can be used to generate a mask for a given tissue.
These functions were fine-tuned on personal data; use them at your own risk.
"""
import numpy as np
from skimage.morphology import disk, opening, closing, dilation, remove_small_objects
from skimage.filters import threshold_otsu
from skimage.measure import label
from scipy.ndimage.morphology import binary_fill_holes
def opening_(image, disk_size=7):
# Computes an opening of size disk.
inter_image = image.copy()
inter_image = opening(inter_image, disk(disk_size))
return inter_image
def tissue_thresh(image, thresh=None):
# tissue is in black... Tries to find the best background
if thresh is None:
thresh = threshold_otsu(image)
binary = image > thresh
if binary.dtype == 'bool':
binary = binary + 0
return binary
def fill_hole(bin_image, invert=False):
    # fills holes
values = np.unique(bin_image)
if len(values) > 2:
print("Not binary image")
return []
background = min(values)
bin_image -= background
bin_image[bin_image > 0] = 1
if invert:
bin_image -= 1
bin_image[bin_image < 0] = 1
result = np.copy(bin_image)
binary_fill_holes(bin_image, output=result)
return result
def remove_border(image_bin, border=15):
# removes borders
neg_border = border * -1
result = np.copy(image_bin)
result[:, :] = 0
result[border:neg_border, border:neg_border] = image_bin[
border:neg_border, border:neg_border]
return result
def remove_isolated_points(binary_image, thresh=100):
# removing tiny areas...
# pdb.set_trace()
lbl = label(binary_image)
lbl = remove_small_objects(lbl, thresh)
binary = (lbl > 0).astype(int)
return binary
def find_ticket(rgb_image, _3tuple=(80, 80, 80)):
# Find the "black ticket on the images"
temp_image_3 = np.copy(rgb_image)
temp_image_3[:, :, :] = 0
for i in range(3):
temp_image_1 = np.zeros(shape=rgb_image.shape[0:2])
temp_image_1[np.where(rgb_image[:, :, i] < _3tuple[i])] = 1
temp_image_3[:, :, i] = temp_image_1
temp_resultat = temp_image_3.sum(axis=2)
temp_resultat[temp_resultat > 2] = 3
temp_resultat[temp_resultat < 3] = 0
temp_resultat[temp_resultat == 3] = 1
#temp_resultat = Filling_holes_2(temp_resultat)
temp_resultat = closing(temp_resultat, disk(20))
temp_resultat = opening_(temp_resultat, 20)
temp_resultat = remove_border(temp_resultat)
return temp_resultat
def preprocessing(image, thresh=200, invert=True):
inter = opening_(image)
inter = tissue_thresh(inter, thresh)
inter = fill_hole(inter, invert=invert)
inter = remove_border(inter)
res = remove_isolated_points(inter)
return res
def combining(numpy_array):
res = np.sum(numpy_array, axis=2)
res[res > 0] = 1
return res
def roi_binary_mask2(sample, size=5, ticket=(80, 80, 80)):
# very slow function at resolution 4
preproc_res = np.copy(sample)
for i in range(3): # RGB
# this one is painfully slow..
preproc_res[:, :, i] = preprocessing(sample[:, :, i])
res = combining(preproc_res)
ticket = find_ticket(sample, ticket)
res = res - ticket
res[res > 0] = 1
res[res < 0] = 0
res = opening_(res, size)
return res
def roi_binary_mask(sample, size=5):
val = threshold_otsu(sample[:, :, 0])
mask = (sample[:, :, 0] < val).astype(int)
mask = dilation(mask, disk(size))
mask = fill_hole(mask)
mask = remove_isolated_points(mask)
return mask
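# Minimal usage sketch (assumes an RGB image loaded as a numpy array, e.g. with
# skimage.io.imread; the file name is hypothetical):
#   from skimage.io import imread
#   rgb = imread("tissue_slide.png")
#   mask = roi_binary_mask(rgb, size=5)    # fast Otsu-based mask on the red channel
#   mask2 = roi_binary_mask2(rgb, size=5)  # slower per-channel mask that also removes the black ticket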
|
python
|
from itertools import permutations
def is_triangle(sides) -> bool:
    # permutations of the actual three sides avoids spurious checks like (a, a, c)
    # that would wrongly reject valid triangles such as (2, 4, 5)
    return all(sum(pair[:2]) >= pair[2] != 0 for pair in permutations(sides, 3))
def equilateral(sides):
return is_triangle(sides) and sides[0] == sides[1] == sides[2]
def isosceles(sides):
return is_triangle(sides) and (
sides[0] == sides[1] or sides[0] == sides[2] or sides[1] == sides[2]
)
def scalene(sides):
    return is_triangle(sides) and len(set(sides)) == 3
|
python
|
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
from tests.testbase import BaseClass
import tempfile
from requre.simple_object import Simple
from requre.online_replacing import replace
def x():
return tempfile.mktemp()
class Duplicated(BaseClass):
@replace("tests.test_duplication.x", decorate=Simple.decorator_plain())
@replace("tempfile.mktemp", decorate=Simple.decorator_plain())
def test(self):
a = x()
b = tempfile.mktemp()
self.assertNotEqual(a, b)
|
python
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.tests import base
class BaseTestCase(base.BaseTestCase):
""" Defines all the dummy resources needed for test_filter.py
"""
def __init__(self, *args, **kwargs):
super(BaseTestCase, self).__init__(*args, **kwargs)
self.service_info = {}
self.desc1 = ["fip=1.203.1.108",
"tunnel_local_cidr=11.0.0.0/24",
"user_access_ip=1.203.2.101",
"fixed_ip=192.168.0.3", "standby_fip=",
"service_vendor=vyos",
"stitching_cidr=192.168.0.0/28",
"stitching_gateway=192.168.0.1",
"mgmt_gw_ip=120.0.0.1"]
self.desc2 = ["fip=1.203.1.109",
"tunnel_local_cidr=12.0.0.0/24",
"user_access_ip=1.203.2.102",
"fixed_ip=192.168.0.4", "standby_fip=",
"service_vendor=vyos",
"stitching_cidr=192.168.0.0/28",
"stitching_gateway=192.168.1.1",
"mgmt_gw_ip=121.0.0.1"]
self.name = ["aff8163b-f964-4ad7-a222-0e0a6e5593fe-8eacf5cf",
"-1e92-4e7b-90c4-cc68ef8c4e88"]
self.ssl_vpn_connections = [{
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"id": "b88b1d77-fbf8-45b5-adc3-9cd5169c7103",
"name": "ssl_vpn_connections1",
"admin_state_up": True,
"status": "ACTIVE",
"vpnservice_id": "19d22704-69ea-40c8-8bcf-2e1ffd697e33",
"credential_id": "8163b-f964-4ad7-a222-0e0a6e5593feaff",
"client_address_pool_cidr": "11.0.0.0/24"
}]
self.ports = [{
"status": "ACTIVE",
"name": "",
"allowed_address_pairs": [],
"admin_state_up": True,
"network_id": "92f423a7-f44e-4726-b453-c8a1369a3ad0",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"extra_dhcp_opts": [],
"binding:vnic_type": "normal",
"device_owner": "network:dhcp",
"mac_address": "fa:16:3e:01:19:11",
"fixed_ips": [{
"subnet_id": "2670bdcd-1bcf-4b97-858d-ab0d621983cc",
"ip_address": "11.0.0.3"
},
{
"subnet_id": "94aee832-935b-4e23-8f90-b6a81b0195b1",
"ip_address": "192.168.0.2"
}],
"id": "cfd9fcc0-c27b-478b-985e-8dd73f2c16e8",
"security_groups": [],
"device_id": ("dhcpf986c817-fd54-5bae-a8e4-e473b69100d2-"
"92f423a7-f44e-4726-b453-c8a1369a3ad0")
},
{
"status": "ACTIVE",
"name": ("aff8163b-f964-4ad7-a222-0e0a6e5593fe-"
"ea9ff596-51bc-4381-8aff-ee9f0ef7e319"),
"allowed_address_pairs": [],
"admin_state_up": True,
"network_id": "0ced2567-47a0-4b67-be52-0e9695e8b0e6",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"extra_dhcp_opts": [],
"binding:vnic_type": "normal",
"device_owner": "network:router_interface",
"mac_address": "fa:16:3e:1b:f2:44",
"fixed_ips": [{
"subnet_id": "ea9ff596-51bc-4381-8aff-ee9f0ef7e319",
"ip_address": "11.0.3.2"
}],
"id": "31df0d68-e9ea-4713-a629-29e6d87c2727",
"security_groups": ["fb44b3f5-a319-4176-9e3b-361c5faafb66"],
"device_id": "aff8163b-f964-4ad7-a222-0e0a6e5593fe"
},
{
"status": "ACTIVE",
"name": ";".join(self.name),
"allowed_address_pairs": [],
"admin_state_up": True,
"network_id": "2e9652e8-bd95-472a-96b5-6a7939ae0f8d",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"extra_dhcp_opts": [],
"binding:vnic_type": "normal",
"device_owner": "network:router_interface",
"mac_address": "fa:16:3e:49:44:b3",
"fixed_ips": [{
"subnet_id": "8eacf5cf-1e92-4e7b-90c4-cc68ef8c4e88",
"ip_address": "11.0.4.2"
}],
"id": "214eaa12-36c9-45b1-8fee-350ce2ff2dae",
"security_groups": ["fb44b3f5-a319-4176-9e3b-361c5faafb66"],
"device_id": "aff8163b-f964-4ad7-a222-0e0a6e5593fe"
}]
self.subnets = [{
"name": "apic_owned_ew-consumer",
"enable_dhcp": True,
"network_id": "0ced2567-47a0-4b67-be52-0e9695e8b0e6",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"dns_nameservers": [],
"gateway_ip": "11.0.3.1",
"ipv6_ra_mode": None,
"allocation_pools": [{"start": "11.0.3.2",
"end": "11.0.3.254"
}],
"host_routes": [],
"ip_version": 4,
"ipv6_address_mode": None,
"cidr": "11.0.3.0/24",
"id": "ea9ff596-51bc-4381-8aff-ee9f0ef7e319"
},
{
"name": "apic_owned_ew-provider",
"enable_dhcp": True,
"network_id": "2e9652e8-bd95-472a-96b5-6a7939ae0f8d",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"dns_nameservers": [],
"gateway_ip": "11.0.4.1",
"ipv6_ra_mode": None,
"allocation_pools": [{"start": "11.0.4.2",
"end": "11.0.4.254"
}],
"host_routes": [],
"ip_version": 4,
"ipv6_address_mode": None,
"cidr": "11.0.4.0/24",
"id": "94aee832-935b-4e23-8f90-b6a81b0195b1"
}]
self.routers = [{
"status": "ACTIVE",
"external_gateway_info": {
"network_id": (
"a413e04d-1431-4b21-8327-d4de25fa604b"),
"external_fixed_ips": [{
"subnet_id": (
"fcc74b65-dafe-4b74-91fa-028dec8467a8"),
"ip_address": "169.254.2.148"
}]},
"name": "remote-vpn-client-pool-cidr-l3policy-Datacenter-Out",
"admin_state_up": True,
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"routes": [],
"id": "61189c93-d8c7-46ff-b1b1-6a6db2b9ae0a"
},
{
"status": "ACTIVE",
"external_gateway_info": {
"network_id": (
"a413e04d-1431-4b21-8327-d4de25fa604b"),
"external_fixed_ips": [{
"subnet_id": (
"fcc74b65-dafe-4b74-91fa-028dec8467a8"),
"ip_address": "169.254.2.150"
}]},
"name": "default-Datacenter-Out",
"admin_state_up": True,
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"routes": [],
"id": "aff8163b-f964-4ad7-a222-0e0a6e5593fe"
}]
self.vpnservices = [{
"router_id": "aff8163b-f964-4ad7-a222-0e0a6e5593fe",
"status": "ACTIVE",
"name": "VPNService",
"admin_state_up": True,
"subnet_id": "94aee832-935b-4e23-8f90-b6a81b0195b1",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"id": "19d22704-69ea-40c8-8bcf-2e1ffd697e33",
"description": ";".join(self.desc1)
},
{
"router_id": "61189c93-d8c7-46ff-b1b1-6a6db2b9ae0a",
"status": "ACTIVE",
"name": "VPNService1",
"admin_state_up": True,
"subnet_id": "94aee832-935b-4e23-8f90-b6a81b0195b1",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"id": "19d22704-69ea-40c8-8bcf-2e1ffd697f44",
"description": ";".join(self.desc2)
}]
self.ipsecpolicies = [{
"encapsulation_mode": "tunnel",
"encryption_algorithm": "3des",
"pfs": "group5",
"lifetime":
{"units": "seconds", "value": 3600},
"name": "IPsecPolicy",
"transform_protocol": "esp",
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"id": "b88b1d77-fbf8-45b5-adc3-9cd5169c7102",
"auth_algorithm": "sha1", "description": ""
}]
self.ikepolicies = [{
"encryption_algorithm": "3des",
"pfs": "group5",
"name": "IKEPolicy",
"phase1_negotiation_mode":
"main", "lifetime":
{"units": "seconds", "value": 3600},
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"ike_version": "v1",
"id": "ae1dd05c-ac66-45e5-868e-36f20c9aa222",
"auth_algorithm": "sha1",
"description": ""
}]
self.ipsec_site_connections = [{
"status": "INIT",
"psk": "sapna",
"initiator": "bi-directional",
"name": "site_to_site_connection1",
"admin_state_up": True,
"tenant_id": "5e67167662f94fd5987e12a68ea6c1d8",
"ipsecpolicy_id": "b88b1d77-fbf8-45b5-adc3-9cd5169c7102",
"auth_mode": "psk",
"peer_cidrs": ["11.0.1.0/24"],
"mtu": 1500,
"ikepolicy_id": "ae1dd05c-ac66-45e5-868e-36f20c9aa222",
"dpd": {"action": "hold", "interval": 30, "timeout": 120},
"route_mode": "static",
"vpnservice_id": "19d22704-69ea-40c8-8bcf-2e1ffd697e33",
"peer_address": "1.203.2.1",
"peer_id": "1.203.2.1",
"id": "9736cb21-4996-4dae-8e66-d13c24c44a8b",
"description": ";".join(self.desc1)
}]
# update the below lists as per the future requirements
self.firewalls = []
self.firewall_policies = []
self.firewall_rules = []
def _test_get_vpn_info(self):
"""Prepares VPN service_info needed for VPN context
Returns: VPN service info
"""
self.service_info['vpnservices'] = self.vpnservices
self.service_info['ikepolicies'] = self.ikepolicies
self.service_info['ipsecpolicies'] = self.ipsecpolicies
self.service_info['ipsec_site_conns'] = self.ipsec_site_connections
self.service_info['ssl_vpn_conns'] = self.ssl_vpn_connections
self.service_info['routers'] = self.routers
self.service_info['subnets'] = self.subnets
return self.service_info
def _test_get_fw_info(self):
"""Prepares FW service_info needed for FW context
Returns: FW service info
"""
self.service_info['firewalls'] = self.firewalls
self.service_info['firewall_policies'] = self.firewall_policies
self.service_info['firewall_rules'] = self.firewall_rules
return self.service_info
|
python
|
from django.db import models
class Conta(models.Model):
descricao = models.CharField(max_length=150, verbose_name="Descrição")
def __str__(self):
return '{}'.format(self.descricao)
class Responsavel(models.Model):
nome = models.CharField(max_length=150, verbose_name="Nome")
def __str__(self):
return '{}'.format(self.nome)
class Meta:
verbose_name_plural = "Responsáveis"
class TipoDespesa(models.Model):
descricao = models.CharField(max_length=150, verbose_name="Descrição")
def __str__(self):
return '{}'.format(self.descricao)
class Meta:
verbose_name_plural = "Tipo de Despesas"
class Despesa(models.Model):
conta = models.ForeignKey(Conta, on_delete=models.CASCADE, verbose_name="Conta Vinculada")
responsavel = models.ForeignKey(Responsavel, on_delete=models.CASCADE, verbose_name="Responsável")
tipoDespesa = models.ForeignKey(TipoDespesa, on_delete=models.CASCADE, verbose_name="Tipo de Despesa")
descricao = models.CharField(max_length=150, verbose_name="Descrição")
valor = models.DecimalField(max_digits=10, decimal_places=2, verbose_name="Valor Compra")
dataCompra = models.DateField(auto_now=True, verbose_name="Data da Compra")
def __str__(self):
return '{} ({}) - {}'.format(self.descricao, self.conta, self.responsavel)
class Parcelas(models.Model):
despesa = models.ForeignKey(Despesa, on_delete=models.CASCADE, verbose_name="Despesa")
valor = models.DecimalField(max_digits=10, decimal_places=2, verbose_name="Valor")
dataVencimento = models.DateField(verbose_name="Data de Vencimento")
pagamento = models.BooleanField(default=False, verbose_name="Pago?")
class Meta:
verbose_name = "Parcelas"
verbose_name_plural = "Parcelas"
|
python
|
class serverLoginValidation():
def __init__(self, email, passwd):
self.email = email
self.passwd = passwd
def validateUser(self):
self.db = DataBase()
self.userdata = self.db.userDefined(self.email, self.passwd)
if self.userdata:
return True
else:
return False
|
python
|
from ep.utils import *
def first_run(download=False):
"""
    Create CSV files from scratch
:param download: boolean.
Set to False if files have been downloaded and saved to ./resources
Set to True to download files from EIA website
"""
setupDirectories()
interest_dataframes = get_new_dataframes()
for year in range(2005, END_YEAR+1):
for month in range(12):
datetime_object = getDateObject(month+1, year)
file_name = getFileName(datetime_object)
if datetime_object <= getLastDayPreviousMonth():
print('Processing {}..'.format(file_name))
file = getFile(file_name, download=download)
with open(file, 'rb') as f:
interest_dataframes = update_dataframes_with_file(f, datetime_object, year, month, interest_dataframes)
write_dataframes_to_csv(interest_dataframes)
def update(year, month, download):
"""
Assuming an analyst or an automated process has identified a new file published on EIA
Assuming existing CSVs are saved in ./csv/, this function updates all csv
files with a new EIA file identified by the year and month parameter
:param year: int, year of the new file, e.g. 2021
:param month: int, month of the new file, between 1-12
:param download: bool
True to download from EIA
False to find file already downloaded into ./resources/
e.g. If apr21_base.xlsx is identified, year will be 2021, month will be 4
"""
datetime_object = getDateObject(month, year)
if datetime_object >= datetime.today():
raise(ValueError(
'Cannot download file for {}. '
'Check the input year and month'.format(datetime_object)))
file_name = getFileName(datetime_object)
file = getFile(file_name, download=download)
interest_dataframes = get_dataframes()
with open(file, 'rb') as f:
interest_dataframes = update_dataframes_with_file(f, datetime_object, year, month-1, interest_dataframes)
write_dataframes_to_csv(interest_dataframes)
def main():
    # Run first_run function to build CSV files from scratch
# first_run()
# Run update function to update the existing CSV files with a new EIA file
# update(year=2021, month=4, download=False)
pass
|
python
|
import os
import sys
import argparse
def main(argv=sys.argv[1:]):
pass
|
python
|
import dash_mantine_components as dmc
component = dmc.Affix(
dmc.Button("I'm in an Affix Component"), position={"bottom": 20, "right": 20}
)
|
python
|
import re
import math
import tweepy
import feedparser
from tweepy import OAuthHandler
from textblob import TextBlob
class TwitterClient(object):
# keys and tokens from the Twitter Dev Console
consumer_key = ''
consumer_secret = ''
access_token = ''
access_token_secret = ''
positive_news = []
negative_news = []
neutral_news = []
all_news = []
positive_tweets = []
negative_tweets = []
neutral_tweets = []
all_tweets = []
def __init__(self):
# attempt authentication
try:
# create OAuthHandler object
self.auth = OAuthHandler(self.consumer_key, self.consumer_secret)
# set access token and secret
self.auth.set_access_token(self.access_token, self.access_token_secret)
# create tweepy API object to fetch tweets
self.api = tweepy.API(self.auth)
except:
print("Error: Authentication Failed")
def read_tweets(self, query, count):
try:
# call twitter api to fetch tweets
tweet_list = self.api.search(q=query, count=count)
for tweet in tweet_list:
# ignore retweets
if (not tweet.retweeted) and ('RT @' not in tweet.text):
# print(tweet.text)
self.all_tweets.append(self.clean_text(tweet.text))
except Exception as e:
# print error (if any)
print("Error : " + str(e))
def read_google_news_feed(self, quote, news_count=100):
try:
parsed_news = feedparser.parse(
"https://finance.google.com/finance/company_news?q={}&output=rss&num={}".format(quote, news_count))
for index in range(len(parsed_news['entries'])):
self.all_news.append(self.clean_text(parsed_news['entries'][index]['title']))
except Exception as e:
print("<p>Error: %s</p>" % e)
    # clean the text by removing user mentions, urls and other special symbols
def clean_text(self, text):
return ' '.join(
re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", text, flags=re.MULTILINE).split())
def predict_stock_sentiment(self, quote, count=100):
result = {}
news_sentiment = {}
tweet_sentiment = {}
# read twitter feed, use keywords with company quotes + " Stocks " etc.
tokens = quote.split(":")
if len(tokens) > 1:
tweet_search = tokens[1] + " Stocks "
else:
tweet_search = quote + " Stocks "
self.read_tweets(tweet_search + '-filter:retweets', count)
self.read_google_news_feed(quote, count)
for news in self.all_news:
# saving sentiment of tweet
# print('News- '+news)
analysis = TextBlob(news)
if analysis.sentiment.polarity > 0:
self.positive_news.append(news)
elif analysis.sentiment.polarity == 0:
self.neutral_news.append(news)
else:
self.negative_news.append(news)
total_news_count = len(self.all_news)
positive_news_count = len(self.positive_news)
negative_news_count = len(self.negative_news)
neutral_news_count = len(self.neutral_news)
news_sentiment['positive_per'] = round(100 * (positive_news_count / total_news_count), 2)
news_sentiment["negative_per"] = round(100 * (negative_news_count / total_news_count), 2)
news_sentiment["neutral_per"] = round(100 * (neutral_news_count / total_news_count), 2)
news_sentiment["positive_count"] = positive_news_count
news_sentiment["negative_count"] = negative_news_count
news_sentiment["neutral_count"] = neutral_news_count
print("\n\nNegative news:")
for news in self.negative_news[:10]:
print('-ve news- ' + news)
print("\n\nPositive news:")
for news in self.positive_news[:10]:
print('+ve news- ' + news)
print("\n\nNeutral news:")
for news in self.neutral_news[:10]:
print('= news- ' + news)
for tweet in self.all_tweets:
# saving sentiment of tweet
# print('News- '+news)
analysis = TextBlob(tweet)
if analysis.sentiment.polarity > 0:
self.positive_tweets.append(tweet)
elif analysis.sentiment.polarity == 0:
self.neutral_tweets.append(tweet)
else:
self.negative_tweets.append(tweet)
total_tweet_count = len(self.all_tweets)
positive_tweet_count = len(self.positive_tweets)
negative_tweet_count = len(self.negative_tweets)
neutral_tweet_count = len(self.neutral_tweets)
tweet_sentiment["positive_per"] = round(100 * (positive_tweet_count / total_tweet_count), 2)
tweet_sentiment["negative_per"] = round (100 * (negative_tweet_count / total_tweet_count), 2)
tweet_sentiment["neutral_per"] = round(100 * (neutral_tweet_count / total_tweet_count), 2)
tweet_sentiment["positive_count"] = positive_tweet_count
tweet_sentiment["negative_count"] = negative_tweet_count
tweet_sentiment["neutral_count"] = neutral_tweet_count
print("\n\nNegative tweets:")
for tweet in self.negative_tweets[:10]:
print('-ve tweet- ' + tweet)
print("\n\nPositive tweet:")
for tweet in self.positive_tweets[:10]:
print('+ve tweet- ' + tweet)
print("\n\nNeutral tweet:")
for tweet in self.neutral_tweets[:10]:
print('= tweet- ' + tweet)
result["tweet_sentiment"] = tweet_sentiment
result["news_sentiment"] = news_sentiment
return result
def main():
client = TwitterClient()
# client.get_google_finance_news_feed('NASDAQ:FB',100)
result = client.predict_stock_sentiment(quote='NSE:TCS', count=500)
print(result)
if __name__ == "__main__":
# calling main function
main()
|
python
|
import sys
from .bv import get_bv_circuit
from .qaoa import get_qaoa_circuit
from .qgan import get_qgan_circuit
from .ising import get_ising_circuit
from .parallel_cnot import get_parallel_cnot, get_parallel_cnot_barriers
from .parallel_swap import get_parallel_swap
from .xeb import get_xeb_circuit, get_xeb_iswap_circuit, get_xeb_iswap_barriers_circuit
def get_circuit(numQ, circ_name, dep=0):
if circ_name=='bv':
hs = '00101'*(numQ//5 + 1)
return get_bv_circuit(numQ, hs[:numQ])
elif circ_name == 'qaoa':
return get_qaoa_circuit(numQ, 0.5, 1)
elif circ_name == 'qgan':
return get_qgan_circuit(numQ)
elif circ_name == 'ising':
return get_ising_circuit(numQ)
elif circ_name == 'parallel_cnot':
return get_parallel_cnot(numQ, dep)
elif circ_name == 'parallel_cnot_barrier':
return get_parallel_cnot_barriers(numQ, dep)
elif circ_name == 'parallel_swap':
return get_parallel_swap(numQ)
elif circ_name == 'xeb':
return get_xeb_circuit(numQ,dep)
elif circ_name == 'xeb_iswap':
return get_xeb_iswap_circuit(numQ,dep)
elif circ_name == 'xeb_iswap_barrier':
return get_xeb_iswap_barriers_circuit(numQ, dep)
else:
print("Circuit name %s not recognized." % circ_name)
sys.exit()
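# Minimal usage sketch (circuit names must match the branches above; the depth
# argument only matters for the parallel and XEB variants):
#   circuit = get_circuit(5, 'bv')              # 5-qubit Bernstein-Vazirani circuit
#   xeb = get_circuit(8, 'xeb_iswap', dep=10)   # 8-qubit XEB iSWAP circuit of depth 10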
|
python
|
""" Canvas Indexer
A flask web application that crawls Activity Streams for IIIF Canvases and
offers a search API.
"""
import atexit
from flask import Flask
from flask_cors import CORS
from canvasindexer.config import Cfg
from canvasindexer.crawler.crawler import crawl
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
__version__ = '1.0.0'
def create_app(**kwargs):
app = Flask(__name__)
with app.app_context():
CORS(app)
app.cfg = Cfg()
app.config['SQLALCHEMY_DATABASE_URI'] = app.cfg.db_uri()
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
from canvasindexer.models import db
db.init_app(app)
db.create_all()
from canvasindexer.api.views import pd
app.register_blueprint(pd)
if app.cfg.crawler_interval() > 0:
crawl()
scheduler = BackgroundScheduler()
scheduler.start()
scheduler.add_job(
func=crawl,
trigger=IntervalTrigger(seconds=app.cfg.crawler_interval()),
id='crawl_job',
name='crawl AS with interval set in config',
replace_existing=True)
atexit.register(lambda: scheduler.shutdown())
return app
|
python
|
# Generated by Django 3.1.8 on 2021-08-25 14:00
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("river", "0004_batch_completed_canceled"),
]
operations = [
migrations.RemoveField(
model_name="error",
name="deleted_at",
),
]
|
python
|
# -*- coding: utf-8 -*-
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module containing release engineering related builders."""
from __future__ import print_function
from chromite.lib import parallel
from chromite.cbuildbot.builders import simple_builders
from chromite.cbuildbot.stages import build_stages
from chromite.cbuildbot.stages import release_stages
class GeneratePayloadsBuilder(simple_builders.SimpleBuilder):
"""Run the PaygenStage once for each board."""
def RunStages(self):
"""Runs through build process."""
def _RunStageWrapper(board):
self._RunStage(build_stages.UprevStage)
self._RunStage(build_stages.InitSDKStage)
self._RunStage(release_stages.PaygenStage, board=board,
channels=self._run.options.channels)
with parallel.BackgroundTaskRunner(_RunStageWrapper) as queue:
for board in self._run.config.boards:
queue.put([board])
|
python
|
"""Configs for global use"""
cfg = {'seed': 1971}
|
python
|
# -*- coding: utf-8 -*-
import codecs
import io
import os
from setuptools import setup
with io.open(os.path.join(os.path.dirname(__file__), 'README.md'),
encoding='utf-8') as f:
long_description = f.read()
setup(
name = 'bernhard',
version = '0.2.6',
description = 'Python client for Riemann',
long_description = long_description,
author = 'Benjamin Anderspn',
author_email = '[email protected]',
license = 'ASF2.0',
url = 'http://github.com/banjiewen/bernhard.git',
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: Log Analysis',
'Topic :: Utilities',
'Topic :: System :: Networking :: Monitoring'
],
zip_safe = False,
packages = ['bernhard'],
include_package_data = True,
install_requires=['protobuf >= 2.4']
)
|
python
|
import asyncio, pathlib, importlib
parent = pathlib.Path(__file__).resolve().parent
for _ in parent.iterdir():
if _.is_dir() and not _.name.startswith('.'): importlib.import_module('.main', _.name)
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time  : 2019/11/6 11:33 AM
# @Title : 58. Length of Last Word
# @Link  : https://leetcode-cn.com/problems/length-of-last-word/
QUESTION = """
Given a string consisting only of upper/lower-case letters and spaces ' ', return the length of its last word.
If there is no last word, return 0.
Note: a word is a sequence of letters that contains no spaces.
Example:
Input: "Hello World"
Output: 5
"""
THINKING = """
Strip the leading and trailing spaces first, then split on spaces and take the length of the last token.
"""
class Solution:
def lengthOfLastWord(self, s: str) -> int:
s = s.strip()
result = s.split(" ")[-1]
return len(result)
if __name__ == '__main__':
s = Solution()
string = "Hello World"
print(s.lengthOfLastWord(string))
|
python
|
from __future__ import absolute_import, print_function
from django.conf.urls import patterns, url
from .metadata import CloudflareMetadataEndpoint
from .webhook import CloudflareWebhookEndpoint
urlpatterns = patterns(
"",
url(r"^metadata/$", CloudflareMetadataEndpoint.as_view()),
url(r"^webhook/$", CloudflareWebhookEndpoint.as_view()),
)
|
python
|
from sparkpost import SparkPost
import requests
import os
class SparkPostIPNotifier:
current_ip = None
fetched_ip = None
def __init__(self, sparkpost_api_key, subject, from_email, recipients):
self.subject = subject
self.from_email = from_email
self.recipients = recipients
self.current_ip = self.get_saved_ip()
self.fetched_ip = self.fetch_and_save_ip_from_sites()
self.sparkpost = SparkPost(sparkpost_api_key)
if self.has_ip_changed():
self.send_notification()
@staticmethod
def get_sites():
return [
{'url': 'http://jsonip.com/', 'key': 'ip', 'kind': 'json'},
{'url': 'http://httpbin.org/ip', 'key': 'origin', 'kind': 'json'},
{'url': 'https://api.ipify.org?format=json', 'key': 'ip', 'kind': 'json'},
{'url': 'http://ip.42.pl/raw', 'key': 'ip', 'kind': 'text'}
]
@staticmethod
def save_ip(ip):
with open('ip.txt', 'w') as f:
f.write(ip)
@staticmethod
def get_saved_ip():
if not os.path.isfile('ip.txt'):
return
with open('ip.txt', 'r') as f:
return f.read()
    def fetch_and_save_ip_from_sites(self):
ip = self.current_ip
sites = self.get_sites()
for site in sites:
key = site.get('key')
url = site.get('url')
kind = site.get('kind')
try:
response = requests.get(url)
                if kind == 'text':
                    ip = response.text
                elif kind == 'json':
                    json = response.json()
                    ip = json.get(key)
            except Exception:
                pass
if ip is not None:
break
self.save_ip(ip)
return ip
def send_notification(self):
text = 'New IP-address: {}.\nOld IP-address: {}'.format(self.fetched_ip, self.current_ip)
r = self.sparkpost.transmission.send(subject=self.subject, from_email=self.from_email, text=text, recipients=self.recipients)
return r
def has_ip_changed(self):
return self.current_ip != self.fetched_ip
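# Minimal usage sketch (the API key and addresses are placeholders; the notifier
# checks the public IP on construction and only sends mail when it has changed
# since the last value saved in ip.txt):
#   SparkPostIPNotifier(
#       sparkpost_api_key='YOUR_SPARKPOST_API_KEY',
#       subject='Home IP changed',
#       from_email='[email protected]',
#       recipients=['[email protected]'],
#   )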
|
python
|
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
# Test case ID : C4976201
# Test Case Title : Verify that the value assigned to the Mass of the object, gets actually assigned to the object
# fmt: off
class Tests:
# test iteration 1
enter_game_mode_1 = ("Entered game mode first time", "Failed to enter game mode first time")
ProjectileSphere_exists_1 = ("ProjectileSphere entity found first time", "ProjectileSphere entity not found first time")
TargetSphere_exists_1 = ("TargetSphere entity found first time", "TargetSphere entity not found first time")
Trigger1_exists_1 = ("Trigger1 entity found first time", "Trigger1 entity not found first time")
Trigger2_exists_1 = ("Trigger2 entity found first time", "Trigger2 entity not found first time")
Trigger3_exists_1 = ("Trigger3 entity found first time", "Trigger3 entity not found first time")
TargetSphere_mass_1 = ("Mass of TargetSphere was set to 1.0", "Mass of TargetSphere was not set to 1.0")
    spheres_collided_1 = ("ProjectileSphere and TargetSphere collided first time", "Timed out before ProjectileSphere & TargetSphere collided first time")
stopped_correctly_1 = ("TargetSphere hit Trigger1 & Trigger2 but not Trigger_3", "TargetSphere did not stop correctly")
check_y_1 = ("sphere did not move far from expected in Y direction _1", "TargetSphere moved an unexpected distance in Y direction _1")
check_z_1 = ("sphere did not move far from expected in Z direction _1", "TargetSphere moved an unexpected distance in Z direction _1")
exit_game_mode_1 = ("Exited game mode first time", "Couldn't exit game mode first time")
# test iteration 2
enter_game_mode_2 = ("Entered game mode second time", "Failed to enter game mode second time")
ProjectileSphere_exists_2 = ("ProjectileSphere entity found second time", "ProjectileSphere entity not found second time")
TargetSphere_exists_2 = ("TargetSphere entity found second time", "TargetSphere entity not found second time")
Trigger1_exists_2 = ("Trigger1 entity found second time", "Trigger1 entity not found second time")
Trigger2_exists_2 = ("Trigger2 entity found second time", "Trigger2 entity not found second time")
Trigger3_exists_2 = ("Trigger3 entity found second time", "Trigger3 entity not found second time")
TargetSphere_mass_2 = ("Mass of TargetSphere was set to 10.0", "Mass of TargetSphere was not set to 10.0")
    spheres_collided_2 = ("ProjectileSphere and TargetSphere collided second time", "Timed out before ProjectileSphere & TargetSphere collided second time")
stopped_correctly_2 = ("TargetSphere hit Trigger1 but not Trigger2 or Trigger3", "TargetSphere did not stop correctly")
check_y_2 = ("sphere did not move far from expected in Y direction _2", "TargetSphere moved an unexpected distance in Y direction _2")
check_z_2 = ("sphere did not move far from expected in Z direction _2", "TargetSphere moved an unexpected distance in Z direction _2")
exit_game_mode_2 = ("Exited game mode second time", "Couldn't exit game mode second time")
# test iteration 3
enter_game_mode_3 = ("Entered game mode third time", "Failed to enter game mode third time")
ProjectileSphere_exists_3 = ("ProjectileSphere entity found third time", "ProjectileSphere entity not found third time")
TargetSphere_exists_3 = ("TargetSphere entity found third time", "TargetSphere entity not found third time")
Trigger1_exists_3 = ("Trigger1 entity found third time", "Trigger1 entity not found third time")
Trigger2_exists_3 = ("Trigger2 entity found third time", "Trigger2 entity not found third time")
Trigger3_exists_3 = ("Trigger3 entity found third time", "Trigger3 entity not found third time")
TargetSphere_mass_3 = ("Mass of TargetSphere was set to 100.0", "Mass of TargetSphere was not set to 100.0")
    spheres_collided_3 = ("ProjectileSphere and TargetSphere collided third time", "Timed out before ProjectileSphere & TargetSphere collided third time")
stopped_correctly_3 = ("TargetSphere did not hit Trigger1, Trigger2, or Trigger3", "TargetSphere hit one or more triggers before stopping")
check_y_3 = ("sphere did not move far from expected in Y direction _3", "TargetSphere moved an unexpected distance in Y direction _3")
check_z_3 = ("sphere did not move far from expected in Z direction _3", "TargetSphere moved an unexpected distance in Z direction _3")
exit_game_mode_3 = ("Exited game mode third time", "Couldn't exit game mode third time")
# general
velocity_sizing = ("The velocities are in the correct order of magnitude", "The velocities are not correctly ordered in magnitude")
# fmt: on
def C4976201_RigidBody_MassIsAssigned():
"""
Summary:
Checking that the mass set to the object is actually applied via colliding entities
Level Description:
ProjectileSphere (entity) - Sphere shaped Mesh; Sphere shaped PhysX Collider;
PhysX Rigid Body: initial linear velocity in X direction is 5m/s, initial mass 1kg,
gravity disabled, linear damping default (0.05)
TargetSphere (entity) - Sphere shaped Mesh; Sphere shaped PhysX Collider;
PhysX Rigid Body: no initial velocity, initial mass 1kg, gravity disabled, linear damping 1.0
Expected Behavior:
The ProjectileSphere entity will float towards TargetSphere entity and then collide with it.
Because they are the same mass initially, the second sphere will move after collision.
TargetSphere's mass will be increased and scenario will run again,
but TargetSphere will have a smaller velocity after collision.
TargetSphere will then increase mass again and should barely move after the final collision.
Test Steps:
1) Open level
2) Repeat steps 3-9
3) Enter game mode
4) Find and setup entities
5) Set mass of the TargetSphere
6) Check for collision
7) Wait for TargetSphere x velocity = 0
8) Check the triggers
9) Exit game mode
10) Verify the velocity of TargetSphere decreased after collision as mass increased
11) Close the editor
Note:
- This test file must be called from the Open 3D Engine Editor command terminal
- Any passed and failed tests are written to the Editor.log file.
Parsing the file or running a log_monitor are required to observe the test results.
:return: None
"""
import os
import sys
import azlmbr.legacy.general as general
import azlmbr.bus
import azlmbr
from editor_python_test_tools.utils import Report
from editor_python_test_tools.utils import TestHelper as helper
MOVEMENT_TIMEOUT = 7.0
COLLISION_TIMEOUT = 2.0
VELOCITY_ZERO = 0.01
Y_Z_BUFFER = 0.01
TARGET_SPHERE_NAME = "TargetSphere"
PROJECTILE_SPHERE_NAME = "ProjectileSphere"
TRIGGER_1_NAME = "Trigger1"
TRIGGER_2_NAME = "Trigger2"
TRIGGER_3_NAME = "Trigger3"
class ProjectileSphere:
def __init__(self, test_iteration):
self.name = PROJECTILE_SPHERE_NAME
self.test_iteration = test_iteration
self.timeout_reached = True
self.id = general.find_game_entity(self.name)
Report.critical_result(
Tests.__dict__["ProjectileSphere_exists_" + str(self.test_iteration)], self.id.IsValid()
)
def destroy_me(self):
azlmbr.entity.GameEntityContextRequestBus(azlmbr.bus.Broadcast, "DestroyGameEntity", self.id)
class TargetSphere:
def __init__(self, mass_to_assign, stop_before_trigger_name, expected_trigger_pattern, test_iteration):
self.id = None
self.name = TARGET_SPHERE_NAME
self.start_mass = None
self.mass_to_assign = mass_to_assign
self.collision_begin = False
self.after_collision_velocity = None
self.x_movement_timeout = True
self.stop_before_trigger_name = stop_before_trigger_name
self.expected_trigger_pattern = expected_trigger_pattern
self.collision_ended = False
self.test_iteration = test_iteration
self.test_set_mass = self.get_test("TargetSphere_mass_")
self.test_enter_game_mode = self.get_test("enter_game_mode_")
self.test_ProjectileSphere_exist = self.get_test("ProjectileSphere_exists_")
self.test_TargetSphere_exist = self.get_test("TargetSphere_exists_")
self.test_spheres_collided = self.get_test("spheres_collided_")
self.test_stop_properly = self.get_test("stopped_correctly_")
self.test_check_y = self.get_test("check_y_")
self.test_check_z = self.get_test("check_z_")
self.test_exit_game_mode = self.get_test("exit_game_mode_")
def get_test(self, test_prefix):
return Tests.__dict__[test_prefix + str(self.test_iteration)]
def find(self):
self.id = general.find_game_entity(self.name)
Report.critical_result(Tests.__dict__["TargetSphere_exists_" + str(self.test_iteration)], self.id.IsValid())
def setup_mass(self):
self.start_mass = azlmbr.physics.RigidBodyRequestBus(azlmbr.bus.Event, "GetMass", self.id)
Report.info("{} starting mass: {}".format(self.name, self.start_mass))
azlmbr.physics.RigidBodyRequestBus(azlmbr.bus.Event, "SetMass", self.id, self.mass_to_assign)
general.idle_wait_frames(1) # wait for mass to apply
mass_after_set = azlmbr.physics.RigidBodyRequestBus(azlmbr.bus.Event, "GetMass", self.id)
Report.info("{} mass after setting: {}".format(self.name, mass_after_set))
Report.result(self.test_set_mass, self.mass_to_assign == mass_after_set)
def current_velocity(self):
return azlmbr.physics.RigidBodyRequestBus(azlmbr.bus.Event, "GetLinearVelocity", self.id)
def on_collision_begin(self, args):
other_id = args[0]
if other_id.Equal(self.id):
Report.info("spheres collision begin")
self.collision_begin = True
def on_collision_end(self, args):
other_id = args[0]
if other_id.Equal(self.id):
Report.info("spheres collision end")
self.after_collision_velocity = self.current_velocity()
self.collision_ended = True
def add_collision_handlers(self, projectile_sphere_id):
self.handler = azlmbr.physics.CollisionNotificationBusHandler()
self.handler.connect(projectile_sphere_id)
self.handler.add_callback("OnCollisionBegin", self.on_collision_begin)
self.handler.add_callback("OnCollisionEnd", self.on_collision_end)
def x_velocity_zero(self):
if abs(self.current_velocity().x) < VELOCITY_ZERO:
Report.info("TargetSphere has stopped moving.")
self.x_movement_timeout = False
return True
return False
def collision_complete(self):
return self.collision_begin and self.collision_ended
def check_y_z_movement_from_collision(self):
"""
Used to check that the entity has not moved too far in either the Y or Z direction
"""
def is_within_tolerance(velocity_one_direction):
return abs(velocity_one_direction) < Y_Z_BUFFER
Report.info_vector3(self.after_collision_velocity, "Initial Velocity: ")
Report.result(self.test_check_y, is_within_tolerance(self.after_collision_velocity.y))
Report.result(self.test_check_z, is_within_tolerance(self.after_collision_velocity.z))
class Trigger:
"""
Used in the level to tell if the TargetSphere entity has moved a certain distance.
There are three triggers set up in the level.
"""
def __init__(self, name, test_iteration):
self.name = name
self.handler = None
self.triggered = False
self.test_iteration = test_iteration
self.id = general.find_game_entity(self.name)
Report.critical_result(Tests.__dict__[self.name + "_exists_" + str(self.test_iteration)], self.id.IsValid())
self.setup_handler()
def on_trigger_enter(self, args):
"""
This is passed into this object's handler.add_callback().
"""
other_id = args[0]
self.triggered = True
triggered_by_name = azlmbr.entity.GameEntityContextRequestBus(
azlmbr.bus.Broadcast, "GetEntityName", other_id
)
Report.info("{} was triggered by {}.".format(self.name, triggered_by_name))
def setup_handler(self):
"""
This is called to setup the handler for this trigger object
"""
self.handler = azlmbr.physics.TriggerNotificationBusHandler()
self.handler.connect(self.id)
self.handler.add_callback("OnTriggerEnter", self.on_trigger_enter)
class TriggerResultPattern:
"""
Used to store and determine which triggers were activated and compare to expected
"""
def __init__(self, trigger1_activated, trigger2_activated, trigger3_activated):
self.trigger1_activated = trigger1_activated
self.trigger2_activated = trigger2_activated
self.trigger3_activated = trigger3_activated
def __eq__(self, other_pattern):
"""
Used to determine if two patterns equal/match each other (i.e. Expected VS Actual)
"""
if isinstance(other_pattern, self.__class__):
return (
self.trigger1_activated == other_pattern.trigger1_activated
and self.trigger2_activated == other_pattern.trigger2_activated
and self.trigger3_activated == other_pattern.trigger3_activated
)
else:
return False
def report(self, expect_actual):
Report.info(
"""TargetSphere {} Triggers:
Trigger_1: {}
Trigger_2: {}
Trigger_3: {}
""".format(
expect_actual, self.trigger1_activated, self.trigger2_activated, self.trigger3_activated
)
)
target_sphere_1kg = TargetSphere(
mass_to_assign=1.0,
stop_before_trigger_name=TRIGGER_3_NAME,
expected_trigger_pattern=TriggerResultPattern(True, True, False),
test_iteration=1,
)
target_sphere_10kg = TargetSphere(
mass_to_assign=10.0,
stop_before_trigger_name=TRIGGER_2_NAME,
expected_trigger_pattern=TriggerResultPattern(True, False, False),
test_iteration=2,
)
target_sphere_100kg = TargetSphere(
mass_to_assign=100.0,
stop_before_trigger_name=TRIGGER_1_NAME,
expected_trigger_pattern=TriggerResultPattern(False, False, False),
test_iteration=3,
)
target_spheres = [target_sphere_1kg, target_sphere_10kg, target_sphere_100kg]
target_sphere_velocities = {}
helper.init_idle()
# 1) Open level
helper.open_level("Physics", "C4976201_RigidBody_MassIsAssigned")
# 2) Repeat steps 3-9
for target_sphere in target_spheres:
Report.info("***************** Begin Test Iteration {} ******************".format(target_sphere.test_iteration))
# 3) Enter game mode
helper.enter_game_mode(target_sphere.test_enter_game_mode)
# 4) Find and setup entities
projectile_sphere = ProjectileSphere(target_sphere.test_iteration)
target_sphere.find()
target_sphere.add_collision_handlers(projectile_sphere.id)
trigger_1 = Trigger(TRIGGER_1_NAME, target_sphere.test_iteration)
trigger_2 = Trigger(TRIGGER_2_NAME, target_sphere.test_iteration)
trigger_3 = Trigger(TRIGGER_3_NAME, target_sphere.test_iteration)
# 5) Set mass of the TargetSphere
target_sphere.setup_mass()
# 6) Check for collision
helper.wait_for_condition(target_sphere.collision_complete, COLLISION_TIMEOUT)
Report.critical_result(target_sphere.test_spheres_collided, target_sphere.collision_complete())
projectile_sphere.destroy_me()
Report.info_vector3(
target_sphere.after_collision_velocity, "Velocity of {} after the collision: ".format(target_sphere.name)
)
Report.info("The sphere should stop before touching {}".format(target_sphere.stop_before_trigger_name))
# 7) Wait for TargetSphere x velocity = 0
helper.wait_for_condition(target_sphere.x_velocity_zero, MOVEMENT_TIMEOUT)
if target_sphere.x_movement_timeout is True:
Report.info("TargetSphere failed to stop moving in the x direction before timeout was reached.")
# 8) Check the triggers
actual_trigger_pattern = TriggerResultPattern(trigger_1.triggered, trigger_2.triggered, trigger_3.triggered)
patterns_match = actual_trigger_pattern == target_sphere.expected_trigger_pattern
target_sphere.expected_trigger_pattern.report("Expected")
actual_trigger_pattern.report("Actual")
Report.result(target_sphere.test_stop_properly, patterns_match)
target_sphere.check_y_z_movement_from_collision()
target_sphere_velocities.update({target_sphere.test_iteration: target_sphere.after_collision_velocity.x})
# 9) Exit game mode
helper.exit_game_mode(target_sphere.test_exit_game_mode)
Report.info("~~~~~~~~~~~~~~ Test Iteration {} End ~~~~~~~~~~~~~~~~~~".format(target_sphere.test_iteration))
# 10) Verify the velocity of TargetSphere decreased after collision as mass increased
outcome = target_sphere_velocities[1] > target_sphere_velocities[2] > target_sphere_velocities[3]
Report.result(Tests.velocity_sizing, outcome)
if __name__ == "__main__":
from editor_python_test_tools.utils import Report
Report.start_test(C4976201_RigidBody_MassIsAssigned)
|
python
|
# -*- coding: utf-8 -*-
"""
Query permutator to match multiword queries.
Given a query with n tokens, we assume the first n-1 tokens are complete while the last one may be partial.
From that we want to match using the following algorithm.
Bag-of-words (BOW) reordering is applied to the first n-1 tokens; the n-th (partial) token is kept last.
Example: "messenger facebook deu" -> "facebook messenger deu"
In order to match the index entry: "facebook messenger download deutsch" we need an entry:
"facebook messenger deutsch" which corresponds to tokens 0 1 3
We do not need to index "messenger facebook deu" (1 0 3), because BOW reordering happens at lookup time.
"""
import sys
from functools import reduce
import keyvi
# permutation lookup table, number_of_tokens - > permutations_to_build
# the table is built with scripts/build_bow_completion_permutations
PERMUTATION_LOOKUP_TABLE = {
2: [
[0],
[1],
[0, 1],
[1, 0],
],
3: [
[0],
[1],
[2],
[0, 1],
[0, 2],
[1, 0],
[1, 2],
[2, 0],
[2, 1],
[0, 1, 2],
[0, 2, 1],
[1, 2, 0],
],
4: [
[0],
[1],
[2],
[3],
[0, 1],
[0, 2],
[0, 3],
[1, 0],
[1, 2],
[1, 3],
[2, 0],
[2, 1],
[2, 3],
[3, 0],
[3, 1],
[3, 2],
[0, 1, 2],
[0, 1, 3],
[0, 2, 1],
[0, 2, 3],
[0, 3, 1],
[0, 3, 2],
[1, 2, 0],
[1, 2, 3],
[1, 3, 0],
[1, 3, 2],
[2, 3, 0],
[2, 3, 1],
[0, 1, 2, 3],
],
}
MULTIWORD_QUERY_SEPARATOR = '\x1b'
class MultiWordPermutation:
def __init__(self):
pass
def __call__(self, query):
query_tokens = query.split(" ")
query_tokens_bow = sorted(query_tokens)
length = len(query_tokens_bow)
if length not in PERMUTATION_LOOKUP_TABLE:
yield query
return
for permutation in PERMUTATION_LOOKUP_TABLE[len(query_tokens_bow)]:
if len(permutation) < 3:
first_token = query_tokens_bow[permutation[0]]
if first_token != query_tokens[permutation[0]] and len(first_token) == 1:
continue
yield " ".join([query_tokens_bow[i] for i in permutation]) + MULTIWORD_QUERY_SEPARATOR + query
if __name__ == '__main__':
pipeline = []
pipeline.append(MultiWordPermutation())
c = keyvi.CompletionDictionaryCompiler()
for line in sys.stdin:
key, weight = line.split("\t")
for q in reduce(lambda x, y: y(x), pipeline, key):
c.Add(q, int(weight))
c.Compile()
c.WriteToFile("mw-completion.keyvi")
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-10-25 19:58
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('s3pooler', '0001_initial'),
]
operations = [
migrations.DeleteModel(
name='RawEvents',
),
migrations.DeleteModel(
name='UsersEvents',
),
]
|
python
|
import matplotlib.pyplot as plt
def main():
# Iteration number 0 corresponds to zero DAgger iterations, i.e. the plain behavioral cloning model
iterations = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
dagger_mean_return = [499, 491.00752037626864, 280.35493455713464, 313.0841462553591, 281.2280666853588, 302.32458150840415, 375.3346917930867,
526.2328895242667, 722.7978505443548, 623.9050590062458, 1398.44995600636, 3076.955646121361, 8253.923134108587,
7629.325002257281, 7246.285399942843, 8087.738743306754, 3893.1664238204467, 9132.88474770422, 9460.28598069521,
9726.914701058917]
dagger_stds = [499, 107.8460299321844, 13.621758933207397, 13.659271852314815, 18.423331558439394, 9.814398412282554, 184.14841961543374,
207.72978567273103, 508.0083698513861, 695.3579276517936, 904.658395758084, 2612.8369380474232, 3498.691955383935,
3287.0477377140114, 4022.94659430406, 2843.4029358558473, 1324.384546716151, 2446.9232315473655, 2682.070975182269,
2073.0612932329764]
plt.figure()
plt.errorbar(iterations, dagger_mean_return, color='r', yerr=dagger_stds, label='DAgger')
plt.suptitle('DAgger - Mean reward function of number of iterations')
plt.xlabel('DAgger iteration')
plt.ylabel('Mean reward')
plt.xlim([-0.5, 20])
plt.ylim([450, 10800])
expert = plt.axhline(y=10477, color='g', label='Expert Policy')
bc = plt.axhline(y=499, color='b', label='Behavioral Cloning')
plt.legend(loc=4)
plt.savefig('figures/Q3.2.png')
plt.show()
if __name__ == '__main__':
main()
|
python
|
"""broccoliフレームワーク内データの、シリアライズ・デシリアライズに関するモジュール。"""
import json
from broccoli import register
from broccoli.layer import BaseLayer, BaseItemLayer, BaseObjectLayer, BaseTileLayer
from broccoli.material import BaseTile, BaseObject, BaseItem, BaseMaterial
MANAGER = 'Manager'
LAYER = 'Layer'
TILE = 'Tile'
OBJECT = 'Object'
ITEM = 'Item'
MATERIALS = (TILE, OBJECT, ITEM)
class JsonEncoder(json.JSONEncoder):
"""broccoliフレームワーク専用JSONエンコーダー。
json.dump(tile_layer, file, cls=serializers.JsonEncoder)
のように使ってください。
通常のJSONEncoderと違い、
- 関数
- マテリアル
- レイヤー
もJSONエンコードされます。
レイヤー内にマテリアルがある場合はマテリアルも正しくエンコードされ、
更にマテリアルが何らかのマテリアルを保持している場合(アイテムなど)も再帰的にエンコードされます。
関数は、
"generic.do_nothing"
のような単純な文字列になります。この文字列は関数の登録名です。
マテリアルは、
{
"class_name": "KindnessSheep",
"kwargs": {},
"kind": "Object"
}
という表現になります。
レイヤーは、
{
"kind": "Layer",
"layer": [
[マテリアル, マテリアル...],
[マテリアル, マテリアル...],
[マテリアル, マテリアル...],
]
}
という表現になります。内部のマテリアル部分は、上で紹介したマテリアルのJSON表現が入ります。
実際にどういうJSONになるかは、samples/roguelike内のjsonファイルを見てください。
"""
def manager_to_json(self, o):
"""マネージャーをJSONエンコードする。"""
canvas = o.current_canvas
result = {
'kind': MANAGER,
'name': o.current_canvas_name,
'vars': self.default(o.vars),
'tile_layer': self.layer_to_json(canvas.tile_layer),
'object_layer': self.layer_to_json(canvas.object_layer),
'item_layer': self.layer_to_json(canvas.item_layer),
}
return result
def layer_to_json(self, o):
"""レイヤーをJSONエンコードする。"""
result = {'kind': LAYER}
if isinstance(o, BaseItemLayer):
result['layer'] = [[[] for _ in range(o.tile_layer.x_length)] for _ in range(o.tile_layer.y_length)]
for x, y, items in o.all():
if items:
result['layer'][y][x] = [self.material_to_json(item, kind=ITEM) for item in items]
else:
result['layer'][y][x] = []
elif isinstance(o, BaseTileLayer):
result.update({
'x_length': o.x_length,
'y_length': o.y_length,
'layer': [[None for _ in range(o.x_length)] for _ in range(o.y_length)],
})
for x, y, tile in o.all():
result['layer'][y][x] = self.material_to_json(tile, kind=TILE)
elif isinstance(o, BaseObjectLayer):
result['layer'] = [[None for _ in range(o.tile_layer.x_length)] for _ in range(o.tile_layer.y_length)]
for x, y, obj in o.all():
if obj is None:
result['layer'][y][x] = None
else:
result['layer'][y][x] = self.material_to_json(obj, kind=OBJECT)
return result
def material_dump_to_json(self, o):
"""マテリアルダンプをJSONエンコードする。
(cls, kwargs)形式のマテリアルダンプを一度インスタンス化し、
それを再度defaultメソッドに渡し、マテリアルのJSONエンコードを行います。
"""
cls = o[0]
kwargs = o[1]
material = cls(**kwargs)
return self.default(material)
def material_to_json(self, o, kind):
"""マテリアルをJSONエンコードする。"""
result = {
'class_name': o.__class__.__name__,
'kwargs': self.kwargs_to_json(o),
'kind': kind,
}
return result
def kwargs_to_json(self, o):
"""マテリアルのインスタンス属性をJSONエンコードする。"""
result = o.get_instance_attrs()
for key, value in result.items():
if key in o.func_attrs:
result[key] = value.name  # the function's name attribute holds the name it was registered under
elif isinstance(value, (list, tuple)):
for i, data in enumerate(value):
value[i] = self.default(data)
elif isinstance(value, dict):
for attr_name, attr_value in value.items():
value[attr_name] = self.default(attr_value)
return result
def default(self, o):
"""JSONエンコードする。
このメソッドが最初に呼び出されます。
"""
from broccoli.manage import BaseManager
# A manager instance was passed in.
if isinstance(o, BaseManager):
return self.manager_to_json(o)
# A layer was passed in; call the layer-specific encoding.
elif isinstance(o, BaseLayer):
return self.layer_to_json(o)
# Each material type also gets its own dedicated encoding.
elif isinstance(o, BaseTile):
return self.material_to_json(o, kind=TILE)
elif isinstance(o, BaseObject):
return self.material_to_json(o, kind=OBJECT)
elif isinstance(o, BaseItem):
return self.material_to_json(o, kind=ITEM)
# Data in (Material, kwargs) form.
elif isinstance(o, tuple) and len(o) >= 2 and issubclass(o[0], BaseMaterial) and isinstance(o[1], dict):
return self.material_dump_to_json(o)
# If it is a list or tuple, its contents may also be materials and the like,
# so JSON-encode them recursively.
elif isinstance(o, list):
for i, data in enumerate(o):
o[i] = self.default(data)
return o
# Dicts may also contain materials, so encode their values recursively as well.
elif isinstance(o, dict):
for attr_name, attr_value in o.items():
o[attr_name] = self.default(attr_value)
# Plain numbers and strings are returned unchanged.
return o
class JsonDecoder(json.JSONDecoder):
"""broccoliフレームワーク専用JSONデコーダー。
data = json.load(file, cls=serializers.JsonDecoder)
のように使ってください。
JSONEncoderでエンコードされたJSONファイルを、broccoliフレームワークで使える形に出コードします。
通常のJSONデコードと違うのは、
- 関数のエンコード表現
- マテリアルのエンコード表現
- レイヤーのエンコード表現
もデコードされることです。
関数は
"generic.do_nothing"
のような文字列となっていますが、これをPythonの関数オブジェクトに変換します。
マテリアルは
{
"class_name": "KindnessSheep",
"kwargs": {},
"kind": "Object"
}
のような表現ですが、これを
(クラスオブジェクト, インスタンス属性の辞書)
に変換します。cls(**kwargs)としてインスタンス化できる形式です。
レイヤーは、
{
"kind": "Layer",
"layer": [
[マテリアル, マテリアル...],
[マテリアル, マテリアル...],
[マテリアル, マテリアル...],
]
}
という表現ですが、これを
[
[(cls, kwargs), (cls, kwargs)...],
[(cls, kwargs), (cls, kwargs)...],
[(cls, kwargs), (cls, kwargs)...],
]
という2次元のリストに変換します。(cls, kwargs)部分は上で紹介したマテリアルのデコード表現です。
"""
def _load_material(self, col, container):
"""マテリアルをデコードする。"""
class_name = col['class_name']
kwargs = col['kwargs']
cls = container[class_name]
# Walk through the instance attributes.
for key, value in kwargs.items():
# If the attribute holds a function, load it from the function registry.
if key in cls.func_attrs:
kwargs[key] = register.functions[value]
# If the attribute is a list or dict, decode its elements recursively.
elif isinstance(value, list):
for i, data in enumerate(value):
value[i] = self._decode(data)
elif isinstance(value, dict):
for attr_name, attr_value in value.items():
value[attr_name] = self._decode(attr_value)
return cls, kwargs
def tile_from_json(self, o):
return self._load_material(o, register.tiles)
def object_from_json(self, o):
return self._load_material(o, register.objects)
def item_from_json(self, o):
return self._load_material(o, register.items)
def _decode(self, o):
"""マテリアル、レイヤーなどをデコードする。"""
# リストならば各要素を_decodeし、中のマテリアルなどを再帰的にデコードする。
if isinstance(o, list):
for i, data in enumerate(o):
o[i] = self._decode(data)
# For a dict, use the dedicated method when it is a material or similar; otherwise decode it recursively.
elif isinstance(o, dict):
kind = o.get('kind')
if kind == TILE:
return self.tile_from_json(o)
elif kind == OBJECT:
return self.object_from_json(o)
elif kind == ITEM:
return self.item_from_json(o)
for key, value in o.items():
o[key] = self._decode(value)
# Anything reaching this point is a plain value such as a number or string.
return o
def decode(self, s, **kwargs):
"""デコードする。最初に呼ばれる。"""
# まず、デフォルトのdecodeで
# リストと辞書のPythonオブジェクトに変換してもらう。
o = super().decode(s, **kwargs)
# そのPythonオブジェクトの中から、マテリアルやレイヤー、関数部分を更にデコードする。
return self._decode(o)
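# Round-trip usage sketch (illustrative; the file name and the tile_layer variable are assumptions):
# with open('map.json', 'w') as file:
#     json.dump(tile_layer, file, cls=JsonEncoder)
# with open('map.json') as file:
#     data = json.load(file, cls=JsonDecoder)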
|
python
|
import numpy as np
import rasterio as rio
import geopandas as gpd
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
from shapely.ops import cascaded_union
from shapely.geometry.polygon import Polygon
from cartopy.feature import ShapelyFeature
import matplotlib.patches as mpatches
def generate_handles(labels, colors, edge='k', alpha=1):
lc = len(colors) # get the length of the color list
handles = []
for i in range(len(labels)):
handles.append(mpatches.Rectangle((0, 0), 1, 1, facecolor=colors[i % lc], edgecolor=edge, alpha=alpha))
return handles
def percentile_stretch(img, pmin=0., pmax=100.):
'''
Apply a percentile-based linear contrast stretch to a single-band image.
The values at the pmin and pmax percentiles are mapped to 0 and 1, and values
outside that range are clipped to the new minimum and maximum.
'''
# here, we make sure that pmin < pmax, and that they are between 0, 100
if not 0 <= pmin < pmax <= 100:
raise ValueError('0 <= pmin < pmax <= 100')
# here, we make sure that the image is only 2-dimensional
if not img.ndim == 2:
raise ValueError('Image can only have two dimensions (row, column)')
minval = np.percentile(img, pmin)
maxval = np.percentile(img, pmax)
stretched = (img - minval) / (maxval - minval) # stretch the image to 0, 1
stretched[img < minval] = 0 # set anything less than minval to the new minimum, 0.
stretched[img > maxval] = 1 # set anything greater than maxval to the new maximum, 1.
return stretched
def img_display(img, ax, bands, stretch_args=None, **imshow_args):
'''
Display a multi-band image on the given axes, stretching each band with
percentile_stretch() and showing the bands listed in `bands` in that order.
Returns the imshow handle and the axes.
'''
dispimg = img.copy().astype(np.float32) # make a copy of the original image,
# but be sure to cast it as a floating-point image, rather than an integer
for b in range(img.shape[0]): # loop over each band, stretching using percentile_stretch()
if stretch_args is None: # if stretch_args is None, use the default values for percentile_stretch
dispimg[b] = percentile_stretch(img[b])
else:
dispimg[b] = percentile_stretch(img[b], **stretch_args)
# next, we transpose the image to re-order the indices
dispimg = dispimg.transpose([1, 2, 0])
# finally, we display the image
handle = ax.imshow(dispimg[:, :, bands], **imshow_args)
return handle, ax
# ------------------------------------------------------------------------
# note - rasterio's open() function works in much the same way as python's - once we open a file,
# we have to make sure to close it. One easy way to do this in a script is by using the with statement shown
# below - once we get to the end of this statement, the file is closed.
with rio.open('data_files/NI_Mosaic.tif') as dataset:
img = dataset.read()
xmin, ymin, xmax, ymax = dataset.bounds
# your code goes here!
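# Example sketch of using img_display (illustrative, not part of the original exercise;
# the CRS, band order, and stretch values below are assumptions to adapt to your data):
# myCRS = ccrs.UTM(29)
# fig, ax = plt.subplots(1, 1, figsize=(10, 10), subplot_kw=dict(projection=myCRS))
# h, ax = img_display(img, ax, [2, 1, 0], stretch_args={'pmin': 0.1, 'pmax': 99.9},
#                     transform=myCRS, extent=[xmin, xmax, ymin, ymax])
# plt.show()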
|
python
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import argparse
import logging
import random
import numpy as np
import torch
from run import run
def main():
parser = argparse.ArgumentParser()
## Basic parameters
parser.add_argument("--train_file", default="data/structured_zeroshot-train-kilt.jsonl")
parser.add_argument("--predict_file", default="data/structured_zeroshot-dev-kilt.jsonl")
parser.add_argument("--dataset", default="zsre", required=True)
parser.add_argument("--model", default="facebook/bart-base", required=False)
parser.add_argument("--output_dir", default=None, type=str, required=True)
parser.add_argument("--do_train", action='store_true')
parser.add_argument("--do_predict", action='store_true')
parser.add_argument("--predict_checkpoint", type=str, default="best-model.pt")
## Model parameters
parser.add_argument("--checkpoint", type=str)
parser.add_argument("--do_lowercase", action='store_true', default=False)
parser.add_argument("--freeze_embeds", action='store_true', default=False)
# Preprocessing/decoding-related parameters
parser.add_argument('--max_input_length', type=int, default=24)
parser.add_argument('--max_output_length', type=int, default=12)
parser.add_argument('--num_beams', type=int, default=4)
parser.add_argument("--append_another_bos", action='store_true', default=False)
# Training-related parameters
parser.add_argument("--train_batch_size", default=64, type=int,
help="Batch size per GPU/CPU for training.")
parser.add_argument("--predict_batch_size", default=64, type=int,
help="Batch size per GPU/CPU for evaluation.")
parser.add_argument("--learning_rate", default=3e-5, type=float,
help="The initial learning rate for Adam.")
parser.add_argument("--warmup_proportion", default=0.01, type=float,
help="Weight decay if we apply some.")
parser.add_argument("--weight_decay", default=0.0, type=float,
help="Weight deay if we apply some.")
parser.add_argument("--adam_epsilon", default=1e-8, type=float,
help="Epsilon for Adam optimizer.")
parser.add_argument("--max_grad_norm", default=0.1, type=float,
help="Max gradient norm.")
parser.add_argument("--gradient_accumulation_steps", default=4, type=int,
help="Max gradient norm.")
parser.add_argument("--num_train_epochs", default=30.0, type=float,
help="Total number of training epochs to perform.")
parser.add_argument("--warmup_steps", default=500, type=int,
help="Linear warmup over warmup_steps.")
parser.add_argument("--total_steps", default=100000, type=int,
help="Linear warmup over warmup_steps.")
parser.add_argument('--wait_step', type=int, default=10000000000)
# Other parameters
parser.add_argument("--verbose", action='store_true',
help="If true, all of the warnings related to data processing will be printed. "
"A number of warnings are expected for a normal SQuAD evaluation.")
parser.add_argument('--eval_period', type=int, default=2000,
help="Evaluate & save model")
parser.add_argument('--prefix', type=str, default='',
help="Prefix for saving predictions")
parser.add_argument('--debug', action='store_true',
help="Use a subset of data for debugging")
parser.add_argument('--seed', type=int, default=42,
help="random seed for initialization")
args = parser.parse_args()
if os.path.exists(args.output_dir) and os.listdir(args.output_dir):
print("Output directory () already exists and is not empty.")
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir, exist_ok=True)
##### Start writing logs
log_filename = "{}log.txt".format("" if args.do_train else "eval_")
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO,
handlers=[logging.FileHandler(os.path.join(args.output_dir, log_filename)),
logging.StreamHandler()])
logger = logging.getLogger(__name__)
logger.info(args)
logger.info(args.output_dir)
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
args.n_gpu = torch.cuda.device_count()
if args.n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
if not args.do_train and not args.do_predict:
raise ValueError("At least one of `do_train` or `do_predict` must be True.")
if args.do_train:
if not args.train_file:
raise ValueError("If `do_train` is True, then `train_file` must be specified.")
if not args.predict_file:
raise ValueError("If `do_train` is True, then `predict_file` must be specified.")
if args.do_predict:
if not args.predict_file:
raise ValueError("If `do_predict` is True, then `predict_file` must be specified.")
logger.info("Using {} gpus".format(args.n_gpu))
run(args, logger)
if __name__=='__main__':
main()
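# Example invocation sketch (illustrative; the script name and paths are assumptions):
#   python cli.py --dataset zsre --output_dir out/zsre --do_train \
#       --train_file data/structured_zeroshot-train-kilt.jsonl \
#       --predict_file data/structured_zeroshot-dev-kilt.jsonl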
|
python
|
from apistar import http, Route
from apistar.backends.sqlalchemy_backend import Session
from db import Auto
def make_auto_dict(auto: Auto) -> dict:
""" Returns a dictionary of the relevant attributes from an Auto object """
return {
"id": auto.id,
"name": auto.name,
"make": auto.make,
"model": auto.model,
"year": auto.year
}
def create_auto(session: Session, data: http.RequestData) -> dict:
""" Creates auto in db. Called from POST /autos/ """
auto = Auto(**data)
session.add(auto)
session.flush()
return make_auto_dict(auto)
def list_autos(session: Session) -> list:
""" Lists autos from db. Called from GET /autos/ """
queryset = session.query(Auto).all()
return [make_auto_dict(auto) for auto in queryset]
def get_auto(session: Session, auto_id: int) -> dict:
""" Get an auto from db by id. Called from GET /autos/:id """
auto = session.query(Auto).get(auto_id)
return make_auto_dict(auto)
def update_auto(session: Session, auto_id: int, data: http.RequestData) -> dict:
""" Update a given auto. Called from PATCH /autos/:id """
auto_properties = ["name", "make", "model", "year"]
auto = session.query(Auto).get(auto_id)
for auto_prop in auto_properties:
if auto_prop in data:
setattr(auto, auto_prop, data[auto_prop])
session.flush()
return make_auto_dict(auto)
def delete_auto(session: Session, auto_id: int, data: http.RequestData) -> None:
""" Delete a given auto. Called from DELETE /autos/:id """
auto = session.query(Auto).get(auto_id)
session.delete(auto)
session.flush()
return None
auto_urls = [
Route('/', 'GET', list_autos),
Route('/', 'POST', create_auto),
Route('/{auto_id}', 'PATCH', update_auto),
Route('/{auto_id}', 'DELETE', delete_auto),
Route('/{auto_id}', 'GET', get_auto)
]
|
python
|
# MIT License
# Copyright (c) 2016 Alexis Bekhdadi (midoriiro) <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# TODO: write module, function, class, method docstring
# pylint: disable=missing-docstring
import os
import pytest
def get_data_path(filename):
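"""Return the absolute path to a file inside this test suite's data/ directory."""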
base = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'data'
)
return os.path.join(base, filename)
|
python
|
import unittest
import time
from datetime import datetime
from sqlalchemy.exc import IntegrityError
from app import mail
from flask import current_app
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission, Follow
class AdditionModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
Role.insert_roles()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_mail_send(self):
app = current_app._get_current_object()
with mail.record_messages() as outbox:
mail.send_message(subject='testing',
body='test',
sender=app.config['FLASKY_MAIL_SENDER'],
recipients=["[email protected]"])
self.assertTrue(len(outbox) == 1)
self.assertTrue(outbox[0].subject == "testing")
|
python
|
from django.test import TestCase
from accounts.forms import RegisterForm, SignInForm
from django.contrib.auth.models import User
class TestAccountForm(TestCase):
def test_register_form(self):
form = RegisterForm(
data={
"email": "[email protected]",
"username": "testuser24",
"password1": "Justatest123",
"password2": "Justatest123",
}
)
self.assertTrue(form.is_valid())
def test_register_form_invalid(self):
form = RegisterForm(
data={
"email": "[email protected]",
"username": "testuser24",
"password1": "Justatest12",
"password2": "Justatest123",
}
)
self.assertFalse(form.is_valid())
def test_register_form_no_data(self):
form = RegisterForm(data={})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 4)
def test_signIn_form(self):
user = User.objects.create_user(username="testuser", password="justatest12")
form = SignInForm(data={"username": "testuser", "password": "justatest12"})
self.assertTrue(form.is_valid())
def test_signIn_form_invalid(self):
user = User.objects.create_user(username="testuser", password="justatest12")
form = SignInForm(data={"username": "testuse", "password": "justatest12"})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
def test_signIn_form_no_data(self):
user = User.objects.create_user(username="testuser", password="justatest12")
form = SignInForm(data={})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 2)
|
python
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'settings_widget.ui'
#
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from tank.platform.qt import QtCore, QtGui
class Ui_SettingsWidget(object):
def setupUi(self, SettingsWidget):
SettingsWidget.setObjectName("SettingsWidget")
SettingsWidget.resize(337, 645)
self.verticalLayout = QtGui.QVBoxLayout(SettingsWidget)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.settings_scroll_area = QtGui.QScrollArea(SettingsWidget)
self.settings_scroll_area.setWidgetResizable(True)
self.settings_scroll_area.setObjectName("settings_scroll_area")
self.settings_host = QtGui.QWidget()
self.settings_host.setGeometry(QtCore.QRect(0, 0, 335, 643))
self.settings_host.setObjectName("settings_host")
self.settings_layout = QtGui.QGridLayout(self.settings_host)
self.settings_layout.setContentsMargins(0, 0, 0, 0)
self.settings_layout.setSpacing(0)
self.settings_layout.setObjectName("settings_layout")
self.settings_scroll_area.setWidget(self.settings_host)
self.verticalLayout.addWidget(self.settings_scroll_area)
self.retranslateUi(SettingsWidget)
QtCore.QMetaObject.connectSlotsByName(SettingsWidget)
def retranslateUi(self, SettingsWidget):
SettingsWidget.setWindowTitle(QtGui.QApplication.translate("SettingsWidget", "Form", None, QtGui.QApplication.UnicodeUTF8))
|
python
|
#!/usr/bin/env python
import logging
import sys
from lxml import etree
from ncclient import manager
from ncclient.xml_ import *
def connect(host, port, user, password):
conn = manager.connect(host=host,
port=port,
username=user,
password=password,
timeout=10,
device_params={'name': 'alu'},
hostkey_verify=False)
logging.info('Retrieving full config, please wait ...')
result = conn.get_configuration()
logging.info(result)
logging.info('Here is the chassis configuration')
output = result.xpath('data/configure/system/chassis')[0]
logging.info(to_xml(output))
logging.info('Retrieving service config')
# build a subtree filter to pass to get_configuration (ALU_CONFIG is assumed to be the
# ALU configuration namespace URI made available elsewhere, e.g. by the wildcard import above)
filter = new_ele('configure', attrs={'xmlns': ALU_CONFIG})
sub_ele(filter, 'service')
result = conn.get_configuration(filter=filter)
epipes = result.xpath('data/configure/service/epipe')
for i in epipes:
logging.info(etree.tostring(i, pretty_print=True).decode('utf-8'))
logging.info('Getting CLI -config')
cli_cfg = conn.get_configuration(content='cli', filter=['port 1/1/11'])
logging.info(cli_cfg)
logging.info('Get detailed CLI -config')
cli_cfg = conn.get_configuration(content='cli', filter=['port 1/1/11'], detail=True)
logging.info(cli_cfg)
conn.close_session()
if __name__ == '__main__':
LOG_FORMAT = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(message)s'
logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=LOG_FORMAT)
connect('localhost', 830, 'admin', 'admin')
|
python
|
import shlex
def parse_query(query):
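"""Parse a search query string into a dict mapping field names to lists of values.
Bare tokens are collected under the 'search' key; tokens of the form "field:" or
"field:value" assign the following (or embedded) value to that field.
Illustrative example: parse_query('title: "foo bar" baz')
returns {'search': ['baz'], 'title': ['foo bar']}.
"""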
tokens = shlex.split(query)
dsl = {'search': []}
key = ''
for token in tokens:
if token.endswith(':'):
key = token[:-1]
else:
value = token or ''
if ':' in token:
key, word = token.split(':', 1)
value = word
elif not key:
key = 'search'
if key in dsl:
dsl[key].append(value)
else:
dsl[key] = [value]
key = ''
# if key:
# dsl[key] = ''
return dsl
|
python
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
def SyncBatchNorm(*args, **kwargs):
"""In cpu environment nn.SyncBatchNorm does not have kernel so use nn.BatchNorm2D instead"""
if paddle.get_device() == 'cpu' or os.environ.get('PADDLESEG_EXPORT_STAGE'):
return nn.BatchNorm2D(*args, **kwargs)
elif paddle.distributed.ParallelEnv().nranks == 1:
return nn.BatchNorm2D(*args, **kwargs)
else:
return nn.SyncBatchNorm(*args, **kwargs)
class ConvBNReLU(nn.Layer):
def __init__(self,
in_channels,
out_channels,
kernel_size,
padding='same',
**kwargs):
super().__init__()
self._conv = nn.Conv2D(
in_channels, out_channels, kernel_size, padding=padding, **kwargs)
if 'data_format' in kwargs:
data_format = kwargs['data_format']
else:
data_format = 'NCHW'
self._batch_norm = SyncBatchNorm(out_channels, data_format=data_format)
def forward(self, x):
x = self._conv(x)
x = self._batch_norm(x)
x = F.relu(x)
return x
class ConvBN(nn.Layer):
def __init__(self,
in_channels,
out_channels,
kernel_size,
padding='same',
**kwargs):
super().__init__()
self._conv = nn.Conv2D(
in_channels, out_channels, kernel_size, padding=padding, **kwargs)
if 'data_format' in kwargs:
data_format = kwargs['data_format']
else:
data_format = 'NCHW'
self._batch_norm = SyncBatchNorm(out_channels, data_format=data_format)
def forward(self, x):
x = self._conv(x)
x = self._batch_norm(x)
return x
class ConvReLUPool(nn.Layer):
def __init__(self, in_channels, out_channels):
super().__init__()
self.conv = nn.Conv2D(
in_channels,
out_channels,
kernel_size=3,
stride=1,
padding=1,
dilation=1)
def forward(self, x):
x = self.conv(x)
x = F.relu(x)
x = F.max_pool2d(x, kernel_size=2, stride=2)
return x
class SeparableConvBNReLU(nn.Layer):
def __init__(self,
in_channels,
out_channels,
kernel_size,
padding='same',
**kwargs):
super().__init__()
self.depthwise_conv = ConvBN(
in_channels,
out_channels=in_channels,
kernel_size=kernel_size,
padding=padding,
groups=in_channels,
**kwargs)
if 'data_format' in kwargs:
data_format = kwargs['data_format']
else:
data_format = 'NCHW'
self.piontwise_conv = ConvBNReLU(
in_channels,
out_channels,
kernel_size=1,
groups=1,
data_format=data_format)
def forward(self, x):
x = self.depthwise_conv(x)
x = self.piontwise_conv(x)
return x
class DepthwiseConvBN(nn.Layer):
def __init__(self,
in_channels,
out_channels,
kernel_size,
padding='same',
**kwargs):
super().__init__()
self.depthwise_conv = ConvBN(
in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
padding=padding,
groups=in_channels,
**kwargs)
def forward(self, x):
x = self.depthwise_conv(x)
return x
class AuxLayer(nn.Layer):
"""
The auxiliary layer implementation for auxiliary loss.
Args:
in_channels (int): The number of input channels.
inter_channels (int): The intermediate channels.
out_channels (int): The number of output channels, and usually it is num_classes.
dropout_prob (float, optional): The drop rate. Default: 0.1.
"""
def __init__(self,
in_channels,
inter_channels,
out_channels,
dropout_prob=0.1):
super().__init__()
self.conv_bn_relu = ConvBNReLU(
in_channels=in_channels,
out_channels=inter_channels,
kernel_size=3,
padding=1)
self.dropout = nn.Dropout(p=dropout_prob)
self.conv = nn.Conv2D(
in_channels=inter_channels,
out_channels=out_channels,
kernel_size=1)
def forward(self, x):
x = self.conv_bn_relu(x)
x = self.dropout(x)
x = self.conv(x)
return x
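if __name__ == '__main__':
# Quick sanity check (illustrative, not part of the original module; the channel
# sizes and class count below are assumptions).
aux_head = AuxLayer(in_channels=1024, inter_channels=256, out_channels=19)
logits = aux_head(paddle.rand([1, 1024, 64, 64]))
print(logits.shape)  # expected: [1, 19, 64, 64]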
|
python
|
# Generated by Django 3.2.7 on 2021-10-30 21:49
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('workOrder', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='directive',
old_name='workOders',
new_name='workOrders',
),
]
|
python
|
# Write a program that reads a measurement in meters and displays it converted to centimeters and millimeters
medida = float(input('Digite a sua medida: '))
print(f'A medida de {medida}m corresponde a {medida*100}cm e {medida*1000}mm')
|
python
|
"""This module contains modules.
Submodules
==========
.. autosummary::
:toctree: _autosummary
module
gp_modules
"""
__all__ = ['module', 'gp_modules']
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
alphabet.py
Automation sketch to fire LEDs to display clue words.
usage: python3 alphabet.py [-h] [-s SERVER] [-p PORT] [-d] [-l LOGGER]
optional arguments:
-h, --help show this help message and exit
-s SERVER, --server SERVER
change MQTT server host
-p PORT, --port PORT change MQTT server port
-d, --debug set DEBUG log level
-l LOGGER, --logger LOGGER
use logging config file
To switch MQTT broker, kill the program and start again with new arguments.
'''
from PyQt5.QtCore import QUuid
import paho.mqtt.client as mqtt
import os, sys, platform, signal
from constants import *
from AlphabetApp import AlphabetApp
from Singleton import Singleton, SingletonException
me = None
try:
me = Singleton()
except SingletonException:
sys.exit(-1)
except BaseException as e:
print(e)
os.chdir(os.path.dirname(os.path.abspath(__file__)))
clientid = MQTT_CLIENTID_PREFIX + QUuid.createUuid().toString()
mqtt_client = mqtt.Client(clientid, clean_session=True, userdata=None)
sketch = AlphabetApp(sys.argv, mqtt_client, debugging_mqtt=False, gpio_bcm=True, no_gpio=False)
if sketch._logger:
sketch._logger.info(sketch.tr("Sketch started"))
# Assign handlers for process exit (has no effect on Windows when debugging here)
signal.signal(signal.SIGTERM, sketch.quit)
signal.signal(signal.SIGINT, sketch.quit)
if platform.system() != 'Windows':
signal.signal(signal.SIGHUP, sketch.quit)
signal.signal(signal.SIGQUIT, sketch.quit)
sketch.start()
if sketch._logger:
raspberryPi = sketch.raspberryPiVersion()
if raspberryPi:
sketch._logger.info("{0} {1}".format(sketch.tr("Sketch running on Raspberry Pi"), raspberryPi))
elif platform.system() == 'Windows':
sketch._logger.info(sketch.tr("Sketch running on Windows"))
rc = sketch.exec_()
try:
mqtt_client.disconnect()
mqtt_client.loop_stop()
except:
pass
if sketch._logger:
sketch._logger.info(sketch.tr("Sketch done"))
del(me)
sys.exit(rc)
|
python
|
"""empty message
Revision ID: fd51d3fa9aef
Revises: 1c96aaeccb0b
Create Date: 2022-03-14 12:16:43.830823
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fd51d3fa9aef'
down_revision = '1c96aaeccb0b'
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
|
python
|
# coding=utf-8
# Empty file; it only exists so that the handle directory is treated as a package, one of Python's quirky rules ~
|
python
|