hexsha stringlengths 40 40 | size int64 6 782k | ext stringclasses 7 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 237 | max_stars_repo_name stringlengths 6 72 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 53k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 184 | max_issues_repo_name stringlengths 6 72 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 27.1k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 184 | max_forks_repo_name stringlengths 6 72 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 12.2k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 6 782k | avg_line_length float64 2.75 664k | max_line_length int64 5 782k | alphanum_fraction float64 0 1 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
58de394fd8dd21900c4b91e9bd80d5a68898ec92 | 5,149 | py | Python | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/network/eric_eccli/test_eric_eccli_command.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | ["MIT"] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/network/eric_eccli/test_eric_eccli_command.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | ["MIT"] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/network/eric_eccli/test_eric_eccli_command.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | ["MIT"] | null | null | null |
# (c) 2019 Ericsson.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.plugins.modules.network.eric_eccli import eric_eccli_command
from ansible_collections.community.general.tests.unit.modules.utils import set_module_args
from ..eccli_module import TestEccliModule, load_fixture
class TestEccliCommandModule(TestEccliModule):
module = eric_eccli_command
def setUp(self):
super(TestEccliCommandModule, self).setUp()
self.mock_run_commands = patch('ansible_collections.community.general.plugins.modules.network.eric_eccli.eric_eccli_command.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
super(TestEccliCommandModule, self).tearDown()
self.mock_run_commands.stop()
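    # load_fixtures maps every queued CLI command to an on-disk fixture, so
    # the mocked run_commands never contacts a real device: "show version"
    # is served from the fixture file "show_version".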
def load_fixtures(self, commands=None):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
obj = json.loads(item['command'])
command = obj['command']
except ValueError:
command = item['command']
filename = str(command).replace(' ', '_')
output.append(load_fixture(filename))
return output
self.run_commands.side_effect = load_from_file
def test_eric_eccli_command_simple(self):
set_module_args(dict(commands=['show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 1)
self.assertTrue(result['stdout'][0].startswith('Ericsson IPOS Version'))
def test_eric_eccli_command_multiple(self):
set_module_args(dict(commands=['show version', 'show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 2)
self.assertTrue(result['stdout'][0].startswith('Ericsson IPOS Version'))
def test_eric_eccli_command_wait_for(self):
wait_for = 'result[0] contains "Ericsson IPOS"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module()
def test_eric_eccli_command_wait_for_fails(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 10)
def test_eric_eccli_command_retries(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 2)
def test_eric_eccli_command_match_any(self):
wait_for = ['result[0] contains "Ericsson IPOS"',
'result[0] contains "test string"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
self.execute_module()
def test_eric_eccli_command_match_all(self):
wait_for = ['result[0] contains "Ericsson IPOS"',
'result[0] contains "Version IPOS"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
self.execute_module()
def test_eric_eccli_command_match_all_failure(self):
wait_for = ['result[0] contains "Ericsson IPOS"',
'result[0] contains "test string"']
commands = ['show version', 'show version']
set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
self.execute_module(failed=True)
def test_eric_eccli_command_configure_check_warning(self):
commands = ['configure terminal']
set_module_args({
'commands': commands,
'_ansible_check_mode': True,
})
result = self.execute_module()
self.assertEqual(
result['warnings'],
['only non-config commands are supported when using check mode, not executing configure terminal'],
)
def test_eric_eccli_command_configure_not_warning(self):
commands = ['configure terminal']
set_module_args(dict(commands=commands))
result = self.execute_module()
self.assertEqual(result['warnings'], [])
| 40.543307 | 146 | 0.682463 |
1890c534a93013bc8e1e81f9d0fa1418d165e09f | 2,316 | py | Python | Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/57_sistema_perguntas_e_respostas/sistema_de_perguntas_respostas.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | ["MIT"] | null | null | null | Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/57_sistema_perguntas_e_respostas/sistema_de_perguntas_respostas.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | ["MIT"] | null | null | null | Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/57_sistema_perguntas_e_respostas/sistema_de_perguntas_respostas.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | ["MIT"] | null | null | null |
"""
Sistema de perguntas e respostas
"""
print()
print('Desafio multipla escolha.')
print('=' * 28)
perguntas = {
'Pergunta 1': {
'pergunta': 'Quanto é 2 + 2? ',
'respostas': {'a': '1', 'b': '4', 'c': '5'},
'respostas_certas': 'b',
},
'Pergunta 2': {
'pergunta': 'Quanto é 3 * 2? ',
'respostas': {'a': '6', 'b': '4', 'c': '5'},
'respostas_certas': 'a',
},
'Pergunta 3': {
'pergunta': 'Quanto é 9 * 7? ',
'respostas': {'a': '50', 'b': '63', 'c': '70'},
'respostas_certas': 'b',
},
'Pergunta 4': {
'pergunta': 'Quanto é 50 / 2? ',
'respostas': {'a': '26', 'b': '20', 'c': '25'},
'respostas_certas': 'c',
},
'Pergunta 5': {
'pergunta': 'Quanto é 1000 / 100? ',
'respostas': {'a': '200', 'b': '100', 'c': '10'},
'respostas_certas': 'c',
},
'Pergunta 6': {
'pergunta': 'Quanto é 120 + 280? ',
'respostas': {'a': '400', 'b': '380', 'c': '800'},
'respostas_certas': 'a',
},
'Pergunta 7': {
'pergunta': 'Quanto é 100 * 10? ',
'respostas': {'a': '200', 'b': '10000', 'c': '1000'},
'respostas_certas': 'c',
},
'Pergunta 8': {
'pergunta': 'Quanto é 1 * 1? ',
'respostas': {'a': '2', 'b': '1', 'c': '0'},
'respostas_certas': 'b',
},
'Pergunta 9': {
'pergunta': 'Quanto é 0 * 10? ',
'respostas': {'a': '10', 'b': '0', 'c': '5'},
'respostas_certas': 'b',
},
'Pergunta 10': {
'pergunta': 'Quanto é 500 + 501? ',
'respostas': {'a': '1002', 'b': '1000', 'c': '1001'},
'respostas_certas': 'c',
},
}
respostas_certas = 0
for pk, pv in perguntas.items():
print(f'{pk}: {pv["pergunta"]}')
print('Respostas: ')
for rk, rv in pv['respostas'].items():
print(f'[{rk}]: {rv}')
resposta_usuario = input('Sua resposta: ').lower()
if resposta_usuario == pv['respostas_certas']:
print('EHHH!!! Você acertou!!!!')
respostas_certas += 1
else:
print('IXIIII!! Você ERROU!!!!')
print()
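# e.g. 7 correct answers out of 10 questions -> 7 / 10 * 100 = 70.00%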
porcentagem_acerto = respostas_certas / len(perguntas) * 100
print(f'Você acertou {respostas_certas} respostas.')
print(f'Sua porcentagem de acerto foi de {porcentagem_acerto:.2f}%.')
| 29.692308 | 69 | 0.474093 |
e198ba478c238f5d9fd8eb8e7c85d01fe28d32f6 | 915 | py | Python | udacity course code/01-02-readmorestocks.py | bluemurder/mlfl | b895b2f1d01b0f6418a5bcee2f204dd7916062f0 | ["MIT"] | 1 | 2021-03-22T22:25:54.000Z | 2021-03-22T22:25:54.000Z | udacity course code/01-02-readmorestocks.py | bluemurder/mlfl | b895b2f1d01b0f6418a5bcee2f204dd7916062f0 | ["MIT"] | 6 | 2017-01-16T09:53:21.000Z | 2017-01-18T12:20:09.000Z | udacity course code/01-02-readmorestocks.py | bluemurder/mlfl | b895b2f1d01b0f6418a5bcee2f204dd7916062f0 | ["MIT"] | null | null | null |
import pandas as pd
def test_run():
#Define date range
start_date = '2010-01-22'
    end_date = '2010-01-26'
dates = pd.date_range(start_date, end_date)
#Create an empty dataframe
df1 = pd.DataFrame(index = dates)
#Read SPY data into temporary dataframe
dfSPY = pd.read_csv("data/SPY.csv", index_col = "Date",
parse_dates = True, usecols = ['Date', 'Adj Close'], na_values = ['nan'])
#Join the two dataframes using DataFrame.join(), with how='inner'
df1 = df1.join(dfSPY, how = 'inner')
#Read in more stocks
symbols = ['GOOG', 'IBM', 'GLD']
for symbol in symbols:
df_temp = pd.read_csv("data/{}.csv".format(symbol), index_col = "Date",
            parse_dates = True, usecols = ['Date', 'Adj Close'],
na_values = ['nan'])
# Rename to prevent clash
df_temp = df_temp.rename(columns={'Adj Close' : symbol})
# Use default how='left'
        df1 = df1.join(df_temp)
    print(df1)
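
# Entry point (an assumed convention for these course scripts, which
# otherwise define test_run() without ever calling it)
if __name__ == "__main__":
    test_run()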
| 28.59375 | 94 | 0.653552 |
beca4eb85deb8e3aa3021730056125568bddf916 | 6,534 | py | Python | research/cv/SiamFC/src/custom_transforms.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | research/cv/SiamFC/src/custom_transforms.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | research/cv/SiamFC/src/custom_transforms.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""used in data enhance"""
import numpy as np
import cv2
class RandomStretch():
"""
    Randomly resize the image according to the stretch factor
Args:
max_stretch(float): 0 to 1 value
"""
def __init__(self, max_stretch=0.05):
        self.max_stretch = max_stretch
def __call__(self, sample):
"""
Args:
sample(numpy array): 3 or 1 dim image
"""
scale_h = 1.0 + np.random.uniform(-self.max_stretch, self.max_stretch)
scale_w = 1.0 + np.random.uniform(-self.max_stretch, self.max_stretch)
h, w = sample.shape[:2]
        # cv2.resize takes dsize as (width, height); the third positional
        # argument is dst, so the interpolation flag must be passed by keyword
        shape = (int(w * scale_w), int(h * scale_h))
        return cv2.resize(sample, shape, interpolation=cv2.INTER_LINEAR)
class CenterCrop():
"""
    Crop the image at the center according to the given size;
    if the size is greater than the image size, zero padding is adopted
Args:
size (tuple): desired size
"""
def __init__(self, size):
self.size = size # z-> 127x127 x->255x255
def __call__(self, sample):
"""
Args:
sample(numpy array): 3 or 1 dim image
"""
shape = sample.shape[:2]
cy, cx = (shape[0]-1) // 2, (shape[1]-1) // 2
ymin, xmin = cy - self.size[0]//2, cx - self.size[1] // 2
ymax, xmax = cy + self.size[0]//2 + self.size[0] % 2,\
cx + self.size[1]//2 + self.size[1] % 2
left = right = top = bottom = 0
im_h, im_w = shape
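        # Track how far the crop window sticks out of the image on each side;
        # the out-of-bounds margin is zero-padded back on after slicing.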
if xmin < 0:
left = int(abs(xmin))
if xmax > im_w:
right = int(xmax - im_w)
if ymin < 0:
top = int(abs(ymin))
if ymax > im_h:
bottom = int(ymax - im_h)
xmin = int(max(0, xmin))
xmax = int(min(im_w, xmax))
ymin = int(max(0, ymin))
ymax = int(min(im_h, ymax))
im_patch = sample[ymin:ymax, xmin:xmax]
if left != 0 or right != 0 or top != 0 or bottom != 0:
im_patch = cv2.copyMakeBorder(im_patch, top, bottom, left, right,
cv2.BORDER_CONSTANT, value=0)
return im_patch
class RandomCrop():
"""
    Crop the image at a randomly shifted center according to the given size;
    if the size is greater than the image size, zero padding is adopted
Args:
size (tuple): desired size
max_translate: max translate of random shift
"""
def __init__(self, size, max_translate):
self.size = size # 255 - 2*stride stride=8
self.max_translate = max_translate # 255 - 2*stride
def __call__(self, sample):
"""
Args:
sample(numpy array): 3 or 1 dim image
"""
shape = sample.shape[:2]
cy_o = (shape[0] - 1) // 2
cx_o = (shape[1] - 1) // 2
cy = np.random.randint(cy_o - self.max_translate,
cy_o + self.max_translate+1)
cx = np.random.randint(cx_o - self.max_translate,
cx_o + self.max_translate+1)
assert abs(cy-cy_o) <= self.max_translate and \
abs(cx-cx_o) <= self.max_translate
ymin = cy - self.size[0] // 2
xmin = cx - self.size[1] // 2
ymax = cy + self.size[0] // 2 + self.size[0] % 2
xmax = cx + self.size[1] // 2 + self.size[1] % 2
left = right = top = bottom = 0
im_h, im_w = shape
if xmin < 0:
left = int(abs(xmin))
if xmax > im_w:
right = int(xmax - im_w)
if ymin < 0:
top = int(abs(ymin))
if ymax > im_h:
bottom = int(ymax - im_h)
xmin = int(max(0, xmin))
xmax = int(min(im_w, xmax))
ymin = int(max(0, ymin))
ymax = int(min(im_h, ymax))
im_patch = sample[ymin:ymax, xmin:xmax]
if left != 0 or right != 0 or top != 0 or bottom != 0:
im_patch = cv2.copyMakeBorder(im_patch, top, bottom, left, right,
cv2.BORDER_CONSTANT, value=0)
return im_patch
class ColorAug():
"""
colorAug
"""
def __init__(self, type_in='z'):
if type_in == 'z':
rgb_var = np.array([[3.2586416e+03, 2.8992207e+03, 2.6392236e+03],
[2.8992207e+03, 3.0958174e+03, 2.9321748e+03],
[2.6392236e+03, 2.9321748e+03, 3.4533721e+03]])
if type_in == 'x':
rgb_var = np.array([[2.4847285e+03, 2.1796064e+03, 1.9766885e+03],
[2.1796064e+03, 2.3441289e+03, 2.2357402e+03],
[1.9766885e+03, 2.2357402e+03, 2.7369697e+03]])
self.v, _ = np.linalg.eig(rgb_var)
self.v = np.sqrt(self.v)
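        # Eigen-decompose the RGB covariance; __call__ scales Gaussian noise
        # by the sqrt-eigenvalues (an AlexNet-style PCA colour jitter, here
        # without the eigenvector rotation).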
def __call__(self, sample):
return sample + 0.1 * self.v * np.random.randn(3)
class RandomBlur():
"""Randomblur"""
def __init__(self, ratio):
self.ratio = ratio
def __call__(self, sample):
if np.random.rand(1) < self.ratio:
# random kernel size
kernel_size = np.random.choice([3, 5, 7])
# random gaussian sigma
sigma = np.random.rand() * 5
sample_gaussian = cv2.GaussianBlur(sample, (kernel_size, kernel_size), sigma)
else:
return sample
return sample_gaussian
class Normalize():
"""
image normalize to 0-1
"""
def __init__(self):
self.mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)
self.std = np.array([0.229, 0.224, 0.225], dtype=np.float32)
def __call__(self, sample):
return (sample / 255. - self.mean) / self.std
class ToTensor():
"""transpose and totensor"""
def __call__(self, sample):
sample = np.transpose(sample, (2, 0, 1))
return np.array(sample, dtype=np.float32)
| 34.941176 | 89 | 0.535047 |
831c7eade82b03c4ce032f487e6d23cdc9e5c6ef | 3,894 | py | Python | training_testing/test.py | rafaspadilha/timestampVerificationTIFS | bad325e676b0b6087e54f2e280c3600c3b0b767f | ["MIT"] | 5 | 2022-03-11T18:08:32.000Z | 2022-03-31T13:47:49.000Z | training_testing/test.py | rafaspadilha/timestampVerificationTIFS | bad325e676b0b6087e54f2e280c3600c3b0b767f | ["MIT"] | null | null | null | training_testing/test.py | rafaspadilha/timestampVerificationTIFS | bad325e676b0b6087e54f2e280c3600c3b0b767f | ["MIT"] | null | null | null |
#####################################################
# Content-Aware Detection of Timestamp Manipulation #
# IEEE Trans. on Information Forensics and Security #
# R. Padilha, T. Salem, S. Workman, #
# F. A. Andalo, A. Rocha, N. Jacobs #
#####################################################
##### DESCRIPTION
"""
Example of testing script considering DenseNet as backbone,
location and satellite included as input modalities, and
multi-task optimization (including transient attribute estimation)
"""
#########################
# IMPORTS #
#########################
## General imports
import numpy as np
import os, sys
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report
from sklearn.metrics import roc_curve, auc
## Dataloader
sys.path.append("../datasets")
from dataLoader import DataLoader
## Keras
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.losses import mean_squared_error
## GPU selection
import tensorflow as tf
gpuNumber = 1
gpus = tf.config.experimental.list_physical_devices('GPU')
tf.config.experimental.set_visible_devices(gpus[gpuNumber], 'GPU')
tf.config.experimental.set_memory_growth(gpus[gpuNumber], True)
#########################
# DEFINITIONS #
#########################
# Path to the weights of the model that will be evaluated
# pathToModel = sys.argv[1] ### uncomment to pass it as parameter
pathToModel = "../models/dummy_model.hdf5"
# Path to save the ROC files
pathToSaveRocs = "./"
# Running multiple times (for different random tamperings)
## and we will report the average of such runs
nRuns = 10
batchSize = 32
#######################
## Custom MSE ##
#######################
# We will compute the MSE only for the consistent inputs
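# (the data loader appears to interleave consistent/tampered samples, so the
#  y[0::2] slice below selects the consistent rows -- an inference from the
#  code, not documented behaviour)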
def transient_mse(y_true, y_pred):
    return tf.reduce_sum(mean_squared_error(y_true[0::2, :], y_pred[0::2, :]), axis=-1)
#######################
## Load architecture
#######################
model = load_model(pathToModel, custom_objects={"transient_mse":transient_mse})
model.summary()  # summary() prints directly; wrapping it in print() just adds "None"
#######################
## Testing Setup
#######################
dl = DataLoader(setToLoad="test", includeLocation=True,
includeSatellite=True, outputTransientAttributes=True)
### List to store the statistics for each run
accList = []
tTamperedRate = []
tRealRate = []
aucList = []
for runIdx in range(nRuns):
print("\n\nRun --> ", runIdx+1, " / ", nRuns)
yTrueList, yPredList, yScoreList = [], [], []
for batch, labels in dl.loadTestDataInBatches(batchSize, seed=runIdx*42):
preds = model.predict_on_batch(batch)[0] #get only the consistOrNot Branch
y_true = np.argmax(labels[0], axis=1) #get only the consistOrNot Labels
y_pred = np.argmax(preds, axis=1)
yTrueList += list(y_true)
yPredList += list(y_pred)
yScoreList += [p[1] for p in preds]
acc = accuracy_score(yTrueList, yPredList)
cm = confusion_matrix(yTrueList, yPredList)
trr = cm[0,0] / float(np.sum(cm[0,:]))
ttr = cm[1,1] / float(np.sum(cm[1,:]))
print("Acc = ", acc)
print("True Real Rate = ", trr)
print("True Tampered Rate = ", ttr)
print("Conf Matrix")
print(cm)
accList += [acc]
tTamperedRate += [ttr]
tRealRate += [trr]
fpr, tpr, _ = roc_curve(yTrueList, yScoreList)
roc_auc = auc(fpr, tpr)
aucList += [roc_auc]
print("AUC = ", roc_auc)
print(fpr.shape, tpr.shape, np.array([fpr, tpr]).shape)
np.save(os.path.join(pathToSaveRocs,"fpr_tpr_run_" + str(runIdx)), np.array([fpr, tpr]))
### After all runs, print the average and std
print("==============")
print("ACC (mean +- std) = ", np.mean(accList), " +- ", np.std(accList))
print("True Tampered Rate (mean +- std) = ", np.mean(tTamperedRate), " +- ", np.std(tTamperedRate))
print("True Real Rate (mean +- std) = ", np.mean(tRealRate), " +- ", np.std(tRealRate))
print("AUC (mean +- std) = ", np.mean(aucList), " +- ", np.std(aucList))
| 28.014388 | 99 | 0.62943 |
8357174fda35522a219d3857f1a2d3ad0743d28e | 989 | py | Python | build.py | schreibubi/conan-protobuf | 4f75f7b6cbba280b1ece9673b39140637c58781c | ["MIT"] | 4 | 2018-04-13T02:15:01.000Z | 2020-03-08T08:44:03.000Z | build.py | schreibubi/conan-protobuf | 4f75f7b6cbba280b1ece9673b39140637c58781c | ["MIT"] | 15 | 2017-12-05T23:12:36.000Z | 2020-06-24T15:31:50.000Z | recipes/protobuf/3.9.1/build.py | SolarFramework/conan-solar | cdb6253556c056baacaf3e5f28b595869adddb88 | ["Apache-2.0"] | 35 | 2017-12-05T16:27:25.000Z | 2021-07-16T16:40:37.000Z |
#!/usr/bin/env python
from bincrafters import build_template_default, build_template_installer, build_shared
from conans import tools
import os
# installer might depend on lib or vice versa. Make sure that the dependency is available and up-to-date
build_policy = os.getenv("CONAN_BUILD_POLICY", "outdated")
os.environ["CONAN_BUILD_POLICY"] = build_policy
if __name__ == "__main__":
docker_entry_script = ".ci/entry.sh"
docker_entry_installer_script = ".ci/entry_installer.sh"
if "CONAN_CONANFILE" in os.environ and os.environ["CONAN_CONANFILE"] == "conanfile_installer.py":
arch = os.environ["ARCH"]
builder = build_template_installer.get_builder(docker_entry_script=docker_entry_installer_script)
builder.add({"os": build_shared.get_os(), "arch_build": arch, "arch": arch}, {}, {}, {})
builder.run()
else:
builder = build_template_default.get_builder(docker_entry_script=docker_entry_script, pure_c=False)
builder.run()
| 43 | 107 | 0.740142 |
36c8422408f13fcff54768a809681cc12a26fbe8 | 466 | py | Python | ProjectEuler_plus/euler_005.py | byung-u/HackerRank | 4c02fefff7002b3af774b99ebf8d40f149f9d163 | ["MIT"] | null | null | null | ProjectEuler_plus/euler_005.py | byung-u/HackerRank | 4c02fefff7002b3af774b99ebf8d40f149f9d163 | ["MIT"] | null | null | null | ProjectEuler_plus/euler_005.py | byung-u/HackerRank | 4c02fefff7002b3af774b99ebf8d40f149f9d163 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
from functools import reduce
t = int(input().strip())
for a0 in range(t):
n = int(input().strip())
if n == 1:
print(1)
elif n == 2:
print(2)
elif n == 3:
print(6)
else:
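        # Build a factor list whose product is lcm(1..n): reduce each new i by
        # the factors gathered so far and keep only the leftover factor.
        # e.g. n = 7 -> nums = [2, 3, 2, 5, 1, 7], product 420 = lcm(1..7)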
        nums = [2, 3]
        for i in range(4, n + 1):
            for f in nums:  # f, not n, to avoid shadowing the outer n
                if i % f == 0:
                    i //= f
            nums.append(i)
        print(reduce(lambda x, y: x * y, nums))
| 20.26087 | 47 | 0.433476 |
3d41d27f73d5b1df50a9ac25d59393fb507f70aa | 131 | py | Python | forge/configs.py | Pandinosaurus/3d-forge | d631e14a9351911c3e5612c73c1608d97ed547d2 | ["BSD-3-Clause"] | 31 | 2015-07-13T15:36:50.000Z | 2022-02-07T21:37:51.000Z | forge/configs.py | Pandinosaurus/3d-forge | d631e14a9351911c3e5612c73c1608d97ed547d2 | ["BSD-3-Clause"] | 109 | 2015-04-24T10:03:24.000Z | 2019-04-12T13:34:01.000Z | forge/configs.py | Pandinosaurus/3d-forge | d631e14a9351911c3e5612c73c1608d97ed547d2 | ["BSD-3-Clause"] | 16 | 2015-10-03T06:03:22.000Z | 2022-03-31T08:24:37.000Z |
# -*- coding: utf-8 -*-
import ConfigParser
tmsConfig = ConfigParser.RawConfigParser()
tmsConfig.read('configs/terrain/tms.cfg')
| 18.714286 | 42 | 0.740458 |
3d520c084aa6f755e5287e0973ac0e283f437a4d | 4,132 | py | Python | test/test_npu/test_network_ops/test_deformable_conv2d_backward.py | Ascend/pytorch | 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | ["BSD-3-Clause"] | 1 | 2021-12-02T03:07:35.000Z | 2021-12-02T03:07:35.000Z | test/test_npu/test_network_ops/test_deformable_conv2d_backward.py | Ascend/pytorch | 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | ["BSD-3-Clause"] | 1 | 2021-11-12T07:23:03.000Z | 2021-11-12T08:28:13.000Z | test/test_npu/test_network_ops/test_deformable_conv2d_backward.py | Ascend/pytorch | 39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc | ["BSD-3-Clause"] | null | null | null |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
class TestDeformableConv2dBackward(TestCase):
def create_single_npu_tensor(self, item, minvalue, maxvalue):
dtype = item[0]
format = item[1]
shape = item[2]
input1 = np.random.uniform(minvalue, maxvalue, shape).astype(dtype)
npu_input = torch.from_numpy(input1).to("npu")
if format != -1:
npu_input = npu_input.npu_format_cast(format)
return npu_input
def test_deformable_conv2d(self, device):
input = self.create_single_npu_tensor([np.float32, 0, (16, 32, 32, 32)], 0, 10)
weight = self.create_single_npu_tensor([np.float32, 0, (32, 32, 5, 5)], 0, 10)
offset = self.create_single_npu_tensor([np.float32, 0, (16, 75, 32, 32)], 0, 10)
input.requires_grad = True
weight.requires_grad = True
offset.requires_grad = True
npu_output, offset_out = torch.npu_deformable_conv2d(
input, weight, offset, None, kernel_size=[5, 5], stride = [1, 1, 1, 1], padding = [2, 2, 2, 2])
npu_output.backward(torch.ones_like(npu_output))
npu_output = npu_output.cpu().detach()
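        # Three chained select() calls pin channel=2, h=2, w=2, leaving one
        # scalar per batch element to compare against the expected values.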
output = npu_output.select(1, 2).select(1, 2).select(1, 2)
expect_output = torch.tensor([65504., 65504., 65504., 65504., 65504., 65504., 65504., 65504., 65504.,
65504., 65504., 65504., 65504., 65504., 65504., 65504.])
self.assertRtolEqual(expect_output, output)
input_grad = input.grad.select(1, 2).select(1, 2).select(1, 3)
expect_input_grad = torch.tensor([1018.5208, 1080.2323, 2533.2463, 1305.0685, 3977.8293, 2363.5681,
1414.5939, 2116.5427, 1401.0662, 2064.0400, 1945.2327, 2338.5208,
300.2462, 2646.7798, 1899.1229, 2165.7280])
self.assertRtolEqual(expect_input_grad, input_grad.cpu())
        offset_grad = offset.grad.select(1, 2).select(1, 2).select(1, 3)
        expect_offset_grad = torch.tensor([-4708.0259, -139.2554, -2387.8149, 31017.8438, 19861.9528,
                                           -1209.2686, -24085.7285, -3950.3850, -31044.7070, 4571.3936,
                                           582.9868, -5514.0459, 78401.6562, -1778.3700, -14311.4365,
                                           -2065.9717])
        self.assertRtolEqual(expect_offset_grad, offset_grad.cpu())
weight_grad = weight.grad.select(1, 2).select(1, 2).select(1, 3)
expect_weight_grad = torch.tensor([279501.8438, 279501.8438, 279501.8438, 279501.8438, 279501.8125,
279501.8125, 279501.8125, 279501.8125, 279501.8438, 279501.8438,
279501.8438, 279501.8438, 279501.8438, 279501.8438, 279501.8438,
279501.8438, 279501.8438, 279501.8438, 279501.8438, 279501.8438,
279501.8438, 279501.8438, 279501.8438, 279501.8438, 279501.8125,
279501.8125, 279501.8125, 279501.8125, 279501.8438, 279501.8438,
279501.8438, 279501.8438])
self.assertRtolEqual(expect_weight_grad, weight_grad.cpu())
instantiate_device_type_tests(TestDeformableConv2dBackward, globals(), except_for="cpu")
if __name__ == "__main__":
run_tests()
| 55.093333 | 109 | 0.613988 |
62bf60dda1e61d2dae7b08d962ddfb53488289c9 | 794 | py | Python | python/pyqt/LearnPyQt/q_label.py | zeroam/TIL | 43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1 | ["MIT"] | null | null | null | python/pyqt/LearnPyQt/q_label.py | zeroam/TIL | 43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1 | ["MIT"] | null | null | null | python/pyqt/LearnPyQt/q_label.py | zeroam/TIL | 43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1 | ["MIT"] | null | null | null |
import sys
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (
QApplication,
QLabel,
QMainWindow,
)
from PyQt5.QtGui import QPixmap
class MainWindow(QMainWindow):
def __init__(self, *args, **kwargs):
super(MainWindow, self).__init__(*args, **kwargs)
self.setWindowTitle("Jayone's Awesome App")
widget = QLabel('Hello')
widget.setFixedSize(600, 600)
font = widget.font()
font.setPointSize(30)
widget.setFont(font)
widget.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
# set image
widget.setPixmap(QPixmap('python-large.jpg'))
widget.setScaledContents(True)
self.setCentralWidget(widget)
app = QApplication(sys.argv)
window = MainWindow()
window.show()
app.exec_()
| 21.459459 | 62 | 0.65995 |
62d9b674c44f58c6ef9683e77fd3820f49b7cb6c | 633 | py | Python | python/generator_tricks_for_systemprogrammer/gengrep.py | zeroam/TIL | 43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1 | ["MIT"] | null | null | null | python/generator_tricks_for_systemprogrammer/gengrep.py | zeroam/TIL | 43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1 | ["MIT"] | null | null | null | python/generator_tricks_for_systemprogrammer/gengrep.py | zeroam/TIL | 43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1 | ["MIT"] | null | null | null |
# gengrep.py
#
# Grep a sequence of lines that match a re pattern
import re
def gen_grep(pat, lines):
patc = re.compile(pat)
return (line for line in lines if patc.search(line))
# Example use
if __name__ == "__main__":
from pathlib import Path
from genopen import gen_open
from gencat import gen_cat
lognames = Path("www").rglob("access-log*")
logfiles = gen_open(lognames)
loglines = gen_cat(logfiles)
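    # Nothing has been read yet: each stage is a lazy generator, so lines are
    # pulled through open -> cat -> grep one at a time.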
# Look for ply downloads (PLY is my own Python package)
plylines = gen_grep(r"ply-.*\.gz", loglines)
for line in plylines:
print(line, end="")
| 22.607143 | 60 | 0.63981 |
c5546ce003e724beee7df88e65d6539c19ddd2f9 | 1,286 | py | Python | 3rdParty/snowball/python/create_init.py | sita1999/arangodb | 6a4f462fa209010cd064f99e63d85ce1d432c500 | ["Apache-2.0"] | 39 | 2015-01-07T09:30:28.000Z | 2021-12-01T13:04:03.000Z | 3rdParty/snowball/python/create_init.py | lipper/arangodb | 66ea1fd4946668192e3f0d1060f0844f324ad7b8 | ["Apache-2.0"] | 23 | 2018-06-07T07:46:27.000Z | 2018-08-06T17:57:39.000Z | 3rdParty/snowball/python/create_init.py | lipper/arangodb | 66ea1fd4946668192e3f0d1060f0844f324ad7b8 | ["Apache-2.0"] | 4 | 2015-01-02T16:35:37.000Z | 2018-05-12T23:55:27.000Z |
#!/usr/bin/env python
import sys
import re
import os
python_out_folder = sys.argv[1]
filematch = re.compile(r"(\w+)_stemmer\.py$")
imports = []
languages = ['_languages = {']
for pyscript in os.listdir(python_out_folder):
match = filematch.match(pyscript)
    if match:
langname = match.group(1)
titlecase = langname.title()
languages.append(" '%(lang)s': %(title)sStemmer," % {'lang': langname, 'title': titlecase})
imports.append('from .%(lang)s_stemmer import %(title)sStemmer' % {'lang': langname, 'title': titlecase})
languages.append('}')
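# Template for the generated __init__.py: prefer the PyStemmer C extension
# when it is importable, otherwise fall back to the generated pure-Python
# stemmer classes collected in _languages.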
src = '''__all__ = ('language', 'stemmer')
%(imports)s
%(languages)s
try:
import Stemmer
cext_available = True
except ImportError:
cext_available = False
def algorithms():
if cext_available:
        return Stemmer.algorithms()
else:
        return list(_languages.keys())
def stemmer(lang):
if cext_available:
return Stemmer.Stemmer(lang)
if lang.lower() in _languages:
return _languages[lang.lower()]()
else:
raise KeyError("Stemming algorithm '%%s' not found" %% lang)
''' % {'imports': '\n'.join(imports), 'languages': '\n'.join(languages)}
out = open(os.path.join(python_out_folder, '__init__.py'), 'w')
out.write(src)
out.close()
| 24.730769 | 113 | 0.643857 |
9a924efb489cd955c1e204834646228cc6a66300 | 116 | py | Python | Python/Courses/Python-Tutorials.Zulkarnine-Mahmud/00.Fundamentals/08.3-Loop.py | shihab4t/Books-Code | b637b6b2ad42e11faf87d29047311160fe3b2490 | ["Unlicense"] | null | null | null | Python/Courses/Python-Tutorials.Zulkarnine-Mahmud/00.Fundamentals/08.3-Loop.py | shihab4t/Books-Code | b637b6b2ad42e11faf87d29047311160fe3b2490 | ["Unlicense"] | null | null | null | Python/Courses/Python-Tutorials.Zulkarnine-Mahmud/00.Fundamentals/08.3-Loop.py | shihab4t/Books-Code | b637b6b2ad42e11faf87d29047311160fe3b2490 | ["Unlicense"] | null | null | null |
grocery = ["rice", "water", "tomato", "onion", "ginger"]
for i in range(2, len(grocery), 2):
print(grocery[i])
| 23.2 | 56 | 0.594828 |
9afb6741fdd47608bc850f867aabf4b32b6b77e3 | 5,577 | py | Python | envs/fixed/basic.py | etigerstudio/zilong-on-fire | 5144a471b2d39ea38a47d394e648de00dd13cd8b | ["MIT"] | 2 | 2021-01-07T01:10:49.000Z | 2022-01-21T09:37:16.000Z | envs/fixed/basic.py | etigerstudio/zilong-on-fire | 5144a471b2d39ea38a47d394e648de00dd13cd8b | ["MIT"] | null | null | null | envs/fixed/basic.py | etigerstudio/zilong-on-fire | 5144a471b2d39ea38a47d394e648de00dd13cd8b | ["MIT"] | null | null | null |
# Created by ALuier Bondar on 2020/12/13.
import random
import numpy as np
from envs.base import BaseEnvironment, StateFormat
from enum import Enum
class BasicFixedEnvironment(BaseEnvironment):
"""Basic environment implementation with fixed main character
子龙位置固定的基本环境实现
"""
    ALIVE_REWARD = 1.0  # reward for surviving the step
    DEAD_REWARD = -1.0  # reward on death
    PARTITIONS = 4  # number of angular partitions
    ARROW_DISTANCE = 2  # initial distance of the arrow
    ACTOR_POINT_VALUE = 1  # marker value of the actor in the state matrix
    ARROW_POINT_VALUE = 1  # marker value of the arrow in the state matrix
class Action(Enum):
"""动作枚举"""
LEFT = 0
RIGHT = 1
NONE = 2
ACTIONS = [
Action.LEFT,
Action.RIGHT,
Action.NONE
]
def __init__(
self,
partitions=PARTITIONS,
arrow_distance=ARROW_DISTANCE,
random_reset=True,
state_format=StateFormat.VECTOR):
"""
Args:
partitions: 角度分区总数
arrow_distance: 弓箭起始距离
random_reset: 是否开启随机生成子龙/弓箭角度
state_format: 状态格式
"""
self.partitions = partitions
self.max_arrow_distance = arrow_distance
self.random_reset = random_reset
self.state_format = state_format
if state_format == StateFormat.MATRIX:
self.matrix_actor_width = 2
self.matrix_arrow_width = self.max_arrow_distance
self.matrix_full_width = self.matrix_actor_width + self.matrix_arrow_width * 2
self.reset()
def step(self, action):
"""与环境互动一步
Args:
action: 采取的动作
Returns:
状态, 奖励, 是否阵亡
"""
self.__actor_step(action)
self.__arrow_step()
dead = self.__is_dead()
return self.__get_state(), self.__reward(dead), dead
def reset(self):
"""重设环境
Returns:
最新状态
"""
self.__reset_actor()
self.__reset_arrow()
return self.__get_state()
def __reward(self, dead):
"""Calculate reward after previous action."""
return self.DEAD_REWARD if dead else self.ALIVE_REWARD
def __actor_step(self, action):
"""子龙执行一步操作"""
if action == self.Action.LEFT:
self.actor_facing = (self.actor_facing - 1) % self.partitions
elif action == self.Action.RIGHT:
self.actor_facing = (self.actor_facing + 1) % self.partitions
elif action == self.Action.NONE:
pass
else:
assert False, 'Unexpected action given!'
def __arrow_step(self):
"""弓箭前进一步"""
if self.current_arrow_distance == 0:
# print('v Last arrow is dodged!')
self.__reset_arrow()
else:
self.current_arrow_distance -= 1
def __reset_actor(self):
"""重设子龙"""
self.actor_facing = random.randint(0, self.partitions - 1) if \
self.random_reset else 0
def __reset_arrow(self):
"""重设弓箭"""
self.arrow_direction = random.randint(0, self.partitions - 1) if \
self.random_reset else self.partitions // 2
self.current_arrow_distance = self.max_arrow_distance
def __is_dead(self):
"""是否阵亡"""
return self.current_arrow_distance == 0 and \
self.actor_facing != self.arrow_direction
def __get_state(self):
"""获取当前状态"""
if self.state_format == StateFormat.VECTOR:
return self.actor_facing, self.arrow_direction
elif self.state_format == StateFormat.MATRIX:
return self.__make_state_matrix()
def __make_state_matrix(self):
"""构建状态矩阵"""
if self.partitions == 4:
matrix = np.zeros((self.matrix_full_width, self.matrix_full_width))
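            # 6x6 grid when arrow_distance=2: the central 2x2 block carries
            # the actor marker, and the surrounding ring carries the arrow as
            # it closes in along one of the four diagonals.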
arrow_position = None
moves = self.max_arrow_distance - self.current_arrow_distance
if self.arrow_direction == 0:
arrow_position = [self.matrix_full_width - 1, 0]
arrow_position[0] += -1 * moves
arrow_position[1] += +1 * moves
elif self.arrow_direction == 1:
arrow_position = [self.matrix_full_width - 1, self.matrix_full_width - 1]
arrow_position[0] += -1 * moves
arrow_position[1] += -1 * moves
elif self.arrow_direction == 2:
arrow_position = [0, self.matrix_full_width - 1]
arrow_position[0] += +1 * moves
arrow_position[1] += -1 * moves
elif self.arrow_direction == 3:
arrow_position = [0, 0]
arrow_position[0] += +1 * moves
arrow_position[1] += +1 * moves
matrix[arrow_position[0], arrow_position[1]] = self.ARROW_POINT_VALUE
actor_position = [self.matrix_arrow_width, self.matrix_arrow_width]
if self.actor_facing == 0:
actor_position[0] += 1
elif self.actor_facing == 1:
actor_position[0] += 1
actor_position[1] += 1
elif self.actor_facing == 2:
actor_position[1] += 1
elif self.actor_facing == 3:
pass
matrix[actor_position[0], actor_position[1]] = self.ACTOR_POINT_VALUE
return matrix
else:
raise NotImplementedError
def get_state_shape(self):
"""获取状态形状"""
if self.state_format == StateFormat.VECTOR:
return (self.PARTITIONS,) * 2
elif self.state_format == StateFormat.MATRIX:
return (self.matrix_full_width,) * 2
| 31.508475 | 90 | 0.574682 |
4941b8a9c5856702e4d77c5bf79b039bff738855 | 3,726 | py | Python | tichywecker-beta.py | Trolliverpust/TichyWecker | 6b361d2ec25deeb0c74aa57bbea7ff1984c6a812 | ["CC0-1.0"] | 2 | 2021-03-31T16:22:19.000Z | 2021-04-12T10:15:24.000Z | tichywecker-beta.py | Trolliverpust/TichyWecker | 6b361d2ec25deeb0c74aa57bbea7ff1984c6a812 | ["CC0-1.0"] | null | null | null | tichywecker-beta.py | Trolliverpust/TichyWecker | 6b361d2ec25deeb0c74aa57bbea7ff1984c6a812 | ["CC0-1.0"] | null | null | null |
from pygame import mixer
import phatbeat
import time, random, copy
from datetime import datetime
mixer.init(frequency=44100)
stundenzeiten = ["5uhr.mp3","6uhr.mp3","7uhr.mp3","8uhr.mp3","9uhr.mp3","10uhr.mp3","11uhr.mp3"]
minutenzeiten = ["fünf.mp3","zehn.mp3","fünfzehn.mp3","zwanzig.mp3","fünfundzwanzig.mp3","dreißig.mp3","fünfunddreißig.mp3","vierzig.mp3","fünfundvierzig.mp3","fünfzig.mp3","fünfundfünfzig.mp3","leer.mp3"]
weckermodus = True
bearbeitungsmodus = False
heuteschongeweckt = False
weckstunde = 7
weckminute = 30
stundenzeiger = 2
minutenzeiger = 5
def eine_aussage(a1):
mixer.music.load(a1)
mixer.music.play(0)
while mixer.music.get_busy():
pass
def drei_aussagen(a1,a2,a3):
mixer.music.load(a1)
mixer.music.play(0)
while mixer.music.get_busy():
pass
mixer.music.load(a2)
mixer.music.play(0)
while mixer.music.get_busy():
pass
mixer.music.load(a3)
mixer.music.play(0)
while mixer.music.get_busy():
pass
eine_aussage("tichy-startup.mp3")
@phatbeat.on(phatbeat.BTN_PLAYPAUSE)
def stoppen(pin):
mixer.music.stop()
@phatbeat.on(phatbeat.BTN_ONOFF)
def weckmodus_umschalten(pin):
global weckermodus
weckermodus = not weckermodus
if weckermodus:
mixer.music.load("wecker_an.mp3")
mixer.music.play(0)
else:
mixer.music.load("wecker_aus.mp3")
mixer.music.play(0)
@phatbeat.on(phatbeat.BTN_REWIND)
def uhrzeitansagen(pin):
drei_aussagen("aktuelleUhrzeit.mp3",stundenzeiten[stundenzeiger],minutenzeiten[minutenzeiger])
@phatbeat.on(phatbeat.BTN_FASTFWD)
def zeitsetzen(pin):
global bearbeitungsmodus, weckstunde, weckminute, stundenzeiger, minutenzeiger, heuteschongeweckt
if bearbeitungsmodus:
drei_aussagen("einstellung_ende.mp3",stundenzeiten[stundenzeiger],minutenzeiten[minutenzeiger])
heuteschongeweckt = False
bearbeitungsmodus = False
weckstunde = stundenzeiger + 5
weckminute = ((minutenzeiger+1)*5)%60
print(weckstunde, weckminute)
if weckstunde == 8 and weckminute == 0:
eine_aussage("dieArbeitbeginntum8Uhr.mp3")
elif weckstunde == 6 and weckminute == 0:
eine_aussage("sechsuhrfrüh.mp3")
elif weckstunde == 7 and weckminute == 30:
eine_aussage("siebenuhrdreißignichtfrüh.mp3")
elif weckstunde > 8:
eine_aussage("bisschenspät.mp3")
else:
bearbeitungsmodus = True
drei_aussagen("aktuelleUhrzeit.mp3",stundenzeiten[stundenzeiger],minutenzeiten[minutenzeiger])
eine_aussage("einstellung_start.mp3")
@phatbeat.on(phatbeat.BTN_VOLUP)
def stundenplus(pin):
global bearbeitungsmodus, stundenzeiger, stundenzeiten
if bearbeitungsmodus:
stundenzeiger = (stundenzeiger+1)%7
eine_aussage(stundenzeiten[stundenzeiger])
else:
eine_aussage("kein_bearbeitungsmodus.mp3")
@phatbeat.on(phatbeat.BTN_VOLDN)
def minutenplus(pin):
global bearbeitungsmodus, minutenzeiger, minutenzeiten
if bearbeitungsmodus:
minutenzeiger = (minutenzeiger+1)%12
eine_aussage(minutenzeiten[minutenzeiger])
else:
eine_aussage("kein_bearbeitungsmodus.mp3")
while True:
n = datetime.now().time()
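    # Re-arm once per day: after noon the "already woke up today" latch is
    # cleared so the alarm can fire again the next morning.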
if n.hour == 12 and heuteschongeweckt:
heuteschongeweckt = False
time.sleep(5)
if weckermodus and not heuteschongeweckt:
if weckstunde == n.hour and weckminute == n.minute:
mixer.music.set_volume(1)
heuteschongeweckt = True
mixer.music.load("tichy.mp3")
mixer.music.play(-1)
while mixer.music.get_busy():
pass
n = datetime.now().time()
| 28.661538 | 205 | 0.686795 |
b8fe75f9aa928b25fc50a1c698295ab96b8af482 | 2,509 | py | Python | apps/training_tf_simple_m/keras_advanced.py | Obyoxar/RobolabStatistics | 08343ca3ac49df7efdac33692d7cc4b783e851f5 | ["MIT"] | 2 | 2017-11-30T21:12:11.000Z | 2017-12-01T07:52:43.000Z | apps/training_tf_simple_m/keras_advanced.py | Obyoxar/RobolabStatistics | 08343ca3ac49df7efdac33692d7cc4b783e851f5 | ["MIT"] | 14 | 2017-11-14T18:12:53.000Z | 2018-06-03T16:07:57.000Z | apps/training_tf_simple_m/keras_advanced.py | Obyoxar/RobolabStatistics | 08343ca3ac49df7efdac33692d7cc4b783e851f5 | ["MIT"] | 3 | 2018-02-05T10:40:03.000Z | 2018-02-09T09:29:19.000Z |
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from robolib.robogui import pixel_editor
import cv2
import keras
import numpy as np
import os.path as path
DEBUG = False
RELEARN = False
MODEL_FILENAME = "KerasAdvancedModel"
# DATA STUFF
label_labels = ["Horizontal", "Vertikal"]
labels = np.random.randint(0, 2, size=(1000, 1))
size = 8
data = np.zeros(shape=(1000, size, size, 1))
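# Synthetic task: each 8x8 sample contains one roughly horizontal (label 0)
# or roughly vertical (label 1) line for the conv net to classify.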
for la, d in zip(labels, data):
img = np.zeros((size, size))
lineZ = np.random.randint(0, size)
endLineZ = np.clip(lineZ + np.random.randint(-1, 2), 0, size)
if la == 0:
cv2.line(img, (0, lineZ), (size, endLineZ), 1.0)
else:
cv2.line(img, (lineZ, 0), (endLineZ, size), 1.0)
# d[:] = np.reshape(img, (4*4, ))
d[:, :, :] = np.reshape(img, (size, size, 1))
if DEBUG:
print(label_labels[la[0]])
print(lineZ, endLineZ)
print(img)
# MACHINE LEARNING STUFF
model = None
if RELEARN or not path.isfile(MODEL_FILENAME):
print("Model will be recreated: File {} exists: {}".format(MODEL_FILENAME, path.isfile(MODEL_FILENAME)))
model = Sequential()
model.add(Conv2D(30, (3, 3), activation='relu', input_shape=(size, size, 1)))
model.add(Conv2D(20, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(size*size, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(2, activation='relu'))
model.add(Dense(2, activation='softmax'))
model.compile(optimizer='rmsprop',
loss='categorical_crossentropy',
metrics=['accuracy'])
else:
print("Loading model from File {}".format(MODEL_FILENAME))
model = keras.models.load_model(MODEL_FILENAME)
one_hot_labels = keras.utils.to_categorical(labels, num_classes=2)
model.fit(data, one_hot_labels, epochs=300, batch_size=100)
print("Saving model to {}".format(MODEL_FILENAME))
model.save(MODEL_FILENAME)
while True:
predict_data = [pixel_editor.get_pixel_input_raw(size, size)]
if all(1.0 not in row for row in predict_data):
break
if DEBUG:
print(predict_data)
    output = model.predict(np.array(predict_data), batch_size=1)
if all(all(n < 0.9 for n in m) for m in output):
print("Don't know, will guess: ")
print(label_labels[np.argmax(output)])
if DEBUG:
print(np.around(output, 5))
| 30.597561 | 108 | 0.664807 |
772d85da92e166fa459e639668b79d25e223bd7b | 737 | py | Python | Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/60_list_comprehension/lista_compreensão.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | ["MIT"] | null | null | null | Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/60_list_comprehension/lista_compreensão.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | ["MIT"] | null | null | null | Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/60_list_comprehension/lista_compreensão.py | pedrohd21/Cursos-Feitos | b223aad83867bfa45ad161d133e33c2c200d42bd | ["MIT"] | null | null | null |
# list comprehensions improve the code's performance
l1 = [1, 2, 3, 4, 5, 6, 7, 8, 9]
ex1 = [variavel for variavel in l1]
ex2 = [v * 2 for v in l1]  # multiplies each element of list l1 by 2
ex3 = [(v, v2) for v in l1 for v2 in range(3)]
print(ex3)
l2 = ['pedro', 'mauro', 'maria']
ex4 = [v.replace('a', '@').upper() for v in l2]  # swaps the letter 'a' for '@' and uppercases each string
print(ex4)
tupla = (
('chave1', 'valor1'),
('chave2', 'valor2'),
)
ex5 = [(y, x) for x, y in tupla]
ex5 = dict(ex5)  # invert the pairs into a dictionary
print(ex5['valor1'])
l3 = list(range(100))  # list from 0 to 99
ex6 = [va for va in l3 if va % 3 == 0 if va % 8 == 0]  # numbers divisible by both 3 and 8
print(ex6)
ex7 = [v if v % 3 == 0 and v % 8 == 0 else 0 for v in l3]
print(ex7)
| 25.413793 | 89 | 0.58616 |
a2718cace2c77da8ce7e4bf424f9813581430085 | 3,606 | py | Python | marsyas-vamp/marsyas/src/django/birdsong/application/birdsong/orchive/views.py | jaouahbi/VampPlugins | 27c2248d1c717417fe4d448cdfb4cb882a8a336a | ["Apache-2.0"] | null | null | null | marsyas-vamp/marsyas/src/django/birdsong/application/birdsong/orchive/views.py | jaouahbi/VampPlugins | 27c2248d1c717417fe4d448cdfb4cb882a8a336a | ["Apache-2.0"] | null | null | null | marsyas-vamp/marsyas/src/django/birdsong/application/birdsong/orchive/views.py | jaouahbi/VampPlugins | 27c2248d1c717417fe4d448cdfb4cb882a8a336a | ["Apache-2.0"] | null | null | null |
from django.shortcuts import render_to_response
import subprocess
import re
import marsyas
import numpy as np
# Redirect all output to stderr
import sys
sys.stdout = sys.stderr
def index(request):
buffer_size = int(request.GET.get('buffer_size', '1024'))
print "buffer_size=%i" % buffer_size
hop_size = int(request.GET.get('hop_size', '1024'))
print "hop_size=%i" % hop_size
#sfname = "/Users/sness/venus/assets/recordings/1007/H2/2010/01/01/1/audio.wav"
#sfname = "/home/sness/wavfiles/tiny.wav"
#sfname = "/home/sness/nDEV/venus_orchive-assets/recordings/1007/H2/2010/01/01/1/audio.wav"
sfname = "/home/sness/wavfiles/audio.wav"
print "sfname=%s" % sfname
mng = marsyas.MarSystemManager()
# Create net
net = mng.create("Series","series")
# Make a SoundFileSource and convert it to mono
net.addMarSystem(mng.create("SoundFileSource", "src"));
net.addMarSystem(mng.create("Stereo2Mono", "s2m"));
# A fanout that will do both RMS and Flux calculations
fanout = mng.create("Fanout","fanout");
net.addMarSystem(fanout);
# The branch to do the RMS
rms_series = mng.create("Series","rms_series");
rms_series.addMarSystem(mng.create("Rms", "rms"));
fanout.addMarSystem(rms_series);
# The branch to do the Flux
flux_series = mng.create("Series","flux_series");
flux_series.addMarSystem(mng.create("ShiftInput", "si"));
flux_series.addMarSystem(mng.create("Windowing", "win"));
flux_series.addMarSystem(mng.create("Spectrum","spk"));
flux_series.addMarSystem(mng.create("PowerSpectrum", "pspk"));
flux_series.addMarSystem(mng.create("Flux", "flux"));
fanout.addMarSystem(flux_series);
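    # Network layout: SoundFileSource -> Stereo2Mono -> Fanout with two
    # branches: a bare RMS stage, and ShiftInput -> Windowing -> Spectrum
    # -> PowerSpectrum -> Flux.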
# Update the controls with required values
net.updControl("SoundFileSource/src/mrs_string/filename", marsyas.MarControlPtr.from_string(sfname))
print "############################## la ##############################"
rms_python_array = []
flux_python_array = []
while net.getControl("SoundFileSource/src/mrs_bool/hasData").to_bool():
data = net.getControl("mrs_realvec/processedData").to_realvec()
rms = data[0]
flux = data[1]
rms_python_array.append(rms)
flux_python_array.append(flux)
net.tick()
# Convert these arrays to numpy vectors
rms_array = np.float32(rms_python_array)
flux_array = np.float32(flux_python_array)
# Normalize these arrays
rms_array *= 1.0/rms_array.max()
flux_array *= 1.0/flux_array.max()
print rms_array
print flux_array
# Calculate the RGBA values
rgba = []
for i in range(0,len(rms_array)):
# The RMS and Flux values
rms = rms_array[i]
flux = flux_array[i]
# Alpha is just the RMS value
alpha = rms * 0.5
# Map the flux to a mix of red and green
red = flux
green = 1.0 - flux
blue = 0
# Add these values as a tuple to the array rgba
rgba.append((red,green,blue,alpha))
output = ""
hop_width_ms = int((hop_size / 44100.0) * 1000)
for i in range(0, len(rgba)):
start_time_ms = int(((hop_size / 44100.0) * float(i))*1000)
end_time_ms = start_time_ms + hop_width_ms
red_int = int(rgba[i][0] * 255)
green_int = int(rgba[i][1] * 255)
blue_int = int(rgba[i][2] * 255)
color = ('%02x%02x%02x' % (red_int, green_int, blue_int)).upper()
output += "%i,%i,%i,,%f,0x%s\n" % (i, start_time_ms, end_time_ms, rgba[i][3], color)
return render_to_response('orchive/index.html', {'output':output}, {})
| 34.342857 | 104 | 0.643649 |
0c2017f243ed9a45df63d2e776327cfdd5730247 | 1,403 | py | Python | ___Python/Daniel/2018-06-25-VHS-Bielefeld-Python/p05_random/m01_wuerfeln.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | ["Apache-2.0"] | null | null | null | ___Python/Daniel/2018-06-25-VHS-Bielefeld-Python/p05_random/m01_wuerfeln.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | ["Apache-2.0"] | null | null | null | ___Python/Daniel/2018-06-25-VHS-Bielefeld-Python/p05_random/m01_wuerfeln.py | uvenil/PythonKurs201806 | 85afa9c9515f5dd8bec0c546f077d8cc39568fe8 | ["Apache-2.0"] | null | null | null |
import random
r = random.Random()
def wuerfeln():
    return r.randint(1, 6)  # pip count between 1 and 6
def muenzwurf():
    return r.randint(0, 1)  # 0 = heads, 1 = tails
def kugel():
return r.randint(1, 49)
d = {}
for i in range(100000):
augenzahl = wuerfeln()
if augenzahl in d:
d[augenzahl] += 1
else:
d[augenzahl] = 1
print(d)
# 1) Determine the lottery numbers, 6 out of 49 ==> [2, 7, 13, 17, 19, 42]
# Solution a)
kugeln = 0
menge = set()  # set of balls already drawn
lottoziehung = []
while kugeln < 6:
ziehung = kugel()
    if ziehung not in menge:  # this ball has not been drawn before
lottoziehung.append(ziehung)
        menge.add(ziehung)  # this ball must not be drawn again, so it goes into the set of previously drawn balls
kugeln += 1
print(sorted(lottoziehung))
# Lösung b) Ansatz "Lottofee"
urne = list(range(1, 50)) # urne = [1, 2, 3, ..., 49]
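# pop() removes the drawn ball from the urn, so a repeat draw is impossible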
lottoziehung = []
for i in range(6):
    ziehung = urne.pop(r.randint(0, len(urne) - 1))  # randint's bounds are inclusive
lottoziehung.append(ziehung)
print(sorted(lottoziehung))
# Solution c) Use the appropriate method from random
lottoziehung = r.sample(range(1, 50), 6)
print(sorted(lottoziehung))
# 2) Write a function wuerfeln2 that rolls a fair die.
# Only the muenzwurf function may be used in the implementation
| 26.471698 | 139 | 0.644334 |
ac5dd0c09127b63d4e68b0569992c552a7879fdd | 4,505 | py | Python | research/cv/ESRGAN/src/util/extract_subimages.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | research/cv/ESRGAN/src/util/extract_subimages.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | research/cv/ESRGAN/src/util/extract_subimages.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Crop sub-images."""
import os
from os import path as osp
import sys
from multiprocessing import Pool
import cv2
import numpy as np
from tqdm import tqdm
def scandir(dir_path, suffix=None, recursive=False, full_path=False):
"""Scan a directory to find the interested files."""
if (suffix is not None) and not isinstance(suffix, (str, tuple)):
raise TypeError('"suffix" must be a string or tuple of strings')
root = dir_path
def _scandir(dir_path, suffix, recursive):
for entry in os.scandir(dir_path):
if not entry.name.startswith('.') and entry.is_file():
if full_path:
return_path = entry.path
else:
return_path = osp.relpath(entry.path, root)
if suffix is None:
yield return_path
elif return_path.endswith(suffix):
yield return_path
else:
if recursive:
yield from _scandir(entry.path, suffix=suffix, recursive=recursive)
else:
continue
return _scandir(dir_path, suffix=suffix, recursive=recursive)
def main():
"""A multi-thread tool to crop large images to sub-images for faster IO."""
opt = {}
opt['n_thread'] = 20
opt['compression_level'] = 3
# HR images
opt['input_folder'] = 'datasets/DIV2K/DIV2K_train_HR'
opt['save_folder'] = 'datasets/DIV2K/DIV2K_train_HR_sub'
opt['crop_size'] = 480
opt['step'] = 240
opt['thresh_size'] = 0
extract_subimages(opt)
# LRx4 images
opt['input_folder'] = 'datasets/DIV2K/DIV2K_train_LR_bicubic/X4'
opt['save_folder'] = 'datasets/DIV2K/DIV2K_train_LR_bicubic/X4_sub'
opt['crop_size'] = 120
opt['step'] = 60
opt['thresh_size'] = 0
extract_subimages(opt)
def extract_subimages(opt):
"""Crop images to subimages."""
input_folder = opt['input_folder']
save_folder = opt['save_folder']
if not osp.exists(save_folder):
os.makedirs(save_folder)
print(f'mkdir {save_folder} ...')
else:
print(f'Folder {save_folder} already exists. Exit.')
sys.exit(1)
img_list = list(scandir(input_folder, full_path=True))
pbar = tqdm(total=len(img_list), unit='image', desc='Extract')
pool = Pool(opt['n_thread'])
for path in img_list:
pool.apply_async(worker, args=(path, opt), callback=lambda arg: pbar.update(1))
pool.close()
pool.join()
pbar.close()
print('All processes done.')
def worker(path, opt):
"""Worker for each process."""
crop_size = opt['crop_size']
step = opt['step']
thresh_size = opt['thresh_size']
img_name, extension = osp.splitext(osp.basename(path))
# remove the x2, x3, x4 and x8 in the filename for DIV2K
img_name = img_name.replace('x2', '').replace('x3', '').replace('x4', '').replace('x8', '')
img = cv2.imread(path, cv2.IMREAD_UNCHANGED)
h, w = img.shape[0:2]
h_space = np.arange(0, h - crop_size + 1, step)
if h - (h_space[-1] + crop_size) > thresh_size:
h_space = np.append(h_space, h - crop_size)
w_space = np.arange(0, w - crop_size + 1, step)
if w - (w_space[-1] + crop_size) > thresh_size:
w_space = np.append(w_space, w - crop_size)
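    # If the strip left over at the right/bottom border is wider than
    # thresh_size, append one extra crop flush with the border so no image
    # area is silently dropped.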
index = 0
for x in h_space:
for y in w_space:
index += 1
cropped_img = img[x:x + crop_size, y:y + crop_size, ...]
cropped_img = np.ascontiguousarray(cropped_img)
cv2.imwrite(
osp.join(opt['save_folder'], f'{img_name}_s{index:03d}{extension}'), cropped_img,
[cv2.IMWRITE_PNG_COMPRESSION, opt['compression_level']])
process_info = f'Processing {img_name} ...'
return process_info
if __name__ == '__main__':
main()
| 32.883212 | 97 | 0.621088 |
ce20feabb1d206d27691e74aa5ee74dbb2af1c88 | 9,752 | py | Python | research/cv/hardnet/src/HarDNet.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | research/cv/hardnet/src/HarDNet.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | research/cv/hardnet/src/HarDNet.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | ["Apache-2.0"] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""HarDNet"""
import mindspore.nn as nn
from mindspore.ops import operations as P
class GlobalAvgpooling(nn.Cell):
"""
GlobalAvgpooling function
"""
def __init__(self):
super(GlobalAvgpooling, self).__init__()
self.mean = P.ReduceMean(True)
self.shape = P.Shape()
self.reshape = P.Reshape()
def construct(self, x):
x = self.mean(x, (2, 3))
b, c, _, _ = self.shape(x)
x = self.reshape(x, (b, c))
return x
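# Editor's sketch (not part of the original file): the Cell above collapses
# an NCHW tensor to (N, C) by averaging over H and W.
def _demo_global_avgpool():
    import mindspore.numpy as msnp
    gap = GlobalAvgpooling()
    x = msnp.ones((2, 8, 7, 7))
    return gap(x)  # shape (2, 8); every entry is 1.0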
class _ConvLayer(nn.Cell):
"""
convlayer
"""
def __init__(self, in_channels, out_channels, kernel=3, stride=1, dropout=0.9, bias=False):
super(_ConvLayer, self).__init__()
self.ConvLayer_Conv = nn.Conv2d(in_channels, out_channels,
kernel_size=kernel,
stride=stride,
has_bias=bias,
padding=kernel // 2,
pad_mode="pad")
self.ConvLayer_BN = nn.BatchNorm2d(out_channels)
self.ConvLayer_RE = nn.ReLU6()
def construct(self, x):
out = self.ConvLayer_Conv(x)
out = self.ConvLayer_BN(out)
out = self.ConvLayer_RE(out)
return out
class _DWConvLayer(nn.Cell):
"""
dwconvlayer
"""
def __init__(self, in_channels, out_channels, stride=1, bias=False):
super(_DWConvLayer, self).__init__()
self.DWConvLayer_Conv = nn.Conv2d(in_channels, in_channels,
kernel_size=3,
stride=stride,
has_bias=bias,
padding=1,
pad_mode="pad")
self.DWConvLayer_BN = nn.BatchNorm2d(in_channels)
def construct(self, x):
out = self.DWConvLayer_Conv(x)
out = self.DWConvLayer_BN(out)
return out
class _CombConvLayer(nn.Cell):
"""
combconvlayer
"""
def __init__(self, in_channels, out_channels, kernel=1, stride=1, dropout=0.9, bias=False):
super(_CombConvLayer, self).__init__()
self.CombConvLayer_Conv = _ConvLayer(in_channels, out_channels, kernel=kernel)
self.CombConvLayer_DWConv = _DWConvLayer(out_channels, out_channels, stride=stride)
def construct(self, x):
out = self.CombConvLayer_Conv(x)
out = self.CombConvLayer_DWConv(out)
return out
class _HarDBlock(nn.Cell):
"""the HarDBlock function"""
def get_link(self, layer, bash_ch, growth_rate, grmul):
"""
link all layers
"""
if layer == 0:
return bash_ch, 0, []
out_channels = growth_rate
link = []
for i in range(10):
dv = 2 ** i
if layer % dv == 0:
k = layer - dv
link.append(k)
if i > 0:
out_channels *= grmul
out_channels = int(int(out_channels + 1) / 2) * 2
in_channels = 0
for i in link:
ch, _, _ = self.get_link(i, bash_ch, growth_rate, grmul)
in_channels += ch
return out_channels, in_channels, link
def get_out_ch(self):
return self.out_channels
def __init__(self, in_channels, growth_rate, grmul, n_layers, keepBase=False, residual_out=False, dwconv=False):
super(_HarDBlock, self).__init__()
self.keepBase = keepBase
self.links = []
self.layer_list = nn.CellList()
self.out_channels = 0
for i in range(n_layers):
outch, inch, link = self.get_link(i + 1, in_channels, growth_rate, grmul)
self.links.append(link)
if dwconv:
layer = _CombConvLayer(inch, outch)
self.layer_list.append(layer)
else:
layer = _ConvLayer(inch, outch)
self.layer_list.append(layer)
if (i % 2 == 0) or (i == n_layers - 1):
self.out_channels += outch
self.concate = P.Concat(axis=1)
def construct(self, x):
""""
construct all parameters
"""
layers_ = [x]
for layer in range(len(self.layer_list)):
link = self.links[layer]
tin = []
for i in link:
tin.append(layers_[i])
if len(tin) > 1:
input_ = tin[0]
for j in range(len(tin) - 1):
input_ = self.concate((input_, tin[j + 1]))
else:
input_ = tin[0]
out = self.layer_list[layer](input_)
layers_.append(out)
t = len(layers_)
out_ = []
for j in range(t):
if (j == 0 and self.keepBase) or (j == t - 1) or (j % 2 == 1):
out_.append(layers_[j])
output = out_[0]
for k in range(len(out_) - 1):
output = self.concate((output, out_[k + 1]))
return output
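# Editor's sketch (illustration only): get_link wires layer L back to layers
# L - 1, L - 2, L - 4, ... for every power of two that divides L, which is
# the "harmonic" connectivity pattern that gives HarDNet its name. A
# pure-Python replica of just the link computation:
def _demo_hard_links(layer):
    link = []
    for i in range(10):
        dv = 2 ** i
        if layer % dv == 0:
            link.append(layer - dv)
    return link  # _demo_hard_links(4) -> [3, 2, 0]; _demo_hard_links(3) -> [2]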
class _CommenHead(nn.Cell):
"""
the transition layer
"""
def __init__(self, num_classes, out_channels, keep_rate):
super(_CommenHead, self).__init__()
self.avgpool = GlobalAvgpooling()
self.flat = nn.Flatten()
self.drop = nn.Dropout(keep_prob=keep_rate)
self.dense = nn.Dense(out_channels, num_classes, has_bias=True)
def construct(self, x):
x = self.avgpool(x)
x = self.flat(x)
x = self.drop(x)
x = self.dense(x)
return x
class HarDNet(nn.Cell):
"""
the HarDNet layers
"""
__constants__ = ['layers']
def __init__(self, depth_wise=False, arch=68, pretrained=False):
super(HarDNet, self).__init__()
first_ch = [32, 64]
second_kernel = 3
max_pool = True
grmul = 1.7
keep_rate = 0.9
# HarDNet68
ch_list = [128, 256, 320, 640, 1024]
gr = [14, 16, 20, 40, 160]
n_layers = [8, 16, 16, 16, 4]
downSamp = [1, 0, 1, 1, 0]
if arch == 85:
# HarDNet85
first_ch = [48, 96]
ch_list = [192, 256, 320, 480, 720, 1280]
gr = [24, 24, 28, 36, 48, 256]
n_layers = [8, 16, 16, 16, 16, 4]
downSamp = [1, 0, 1, 0, 1, 0]
keep_rate = 0.8
elif arch == 39:
# HarDNet39
first_ch = [24, 48]
ch_list = [96, 320, 640, 1024]
grmul = 1.6
gr = [16, 20, 64, 160]
n_layers = [4, 16, 8, 4]
downSamp = [1, 1, 1, 0]
if depth_wise:
second_kernel = 1
max_pool = False
keep_rate = 0.95
blks = len(n_layers)
self.layers = nn.CellList()
self.layers.append(_ConvLayer(3, first_ch[0], kernel=3, stride=2, bias=False))
self.layers.append(_ConvLayer(first_ch[0], first_ch[1], kernel=second_kernel))
if max_pool:
self.layers.append(nn.MaxPool2d(kernel_size=3, stride=2))
else:
self.layers.append(_DWConvLayer(first_ch[1], first_ch[1], stride=2))
ch = first_ch[1]
for i in range(blks):
blk = _HarDBlock(ch, gr[i], grmul, n_layers[i], dwconv=depth_wise)
ch = blk.get_out_ch()
self.layers.append(blk)
if i == blks - 1 and arch == 85:
self.layers.append(nn.Dropout(keep_prob=0.9))
self.layers.append(_ConvLayer(ch, ch_list[i], kernel=1))
ch = ch_list[i]
if downSamp[i] == 1:
if max_pool:
self.layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
else:
self.layers.append(_DWConvLayer(ch, ch, stride=2))
self.out_channels = ch_list[blks - 1]
self.keeprate = keep_rate
def construct(self, x):
for layer in self.layers:
x = layer(x)
return x
def get_out_channels(self):
return self.out_channels
def get_keep_rate(self):
return self.keeprate
class HarDNet68(nn.Cell):
"""
hardnet68
"""
def __init__(self, num_classes):
super(HarDNet68, self).__init__()
self.net = HarDNet(depth_wise=False, arch=68, pretrained=False)
out_channels = self.net.get_out_channels()
keep_rate = self.net.get_keep_rate()
self.head = _CommenHead(num_classes, out_channels, keep_rate)
def construct(self, x):
x = self.net(x)
x = self.head(x)
return x
class HarDNet85(nn.Cell):
"""
hardnet85
"""
def __init__(self, num_classes):
super(HarDNet85, self).__init__()
self.net = HarDNet(depth_wise=False, arch=85, pretrained=False)
out_channels = self.net.get_out_channels()
keep_rate = self.net.get_keep_rate()
self.head = _CommenHead(num_classes, out_channels, keep_rate)
def construct(self, x):
x = self.net(x)
x = self.head(x)
return x
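# Editor's usage sketch (input shape and class count are assumptions): both
# wrappers take NCHW float tensors and return class logits.
def _demo_hardnet68():
    import mindspore.numpy as msnp
    net = HarDNet68(num_classes=1000)
    dummy = msnp.zeros((1, 3, 224, 224))  # float32 by default
    return net(dummy)  # logits of shape (1, 1000)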
| 31.25641 | 116 | 0.539274 |
cbeb67f84cfe722efa6282682e4bc7ce7c547f38
| 3,762 |
py
|
Python
|
source/pkgsrc/math/py-numpy16/patches/patch-numpy_distutils_fcompiler_gnu.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | 1 |
2021-11-20T22:46:39.000Z
|
2021-11-20T22:46:39.000Z
|
source/pkgsrc/math/py-numpy16/patches/patch-numpy_distutils_fcompiler_gnu.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | null | null | null |
source/pkgsrc/math/py-numpy16/patches/patch-numpy_distutils_fcompiler_gnu.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | null | null | null |
$NetBSD: patch-numpy_distutils_fcompiler_gnu.py,v 1.1 2020/04/27 16:52:56 adam Exp $
Linker needs -shared explicitly (at least with GCC 4.7 on SunOS), plus
any ABI flags as appropriate.
Do not generate debug symbols (remove '-g').
On OS X, do not use '-bundle' and 'dynamic_lookup' (to avoid Python.framework).
Do not use -funroll-loops compiler flag.
Do not run a shell command when it is "None".
--- numpy/distutils/fcompiler/gnu.py.orig 2018-04-23 16:28:56.000000000 +0000
+++ numpy/distutils/fcompiler/gnu.py
@@ -63,8 +63,10 @@ class GnuFCompiler(FCompiler):
return ('gfortran', m.group(1))
else:
# Output probably from --version, try harder:
- m = re.search(r'GNU Fortran\s+95.*?([0-9-.]+)', version_string)
+ m = re.search(r'95.*?([0-9-.]+)', version_string)
if m:
+ if m.group(1).split(".") < ["4", "2"]:
+ self.g2c = "f95"
return ('gfortran', m.group(1))
m = re.search(
r'GNU Fortran.*?\-?([0-9-.]+\.[0-9-.]+)', version_string)
@@ -91,13 +93,13 @@ class GnuFCompiler(FCompiler):
possible_executables = ['g77', 'f77']
executables = {
'version_cmd' : [None, "-dumpversion"],
- 'compiler_f77' : [None, "-g", "-Wall", "-fno-second-underscore"],
+ 'compiler_f77' : [None, "-Wall", "-fno-second-underscore"],
'compiler_f90' : None, # Use --fcompiler=gnu95 for f90 codes
'compiler_fix' : None,
- 'linker_so' : [None, "-g", "-Wall"],
+ 'linker_so' : [None, "-Wall", "-shared"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
- 'linker_exe' : [None, "-g", "-Wall"]
+ 'linker_exe' : [None, "-Wall"]
}
module_dir_switch = None
module_include_switch = None
@@ -146,7 +148,7 @@ class GnuFCompiler(FCompiler):
s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
warnings.warn(s, stacklevel=2)
- opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
+ opt.extend(['-undefined'])
else:
opt.append("-shared")
if sys.platform.startswith('sunos'):
@@ -237,7 +239,6 @@ class GnuFCompiler(FCompiler):
opt = ['-O2']
else:
opt = ['-O3']
- opt.append('-funroll-loops')
return opt
def _c_arch_flags(self):
@@ -288,13 +289,13 @@ class Gnu95FCompiler(GnuFCompiler):
possible_executables = ['gfortran', 'f95']
executables = {
'version_cmd' : ["<F90>", "-dumpversion"],
- 'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form",
+ 'compiler_f77' : [None, "-Wall", "-ffixed-form",
"-fno-second-underscore"] + _EXTRAFLAGS,
- 'compiler_f90' : [None, "-Wall", "-g",
+ 'compiler_f90' : [None, "-Wall",
"-fno-second-underscore"] + _EXTRAFLAGS,
- 'compiler_fix' : [None, "-Wall", "-g","-ffixed-form",
+ 'compiler_fix' : [None, "-Wall", "-ffixed-form",
"-fno-second-underscore"] + _EXTRAFLAGS,
- 'linker_so' : ["<F90>", "-Wall", "-g"],
+ 'linker_so' : ["<F90>", "-Wall", "-shared"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-Wall"]
@@ -307,7 +308,7 @@ class Gnu95FCompiler(GnuFCompiler):
def _universal_flags(self, cmd):
"""Return a list of -arch flags for every supported architecture."""
- if not sys.platform == 'darwin':
+ if not sys.platform == 'darwin' or cmd is None:
return []
arch_flags = []
# get arches the C compiler gets.
| 44.258824 | 84 | 0.524455 |
022d097bf65128a8000a14abb3976badfcb8dec3
| 1,159 |
py
|
Python
|
src/scheduler/models/execution.py
|
monosidev/monosi
|
a88b689fc74010b10dbabb32f4b2bdeae865f4d5
|
[
"Apache-2.0"
] | 156 |
2021-11-19T18:50:14.000Z
|
2022-03-31T19:48:59.000Z
|
src/scheduler/models/execution.py
|
monosidev/monosi
|
a88b689fc74010b10dbabb32f4b2bdeae865f4d5
|
[
"Apache-2.0"
] | 30 |
2021-12-27T19:30:56.000Z
|
2022-03-30T17:49:00.000Z
|
src/scheduler/models/execution.py
|
monosidev/monosi
|
a88b689fc74010b10dbabb32f4b2bdeae865f4d5
|
[
"Apache-2.0"
] | 14 |
2022-01-17T23:24:34.000Z
|
2022-03-29T09:27:47.000Z
|
from dataclasses import dataclass, field
from datetime import datetime
from mashumaro import DataClassDictMixin
from sqlalchemy import Column, Integer, Sequence, Text, DateTime
from typing import Optional
from sqlalchemy.sql.functions import func
from scheduler import constants
from . import mapper_registry
@mapper_registry.mapped
@dataclass
class Execution(DataClassDictMixin):
job_id: str = field(metadata={"sa": Column(Text)})
state: int = field(metadata={"sa": Column(Integer)})
result: Optional[str] = field(metadata={"sa": Column(Text, nullable=True)})
id: int = field(default=None, metadata={"sa": Column(Integer, Sequence('ds_id_seq'), primary_key=True, autoincrement=True)})
datasource_id: int = field(default=None, metadata={"sa": Column(Integer, nullable=True)})
created_at: datetime = field(default=None, metadata={"sa": Column(DateTime(timezone=True), nullable=False, server_default=func.now())})
updated_at: datetime = field(default=None, metadata={"sa": Column(DateTime(timezone=True), nullable=False, server_default=func.now())})
__tablename__ = "msi_executions"
__sa_dataclass_metadata_key__ = "sa"
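# Editor's sketch of constructing a row (values are hypothetical; in practice
# the state integer would come from scheduler.constants):
def _demo_execution():
    return Execution(job_id="job-42", state=0, result=None)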
| 44.576923 | 139 | 0.755824 |
0283e9f4a46087d0920d283936e11e8f0ab7031f
| 545 |
py
|
Python
|
web/user/userinfo.py
|
liukai0322/docklet
|
9e4d1f4fc2e523423b0e94406242a29b20643788
|
[
"BSD-3-Clause"
] | 1 |
2016-05-31T06:52:53.000Z
|
2016-05-31T06:52:53.000Z
|
web/user/userinfo.py
|
liukai0322/docklet
|
9e4d1f4fc2e523423b0e94406242a29b20643788
|
[
"BSD-3-Clause"
] | null | null | null |
web/user/userinfo.py
|
liukai0322/docklet
|
9e4d1f4fc2e523423b0e94406242a29b20643788
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import redirect, request
from dockletreq.dockletrequest import dockletRequest
from view.view import normalView
import json
class userinfoView(normalView):
template_path = "user/info.html"
@classmethod
def get(self):
userinfo = dockletRequest.post('/user/selfQuery/')
userinfo = userinfo["data"]
return self.render(self.template_path, info = userinfo)
@classmethod
def post(self):
result = json.dumps(dockletRequest.post('/user/selfModify/', request.form))
return result
| 28.684211 | 84 | 0.706422 |
5a1c6b8ad99f689ce431f2ceb8bde434544f1cf8
| 801 |
py
|
Python
|
torch/fx/experimental/fx2trt/converter_registry.py
|
vuanvin/pytorch
|
9267fd8d7395074001ad7cf2a8f28082dbff6b0b
|
[
"Intel"
] | 183 |
2018-04-06T21:10:36.000Z
|
2022-03-30T15:05:24.000Z
|
torch/fx/experimental/fx2trt/converter_registry.py
|
vuanvin/pytorch
|
9267fd8d7395074001ad7cf2a8f28082dbff6b0b
|
[
"Intel"
] | 631 |
2018-06-05T16:59:11.000Z
|
2022-03-31T16:26:57.000Z
|
torch/fx/experimental/fx2trt/converter_registry.py
|
vuanvin/pytorch
|
9267fd8d7395074001ad7cf2a8f28082dbff6b0b
|
[
"Intel"
] | 58 |
2018-06-05T16:40:18.000Z
|
2022-03-16T15:37:29.000Z
|
from typing import Callable, Dict, Any
from torch.fx.node import Target
CONVERTERS: Dict[Target, Any] = {}
NO_IMPLICIT_BATCH_DIM_SUPPORT = {}
NO_EXPLICIT_BATCH_DIM_SUPPORT = {}
def tensorrt_converter(
key: Target,
no_implicit_batch_dim: bool = False,
no_explicit_batch_dim: bool = False,
enabled: bool = True
) -> Callable[[Any], Any]:
def register_converter(converter):
CONVERTERS[key] = converter
if no_implicit_batch_dim:
NO_IMPLICIT_BATCH_DIM_SUPPORT[key] = converter
if no_explicit_batch_dim:
NO_EXPLICIT_BATCH_DIM_SUPPORT[key] = converter
return converter
def disable_converter(converter):
return converter
if enabled:
return register_converter
else:
return disable_converter
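# Editor's usage sketch: the target key, converter body, and its signature
# below are hypothetical, purely to show the registry mechanics. Decorating
# stores the function in CONVERTERS under the given FX target, and the flag
# mirrors it into the implicit-batch-dim support table.
@tensorrt_converter("acc_ops.demo_op", no_implicit_batch_dim=True)
def _demo_op_converter(network, target, args, kwargs, name):
    raise NotImplementedError("illustrative stub only")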
| 25.03125 | 58 | 0.695381 |
0c8292160ca0c7e04cec50e42a42bbc834cdd01c
| 5,398 |
py
|
Python
|
research/cv/ivpf/ivpf/lu.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/ivpf/ivpf/lu.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/ivpf/ivpf/lu.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Layers with LU-decomposed weights.
"""
import numpy as np
from mindspore import Parameter, Tensor
from mindspore import dtype as mstype
import mindspore.nn as nn
from mindspore.ops import operations as ops
import mindspore.numpy as mnp
from .roundquant import RoundQuant
class LULinear(nn.Cell):
"""Linear layer with LU decomposed weights."""
def __init__(self, features, args, identity_init=True):
super(LULinear, self).__init__()
self.features = features
self.identity_init = identity_init
self.tril_mask = Tensor(np.tril(np.ones((features, features)), k=-1), mstype.float32)
self.triu_mask = Tensor(np.triu(np.ones((features, features)), k=1), mstype.float32)
        self.weights = Parameter(
            Tensor(np.random.randn(features, features) / features, mstype.float32),
            name='w', requires_grad=True)
self.bias = Parameter(mnp.zeros(features, mstype.float32), name='b', requires_grad=True)
self._initialize(identity_init)
if args.variable_type == 'discrete':
self.rnd = RoundQuant(2**args.n_bits)
self.set_grad(False)
else:
self.rnd = None
self.matmul = ops.MatMul(transpose_b=True)
self.zeros_like = ops.ZerosLike()
def _initialize(self, identity_init):
pass
def _create_lower_upper(self):
"""get lower and upper traingular matrix of weights"""
lower = self.tril_mask * self.weights
upper = self.triu_mask * self.weights
return lower, upper
def construct(self, inputs, ldj, reverse=False):
"""construct"""
lower, upper = self._create_lower_upper()
if self.rnd is None:
if not reverse:
outputs = self.matmul(inputs, upper) + inputs
outputs = self.matmul(outputs, lower) + outputs
outputs = outputs + self.bias
else:
outputs = inputs - self.bias
for i in range(1, outputs.shape[1]):
outputs[:, i:i + 1] -= self.matmul(outputs[:, :i], lower[i:i + 1, :i])
for i in range(outputs.shape[1] - 2, -1, -1):
outputs[:, i:i + 1] -= self.matmul(outputs[:, i + 1:], upper[i:i + 1, i + 1:])
else:
inputs = mnp.reshape(mnp.ravel(inputs), inputs.shape)
if not reverse:
outputs = mnp.concatenate([self.matmul(inputs[:, i + 1:], upper[i:i + 1, i + 1:])
for i in range(inputs.shape[1] - 1)], axis=1)
outputs = mnp.concatenate([outputs, self.zeros_like(inputs[:, -1:])], axis=1)
outputs = self.rnd(outputs)
outputs += inputs
out1 = mnp.concatenate([self.matmul(outputs[:, :i], lower[i:i + 1, :i])
for i in range(1, inputs.shape[1])], axis=1)
out1 = mnp.concatenate([self.zeros_like(inputs[:, -1:]), out1], axis=1)
out1 = self.rnd(out1)
out1 += outputs
outputs = out1 + self.rnd(self.bias)
else:
outputs = inputs - self.rnd(self.bias)
for i in range(1, outputs.shape[1]):
outputs[:, i:i + 1] -= self.rnd(self.matmul(outputs[:, :i], lower[i:i + 1, :i]))
for i in range(outputs.shape[1] - 2, -1, -1):
outputs[:, i:i + 1] -= self.rnd(self.matmul(outputs[:, i + 1:], upper[i:i + 1, i + 1:]))
return outputs, ldj
class LUConv1x1(LULinear):
"""1x1 convolution layer with LU decomposed weights."""
def __init__(self, num_channels, args, identity_init=True):
super(LUConv1x1, self).__init__(num_channels, args, identity_init)
self.transpose = ops.Transpose()
self.reshape = ops.Reshape()
def _lu_forward_inverse(self, inputs, ldj, reverse=False):
"""convert 1x1 convolution to linear transform"""
b, c, h, w = inputs.shape
inputs = self.transpose(inputs, (0, 2, 3, 1))
inputs = self.reshape(inputs, (b * h * w, c))
outputs, ldj = super(LUConv1x1, self).construct(inputs, ldj, reverse)
outputs = self.reshape(outputs, (b, h, w, c))
outputs = self.transpose(outputs, (0, 3, 1, 2))
return outputs, ldj
def construct(self, inputs, ldj, reverse=False):
"""construct"""
if inputs.dim() != 4:
raise ValueError("Inputs must be a 4D tensor.")
outputs, ldj = self._lu_forward_inverse(inputs, ldj, reverse)
return outputs, ldj
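# Editor's usage sketch (the config object and shapes are assumptions): in
# the continuous setting the rounding quantizer is disabled and the layer
# acts as an invertible 1x1 convolution with LU-parameterized weights.
def _demo_luconv1x1():
    from types import SimpleNamespace
    import mindspore.numpy as msnp
    args = SimpleNamespace(variable_type='continuous', n_bits=8)
    layer = LUConv1x1(num_channels=8, args=args)
    x = msnp.zeros((2, 8, 4, 4))
    out, ldj = layer(x, msnp.zeros(2), reverse=False)
    return out.shape  # (2, 8, 4, 4)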
| 37.748252 | 108 | 0.571693 |
0bb1c8adbc80d75ecc15a2d59531ccf07a2424db
| 4,801 |
py
|
Python
|
model_zoo/gpt/deploy/python/inference.py
|
mukaiu/PaddleNLP
|
0315365dbafa6e3b1c7147121ba85e05884125a5
|
[
"Apache-2.0"
] | null | null | null |
model_zoo/gpt/deploy/python/inference.py
|
mukaiu/PaddleNLP
|
0315365dbafa6e3b1c7147121ba85e05884125a5
|
[
"Apache-2.0"
] | null | null | null |
model_zoo/gpt/deploy/python/inference.py
|
mukaiu/PaddleNLP
|
0315365dbafa6e3b1c7147121ba85e05884125a5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import numpy as np
import paddle
from paddle import inference
from paddlenlp.data import Stack, Tuple, Pad
from paddlenlp.transformers import GPTForGreedyGeneration, GPTChineseTokenizer, GPTTokenizer
MODEL_CLASSES = {
"gpt-cn": (GPTForGreedyGeneration, GPTChineseTokenizer),
"gpt": (GPTForGreedyGeneration, GPTTokenizer),
}
def parse_args():
parser = argparse.ArgumentParser()
# yapf: disable
parser.add_argument("--model_type", default=None, type=str, required=True, help="Model type selected in the list: " + ", ".join(MODEL_CLASSES.keys()))
parser.add_argument("--model_path", default=None, type=str, required=True, help="The path prefix of inference model to be used.")
parser.add_argument("--select_device", default="gpu", choices=["gpu", "cpu", "xpu"], help="Device selected for inference.")
# yapf: enable
args = parser.parse_args()
return args
class Predictor(object):
def __init__(self, predictor, input_handles, output_handles):
self.predictor = predictor
self.input_handles = input_handles
self.output_handles = output_handles
@classmethod
def create_predictor(cls, args):
config = paddle.inference.Config(args.model_path + ".pdmodel",
args.model_path + ".pdiparams")
if args.select_device == "gpu":
# Set GPU configs accordingly
config.enable_use_gpu(100, 0)
elif args.select_device == "cpu":
# Set CPU configs accordingly,
# such as enable_mkldnn, set_cpu_math_library_num_threads
config.disable_gpu()
elif args.select_device == "xpu":
# Set XPU configs accordingly
config.enable_xpu(100)
config.switch_use_feed_fetch_ops(False)
predictor = paddle.inference.create_predictor(config)
input_handles = [
predictor.get_input_handle(name)
for name in predictor.get_input_names()
]
output_handles = [
predictor.get_output_handle(name)
for name in predictor.get_output_names()
]
return cls(predictor, input_handles, output_handles)
def predict_batch(self, data):
for input_field, input_handle in zip(data, self.input_handles):
input_handle.copy_from_cpu(input_field.numpy(
) if isinstance(input_field, paddle.Tensor) else input_field)
self.predictor.run()
output = [
output_handle.copy_to_cpu() for output_handle in self.output_handles
]
return output
def predict(self, dataset, batch_size=1):
outputs = []
for data in dataset:
output = self.predict_batch(data)
outputs.append(output)
return outputs
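# Editor's usage sketch (paths are hypothetical; this presumes an exported
# .pdmodel/.pdiparams pair under ./inference_model/):
def _demo_predict_batch():
    args = argparse.Namespace(model_type="gpt",
                              model_path="./inference_model/gpt",
                              select_device="cpu")
    predictor = Predictor.create_predictor(args)
    batch = [np.array([[1, 2, 3]], dtype="int64")]  # dummy token ids
    return predictor.predict_batch(batch)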
def main():
args = parse_args()
predictor = Predictor.create_predictor(args)
args.model_type = args.model_type.lower()
model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
tokenizer = tokenizer_class.from_pretrained(os.path.dirname(
args.model_path))
if args.model_type == "gpt":
ds = [
"Question: Who is the CEO of Apple? Answer:",
"Question: Who is the CEO of Facebook? Answer:",
"Question: How tall is the highest peak in the world? Answer:",
"Question: Who is the president of the united states? Answer:",
"Question: Where is the capital of France? Answer:",
"Question: What is the largest animal in the ocean? Answer:",
"Question: Who is the chancellor of Germany? Answer:",
]
elif args.model_type == "gpt-cn":
ds = [
"问题:苹果的CEO是谁? 答案:",
"问题:中国的首都是哪里?答案:",
"问题:世界上最高的山峰是? 答案:",
]
dataset = [[
np.array(tokenizer(text)["input_ids"]).astype("int64").reshape([1, -1])
] for text in ds]
outs = predictor.predict(dataset)
for res in outs:
res_ids = list(res[0].reshape([-1]))
res_ids = [int(x) for x in res_ids]
print(tokenizer.convert_ids_to_string(res_ids))
if __name__ == "__main__":
main()
| 36.930769 | 154 | 0.653822 |
b2e19a83348cb59fe0368e27bac3842ea8cc149b
| 2,882 |
py
|
Python
|
backend/api/apps.py
|
giacomooo/CASFEE_Project2
|
420ff488d6b9deefe6623a45ecfed299f97a4639
|
[
"MIT"
] | null | null | null |
backend/api/apps.py
|
giacomooo/CASFEE_Project2
|
420ff488d6b9deefe6623a45ecfed299f97a4639
|
[
"MIT"
] | null | null | null |
backend/api/apps.py
|
giacomooo/CASFEE_Project2
|
420ff488d6b9deefe6623a45ecfed299f97a4639
|
[
"MIT"
] | null | null | null |
import json
import ssl
from django.apps import AppConfig
from django.conf import settings
from six.moves import urllib
from cryptography import x509
from cryptography.hazmat import backends
# Don't verify ssl Certificates
# pylint: disable=protected-access
ssl._create_default_https_context = ssl._create_unverified_context
class ApiConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'api'
def ready(self):
# Load Public Key (as Certificate) from Keycloak-Realm
# ------------------------------------
#return None
try: # (A) Load keys from url...
url = settings.KEYCLOAK['HOST']+"/auth/realms/" + settings.KEYCLOAK['REALM_NAME']+"/protocol/openid-connect/certs"
# pylint: disable=consider-using-with
response = urllib.request.urlopen(url)
# Response-Format:
# {
# "keys": [
# {
# "kid": "sFkzNoS9...8yQNR40",
# "kty": "RSA",
# "alg": "RS256",
# "use": "sig",
# "n": "gLfOWIww2M...tFrBG_w",
# "e": "AQAB",
# "x5c": [
# "MIIClTCCAX...Fq5EyBWe53G5gk="
# ],
# "x5t": "p7R_1_Lwk...NtfNwCw",
# "x5t#S256": "76RGrg4I...MLFmNWB3So"
# }
# ]
# }
except Exception as ex:
#"message": f"Request on '{dj_settings.KEYCLOAK_PUBLIC_KEY_URL}' failed! Check if url is reachable...", \
message = { "error": "request.urlopen()",
"message": f"Request on '{url}' failed! Check if url is reachable...",
"exception":{ "type":str(ex.__class__.__name__),
"message":str(ex),
}
}
raise Exception(message) from ex
try: # (B) Extract Public Key as Certificate out of response
json_response = json.loads(response.read().decode('utf-8'))
cert = '-----BEGIN CERTIFICATE-----\n' + json_response['keys'][0]['x5c'][0] + '\n-----END CERTIFICATE-----'
cert = x509.load_pem_x509_certificate(cert.encode('utf-8'), backends.default_backend())
pki = cert.public_key()
except Exception as ex:
message = {
"error": "extracting pki",
"message": "Error during extracting pki out of json-response of Keycloak! Debug section...",
"exception":{
"type":str(ex.__class__.__name__),
"message":str(ex),
}
}
raise Exception(message) from ex
# (C) Write result back to settings...
settings.JWT_AUTH['JWT_PUBLIC_KEY'] = pki
| 40.027778 | 126 | 0.505205 |
33639871c24dbe8220918ed7e0ceaf23e4536a1c
| 1,828 |
py
|
Python
|
python-bildungslogin/ucs-test/97_bildungslogin_python/00_unittests.py
|
univention/bildungslogin
|
29bebe858a5445dd5566aad594b33b9dd716eca4
|
[
"MIT"
] | null | null | null |
python-bildungslogin/ucs-test/97_bildungslogin_python/00_unittests.py
|
univention/bildungslogin
|
29bebe858a5445dd5566aad594b33b9dd716eca4
|
[
"MIT"
] | null | null | null |
python-bildungslogin/ucs-test/97_bildungslogin_python/00_unittests.py
|
univention/bildungslogin
|
29bebe858a5445dd5566aad594b33b9dd716eca4
|
[
"MIT"
] | null | null | null |
#!/usr/share/ucs-test/runner /usr/bin/py.test -slvv --cov --cov-config=.coveragerc --cov-report=
# -*- coding: utf-8 -*-
#
# Copyright 2021 Univention GmbH
#
# https://www.univention.de/
#
# All rights reserved.
#
# The source code of this program is made available
# under the terms of the GNU Affero General Public License version 3
# (GNU AGPL V3) as published by the Free Software Foundation.
#
# Binary versions of this program provided by Univention to you as
# well as other copyrighted, protected or trademarked materials like
# Logos, graphics, fonts, specific documentations and configurations,
# cryptographic keys etc. are subject to a license agreement between
# you and Univention and not subject to the GNU AGPL V3.
#
# In the case you use this program under the terms of the GNU AGPL V3,
# the program is provided in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License with the Debian GNU/Linux or Univention distribution in file
# /usr/share/common-licenses/AGPL-3; if not, see
# <https://www.gnu.org/licenses/>.
## desc: Execute unittests
## exposure: safe
## tags: [bildungslogin]
## roles: [domaincontroller_master, domaincontroller_backup, domaincontroller_slave]
## packages: [python-bildungslogin]
import os.path
import pytest
def test_unittests():
"""Execute unittests"""
retcode = pytest.main(
[
"-lvvx",
"--cov",
"--cov-config=.coveragerc",
"--cov-append",
"--cov-report=",
os.path.join(os.path.dirname(__file__), "unittests"),
]
)
assert retcode == 0
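# Editor's note, as a sketch: pytest.main returns an exit status where
# 0 (ExitCode.OK) means every collected test passed, which is exactly what
# the assertion above checks.
def _demo_exit_code():
    return pytest.main(["-q", os.path.join(os.path.dirname(__file__), "unittests")])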
| 33.851852 | 96 | 0.704048 |
d7cfe0a05a02b1f1459d2136908509e268166e83
| 4,897 |
py
|
Python
|
packages/watchmen-model/src/watchmen_model/admin/pipeline.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-model/src/watchmen_model/admin/pipeline.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-model/src/watchmen_model/admin/pipeline.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
from enum import Enum
from typing import List, Optional, Union
from pydantic import BaseModel
from watchmen_model.common import construct_parameter_joint, OptimisticLock, PipelineId, \
PipelineStageId, PipelineUnitId, TenantBasedTuple, TopicId
from watchmen_utilities import ArrayHelper
from .conditional import Conditional
from .pipeline_action import DeleteTopicActionType, PipelineAction, ReadTopicActionType, SystemActionType, \
WriteTopicActionType
from .pipeline_action_delete import DeleteRowAction, DeleteRowsAction
from .pipeline_action_read import ExistsAction, ReadFactorAction, ReadFactorsAction, ReadRowAction, ReadRowsAction
from .pipeline_action_system import AlarmAction, CopyToMemoryAction, WriteToExternalAction
from .pipeline_action_write import InsertOrMergeRowAction, InsertRowAction, MergeRowAction, WriteFactorAction
def construct_action(action: Optional[Union[dict, PipelineAction]]) -> Optional[PipelineAction]:
if action is None:
return None
elif isinstance(action, PipelineAction):
return action
else:
action_type = action.get('type')
if action_type == SystemActionType.ALARM:
return AlarmAction(**action)
elif action_type == SystemActionType.COPY_TO_MEMORY:
return CopyToMemoryAction(**action)
elif action_type == SystemActionType.WRITE_TO_EXTERNAL:
return WriteToExternalAction(**action)
elif action_type == ReadTopicActionType.READ_ROW:
return ReadRowAction(**action)
elif action_type == ReadTopicActionType.READ_FACTOR:
return ReadFactorAction(**action)
elif action_type == ReadTopicActionType.EXISTS:
return ExistsAction(**action)
elif action_type == ReadTopicActionType.READ_ROWS:
return ReadRowsAction(**action)
elif action_type == ReadTopicActionType.READ_FACTORS:
return ReadFactorsAction(**action)
elif action_type == WriteTopicActionType.MERGE_ROW:
return MergeRowAction(**action)
elif action_type == WriteTopicActionType.INSERT_ROW:
return InsertRowAction(**action)
elif action_type == WriteTopicActionType.INSERT_OR_MERGE_ROW:
return InsertOrMergeRowAction(**action)
elif action_type == WriteTopicActionType.WRITE_FACTOR:
return WriteFactorAction(**action)
elif action_type == DeleteTopicActionType.DELETE_ROW:
return DeleteRowAction(**action)
elif action_type == DeleteTopicActionType.DELETE_ROWS:
return DeleteRowsAction(**action)
else:
raise Exception(f'Pipeline action type[{action_type}] cannot be recognized.')
def construct_actions(actions: Optional[list] = None) -> Optional[List[PipelineAction]]:
if actions is None:
return None
else:
return ArrayHelper(actions).map(lambda x: construct_action(x)).to_list()
class PipelineUnit(Conditional, BaseModel):
unitId: PipelineUnitId = None
name: str = None
loopVariableName: str = None
do: List[PipelineAction] = []
def __setattr__(self, name, value):
if name == 'do':
super().__setattr__(name, construct_actions(value))
elif name == 'on':
super().__setattr__(name, construct_parameter_joint(value))
else:
super().__setattr__(name, value)
def construct_unit(unit: Optional[Union[dict, PipelineUnit]]) -> Optional[PipelineUnit]:
if unit is None:
return None
elif isinstance(unit, PipelineUnit):
return unit
else:
return PipelineUnit(**unit)
def construct_units(units: Optional[list] = None) -> Optional[List[PipelineUnit]]:
if units is None:
return None
else:
return ArrayHelper(units).map(lambda x: construct_unit(x)).to_list()
class PipelineStage(Conditional, BaseModel):
stageId: PipelineStageId = None
name: str = None
units: List[PipelineUnit] = []
def __setattr__(self, name, value):
if name == 'units':
super().__setattr__(name, construct_units(value))
elif name == 'on':
super().__setattr__(name, construct_parameter_joint(value))
else:
super().__setattr__(name, value)
class PipelineTriggerType(str, Enum):
INSERT = 'insert',
MERGE = 'merge',
INSERT_OR_MERGE = 'insert-or-merge',
DELETE = 'delete',
def construct_stage(stage: Optional[Union[dict, PipelineStage]]) -> Optional[PipelineStage]:
if stage is None:
return None
elif isinstance(stage, PipelineStage):
return stage
else:
return PipelineStage(**stage)
def construct_stages(stages: Optional[list] = None) -> Optional[List[PipelineStage]]:
if stages is None:
return None
else:
return ArrayHelper(stages).map(lambda x: construct_stage(x)).to_list()
class Pipeline(Conditional, TenantBasedTuple, OptimisticLock, BaseModel):
pipelineId: PipelineId = None
topicId: TopicId = None
name: str = None
type: PipelineTriggerType = None
stages: List[PipelineStage] = []
enabled: bool = None
validated: bool = None
def __setattr__(self, name, value):
if name == 'stages':
super().__setattr__(name, construct_stages(value))
elif name == 'on':
super().__setattr__(name, construct_parameter_joint(value))
else:
super().__setattr__(name, value)
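# Editor's sketch (field values are hypothetical): the construct_* factories
# above turn nested dicts into typed models, so a stage deserializes in one call.
def _demo_stage_from_dict():
    return construct_stage({'stageId': 's-1', 'name': 'stage-1', 'units': []})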
| 33.087838 | 114 | 0.771493 |
cc0e17ee0bc188f5e8239bf0cbd35a92b56bb5ec
| 33,675 |
py
|
Python
|
Packs/OpsGenie/Integrations/OpsGenieV3/OpsGenieV3_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/OpsGenie/Integrations/OpsGenieV3/OpsGenieV3_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/OpsGenie/Integrations/OpsGenieV3/OpsGenieV3_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import pytest
import io
from CommonServerPython import *
import OpsGenieV3
from unittest import mock
def util_load_json(path):
with io.open(path, mode='r', encoding='utf-8') as f:
return json.loads(f.read())
def test_create_alert_wrong_responders():
"""
Given:
- An app client object
When:
- Calling function create_alert with argument responders in the wrong format
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.create_alert(mock_client, {'responders': ['team', 'id']})
def test_create_alert(mocker):
"""
Given:
- An app client object
- Responders "team,id,123"
When:
- Calling function create_alert with argument responders in the right format
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'create_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/create_alert.json'))
res = OpsGenieV3.create_alert(mock_client, {'responders': "team,id,123"})
assert (res.raw_response == util_load_json('test_data/create_alert.json'))
def test_get_alerts(mocker):
"""
Given:
- An app client object
- Limit = 1
When:
- Calling function list_alerts
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_alerts',
return_value=util_load_json('test_data/get_alerts.json'))
res = OpsGenieV3.get_alerts(mock_client, {"limit": 1})
assert (len(res.outputs) == 1)
def test_get_alerts_going_to_right_function():
"""
Given:
- An app client object
When:
- Calling function get_alerts
Case A: "alert-id" = 1234
Case B: No arguments
Then:
- Ensure the right function was called
Case A: Called get_alert
Case B: Called list_alerts
"""
mock_client = OpsGenieV3.Client(base_url="")
mock_client.get_alert = mock.MagicMock()
OpsGenieV3.get_alerts(mock_client, {"alert-id": 1234})
assert mock_client.get_alert.called
OpsGenieV3.list_alerts = mock.MagicMock()
OpsGenieV3.get_alerts(mock_client, {})
assert OpsGenieV3.list_alerts.called
def test_delete_alert(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
When:
- Calling function delete_alert
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'delete_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/delete_alert.json'))
res = OpsGenieV3.delete_alert(mock_client, {"alert-id": 1234})
assert (res.raw_response == util_load_json('test_data/delete_alert.json'))
def test_ack_alert(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
When:
- Calling function ack_alert
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'ack_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/ack_alert.json'))
res = OpsGenieV3.ack_alert(mock_client, {"alert-id": 1234})
assert (res.raw_response == util_load_json('test_data/ack_alert.json'))
def test_close_alert(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
When:
- Calling function close_alert
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'close_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/close_alert.json'))
res = OpsGenieV3.close_alert(mock_client, {"alert-id": 1234})
assert (res.raw_response == util_load_json('test_data/close_alert.json'))
def test_assign_alert_without_args():
"""
Given:
- An app client object
When:
- Calling function assign_alert with no arguments
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.assign_alert(mock_client, {})
def test_assign_alert(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
- Owner_id = 123
When:
- Calling function assign_alert
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'assign_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/assign_alert.json'))
res = OpsGenieV3.assign_alert(mock_client, {"alert-id": 1234, "owner_id": 123})
assert (res.raw_response == util_load_json('test_data/assign_alert.json'))
def test_add_responder_alert_wrong_responders():
"""
Given:
- An app client object
When:
- Calling function add_responder_alert with argument responders in the wrong format
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.add_responder_alert(mock_client, {'responders': ['team', 'id']})
def test_add_responder_alert(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
- owner_id = 123
When:
- Calling function add_responder_alert
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'add_responder_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/add_responder_alert.json'))
res = OpsGenieV3.add_responder_alert(mock_client, {"alert-id": 1234, "owner_id": 123})
assert (res.raw_response == util_load_json('test_data/add_responder_alert.json'))
def test_get_escalations_without_args():
"""
Given:
- An app client object
When:
- Calling function escalate_alert with no arguments
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.escalate_alert(mock_client, {})
def test_get_escalations(mocker):
"""
Given:
- An app client object
When:
- Calling function get_escalations
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'get_escalations',
return_value=util_load_json('test_data/get_escalations.json'))
res = OpsGenieV3.get_escalations(mock_client, {})
assert len(res.outputs) == 2
def test_get_escalation(mocker):
"""
Given:
- An app client object
- escalation_id = 123
When:
- Calling function get_escalations
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'get_escalation',
return_value=util_load_json('test_data/get_escalations.json'))
res = OpsGenieV3.get_escalations(mock_client, {"escalation_id": 123})
assert len(res.outputs) == 2
def test_escalate_alert_without_args():
"""
Given:
- An app client object
When:
- Calling function escalate_alert with no arguments
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.escalate_alert(mock_client, {})
def test_escalate_alert(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
        - escalation_id = 123
When:
- Calling function escalate_alert
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'escalate_alert',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/escalate_alert.json'))
res = OpsGenieV3.escalate_alert(mock_client, {"alert-id": 1234, "escalation_id": 123})
assert (res.raw_response == util_load_json('test_data/escalate_alert.json'))
def test_add_alert_tag(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
- tags = [1,2]
When:
- Calling function add_alert_tag
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'add_alert_tag',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/add_alert_tag.json'))
res = OpsGenieV3.add_alert_tag(mock_client, {"alert-id": 1234, "tags": [1, 2]})
assert (res.raw_response == util_load_json('test_data/add_alert_tag.json'))
def test_remove_alert_tag(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
- tags = [1,2]
When:
- Calling function remove_alert_tag
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'remove_alert_tag',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/remove_alert_tag.json'))
res = OpsGenieV3.remove_alert_tag(mock_client, {"alert-id": 1234, "tags": [1, 2]})
assert (res.raw_response == util_load_json('test_data/remove_alert_tag.json'))
def test_get_alert_attachments(mocker):
"""
Given:
- An app client object
- Alert-id = 1234
When:
- Calling function get_alert_attachments
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'get_alert_attachments',
return_value=util_load_json('test_data/get_alert_attachments.json'))
res = OpsGenieV3.get_alert_attachments(mock_client, {"alert-id": 1234})
assert (res.readable_output == "### OpsGenie Attachment\n**No entries.**\n")
def test_get_schedules():
"""
Given:
- An app client object
When:
- Calling function get_schedules
Case A: "schedule_id" = 1234
Case B: No arguments
Then:
- Ensure the right function was called
Case A: Called get_schedule
Case B: Called list_schedules
"""
mock_client = OpsGenieV3.Client(base_url="")
mock_client.get_schedule = mock.MagicMock()
OpsGenieV3.get_schedules(mock_client, {"schedule_id": 1234})
assert mock_client.get_schedule.called
mock_client.list_schedules = mock.MagicMock()
OpsGenieV3.get_schedules(mock_client, {})
assert mock_client.list_schedules.called
def test_get_schedule_overrides_without_args():
"""
Given:
- An app client object
When:
- Calling function get_schedule_overrides with no arguments
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.get_schedule_overrides(mock_client, {})
def test_get_schedule_without_args():
"""
Given:
- An app client object
When:
- Calling function get_schedule with no arguments
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
mock_client.get_schedule({})
def test_get_schedule_overrides():
"""
Given:
- An app client object
When:
- Calling function get_schedule_overrides
Case A: "schedule_id" = 1234 , override_alias = 123
Case B: No arguments
Then:
- Ensure the right function was called
Case A: Called get_schedule_override
Case B: Called list_schedule_overrides
"""
mock_client = OpsGenieV3.Client(base_url="")
mock_client.get_schedule_override = mock.MagicMock()
OpsGenieV3.get_schedule_overrides(mock_client, {"schedule_id": 1234, "override_alias": 123})
assert mock_client.get_schedule_override.called
mock_client.list_schedule_overrides = mock.MagicMock()
OpsGenieV3.get_schedule_overrides(mock_client, {"schedule_id": 1234})
assert mock_client.list_schedule_overrides.called
def test_get_on_call_without_args():
"""
Given:
- An app client object
When:
- Calling function get_on_call with no arguments
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.get_on_call(mock_client, {})
def test_get_on_call(mocker):
"""
Given:
- An app client object
- schedule_id = 1234
When:
- Calling function get_on_call
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'get_on_call',
return_value=util_load_json('test_data/delete_incident.json'))
res = OpsGenieV3.get_on_call(mock_client, {"schedule_id": 1234})
assert (res.raw_response == util_load_json('test_data/delete_incident.json'))
def test_create_incident_wrong_args():
"""
Given:
- An app client object
When:
- Calling function create_incident with argument responders in the wrong format
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.create_incident(mock_client, {'responders': ['team', 'id']})
def test_create_incident(mocker):
"""
Given:
- An app client object
When:
- Calling function create_incident
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'create_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/create_incident.json'))
res = OpsGenieV3.create_incident(mock_client, {})
assert (res.raw_response == util_load_json('test_data/create_incident.json'))
def test_delete_incident(mocker):
"""
Given:
- incident_id = 1234
When:
- Calling function delete_incident
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'delete_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/delete_incident.json'))
res = OpsGenieV3.delete_incident(mock_client, {"incident_id": 1234})
assert (res.raw_response == util_load_json('test_data/delete_incident.json'))
def test_get_incidents(mocker):
"""
Given:
- An app client object
- limit = 1
When:
- Calling function get_incidents
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_incidents',
return_value=util_load_json('test_data/get_incidents.json'))
res = OpsGenieV3.get_incidents(mock_client, {"limit": 1})
assert (len(res.outputs) == 1)
def test_responders_to_json():
"""
Given:
- An app client object
- responders = ["team", "id", 1, "schedule", "name", "a"]
- responder_key = 'responders'
When:
- Calling function responders_to_json
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
res = mock_client.responders_to_json(responders=["team", "id", 1, "schedule", "name", "a"],
responder_key='responders')
assert (res == {'responders': [{'id': 1, 'type': 'team'}, {'name': 'a', 'type': 'schedule'}]})
def test_get_incidents_going_to_right_function():
"""
Given:
- An app client object
When:
- Calling function get_incidents
Case A: "incident_id" = 1234
Case B: No arguments
Then:
- Ensure the right function was called
Case A: Called get_incident
Case B: Called list_incidents
"""
mock_client = OpsGenieV3.Client(base_url="")
mock_client.get_incident = mock.MagicMock()
OpsGenieV3.get_incidents(mock_client, {"incident_id": 1234})
assert mock_client.get_incident.called
OpsGenieV3.list_incidents = mock.MagicMock()
OpsGenieV3.get_incidents(mock_client, {})
assert OpsGenieV3.list_incidents.called
def test_close_incident(mocker):
"""
Given:
- An app client object
- incident_id = 1234
When:
- Calling function close_incident
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'close_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/close_incident.json'))
res = OpsGenieV3.close_incident(mock_client, {"incident_id": 1234})
assert (res.raw_response == util_load_json('test_data/close_incident.json'))
def test_resolve_incident(mocker):
"""
Given:
- An app client object
- incident_id = 1234
When:
- Calling function resolve_incident
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'resolve_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/resolve_incident.json'))
res = OpsGenieV3.resolve_incident(mock_client, {"incident_id": 1234})
assert (res.raw_response == util_load_json('test_data/resolve_incident.json'))
def test_add_responder_incident_wrong_args():
"""
Given:
- An app client object
When:
- Calling function add_responder_incident with argument responders in the wrong format
Then:
        - Ensure the call raises an exception.
"""
mock_client = OpsGenieV3.Client(base_url="")
with pytest.raises(DemistoException):
OpsGenieV3.add_responder_incident(mock_client, {'responders': ['team', 'id']})
def test_add_responder_incident(mocker):
"""
Given:
- An app client object
- incident_id = 1234
- responders = ["team", "id", "name"]
When:
- Calling function add_responder_incident
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'add_responder_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/add_responder_incident.json'))
res = OpsGenieV3.add_responder_incident(mock_client, {"incident_id": 1234, "responders": ["team", "id", "name"]})
assert (res.raw_response == util_load_json('test_data/add_responder_incident.json'))
def test_add_tag_incident(mocker):
"""
Given:
- An app client object
- incident_id = 1234
- tags = [1, 2]
When:
- Calling function add_tag_incident
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'add_tag_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/add_tag_incident.json'))
res = OpsGenieV3.add_tag_incident(mock_client, {"incident_id": 1234, "tags": [1, 2]})
assert (res.raw_response == util_load_json('test_data/add_tag_incident.json'))
def test_remove_tag_incident(mocker):
"""
Given:
- An app client object
- incident_id = 1234
- tags = [1, 2]
When:
- Calling function remove_tag_incident
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'remove_tag_incident',
return_value=util_load_json('test_data/request.json'))
mocker.patch.object(mock_client, 'get_request',
return_value=util_load_json('test_data/remove_tag_incident.json'))
res = OpsGenieV3.remove_tag_incident(mock_client, {"incident_id": 1234, "tags": [1, 2]})
assert (res.raw_response == util_load_json('test_data/remove_tag_incident.json'))
def test_get_teams(mocker):
"""
Given:
- An app client object
When:
- Calling function get_teams
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_teams',
return_value=util_load_json('test_data/get_teams.json'))
res = OpsGenieV3.get_teams(mock_client, {})
assert len(res.outputs) == 2
def test_get_teams_going_to_right_function():
"""
Given:
- An app client object
When:
- Calling function get_teams
Case A: "team_id" = 1234
Case B: No arguments
Then:
- Ensure the right function was called
Case A: Called get_team
Case B: Called list_teams
"""
mock_client = OpsGenieV3.Client(base_url="")
mock_client.get_team = mock.MagicMock()
OpsGenieV3.get_teams(mock_client, {"team_id": 1234})
assert mock_client.get_team.called
mock_client.list_teams = mock.MagicMock()
OpsGenieV3.get_teams(mock_client, {})
assert mock_client.list_teams.called
def test_fetch_incidents_command(mocker):
"""
Given:
- An app client object
When:
- Calling function fetch_incidents_command
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_alerts',
return_value=util_load_json('test_data/get_alerts.json'))
mocker.patch.object(mock_client, 'list_incidents',
return_value=util_load_json('test_data/get_incidents.json'))
mocker.patch.object(OpsGenieV3, '_get_utc_now', return_value=datetime(2021, 11, 26))
mocker.patch.object(OpsGenieV3, '_parse_fetch_time', return_value='2021-11-23T12:19:48Z')
res, last_run = OpsGenieV3.fetch_incidents_command(mock_client, {"max_fetch": 1})
assert len(res) == 2
assert last_run == {'Alerts': {'lastRun': '2021-11-26T00:00:00Z',
'next_page': 'https://api.opsgenie.com/v2/alerts?limit=1&sort='
'createdAt&offset=1&order=desc'},
'Incidents': {'lastRun': '2021-11-26T00:00:00Z',
'next_page': 'https://api.opsgenie.com/v1/incidents?limit=1&'
'sort=insertedAt&offset=1&order=desc'}}
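# Note: fetch persists these next_page URLs in last_run so the following
# fetch cycle can resume paging from the stored cursor instead of starting
# over at offset 0 (exercised by the paging tests below).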
def test_fetch_incidents_command_no_result(mocker):
"""
Given:
- An app client object
- max_fetch = 1
When:
- Calling function fetch_incidents_command
    - The list_alerts and list_incidents functions return an empty response
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_alerts',
return_value=util_load_json('test_data/empty_response.json'))
mocker.patch.object(mock_client, 'list_incidents',
return_value=util_load_json('test_data/empty_response.json'))
mocker.patch.object(OpsGenieV3, '_get_utc_now', return_value=datetime(2021, 11, 26))
mocker.patch.object(OpsGenieV3, '_parse_fetch_time', return_value='2021-11-23T12:19:48Z')
res, last_run = OpsGenieV3.fetch_incidents_command(mock_client, {"max_fetch": 1})
assert len(res) == 0
assert last_run == {'Alerts': {'lastRun': '2021-11-26T00:00:00Z', 'next_page': None},
'Incidents': {'lastRun': '2021-11-26T00:00:00Z', 'next_page': None}}
def test_fetch_with_paging_only_alerts(mocker):
"""
Given:
- An app client object
- max_fetch = 2
- event_types = OpsGenieV3.ALERT_TYPE
When:
- Calling function fetch_incidents_command
    - The list_alerts function returns a result with paging
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_alerts',
return_value=util_load_json('test_data/get_alerts.json'))
mocker.patch.object(mock_client, 'get_paged',
return_value=util_load_json('test_data/get_alerts_without_next.json'))
mocker.patch.object(OpsGenieV3, '_get_utc_now', return_value=datetime(2021, 11, 26))
mocker.patch.object(OpsGenieV3, '_parse_fetch_time', return_value='2021-11-23T12:19:48Z')
res, last_run = OpsGenieV3.fetch_incidents_command(mock_client, {"max_fetch": 2,
"event_types": OpsGenieV3.ALERT_TYPE})
assert (last_run == {'Alerts': {'lastRun': '2021-11-26T00:00:00Z',
'next_page': 'https://api.opsgenie.com/v2/alerts?limit=1&sort=createdAt&offset=1&order=desc'},
'Incidents': {'lastRun': None, 'next_page': None}})
mocker.patch.object(demisto, 'getLastRun', return_value=last_run)
res, last_run = OpsGenieV3.fetch_incidents_command(mock_client,
{"max_fetch": 2, "event_types": OpsGenieV3.ALERT_TYPE},
last_run)
assert (last_run == {'Alerts': {'lastRun': '2021-11-26T00:00:00Z', 'next_page': None},
'Incidents': {'lastRun': None, 'next_page': None}})
def test_fetch_with_paging_only_incidents(mocker):
"""
Given:
- An app client object
- max_fetch = 2
- event_types = OpsGenieV3.INCIDENT_TYPE
When:
- Calling function fetch_incidents_command
    - The list_incidents function returns a result with paging
Then:
- Ensure the return data is correct
"""
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mock_client = OpsGenieV3.Client(base_url="")
mocker.patch.object(mock_client, 'list_incidents',
return_value=util_load_json('test_data/get_incidents.json'))
mocker.patch.object(mock_client, 'get_paged',
return_value=util_load_json('test_data/get_incidents_without_next.json'))
mocker.patch.object(OpsGenieV3, '_get_utc_now', return_value=datetime(2021, 11, 26))
mocker.patch.object(OpsGenieV3, '_parse_fetch_time', return_value='2021-11-23T12:19:48Z')
res, last_run = OpsGenieV3.fetch_incidents_command(mock_client, {"max_fetch": 2,
"event_types": OpsGenieV3.INCIDENT_TYPE})
assert (last_run == {'Incidents': {'lastRun': '2021-11-26T00:00:00Z',
'next_page': 'https://api.opsgenie.com/v1/incidents?limit='
'1&sort=insertedAt&offset=1&order=desc'},
'Alerts': {'lastRun': None, 'next_page': None}})
mocker.patch.object(demisto, 'getLastRun', return_value=last_run)
res, last_run = OpsGenieV3.fetch_incidents_command(mock_client,
{"max_fetch": 2, "event_types": OpsGenieV3.INCIDENT_TYPE},
last_run)
assert (last_run == {'Incidents': {'lastRun': '2021-11-26T00:00:00Z', 'next_page': None},
'Alerts': {'lastRun': None, 'next_page': None}})
def test_build_query_fetch():
"""
Given:
- An app client object
- args
- is_fetch_query = True
When:
- Calling function build_query
Then:
- Ensure the return data is correct
"""
args = {
"query": "createdAt < 147039484114",
"status": "Open",
"is_fetch_query": True,
"priority": "P1,P3",
"tags": "1,2"
}
mock_client = OpsGenieV3.Client(base_url="")
res = mock_client.build_query(args)
assert (res == "createdAt < 147039484114 AND status=open AND priority: (P1 OR P3) AND tag: (1 OR 2)")
def test_build_query_not_fetch():
"""
Given:
- An app client object
- args
- is_fetch_query = False
When:
- Calling function build_query
Then:
- Ensure the return data is correct
"""
args = {
"query": "createdAt < 147039484114",
"status": "Open",
"is_fetch_query": False,
"priority": "P1,P3",
"tags": "1,2"
}
mock_client = OpsGenieV3.Client(base_url="")
res = mock_client.build_query(args)
assert (res == "createdAt < 147039484114")
def test_build_query_not_fetch_without_query():
"""
Given:
- An app client object
- args
- is_fetch_query = False
When:
- Calling function build_query
Then:
- Ensure the return data is correct
"""
args = {
"status": "Open",
"is_fetch_query": False,
"priority": "P1,P3",
"tags": "1,2"
}
mock_client = OpsGenieV3.Client(base_url="")
res = mock_client.build_query(args)
assert (res == "status=open AND priority: (P1 OR P3) AND tag: (1 OR 2)")
def test_responders_to_json_empty_value():
"""
Given:
- An app client object
- responders = {}
When:
- Calling function responders_to_json
Then:
- Ensure the return data is correct
"""
mock_client = OpsGenieV3.Client(base_url="")
res = mock_client.responders_to_json(responders={},
responder_key="responder")
assert (res == {})
| 36.523861 | 130 | 0.640891 |
041a22471dd79a6cbe232b3a793ccf31afa7baf9
| 2,747 |
py
|
Python
|
fhirclient/r4models/population.py
|
Healthedata1/Flask-PL
|
88a2f40ca430c4cbb9fbded7fc92fdc166ebb9f1
|
[
"MIT"
] | null | null | null |
fhirclient/r4models/population.py
|
Healthedata1/Flask-PL
|
88a2f40ca430c4cbb9fbded7fc92fdc166ebb9f1
|
[
"MIT"
] | null | null | null |
fhirclient/r4models/population.py
|
Healthedata1/Flask-PL
|
88a2f40ca430c4cbb9fbded7fc92fdc166ebb9f1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b (http://hl7.org/fhir/StructureDefinition/Population) on 2019-05-07.
# 2019, SMART Health IT.
from . import backboneelement
class Population(backboneelement.BackboneElement):
""" A definition of a set of people that apply to some clinically related
context, for example people contraindicated for a certain medication.
    A population of people with some set of grouping criteria.
"""
resource_type = "Population"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.ageCodeableConcept = None
""" The age of the specific population.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.ageRange = None
""" The age of the specific population.
Type `Range` (represented as `dict` in JSON). """
self.gender = None
""" The gender of the specific population.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.physiologicalCondition = None
""" The existing physiological conditions of the specific population to
which this applies.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.race = None
""" Race of the specific population.
Type `CodeableConcept` (represented as `dict` in JSON). """
super(Population, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Population, self).elementProperties()
js.extend([
("ageCodeableConcept", "ageCodeableConcept", codeableconcept.CodeableConcept, False, "age", False),
("ageRange", "ageRange", range.Range, False, "age", False),
("gender", "gender", codeableconcept.CodeableConcept, False, None, False),
("physiologicalCondition", "physiologicalCondition", codeableconcept.CodeableConcept, False, None, False),
("race", "race", codeableconcept.CodeableConcept, False, None, False),
])
return js
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import range
except ImportError:
range = sys.modules[__package__ + '.range']
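# Hedged usage sketch (not part of the generated file; assumes the module is
# imported inside the fhirclient package so the relative imports resolve):
#
#     from fhirclient.r4models.population import Population
#     pop = Population({'gender': {'text': 'female'}})
#     pop.as_json()   # -> {'gender': {'text': 'female'}}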
| 38.690141 | 119 | 0.631962 |
f0c7045b0199ef4521f850d201b9ea515845de52
| 34,752 |
py
|
Python
|
packages/watchmen-data-kernel/src/watchmen_data_kernel/storage_bridge/ask_from_memory.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-data-kernel/src/watchmen_data_kernel/storage_bridge/ask_from_memory.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-data-kernel/src/watchmen_data_kernel/storage_bridge/ask_from_memory.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
from abc import abstractmethod
from datetime import date, datetime, time
from decimal import Decimal
from typing import Any, Callable, List, Optional, Tuple, Union
from watchmen_auth import PrincipalService
from watchmen_data_kernel.common import ask_all_date_formats, ask_time_formats, DataKernelException
from watchmen_data_kernel.meta import TopicService
from watchmen_data_kernel.utils import MightAVariable, parse_function_in_variable, parse_variable
from watchmen_model.admin import Conditional, Factor, Topic
from watchmen_model.common import ComputedParameter, ConstantParameter, Parameter, ParameterComputeType, \
ParameterCondition, ParameterExpression, ParameterExpressionOperator, ParameterJoint, ParameterJointType, \
TopicFactorParameter, VariablePredefineFunctions
from watchmen_utilities import ArrayHelper, get_current_time_in_seconds, get_day_of_month, get_day_of_week, \
get_half_year, get_month, get_quarter, get_week_of_month, get_week_of_year, get_year, greater_or_equals_date, \
greater_or_equals_decimal, greater_or_equals_time, is_blank, is_date, is_date_or_time_instance, is_empty, \
is_not_empty, is_numeric_instance, less_or_equals_date, less_or_equals_decimal, less_or_equals_time, \
translate_date_format_to_memory, try_to_decimal, value_equals, value_not_equals
from .utils import always_none, compute_date_diff, create_from_previous_trigger_data, \
create_get_from_variables_with_prefix, create_previous_trigger_data, create_snowflake_generator, \
create_static_str, get_date_from_variables, get_value_from, test_date
from .variables import PipelineVariables
def get_topic_service(principal_service: PrincipalService) -> TopicService:
return TopicService(principal_service)
# noinspection PyUnusedLocal
def always_true(variables: PipelineVariables, principal_service: PrincipalService) -> bool:
return True
class ParsedMemoryCondition:
def __init__(self, condition: ParameterCondition, principal_service: PrincipalService):
self.condition = condition
self.parse(condition, principal_service)
@abstractmethod
def parse(self, condition: ParameterCondition, principal_service: PrincipalService) -> None:
pass
@abstractmethod
def run(self, variables: PipelineVariables, principal_service: PrincipalService) -> bool:
pass
class ParsedMemoryParameter:
def __init__(self, parameter: Optional[Parameter], principal_service: PrincipalService):
self.parameter = parameter
self.parse(parameter, principal_service)
@abstractmethod
def parse(self, parameter: Parameter, principal_service: PrincipalService) -> None:
pass
@abstractmethod
def value(self, variables: PipelineVariables, principal_service: PrincipalService) -> Any:
pass
class NoopMemoryParameter(ParsedMemoryParameter):
def parse(self, parameter: Parameter, principal_service: PrincipalService) -> None:
"""
do nothing
"""
pass
def value(self, variables: PipelineVariables, principal_service: PrincipalService) -> Any:
"""
always returns none
"""
return None
def parse_condition_in_memory(
condition: Optional[ParameterCondition], principal_service: PrincipalService) -> ParsedMemoryCondition:
if condition is None:
raise DataKernelException('Condition cannot be none.')
if isinstance(condition, ParameterJoint):
return ParsedMemoryJoint(condition, principal_service)
elif isinstance(condition, ParameterExpression):
return ParsedMemoryExpression(condition, principal_service)
else:
raise DataKernelException(f'Condition[{condition.dict()}] is not supported.')
def parse_parameter_in_memory(
parameter: Optional[Parameter], principal_service: PrincipalService) -> ParsedMemoryParameter:
if parameter is None:
return NoopMemoryParameter(None, principal_service)
elif isinstance(parameter, TopicFactorParameter):
return ParsedMemoryTopicFactorParameter(parameter, principal_service)
elif isinstance(parameter, ConstantParameter):
return ParsedMemoryConstantParameter(parameter, principal_service)
elif isinstance(parameter, ComputedParameter):
return ParsedMemoryComputedParameter(parameter, principal_service)
else:
raise DataKernelException(f'Parameter[{parameter.dict()}] is not supported.')
class ParsedMemoryJoint(ParsedMemoryCondition):
jointType: ParameterJointType = ParameterJointType.AND
filters: List[ParsedMemoryCondition] = []
def parse(self, condition: ParameterJoint, principal_service: PrincipalService) -> None:
self.jointType = ParameterJointType.OR \
if condition.jointType == ParameterJointType.OR else ParameterJointType.AND
self.filters = ArrayHelper(condition.filters) \
.map(lambda x: parse_condition_in_memory(x, principal_service)).to_list()
def run(self, variables: PipelineVariables, principal_service: PrincipalService) -> bool:
if self.jointType == ParameterJointType.OR:
return ArrayHelper(self.filters).some(lambda x: x.run(variables, principal_service))
else:
			# ParameterJointType.AND, or treated as AND when the joint type is not given
return ArrayHelper(self.filters).every(lambda x: x.run(variables, principal_service))
class ParsedMemoryExpression(ParsedMemoryCondition):
left: Optional[ParsedMemoryParameter] = None
operator: Optional[ParameterExpressionOperator] = None
right: Optional[ParsedMemoryParameter] = None
def parse(self, condition: ParameterExpression, principal_service: PrincipalService) -> None:
self.left = parse_parameter_in_memory(condition.left, principal_service)
self.operator = condition.operator
self.right = parse_parameter_in_memory(condition.right, principal_service)
def raise_cannot_compare(self, one: Any, another: Any) -> None:
raise DataKernelException(
f'Comparison of [none|int|float|decimal|date|time|datetime] are supported, '
f'current are [one={one}, another={another}].')
# noinspection PyMethodMayBeStatic
def equals(self, one: Any, another: Any) -> bool:
return value_equals(one, another, ask_time_formats(), ask_all_date_formats())
# noinspection PyMethodMayBeStatic
def not_equals(self, one: Any, another: Any) -> bool:
return value_not_equals(one, another, ask_time_formats(), ask_all_date_formats())
# noinspection PyMethodMayBeStatic
def try_compare(self, func: Callable[[], Tuple[bool, bool]], or_raise: Callable[[], None]) -> bool:
parsed, result = func()
if parsed:
return result
else:
or_raise()
# noinspection PyMethodMayBeStatic
def less_than(self, one: Any, another: Any) -> bool:
if one is None:
if another is None:
return False
elif is_numeric_instance(another) or is_date_or_time_instance(another):
return True
elif another is None:
if is_numeric_instance(one) or is_date_or_time_instance(one):
return False
elif isinstance(one, int) or isinstance(one, float) or isinstance(one, Decimal):
return self.try_compare(
lambda: less_or_equals_decimal(one, another, False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, int) or isinstance(another, float) or isinstance(another, Decimal):
return self.try_compare(
lambda: greater_or_equals_decimal(one, another, True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, time):
# compare time
return self.try_compare(
lambda: less_or_equals_time(one, another, ask_time_formats(), False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, time):
# compare time
return self.try_compare(
lambda: greater_or_equals_time(another, one, ask_time_formats(), True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, datetime) or isinstance(one, date):
# compare datetime or date
return self.try_compare(
lambda: less_or_equals_date(another, one, ask_all_date_formats(), False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, datetime) or isinstance(another, date):
# compare datetime or date
return self.try_compare(
lambda: greater_or_equals_date(another, one, ask_all_date_formats(), True),
lambda: self.raise_cannot_compare(one, another))
self.raise_cannot_compare(one, another)
# noinspection PyMethodMayBeStatic
def less_than_or_equals(self, one: Any, another: Any) -> bool:
if one is None:
if another is None or is_numeric_instance(another) or is_date_or_time_instance(another):
return True
elif another is None:
if is_numeric_instance(one) or is_date_or_time_instance(one):
return False
elif isinstance(one, int) or isinstance(one, float) or isinstance(one, Decimal):
return self.try_compare(
lambda: less_or_equals_decimal(one, another, True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, int) or isinstance(another, float) or isinstance(another, Decimal):
return self.try_compare(
lambda: greater_or_equals_decimal(one, another, False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, time):
# compare time
return self.try_compare(
lambda: less_or_equals_time(one, another, ask_time_formats(), True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, time):
# compare time
return self.try_compare(
lambda: greater_or_equals_time(another, one, ask_time_formats(), False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, datetime) or isinstance(one, date):
# compare datetime or date
return self.try_compare(
lambda: less_or_equals_date(another, one, ask_all_date_formats(), True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, datetime) or isinstance(another, date):
# compare datetime or date
return self.try_compare(
lambda: greater_or_equals_date(another, one, ask_all_date_formats(), False),
lambda: self.raise_cannot_compare(one, another))
self.raise_cannot_compare(one, another)
# noinspection PyMethodMayBeStatic
def greater_than(self, one: Any, another: Any) -> bool:
if one is None:
if another is None or is_numeric_instance(another) or is_date_or_time_instance(another):
return False
elif another is None:
if is_numeric_instance(one) or is_date_or_time_instance(one):
return True
elif isinstance(one, int) or isinstance(one, float) or isinstance(one, Decimal):
return self.try_compare(
lambda: greater_or_equals_decimal(one, another, False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, int) or isinstance(another, float) or isinstance(another, Decimal):
return self.try_compare(
lambda: less_or_equals_decimal(one, another, True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, time):
# compare time
return self.try_compare(
lambda: greater_or_equals_time(one, another, ask_time_formats(), False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, time):
# compare time
return self.try_compare(
lambda: less_or_equals_time(another, one, ask_time_formats(), True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, datetime) or isinstance(one, date):
# compare datetime or date
return self.try_compare(
lambda: greater_or_equals_date(another, one, ask_all_date_formats(), False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, datetime) or isinstance(another, date):
# compare datetime or date
return self.try_compare(
lambda: less_or_equals_date(another, one, ask_all_date_formats(), True),
lambda: self.raise_cannot_compare(one, another))
self.raise_cannot_compare(one, another)
# noinspection PyMethodMayBeStatic
def greater_than_or_equals(self, one: Any, another: Any) -> bool:
if one is None:
if another is None:
return True
elif is_numeric_instance(another) or is_date_or_time_instance(another):
return False
elif another is None:
if is_numeric_instance(one) or is_date_or_time_instance(one):
return True
elif isinstance(one, int) or isinstance(one, float) or isinstance(one, Decimal):
return self.try_compare(
lambda: greater_or_equals_decimal(one, another, True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, int) or isinstance(another, float) or isinstance(another, Decimal):
return self.try_compare(
lambda: less_or_equals_decimal(one, another, False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, time):
# compare time
return self.try_compare(
lambda: greater_or_equals_time(one, another, ask_time_formats(), True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, time):
# compare time
return self.try_compare(
lambda: less_or_equals_time(another, one, ask_time_formats(), False),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(one, datetime) or isinstance(one, date):
# compare datetime or date
return self.try_compare(
lambda: greater_or_equals_date(another, one, ask_all_date_formats(), True),
lambda: self.raise_cannot_compare(one, another))
elif isinstance(another, datetime) or isinstance(another, date):
# compare datetime or date
return self.try_compare(
lambda: less_or_equals_date(another, one, ask_all_date_formats(), False),
lambda: self.raise_cannot_compare(one, another))
self.raise_cannot_compare(one, another)
# noinspection PyMethodMayBeStatic
def exists(self, one: Any, another: Any) -> bool:
if another is None:
return False
elif isinstance(another, list):
return ArrayHelper(another).some(lambda x: self.equals(x, one))
elif isinstance(another, str):
if is_blank(another):
return False
return ArrayHelper(another.split(',')).some(lambda x: self.equals(x, one))
else:
raise DataKernelException(
f'Comparison of [none|int|float|decimal|date|time|datetime] in [list|comma joined string] are supported, '
f'current are [one={one}, another={another}].')
def not_exists(self, one: Any, another: Any) -> bool:
return not self.exists(one, another)
def run(self, variables: PipelineVariables, principal_service: PrincipalService) -> bool:
left_value = self.left.value(variables, principal_service)
if self.operator == ParameterExpressionOperator.EMPTY:
return is_empty(left_value)
elif self.operator == ParameterExpressionOperator.NOT_EMPTY:
return is_not_empty(left_value)
right_value = self.right.value(variables, principal_service)
if self.operator == ParameterExpressionOperator.EQUALS:
return self.equals(left_value, right_value)
elif self.operator == ParameterExpressionOperator.NOT_EQUALS:
return self.not_equals(left_value, right_value)
elif self.operator == ParameterExpressionOperator.LESS:
return self.less_than(left_value, right_value)
elif self.operator == ParameterExpressionOperator.LESS_EQUALS:
return self.less_than_or_equals(left_value, right_value)
elif self.operator == ParameterExpressionOperator.MORE:
return self.greater_than(left_value, right_value)
elif self.operator == ParameterExpressionOperator.MORE_EQUALS:
return self.greater_than_or_equals(left_value, right_value)
elif self.operator == ParameterExpressionOperator.IN:
return self.exists(left_value, right_value)
elif self.operator == ParameterExpressionOperator.NOT_IN:
return self.not_exists(left_value, right_value)
else:
raise DataKernelException(
f'Operator[{self.operator}] is not supported, found from expression[{self.condition.dict()}].')
def create_value_getter_from_current_data(name: str) -> Callable[[PipelineVariables, PrincipalService], Any]:
return lambda variables, principal_service: variables.find_from_current_data(name)
def create_value_recursive_getter_from_current_data(
name: str, names: List[str]
) -> Callable[[PipelineVariables, PrincipalService], Any]:
return lambda variables, principal_service: \
get_value_from(name, names, lambda x: variables.find_from_current_data(x), variables.is_list_on_trigger)
def create_ask_factor_value(topic: Topic, factor: Factor) -> Callable[[PipelineVariables, PrincipalService], Any]:
name = factor.name
if is_blank(name):
raise DataKernelException(f'Name of factor[id={factor.factorId}, topicId={topic.topicId}] not declared.')
names = name.strip().split('.')
	# topic factor parameter always retrieves data from the current trigger data
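	# e.g. a flat name like 'amount' is a direct lookup below, while a dotted
	# name like 'order.items.count' walks the nested trigger data recursively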
if len(names) == 1:
return create_value_getter_from_current_data(names[0])
else:
return create_value_recursive_getter_from_current_data(name, names)
class ParsedMemoryTopicFactorParameter(ParsedMemoryParameter):
topic: Topic = None
factor: Factor = None
askValue: Callable[[PipelineVariables, PrincipalService], Any] = None
def parse(self, parameter: TopicFactorParameter, principal_service: PrincipalService) -> None:
if is_blank(parameter.topicId):
raise DataKernelException(f'Topic not declared.')
topic_service = get_topic_service(principal_service)
topic: Optional[Topic] = topic_service.find_by_id(parameter.topicId)
if topic is None:
raise DataKernelException(f'Topic[id={parameter.topicId}] not found.')
self.topic = topic
if is_blank(parameter.factorId):
raise DataKernelException(f'Factor not declared.')
factor: Optional[Factor] = ArrayHelper(topic.factors).find(lambda x: x.factorId == parameter.factorId)
if factor is None:
raise DataKernelException(
f'Factor[id={parameter.factorId}] in topic[id={topic.topicId}, name={topic.name}] not found.')
self.factor = factor
self.askValue = create_ask_factor_value(topic, factor)
def value(self, variables: PipelineVariables, principal_service: PrincipalService) -> Any:
return self.askValue(variables, principal_service)
# noinspection DuplicatedCode
def create_date_diff(
prefix: str, variable_name: str, function: VariablePredefineFunctions
) -> Callable[[PipelineVariables, PrincipalService], Any]:
parsed_params = parse_function_in_variable(variable_name, function.value, 2)
end_variable_name = parsed_params[0]
start_variable_name = parsed_params[1]
end_parsed, end_date = test_date(end_variable_name)
start_parsed, start_date = test_date(start_variable_name)
if end_parsed and start_parsed:
# noinspection PyUnusedLocal
def action(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
return compute_date_diff(function, end_date, start_date, variable_name)
else:
# noinspection DuplicatedCode
def action(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
if not end_parsed:
e_parsed, e_value, e_date = get_date_from_variables(variables, principal_service, end_variable_name)
if not e_parsed:
raise DataKernelException(f'Value[{e_value}] cannot be parsed to date or datetime.')
else:
e_date = end_date
if not start_parsed:
s_parsed, s_value, s_date = get_date_from_variables(variables, principal_service, start_variable_name)
if not s_parsed:
raise DataKernelException(f'Value[{s_value}] cannot be parsed to date or datetime.')
else:
s_date = start_date
return compute_date_diff(function, e_date, s_date, variable_name)
def run(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
value = action(variables, principal_service)
return value if is_blank(prefix) else f'{prefix}{value}'
return run
def create_date_format(prefix: str, variable_name: str) -> Callable[[PipelineVariables, PrincipalService], Any]:
parsed_params = parse_function_in_variable(variable_name, VariablePredefineFunctions.DATE_FORMAT.value, 2)
variable_name = parsed_params[0]
date_format = parsed_params[1]
if is_blank(date_format):
raise DataKernelException(f'Date format[{date_format}] cannot be recognized.')
date_format = translate_date_format_to_memory(date_format)
parsed, parsed_date = test_date(variable_name)
if parsed:
# noinspection PyUnusedLocal
def action(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
return parsed_date.strftime(date_format)
else:
def action(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
date_parsed, value, a_date = get_date_from_variables(variables, principal_service, variable_name)
if not date_parsed:
raise DataKernelException(f'Value[{value}] cannot be parsed to date or datetime.')
return a_date.strftime(date_format)
def run(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
value = action(variables, principal_service)
return value if is_blank(prefix) else f'{prefix}{value}'
return run
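# Hedged example (token syntax inferred from the parse_function_in_variable
# call above): a constant segment like '{&dateFormat(orderDate,YYYY/MM/DD)}'
# resolves 'orderDate' from the pipeline variables and renders it with the
# memory-side translation of 'YYYY/MM/DD'.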
# noinspection DuplicatedCode
def create_run_constant_segment(variable: MightAVariable) -> Callable[[PipelineVariables, PrincipalService], Any]:
prefix = variable.text
variable_name = variable.variable
if variable_name == VariablePredefineFunctions.NEXT_SEQ.value:
return create_snowflake_generator(prefix)
elif variable_name == VariablePredefineFunctions.NOW.value:
return lambda variables, principal_service: get_current_time_in_seconds()
elif variable_name.startswith(VariablePredefineFunctions.YEAR_DIFF.value):
return create_date_diff(prefix, variable_name, VariablePredefineFunctions.YEAR_DIFF)
elif variable_name.startswith(VariablePredefineFunctions.MONTH_DIFF.value):
return create_date_diff(prefix, variable_name, VariablePredefineFunctions.MONTH_DIFF)
elif variable_name.startswith(VariablePredefineFunctions.DAY_DIFF.value):
return create_date_diff(prefix, variable_name, VariablePredefineFunctions.DAY_DIFF)
elif variable_name.startswith(VariablePredefineFunctions.DATE_FORMAT.value):
return create_date_format(prefix, variable_name)
elif variable_name.startswith(VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value):
if variable_name == VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value:
if is_blank(prefix):
return create_previous_trigger_data()
else:
raise DataKernelException(f'Previous trigger data is a dict, cannot prefix by a string[{prefix}].')
length = len(VariablePredefineFunctions.FROM_PREVIOUS_TRIGGER_DATA.value)
if len(variable_name) < length + 2 or variable_name[length:length + 1] != '.':
raise DataKernelException(f'Constant[{variable_name}] is not supported.')
return create_from_previous_trigger_data(prefix, variable_name[length + 1:])
else:
return create_get_from_variables_with_prefix(prefix, variable_name)
def create_run_constant_segments(
functions: List[Callable[[PipelineVariables, PrincipalService], Any]]
) -> Callable[[PipelineVariables, PrincipalService], Any]:
def action(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
return ArrayHelper(functions).map(lambda x: x(variables, principal_service)).join('')
return action
def create_ask_constant_value(variables: List[MightAVariable]) -> Callable[[PipelineVariables, PrincipalService], Any]:
if len(variables) == 1:
if variables[0].has_variable():
return create_run_constant_segment(variables[0])
else:
return create_static_str(variables[0].text)
else:
return create_run_constant_segments(
ArrayHelper(variables).map(lambda x: create_run_constant_segment(x)).to_list())
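# Hedged example ('&nextSeq' token assumed from VariablePredefineFunctions):
# a constant like 'ORD-{&nextSeq}' parses into a single segment whose text
# prefix is 'ORD-' and whose variable routes to the snowflake generator, so
# each evaluation yields 'ORD-<fresh id>'.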
class ParsedMemoryConstantParameter(ParsedMemoryParameter):
askValue: Callable[[PipelineVariables, PrincipalService], Any] = None
# noinspection DuplicatedCode
def parse(self, parameter: ConstantParameter, principal_service: PrincipalService) -> None:
value = parameter.value
if value is None:
self.askValue = always_none
elif len(value) == 0:
self.askValue = always_none
elif is_blank(value):
self.askValue = create_static_str(value)
elif '{' not in value or '}' not in value:
self.askValue = create_static_str(value)
else:
_, variables = parse_variable(value)
self.askValue = create_ask_constant_value(variables)
def value(self, variables: PipelineVariables, principal_service: PrincipalService) -> Any:
return self.askValue(variables, principal_service)
def reducer_add(one: Decimal, another: Decimal) -> Decimal:
return one + another
def reducer_subtract(one: Decimal, another: Decimal) -> Decimal:
return one - another
def reducer_multiply(one: Decimal, another: Decimal) -> Decimal:
return one * another
def reducer_divide(one: Decimal, another: Decimal) -> Decimal:
return one / another
def reducer_modulus(one: Decimal, another: Decimal) -> Decimal:
return one % another
def parse_to_decimal(value: Any, fallback_value: Callable[[], Optional[Decimal]] = lambda: None) -> Optional[Decimal]:
decimal_value = try_to_decimal(value)
return fallback_value() if decimal_value is None else decimal_value
def create_numeric_reducer(
parameters: List[ParsedMemoryParameter], reduce_func: Callable[[Decimal, Decimal], Decimal], numeric_name: str,
		fallback_first: Callable[[], Optional[Decimal]] = lambda: None,
		fallback_rest: Callable[[], Optional[Decimal]] = lambda: None
) -> Callable[[PipelineVariables, PrincipalService], Any]:
def reduce(variables: PipelineVariables, principal_service: PrincipalService) -> Decimal:
first_value = parameters[0].value(variables, principal_service)
first_decimal_value = parse_to_decimal(first_value, fallback_first)
if first_decimal_value is None:
raise DataKernelException(
f'{numeric_name} [value={first_value}, type={type(first_value)}] is not supported.')
result_decimal_value = first_decimal_value
rest_parameters = parameters[1:]
for rest_parameter in rest_parameters:
rest_value = rest_parameter.value(variables, principal_service)
rest_decimal_value = parse_to_decimal(rest_value, fallback_rest)
if rest_decimal_value is None:
raise DataKernelException(
f'{numeric_name} [value={rest_value}, type={type(rest_value)}] is not supported.')
result_decimal_value = reduce_func(result_decimal_value, rest_decimal_value)
return result_decimal_value
return reduce
def create_datetime_func(
parameter: ParsedMemoryParameter, func: Callable[[date], int]
) -> Callable[[PipelineVariables, PrincipalService], Any]:
def get_part_of_datetime(variables: PipelineVariables, principal_service: PrincipalService) -> Optional[int]:
value = parameter.value(variables, principal_service)
if value is None:
return None
if isinstance(value, date):
return func(value)
parsed, dt_value = is_date(value, ask_all_date_formats())
if not parsed:
raise DataKernelException(f'Cannot parse value[{value}] to datetime.')
if dt_value is None:
return None
return func(dt_value)
return get_part_of_datetime
def create_case_then(
cases: List[Tuple[PrerequisiteTest, ParsedMemoryParameter]], anyway: Optional[ParsedMemoryParameter]
) -> Callable[[PipelineVariables, PrincipalService], Any]:
def run_case_then(variables: PipelineVariables, principal_service: PrincipalService) -> Any:
found: Optional[Tuple[PrerequisiteTest, ParsedMemoryParameter]] = \
ArrayHelper(cases).find(lambda x: x[0](variables, principal_service))
if found is not None:
# find a route
return found[1].value(variables, principal_service)
elif anyway is not None:
# return anyway value when no route found
return anyway.value(variables, principal_service)
else:
# return none when no route found and no anyway route
return None
return run_case_then
def parse_conditional_parameter_in_memory(
parameter: Parameter, principal_service: PrincipalService
) -> Tuple[PrerequisiteTest, ParsedMemoryParameter]:
return \
parse_prerequisite_in_memory(parameter, principal_service), \
parse_parameter_in_memory(parameter, principal_service)
def assert_parameter_count(
func_name: str, parameters: Optional[List[Parameter]],
min_count: int = 1, max_count: int = 9999, allow_undefined: bool = False
) -> None:
if parameters is None:
raise DataKernelException(f'Parameter not found on computation[{func_name}].')
count = len(parameters)
if count < min_count:
raise DataKernelException(
f'At least {min_count} parameter(s) on computation[{func_name}], current is [{parameters}].')
if count > max_count:
raise DataKernelException(
f'At most {max_count} parameter(s) on computation[{func_name}], current is [{parameters}].')
if not allow_undefined:
found = ArrayHelper(parameters).some(lambda x: x is None)
if found:
raise DataKernelException(
f'None parameter is not allowed on computation[{func_name}], current is [{parameters}].')
class ParsedMemoryComputedParameter(ParsedMemoryParameter):
askValue: Callable[[PipelineVariables, PrincipalService], Any] = None
def parse(self, parameter: ComputedParameter, principal_service: PrincipalService) -> None:
compute_type = parameter.type
if is_blank(compute_type) or compute_type == ParameterComputeType.NONE:
raise DataKernelException(f'Compute type not declared.')
def parse_parameter(param: Parameter) -> ParsedMemoryParameter:
return parse_parameter_in_memory(param, principal_service)
def parse_sub_parameters(param: ComputedParameter) -> List[ParsedMemoryParameter]:
return ArrayHelper(param.parameters).map(parse_parameter).to_list()
if compute_type == ParameterComputeType.ADD:
assert_parameter_count('add', parameter.parameters, 2)
# treat none value as 0
self.askValue = create_numeric_reducer(
parse_sub_parameters(parameter), reducer_add, 'Add', lambda: Decimal(0), lambda: Decimal(0))
elif compute_type == ParameterComputeType.SUBTRACT:
assert_parameter_count('subtract', parameter.parameters, 2)
# treat none value as 0
self.askValue = create_numeric_reducer(
parse_sub_parameters(parameter), reducer_subtract, 'Subtract', lambda: Decimal(0), lambda: Decimal(0))
elif compute_type == ParameterComputeType.MULTIPLY:
assert_parameter_count('multiply', parameter.parameters, 2)
self.askValue = create_numeric_reducer(parse_sub_parameters(parameter), reducer_multiply, 'Multiply')
elif compute_type == ParameterComputeType.DIVIDE:
assert_parameter_count('divide', parameter.parameters, 2)
self.askValue = create_numeric_reducer(parse_sub_parameters(parameter), reducer_divide, 'Divide')
elif compute_type == ParameterComputeType.MODULUS:
assert_parameter_count('modulus', parameter.parameters, 2)
self.askValue = create_numeric_reducer(parse_sub_parameters(parameter), reducer_modulus, 'Modulus')
elif compute_type == ParameterComputeType.YEAR_OF:
assert_parameter_count('year-of', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_year)
elif compute_type == ParameterComputeType.HALF_YEAR_OF:
assert_parameter_count('half-year-of', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_half_year)
elif compute_type == ParameterComputeType.QUARTER_OF:
assert_parameter_count('quarter-of', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_quarter)
elif compute_type == ParameterComputeType.MONTH_OF:
assert_parameter_count('month-of', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_month)
elif compute_type == ParameterComputeType.WEEK_OF_YEAR:
assert_parameter_count('week-of-year', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_week_of_year)
elif compute_type == ParameterComputeType.WEEK_OF_MONTH:
assert_parameter_count('week-of-month', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_week_of_month)
elif compute_type == ParameterComputeType.DAY_OF_MONTH:
assert_parameter_count('day-of-month', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_day_of_month)
elif compute_type == ParameterComputeType.DAY_OF_WEEK:
assert_parameter_count('day-of-week', parameter.parameters, 1, 1)
self.askValue = create_datetime_func(parse_parameter(parameter.parameters[0]), get_day_of_week)
elif compute_type == ParameterComputeType.CASE_THEN:
# noinspection DuplicatedCode
assert_parameter_count('case-then', parameter.parameters, 1)
cases = parameter.parameters
if cases is None or len(cases) == 0:
raise DataKernelException(f'Case not declared in case then computation.')
anyways = ArrayHelper(cases).filter(lambda x: not x.conditional).to_list()
if len(anyways) > 1:
raise DataKernelException(
f'Multiple anyway routes declared in case then computation[{parameter.dict()}].')
def parse_route(param: Parameter) -> Tuple[PrerequisiteTest, ParsedMemoryParameter]:
return parse_conditional_parameter_in_memory(param, principal_service)
# noinspection DuplicatedCode
routes = ArrayHelper(cases).filter(lambda x: x.conditional).map(parse_route).to_list()
anyway = anyways[0] if len(anyways) == 1 else None
if anyway is not None:
anyway_route = parse_parameter(anyway)
else:
anyway_route = None
self.askValue = create_case_then(routes, anyway_route)
else:
raise DataKernelException(f'Compute type[{compute_type}] is not supported.')
def value(self, variables: PipelineVariables, principal_service: PrincipalService) -> Any:
return self.askValue(variables, principal_service)
def ask_condition(
condition: ParsedMemoryCondition, variables: PipelineVariables,
principal_service: PrincipalService) -> bool:
return condition.run(variables, principal_service)
PrerequisiteTest = Callable[[PipelineVariables, PrincipalService], bool]
def create_ask_prerequisite(condition: ParsedMemoryCondition) -> PrerequisiteTest:
def ask(variables: PipelineVariables, principal_service: PrincipalService) -> bool:
return ask_condition(condition, variables, principal_service)
return ask
def parse_prerequisite_in_memory(
conditional: Union[Conditional, Parameter], principal_service: PrincipalService) -> PrerequisiteTest:
if conditional.conditional is None or not conditional.conditional:
# no condition is needed
return always_true
joint = conditional.on
if joint is None:
# no condition defined
return always_true
filters = joint.filters
if filters is None or len(filters) == 0:
# no filters defined
return always_true
condition = ParsedMemoryJoint(joint, principal_service)
return create_ask_prerequisite(condition)
PrerequisiteDefinedAs = Callable[[], Any]
# noinspection PyUnusedLocal
def parse_prerequisite_defined_as(
conditional: Conditional, principal_service: PrincipalService
) -> PrerequisiteDefinedAs:
defined_as = {
'conditional': False if conditional.conditional is None else conditional.conditional,
'on': None if conditional.on is None else conditional.on.dict()
}
return lambda: defined_as
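# Minimal, hedged self-check of the decimal plumbing above (assumes
# try_to_decimal returns None for non-numeric text); guarded so that
# importing this module stays side-effect free:
if __name__ == '__main__':
	assert parse_to_decimal('3.5') == Decimal('3.5')
	assert parse_to_decimal('abc', lambda: Decimal(0)) == Decimal(0)
	assert reducer_add(Decimal(1), Decimal(2)) == Decimal(3)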
| 43.494368 | 120 | 0.784703 |
a8da1d638b213d085a95457cfe7061b0eba7c69a
| 5,283 |
py
|
Python
|
src/onegov/agency/views/settings.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/agency/views/settings.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/agency/views/settings.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from wtforms_components import ColorField
from onegov.agency import _
from onegov.agency.app import AgencyApp
from onegov.core.security import Secret
from onegov.form import Form
from onegov.form.fields import ChosenSelectMultipleField
from onegov.org.models import Organisation
from onegov.org.views.settings import handle_generic_settings
from wtforms import BooleanField, IntegerField, RadioField
from wtforms.validators import Optional, NumberRange
class AgencySettingsForm(Form):
topmost_levels = 1, 2, 3
pdf_layout = RadioField(
label=_("PDF Layout"),
fieldset=_("Layout"),
default='default',
choices=[
('default', _("Default")),
('ar', "Kanton Appenzell Ausserrhoden"),
('zg', "Kanton Zug"),
('bs', "Kanton Basel-Stadt"),
],
)
root_pdf_page_break = RadioField(
        label=_('For root PDF, page break after every:'),
fieldset=_("Layout"),
choices=[
('1', _("1 Heading")),
('2', _("1.1 Heading")),
('3', _("1.1.1 Heading")),
],
default='1'
)
orga_pdf_page_break = RadioField(
label=_("For organisation PDF's, page after every:"),
fieldset=_("Layout"),
choices=[
('1', _("1 Heading")),
('2', _("1.1 Heading")),
('3', _("1.1.1 Heading")),
],
default='1'
)
link_color = ColorField(
label=_('PDF link color'),
fieldset=_("Layout")
)
underline_links = BooleanField(
label=_("Underline pdf links"),
fieldset=_("Layout")
)
agency_display = ChosenSelectMultipleField(
        label=_('Show additional agencies in search results'),
fieldset=_('Customize search results'),
description=_(
'Level 1 represents the root agencies, Level 2 their children'),
choices=[]
)
agency_phone_internal_digits = IntegerField(
label=_(
'Use the last digits as internal phone numbers '
'(leave empty to disable)'
),
fieldset=_('Customize search results'),
validators=[
NumberRange(min=1),
Optional()
],
)
agency_phone_internal_field = RadioField(
label=_('Field used for internal phone numbers'),
fieldset=_('Customize search results'),
choices=[
('phone_direct', _('Direct Phone Number')),
('phone', _('Phone')),
],
)
agency_path_display_on_people = BooleanField(
label=_('Show full agency path'),
        description=_("Always show the full path of the membership's agency"),
fieldset=_('People detail page'),
default=False
)
report_changes = BooleanField(
label=_("Users may report corrections"),
fieldset=_("Data"),
default=True,
)
def level_choice(self, lvl):
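        # builds one (value, label) choice per level, e.g. ('2', 'Level 2')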
return str(lvl), self.request.translate(
_('Level ${lvl}', mapping={'lvl': lvl}))
def on_request(self):
self.agency_display.choices = [
self.level_choice(lvl) for lvl in self.topmost_levels
]
def process_obj(self, obj):
super().process_obj(obj)
self.pdf_layout.data = obj.pdf_layout or 'default'
self.root_pdf_page_break.data = str(
obj.page_break_on_level_root_pdf or 1)
self.orga_pdf_page_break.data = str(
obj.page_break_on_level_org_pdf or 1)
self.report_changes.data = obj.meta.get('report_changes', True)
self.agency_display.data = [
str(num) for num in obj.agency_display_levels or []
]
self.agency_phone_internal_digits.data = \
obj.agency_phone_internal_digits
self.agency_phone_internal_field.data = \
obj.agency_phone_internal_field
self.agency_path_display_on_people.data = \
obj.agency_path_display_on_people
self.underline_links.data = obj.pdf_underline_links
self.link_color.data = obj.pdf_link_color or '#00538c'
def populate_obj(self, obj, *args, **kwargs):
super().populate_obj(obj, *args, **kwargs)
obj.pdf_layout = self.pdf_layout.data
obj.report_changes = self.report_changes.data
obj.page_break_on_level_root_pdf = int(self.root_pdf_page_break.data)
obj.page_break_on_level_org_pdf = int(self.orga_pdf_page_break.data)
obj.agency_display_levels = [
int(num) for num in self.agency_display.data
]
obj.agency_phone_internal_digits = \
self.agency_phone_internal_digits.data
obj.agency_phone_internal_field = \
self.agency_phone_internal_field.data
obj.agency_path_display_on_people = \
self.agency_path_display_on_people.data
obj.pdf_underline_links = self.underline_links.data
obj.pdf_link_color = self.link_color.data.get_hex()
@AgencyApp.form(
model=Organisation,
name='agency-settings',
template='form.pt',
permission=Secret,
form=AgencySettingsForm,
setting=_("Agencies"),
icon='fa-university'
)
def handle_agency_settings(self, request, form):
return handle_generic_settings(self, request, form, _("Agencies"))
| 31.825301 | 77 | 0.622752 |
7660cb245f36afec495ce9ce25dda06a886ab3ce
| 3,524 |
py
|
Python
|
test/test_npu/test_network_ops/test_amp_non_finite_check_and_unscale_.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-12-02T03:07:35.000Z
|
2021-12-02T03:07:35.000Z
|
test/test_npu/test_network_ops/test_amp_non_finite_check_and_unscale_.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-11-12T07:23:03.000Z
|
2021-11-12T08:28:13.000Z
|
test/test_npu/test_network_ops/test_amp_non_finite_check_and_unscale_.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class Test_AmpNonFiniteCheckAndUnscale_(TestCase):
def generate_data(self, min_d, max_d, shape, dtype, input3):
input1 = np.random.uniform(min_d, max_d, shape).astype(dtype)
input1 = torch.from_numpy(input1)
input2 = np.array([0.0]).astype(dtype)
input2 = torch.from_numpy(input2)
input3 = np.array([input3]).astype(dtype)
input3 = torch.from_numpy(input3)
return input1, input2, input3
def cpu_op_exec(self, input1, input2, input3):
input1 = input1.numpy()
input2 = input2.numpy()
input3 = input3.numpy()
input1 = np.multiply(input1, input3)
return input1
def npu_op_exec(self, input1, input2, input3):
input1 = input1.to("npu")
input2 = input2.to("npu")
input3 = input3.to("npu")
torch._amp_non_finite_check_and_unscale_(input1,input2,input3)
input1 = input1.to("cpu")
input1 = input1.numpy()
return input1
def test_AmpNonFiniteCheckAndUnscale_float32_case1(self, device):
input1, input2, input3 = self.generate_data(0, 100, (4, 3), np.float32, 1.5)
cpu_output = self.cpu_op_exec(input1, input2, input3)
npu_output = self.npu_op_exec(input1, input2, input3)
self.assertRtolEqual(cpu_output, npu_output)
def test_AmpNonFiniteCheckAndUnscale_float32_case2(self, device):
input1, input2, input3 = self.generate_data(0, 100, (2, 5, 6), np.float32, 3.7)
cpu_output = self.cpu_op_exec(input1, input2, input3)
npu_output = self.npu_op_exec(input1, input2, input3)
self.assertRtolEqual(cpu_output, npu_output)
def test_AmpNonFiniteCheckAndUnscale_float16_case1(self, device):
input1, input2, input3 = self.generate_data(0, 100, (5, 7), np.float16, 1.9)
input1 = input1.to(torch.float32)
input2 = input2.to(torch.float32)
input3 = input3.to(torch.float32)
cpu_output = self.cpu_op_exec(input1, input2, input3)
npu_output = self.npu_op_exec(input1, input2, input3)
self.assertRtolEqual(cpu_output, npu_output)
def test_AmpNonFiniteCheckAndUnscale_float16_case2(self, device):
input1, input2, input3 = self.generate_data(0, 100, (2, 8, 1), np.float16, 3.2)
input1 = input1.to(torch.float32)
input2 = input2.to(torch.float32)
input3 = input3.to(torch.float32)
cpu_output = self.cpu_op_exec(input1, input2, input3)
npu_output = self.npu_op_exec(input1, input2, input3)
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(Test_AmpNonFiniteCheckAndUnscale_, globals(), except_for='cpu')
if __name__ == '__main__':
run_tests()
| 44.05 | 93 | 0.702894 |
8c006a4b80e033f49eb4d9e74132a53d67c356b5
| 2,135 |
py
|
Python
|
Chapter10_RNN/rnnKeras2.py
|
thisisjako/UdemyTF
|
ee4102391ed6bd50f764955f732f5740425a9209
|
[
"MIT"
] | null | null | null |
Chapter10_RNN/rnnKeras2.py
|
thisisjako/UdemyTF
|
ee4102391ed6bd50f764955f732f5740425a9209
|
[
"MIT"
] | null | null | null |
Chapter10_RNN/rnnKeras2.py
|
thisisjako/UdemyTF
|
ee4102391ed6bd50f764955f732f5740425a9209
|
[
"MIT"
] | null | null | null |
import random
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import SimpleRNN
from tensorflow.keras.models import Sequential
np.random.seed(0)
tf.random.set_seed(0)
def sigmoid_fn(x: np.ndarray) -> np.ndarray:
    # logistic sigmoid; not used by the tanh-based SimpleRNN cell below,
    # but implemented so the helper is no longer a stub
    return 1.0 / (1.0 + np.exp(-x))
def tanh_fn(x: np.ndarray) -> np.ndarray:
    # hyperbolic tangent, the default SimpleRNN activation
    return np.tanh(x)
class SimpleRNNInference:
def __init__(
self,
rnn_layer: tf.keras.layers.SimpleRNN,
return_sequences: bool = False
) -> None:
self.return_sequences = return_sequences
self.rnn_layer = rnn_layer
self.W, self.U, self.b = self.rnn_layer.get_weights()
self.units = self.b.shape[0]
    def __call__(
        self,
        x: np.ndarray
    ) -> np.ndarray:
        # run the recurrence over every timestep of a single sample
        h = np.zeros(self.units)
        outputs = []
        for x_t in x:
            h = self.forward_step(x_t, h)
            outputs.append(h)
        if self.return_sequences:
            return np.array(outputs)  # shape: (num_timesteps, units)
        return np.array([outputs[-1]])  # shape: (1, units)
    def forward_step(self, x_t: np.ndarray, h: np.ndarray) -> np.ndarray:
        # h_new = tanh(x_t @ W + h @ U + b), Keras' default SimpleRNN cell
        return tanh_fn(np.matmul(x_t, self.W) + np.matmul(h, self.U) + self.b)
if __name__ == "__main__":
# data set shape = (num_samples, num_timesteps, num_features)
# input shape = (num_timesteps, num_features)
# If return_sequences == True:
# output shape = (num_timesteps, units)
# Else:
# output shape = (1, units)
x = np.random.normal(size=(1, 3, 2))
units = 4
return_sequences = False
# num_features = 2
# units = 4
# h_t shape = (4), (units)
# W shape = (2, 4), (num_features, units)
# U shape = (4, 4), (units, units)
# b shape = (4), (units)
#
# matmul(x, W) (2)*(2,4) => (4)
# matmul(h, U) (4)*(4,4) => (4)
# intern + b (4)+(4) => (4)
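    # Illustrative shape check for the algebra sketched above (zeros only,
    # purely to confirm the (units,) result shape):
    _h = np.zeros(4)
    _out = np.matmul(np.zeros(2), np.zeros((2, 4))) + np.matmul(_h, np.zeros((4, 4))) + np.zeros(4)
    assert _out.shape == (4,)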
model = Sequential()
model.add(SimpleRNN(units=units, return_sequences=return_sequences, input_shape=x.shape[1:]))
model.compile(loss="mse", optimizer="Adam")
# model.summary()
rnn = SimpleRNNInference(rnn_layer=model.layers[0], return_sequences=return_sequences)
    output_rnn_own = rnn(x[0])
print(output_rnn_own)
print("\n\n")
output_rnn_tf = model.predict(x[[0]])
    print(output_rnn_tf)
assert np.all(np.isclose(output_rnn_own - output_rnn_tf, 0.0))
| 27.727273 | 98 | 0.581265 |
8b2a51b38fa4d2a9cf7396735c9a74ee5188a508
| 160 |
py
|
Python
|
exercises/fr/test_03_14_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085 |
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/fr/test_03_14_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79 |
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/fr/test_03_14_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361 |
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
def test():
assert (
"for doc in nlp.pipe(TEXTS)" in __solution__
), "Itères-tu sur les docs générés par nlp.pipe ?"
__msg__.good("Joli !")
| 26.666667 | 54 | 0.60625 |
508c47f98c5d4951fc383b58161789e9c41ac1cb
| 31 |
py
|
Python
|
Trees/__init__.py
|
dileeppandey/hello-interview
|
78f6cf4e2da4106fd07f4bd86247026396075c69
|
[
"MIT"
] | null | null | null |
Trees/__init__.py
|
dileeppandey/hello-interview
|
78f6cf4e2da4106fd07f4bd86247026396075c69
|
[
"MIT"
] | null | null | null |
Trees/__init__.py
|
dileeppandey/hello-interview
|
78f6cf4e2da4106fd07f4bd86247026396075c69
|
[
"MIT"
] | 1 |
2020-02-12T16:57:46.000Z
|
2020-02-12T16:57:46.000Z
|
import Trees.TreeNode as Node
| 10.333333 | 29 | 0.806452 |
50e44e45b6ab915608bb6ef3d6fcf8ff9afcea26
| 608 |
py
|
Python
|
kts/core/backend/util.py
|
konodyuk/kts
|
3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7
|
[
"MIT"
] | 18 |
2019-02-14T13:10:07.000Z
|
2021-11-26T07:10:13.000Z
|
kts/core/backend/util.py
|
konodyuk/kts
|
3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7
|
[
"MIT"
] | 2 |
2019-02-17T14:06:42.000Z
|
2019-09-15T18:05:54.000Z
|
kts/core/backend/util.py
|
konodyuk/kts
|
3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7
|
[
"MIT"
] | 2 |
2019-09-15T13:12:42.000Z
|
2020-04-15T14:05:54.000Z
|
import sys
import ray
from kts.core.backend.address_manager import get_address_manager
from kts.core.frame import KTSFrame
def in_cli():
return sys.argv[0].endswith('kts')
def in_worker():
return sys.argv[0].endswith('default_worker.py')
def in_pytest():
return sys.argv[0].endswith('pytest')
def safe_put(kf: KTSFrame):
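    # reuse the object-store entry already registered for this frame hash;
    # otherwise put the frame and register the new object id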
address_manager = get_address_manager()
h = kf.hash()
if ray.get(address_manager.has.remote(h)):
oid = ray.get(address_manager.get.remote(h))
else:
oid = ray.put(kf)
address_manager.put.remote((h, oid, False))
return oid
| 20.266667 | 64 | 0.684211 |
e859338bb40c4c64c63da07aa9568c1eae514b14
| 464 |
py
|
Python
|
exam_recipes/exam_recipes/web/urls.py
|
trenev/softuni-python-web-basics
|
0fcf6b7f3389d06685d40615c376dc4027e772f2
|
[
"MIT"
] | 1 |
2022-03-03T10:16:14.000Z
|
2022-03-03T10:16:14.000Z
|
exam_recipes/exam_recipes/web/urls.py
|
trenev/softuni-python-web-basics
|
0fcf6b7f3389d06685d40615c376dc4027e772f2
|
[
"MIT"
] | null | null | null |
exam_recipes/exam_recipes/web/urls.py
|
trenev/softuni-python-web-basics
|
0fcf6b7f3389d06685d40615c376dc4027e772f2
|
[
"MIT"
] | null | null | null |
from django.urls import path
from exam_recipes.web.views import show_home, create_recipe, edit_recipe, delete_recipe, \
show_details_recipe
urlpatterns = (
path('', show_home, name='index'),
path('create/', create_recipe, name='create recipe'),
path('edit/<int:pk>/', edit_recipe, name='edit recipe'),
path('delete/<int:pk>/', delete_recipe, name='delete recipe'),
path('details/<int:pk>/', show_details_recipe, name='details recipe'),
)
| 35.692308 | 90 | 0.700431 |
fa6553d8428219658417b6dc6e87f04db546d2cc
| 9,450 |
py
|
Python
|
test/test_npu/test_network_ops/test_cdist.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-12-02T03:07:35.000Z
|
2021-12-02T03:07:35.000Z
|
test/test_npu/test_network_ops/test_cdist.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | 1 |
2021-11-12T07:23:03.000Z
|
2021-11-12T08:28:13.000Z
|
test/test_npu/test_network_ops/test_cdist.py
|
Ascend/pytorch
|
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import copy
import torch.nn as nn
import numpy as np
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class Testcdist(TestCase):
def generate_data(self, min_n, max_n, shape_x, shape_y, src_type):
np.random.seed(10086)
x1 = np.random.uniform(min_n, max_n, shape_x).astype(src_type)
x2 = np.random.uniform(min_n, max_n, shape_y).astype(src_type)
return x1, x2
def op_exec(self, x1, x2, p, device='cpu'):
is_fp16 = x1.dtype == np.float16
if device == 'cpu' and is_fp16:
x1 = x1.astype(np.float32)
x2 = x2.astype(np.float32)
x1 = torch.from_numpy(x1)
x2 = torch.from_numpy(x2)
x1 = x1.to(device)
x2 = x2.to(device)
y = torch.cdist(x1, x2, p)
y = y.cpu().numpy()
if device == 'cpu' and is_fp16:
y = y.astype(np.float16)
return y
def test_cdist_float16_1(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 64), (4, 64), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 0.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 0.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float16_2(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 0.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 0.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float16_3(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 1.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 1.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float16_4(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 1.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 1.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float16_5(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float16_6(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float16_7(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(3, 5, 500), (4, 500), np.float16)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_1(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 0.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 0.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_2(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 0.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 0.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_3(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 1.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 1.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_4(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 1.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 1.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_5(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_6(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 10), (4, 10), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_7(self, device):
npu_input1, npu_input2 = self.generate_data(-1, 1,
(5, 500), (3, 4, 500), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_8(self, device):
npu_input1, npu_input2 = self.generate_data(-100, 100,
(5, 100), (3, 4, 100), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_9(self, device):
npu_input1, npu_input2 = self.generate_data(-1000, 1000,
(5, 100), (3, 4, 100), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 1.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 1.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_10(self, device):
npu_input1, npu_input2 = self.generate_data(-0.1, 0.1,
(5, 100), (3, 4, 100), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_11(self, device):
npu_input1, npu_input2 = self.generate_data(-0.1, 0.1,
(5, 100), (3, 4, 100), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 0.5, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 0.5, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_12(self, device):
npu_input1, npu_input2 = self.generate_data(-0.1, 0.1,
(16, 11, 17, 5, 84, 2), (16, 11, 17, 5, 84, 2), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
def test_cdist_float32_13(self, device):
npu_input1, npu_input2 = self.generate_data(-0.1, 0.1,
(2, 2, 13, 39, 97, 14, 2, 7), (2, 2, 13, 39, 97, 14, 12, 7), np.float32)
cpu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'cpu')
npu_output = self.op_exec(npu_input1, npu_input2, 2.0, 'npu')
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(Testcdist, globals(), except_for="cpu")
if __name__ == "__main__":
run_tests()
| 48.71134 | 124 | 0.59418 |
ad80c9ac4356a9d07f5c721e4250961a10add7cb
| 1,587 |
py
|
Python
|
core/score.py
|
kklocker/EiT-WiFi-placer
|
c2c621baa06377d62567d7bd0c2f048f1e6c0859
|
[
"MIT"
] | null | null | null |
core/score.py
|
kklocker/EiT-WiFi-placer
|
c2c621baa06377d62567d7bd0c2f048f1e6c0859
|
[
"MIT"
] | null | null | null |
core/score.py
|
kklocker/EiT-WiFi-placer
|
c2c621baa06377d62567d7bd0c2f048f1e6c0859
|
[
"MIT"
] | null | null | null |
import numpy as np
from numba import jit
from dask.array import ma
def create_gaussian(N=1000, sigma=1.0, mu=0):
"""
    For testing only.
"""
x, y = np.meshgrid(np.linspace(-1, 1, N), np.linspace(-1, 1, N))
d = np.sqrt(x * x + y * y)
return (
1.0
/ (sigma * np.sqrt(2 * np.pi))
* np.exp(-((d - mu) ** 2 / (2.0 * sigma ** 2)))
)
@jit
def basic_score(sol, img):
"""
    Figure out what happens outside the boundary.
"""
# u = np.abs(u)
u = np.ma.array(np.abs(sol).reshape(img.shape), mask=(img != 1.0))
area = u.count()
return np.sum(u) / area
@jit(nopython=False, forceobj=True)
def step_score(sol, img, threshold=-50):
"""
Minimum signal: u0
"""
# umax = 1e3 # np.max(sol)
umax = np.max(np.square(np.abs(sol)))
db = 10 * np.log10(np.square(np.abs(sol)) / umax).reshape(img.shape)
# A = ma.masked_array(np.abs(sol).reshape(img.shape), mask=(img != 1.0))
A = np.ma.array(np.square(np.abs(sol)).reshape(img.shape), mask=(img != 1.0))
# A = np.ma.array(np.ones_like(img), mask=(img != 1.0))
area = A.count()
tmp = A[db > threshold]
return np.sum(tmp) / area
# NB: much slower.
@jit
def weighted_score(u, p, degree=1.0):
"""
p: position of source (tuple of indices)
u: solution of wave-eq.
"""
temp = np.zeros_like(u)
A = u.shape[0] * u.shape[1]
for i in range(u.shape[0]):
for j in range(u.shape[1]):
r = np.linalg.norm([i - p[0], j - p[1]])
temp[i, j] = (r ** degree) * u[i, j]
return np.sum(temp) / A
| 25.596774 | 81 | 0.542533 |
adbc5f93e53fcf697262071878f69d30813ff3fa
| 1,183 |
py
|
Python
|
DataStructure/U8/Lecture/Dijkstra.py
|
qiaw99/Data-Structure
|
3b1cdce96d4f35329ccfec29c03de57378ef0552
|
[
"MIT"
] | 1 |
2019-10-29T08:21:41.000Z
|
2019-10-29T08:21:41.000Z
|
DataStructure/U8/Lecture/Dijkstra.py
|
qiaw99/Data-Structure
|
3b1cdce96d4f35329ccfec29c03de57378ef0552
|
[
"MIT"
] | null | null | null |
DataStructure/U8/Lecture/Dijkstra.py
|
qiaw99/Data-Structure
|
3b1cdce96d4f35329ccfec29c03de57378ef0552
|
[
"MIT"
] | null | null | null |
from Halde import Halde as PWSchlange
def dijkstra(s,V,E):
d = {s: 0}
f = {s: None}
henkel = dict()
    U = PWSchlange(len(V)) # unfinished nodes
henkel[s] = U.einfügen(s,0)
    F = [] # finished nodes
while not U.istleer():
u = U.entferneMin()
for v,c in E[u]: # c = c[u,v]
if v not in d:
d[v] = d[u]+c
f[v] = u
henkel[v] = U.einfügen(v,d[v])
else:
if d[u]+c<d[v]:
d[v] = d[u]+c
f[v] = u
U.verkleinereSchlüssel(henkel[v],d[v])
for u in d:
print("Knoten {}: Distanz ={:3}, Vorgänger = {}, kürzester Weg = {}".format
(u,d[u],f[u], bestimme_Weg(u,f)))
def bestimme_Weg(u,f):
W = [u]
while f[u] is not None:
u=f[u]
W.append(u)
W.reverse()
return W
if __name__=="__main__":
E = {
"a": [("b",5),("c",2)],
"b": [("c",4),("d",8)],
"c": [("b",1),("e",9)],
"d": [("f",11),("e",9)],
"e": [("f",2),("t",6)],
"f": [("a",9),("t",5)],
"t": [] }
V = list(E)
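    # Hand-checked expected distances from "a" for the graph above (my annotation):
    # a=0, c=2, b=3, d=11, e=11, f=13, t=17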
dijkstra("a",V,E)
| 25.717391 | 83 | 0.384615 |
cb4bb730743d108277151fb6fd604c41e6d1d4b3
| 972 |
py
|
Python
|
challenges/pagesNumberingWithInk/python3/pagesNumberingWithInk.py
|
jimmynguyen/codefights
|
f4924fcffdb4ff14930618bb1a781e4e02e9aa09
|
[
"MIT"
] | 5 |
2020-05-21T03:02:34.000Z
|
2021-09-06T04:24:26.000Z
|
challenges/pagesNumberingWithInk/python3/pagesNumberingWithInk.py
|
jimmynguyen/codefights
|
f4924fcffdb4ff14930618bb1a781e4e02e9aa09
|
[
"MIT"
] | 6 |
2019-04-24T03:39:26.000Z
|
2019-05-03T02:10:59.000Z
|
challenges/pagesNumberingWithInk/python3/pagesNumberingWithInk.py
|
jimmynguyen/codefights
|
f4924fcffdb4ff14930618bb1a781e4e02e9aa09
|
[
"MIT"
] | 1 |
2021-09-06T04:24:27.000Z
|
2021-09-06T04:24:27.000Z
|
def pagesNumberingWithInk(current, numberOfDigits):
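    # Greedily number pages starting at `current`, spending len(str(page))
    # digits of ink per page; returns the last page that can be fully numbered.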
currentNumberOfDigits = len(str(current))
while numberOfDigits >= currentNumberOfDigits:
numberOfDigits -= currentNumberOfDigits
current += 1
currentNumberOfDigits = len(str(current))
return current-1
if __name__ == '__main__':
input0 = [1, 21, 8, 21, 76, 80]
input1 = [5, 5, 4, 6, 250, 1000]
expectedOutput = [5, 22, 10, 23, 166, 419]
assert len(input0) == len(expectedOutput), '# input0 = {}, # expectedOutput = {}'.format(len(input0), len(expectedOutput))
assert len(input1) == len(expectedOutput), '# input1 = {}, # expectedOutput = {}'.format(len(input1), len(expectedOutput))
for i, expected in enumerate(expectedOutput):
actual = pagesNumberingWithInk(input0[i], input1[i])
assert actual == expected, 'pagesNumberingWithInk({}, {}) returned {}, but expected {}'.format(input0[i], input1[i], actual, expected)
print('PASSES {} out of {} tests'.format(len(expectedOutput), len(expectedOutput)))
| 54 | 136 | 0.708848 |
3806b0ea8b122381b0d82fb8c223b8f6322ce4a1
| 1,330 |
py
|
Python
|
shinrl/_utils/log.py
|
omron-sinicx/ShinRL
|
09f4ae274a33d1fc1d9d542f816aef40014af6b5
|
[
"MIT"
] | 34 |
2021-12-09T07:12:57.000Z
|
2022-03-11T08:17:20.000Z
|
shinrl/_utils/log.py
|
omron-sinicx/ShinRL
|
09f4ae274a33d1fc1d9d542f816aef40014af6b5
|
[
"MIT"
] | null | null | null |
shinrl/_utils/log.py
|
omron-sinicx/ShinRL
|
09f4ae274a33d1fc1d9d542f816aef40014af6b5
|
[
"MIT"
] | 4 |
2021-12-11T07:48:01.000Z
|
2022-03-01T23:50:33.000Z
|
""" Functions for logging.
Author: Toshinori Kitamura
Affiliation: NAIST & OSX
"""
import logging
import structlog
from structlog.processors import *
from structlog.stdlib import *
def initialize_log_style() -> None:
shinrl_logger = logging.getLogger()
shinrl_logger.handlers = []
stream_handler = logging.StreamHandler()
stream_formatter = structlog.stdlib.ProcessorFormatter(
processor=structlog.dev.ConsoleRenderer(
level_styles={
"info:": "\033[31m",
}
)
)
stream_handler.setFormatter(stream_formatter)
stream_handler.setLevel(logging.INFO)
shinrl_logger.addHandler(stream_handler)
shinrl_logger.setLevel(logging.INFO)
structlog.configure(
processors=[
TimeStamper(fmt="iso"),
ExceptionPrettyPrinter(),
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
],
logger_factory=structlog.stdlib.LoggerFactory(),
)
def add_logfile_handler(file_path: str) -> None:
shinrl_logger = logging.getLogger()
file_handler = logging.FileHandler(file_path)
file_formatter = structlog.stdlib.ProcessorFormatter(
processor=structlog.processors.JSONRenderer(),
)
file_handler.setFormatter(file_formatter)
shinrl_logger.addHandler(file_handler)
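# A minimal usage sketch (my own example, not part of the original module;
# the log file name is arbitrary):
if __name__ == "__main__":
    initialize_log_style()
    add_logfile_handler("run.log")
    structlog.get_logger().info("logging initialized", component="demo")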
| 28.913043 | 67 | 0.7 |
69863ca02f0b5a396ac17e9da77d3a2419670e00
| 522 |
py
|
Python
|
Python/024-Alphabet-Rangoli.py
|
sadikkuzu/HackerRank
|
2b1ed2cf41f6a5404c5b9293186f301b646b5d33
|
[
"Apache-2.0"
] | 5 |
2019-03-09T22:44:01.000Z
|
2021-09-14T00:11:38.000Z
|
Python/024-Alphabet-Rangoli.py
|
jguerra7/HackerRank-4
|
7e1663d0050ffbb0fd885b8affdada9ea13b0e80
|
[
"Apache-2.0"
] | 4 |
2018-08-16T09:39:47.000Z
|
2018-09-14T17:37:07.000Z
|
Python/024-Alphabet-Rangoli.py
|
jguerra7/HackerRank-4
|
7e1663d0050ffbb0fd885b8affdada9ea13b0e80
|
[
"Apache-2.0"
] | 1 |
2020-06-01T23:38:35.000Z
|
2020-06-01T23:38:35.000Z
|
# https://www.hackerrank.com/challenges/alphabet-rangoli
from __future__ import print_function
import string
def print_rangoli(size):
# your code goes here
N = size
alphabet = string.ascii_lowercase[:N]
height = N * 2 - 1
width = N * 4 - 3
lines = [None] * height
for i in range(N):
sub_alphabet = alphabet[(-i - 1):]
letters = ''.join(reversed(sub_alphabet)) + sub_alphabet[1:]
lines[i] = lines[-i - 1] = '-'.join(letters).center(width, '-')
print(*lines, sep='\n')
| 34.8 | 71 | 0.616858 |
6986c6124ac57116e5621bb681049043bb324a2a
| 283 |
py
|
Python
|
pyntcloud/structures/__init__.py
|
bernssolg/pyntcloud-master
|
84cf000b7a7f69a2c1b36f9624f05f65160bf992
|
[
"MIT"
] | 1,142 |
2016-10-10T08:55:30.000Z
|
2022-03-30T04:46:16.000Z
|
pyntcloud/structures/__init__.py
|
bernssolg/pyntcloud-master
|
84cf000b7a7f69a2c1b36f9624f05f65160bf992
|
[
"MIT"
] | 195 |
2016-10-10T08:30:37.000Z
|
2022-02-17T12:51:17.000Z
|
pyntcloud/structures/__init__.py
|
bernssolg/pyntcloud-master
|
84cf000b7a7f69a2c1b36f9624f05f65160bf992
|
[
"MIT"
] | 215 |
2017-02-28T00:50:29.000Z
|
2022-03-22T17:01:31.000Z
|
"""
HAKUNA MATATA
"""
from .convex_hull import ConvexHull
from .delaunay import Delaunay3D
from .kdtree import KDTree
from .voxelgrid import VoxelGrid
ALL_STRUCTURES = {
'convex_hull': ConvexHull,
'delaunay3D': Delaunay3D,
'kdtree': KDTree,
'voxelgrid': VoxelGrid
}
| 18.866667 | 35 | 0.727915 |
69a4aae0444bbdb6a629ab16667784be8b727bf4
| 4,569 |
py
|
Python
|
hihope_neptune-oh_hid/00_src/v0.1/third_party/LVM2/daemons/lvmdbusd/fetch.py
|
dawmlight/vendor_oh_fun
|
bc9fb50920f06cd4c27399f60076f5793043c77d
|
[
"Apache-2.0"
] | 1 |
2022-02-15T08:51:55.000Z
|
2022-02-15T08:51:55.000Z
|
hihope_neptune-oh_hid/00_src/v0.3/third_party/LVM2/daemons/lvmdbusd/fetch.py
|
dawmlight/vendor_oh_fun
|
bc9fb50920f06cd4c27399f60076f5793043c77d
|
[
"Apache-2.0"
] | null | null | null |
hihope_neptune-oh_hid/00_src/v0.3/third_party/LVM2/daemons/lvmdbusd/fetch.py
|
dawmlight/vendor_oh_fun
|
bc9fb50920f06cd4c27399f60076f5793043c77d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2015-2016 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .pv import load_pvs
from .vg import load_vgs
from .lv import load_lvs
from . import cfg
from .utils import MThreadRunner, log_debug, log_error
import threading
import queue
import traceback
def _main_thread_load(refresh=True, emit_signal=True):
num_total_changes = 0
num_total_changes += load_pvs(
refresh=refresh,
emit_signal=emit_signal,
cache_refresh=False)[1]
num_total_changes += load_vgs(
refresh=refresh,
emit_signal=emit_signal,
cache_refresh=False)[1]
num_total_changes += load_lvs(
refresh=refresh,
emit_signal=emit_signal,
cache_refresh=False)[1]
return num_total_changes
def load(refresh=True, emit_signal=True, cache_refresh=True, log=True,
need_main_thread=True):
# Go through and load all the PVs, VGs and LVs
if cache_refresh:
cfg.db.refresh(log)
if need_main_thread:
rc = MThreadRunner(_main_thread_load, refresh, emit_signal).done()
else:
rc = _main_thread_load(refresh, emit_signal)
return rc
# Even though lvm can handle multiple changes concurrently, it doesn't make
# sense to do a 1-1 data fetch for each lvm change: once we fetch the data,
# all previous changes are already reflected in it.
class StateUpdate(object):
class UpdateRequest(object):
def __init__(self, refresh, emit_signal, cache_refresh, log,
need_main_thread):
self.is_done = False
self.refresh = refresh
self.emit_signal = emit_signal
self.cache_refresh = cache_refresh
self.log = log
self.need_main_thread = need_main_thread
self.result = None
self.cond = threading.Condition(threading.Lock())
def done(self):
with self.cond:
if not self.is_done:
self.cond.wait()
return self.result
def set_result(self, result):
with self.cond:
self.result = result
self.is_done = True
self.cond.notify_all()
@staticmethod
def update_thread(obj):
queued_requests = []
while cfg.run.value != 0:
# noinspection PyBroadException
try:
refresh = True
emit_signal = True
cache_refresh = True
log = True
need_main_thread = True
with obj.lock:
wait = not obj.deferred
obj.deferred = False
if len(queued_requests) == 0 and wait:
queued_requests.append(obj.queue.get(True, 2))
# Ok we have one or the deferred queue has some,
# check if any others
try:
while True:
queued_requests.append(obj.queue.get(False))
except queue.Empty:
pass
if len(queued_requests) > 1:
log_debug("Processing %d updates!" % len(queued_requests),
'bg_black', 'fg_light_green')
# We have what we can, run the update with the needed options
for i in queued_requests:
if not i.refresh:
refresh = False
if not i.emit_signal:
emit_signal = False
if not i.cache_refresh:
cache_refresh = False
if not i.log:
log = False
if not i.need_main_thread:
need_main_thread = False
num_changes = load(refresh, emit_signal, cache_refresh, log,
need_main_thread)
# Update is done, let everyone know!
for i in queued_requests:
i.set_result(num_changes)
# Only clear out the requests after we have given them a result
# otherwise we can orphan the waiting threads and they never
# wake up if we get an exception
queued_requests = []
except queue.Empty:
pass
except Exception:
st = traceback.format_exc()
log_error("update_thread exception: \n%s" % st)
cfg.blackbox.dump()
def __init__(self):
self.lock = threading.RLock()
self.queue = queue.Queue()
self.deferred = False
# Do initial load
load(refresh=False, emit_signal=False, need_main_thread=False)
self.thread = threading.Thread(target=StateUpdate.update_thread,
args=(self,),
name="StateUpdate.update_thread")
def load(self, refresh=True, emit_signal=True, cache_refresh=True,
log=True, need_main_thread=True):
# Place this request on the queue and wait for it to be completed
req = StateUpdate.UpdateRequest(refresh, emit_signal, cache_refresh,
log, need_main_thread)
self.queue.put(req)
return req.done()
def event(self):
with self.lock:
self.deferred = True
| 27.035503 | 76 | 0.711753 |
69f11f56d4c3dee6a1d4e4ee4e7025c66fde8f92
| 107 |
py
|
Python
|
gemtown/advertisings/apps.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | null | null | null |
gemtown/advertisings/apps.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | 5 |
2020-09-04T20:13:39.000Z
|
2022-02-17T22:03:33.000Z
|
gemtown/advertisings/apps.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class AdvertisingsConfig(AppConfig):
name = 'gemtown.advertisings'
| 17.833333 | 36 | 0.785047 |
38bce8986af1535f0b0e124a7b568c6c3a54126b
| 2,939 |
py
|
Python
|
10.3389/fimmu.2018.00251/bin/make_cdr3_clusters_tsv.py
|
jqsunac/doi
|
c5912a40c7bfda8270e5d51fbdd82a9f0650bd23
|
[
"MIT"
] | null | null | null |
10.3389/fimmu.2018.00251/bin/make_cdr3_clusters_tsv.py
|
jqsunac/doi
|
c5912a40c7bfda8270e5d51fbdd82a9f0650bd23
|
[
"MIT"
] | null | null | null |
10.3389/fimmu.2018.00251/bin/make_cdr3_clusters_tsv.py
|
jqsunac/doi
|
c5912a40c7bfda8270e5d51fbdd82a9f0650bd23
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
import argparse
import re
from Bio import SeqIO
def create_tsv(f, c, o):
# read FASTA file
    fastafh = open(f, 'r')  # 'rU' mode was removed in Python 3; universal newlines are the default
seqid2seq = {}
for record in SeqIO.parse(fastafh, "fasta"):
seqid2seq[record.description] = str(record.seq)
fastafh.close()
# read cluster file
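    # (The cluster file looks like cd-hit-style ".clstr" output: ">Cluster N"
    # header lines, then member lines like '0  123aa, >seqid... *', where '*'
    # marks the representative sequence -- hence the regex and '*' check below.)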
mptn = re.compile(r'>(.+)\.\.\.')
clusterid2seqidlist = {}
clusterid2repseqid = {}
clusterid2seqsize_fugu1 = {}
clusterid2seqsize_fugu2 = {}
clusterid2seqsize_fugu3 = {}
cluster_id = None
with open(c, 'r') as clstrfh:
for buf in clstrfh:
if buf[0:1] == '>':
cluster_id = buf[1:]
cluster_id = cluster_id.replace('\n', '')
cluster_id = cluster_id.replace(' ', '_')
# init
clusterid2seqidlist[cluster_id] = []
clusterid2repseqid[cluster_id] = None
clusterid2seqsize_fugu1[cluster_id] = 0
clusterid2seqsize_fugu2[cluster_id] = 0
clusterid2seqsize_fugu3[cluster_id] = 0
else:
m = mptn.search(buf)
seqid = m.group(1)
if 'fugu1' in buf:
clusterid2seqsize_fugu1[cluster_id] += 1
if 'fugu2' in buf:
clusterid2seqsize_fugu2[cluster_id] += 1
if 'fugu3' in buf:
clusterid2seqsize_fugu3[cluster_id] += 1
if '*' in buf:
clusterid2seqidlist[cluster_id].append('*' + seqid)
clusterid2repseqid[cluster_id] = seqid
else:
clusterid2seqidlist[cluster_id].append(seqid)
# print out tsv
with open(o, 'w') as outfh:
outfh.write('ClusterID\tRepresentSeq\tRepresentSeqLen\tFugu1Count\tFugu2Count\tFugu3Count\tTotalCount\tSeqID')
        for cls_id in sorted(clusterid2repseqid):  # sorted() over the dict works on both Python 2 and 3
arr = [cls_id,
seqid2seq[clusterid2repseqid[cls_id]],
str(len(seqid2seq[clusterid2repseqid[cls_id]])),
str(clusterid2seqsize_fugu1[cls_id]),
str(clusterid2seqsize_fugu2[cls_id]),
str(clusterid2seqsize_fugu3[cls_id]),
str(clusterid2seqsize_fugu1[cls_id] + clusterid2seqsize_fugu2[cls_id] + clusterid2seqsize_fugu3[cls_id]),
';'.join(clusterid2seqidlist[cls_id])]
outfh.write('\t'.join(arr) + '\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description = 'Create CDR3 capture-recapture dataset.')
parser.add_argument('-f', '--fasta', required = True)
parser.add_argument('-c', '--clstr', required = True)
parser.add_argument('-o', '--output', required = True)
args = parser.parse_args()
create_tsv(args.fasta, args.clstr, args.output)
| 37.679487 | 124 | 0.5672 |
2a8a4cf7528e55b86e459f3e34037114eba2f802
| 147 |
py
|
Python
|
Algorithms/DynamticProgramming/FibonacciModified.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
Algorithms/DynamticProgramming/FibonacciModified.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
Algorithms/DynamticProgramming/FibonacciModified.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
#coding:utf-8
t1, t2, n = map(int, raw_input().split())
ar = [t1, t2]
for i in xrange(2, n):
ar.append(ar[i-2] + ar[i-1]**2)
print ar[n-1]
| 13.363636 | 41 | 0.55102 |
aa4a130cb268d7e604242cf03ed4b2348372d709
| 554 |
py
|
Python
|
backend/alembic/versions/7b852dfcc007_version_0_0_7n.py
|
jinnn-dev/patholearn
|
b4e6a18cfbf963e71640ed6cac3fc3a618a7ae15
|
[
"MIT"
] | 1 |
2021-11-04T17:06:07.000Z
|
2021-11-04T17:06:07.000Z
|
backend/alembic/versions/7b852dfcc007_version_0_0_7n.py
|
JamesNeumann/learning-by-annotations
|
c2b5e4b653eeb1c973aa5a7dad35ac8be18cb1ad
|
[
"MIT"
] | 21 |
2021-11-01T10:13:56.000Z
|
2021-12-02T10:02:13.000Z
|
backend/alembic/versions/7b852dfcc007_version_0_0_7n.py
|
jinnn-dev/patholearn
|
b4e6a18cfbf963e71640ed6cac3fc3a618a7ae15
|
[
"MIT"
] | 1 |
2021-12-16T18:20:55.000Z
|
2021-12-16T18:20:55.000Z
|
"""Version 0.0.7n
Revision ID: 7b852dfcc007
Revises: a5e49aaba65b
Create Date: 2021-11-10 18:58:16.440339
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7b852dfcc007"
down_revision = "a5e49aaba65b"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
| 19.103448 | 65 | 0.684116 |
aabe68dc632cf7c0b16f9ce291bc48d9dc626585
| 4,330 |
py
|
Python
|
code/03_analysis_databricks/ST3-CALIBER_counts.py
|
BHFDSC/CCU013_01_ENG-COVID-19_event_phenotyping
|
83afef23f96c7b31fef64c6f735737d1afa3082d
|
[
"Apache-2.0"
] | null | null | null |
code/03_analysis_databricks/ST3-CALIBER_counts.py
|
BHFDSC/CCU013_01_ENG-COVID-19_event_phenotyping
|
83afef23f96c7b31fef64c6f735737d1afa3082d
|
[
"Apache-2.0"
] | null | null | null |
code/03_analysis_databricks/ST3-CALIBER_counts.py
|
BHFDSC/CCU013_01_ENG-COVID-19_event_phenotyping
|
83afef23f96c7b31fef64c6f735737d1afa3082d
|
[
"Apache-2.0"
] | null | null | null |
# Databricks notebook source
# MAGIC %md
# MAGIC # Supplementary Table 3: CALIBER phenotype frequencies
# MAGIC
# MAGIC **Description**
# MAGIC
# MAGIC This notebook produces the number of distinct individuals with a CALIBER phenotype prior to 01/01/2020, as used when defining comorbidities for `CCU013: COVID-19 Event Phenotypes`.
# MAGIC
# MAGIC The output from these queries produces `Supplement table 3: 269 CALIBER phenotypes, aggregated into 16 categories, and the number of individuals within the study cohort identified from GDPPR (SNOMED-CT) and HES APC (ICD-10, OPCS-4).` within the manuscript `Characterising COVID-19 related events in a nationwide electronic health record cohort of 55.9 million people in England`
# MAGIC
# MAGIC **Project(s)** CCU013
# MAGIC
# MAGIC **Author(s)** Chris Tomlinson
# MAGIC
# MAGIC **Reviewer(s)**
# MAGIC
# MAGIC **Date last updated** 2022-01-24
# MAGIC
# MAGIC **Date last reviewed** *NA*
# MAGIC
# MAGIC **Date last run** `1/24/2022, 11:34:40 AM`
# MAGIC
# MAGIC **Last export requested** `1/24/2022`
# MAGIC
# MAGIC **Data input**
# MAGIC * `ccu013_covid_events_paper_cohort`
# MAGIC * `ccu013_caliber_comorbidities_pre2020`
# MAGIC * `ccu013_caliber_category_mapping`
# MAGIC
# MAGIC **Data output**
# MAGIC Export of this notebook.
# MAGIC
# MAGIC **Software and versions** `python`
# MAGIC
# MAGIC **Packages and versions** `pyspark`
# COMMAND ----------
import databricks.koalas as ks
import pandas as pd
# COMMAND ----------
# COVID-19 events
events_table = "dars_nic_391419_j3w9t_collab.ccu013_covid_events_paper_cohort"
# CALIBER phenotypes table
phenos_table = "dars_nic_391419_j3w9t_collab.ccu013_caliber_comorbidities_pre2020"
# COMMAND ----------
patients = spark.sql(f"""SELECT person_id_deid FROM {events_table}""")
phenos = spark.table(phenos_table)
# Subset to cohort
counts = patients.join(phenos, 'person_id_deid', 'left') \
.fillna(0) \
.drop('person_id_deid')
# Col sums
counts = counts.to_koalas() \
.sum(axis=0) \
.reset_index()
# Renaming operations prior to join
counts.columns = counts.columns.fillna('count')
counts = counts.rename(columns={'index': 'Phenotype',
'count': 'Individuals'})
# COMMAND ----------
# Get phenotype-category mapping
category_dictionary_table = spark.table("dars_nic_391419_j3w9t_collab.ccu013_caliber_category_mapping") \
.drop('cat') \
.to_koalas() \
.rename(columns={'phenotype': 'Phenotype'})
# Apply mapping with merge
df = counts.merge(category_dictionary_table, on='Phenotype', how='left') \
.rename(columns={'category': 'Category'}) \
.sort_values(by=['Category', 'Individuals'], ascending=[True, False])
# Mask counts < 5. Do this last as will change count to string so then can't sort by it
df['Individuals'] = df['Individuals'].astype('str').str.replace('^[1-4]$', '<5', regex=True)
# Process text
df = df.to_pandas()
df['Category'] = df['Category'].str.capitalize()
df['Phenotype'] = df['Phenotype'].str.capitalize()
df = df.replace(regex='_', value=" ")
# Manual corrections
df = df.replace({'Benign neoplasm cin': 'Benign neoplasm/CIN',
'Hiv': 'HIV',
'Bacterial diseases excl tb': 'Bacterial diseases excl TB',
'Copd': 'COPD',
'Vitamin b12 deficiency anaemia': 'Vitamin B12 deficiency anaemia',
'Rheumatic valve dz': 'Rheumatic valve disease',
'Venous thromboembolic disease excl pe': 'Venous thromboembolic disease excl PE',
'Stroke nos': 'Stroke NOS',
'Secondary malignancy brain other cns and intracranial': 'Secondary malignancy brain other CNS and intracranial',
'Primary malignancy brain other cns and intracranial': 'Primary malignancy brain other CNS and intracranial',
'Monoclonal gammopathy of undetermined significance mgus': 'Monoclonal gammopathy of undetermined significance',
'Viral diseases excluding chronic hepatitis hiv': 'Viral diseases excluding chronic hepatitis or HIV'})
# excl
df = df.replace(regex='\sexcl\s', value=" excluding ")
df = df.replace(regex='\sincl\s', value=" including ")
# Reorder
df = df[['Category', 'Phenotype', 'Individuals']]
# Set to display full Phenotype text
pd.set_option('display.max_colwidth', None)
display(df)
| 39.009009 | 386 | 0.696536 |
632fadfe02d175c47ee0d450dae1827460616a0d
| 4,008 |
py
|
Python
|
runServer.py
|
wchming1987/Tumbler
|
e309771779c8eb44e685adbb691a428d29009e05
|
[
"Apache-2.0"
] | null | null | null |
runServer.py
|
wchming1987/Tumbler
|
e309771779c8eb44e685adbb691a428d29009e05
|
[
"Apache-2.0"
] | null | null | null |
runServer.py
|
wchming1987/Tumbler
|
e309771779c8eb44e685adbb691a428d29009e05
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding=utf-8 -*-
import ConfigParser
import logging
import os.path
import signal
import sys
import time
import tornado.autoreload
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.options import define, options
from tornado_swagger import swagger
import pymongo
from handlers import *
reload(sys)
sys.setdefaultencoding('utf-8')
define('port', default=12000, help='run on the given port', type=int)
define('loglevel', default='debug', help='log level')
define('debug', default=True, help='run in debug mode')
MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 10
DEFAULT_REPRESENTATION = "application/json"
HTTP_BAD_REQUEST = 400
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
swagger.docs()
class DBApplication(tornado.web.Application):
def __init__(self, handlers, **settings):
tornado.web.Application.__init__(self, handlers, **settings)
try:
config = ConfigParser.SafeConfigParser()
path = os.path.join(os.path.dirname(__file__), 'config', 'db.conf')
config.read(path)
self.conn = pymongo.MongoClient(
config.get('database', 'dbhost'),
int(config.get('database', 'dbport'))
)
self.db = self.conn.get_database(config.get('database', 'dbname'))
self.db.authenticate(
config.get('database', 'dbuser'),
config.get('database', 'dbpassword')
)
except Exception, ex:
print ex
exit(-1)
def sig_handler(sig, frame):
logging.warning('Caught signal: %s', sig)
tornado.ioloop.IOLoop.instance().add_callback_from_signal(shutdown)
def shutdown():
logging.info('Stopping http server')
    httpServer.stop() # stop accepting new HTTP requests
logging.info('Will shutdown in %s seconds ...', MAX_WAIT_SECONDS_BEFORE_SHUTDOWN)
io_loop = tornado.ioloop.IOLoop.instance()
deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN
def stop_loop():
now = time.time()
if now < deadline and (io_loop._callbacks or io_loop._timeouts):
io_loop.add_timeout(now + 1, stop_loop)
else:
            io_loop.stop() # once pending callbacks and timeouts are handled, break out of the loop in io_loop.start()
logging.info('Shutdown')
stop_loop()
handlers = [
(r'/books', BooksHandler),
(r'/book/(.*)/buy', BuyBookHandler),
(r'/book/(.*)', BookHandler),
(r'/matters', MattersHandler),
(r'/matter/(.*)', MatterHandler),
(r'/matter', MatterHandler),
]
settings = {
    # set the cookie_secret
    # 'cookie_secret': 'FASDFA12psaldQWRJLSDFJL87123jHAFu0',
    # set the static folder, here ./static,
    # so files can then be fetched directly at http://xxxxxx:xxxx/static/*
    # 'static_path': os.path.join(os.path.dirname(__file__), 'static'),
    # 'template_path': os.path.join(os.path.dirname(__file__), 'templates'),
    'debug': options.debug,
    # set the login page
    # 'login_url': '/login.html',
    # whether to guard against cross-site POSTs (see the docs)
    #"xsrf_cookies": True,
    # turn off automatic escaping
"autoescape": None
}
if __name__ == '__main__':
## setup log
tornado.options.options.logging = options.loglevel
tornado.options.options.log_to_stderr = True
tornado.options.options.log_file_prefix = os.path.join(os.path.dirname(__file__), 'log', 'run.log')
tornado.options.options.log_file_max_size = 1000000
tornado.options.parse_command_line()
logging.info('---------------- Begin Start Server... ----------------')
logging.debug('LogLevel:[%s]' % tornado.options.options.logging)
logging.debug('LogFile:[%s]' % tornado.options.options.log_file_prefix)
webApp = DBApplication(handlers, **settings)
httpServer = tornado.httpserver.HTTPServer(webApp)
httpServer.bind(options.port)
signal.signal(signal.SIGTERM, sig_handler)
signal.signal(signal.SIGINT, sig_handler)
#loop = tornado.ioloop.IOLoop.instance()
#tornado.autoreload.start(loop)
#loop.start()
tornado.ioloop.IOLoop.instance().start()
logging.info('Exit')
| 29.255474 | 103 | 0.660429 |
ea02f19281507543c200df1b554943965cfa3e68
| 97 |
py
|
Python
|
Online-Judges/CodingBat/Python/List-01/08-max_end3.py
|
shihab4t/Competitive-Programming
|
e8eec7d4f7d86bfa1c00b7fbbedfd6a1518f19be
|
[
"Unlicense"
] | 3 |
2021-06-15T01:19:23.000Z
|
2022-03-16T18:23:53.000Z
|
Online-Judges/CodingBat/Python/List-01/08-max_end3.py
|
shihab4t/Competitive-Programming
|
e8eec7d4f7d86bfa1c00b7fbbedfd6a1518f19be
|
[
"Unlicense"
] | null | null | null |
Online-Judges/CodingBat/Python/List-01/08-max_end3.py
|
shihab4t/Competitive-Programming
|
e8eec7d4f7d86bfa1c00b7fbbedfd6a1518f19be
|
[
"Unlicense"
] | null | null | null |
def max_end3(nums):
if nums[0] > nums[-1]:
return nums[0:1]*3
return nums[-1:]*3
| 19.4 | 26 | 0.536082 |
105a12b83659a78b7fbcd543264101da75a1a970
| 12,778 |
py
|
Python
|
PyTorchStuff/autoencoders/autoencoder.py
|
stanton119/data-analysis
|
b6fda815c6cc1798ba13a5d2680369b7e5dfcdf9
|
[
"Apache-2.0"
] | null | null | null |
PyTorchStuff/autoencoders/autoencoder.py
|
stanton119/data-analysis
|
b6fda815c6cc1798ba13a5d2680369b7e5dfcdf9
|
[
"Apache-2.0"
] | 1 |
2021-02-11T23:44:52.000Z
|
2021-02-11T23:44:52.000Z
|
PyTorchStuff/autoencoders/autoencoder.py
|
stanton119/data-analysis
|
b6fda815c6cc1798ba13a5d2680369b7e5dfcdf9
|
[
"Apache-2.0"
] | 1 |
2021-12-16T01:02:23.000Z
|
2021-12-16T01:02:23.000Z
|
# %% [markdown]
# # Image compression - part 2. - Autoencoders
# In this post I will be looking at building an autoencoder to compress the MNIST dataset.
# See part 1. [here](https://github.com/stanton119/data-analysis/blob/master/PyTorchStuff/autoencoders/pca.md).
#
# Autoencoders build a network to encode the original images into a latent space and then build a decoder
# to reproduce back the same image.
# By having a latent space representation that is small we force the network to compress the information.
# The latent space is similar to the concept of components within PCA, but in this case the encoder and decoder
# can be nonlinear.
# The PCA weights that form components can be seen as a subset of the possible solutions for the autoencoder.
# As such I expect the autoencoder to do at least as well as PCA at compressing the images.
#
# First let's download the required dataset. This time we download the test set as well to help us check for overfitting.
# %%
from pathlib import Path
import torch
import torchvision
import matplotlib.pyplot as plt
plt.style.use("seaborn-whitegrid")
transform = torchvision.transforms.Compose([torchvision.transforms.ToTensor()])
mnist_train_data = torchvision.datasets.MNIST(
Path() / "data", train=True, download=True, transform=transform
)
mnist_train = torch.utils.data.DataLoader(mnist_train_data, batch_size=64)
mnist_test_data = torchvision.datasets.MNIST(
Path() / "data", train=False, download=True, transform=transform
)
mnist_test = torch.utils.data.DataLoader(mnist_test_data, batch_size=64)
# %% [markdown]
# ## Dense Autoencoder
# Our first autoencoder will be based on dense layers.
# I may follow up with a comparison to convolution layers which are typically used in image based problems.
#
# This is a pretty standard setup where the output size of each layer of the encoder shrinks toward the latent dimension.
# Images are reshaped into a vector and processed through normal dense layers.
# The decoder effectively inverts the operations of the encoder.
# It uses a sigmoid activation at the end to ensure the correct pixel value range is observed.
# To optimise, we are minimising the reconstruction MSE.
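# To write that down concretely (my notation, not from the original code): with
# encoder $f$ and decoder $g$, training minimises
# $L = \frac{1}{N} \sum_{i=1}^{N} \lVert x_i - g(f(x_i)) \rVert_2^2$,
# and the case where $f$ and $g$ are linear recovers a PCA-like solution,
# which is why PCA is a natural baseline here.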
# %%
import pytorch_lightning as pl
class AutoEncoderDense(pl.LightningModule):
def __init__(self, n_inputs: int = 1, n_latent: int = 5):
super().__init__()
self.train_log = []
self.n_latent = n_latent
self.encoder = torch.nn.Sequential(
torch.nn.Linear(28 * 28, 64),
torch.nn.ReLU(),
torch.nn.Linear(64, 32),
torch.nn.ReLU(),
torch.nn.Linear(32, n_latent),
)
self.decoder = torch.nn.Sequential(
torch.nn.Linear(n_latent, 32),
torch.nn.ReLU(),
torch.nn.Linear(32, 64),
torch.nn.ReLU(),
torch.nn.Linear(64, 28 * 28),
torch.nn.Sigmoid(),
)
def forward(self, x):
x = x.reshape(-1, 28 * 28)
encoded = self.encoder(x)
decoded = self.decoder(encoded)
return decoded.reshape(-1, 1, 28, 28)
def configure_optimizers(self, learning_rate=1e-3):
optimizer = torch.optim.Adam(
self.parameters(),
lr=learning_rate,
)
return optimizer
def training_step(self, batch, batch_idx):
x, y = batch
x_hat = self(x)
loss = torch.nn.MSELoss()(x_hat, x)
self.log("loss", loss)
self.train_log.append(loss.detach().numpy())
return loss
# %% [markdown]
# Compared to our PCA example, the number of parameters we are tuning here is significantly larger.
# In fact larger than our training set of 60k examples:
# %%
model_dense = AutoEncoderDense(n_latent=10)
print(model_dense.summarize())
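# %% [markdown]
# As a rough sanity check, here is a hand tally of the dense-layer weights for
# `n_latent=10` (my back-of-the-envelope count, assuming one bias per unit and
# the layer sizes defined above):
# %%
encoder_params = (784 * 64 + 64) + (64 * 32 + 32) + (32 * 10 + 10)
decoder_params = (10 * 32 + 32) + (32 * 64 + 64) + (64 * 784 + 784)
print(encoder_params + decoder_params)  # ~106k parameters vs 60k training images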
# %% [markdown]
# We will examine this in more detail later.
#
# ## Training
# We will now create several dense networks with different latent space sizes.
# We save the networks each time so that we can recall them later for predictions.
# Plotting the training MSE shows if the model has converged successfully.
# %%
latent_space_dim = [3, 5, 10, 20, 30, 50]
model_path = Path() / "models"
model_path.mkdir(exist_ok=True)
for n_latent in latent_space_dim:
print(f"training: {n_latent}")
model_dense = AutoEncoderDense(n_latent=n_latent)
trainer = pl.Trainer(max_epochs=10)
trainer.fit(model_dense, mnist_train)
torch.save(model_dense, model_path / f"dense_{n_latent}.pt")
fig, ax = plt.subplots(figsize=(10, 6))
ax.plot(model_dense.train_log)
ax.set_title(f"Training error: {n_latent}")
ax.set_xlabel("Batches")
ax.set_ylabel("MSE")
# %% [markdown]
# ## Results
# We need to get the MSE of all images so we can see how the latent space affects reconstruction error.
# For this we reload each network and predict all the training images.
# %%
# use whole training dataset
dataloader = torch.utils.data.DataLoader(
dataset=mnist_train_data, batch_size=len(mnist_train_data)
)
images_all, labels_all = next(iter(dataloader))
# dense model error
mse_train_dense = []
for n_latent in latent_space_dim:
print(f"mse: {n_latent}")
model_dense = torch.load(model_path / f"dense_{n_latent}.pt")
images_all_hat = model_dense(images_all)
_loss = torch.nn.MSELoss()(images_all_hat, images_all)
mse_train_dense.append(_loss.detach().numpy())
# %% [markdown]
# To examine the results of the networks we will compare against PCA as a baseline.
# Here we fit a PCA model as previously shown.
# Then we reconstruct the images and measure the MSE at each latent space size.
# %%
import numpy as np
import pandas as pd
import sklearn.decomposition
import sklearn.metrics
# convert images to 1D vectors
images_flat = images_all[:, 0].reshape(-1, 784).numpy()
images_flat.shape
print(f"training components: {latent_space_dim[-1]}")
pca = sklearn.decomposition.PCA(n_components=latent_space_dim[-1])
images_flat_hat = pca.inverse_transform(pca.fit_transform(images_flat))
def transform_truncated(pca, X, n_components):
X = pca._validate_data(X, dtype=[np.float64, np.float32], reset=False)
if pca.mean_ is not None:
X = X - pca.mean_
X_transformed = np.dot(X, pca.components_[:n_components, :].T)
if pca.whiten:
X_transformed /= np.sqrt(pca.explained_variance_)
return X_transformed
def inv_transform(pca, X, n_components):
return np.dot(X, pca.components_[:n_components, :]) + pca.mean_
def inv_forward_transform(pca, X, n_components):
return inv_transform(
pca, transform_truncated(pca, X, n_components), n_components
)
# get pca mse
mse_train_pca = []
for n_latent in latent_space_dim:
print(f"mse: {n_latent}")
images_flat_hat = inv_forward_transform(
pca, X=images_flat, n_components=n_latent
)
_loss = sklearn.metrics.mean_squared_error(images_flat_hat, images_flat)
mse_train_pca.append(_loss)
# %% [markdown]
# Now let's plot the two approaches side by side:
# %%
# reconstruction mse
fig, ax = plt.subplots(figsize=(10, 6))
ax.plot(latent_space_dim, mse_train_dense, label="dense")
ax.plot(latent_space_dim, mse_train_pca, label="pca")
ax.set_title("Reconstruction error")
ax.set_xlabel("Latent space size")
ax.set_ylabel("MSE")
fig.legend()
# %% [markdown]
# We can see that the dense autoencoder does do better generally.
# Particularly so at small latent space sizes.
# Once the latent space gets much larger, PCA becomes comparable.
# With a latent space of 50, the autoencoder's latent dimension is greater than
# the output size of the preceding layer, hence we don't expect any improvement here.
#
# ## Test set
# However, as noted before, there are more parameters than training images, so we could easily be overfitting here.
# To confirm we can check the reconstruction error on the unseen test set.
# %%
# Run same analysis on test set to check for overfitting
# use whole training dataset
dataloader = torch.utils.data.DataLoader(
dataset=mnist_test_data, batch_size=len(mnist_test_data)
)
images_all, labels_all = next(iter(dataloader))
images_flat = images_all[:, 0].reshape(-1, 784).numpy()
# autoencoder
mse_test_dense = []
for n_latent in latent_space_dim:
print(f"mse: {n_latent}")
model_dense = torch.load(model_path / f"dense_{n_latent}.pt")
images_all_hat = model_dense(images_all)
_loss = torch.nn.MSELoss()(images_all_hat, images_all)
mse_test_dense.append(_loss.detach().numpy())
# pca
mse_test_pca = []
for n_latent in latent_space_dim:
print(f"mse: {n_latent}")
images_flat_hat = inv_forward_transform(
pca, X=images_flat, n_components=n_latent
)
_loss = sklearn.metrics.mean_squared_error(images_flat_hat, images_flat)
mse_test_pca.append(_loss)
# reconstruction mse
fig, ax = plt.subplots(figsize=(10, 6))
ax.plot(latent_space_dim, mse_test_dense, label="dense")
ax.plot(latent_space_dim, mse_test_pca, label="pca")
ax.set_title("Reconstruction error")
ax.set_xlabel("Latent space size")
ax.set_ylabel("MSE")
fig.legend()
# %% [markdown]
# We obtain very similar results to before.
# This gives us a good indication we are not overfitting.
# Therefore the autoencoders should generalise to unseen images fine.
# For more confidence it would be nice to apply cross validation and get multiple instances of the model and results.
# I'll skip this for now in the interests of time.
#
# ## Results - images
# We have an improvement in MSE but it's good to check the actual reconstructed images to confirm with our eyes.
#
# First for PCA - top row are the originals, subsequent rows are increasing latent space size.
# %%
fig, ax = plt.subplots(figsize=(20, 20), ncols=6, nrows=5)
for row, n_latent in enumerate(latent_space_dim[:4]):
images_hat = inv_forward_transform(
pca, X=images_flat, n_components=n_latent
).reshape(-1, 28, 28)
for col in range(6):
ax[0, col].imshow(images_all[col, 0])
ax[0, col].set_title(str(labels_all[col].numpy()))
ax[row + 1, col].imshow(images_hat[col])
ax[row + 1, col].set_title(str(labels_all[col].numpy()))
# %% [markdown]
# The same for the autoencoder:
# %%
fig, ax = plt.subplots(figsize=(20, 20), ncols=6, nrows=5)
for row, n_latent in enumerate(latent_space_dim[:4]):
model_dense = torch.load(model_path / f"dense_{n_latent}.pt")
images_hat = model_dense(images_all).detach()
for col in range(6):
ax[0, col].imshow(images_all[col, 0])
ax[0, col].set_title(str(labels_all[col].numpy()))
ax[row + 1, col].imshow(images_hat[col,0])
ax[row + 1, col].set_title(str(labels_all[col].numpy()))
# %% [markdown]
# We can see that the autoencoder is much clearer at small latent spaces.
# Even at only 3, the images are pretty decent.
#
# Similar to PCA, some digits look worse than others.
# We can plot the MSE against the digit to see which are hard to construct:
# %%
# MSE against label - PCA benchmark
images_flat_hat = inv_forward_transform(
pca, X=images_flat, n_components=latent_space_dim[2]
)
loss_label_pca = []
for label in range(0, 10):
filt = labels_all == label
_loss = sklearn.metrics.mean_squared_error(
images_flat_hat[filt], images_flat[filt]
)
loss_label_pca.append(_loss)
# MSE against label for autoencoder
loss_label = []
for row, n_latent in enumerate(latent_space_dim):
model_dense = torch.load(model_path / f"dense_{n_latent}.pt")
images_all_hat = model_dense(images_all)
_loss_label = []
for label in range(0, 10):
filt = labels_all == label
_loss = torch.nn.MSELoss()(
images_all_hat[filt].detach(), images_all[filt].detach()
).numpy().flatten()[0]
_loss_label.append(_loss)
loss_label.append(_loss_label)
# create plot with pca benchmark
df_loss = pd.DataFrame(
loss_label, index=latent_space_dim, columns=range(0, 10)
).transpose()
fig, ax = plt.subplots(figsize=(10, 6))
df_loss.plot(ax=ax, legend=False)
ax.plot(range(0, 10), loss_label_pca, '--', label=f'pca_{latent_space_dim[2]}')
ax.set_title("Reconstruction error by digit number")
ax.set_xlabel("Digit label")
ax.set_ylabel("MSE")
fig.legend()
# %% [markdown]
# The digits the autoencoder struggles with are generally the same as for PCA.
# We can see the reconstruction error for an autoencoder with 5 latent variables is comparable
# to PCA with 10 components.
# The autoencoder seems to do better reconstructing '1', '6' and '7'.
#
# There are plenty of hyperparameters to tune with the autoencoder.
# For example all the other layer sizes and number of layers.
# Indeed we could switch the dense layers out for convolution layers...
#
# Another thought, it should be fairly easy to train a digit classifier on the latent representation of the images.
| 37.037681 | 121 | 0.715761 |
52b4aa4d80d68268e826d7c76459d1ca24597cc0
| 6,249 |
py
|
Python
|
cs/lambda_cs/03_data_structures/lru_cache/lru_cache.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | null | null | null |
cs/lambda_cs/03_data_structures/lru_cache/lru_cache.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | 8 |
2020-03-24T17:47:23.000Z
|
2022-03-12T00:33:21.000Z
|
cs/lambda_cs/03_data_structures/lru_cache/lru_cache.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | null | null | null |
"""
Data Structures :: LRU Cache
"""
class ListNode:
def __init__(self, key, value, prev=None, next=None):
"""A node in a doubly-linked list-based LRU cache.
:param key : Key by which to access nodes.
:param value : Value accessed by key.
:param prev [ListNode] : Previous ListNode in list, defaults to None
:param next [ListNode] : Next ListNode in list, defaults to None
"""
self.key = key
self.value = value
self.prev = prev
self.next = next
def delete(self):
"""Rearranges the node's previous and next pointers
accordingly, effectively deleting it."""
if self.prev:
self.prev.next = self.next
if self.next:
self.next.prev = self.prev
class DoublyLinkedList:
def __init__(self, node=None):
"""Doubly-linked list class that holds references to
the list's head and tail nodes, and list length."""
self.head = node
self.tail = node
self.length = 1 if node is not None else 0
def __len__(self):
"""Returns length of list; for use with the built-in
`len()` function."""
return self.length
def add_to_head(self, key, value):
"""Wraps the given value in a ListNode and inserts it
as the new head of the list."""
new_node = ListNode(key, value)
self.length += 1
if not self.head and not self.tail:
self.head = new_node
self.tail = new_node
else:
new_node.next = self.head
self.head.prev = new_node
self.head = new_node
def remove_from_tail(self):
"""Removes the List's current tail node, making the
current tail's previous node the new tail of the List.
Returns the value of the removed Node.
:return value : Value of the removed Node or None.
"""
if self.tail is not None:
self.delete(self.tail)
def move_to_head(self, node):
"""Removes the input node from its current spot in the
List and inserts it as the new head node of the List.
:param node (ListNode) : Node to be moved to head.
"""
if node is self.head:
return
key, value = node.key, node.value
self.delete(node)
self.add_to_head(key, value)
def delete(self, node):
"""Removes a node from the list and handles cases where
the node was the head or the tail.
:param node (ListNode) : Node to be removed from list.
"""
# TODO: Catch errors if empty or node not in list
self.length -= 1 # Update length
# If head and tail, both get set to None
if self.head is self.tail:
self.head = None
self.tail = None
elif node is self.head: # If head, set current head to next
self.head = self.head.next
node.delete()
elif node is self.tail: # If tail, set current tail to prev
self.tail = self.tail.prev
node.delete()
else: # If regular node, just delete
node.delete()
class LRUCache:
def __init__(self, limit=10):
"""The LRUCache class keeps track of the max number of nodes it
can hold, the current number of nodes it is holding, a doubly-
linked list that holds the key-value entries in the correct
order, as well as a storage dict that provides fast access
to every node stored in the cache.
Head node is most recent. Tail node is oldest.
:param node [ListNode] : Optional initial ListNode.
:param limit [int] : Max number of elements in cache, default 10.
"""
self.limit = limit
self.storage = DoublyLinkedList()
def get(self, key):
"""Retrieves the value associated with the given key.
Moves the key-value pair to the end of the order
such that the pair is considered most-recently used.
Returns the value associated with the key or None if the
key-value pair doesn't exist in the cache.
"""
if len(self.storage) < 1: # In case nothing in cache
return None
node = self.storage.head # Start at the head
while node: # Loop through nodes, looking for key
if node.key == key:
value = node.value # Return value of node
if node is not self.storage.head: # If head, no need to move
self.storage.move_to_head(node)
return value # Returning value implies breaking loop
node = node.next # Iterate
def set(self, key, value):
"""Adds the given key-value pair to the cache.
The newly-added pair is considered the most-recently used
entry in the cache. If the cache is already at max capacity
before this entry is added, then the oldest entry in the
cache is removed to make room. In the case that the key
already exists in the cache, the old value associated with
the key is overwritten by the new value.
"""
# Look for key in cache using `self.get()`
if self.get(key) is not None:
# If exists, the call will relocate it to head position
# Thus, head will only need to be updated with new value
# Length of list does not change; does not need checking
self.storage.head.value = value
else:
# If not exists (returns None), add key-value to head
# Before adding, check length of list
# If length == limit, remove from tail first
if len(self.storage) == self.limit:
self.storage.remove_from_tail()
self.storage.add_to_head(key, value)
# cache = LRUCache(3)
# cache.set("item1", "a")
# cache.set("item2", "b")
# cache.set("item3", "c")
# cache.set("item2", "z")
# cache.set("item1", "a")
# cache.set("item2", "b")
# cache.set("item3", "c")
# cache.get("item1")
# cache.set("item4", "d")
# cache.get("item1")
# cache.get("item3")
# cache.get("item4")
# cache.get("item2")
| 35.106742 | 77 | 0.587774 |
5222fa049d599fb481c2d0979a5845d32fa77c9c
| 4,461 |
py
|
Python
|
Chapter9_AdvancedDL/Chapter9_3_ModelImprovement/bostonDnnFinal.py
|
thisisjako/UdemyTF
|
ee4102391ed6bd50f764955f732f5740425a9209
|
[
"MIT"
] | null | null | null |
Chapter9_AdvancedDL/Chapter9_3_ModelImprovement/bostonDnnFinal.py
|
thisisjako/UdemyTF
|
ee4102391ed6bd50f764955f732f5740425a9209
|
[
"MIT"
] | null | null | null |
Chapter9_AdvancedDL/Chapter9_3_ModelImprovement/bostonDnnFinal.py
|
thisisjako/UdemyTF
|
ee4102391ed6bd50f764955f732f5740425a9209
|
[
"MIT"
] | null | null | null |
import os
from typing import Type
import numpy as np
import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.callbacks import LearningRateScheduler
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Input
from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tf_utils.bostonDataAdvanced import BOSTON
from tf_utils.callbacks import LRTensorBoard
from tf_utils.callbacks import schedule_fn5
np.random.seed(0)
tf.random.set_seed(0)
LOGS_DIR = os.path.abspath("C:/Users/Jan/Dropbox/_Programmieren/UdemyTF/logs/")
if not os.path.exists(LOGS_DIR):
os.mkdir(LOGS_DIR)
def r_squared(y_true: tf.Tensor, y_pred: tf.Tensor) -> tf.Tensor:
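    # R^2 = 1 - sum((y - y_hat)^2) / sum((y - mean(y))^2), clipped to [0, 1]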
error = tf.math.subtract(y_true, y_pred)
squared_error = tf.math.square(error)
numerator = tf.math.reduce_sum(squared_error)
y_true_mean = tf.math.reduce_mean(y_true)
mean_deviation = tf.math.subtract(y_true, y_true_mean)
squared_mean_deviation = tf.math.square(mean_deviation)
denominator = tf.reduce_sum(squared_mean_deviation)
r2 = tf.math.subtract(1.0, tf.math.divide(numerator, denominator))
r2_clipped = tf.clip_by_value(r2, clip_value_min=0.0, clip_value_max=1.0)
return r2_clipped
def build_model(
num_features: int,
num_targets: int,
    optimizer: Type[tf.keras.optimizers.Optimizer],
learning_rate: float,
dense_layer_size1: int,
dense_layer_size2: int,
activation_str: str,
    dropout_rate: float,
use_batch_normalization: bool,
) -> Model:
# Input
input_house = Input(shape=num_features)
# Dense Layer 1
x = Dense(units=dense_layer_size1)(input_house)
if use_batch_normalization:
x = BatchNormalization()(x)
if dropout_rate > 0.0:
x = Dropout(rate=dropout_rate)(x)
if activation_str == "LeakyReLU":
x = LeakyReLU()(x)
else:
x = Activation(activation_str)(x)
# Dense Layer 2
x = Dense(units=dense_layer_size2)(x)
if use_batch_normalization:
x = BatchNormalization()(x)
if dropout_rate > 0.0:
x = Dropout(rate=dropout_rate)(x)
if activation_str == "LeakyReLU":
x = LeakyReLU()(x)
else:
x = Activation(activation_str)(x)
# Output Layer
x = Dense(units=num_targets)(x)
y_pred = Activation("linear")(x)
model = Model(
inputs=[input_house],
outputs=[y_pred]
)
opt = optimizer(learning_rate=learning_rate)
model.compile(
loss="mse",
optimizer=opt,
metrics=[r_squared]
)
model.summary()
return model
if __name__ == "__main__":
"""
Chapter 5: 0.7287 R2
LinReg: 0.7174 R2
Model: 0.8197 R2
"""
data = BOSTON()
train_dataset = data.get_train_set()
val_dataset = data.get_val_set()
test_dataset = data.get_test_set()
num_features = data.num_features
num_targets = data.num_targets
# Global params
epochs = 2_000
batch_size = 64
params = {
"optimizer": Adam,
"learning_rate": 0.001,
"dense_layer_size1": 256,
"dense_layer_size2": 128,
# relu, elu, LeakyReLU
"activation_str": "relu",
# 0.05, 0.1, 0.2
"dropout_rate": 0.00,
# True, False
"use_batch_normalization": True
}
model = build_model(
num_features,
num_targets,
**params
)
model_log_dir = os.path.join(LOGS_DIR, "model_Final_BOSTON")
lr_callback = LRTensorBoard(
log_dir=model_log_dir,
histogram_freq=0,
profile_batch=0,
write_graph=False
)
lrs_callback = LearningRateScheduler(
schedule=schedule_fn5,
verbose=0
)
es_callback = EarlyStopping(
monitor="val_loss",
patience=30,
verbose=2,
restore_best_weights=True,
min_delta=0.0005
)
model.fit(
train_dataset,
verbose=1,
batch_size=batch_size,
epochs=epochs,
callbacks=[lr_callback, lrs_callback, es_callback],
validation_data=val_dataset
)
score = model.evaluate(
test_dataset,
verbose=0,
batch_size=batch_size
)
print(f"Test performance: {score}")
| 25.786127 | 79 | 0.667563 |
52470857cc0f7310b4a4edf3f77a2669a09b8027
| 2,311 |
py
|
Python
|
PINp/2015/DENISENKO_A_N/task_10_27.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
PINp/2015/DENISENKO_A_N/task_10_27.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
PINp/2015/DENISENKO_A_N/task_10_27.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
# Task 10
# Write a "Character generator" program for a game. The user is given
# 30 points to distribute among four attributes: Strength, Health,
# Wisdom and Agility. Make it possible for the user not only to take
# points from the shared pool, but also to return points to it from
# attributes they have decided to re-assign.
# Denisenko A. N.
# 02.06.2016
def print_skills ():
global skills
global znacheniya
for i in range(5):
print (skills[i],znacheniya[i])
def increase_skill ():
global skills
global znacheniya
userSkill = int (input ("Куда накинем очков?\nСила - 1, здоровье - 2, мудрость - 3, ловксоть - 4\n"))
while userSkill not in range (1,5):
userSkill = int (input ("ОШИБОЧНЫЙ ВВОД! Попробуйте заново "))
userNumber = int (input ("Сколько хотите накинуть? "))
while userNumber <= 0 or userNumber > znacheniya[0] :
userNumber = int (input ("ОШИБОЧНЫЙ ВВОД! Попробуйте заново "))
znacheniya[userSkill] += userNumber
znacheniya[0] -= userNumber
print_skills()
def reduce_skill ():
global skills
global znacheniya
userSkill = int (input ("Откуда заберём очки?\nСила - 1, здоровье - 2, мудрость - 3, ловксоть - 4\n"))
while userSkill not in range (1,5) or znacheniya[userSkill] == 0:
userSkill = int (input ("ОШИБОЧНЫЙ ВВОД! Попробуйте заново "))
userNumber = int (input ("Сколько хотите отнять? "))
while userNumber <= 0 or userNumber > znacheniya[userSkill] :
userNumber = int (input ("ОШИБОЧНЫЙ ВВОД! Попробуйте заново "))
znacheniya[userSkill] -= userNumber
znacheniya[0] += userNumber
print_skills()
skills = ("Осталось очков - ",'Сила ','Здоровье ','Мудрость ','Ловксоть ')
znacheniya = [30, 0 , 0 , 0 , 0]
print ("\tЭтому миру нужен новый герой!!!\n\n")
userDecision = 10
print_skills()
while userDecision != 1:
userDecision = int (input ("Ваши действия? 1 - закончить разработку Генриха, 2 - увеличить какой-нибудь скилл, 3 - уменьшить какой-нибудь скилл\n"))
if userDecision == 2:
increase_skill()
if userDecision == 3:
reduce_skill()
print()
input("\n\nДля выхода нажмите ENTER")
| 37.885246 | 154 | 0.658589 |
0dbb4978f0a9567e13d7bc0339ab70c82f5851ec
| 1,720 |
py
|
Python
|
backend/apps/mapview/views.py
|
n-hackert/match4healthcare
|
761248c27b49e568c545c643a72eac9a040649d7
|
[
"MIT"
] | 3 |
2020-03-27T20:39:31.000Z
|
2020-03-31T20:24:55.000Z
|
backend/apps/mapview/views.py
|
n-hackert/match4healthcare
|
761248c27b49e568c545c643a72eac9a040649d7
|
[
"MIT"
] | 21 |
2020-03-28T09:57:15.000Z
|
2020-03-31T11:38:00.000Z
|
backend/apps/mapview/views.py
|
n-hackert/match4healthcare
|
761248c27b49e568c545c643a72eac9a040649d7
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader
from apps.mapview.utils import plzs
from apps.iamstudent.models import Student
from functools import lru_cache
import time
from django.views.decorators.gzip import gzip_page
# Should be safe against BREACH attack because we don't have user input in response body
@gzip_page
def index(request):
locations_and_number = prepare_students(ttl_hash=get_ttl_hash())
template = loader.get_template('mapview/map.html')
context = {
'locations': list(locations_and_number.values()),
}
return HttpResponse(template.render(context, request))
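# lru_cache has no notion of expiry; the ttl_hash argument changes every
# `seconds` interval (see get_ttl_hash below), forcing a cache miss and a
# fresh database query roughly every five minutes.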
@lru_cache(maxsize=1)
def prepare_students(ttl_hash=None):
# Source: https://stackoverflow.com/questions/31771286/python-in-memory-cache-with-time-to-live
del ttl_hash # to emphasize we don't use it and to shut pylint up
students = Student.objects.filter(user__validated_email=True)
locations_and_number = {}
    i = 0
for student in students:
cc = student.countrycode
plz = student.plz
key = cc + "_" + plz
if key in locations_and_number:
            locations_and_number[key]["count"] += 1
else:
lat, lon, ort = plzs[cc][plz]
locations_and_number[key] = {
"countrycode": cc,
"plz": plz,
"count": 1,
"lat": lat,
"lon": lon,
"ort": ort,
"i": i,
}
            i += 1
return locations_and_number
def get_ttl_hash(seconds=300):
"""Return the same value withing `seconds` time period"""
return round(time.time() / seconds)
| 30.714286 | 99 | 0.643023 |
10fca0718d1cb946212f20c12d90e44ce3a6c568
| 2,621 |
py
|
Python
|
Apps/Auswertung/tests/test_data_frame.py
|
RGreinacher/bachelor-thesis
|
60dbc03ce40e3ec42f2538d67a6aabfea6fbbfc8
|
[
"MIT"
] | 1 |
2021-04-13T10:00:46.000Z
|
2021-04-13T10:00:46.000Z
|
Apps/Auswertung/tests/test_data_frame.py
|
RGreinacher/bachelor-thesis
|
60dbc03ce40e3ec42f2538d67a6aabfea6fbbfc8
|
[
"MIT"
] | null | null | null |
Apps/Auswertung/tests/test_data_frame.py
|
RGreinacher/bachelor-thesis
|
60dbc03ce40e3ec42f2538d67a6aabfea6fbbfc8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# import python libs
import json
from pprint import pprint as pp
import sys
sys.path.append('../')
# import project libs
from constants import *
from helper import *
# defining globals & constants
PREANNOTATIONS_START_INDEX = 27
BLOCK_OFFSET = 58
# methods
def check_annotation_corrections(data_frame):
for subject_id, subject_data in enumerate(data_frame):
for block_index in range(0, 4):
(preannotations, corrections) = restructure_data(subject_data, block_index)
for index, number_of_annotations in enumerate(preannotations):
if number_of_annotations != sum(corrections[index]):
print('ERROR FOUND! subject', subject_id, ' block', block_index)
print('sum does not match for class', index)
print('expecting', number_of_annotations, 'annotations, got:', corrections[index])
print('full distribution and answer table for subject:')
pp(preannotations)
pp(corrections)
exit()
print('✓ annotation corrections of subject ID', subject_id, 'are valid')
def restructure_data(subject_data, block_index):
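    # Assumed row layout (inferred from the offsets used here): each block
    # spans BLOCK_OFFSET columns; within a block, every 7th column holds a
    # class's pre-annotation count, followed by its six per-answer
    # correction counts.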
start_index = PREANNOTATIONS_START_INDEX + (BLOCK_OFFSET * block_index)
preannotations = [
subject_data[start_index],
subject_data[start_index + 7],
subject_data[start_index + 14],
subject_data[start_index + 21],
subject_data[start_index + 28],
subject_data[start_index + 35]
]
corrections = [
[],
[],
[],
[],
[],
[]
]
data_offset = (BLOCK_OFFSET * block_index)
for class_index in range(0, 6):
for answer_index in range(0, 6):
answer_count = subject_data[PREANNOTATIONS_START_INDEX + data_offset + 1 + class_index + answer_index]
corrections[class_index].append(answer_count)
data_offset += 6
return (preannotations, corrections)
def check_shape(data_frame):
header_length = len(data_frame[0])
for subject_id in range(0, len(data_frame) - 1):
subject_row = data_frame[subject_id + 1]
current_length = len(subject_row)
if current_length != header_length:
print('ERROR FOUND! row of subject ID', subject_id, 'is', current_length, 'but should be', header_length)
exit()
print('✓ length of subject ID', subject_id, 'is valid')
data = read_json_file('../' + JSON_DATA_FRAME_FILE_NAME)
check_shape(data)
check_annotation_corrections(data)
| 33.177215 | 117 | 0.642503 |
33bc96a6b82d0998e27bd153b054902618cd01d8
| 2,810 |
py
|
Python
|
GZP_GTO_QGIS/INSTALLATION/GeoTaskOrganizer/mActionGTOpoint.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 3 |
2019-06-18T15:28:09.000Z
|
2019-07-11T07:31:45.000Z
|
GZP_GTO_QGIS/INSTALLATION/GeoTaskOrganizer/mActionGTOpoint.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 2 |
2019-07-11T14:03:25.000Z
|
2021-02-08T16:14:04.000Z
|
GZP_GTO_QGIS/INSTALLATION/GeoTaskOrganizer/mActionGTOpoint.py
|
msgis/swwat-gzp-template
|
080afbe9d49fb34ed60ba45654383d9cfca01e24
|
[
"MIT"
] | 1 |
2019-06-12T11:07:37.000Z
|
2019-06-12T11:07:37.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PyQt5.QtWidgets import QToolBar, QWidget, QSizePolicy
from PyQt5.QtCore import QObject, Qt
from .gto_point import GTOPointWidget
class run(QObject): # gtoAction
def __init__(self, id, gtoTool, config, debug):
super(run, self).__init__()
# references
self.debug = debug
self.id = id
self.config = config
self.info = gtoTool.info
try:
self.action = gtoTool.action
self.action.setCheckable(True)
self.gtomain = gtoTool.gtomain
self.helper = self.gtomain.helper
self.metadata = self.gtomain.metadata
self.iface = self.gtomain.iface
self.canvas = self.iface.mapCanvas()
if not self.config.get("is_widgetaction", False):
# tool data
self.toolbar_dock = self.config.get("toolbar_dock", 4)
# widget
self.toolbar = None
# load toolbar
self.objName = "gtoTB_" + gtoTool.action.objectName() + str(id)
self.toolbar = self.gtomain.helper.findToolbar(self.iface, self.objName)
if self.toolbar is None:
if self.debug: self.info.log("load", self.objName)
self.toolbar = QToolBar()
self.toolbar.setObjectName(self.objName)
self.toolbar.setWindowTitle(u'GTO Coordinate')
self.toolbar.setAllowedAreas(Qt.BottomToolBarArea | Qt.TopToolBarArea)
self.iface.mainWindow().addToolBarBreak(self.toolbar_dock)
self.iface.addToolBar(self.toolbar, self.toolbar_dock)
else:
self.toolbar.clear()
self.wid = GTOPointWidget(self.gtomain, self.toolbar)
self.toolbar.addWidget(self.wid)
if self.config.get("spacer", False):
spacer = QWidget()
spacer.setObjectName('spacer')
spacer.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
spacer.setStyleSheet("QWidget{background: transparent;}")
self.toolbar.addWidget(spacer)
self.wid.set_parent_widget(self)
self.wid.isActive.connect(self.set_status) # not always(?) working?
self.wid.setConfig(self.config)
self.wid.added()
self.wid.setMapTool()
self.toolbar.show()
except Exception as e:
self.info.err(e)
def set_status(self, isActive):
try:
self.action.setChecked(isActive)
self.toolbar.setHidden(not isActive)
except Exception as e:
self.info.err(e)
| 40.724638 | 90 | 0.562989 |
d5697ab5e5c29d2672f3bcb9baa216f5d4804748
| 898 |
py
|
Python
|
Backend/models/dashboard.py
|
dbvis-ukon/coronavis
|
f00374ac655c9d68541183d28ede6fe5536581dc
|
[
"Apache-2.0"
] | 15 |
2020-04-24T20:18:11.000Z
|
2022-01-31T21:05:05.000Z
|
Backend/models/dashboard.py
|
dbvis-ukon/coronavis
|
f00374ac655c9d68541183d28ede6fe5536581dc
|
[
"Apache-2.0"
] | 2 |
2021-05-19T07:15:09.000Z
|
2022-03-07T08:29:34.000Z
|
Backend/models/dashboard.py
|
dbvis-ukon/coronavis
|
f00374ac655c9d68541183d28ede6fe5536581dc
|
[
"Apache-2.0"
] | 4 |
2020-04-27T16:20:13.000Z
|
2021-02-23T10:39:42.000Z
|
from datetime import datetime
from tzlocal import get_localzone
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from db import db
class Dashboard(db.Model):
__tablename__ = 'dashboards'
id = db.Column(db.String, primary_key=True, nullable=False, autoincrement=False, unique=True)
dashboard = db.Column(db.JSON, unique=False, nullable=False)
upvotes = db.Column(db.Integer, unique=False, nullable=False, default=0)
visits = db.Column(db.Integer, unique=False, nullable=False, default=0)
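    # Use a callable so the default timestamp is evaluated on each INSERT,
    # not just once at import time.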
    created_at = db.Column(db.TIMESTAMP, nullable=False,
                           default=lambda: datetime.now(tz=get_localzone()))
parent_id = db.Column(db.String, nullable=True)
current: bool = False
class DashboardsSchema(SQLAlchemyAutoSchema):
class Meta:
fields = ("id", "dashboard", "upvotes", "visits", "created_at")
model = Dashboard
dashboard_schema = DashboardsSchema()
| 32.071429 | 98 | 0.737194 |
896e4a74c260f7ec2fdf348d074dca5f5baa04fa
| 1,662 |
py
|
Python
|
Algorithms/Bit_Manipulation/counter_game.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
Algorithms/Bit_Manipulation/counter_game.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
Algorithms/Bit_Manipulation/counter_game.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
for _ in range(int(input())):
N = int(input().strip())
# If they set counter to 1, Richard wins,
# because its Louise' turn and she cannot make a move.
N -= 1
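    # The total number of moves to reach 1 is (popcount(N) - 1) plus the
    # number of trailing zeros of N, which is exactly popcount(N - 1):
    # subtracting 1 clears the lowest set bit and sets all trailing zeros.
    # Louise moves first, so she wins iff that move count is odd.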
b = bin(N)[2:]
b_sum = sum([bn == '1' for bn in b])
if b_sum & 1:
print('Louise')
else:
print('Richard')
# Awesome
# for _ in range(int(input())):
# on = sum([b == '1' for b in bin(int(input().strip()) - 1)[2:]])
# if on & 1:
# print('Louise')
# else:
# print('Richard')
# Use bit operation
'''
def closest_search(power_of_2, lo, hi, N):
while lo < hi:
mid = (lo + hi) // 2
if N < power_of_2[mid]:
hi = mid - 1
elif power_of_2[mid] < N:
lo = mid + 1
else:
return mid
return lo - 1
power_of_2 = [2 ** i for i in range(1, 64)]
for _ in range(int(input())):
N = int(input())
cnt = 0
if N in power_of_2:
while N != 1:
N /= 2
cnt += 1
# print('\t', N, cnt)
else:
while N > 3:
closest_val = closest_search(power_of_2, 0, len(power_of_2) - 1, N)
print(N, power_of_2[closest_val])
N -= power_of_2[closest_val]
cnt += 1
print('\t', N, cnt)
if N in power_of_2:
while N != 1:
N /= 2
cnt += 1
print('\t\t', N, cnt)
while N > 1:
N -= 2
cnt += 1
print('-->> \t\t', N, cnt)
if cnt % 2 == 1:
print('Louise')
else:
print('Richard')
'''
| 22.767123 | 79 | 0.43923 |
89a6dd760067157994139d9c3d93fca3d158f064
| 380 |
py
|
Python
|
30 Days of Code/30DoC-day-16/30DoC_day_16.py
|
nirobio/puzzles
|
fda8c84d8eefd93b40594636fb9b7f0fde02b014
|
[
"MIT"
] | null | null | null |
30 Days of Code/30DoC-day-16/30DoC_day_16.py
|
nirobio/puzzles
|
fda8c84d8eefd93b40594636fb9b7f0fde02b014
|
[
"MIT"
] | null | null | null |
30 Days of Code/30DoC-day-16/30DoC_day_16.py
|
nirobio/puzzles
|
fda8c84d8eefd93b40594636fb9b7f0fde02b014
|
[
"MIT"
] | null | null | null |
#!/bin/python3
import sys
# print parsed integer value of S
# or print("Bad String")
# Note: You must use the String-to-Integer and exception handling constructs built into your submission language. If you attempt to use loops/conditional statements, you will get a 0 score.
S = input().strip()
try:
intS = int(S)
print(intS)
except Exception:
print("Bad String")
| 22.352941 | 188 | 0.718421 |
7f4be777897f4040ca7581001a3c94a9916c59a3
| 4,895 |
py
|
Python
|
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/network/cnos/test_cnos_bgp.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/network/cnos/test_cnos_bgp.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/network/cnos/test_cnos_bgp.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.plugins.modules.network.cnos import cnos_bgp
from ansible_collections.community.general.tests.unit.modules.utils import set_module_args
from ..cnos_module import TestCnosModule, load_fixture
class TestCnosBgpModule(TestCnosModule):
module = cnos_bgp
def setUp(self):
super(TestCnosBgpModule, self).setUp()
self.mock_run_cnos_commands = patch('ansible_collections.community.general.plugins.module_utils.network.cnos.cnos.run_cnos_commands')
self.run_cnos_commands = self.mock_run_cnos_commands.start()
def tearDown(self):
super(TestCnosBgpModule, self).tearDown()
self.mock_run_cnos_commands.stop()
def load_fixtures(self, commands=None, transport='cli'):
self.run_cnos_commands.return_value = [load_fixture('cnos_bgp_config.cfg')]
def test_bgp_neighbor(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'neighbor', 'bgpArg2': '10.241.107.40',
'bgpArg3': '13', 'bgpArg4': 'address-family',
'bgpArg5': 'ipv4', 'bgpArg6': 'next-hop-self'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
def test_cnos_bgp_dampening(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'address-family', 'bgpArg2': 'ipv4',
'bgpArg3': 'dampening', 'bgpArg4': '13',
'bgpArg5': '233', 'bgpArg6': '333',
'bgpArg7': '15', 'bgpArg8': '33'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
def test_cnos_bgp_network(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'address-family', 'bgpArg2': 'ipv4',
'bgpArg3': 'network', 'bgpArg4': '1.2.3.4/5',
'bgpArg5': 'backdoor'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
def test_cnos_bgp_clusterid(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'cluster-id', 'bgpArg2': '10.241.107.40'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
def test_cnos_bgp_graceful_restart(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'graceful-restart', 'bgpArg2': '333'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
def test_cnos_bgp_routerid(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'router-id', 'bgpArg2': '1.2.3.4'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
def test_cnos_bgp_vrf(self):
set_module_args({'username': 'admin', 'password': 'pass',
'host': '10.241.107.39', 'deviceType': 'g8272_cnos',
'outputfile': self.test_log, 'asNum': '33',
'bgpArg1': 'vrf'})
result = self.execute_module(changed=True)
expected_result = 'BGP configurations accomplished'
self.assertEqual(result['msg'], expected_result)
| 50.989583 | 141 | 0.593054 |
c3832d7cabef802cff641b32eac6406948f75ad4
| 791 |
py
|
Python
|
third-party/pybind11/pybind11/__main__.py
|
alskondr/xlnt
|
7f00e91760886204c847fc659bed47234f46f3e6
|
[
"Unlicense"
] | 54 |
2019-01-14T18:38:23.000Z
|
2022-03-31T02:03:23.000Z
|
third-party/pybind11/pybind11/__main__.py
|
alskondr/xlnt
|
7f00e91760886204c847fc659bed47234f46f3e6
|
[
"Unlicense"
] | 8 |
2019-01-13T09:58:04.000Z
|
2021-08-14T12:12:39.000Z
|
third-party/pybind11/pybind11/__main__.py
|
alskondr/xlnt
|
7f00e91760886204c847fc659bed47234f46f3e6
|
[
"Unlicense"
] | 17 |
2019-05-16T06:08:34.000Z
|
2022-03-11T15:18:05.000Z
|
from __future__ import print_function
import argparse
import sys
import sysconfig
from . import get_include
def print_includes():
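    # Collect the Python and pybind11 include directories, skipping
    # duplicates (platinclude usually equals include outside of special
    # platform builds).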
dirs = [sysconfig.get_path('include')]
if sysconfig.get_path('platinclude') not in dirs:
dirs.append(sysconfig.get_path('platinclude'))
if get_include() not in dirs:
dirs.append(get_include())
print(' '.join('-I' + d for d in dirs))
def main():
parser = argparse.ArgumentParser(prog='python -m pybind11')
parser.add_argument('--includes', action='store_true',
help='Include flags for both pybind11 and Python headers.')
args = parser.parse_args()
if not sys.argv[1:]:
parser.print_help()
if args.includes:
print_includes()
if __name__ == '__main__':
main()
| 24.71875 | 83 | 0.65866 |
7f0e0e9de9bf93cf1117f1f94a68c3b56ccb2a78
| 1,765 |
py
|
Python
|
furniture_store/furniture_store/web/migrations/0001_initial.py
|
trenev/softuni-python-web-basics
|
0fcf6b7f3389d06685d40615c376dc4027e772f2
|
[
"MIT"
] | 1 |
2022-03-03T10:16:14.000Z
|
2022-03-03T10:16:14.000Z
|
furniture_store/furniture_store/web/migrations/0001_initial.py
|
trenev/softuni-python-web-basics
|
0fcf6b7f3389d06685d40615c376dc4027e772f2
|
[
"MIT"
] | null | null | null |
furniture_store/furniture_store/web/migrations/0001_initial.py
|
trenev/softuni-python-web-basics
|
0fcf6b7f3389d06685d40615c376dc4027e772f2
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.2 on 2022-02-24 12:28
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import furniture_store.web.validators
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=20)),
('last_name', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='Furniture',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(max_length=30, validators=[django.core.validators.MinLengthValidator(4)])),
('price', models.FloatField(validators=[furniture_store.web.validators.validate_min_price])),
('model', models.CharField(max_length=30, validators=[django.core.validators.MinLengthValidator(4)])),
('image', models.URLField()),
('year', models.IntegerField(validators=[django.core.validators.MinValueValidator(1950), django.core.validators.MaxValueValidator(2050)])),
('material', models.CharField(blank=True, max_length=50, null=True)),
('description', models.CharField(max_length=100, validators=[django.core.validators.MinLengthValidator(10)])),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.profile')),
],
),
]
| 44.125 | 155 | 0.635127 |
9c9b3cb7e0123f8e7b14c555aacaebf8163af36c
| 2,663 |
py
|
Python
|
src/onegov/user/sync.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/user/sync.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/user/sync.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
class UserSource(object):
""" Generalized UserSource to facilitate ldap sync """
def __init__(self, name, bases, org=None, filters=None, user_type=None,
default_filter='(objectClass=*)',
verbose=False):
self.name = name.lower()
self._bases = bases
self.default_filter = default_filter
if filters:
assert len(filters) == len(bases)
self.filters = filters or self.default_filters
self._org = org
self._user_type = user_type
self.verbose = verbose
@staticmethod
def scalar(value, default=''):
if value and isinstance(value, list):
return value[0]
return value or default
@property
def ldap_attributes(self):
raise NotImplementedError
@property
def ldap_mapping(self):
raise NotImplementedError
@property
def organisation(self):
return getattr(self, f'org_{self.name}', self._org)
@property
def bases(self):
return getattr(self, f'bases_{self.name}', self._bases)
def user_type_default(self, entry):
return self._user_type
def user_type(self, entry):
func = getattr(self, f'user_type_{self.name}', None)
return func(entry) if func else self.user_type_default(entry)
def excluded_default(self, entry):
""" Default when no function specific to the source name exists. """
return False
def excluded(self, entry):
""" Finds a specific exclusion function specific to the name or use
the fallback """
func = getattr(self, f'exclude_{self.name}', None)
return func(entry) if func else self.excluded_default(entry)
@property
def default_filters(self):
        return [self.default_filter for _ in self.bases]
@property
def bases_filters_attributes(self):
return tuple(
(b, f, self.ldap_attributes)
for b, f in zip(self.bases, self.filters)
)
def map_entry(self, entry):
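        # `entry` is expected to be an ldap3 search Entry whose attribute
        # dict maps LDAP attribute names to (possibly list-valued) values.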
attrs = entry.entry_attributes_as_dict
user = {
column: self.scalar(attrs.get(attr))
for attr, column in self.ldap_mapping.items()
}
return user
def complete_entry(self, user, **kwargs):
""" Add additional logic after the user is mapped before writing to
the db. """
return user
def map_entries(self, entries, **kwargs):
for e in entries:
if self.excluded(e):
continue
user = self.map_entry(e)
user = self.complete_entry(user, **kwargs)
yield user
| 29.263736 | 76 | 0.609087 |
0d2e49f7696958e6d02efb00e6cf631de10d088b
| 306 |
py
|
Python
|
src/data_science/tests/tools/test_transformations.py
|
viclule/api_models_deployment_framework
|
7595cf0b4f3e277925b968014102d7561547bcd4
|
[
"MIT"
] | null | null | null |
src/data_science/tests/tools/test_transformations.py
|
viclule/api_models_deployment_framework
|
7595cf0b4f3e277925b968014102d7561547bcd4
|
[
"MIT"
] | null | null | null |
src/data_science/tests/tools/test_transformations.py
|
viclule/api_models_deployment_framework
|
7595cf0b4f3e277925b968014102d7561547bcd4
|
[
"MIT"
] | null | null | null |
import unittest
from data_science.tools.transformations import per_hour_to_per_second
class TransformationsTest(unittest.TestCase):
"""Test that the functions in transformations work as expected"""
def test_per_hour_to_per_second(self):
self.assertEqual(per_hour_to_per_second(3600), 1)
| 30.6 | 69 | 0.800654 |
0d401bc9e2630812016d1a71dbadb57a459f43b3
| 140 |
py
|
Python
|
Versuch4/latex/src/sinPlot.py
|
Tobias-Schoch/SSS
|
f8b078ca7f6482fc7c89d5f9e784a549459eefb7
|
[
"MIT"
] | null | null | null |
Versuch4/latex/src/sinPlot.py
|
Tobias-Schoch/SSS
|
f8b078ca7f6482fc7c89d5f9e784a549459eefb7
|
[
"MIT"
] | null | null | null |
Versuch4/latex/src/sinPlot.py
|
Tobias-Schoch/SSS
|
f8b078ca7f6482fc7c89d5f9e784a549459eefb7
|
[
"MIT"
] | 1 |
2022-01-06T12:47:53.000Z
|
2022-01-06T12:47:53.000Z
|
import numpy as np
import matplotlib.pyplot as plt
X = np.linspace(-np.pi, np.pi, 256)
C,S = np.cos(X), np.sin(X)
# plot cosine and sine on one axis and display the figure
fig, ax = plt.subplots()
ax.plot(X, C)
ax.plot(X, S)
plt.show()
| 15.555556 | 35 | 0.621429 |
b4f2d9b18ff0d98da40188e9751b360e8a3ad997
| 250 |
py
|
Python
|
exercises/fr/solution_01_07.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085 |
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/fr/solution_01_07.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79 |
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/fr/solution_01_07.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361 |
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
import spacy
# Load the "fr_core_news_sm" model
nlp = spacy.load("fr_core_news_sm")
text = "Apple a été créée en 1976 par Steve Wozniak, Steve Jobs et Ron Wayne."
# Process the text
doc = nlp(text)
# Print the document's text
print(doc.text)
| 19.230769 | 78 | 0.736 |
2585bb11557ba0225ddcd3a77a31a15631114318
| 434 |
py
|
Python
|
api/wsgi/src/manage.py
|
easyCZ/SLIP-A-2015
|
ad386df9c438d93ec89f68c63d3deda12d27d1ed
|
[
"MIT"
] | 2 |
2020-06-08T19:17:10.000Z
|
2020-06-08T20:45:07.000Z
|
api/wsgi/src/manage.py
|
easyCZ/SLIP-A-2015
|
ad386df9c438d93ec89f68c63d3deda12d27d1ed
|
[
"MIT"
] | 14 |
2015-10-07T09:31:50.000Z
|
2022-02-23T07:34:10.000Z
|
api/wsgi/src/manage.py
|
easyCZ/SLIP-A-2015
|
ad386df9c438d93ec89f68c63d3deda12d27d1ed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# Use local config if not on openshift server
if os.environ.get('SLIP_ENV') == 'local':
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.local")
else:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 27.125 | 75 | 0.721198 |
4cf5cbda98b109cd800545f47f8ecebe6fb83e1a
| 341 |
py
|
Python
|
solution/string/9342/main.py
|
gkgg123/baekjoon
|
4ff8a1238a5809e4958258b5f2eeab7b22105ce9
|
[
"MIT"
] | 2,236 |
2019-08-05T00:36:59.000Z
|
2022-03-31T16:03:53.000Z
|
solution/string/9342/main.py
|
juy4556/baekjoon
|
bc0b0a0ebaa45a5bbd32751f84c458a9cfdd9f92
|
[
"MIT"
] | 225 |
2020-12-17T10:20:45.000Z
|
2022-01-05T17:44:16.000Z
|
solution/string/9342/main.py
|
juy4556/baekjoon
|
bc0b0a0ebaa45a5bbd32751f84c458a9cfdd9f92
|
[
"MIT"
] | 602 |
2019-08-05T00:46:25.000Z
|
2022-03-31T13:38:23.000Z
|
# Authored by : tony9402
# Co-authored by : -
# Link : http://boj.kr/4fcec3ea35e4472bbca8beeebd2fe065
import sys
import re
def input():
return sys.stdin.readline().rstrip()
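# A valid string: one optional leading character from A-F, then one or
# more A's, one or more F's, one or more C's, and one optional trailing
# character from A-F.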
regex = re.compile('^[A-F]?A+F+C+[A-F]?$')
N = int(input())
for testcase in range(N):
line = input()
print("Infected!" if regex.match(line) else "Good")
| 21.3125 | 55 | 0.653959 |
e26fa788d7a2f4d559051b1a1147be8b7c90801f
| 1,045 |
py
|
Python
|
INBa/2015/Cherniy_F_Y/task_7_28.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
INBa/2015/Cherniy_F_Y/task_7_28.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
INBa/2015/Cherniy_F_Y/task_7_28.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
# Task 7. Variant 28.
# 1-50. Develop a scoring system for the game from task 6 in which the
# player earns more points for using fewer attempts.
# Cherniy F. Y.
# 28.03.2016
import random
print("Компьютер загадал название одного из шести континентов Земли, а Вы должны его угадать.\n")
continents = ('Евразия','Африка','Северная Америка','Южная Америка','Австралия','Антарктида')
continent = random.randint(0,5)
x = 0
i = 0
score = 0
#print (continents[0]\n,continents[1]\n,continents[2]\n,continents[3]\n,continents[4]\n,continents[5])
while(x != 6):
print(continents[x])
x += 1
answer = input("\nВведите название континента: ")
while(answer != continents[continent]):
print("Неверно, попробуйте ещё раз.")
answer = input("\nВведите название континента: ")
i += 1
if i == 0:
score = 10
elif 0<i<6:
score = 10 - i*2
else:
score = 0
print("Верно, Вы победили!")
print("Число попыток: "+str(i))
print("Вы заработали "+str(score)+" баллов")
input("\nДля выхода нажмите Enter.")
| 23.75 | 158 | 0.696651 |
2c8862e7f664b00b5d2c87d5e4299825ea4a180d
| 670 |
py
|
Python
|
skills/files.py
|
gidocarper/rhasspy-skills-test
|
eaf57287a67a0e43a299053cf0b107d61afd9b13
|
[
"Unlicense"
] | 4 |
2021-01-08T21:08:50.000Z
|
2021-12-08T15:30:50.000Z
|
skills/files.py
|
gidocarper/rhasspy-skills-test
|
eaf57287a67a0e43a299053cf0b107d61afd9b13
|
[
"Unlicense"
] | null | null | null |
skills/files.py
|
gidocarper/rhasspy-skills-test
|
eaf57287a67a0e43a299053cf0b107d61afd9b13
|
[
"Unlicense"
] | null | null | null |
import json
import io, configparser
def read_configuration_file(config_file_path):
try:
cp = configparser.ConfigParser()
with io.open(config_file_path + "/config.ini", encoding="utf-8") as f:
cp.read_file(f)
return {section: {option_name: option for option_name, option in cp.items(section)}
for section in cp.sections()}
except (IOError, configparser.Error):
return dict()
def read_language_file(language, language_file_path):
    try:
        # Use a context manager so the file is always closed, and catch
        # only the errors that opening/parsing can actually raise.
        with open(language_file_path + '/' + language + '.json', encoding='utf-8') as f:
            return json.load(f)
    except (IOError, ValueError):
        return {}
| 30.454545 | 91 | 0.661194 |
e2d03fbe90c6f90d2d97d2289db521bfe95538e2
| 3,275 |
py
|
Python
|
Co-Simulation/Sumo/sumo-1.7.0/tools/build/version.py
|
uruzahe/carla
|
940c2ab23cce1eda1ef66de35f66b42d40865fb1
|
[
"MIT"
] | 4 |
2020-11-13T02:35:56.000Z
|
2021-03-29T20:15:54.000Z
|
Co-Simulation/Sumo/sumo-1.7.0/tools/build/version.py
|
uruzahe/carla
|
940c2ab23cce1eda1ef66de35f66b42d40865fb1
|
[
"MIT"
] | 9 |
2020-12-09T02:12:39.000Z
|
2021-02-18T00:15:28.000Z
|
Co-Simulation/Sumo/sumo-1.7.0/tools/build/version.py
|
uruzahe/carla
|
940c2ab23cce1eda1ef66de35f66b42d40865fb1
|
[
"MIT"
] | 1 |
2020-11-20T19:31:26.000Z
|
2020-11-20T19:31:26.000Z
|
#!/usr/bin/env python
# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo
# Copyright (C) 2008-2020 German Aerospace Center (DLR) and others.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# https://www.eclipse.org/legal/epl-2.0/
# This Source Code may also be made available under the following Secondary
# Licenses when the conditions for such availability set forth in the Eclipse
# Public License 2.0 are satisfied: GNU General Public License, version 2
# or later which is available at
# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html
# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
# @file version.py
# @author Michael Behrisch
# @author Daniel Krajzewicz
# @author Jakob Erdmann
# @date 2007
"""
This script rebuilds "<BUILD_DIR>/src/version.h", the file which
lets the applications know the version of their build.
It does this by parsing the output of git describe where the function is
implemented in sumolib.
If the version file is newer than the .git index file or the revision cannot be
determined any existing version.h is kept.
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
from os.path import dirname, exists, getmtime, join, abspath
sys.path.append(dirname(dirname(abspath(__file__))))
import sumolib # noqa
SUMO_ROOT = abspath(join(dirname(__file__), '..', '..'))
def get_version(padZero=True):
return sumolib.version.gitDescribe(gitDir=join(SUMO_ROOT, ".git"), padZero=padZero)
def get_pep440_version():
v = get_version(padZero=False)[1:-11].replace("_", ".").replace("+", ".post")
vs = v.split(".")
if len(vs) == 4 and vs[3] == "post0":
return v[:-6]
return v
def create_version_file(versionFile, revision):
with open(versionFile, 'w') as f:
print('#define VERSION_STRING "%s"' % revision, file=f)
def main():
# determine output file
if len(sys.argv) > 1:
versionDir = sys.argv[1]
if sys.argv[1] == "-":
sys.stdout.write(get_version())
return
else:
versionDir = join(SUMO_ROOT, "src")
versionFile = join(versionDir, 'version.h')
vcsFile = join(SUMO_ROOT, ".git", "index")
try:
if exists(vcsFile):
if not exists(versionFile) or getmtime(versionFile) < getmtime(vcsFile):
# vcsFile is newer. lets update the revision number
print('generating %s from revision in %s' % (versionFile, vcsFile))
create_version_file(versionFile, get_version())
else:
print("version control file '%s' not found" % vcsFile)
if not exists(versionFile):
print('trying to generate version file %s from existing header' % versionFile)
create_version_file(versionFile, sumolib.version.fromVersionHeader())
except Exception as e:
print("Error creating", versionFile, e)
try:
# try at least to create something
create_version_file(versionFile, "UNKNOWN")
except Exception as ee:
print("Error creating", versionFile, ee)
pass
if __name__ == "__main__":
main()
| 35.215054 | 90 | 0.677252 |
1a7b0624b57c04d4340b0f6a8ff435cec9d643de
| 218 |
py
|
Python
|
benwaonline/gallery/__init__.py
|
goosechooser/benwaonline
|
e2879412aa6c3c230d25cd60072445165517b6b6
|
[
"MIT"
] | null | null | null |
benwaonline/gallery/__init__.py
|
goosechooser/benwaonline
|
e2879412aa6c3c230d25cd60072445165517b6b6
|
[
"MIT"
] | 16 |
2017-09-13T10:21:40.000Z
|
2020-06-01T04:32:22.000Z
|
benwaonline/gallery/__init__.py
|
goosechooser/benwaonline
|
e2879412aa6c3c230d25cd60072445165517b6b6
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
from flask_uploads import UploadSet, IMAGES
gallery = Blueprint('gallery', __name__, template_folder='templates')
images = UploadSet('images', IMAGES)
from benwaonline.gallery import views
| 31.142857 | 69 | 0.811927 |
1a7c8a85d816212f602c3b4fd210015bcedfaac2
| 129 |
py
|
Python
|
gestionProtocolos_y_Estudios/apps.py
|
AnuTor/UniNeuroLab
|
5825f440d4663650f038083f3da05229cc5ada4f
|
[
"Apache-2.0"
] | 1 |
2021-01-09T01:20:45.000Z
|
2021-01-09T01:20:45.000Z
|
gestionProtocolos_y_Estudios/apps.py
|
AnuTor/UniNeuroLab
|
5825f440d4663650f038083f3da05229cc5ada4f
|
[
"Apache-2.0"
] | 1 |
2021-01-09T00:53:55.000Z
|
2021-01-09T00:53:55.000Z
|
gestionProtocolos_y_Estudios/apps.py
|
AnuTor/UniNeuroLab
|
5825f440d4663650f038083f3da05229cc5ada4f
|
[
"Apache-2.0"
] | 1 |
2021-01-07T23:57:28.000Z
|
2021-01-07T23:57:28.000Z
|
from django.apps import AppConfig
class GestionprotocolosYEstudiosConfig(AppConfig):
name = 'gestionProtocolos_y_Estudios'
| 21.5 | 50 | 0.829457 |
1a9ab9e6777acdfe23072754ca44155bb5143aa5
| 3,829 |
py
|
Python
|
apps/projects/views.py
|
OpenAdaptronik/Rattler
|
c3bdde0ca56b6d77f49bc830fa2b8bb41a26bae4
|
[
"MIT"
] | 2 |
2018-05-18T08:38:29.000Z
|
2018-05-22T08:26:09.000Z
|
apps/projects/views.py
|
IT-PM-OpenAdaptronik/Webapp
|
c3bdde0ca56b6d77f49bc830fa2b8bb41a26bae4
|
[
"MIT"
] | 118 |
2017-10-31T13:45:09.000Z
|
2018-02-24T20:51:42.000Z
|
apps/projects/views.py
|
OpenAdaptronik/Rattler
|
c3bdde0ca56b6d77f49bc830fa2b8bb41a26bae4
|
[
"MIT"
] | null | null | null |
from apps.projects.models import Category, Project, Experiment
from django.shortcuts import render, HttpResponseRedirect
from django.core.exceptions import PermissionDenied
from django.views.generic import FormView, CreateView, ListView, DetailView, UpdateView
from django.contrib.auth.mixins import LoginRequiredMixin
from .forms import ProjectForm, ProjectImageCreateFormSet, ProjectImageFormSet
from django.core import serializers
from django.utils.encoding import uri_to_iri
class NewProject(LoginRequiredMixin, CreateView):
form_class = ProjectForm
template_name = 'projects/project_create.html'
def get_context_data(self, **kwargs):
data = super(NewProject, self).get_context_data(**kwargs)
if self.request.method == 'POST':
data['project_image'] = ProjectImageCreateFormSet(self.request.POST, self.request.FILES, instance=self.object)
else:
data['project_image'] = ProjectImageCreateFormSet(instance=self.object)
return data
def form_valid(self, form):
user = self.request.user
form.instance.user = user
context = self.get_context_data()
project_image = context['project_image']
self.object = form.save()
if project_image.is_valid():
project_image.instance = self.object
project_image.save()
return super(NewProject, self).form_valid(form)
class UpdateProject(LoginRequiredMixin, UpdateView):
model = Project
form_class = ProjectForm
pk_url_kwarg = 'id'
def get(self, request, *args, **kwargs):
self.object = self.get_object()
if not self.object.user == self.request.user and not self.object.visibility:
raise PermissionDenied()
return super(UpdateProject, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
data = super(UpdateProject, self).get_context_data(**kwargs)
if self.request.method == 'POST':
data['project_image'] = ProjectImageFormSet(self.request.POST, self.request.FILES, instance=self.object)
else:
data['project_image'] = ProjectImageFormSet(instance=self.object)
return data
def form_valid(self, form):
context = self.get_context_data()
project_image = context['project_image']
if project_image.is_valid():
project_image.save()
return super(UpdateProject, self).form_valid(form)
class MyProjects(LoginRequiredMixin, ListView):
model = Project
allow_empty = True
paginate_by = 10
def get_queryset(self):
user = self.request.user
return Project.objects.filter(user=user).order_by('created')
class ProjectDetail(DetailView):
model = Project
pk_url_kwarg = 'id'
def get(self, request, *args, **kwargs):
self.object = self.get_object()
if not self.object.user == self.request.user and not self.object.visibility:
raise PermissionDenied()
return super(ProjectDetail, self).get(request, *args, **kwargs)
def categories(request, id=None):
return render(
request,
'projects/categories.html',
{
'categories': Category.objects.allDescandends(parent=id)
}
)
def createExperiment(request, name, id):
if request.method == 'POST':
post_data = request.POST.copy()
description = post_data['description']
new_experiment = Experiment(project_id=id, description=description)
new_experiment.save()
return render(request, 'projects/createExperiment.html')
def delete_project(request, project_id):
Project.objects.get(id=project_id).delete()
return HttpResponseRedirect('/projects/')
| 33.884956 | 122 | 0.693131 |
20108ad36fce0ecd643179591579010442361c7b
| 5,801 |
py
|
Python
|
integrators.py
|
patcher1/numerik
|
ad24c8522d61970a3a881e034a7940d43ba486be
|
[
"BSD-3-Clause"
] | null | null | null |
integrators.py
|
patcher1/numerik
|
ad24c8522d61970a3a881e034a7940d43ba486be
|
[
"BSD-3-Clause"
] | null | null | null |
integrators.py
|
patcher1/numerik
|
ad24c8522d61970a3a881e034a7940d43ba486be
|
[
"BSD-3-Clause"
] | 1 |
2019-10-01T14:36:03.000Z
|
2019-10-01T14:36:03.000Z
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
from helpers import gaussquad
def trapezoid(f, a, b, N):
"""Zusammengesetzte Trapezregel in 1d.
Input:
f : Funktion f(x) welche integiert werden soll.
a, b : untere/obere Grenze des Integrals.
N : Anzahl Teilintervalle in der zusammengesetzten Regel.
"""
# Für geschlossene Quadraturformeln kann man mit einem
# Trick die Anzahlfunktionsaufrufe reduzieren.
# Die Beobachtung ist, dass zwei benachbarte Teilintervalle einen
# gemeinsamen Knoten haben. Anstatt, `f` zweimal für diesen Knoten
# auszurechnen, summiert man einfach die Gewichte:
x, h = np.linspace(a, b, N+1, retstep=True)
return h*(.5*f(a) + sum(f(m) for m in x[1:-1]) + .5*f(b))
def simpson(f, a, b, N):
"""Zusammengesetzte Simpsonregel in 1d.
Input:
f : Funktion f(x) welche integiert werden soll.
a, b : untere/obere Grenze des Integrals.
N : Anzahl Teilintervalle in der zusammengesetzten Regel.
"""
x, h = np.linspace(a, b, N+1, retstep=True)
xm = .5*(x[1:] + x[:-1])
return h/6.0 * (f(a) + 4.0*sum(f(m) for m in xm) + 2.0*sum(f(z) for z in x[1:-1]) + f(b))
def midpoint(f, a, b, N):
"""Zusammengesetzte Mittelpunktsregel in 1d.
Input:
f : Funktion f(x) welche integiert werden soll.
a, b : untere/obere Grenze des Integrals.
N : Anzahl Teilintervalle in der zusammengesetzten Regel.
"""
x, h = np.linspace(a, b, N+1, retstep=True)
return h*sum(f(m) for m in .5*(x[1:] + x[:-1]))
def two_dim(rule, f, a, b, Nx, c, d, Ny):
F = lambda y: rule(lambda x: f(x, y), a, b, Nx)
return rule(F, c, d, Ny)
def gauss_legendre(f, a, b, n):
""" Gauss-Legendre Quadratur (nicht zusammengesetzt).
f: Funktion f(x)
a, b: Obere und untere Grenze des Intervalls.
n: Anzahl Quadraturpunkte.
"""
xs, ws = gaussquad(n) #7.3.3
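    # Affine map of the reference nodes from [-1, 1] to [a, b]; the
    # weights pick up the Jacobian factor (b - a) / 2 in the sum below.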
x = a + (xs + 1.)*(b-a)*.5
return np.sum(.5*(b-a)*ws*f(x))
def composite_legendre(f, a, b, N, n=100):
""" Zusammengesetzte Gauss-Legendre Quadratur.
f: Funktion f(x)
a, b: Obere und untere Grenze des Intervalls.
N: Anzahl Teilintervalle.
n: Anzahl Quadraturpunkte pro Teilintervall.
"""
dx = (b-a)/N
return sum(gauss_legendre(f, a + i*dx, a + (i+1)*dx, n) for i in range(N))
def mcquad(f, a, b, N, d=1):
"""Berechnet das `d`-dimensionale Integral von `f`.
Input:
f : Funktion welche integriert werden soll. Das Argument von `f` ist ein d-dim Array.
a, b : untere/obere Grenzen des Integrals. Bei mehreren Dimensionen können beide d-dimensinale Arrays sein.
d : Anzahl Dimensionen.
N : Anzahl Zufallsvektoren.
Output:
mean : Approximation.
sigma : Standardabweichung. @see 7.6.9
"""
faccepts = 1 # 0: column vectors ([[a],[b]]) or 1: row vectors ([a,b])
x = a + (b-a)*np.random.rand(N, d)
fx = np.array([f(m if faccepts == 1 else m.reshape(d,1)) for m in x]) #could produce errors for some functions
vol = np.abs(np.prod(b-a))
mean = vol*np.sum(fx)/float(N)
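    # Standard error of the MC estimate, computed with the unbiased
    # (N-1) sample-variance estimator (see 7.6.9).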
return mean, np.sqrt((vol**2*np.sum(fx**2)/N - mean**2)/(N-1.))
'''
Adaptive integration. Uses a worse and a better integration method (for example: Simpson and Trapezoid)
to approximate where smaller steps are needed. Of course in the end, the integral will be calculated with the better method.
@param {callable} f - function to integrate
@param {float} a - lower bound
@param {float} b - upper bound
@param {int} N - initial number of intervalls
@param {callable} psilow - a quadrature method
@param {callable} psihigh - a better quadrature method
@param {float} rtol - relative tolerance
@param {float} atol - absolute tolerance
@param {array} ev - initial evaluation points for approximating the integral
@return I - approximated Integral
@return ev - evaluation points
'''
def adaptQuad(f, a, b, N, psilow, psihigh, rtol=1e-5, atol=1e-5, ev=None):
    ev = np.linspace(a, b, N) if ev is None else ev
Il = np.zeros(ev.size - 1)
Ih = np.zeros(ev.size - 1)
for i in range(ev.size - 1):
Il[i] = psilow(f, ev[i], ev[i+1], 1)
Ih[i] = psihigh(f, ev[i], ev[i+1], 1)
I = np.sum(Ih) #We take the better approximation as the Integral
eloc = np.abs(Ih - Il)
eglob = np.sum(eloc)
if eglob > rtol*np.abs(I) and eglob > atol:
midpoints = .5*(ev[:-1] + ev[1:])
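        # Refine every cell whose local error exceeds 90% of the average
        # local error by inserting its midpoint as a new evaluation point.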
refcells = np.nonzero(eloc > .9*np.sum(eloc)/np.size(eloc))[0]
ev = np.sort(np.append(ev, midpoints[refcells]))
I, ev = adaptQuad(f, a, b, N, psilow, psihigh, rtol, atol, ev)
return I, ev
#########
# Tests #
#########
def f1(x):
return 1.0 / (1.0 + 5.0*x**2)
def f2(x):
return np.sqrt(x)
def f2d(x, y):
return 1.0 / np.sqrt((x - 2.0) ** 2 + (y - 2.0) ** 2)
def f3(x):
return 1/(10**(-4)+x**2) #function with extremely large values around 0
if __name__ == "__main__":
    # Test functions
If1ex = np.arctan(np.sqrt(5.0)) / np.sqrt(5.0)
If2ex = 2.0 / 3.0
IF2dex = 1.449394876268660
a, b = -1, 1
N = 10
ev = np.linspace(a, b, N)
I, ev = adaptQuad(f3, a, b, N, trapezoid, simpson, 1e-3, 1e-3)
print("Integral: adapt", I)
print("Integral Simps: ", simpson(f3, a, b, len(ev)))
print("EV-Points: ", ev)
plt.figure()
x = np.linspace(a, b, 100)
plt.plot(x, f3(x), 'b')
plt.plot(ev, f3(ev), 'r^')
plt.show()
#print(composite_legendre(f1, 0.0, 1.0, 128))
#print(If1ex)
#print(If1ex-composite_legendre(f1, 0.0, 1.0, 128))
| 33.923977 | 128 | 0.589036 |
2051453e3e595fd6b58da4437fecfdd63396a13d
| 6,904 |
py
|
Python
|
tests/onegov/swissvotes/test_external_resources.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
tests/onegov/swissvotes/test_external_resources.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
tests/onegov/swissvotes/test_external_resources.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from datetime import date
from decimal import Decimal
from onegov.core.utils import Bunch
from onegov.swissvotes.collections import SwissVoteCollection
from onegov.swissvotes.external_resources import MfgPosters
from onegov.swissvotes.external_resources import SaPosters
from onegov.swissvotes.external_resources.posters import Posters
from pytest import raises
from unittest.mock import MagicMock
from unittest.mock import patch
xml = '''
<object>
<field name="primaryMedia">
<value>{}</value>
</field>
</object>
'''
def test_posters_fetch(swissvotes_app):
session = swissvotes_app.session()
mfg_posters = MfgPosters('xxx')
sa_posters = SaPosters()
assert mfg_posters.fetch(session) == (0, 0, 0, set())
assert sa_posters.fetch(session) == (0, 0, 0, set())
    votes = SwissVoteCollection(swissvotes_app)
kwargs = {
'date': date(1990, 6, 2),
'title_de': "Vote DE",
'title_fr': "Vote FR",
'short_title_de': "V D",
'short_title_fr': "V F",
'_legal_form': 1,
}
vote_1 = votes.add(
id=1,
bfs_number=Decimal('1'),
posters_mfg_yea='1.1 1.2 1.3 1.4',
posters_mfg_nay='',
posters_sa_yea='1.5 1.6 1.7 1.8',
posters_sa_nay='',
**kwargs
)
vote_2 = votes.add(
id=2,
bfs_number=Decimal('2'),
posters_mfg_yea='2.1',
posters_mfg_nay='2.2',
posters_sa_yea='2.3',
posters_sa_nay='2.4',
**kwargs
)
vote_3 = votes.add(
id=3,
bfs_number=Decimal('3'),
posters_mfg_yea='',
posters_mfg_nay='',
posters_sa_yea='',
posters_sa_nay='3.1',
**kwargs
)
with patch(
'onegov.swissvotes.external_resources.posters.get',
return_value=MagicMock(content=xml.format('http://source/xxx'))
):
assert mfg_posters.fetch(session) == (6, 0, 0, set())
assert vote_1.posters_mfg_yea_imgs == {
'1.1': 'https://source/xxx',
'1.2': 'https://source/xxx',
'1.3': 'https://source/xxx',
'1.4': 'https://source/xxx'
}
assert vote_1.posters_mfg_nay_imgs == {}
assert vote_2.posters_mfg_yea_imgs == {'2.1': 'https://source/xxx'}
assert vote_2.posters_mfg_nay_imgs == {'2.2': 'https://source/xxx'}
assert vote_3.posters_mfg_yea_imgs == {}
assert vote_3.posters_mfg_nay_imgs == {}
assert sa_posters.fetch(session) == (7, 0, 0, set())
assert vote_1.posters_sa_yea_imgs == {
'1.5': 'https://source/xxx',
'1.6': 'https://source/xxx',
'1.7': 'https://source/xxx',
'1.8': 'https://source/xxx'
}
assert vote_1.posters_sa_nay_imgs == {}
assert vote_2.posters_sa_yea_imgs == {'2.3': 'https://source/xxx'}
assert vote_2.posters_sa_nay_imgs == {'2.4': 'https://source/xxx'}
assert vote_3.posters_sa_yea_imgs == {}
assert vote_3.posters_sa_nay_imgs == {'3.1': 'https://source/xxx'}
vote_1.posters_mfg_yea = '1.1 1.2' # -2
vote_1.posters_mfg_nay = '1.9 1.10' # +2
vote_1.posters_sa_yea = '1.5 1.6' # -2
vote_1.posters_sa_nay = '1.11 1.12' # +2
vote_3.posters_sa_nay = '' # -1
with patch(
'onegov.swissvotes.external_resources.posters.get',
return_value=MagicMock(content=xml.format('http://source/yyy'))
):
assert mfg_posters.fetch(session) == (2, 4, 2, set())
assert vote_1.posters_mfg_yea_imgs == {
'1.1': 'https://source/yyy',
'1.2': 'https://source/yyy',
}
assert vote_1.posters_mfg_nay_imgs == {
'1.9': 'https://source/yyy',
'1.10': 'https://source/yyy',
}
assert vote_2.posters_mfg_yea_imgs == {'2.1': 'https://source/yyy'}
assert vote_2.posters_mfg_nay_imgs == {'2.2': 'https://source/yyy'}
assert vote_3.posters_mfg_yea_imgs == {}
assert vote_3.posters_mfg_nay_imgs == {}
assert sa_posters.fetch(session) == (2, 4, 3, set())
assert vote_1.posters_sa_yea_imgs == {
'1.5': 'https://source/yyy',
'1.6': 'https://source/yyy',
}
assert vote_1.posters_sa_nay_imgs == {
'1.11': 'https://source/yyy',
'1.12': 'https://source/yyy',
}
assert vote_2.posters_sa_yea_imgs == {'2.3': 'https://source/yyy'}
assert vote_2.posters_sa_nay_imgs == {'2.4': 'https://source/yyy'}
assert vote_3.posters_sa_yea_imgs == {}
assert vote_3.posters_sa_nay_imgs == {}
with patch(
'onegov.swissvotes.external_resources.posters.get',
side_effect=Exception()
):
assert mfg_posters.fetch(session) == (
0, 0, 0, {vote_1.bfs_number, vote_2.bfs_number}
)
assert vote_1.posters_mfg_yea_imgs == {
'1.1': 'https://source/yyy',
'1.2': 'https://source/yyy',
}
assert vote_1.posters_mfg_nay_imgs == {
'1.9': 'https://source/yyy',
'1.10': 'https://source/yyy',
}
assert vote_2.posters_mfg_yea_imgs == {'2.1': 'https://source/yyy'}
assert vote_2.posters_mfg_nay_imgs == {'2.2': 'https://source/yyy'}
assert vote_3.posters_mfg_yea_imgs == {}
assert vote_3.posters_mfg_nay_imgs == {}
assert sa_posters.fetch(session) == (
0, 0, 0, {vote_1.bfs_number, vote_2.bfs_number}
)
assert vote_1.posters_sa_yea_imgs == {
'1.5': 'https://source/yyy',
'1.6': 'https://source/yyy',
}
assert vote_1.posters_sa_nay_imgs == {
'1.11': 'https://source/yyy',
'1.12': 'https://source/yyy',
}
assert vote_2.posters_sa_yea_imgs == {'2.3': 'https://source/yyy'}
assert vote_2.posters_sa_nay_imgs == {'2.4': 'https://source/yyy'}
assert vote_3.posters_sa_yea_imgs == {}
assert vote_3.posters_sa_nay_imgs == {}
def test_posters_meta_data_url():
assert MfgPosters('xxx').meta_data_url('object') == (
'https://www.emuseum.ch/objects/object/xml'
)
assert SaPosters().meta_data_url('object') == (
'https://swissvotes.sozialarchiv.ch/object'
)
def test_posters_parse_xml(session):
class MyPosters(Posters):
def meta_data_url(self, url):
return url
# parse xml
posters = MyPosters()
with raises(Exception):
posters.parse_xml(Bunch(content=None))
with raises(Exception):
posters.parse_xml(Bunch(content=''))
with raises(ValueError):
posters.parse_xml(Bunch(content='<object></object>'))
with raises(ValueError):
posters.parse_xml(Bunch(content=xml.format('')))
assert posters.parse_xml(Bunch(content=xml.format('url'))) == 'url'
| 33.843137 | 75 | 0.584878 |
b3aca463b0a0d41b49ed39456372000c692ddaaf
| 1,489 |
py
|
Python
|
oneflow/python/test/custom_ops/user_sigmoid/user_sigmoid_py_api.py
|
wanghongsheng01/framework_enflame
|
debf613e05e3f5ea8084c3e79b60d0dd9e349526
|
[
"Apache-2.0"
] | 2 |
2021-09-10T00:19:49.000Z
|
2021-11-16T11:27:20.000Z
|
oneflow/python/test/custom_ops/user_sigmoid/user_sigmoid_py_api.py
|
duijiudanggecl/oneflow
|
d2096ae14cf847509394a3b717021e2bd1d72f62
|
[
"Apache-2.0"
] | 1 |
2021-06-16T08:37:50.000Z
|
2021-06-16T08:37:50.000Z
|
oneflow/python/test/custom_ops/user_sigmoid/user_sigmoid_py_api.py
|
duijiudanggecl/oneflow
|
d2096ae14cf847509394a3b717021e2bd1d72f62
|
[
"Apache-2.0"
] | 1 |
2021-11-10T07:57:01.000Z
|
2021-11-10T07:57:01.000Z
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import oneflow as flow
from typing import Union, Tuple, List, Optional, Sequence, Callable
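# Python-side wrappers that bind the custom "user_sigmoid" kernels into
# OneFlow graphs via the user-op builder API.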
def user_sigmoid_forward(x, name: Optional[str] = None):
return (
flow.user_op_builder(
name if name is not None else flow.util.unique_str("UserSigmoidForward_")
)
.Op("user_sigmoid_forward")
.Input("x", [x])
.Output("y")
.Build()
.InferAndTryRun()
.RemoteBlobList()[0]
)
def user_sigmoid_backward(
y, dy, name: Optional[str] = None,
):
return (
flow.user_op_builder(
name if name is not None else flow.util.unique_str("UerSigmoidBackward_")
)
.Op("user_sigmoid_backward")
.Input("y", [y])
.Input("dy", [dy])
.Output("dx")
.Build()
.InferAndTryRun()
.RemoteBlobList()[0]
)
| 28.09434 | 85 | 0.664204 |
374584dcfbfbacf308413d8a39fc3ec3096ac830
| 9,582 |
py
|
Python
|
src/Sephrasto/CharakterInventarWrapper.py
|
Ilaris-Tools/Sephrasto
|
8574a5b45da8ebfa5f69a775066fd3136da1c718
|
[
"MIT"
] | 1 |
2022-02-02T16:15:59.000Z
|
2022-02-02T16:15:59.000Z
|
src/Sephrasto/CharakterInventarWrapper.py
|
Ilaris-Tools/Sephrasto
|
8574a5b45da8ebfa5f69a775066fd3136da1c718
|
[
"MIT"
] | 1 |
2022-01-14T11:04:19.000Z
|
2022-01-14T11:04:19.000Z
|
src/Sephrasto/CharakterInventarWrapper.py
|
lukruh/Sephrasto
|
8574a5b45da8ebfa5f69a775066fd3136da1c718
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 10 17:25:53 2017
@author: Lennart
"""
from Wolke import Wolke
import UI.CharakterInventar
from PyQt5 import QtWidgets, QtCore, QtGui
import Objekte
import Definitionen
from CharakterRuestungPickerWrapper import RuestungPicker
import logging
from Hilfsmethoden import Hilfsmethoden
from EventBus import EventBus
import re
from TextTagCompleter import TextTagCompleter
import copy
class CharakterInventarWrapper(QtCore.QObject):
modified = QtCore.pyqtSignal()
def __init__(self):
super().__init__()
logging.debug("Initializing InventarWrapper...")
self.form = QtWidgets.QWidget()
self.ui = UI.CharakterInventar.Ui_formInventar()
self.ui.setupUi(self.form)
logging.debug("UI Setup...")
palette = QtWidgets.QApplication.instance().palette()
alternateBgStyle = "background-color: " + palette.alternateBase().color().name() + ";"
self.editRName = []
self.spinBE = []
self.spinRS = []
self.spinZRS = []
self.spinPunkte = []
self.addR = []
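        # Wire up the three armour slots: each slot i has a name field, BE and
        # RS spinners, six zone-RS spinners, a points display, and an
        # add/remove button that toggles between '+' and a trash icon.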
for i in range(3):
editRName = getattr(self.ui, "editR" + str(i+1) + "name")
editRName.editingFinished.connect(self.updateRuestungen)
self.editRName.append(editRName)
spinBE = getattr(self.ui, "spinR" + str(i+1) + "be")
spinBE.valueChanged.connect(self.updateRuestungen)
self.spinBE.append(spinBE)
spinRS = getattr(self.ui, "spinR" + str(i+1) + "RS")
spinRS.valueChanged.connect(self.updateRuestungen)
self.spinRS.append(spinRS)
self.spinZRS.append([getattr(self.ui, "spinR" + str(i+1) + "bein"),
getattr(self.ui, "spinR" + str(i+1) + "larm"),
getattr(self.ui, "spinR" + str(i+1) + "rarm"),
getattr(self.ui, "spinR" + str(i+1) + "bauch"),
getattr(self.ui, "spinR" + str(i+1) + "brust"),
getattr(self.ui, "spinR" + str(i+1) + "kopf")])
for spin in self.spinZRS[-1]:
spin.valueChanged.connect(self.updateRuestungen)
self.spinPunkte.append(getattr(self.ui, "spinR" + str(i+1) + "punkte"))
addR = getattr(self.ui, "addR" + str(i+1))
addR.setFont(QtGui.QFont("Font Awesome 6 Free Solid", 9, QtGui.QFont.Black))
addR.setText('\u002b')
addR.setMaximumSize(QtCore.QSize(20, 20))
addR.clicked.connect(lambda state, idx=i: self.selectArmor(idx))
self.addR.append(addR)
logging.debug("Check Toggle...")
self.ui.checkZonen.setChecked(Wolke.Char.zonenSystemNutzen)
self.ui.checkZonen.stateChanged.connect(self.refreshZRSVisibility)
self.inventoryLines = []
for i in range(0,20):
lineEdit = getattr(self.ui, "lineEdit_"+ str(i+1))
lineEdit.editingFinished.connect(self.updateInventory)
self.inventoryLines.append(lineEdit)
if i in [2, 3, 6, 7, 10, 11, 14, 15, 18, 19]:
lineEdit.setStyleSheet(alternateBgStyle)
self.currentlyLoading = False
self.refreshZRSVisibility()
def refreshDerivedArmorValues(self, R, index):
if self.ui.checkZonen.isChecked():
self.spinRS[index].blockSignals(True)
self.spinRS[index].setValue(R.getRSGesamtInt())
self.spinRS[index].blockSignals(False)
else:
for i in range(0, 6):
self.spinZRS[index][i].blockSignals(True)
self.spinZRS[index][i].setValue(R.getRSGesamtInt())
self.spinZRS[index][i].blockSignals(False)
spinPunkte = self.spinPunkte[index]
if sum(R.rs) % 6 != 0:
spinPunkte.setStyleSheet("border: 1px solid orange;")
missingPoints = 6 - sum(R.rs) % 6
if missingPoints == 1:
spinPunkte.setToolTip("Der Rüstung fehlt " + str(missingPoints) + " Punkt ZRS.")
else:
spinPunkte.setToolTip("Der Rüstung fehlen " + str(missingPoints) + " Punkte ZRS.")
else:
spinPunkte.setStyleSheet("")
spinPunkte.setToolTip("")
spinPunkte.setValue(sum(R.rs))
def createRuestung(self, index):
R = Objekte.Ruestung()
R.name = self.editRName[index].text()
R.be = int(self.spinBE[index].value())
if self.ui.checkZonen.isChecked():
for i in range(0, 6):
R.rs[i] = self.spinZRS[index][i].value()
else:
R.rs = 6*[self.spinRS[index].value()]
return R
def updateRuestungen(self):
if self.currentlyLoading:
return
changed = False
ruestungNeu = []
for index in range(3):
R = self.createRuestung(index)
ruestungNeu.append(R)
self.refreshDerivedArmorValues(R, index)
if R == Objekte.Ruestung():
self.addR[index].setText('\u002b')
else:
self.addR[index].setText('\uf2ed')
if not Hilfsmethoden.ArrayEqual(ruestungNeu, Wolke.Char.rüstung):
changed = True
Wolke.Char.rüstung = ruestungNeu
if Wolke.Char.zonenSystemNutzen != self.ui.checkZonen.isChecked():
Wolke.Char.zonenSystemNutzen = self.ui.checkZonen.isChecked()
changed = True
if changed:
self.modified.emit()
def load(self):
self.currentlyLoading = True
# Load in Armor
for index in range(len(Wolke.Char.rüstung)):
R = Wolke.Char.rüstung[index]
if index < 3:
self.loadArmorIntoFields(R, index, True)
# Load in inventory
count = 0
for el in Wolke.Char.ausrüstung:
self.inventoryLines[count].setText(el)
count += 1
if count >= 20:
break
self.currentlyLoading = False
def updateInventory(self):
# Update inventory
ausruestungNeu = []
for i in range(0,20):
ausruestungNeu.append(self.inventoryLines[i].text())
#Preserve the position of actual elements but remove any trailing empty elements
#This is needed for ArrayEqual later to work as intended
for ausr in reversed(ausruestungNeu):
if ausr == "":
ausruestungNeu.pop()
else:
break
if not Hilfsmethoden.ArrayEqual(ausruestungNeu, Wolke.Char.ausrüstung):
Wolke.Char.ausrüstung = ausruestungNeu
self.modified.emit()
def loadArmorIntoFields(self, R, index, replace):
if replace or self.editRName[index].text() == "":
self.editRName[index].setText(R.name)
else:
self.editRName[index].setText(self.editRName[index].text() + ", " + R.name)
if not replace:
for i in range(0, 6):
R.rs[i] += self.spinZRS[index][i].value()
beDelta = self.spinBE[index].value() - self.spinRS[index].value()
R.be = R.getRSGesamtInt() + beDelta
for i in range(0, 6):
if self.ui.checkZonen.isChecked():
self.spinZRS[index][i].setValue(R.rs[i])
else:
self.spinZRS[index][i].setValue(R.getRSGesamtInt())
self.spinBE[index].setValue(EventBus.applyFilter("ruestung_be", R.be, { "name" : R.name }))
self.spinRS[index].setValue(R.getRSGesamtInt())
self.refreshDerivedArmorValues(R, index)
if R == Objekte.Ruestung():
self.addR[index].setText('\u002b')
else:
self.addR[index].setText('\uf2ed')
def selectArmor(self, index):
        if index >= len(Wolke.Char.rüstung) or Wolke.Char.rüstung[index] == Objekte.Ruestung():
logging.debug("Starting RuestungPicker")
pickerClass = EventBus.applyFilter("class_ruestungspicker_wrapper", RuestungPicker)
picker = pickerClass(self.editRName[index].text(), 2 if self.ui.checkZonen.isChecked() else 1)
logging.debug("RuestungPicker created")
if picker.ruestung is not None:
self.currentlyLoading = True
self.loadArmorIntoFields(picker.ruestung, index, picker.ruestungErsetzen)
self.currentlyLoading = False
self.updateRuestungen()
else:
self.currentlyLoading = True
self.loadArmorIntoFields(Objekte.Ruestung(), index, True)
self.currentlyLoading = False
self.updateRuestungen()
def refreshZRSVisibility(self):
if self.currentlyLoading:
return
self.currentlyLoading = True
labels = [self.ui.labelBein, self.ui.labelBauch, self.ui.labelBrust, self.ui.labelLarm, self.ui.labelRarm, self.ui.labelKopf, self.ui.labelPunkte]
if self.ui.checkZonen.isChecked():
for index in range(3):
for j in range(6):
self.spinZRS[index][j].show()
self.spinPunkte[index].show()
self.spinRS[index].setEnabled(False)
for label in labels:
label.show()
else:
for index in range(3):
for j in range(6):
self.spinZRS[index][j].hide()
self.spinPunkte[index].hide()
self.spinRS[index].setEnabled(True)
for label in labels:
label.hide()
self.currentlyLoading = False
| 37.429688 | 154 | 0.582551 |
3798dec12ccebabee92bafb450ba38c1bfdbf844
| 44,839 |
py
|
Python
|
Packs/CrowdStrikeFalconX/Integrations/CrowdStrikeFalconX/TestsInput/context.py
|
jrauen/content
|
81a92be1cbb053a5f26a6f325eff3afc0ca840e0
|
[
"MIT"
] | 1 |
2021-11-02T05:36:38.000Z
|
2021-11-02T05:36:38.000Z
|
Packs/CrowdStrikeFalconX/Integrations/CrowdStrikeFalconX/TestsInput/context.py
|
jrauen/content
|
81a92be1cbb053a5f26a6f325eff3afc0ca840e0
|
[
"MIT"
] | 61 |
2021-10-07T08:54:38.000Z
|
2022-03-31T10:25:35.000Z
|
Packs/CrowdStrikeFalconX/Integrations/CrowdStrikeFalconX/TestsInput/context.py
|
jrauen/content
|
81a92be1cbb053a5f26a6f325eff3afc0ca840e0
|
[
"MIT"
] | 1 |
2021-12-20T15:12:22.000Z
|
2021-12-20T15:12:22.000Z
|
from CommonServerPython import DemistoException
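# Expected entry-context fixtures for the CrowdStrike Falcon X integration
# unit tests; field values such as 'sha256' and 'id' are placeholders.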
SEND_UPLOADED_FILE_TO_SENDBOX_ANALYSIS_CONTEXT = {
'submitted_id': 'id',
'state': 'created',
'created_timestamp': '2020-05-12T15:34:11Z',
'environment_id': 160,
'sha256': 'sha256'
}
SEND_URL_TO_SANDBOX_ANALYSIS_CONTEXT = {
'submitted_id': 'id',
'state': 'created',
'created_timestamp': '2020-05-12T16:40:52Z',
'environment_id': 160
}
GET_REPORT_SUMMARY_CONTEXT = {
'File(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 '
'|| val.SHA256 && val.SHA256 == obj.SHA256 || val.SHA512 && val.SHA512 == obj.SHA512 '
'|| val.CRC32 && val.CRC32 == obj.CRC32 || val.CTPH && val.CTPH == obj.CTPH'
' || val.SSDeep && val.SSDeep == obj.SSDeep)': [
{'SHA256': 'sha256', 'Company': 'Microsoft Corporation', 'ProductName': 'Microsoft Windows Operating System',
'Signature': {'Authentihash': '', 'Copyright': 'Microsoft Corporation. All rights reserved.',
'Description': 'Microsoft Smartcard Certificate Propagation Service',
'FileVersion': '10.0.19041.844 (WinBuild.160101.0800)', 'InternalName': 'certprop.dll',
'OriginalName': 'certprop.dll'},
'Hashes': [{'type': 'SHA256', 'value': 'sha256'}]
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{'Indicator': 'sha256', 'Type': 'file', 'Vendor': '', 'Score': 2, 'Reliability': 'B - Usually reliable'}],
'csfalconx.resource(val.id && val.id == obj.id)': {
'environment_description': 'Windows 10 64 bit',
'environment_id': 160, 'sha256': 'sha256',
'submission_type': 'page_url',
'submit_url': 'hxxps://www.google.com', 'threat_score': 13,
'created_timestamp': '2020-03-16T17:04:48Z', 'id': 'id',
'ioc_report_broad_csv_artifact_id': 'ioc_report_broad_csv_artifact_id',
'ioc_report_broad_json_artifact_id': 'ioc_report_broad_json_artifact_id',
'ioc_report_broad_maec_artifact_id': 'ioc_report_broad_maec_artifact_id',
'ioc_report_broad_stix_artifact_id': 'ioc_report_broad_stix_artifact_id',
'ioc_report_strict_csv_artifact_id': 'ioc_report_strict_csv_artifact_id',
'ioc_report_strict_json_artifact_id': 'ioc_report_strict_json_artifact_id',
'ioc_report_strict_maec_artifact_id': 'ioc_report_strict_maec_artifact_id',
'ioc_report_strict_stix_artifact_id': 'ioc_report_strict_stix_artifact_id',
'verdict': 'suspicious'}}
GET_ANALYSIS_STATUS_CONTEXT = {
'csfalconx.resource(val.id && val.id == obj.id)':
{
'id': 'id',
'state': 'success',
'created_timestamp': '2020-03-16T17:04:48Z',
'environment_id': 160
}
}
CHECK_QUOTA_STATUS_CONTEXT = {
'csfalconx.resource(val.id && val.id == obj.id)':
{
'total': 100,
'used': 47,
'in_progress': 2
}
}
FIND_SANDBOX_REPORTS_CONTEXT = {
'csfalconx.resource(val.id && val.id == obj.id)':
{
'resources': ['resources1', 'resources2', 'resources3', 'resources4']
}
}
FIND_SANDBOX_REPORTS_HASH_CONTEXT = {
'csfalconx.resource(val.id && val.id == obj.id)': {
'resources': ['resources1', 'resources2', 'resources3', 'resources4'],
'FindReport': [{'sha256': 'hash1', 'reportIds': ['resources1', 'resources2', 'resources3', 'resources4']}]
}
}
FIND_SANDBOX_REPORTS_NOT_FOUND_HASH_CONTEXT = {
'csfalconx.resource(val.id && val.id == obj.id)': {
'resources': [],
'FindReport': [{'sha256': 'hash1', 'reportIds': []}]
}
}
FIND_SUBMISSION_ID_CONTEXT = {
'csfalconx.resource(val.id && val.id == obj.id)':
{
'resources': ['resources1', 'resources2', 'resources3', 'resources4']
}
}
GET_FULL_REPORT_CONTEXT_EXTENDED = {'environment_description': 'Windows 10 64 bit', 'environment_id': 160,
'sha256': 'sha256', 'submission_type': 'page_url',
'submit_url': 'hxxps://www.google.com', 'threat_score': 13,
'architecture': 'WINDOWS', 'classification': ['91.6% (.URL) Windows URL shortcut',
'8.3% (.INI) Generic INI configuration'],
'contacted_hosts': [{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States', 'port': 80, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'}],
'dns_requests': [{'address': '111.111.1.1', 'country': 'United States',
'domain': 'googleads.g.doubleclick.net',
'registrar_creation_timestamp': '1996-01-16T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'},
{'address': '172.217.7.163', 'country': 'United States',
'domain': 'domain'},
{'address': '111.27.12.67', 'country': 'United States',
'domain': 'ssl.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'Google Inc.'},
{'address': '172.217.14.163', 'country': 'United States',
'domain': 'www.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'}],
'http_requests': [
{'header': 'header', 'host': 'host', 'host_ip': '111.27.12.67', 'host_port': 80,
'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'host', 'host_ip': '111.27.12.67', 'host_port': 80,
'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET', 'url': 'url'}], 'incidents': [
{'details': ['Contacts 4 domains and 4 hosts'], 'name': 'Network Behavior'}], 'processes': [
{'command_line': 'command_line', 'icon_artifact_id': 'icon_artifact_id', 'name': 'rundll32.exe',
'normalized_path': 'normalized_path.exe', 'pid': 6648, 'process_flags': [{'name': 'Reduced Monitoring'}],
'sha256': 'sha256', 'uid': '00074182-00006648'}], 'screenshots_artifact_ids': ['screenshots_artifact_ids1',
'screenshots_artifact_ids2',
'screenshots_artifact_ids3',
'screenshots_artifact_ids4'],
'signatures': [
{'category': 'General', 'description': 'description', 'identifier': 'network-0',
'name': 'Contacts domains', 'origin': 'Network Traffic', 'relevance': 1,
'threat_level_human': 'informative', 'type': 7},
{'category': 'General', 'description': 'description', 'identifier': 'network-1',
'name': 'Contacts server', 'origin': 'Network Traffic', 'relevance': 1,
'threat_level_human': 'informative', 'type': 7},
{'category': 'Network Related', 'description': 'description',
'identifier': 'string-3', 'name': 'Found potential URL in binary/memory',
'origin': 'String', 'relevance': 10, 'threat_level_human': 'informative',
'type': 2}, {'category': 'External Systems', 'description': 'description',
'identifier': 'suricata-0', 'name': 'Detected Suricata Alert',
'origin': 'Suricata Alerts', 'relevance': 10,
'threat_level_human': 'informative', 'type': 18},
{'category': 'Ransomware/Banking', 'description': 'description',
'identifier': 'string-12',
'name': 'Detected text artifact in screenshot that indicate file could be ransomware',
'origin': 'String', 'relevance': 10, 'threat_level': 1,
'threat_level_human': 'suspicious', 'type': 2},
{'category': 'Network Related', 'description': 'description',
'identifier': 'network-23',
'name': 'Sends traffic on typical HTTP outbound port, but without HTTP header',
'origin': 'Network Traffic', 'relevance': 5, 'threat_level': 1,
'threat_level_human': 'suspicious', 'type': 7}],
'created_timestamp': '2020-03-16T17:04:48Z', 'id': 'id',
'ioc_report_broad_csv_artifact_id': 'ioc_report_broad_csv_artifact_id',
'ioc_report_broad_json_artifact_id': 'ioc_report_broad_json_artifact_id',
'ioc_report_broad_maec_artifact_id': 'ioc_report_broad_maec_artifact_id',
'ioc_report_broad_stix_artifact_id': 'ioc_report_broad_stix_artifact_id',
'ioc_report_strict_csv_artifact_id': 'ioc_report_strict_csv_artifact_id',
'ioc_report_strict_json_artifact_id': 'ioc_report_strict_json_artifact_id',
'ioc_report_strict_maec_artifact_id': 'ioc_report_strict_maec_artifact_id',
'ioc_report_strict_stix_artifact_id': 'ioc_report_strict_stix_artifact_id',
'verdict': 'no specific threat',
'sandbox': {'architecture': 'WINDOWS',
'classification': [
'91.6% (.URL) Windows URL shortcut',
'8.3% (.INI) Generic INI configuration'],
'contacted_hosts': [
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States',
'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States',
'port': 80, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe', 'pid': 6428}],
'country': 'United States',
'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe', 'pid': 6428}],
'country': 'United States',
'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe', 'pid': 6428}],
'country': 'United States',
'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe', 'pid': 6428}],
'country': 'United States',
'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67', 'associated_runtime': [
{'name': 'name.exe',
'pid': 6428}],
'country': 'United States',
'port': 443, 'protocol': 'TCP'}],
'dns_requests': [
{'address': '111.111.1.1',
'country': 'United States',
'domain': 'googleads.g.doubleclick.net',
'registrar_creation_timestamp': '1996-01-16T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'},
{'address': '172.217.7.163',
'country': 'United States',
'domain': 'domain'},
{'address': '111.27.12.67',
'country': 'United States',
'domain': 'ssl.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'Google Inc.'},
{'address': '172.217.14.163',
'country': 'United States',
'domain': 'www.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'}],
'http_requests': [
{'header': 'header',
'host': 'host',
'host_ip': '111.27.12.67',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'host',
'host_ip': '111.27.12.67',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'ocsp.pki.goog',
'host_ip': '172.217.7.163',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'ocsp.pki.goog',
'host_ip': '172.217.7.163',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'ocsp.pki.goog',
'host_ip': '172.217.7.163',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'ocsp.pki.goog',
'host_ip': '172.217.7.163',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'ocsp.pki.goog',
'host_ip': '172.217.7.163',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header',
'host': 'ocsp.pki.goog',
'host_ip': '172.217.7.163',
'host_port': 80,
'method': 'GET',
'url': 'url'}],
'incidents': [{'details': [
'Contacts 4 domains and 4 hosts'],
'name': 'Network Behavior'}],
'processes': [
{'command_line': 'command_line',
'icon_artifact_id': 'icon_artifact_id',
'name': 'rundll32.exe',
'normalized_path': 'normalized_path.exe',
'pid': 6648, 'process_flags': [{
'name': 'Reduced Monitoring'}],
'sha256': 'sha256',
'uid': '00074182-00006648'}],
'screenshots_artifact_ids': [
'screenshots_artifact_ids1',
'screenshots_artifact_ids2',
'screenshots_artifact_ids3',
'screenshots_artifact_ids4'],
'signatures': [{'category': 'General',
'description': 'description',
'identifier': 'network-0',
'name': 'Contacts domains',
'origin': 'Network Traffic',
'relevance': 1,
'threat_level_human': 'informative',
'type': 7},
{'category': 'General',
'description': 'description',
'identifier': 'network-1',
'name': 'Contacts server',
'origin': 'Network Traffic',
'relevance': 1,
'threat_level_human': 'informative',
'type': 7}, {
'category': 'Network Related',
'description': 'description',
'identifier': 'string-3',
'name': 'Found potential URL in binary/memory',
'origin': 'String',
'relevance': 10,
'threat_level_human': 'informative',
'type': 2}, {
'category': 'External Systems',
'description': 'description',
'identifier': 'suricata-0',
'name': 'Detected Suricata Alert',
'origin': 'Suricata Alerts',
'relevance': 10,
'threat_level_human': 'informative',
'type': 18}, {
'category': 'Ransomware/Banking',
'description': 'description',
'identifier': 'string-12',
'name': 'Detected text artifact in screenshot that indicate file could be ransomware',
'origin': 'String',
'relevance': 10,
'threat_level': 1,
'threat_level_human': 'suspicious',
'type': 2}, {
'category': 'Network Related',
'description': 'description',
'identifier': 'network-23',
'name': 'Sends traffic on typical HTTP outbound port, but without HTTP header',
'origin': 'Network Traffic',
'relevance': 5,
'threat_level': 1,
'threat_level_human': 'suspicious',
'type': 7}]}}
GET_FULL_REPORT_CONTEXT = {'environment_description': 'Windows 10 64 bit', 'environment_id': 160, 'sha256': 'sha256',
'submission_type': 'page_url', 'submit_url': 'hxxps://www.google.com', 'threat_score': 13,
'architecture': 'WINDOWS',
'classification': ['91.6% (.URL) Windows URL shortcut',
'8.3% (.INI) Generic INI configuration'],
'contacted_hosts': [{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [
{'name': 'name.exe',
'pid': 6428},
{'name': 'name.exe',
'pid': 9372}],
'country': 'United States',
'port': 80, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'}],
'dns_requests': [
{'address': '111.111.1.1', 'country': 'United States',
'domain': 'googleads.g.doubleclick.net',
'registrar_creation_timestamp': '1996-01-16T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'},
{'address': '172.217.7.163', 'country': 'United States', 'domain': 'domain'},
{'address': '111.27.12.67', 'country': 'United States', 'domain': 'ssl.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'Google Inc.'},
{'address': '172.217.14.163', 'country': 'United States', 'domain': 'www.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'}],
'http_requests': [
{'header': 'header', 'host': 'host', 'host_ip': '111.27.12.67', 'host_port': 80,
'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'host', 'host_ip': '111.27.12.67', 'host_port': 80,
'method': 'GET', 'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'}],
'incidents': [{'details': ['Contacts 4 domains and 4 hosts'], 'name': 'Network Behavior'}],
'processes': [{'command_line': 'command_line', 'icon_artifact_id': 'icon_artifact_id',
'name': 'rundll32.exe',
'normalized_path': 'normalized_path.exe', 'pid': 6648,
'process_flags': [{'name': 'Reduced Monitoring'}], 'sha256': 'sha256',
'uid': '00074182-00006648'}],
'screenshots_artifact_ids': ['screenshots_artifact_ids1', 'screenshots_artifact_ids2',
'screenshots_artifact_ids3',
'screenshots_artifact_ids4'],
'created_timestamp': '2020-03-16T17:04:48Z', 'id': 'id',
'ioc_report_broad_csv_artifact_id': 'ioc_report_broad_csv_artifact_id',
'ioc_report_broad_json_artifact_id': 'ioc_report_broad_json_artifact_id',
'ioc_report_broad_maec_artifact_id': 'ioc_report_broad_maec_artifact_id',
'ioc_report_broad_stix_artifact_id': 'ioc_report_broad_stix_artifact_id',
'ioc_report_strict_csv_artifact_id': 'ioc_report_strict_csv_artifact_id',
'ioc_report_strict_json_artifact_id': 'ioc_report_strict_json_artifact_id',
'ioc_report_strict_maec_artifact_id': 'ioc_report_strict_maec_artifact_id',
'ioc_report_strict_stix_artifact_id': 'ioc_report_strict_stix_artifact_id',
'verdict': 'no specific threat',
'sandbox': {'architecture': 'WINDOWS',
'classification': ['91.6% (.URL) Windows URL shortcut',
'8.3% (.INI) Generic INI configuration'],
'contacted_hosts': [
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428},
{'name': 'name.exe', 'pid': 9372}],
'country': 'United States', 'port': 80, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443, 'protocol': 'TCP'},
{'address': '111.27.12.67',
'associated_runtime': [{'name': 'name.exe', 'pid': 6428}],
'country': 'United States', 'port': 443,
'protocol': 'TCP'}],
'dns_requests': [
{'address': '111.111.1.1', 'country': 'United States',
'domain': 'googleads.g.doubleclick.net',
'registrar_creation_timestamp': '1996-01-16T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'},
{'address': '172.217.7.163', 'country': 'United States', 'domain': 'domain'},
{'address': '111.27.12.67', 'country': 'United States',
'domain': 'ssl.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'Google Inc.'},
{'address': '172.217.14.163', 'country': 'United States',
'domain': 'www.gstatic.com',
'registrar_creation_timestamp': '2008-02-11T00:00:00+00:00',
'registrar_name': 'registrar_name',
'registrar_organization': 'registrar_organization'}],
'http_requests': [
{'header': 'header', 'host': 'host', 'host_ip': '111.27.12.67',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'host', 'host_ip': '111.27.12.67',
'host_port': 80,
'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'},
{'header': 'header', 'host': 'ocsp.pki.goog', 'host_ip': '172.217.7.163',
'host_port': 80, 'method': 'GET',
'url': 'url'}],
'incidents': [
{'details': ['Contacts 4 domains and 4 hosts'], 'name': 'Network Behavior'}],
'processes': [
{'command_line': 'command_line', 'icon_artifact_id': 'icon_artifact_id',
'name': 'rundll32.exe',
'normalized_path': 'normalized_path.exe', 'pid': 6648,
'process_flags': [{'name': 'Reduced Monitoring'}], 'sha256': 'sha256',
'uid': '00074182-00006648'}],
'screenshots_artifact_ids': ['screenshots_artifact_ids1',
'screenshots_artifact_ids2',
'screenshots_artifact_ids3',
'screenshots_artifact_ids4']}}
MULTIPLE_ERRORS_RESULT = DemistoException(
'403: access denied, authorization failed\n401: test error #1\n402: test error #2')
| 81.083183 | 153 | 0.336024 |
80a11c4022047b43ecf36e488ae24d5a204e8bc5
| 330 |
py
|
Python
|
scripts/hello_world_again.py
|
breezage/Hacktoberfest-1
|
6f6d52248c79c0e72fd13b599500318fce3f9ab0
|
[
"MIT"
] | null | null | null |
scripts/hello_world_again.py
|
breezage/Hacktoberfest-1
|
6f6d52248c79c0e72fd13b599500318fce3f9ab0
|
[
"MIT"
] | null | null | null |
scripts/hello_world_again.py
|
breezage/Hacktoberfest-1
|
6f6d52248c79c0e72fd13b599500318fce3f9ab0
|
[
"MIT"
] | 1 |
2019-10-24T06:45:21.000Z
|
2019-10-24T06:45:21.000Z
|
# LANGUAGE: Python 3
# AUTHOR: Luiz Devitte
# GitHub: https://github.com/LuizDevitte
def greetings(name):
print('\n')
print('Hello, World!')
print('And Hello, {}!'.format(name))
print('\n')
return 0
def main():
name = input('Hey, who are you? ')
greetings(name)
if __name__ == '__main__':
main()
| 17.368421 | 40 | 0.60303 |
20ae05343a45a8ac865897b657d0cd1513a3782e
| 205 |
py
|
Python
|
Curso_Python/Secao4-Python-introducao-a-programacao-orientada-a-objetos-POO/104_heranca_multipla/main.py
|
pedrohd21/Cursos-Feitos
|
b223aad83867bfa45ad161d133e33c2c200d42bd
|
[
"MIT"
] | null | null | null |
Curso_Python/Secao4-Python-introducao-a-programacao-orientada-a-objetos-POO/104_heranca_multipla/main.py
|
pedrohd21/Cursos-Feitos
|
b223aad83867bfa45ad161d133e33c2c200d42bd
|
[
"MIT"
] | null | null | null |
Curso_Python/Secao4-Python-introducao-a-programacao-orientada-a-objetos-POO/104_heranca_multipla/main.py
|
pedrohd21/Cursos-Feitos
|
b223aad83867bfa45ad161d133e33c2c200d42bd
|
[
"MIT"
] | null | null | null |
from smartphone import Smartphone
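# Multiple-inheritance demo: Smartphone presumably mixes ligar() and the
# connect/disconnect behaviour in from its parent classes in smartphone.py
# ("conetar" is the method name as spelled there).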
smartphone = Smartphone('IPhone')
smartphone.ligar()
smartphone.conetar()
smartphone.conetar()
smartphone.desconectar()
smartphone.desconectar()
smartphone.desconectar()
| 20.5 | 33 | 0.819512 |
20b486d58a13b3d9c2200ea6bcb78acffc9e62a6
| 732 |
py
|
Python
|
06.BinarySearch/HG/B2805.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | 1 |
2021-11-21T06:03:06.000Z
|
2021-11-21T06:03:06.000Z
|
06.BinarySearch/HG/B2805.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | 2 |
2021-10-13T07:21:09.000Z
|
2021-11-14T13:53:08.000Z
|
06.BinarySearch/HG/B2805.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | null | null | null |
import sys
N, M = map(int, sys.stdin.readline().split())
trees = list(map(int, sys.stdin.readline().split()))
Top = max(trees)
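# Binary-search the sawblade height: for a candidate height mid, sum the wood
# cut from every tree taller than mid and keep the largest height that still
# yields at least M metres of wood.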
def bs(trees, value, low, high):
result = 0
while True:
        if low > high:
            break
        mid = (low + high) // 2
h = []
for item in trees:
v = item - mid
if v < 0:
h.append(0)
else:
h.append(v)
# print(mid, h)
H = sum(h)
if H > value:
result = mid
low = mid+1
elif H < value :
high = mid-1
else:
result = mid
break
print(result)
return result
bs(trees, M, 0, Top)
| 20.333333 | 52 | 0.419399 |
4586d2e82e9e8e9837283f223f5188489487655e
| 2,194 |
py
|
Python
|
Sample/DB.py
|
BlaCkinkGJ/SFSH
|
0134f1e4698ef34caee2d5a8cd875c51507b3527
|
[
"MIT"
] | 1 |
2019-02-28T08:39:55.000Z
|
2019-02-28T08:39:55.000Z
|
Sample/DB.py
|
BlaCkinkGJ/SFSH
|
0134f1e4698ef34caee2d5a8cd875c51507b3527
|
[
"MIT"
] | 5 |
2018-07-17T13:09:34.000Z
|
2018-09-11T13:55:33.000Z
|
Sample/DB.py
|
BlaCkinkGJ/SFSH
|
0134f1e4698ef34caee2d5a8cd875c51507b3527
|
[
"MIT"
] | 2 |
2019-05-17T03:07:08.000Z
|
2022-01-01T07:04:31.000Z
|
import pymongo
class DB:
    def __init__(self, ip='localhost', port=27017, table="employees"):
self.client = pymongo.MongoClient(ip, port)
self.db = self.client.SFSH
self.collection = self.db[table]
        self.fieldList = ['_id', 'name', 'sleep', 'temper', 'connected', 'WiFi']
        self.bufferInit()  # start with an empty buffer (same keys as fieldList)
def bufferInit(self):
self.buffer = {
'_id' : None,
'name' : None,
'sleep' : None,
'temper' : None,
'connected' : None,
'WiFi' : None
}
def isExist(self, id, name):
return self.collection.find_one({"name" : name, "_id" : id}) is not None
    def getLastID(self):
        # find_one with a sort avoids an IndexError when the collection is empty
        last = self.collection.find_one(sort=[("_id", pymongo.DESCENDING)])
        return last["_id"] if last else 0
def getData(self, name, id=None):
if id is None:
return self.collection.find({"name" : name})
else:
return self.collection.find_one({"name" : name, "_id" : id})
def setData(self, field, data):
if field in self.fieldList:
self.buffer[field] = data
else:
raise ValueError('buffer has no attribute {}'.format(field))
def setBuffer(self, post):
if post.keys() == self.buffer.keys():
self.buffer = post
else:
raise ValueError("post keys and buffer keys don't match : {}, {}".format(post.keys(), self.buffer.keys()))
def upload(self):
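        # Refuse to upload an incomplete buffer, then insert or update the
        # document depending on whether its _id already exists (legacy
        # pymongo insert()/update() API, kept as in the original).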
for key in self.buffer.keys():
if self.buffer[key] is None:
raise ValueError('{} field value is None.'.format(key))
key = {"_id" : self.buffer["_id"]}
if self.collection.find_one(key) is None:
self.collection.insert(self.buffer)
else:
self.collection.update(key, self.buffer)
self.bufferInit()
def __del__(self):
self.client.close()
| 30.054795 | 118 | 0.511851 |
45dbcb5f4267102a32936263d291ae0dcaefde15
| 196 |
py
|
Python
|
modules/config/model_path.py
|
cloud441/Procerfa
|
7472aeecc4eb3003e2e0f09bb0ae71f1b33e9ccf
|
[
"MIT"
] | null | null | null |
modules/config/model_path.py
|
cloud441/Procerfa
|
7472aeecc4eb3003e2e0f09bb0ae71f1b33e9ccf
|
[
"MIT"
] | null | null | null |
modules/config/model_path.py
|
cloud441/Procerfa
|
7472aeecc4eb3003e2e0f09bb0ae71f1b33e9ccf
|
[
"MIT"
] | null | null | null |
import os
# Linux dev:
#dir_path = f"data{os.sep}pdf_models{os.sep}"
# Windows release:
dir_path = f"..{os.sep}..{os.sep}data{os.sep}pdf_models{os.sep}"
model_path = dir_path + "new_model.pdf"
| 19.6 | 64 | 0.693878 |
926be1c7e47147a10f0c587c6de98675436760ba
| 697 |
py
|
Python
|
Python/zzz_training_challenge/Python_Challenge/solutions/tests/ch04_strings/ex05_no_duplicate_chars_test.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/zzz_training_challenge/Python_Challenge/solutions/tests/ch04_strings/ex05_no_duplicate_chars_test.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/zzz_training_challenge/Python_Challenge/solutions/tests/ch04_strings/ex05_no_duplicate_chars_test.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
# Example program for the book "Python Challenge"
#
# Copyright 2020 by Michael Inden
import pytest
from ch04_strings.solutions.ex05_no_duplicate_chars import check_no_duplicate_chars, check_no_duplicate_chars_v2
def input_and_expected():
return [("Otto", False), ("Adrian", False),
("Micha", True), ("ABCDEFG", True)]
@pytest.mark.parametrize("input, expected", input_and_expected())
def test_check_no_duplicate_chars(input, expected):
assert check_no_duplicate_chars(input) == expected
@pytest.mark.parametrize("input, expected", input_and_expected())
def test_check_no_duplicate_chars_v2(input, expected):
assert check_no_duplicate_chars_v2(input) == expected
| 29.041667 | 112 | 0.771879 |
341f365f8c17bd1b7f111e47079992f9c4bf2b8d
| 2,532 |
py
|
Python
|
lib/python/qmk/cli/compile.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
lib/python/qmk/cli/compile.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
lib/python/qmk/cli/compile.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
"""Compile a QMK Firmware.
You can compile a keymap already in the repo or using a QMK Configurator export.
"""
import subprocess
from argparse import FileType
from milc import cli
from qmk.decorators import automagic_keyboard, automagic_keymap
from qmk.commands import compile_configurator_json, create_make_command, parse_configurator_json
@cli.argument('filename', nargs='?', arg_only=True, type=FileType('r'), help='The configurator export to compile')
@cli.argument('-kb', '--keyboard', help='The keyboard to build a firmware for. Ignored when a configurator export is supplied.')
@cli.argument('-km', '--keymap', help='The keymap to build a firmware for. Ignored when a configurator export is supplied.')
@cli.argument('-n', '--dry-run', arg_only=True, action='store_true', help="Don't actually build, just show the make command to be run.")
@cli.subcommand('Compile a QMK Firmware.')
@automagic_keyboard
@automagic_keymap
def compile(cli):
"""Compile a QMK Firmware.
If a Configurator export is supplied this command will create a new keymap, overwriting an existing keymap if one exists.
If a keyboard and keymap are provided this command will build a firmware based on that.
"""
command = None
if cli.args.filename:
# If a configurator JSON was provided generate a keymap and compile it
# FIXME(skullydazed): add code to check and warn if the keymap already exists when compiling a json keymap.
user_keymap = parse_configurator_json(cli.args.filename)
command = compile_configurator_json(user_keymap)
else:
if cli.config.compile.keyboard and cli.config.compile.keymap:
# Generate the make command for a specific keyboard/keymap.
command = create_make_command(cli.config.compile.keyboard, cli.config.compile.keymap)
elif not cli.config.compile.keyboard:
cli.log.error('Could not determine keyboard!')
elif not cli.config.compile.keymap:
cli.log.error('Could not determine keymap!')
# Compile the firmware, if we're able to
if command:
cli.log.info('Compiling keymap with {fg_cyan}%s', ' '.join(command))
if not cli.args.dry_run:
cli.echo('\n')
subprocess.run(command)
else:
cli.log.error('You must supply a configurator export, both `--keyboard` and `--keymap`, or be in a directory for a keyboard or keymap.')
cli.echo('usage: qmk compile [-h] [-b] [-kb KEYBOARD] [-km KEYMAP] [filename]')
return False
| 44.421053 | 144 | 0.706161 |
f3d4991d3a5187dcfbc524bc1a8c9cde9329ab1e
| 1,370 |
py
|
Python
|
Saegewerk/bin/generatedaten.py
|
Robotron-GmbH/splunk-youtube-material
|
f04043dae7ca22d3a22b7b21acb97c325724eadb
|
[
"MIT"
] | 1 |
2021-07-28T15:51:07.000Z
|
2021-07-28T15:51:07.000Z
|
Saegewerk/bin/generatedaten.py
|
Robotron-GmbH/splunk-youtube-material
|
f04043dae7ca22d3a22b7b21acb97c325724eadb
|
[
"MIT"
] | null | null | null |
Saegewerk/bin/generatedaten.py
|
Robotron-GmbH/splunk-youtube-material
|
f04043dae7ca22d3a22b7b21acb97c325724eadb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
import sys,platform, os
import time
path = os.path.normpath(os.getcwd())
splunkhome_arr=path.split(os.sep)[:-4]
splunkhome=os.sep.join(splunkhome_arr)
sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'Saegewerk', 'lib'))
from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators
@Configuration()
class generatedatenCommand(GeneratingCommand):
path = os.path.normpath(os.getcwd())
splunkhome_arr=path.split(os.sep)[:-4]
splunkhome=os.sep.join(splunkhome_arr)
    if platform.system() == "Windows":
        Python = os.path.join(splunkhome, "bin", "python.exe")  # path to Splunk's bundled Python on Windows
    else:
        Python = os.path.join(splunkhome, "bin", "python")  # path to Splunk's bundled Python on macOS and Linux
seconds_running = Option(require=True, validate=validators.Integer())
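    # generate() runs Saegewerk.py through Splunk's bundled Python via
    # os.system and yields one summary event describing the invocation.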
def generate(self):
for i in range(1, 2):
Befehl='"'+self.Python+'"'+' Saegewerk.py '+ str(self.seconds_running)
os.system(Befehl)
            text = 'Files are stored here: ' + os.getcwd() + "\nSplunkhome: " + self.splunkhome + "\nPython: " + self.Python + "\nCommand: " + Befehl + "\n Seconds running: " + str(self.seconds_running)
yield {'_time': time.time(), 'event_no': i, '_raw': text}
dispatch(generatedatenCommand, sys.argv, sys.stdin, sys.stdout, __name__)
| 37.027027 | 196 | 0.683942 |
1e4892aa9bd32893c3f88e416549df5ae7d18347
| 6,410 |
py
|
Python
|
sdks/python/apache_beam/utils/windowed_value_test.py
|
diegomez17/beam
|
29dea08c873abc75871674762e68fd5b29b8052b
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 2 |
2021-12-29T09:01:21.000Z
|
2022-02-26T14:32:16.000Z
|
sdks/python/apache_beam/utils/windowed_value_test.py
|
diegomez17/beam
|
29dea08c873abc75871674762e68fd5b29b8052b
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 4 |
2021-12-21T21:15:41.000Z
|
2022-02-15T19:21:28.000Z
|
sdks/python/apache_beam/utils/windowed_value_test.py
|
diegomez17/beam
|
29dea08c873abc75871674762e68fd5b29b8052b
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 17 |
2021-12-15T19:31:54.000Z
|
2022-01-31T18:54:23.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the windowed_value."""
# pytype: skip-file
import copy
import itertools
import pickle
import unittest
from parameterized import parameterized
from parameterized import parameterized_class
from apache_beam.utils import windowed_value
from apache_beam.utils.timestamp import Timestamp
class WindowedValueTest(unittest.TestCase):
def test_timestamps(self):
wv = windowed_value.WindowedValue(None, 3, ())
self.assertEqual(wv.timestamp, Timestamp.of(3))
self.assertTrue(wv.timestamp is wv.timestamp)
self.assertEqual(
windowed_value.WindowedValue(None, -2.5, ()).timestamp,
Timestamp.of(-2.5))
def test_with_value(self):
pane_info = windowed_value.PaneInfo(
True, True, windowed_value.PaneInfoTiming.ON_TIME, 0, 0)
wv = windowed_value.WindowedValue(1, 3, (), pane_info)
self.assertEqual(
wv.with_value(10), windowed_value.WindowedValue(10, 3, (), pane_info))
def test_equality(self):
self.assertEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(1, 3, ()))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(100, 3, ()))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(1, 300, ()))
self.assertNotEqual(
windowed_value.WindowedValue(1, 3, ()),
windowed_value.WindowedValue(1, 300, ((), )))
self.assertNotEqual(windowed_value.WindowedValue(1, 3, ()), object())
def test_hash(self):
wv = windowed_value.WindowedValue(1, 3, ())
wv_copy = copy.copy(wv)
self.assertFalse(wv is wv_copy)
self.assertEqual({wv: 100}.get(wv_copy), 100)
def test_pickle(self):
pane_info = windowed_value.PaneInfo(
True, True, windowed_value.PaneInfoTiming.ON_TIME, 0, 0)
wv = windowed_value.WindowedValue(1, 3, (), pane_info)
self.assertTrue(pickle.loads(pickle.dumps(wv)) == wv)
WINDOWED_BATCH_INSTANCES = [
windowed_value.HomogeneousWindowedBatch.of(
None, 3, (), windowed_value.PANE_INFO_UNKNOWN),
windowed_value.HomogeneousWindowedBatch.of(
None,
3, (),
windowed_value.PaneInfo(
True, False, windowed_value.PaneInfoTiming.ON_TIME, 0, 0)),
]
class WindowedBatchTest(unittest.TestCase):
def test_homogeneous_windowed_batch_with_values(self):
pane_info = windowed_value.PaneInfo(
True, True, windowed_value.PaneInfoTiming.ON_TIME, 0, 0)
wb = windowed_value.HomogeneousWindowedBatch.of(['foo', 'bar'],
6, (),
pane_info)
self.assertEqual(
wb.with_values(['baz', 'foo']),
windowed_value.HomogeneousWindowedBatch.of(['baz', 'foo'],
6, (),
pane_info))
def test_homogeneous_windowed_batch_as_windowed_values(self):
pane_info = windowed_value.PaneInfo(
True, True, windowed_value.PaneInfoTiming.ON_TIME, 0, 0)
wb = windowed_value.HomogeneousWindowedBatch.of(['foo', 'bar'],
3, (),
pane_info)
self.assertEqual(
list(wb.as_windowed_values(iter)),
[
windowed_value.WindowedValue('foo', 3, (), pane_info),
windowed_value.WindowedValue('bar', 3, (), pane_info)
])
@parameterized.expand(itertools.combinations(WINDOWED_BATCH_INSTANCES, 2))
def test_inequality(self, left_wb, right_wb):
self.assertNotEqual(left_wb, right_wb)
def test_equals_different_type(self):
wb = windowed_value.HomogeneousWindowedBatch.of(
None, 3, (), windowed_value.PANE_INFO_UNKNOWN)
self.assertNotEqual(wb, object())
def test_homogeneous_from_windowed_values(self):
pane_info = windowed_value.PaneInfo(
True, True, windowed_value.PaneInfoTiming.ON_TIME, 0, 0)
windowed_values = [
windowed_value.WindowedValue('foofoo', 3, (), pane_info),
windowed_value.WindowedValue('foobar', 6, (), pane_info),
windowed_value.WindowedValue('foobaz', 9, (), pane_info),
windowed_value.WindowedValue('barfoo', 3, (), pane_info),
windowed_value.WindowedValue('barbar', 6, (), pane_info),
windowed_value.WindowedValue('barbaz', 9, (), pane_info),
windowed_value.WindowedValue('bazfoo', 3, (), pane_info),
windowed_value.WindowedValue('bazbar', 6, (), pane_info),
windowed_value.WindowedValue('bazbaz', 9, (), pane_info),
]
self.assertEqual(
list(
windowed_value.WindowedBatch.from_windowed_values(
windowed_values, produce_fn=list)),
[
windowed_value.HomogeneousWindowedBatch.of(
['foofoo', 'barfoo', 'bazfoo'], 3, (), pane_info),
windowed_value.HomogeneousWindowedBatch.of(
['foobar', 'barbar', 'bazbar'], 6, (), pane_info),
windowed_value.HomogeneousWindowedBatch.of(
['foobaz', 'barbaz', 'bazbaz'], 9, (), pane_info)
])
@parameterized_class(('wb', ), [(wb, ) for wb in WINDOWED_BATCH_INSTANCES])
class WindowedBatchUtilitiesTest(unittest.TestCase):
def test_hash(self):
wb_copy = copy.copy(self.wb)
self.assertFalse(self.wb is wb_copy)
self.assertEqual({self.wb: 100}.get(wb_copy), 100)
def test_pickle(self):
self.assertTrue(pickle.loads(pickle.dumps(self.wb)) == self.wb)
if __name__ == '__main__':
unittest.main()
| 37.928994 | 78 | 0.65819 |
94f31aadff8b7023504f9da7975f9f62965d8697
| 128 |
py
|
Python
|
vkapp/bot/models/__init__.py
|
ParuninPavel/lenta4_hack
|
6d3340201deadf5757e37ddd7cf5580b928d7bda
|
[
"MIT"
] | 1 |
2017-11-23T13:33:13.000Z
|
2017-11-23T13:33:13.000Z
|
vkapp/bot/models/__init__.py
|
ParuninPavel/lenta4_hack
|
6d3340201deadf5757e37ddd7cf5580b928d7bda
|
[
"MIT"
] | null | null | null |
vkapp/bot/models/__init__.py
|
ParuninPavel/lenta4_hack
|
6d3340201deadf5757e37ddd7cf5580b928d7bda
|
[
"MIT"
] | null | null | null |
from .users import Blogger, VKUser, Admin
from .news import News, AdminReview, Publication
from .balance import Income, Payment
| 32 | 48 | 0.804688 |
ec948eb5ed308166bb9ffe9e40c90c1d7808b38f
| 2,467 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/manufacturing/doctype/bom_update_tool/bom_update_tool.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/manufacturing/doctype/bom_update_tool/bom_update_tool.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/manufacturing/doctype/bom_update_tool/bom_update_tool.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, flt
from frappe import _
from erpnext.manufacturing.doctype.bom.bom import get_boms_in_bottom_up_order
from frappe.model.document import Document
class BOMUpdateTool(Document):
def replace_bom(self):
self.validate_bom()
self.update_new_bom()
bom_list = self.get_parent_boms(self.new_bom)
updated_bom = []
for bom in bom_list:
bom_obj = frappe.get_doc("BOM", bom)
updated_bom = bom_obj.update_cost_and_exploded_items(updated_bom)
bom_obj.calculate_cost()
bom_obj.update_parent_cost()
bom_obj.db_update()
frappe.msgprint(_("BOM replaced"))
def validate_bom(self):
if cstr(self.current_bom) == cstr(self.new_bom):
frappe.throw(_("Current BOM and New BOM can not be same"))
if frappe.db.get_value("BOM", self.current_bom, "item") \
!= frappe.db.get_value("BOM", self.new_bom, "item"):
frappe.throw(_("The selected BOMs are not for the same item"))
def update_new_bom(self):
new_bom_unitcost = frappe.db.sql("""select total_cost/quantity
from `tabBOM` where name = %s""", self.new_bom)
new_bom_unitcost = flt(new_bom_unitcost[0][0]) if new_bom_unitcost else 0
frappe.db.sql("""update `tabBOM Item` set bom_no=%s,
rate=%s, amount=stock_qty*%s where bom_no = %s and docstatus < 2 and parenttype='BOM'""",
(self.new_bom, new_bom_unitcost, new_bom_unitcost, self.current_bom))
def get_parent_boms(self, bom, bom_list=None):
if not bom_list:
bom_list = []
data = frappe.db.sql(""" select distinct parent from `tabBOM Item`
where ifnull(bom_no, '') = %s and docstatus < 2 and parenttype='BOM'""", bom)
for d in data:
bom_list.append(d[0])
self.get_parent_boms(d[0], bom_list)
return bom_list
@frappe.whitelist()
def enqueue_update_cost():
frappe.enqueue("erpnext.manufacturing.doctype.bom_update_tool.bom_update_tool.update_cost")
frappe.msgprint(_("Queued for updating latest price in all Bill of Materials. It may take a few minutes."))
def update_latest_price_in_all_boms():
if frappe.db.get_single_value("Manufacturing Settings", "update_bom_costs_automatically"):
update_cost()
def update_cost():
bom_list = get_boms_in_bottom_up_order()
for bom in bom_list:
frappe.get_doc("BOM", bom).update_cost(update_parent=False, from_child_bom=True)
| 35.753623 | 108 | 0.745845 |
bf3d89941b6ec1c8ad1f2fa462a2826366581cf8
| 7,212 |
py
|
Python
|
PopulationLib/GroupPlanting.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | 1 |
2021-03-16T08:35:50.000Z
|
2021-03-16T08:35:50.000Z
|
PopulationLib/GroupPlanting.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | 67 |
2019-11-14T11:29:52.000Z
|
2022-03-09T14:37:11.000Z
|
PopulationLib/GroupPlanting.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | 6 |
2019-11-12T11:11:41.000Z
|
2021-08-12T13:57:22.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@date: 2018-Today
@author: [email protected]
"""
import numpy as np
if __name__ == '__main__' and __package__ is None:
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import PopulationLib as PLib
from PopulationLib import TreeGroup
## Initializes groups of tree population and defines necessary functions.
class GroupPlanting(TreeGroup):
## Function initializing tree group and initial population of this group,
# depending on specification in project file.
# @param args: arguments specified in project file. Please see tag
# documentation.
def __init__(self, args):
self.species = args.find("species").text
self.name = args.find("name").text
self.trees = []
self.max_id = 0
distribution = args.find("distribution")
distribution_type = distribution.find("type").text
print("Initialise tree group " + self.name + " with " +
distribution_type + " distribution type and trees of species " +
self.species + ".")
if distribution_type == "Random":
self.plantRandomDistributedTrees(distribution)
elif distribution_type == "GroupFromFile":
self.plantTreesFromFile(distribution)
else:
raise KeyError("Population initialisation of type " +
distribution_type + " not implemented!")
## Function initializing tree population of size n_individuals within given
# rectangular domain.
# @param args: arguments specified in project file. Please see tag
# documentation.
def plantRandomDistributedTrees(self, args):
missing_tags = [
"type", "domain", "x_1", "x_2", "y_1", "y_2", "n_individuals"
]
# Set default value
self.n_recruitment = 0
for arg in args.iterdescendants():
tag = arg.tag
if tag == "n_individuals":
n_individuals = int(arg.text)
elif tag == "x_1":
self.x_1 = float(arg.text)
elif tag == "x_2":
x_2 = float(arg.text)
elif tag == "y_1":
self.y_1 = float(arg.text)
elif tag == "y_2":
y_2 = float(arg.text)
elif tag == "n_recruitment_per_step":
self.n_recruitment = int(arg.text)
if tag != "n_recruitment_per_step":
try:
missing_tags.remove(tag)
except ValueError:
raise ValueError(
"Tag " + tag +
" not specified for random tree planting!")
if len(missing_tags) > 0:
string = ""
for tag in missing_tags:
string += tag + " "
raise KeyError(
"Tag(s) " + string +
"are not given for random tree planting in project file.")
self.l_x = x_2 - self.x_1
self.l_y = y_2 - self.y_1
for i in range(n_individuals):
r_x, r_y = (np.random.rand(2))
x_i = self.x_1 + self.l_x * r_x
y_i = self.y_1 + self.l_y * r_y
self.addTree(x_i, y_i)
## Function initializing tree population of size n_individuals within given
# rectangular domain.
# @param args: arguments specified in project file. Please see tag
# documentation.
def plantTreesFromFile(self, args):
missing_tags = ["type", "filename"]
# Set default value
self.n_recruitment = 0
for arg in args.iterdescendants():
tag = arg.tag
if tag == "filename":
filename = arg.text
elif tag == "n_recruitment_per_step":
self.n_recruitment = int(arg.text)
if tag != "n_recruitment_per_step":
try:
missing_tags.remove(tag)
except ValueError:
raise ValueError(
"Tag " + tag +
" not specified for random tree planting!")
if len(missing_tags) > 0:
string = ""
for tag in missing_tags:
string += tag + " "
raise KeyError(
"Mandatory tag(s) " + string +
"is(are) not given for tree planting in project file.")
# Loading the Population Data
tree_file = open(filename)
i = 0
x_idx, y_idx = 99999, 99999
r_crown_idx, r_stem_idx, r_root_idx, h_stem_idx = (99999, 99999, 99999,
99999)
geometry = {}
max_x, max_y = -99999, -99999
min_x, min_y = 99999, 99999
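        # the first line of the file is the CSV header: locate the column
        # index of each required tree property, then read one tree per row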
for line in tree_file.readlines():
line = line.replace("\t", "").split(",")
if i == 0:
j = 0
for tag in line:
tag = tag.strip()
print(tag)
if tag == "x" and x_idx == 99999:
x_idx = int(j)
i += 1
if tag == "y" and y_idx == 99999:
y_idx = int(j)
i += 1
if tag == "r_crown" and r_crown_idx == 99999:
r_crown_idx = int(j)
i += 1
if tag == "r_stem" and r_stem_idx == 99999:
r_stem_idx = int(j)
i += 1
if tag == "r_root" and r_root_idx == 99999:
r_root_idx = int(j)
i += 1
if tag == "h_stem" and h_stem_idx == 99999:
i += 1
h_stem_idx = int(j)
j += 1
if i != 6:
                    raise KeyError(
                        str(6 - i) + " tree properties were not correctly " +
                        "indicated in the population input file! " +
                        "Please check the documentation!")
else:
x, y = float(line[x_idx]), float(line[y_idx])
geometry["r_crown"] = float(line[r_crown_idx])
geometry["r_root"] = float(line[r_root_idx])
geometry["r_stem"] = float(line[r_stem_idx])
geometry["h_stem"] = float(line[h_stem_idx])
max_x = max(max_x, x)
max_y = max(max_y, y)
min_x = min(min_x, x)
min_y = min(min_y, y)
                self.addTree(x, y, initial_geometry=geometry)
        tree_file.close()
self.x_1 = min_x
self.y_1 = min_y
self.l_x = max_x - self.x_1
self.l_y = max_y - self.y_1
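    # For illustration, a population file as parsed above could look like the
    # following (the column order is arbitrary, since indices are taken from
    # the header line; the values are made up):
    #
    #   x, y, r_crown, r_stem, r_root, h_stem
    #   10.0, 5.0, 1.2, 0.08, 0.9, 2.5
    #   12.5, 7.3, 0.9, 0.06, 0.7, 1.8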
## Randomly recruiting trees within given domain.
def recruitTrees(self):
for i in range(self.n_recruitment):
r_x, r_y = (np.random.rand(2))
x_i = self.x_1 + self.l_x * r_x
y_i = self.y_1 + self.l_y * r_y
self.addTree(x_i, y_i)
## Returns all living trees belonging to this group.
def getGroup(self):
        return self.trees
| 38.774194 | 79 | 0.503466 |
17903c32b851c55b6f319038e872fc8f057743b5
| 284 |
py
|
Python
|
Licence 2/I33/TP 1/ex_4.py
|
axelcoezard/licence
|
1ed409c4572dea080169171beb7e8571159ba071
|
[
"MIT"
] | 8 |
2020-11-26T20:45:12.000Z
|
2021-11-29T15:46:22.000Z
|
Licence 2/I33/TP 1/ex_4.py
|
axelcoezard/licence
|
1ed409c4572dea080169171beb7e8571159ba071
|
[
"MIT"
] | null | null | null |
Licence 2/I33/TP 1/ex_4.py
|
axelcoezard/licence
|
1ed409c4572dea080169171beb7e8571159ba071
|
[
"MIT"
] | 6 |
2020-10-23T15:29:24.000Z
|
2021-05-05T19:10:45.000Z
|
def minimum2(L):
	# Return the second minimum of the list L.
	min_1 = L[0]
	min_2 = L[1]
	# Ensure min_1 holds the smaller of the first two values.
	if min_1 > min_2:
		min_1, min_2 = min_2, min_1
	i = 2
	while i < len(L):
		if L[i] < min_1:
			min_2, min_1 = min_1, L[i]
		elif L[i] < min_2 and L[i] > min_1:
			min_2 = L[i]
		i += 1
	return min_2
print(minimum2([3,2,5,7,2]))
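# Expected output for the call above: 3. A quick extra check with made-up
# data:
print(minimum2([5, 1, 4, 1, 9]))  # -> 4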
| 17.75 | 38 | 0.545775 |
bd580a2c1bc330707470a98aa4f6767b48fd94b2
| 1,239 |
py
|
Python
|
tp2/source/tp2.py
|
ha2398/ia-tps
|
3696c92c8e2549aab6ac83da317cef8e15762eea
|
[
"MIT"
] | null | null | null |
tp2/source/tp2.py
|
ha2398/ia-tps
|
3696c92c8e2549aab6ac83da317cef8e15762eea
|
[
"MIT"
] | null | null | null |
tp2/source/tp2.py
|
ha2398/ia-tps
|
3696c92c8e2549aab6ac83da317cef8e15762eea
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
'''
Practical Assignment 2: Artificial Intelligence
Author: Hugo Araujo de Sousa (2013007463)
tp2.py: Runs Q-learning on a grid-world MDP (PacMaze).
'''
import argparse
import numpy as np
parser = argparse.ArgumentParser()
parser.add_argument('map', type=str, help='Map of the world.')
parser.add_argument('alpha', type=float, help='Learning rate.')
parser.add_argument('gamma', type=float, help='Discount factor.')
parser.add_argument('iter', type=int, help='Number of iterations.')
parser.add_argument('-e', dest='EPSILON', default=None, type=float,
help='Epsilon for e-greedy policy.')
parser.add_argument('-s', dest='SEED', default=0, type=int,
help='Seed for pseudo-number generator.')
parser.add_argument('-q', dest='QSUMF', default=None, type=str,
help='Turn on QSum data printing to specified file.')
args = parser.parse_args()
np.random.seed(args.SEED)
# Imported after seeding so that any module-level randomness in MDP stays
# reproducible.
from MDP import MDP
STATES = ['-', '0', '&']
ACTIONS = ['^', '<', '>', 'v']
REWARDS = {'-': -1, '0': 10, '&': -10}
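# For illustration, a map file using these state symbols might look like the
# following (the layout is made up; symbol meanings are inferred from
# REWARDS: '-' free cell, '0' goal, '&' penalty):
#   --0
#   -&-
#   ---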
def main():
'''
Main function.
'''
pac_maze = MDP(STATES, ACTIONS, REWARDS, args.alpha, args.gamma,
args.map, args.iter, args.EPSILON, args.QSUMF)
pac_maze.qlearning()
if __name__ == '__main__':
    main()
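# Example invocation (the map file name and parameter values are
# illustrative):
#   python3 tp2.py pacmaze.map 0.1 0.9 1000 -e 0.05 -s 42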
| 26.361702 | 73 | 0.635997 |
0070739863ca4af6e712773c68976b33f3b9aba1
| 495 |
py
|
Python
|
solid_bodies.py
|
15menou/srocket
|
7650d25ef03499ce38ae957cd6ce356541f6cf61
|
[
"Apache-2.0"
] | null | null | null |
solid_bodies.py
|
15menou/srocket
|
7650d25ef03499ce38ae957cd6ce356541f6cf61
|
[
"Apache-2.0"
] | null | null | null |
solid_bodies.py
|
15menou/srocket
|
7650d25ef03499ce38ae957cd6ce356541f6cf61
|
[
"Apache-2.0"
] | null | null | null |
class Body:
supported_characteristics = ['m', 'mass',
'J', 'inertia']
    def __init__(self, options):
        for key in options.keys():
            if key in Body.supported_characteristics:
                val = options[key]
                if key in ['m', 'mass']:
                    self.mass = val
                elif key in ['J', 'inertia']:
                    self.J = val
            else:
                # 'Log' is not defined here; plain print is assumed instead.
                print('Not supported key: {}'.format(key))
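# Minimal usage sketch (the characteristic values are illustrative):
#   body = Body({'mass': 12.0, 'inertia': 0.4})
#   print(body.mass, body.J)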
| 29.117647 | 61 | 0.446465 |