seq_id (string, 7–11 chars) | text (string, 156–1.7M chars) | repo_name (string, 7–125 chars) | sub_path (string, 4–132 chars) | file_name (string, 4–77 chars) | file_ext (string, 6 classes) | file_size_in_byte (int64, 156–1.7M) | program_lang (string, 1 class) | lang (string, 38 classes) | doc_type (string, 1 class) | stars (int64, 0–24.2k, nullable) | dataset (string, 1 class) | pt (string, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
74975645307
|
from model.plot_utils import *
from model.allocations import solve
def allocation_chart(x, solutions, ax=None, xlabel="", legend=True):
if not ax:
fig, ax = plt.subplots(1, 1, figsize=(5, 3))
x = np.array(list(x))
strategies = ["Eucalyptus 25", "Mahogany 100", "Mahogany 200", "Reserve"]
allocation_changes = {}
for strategy in strategies:
allocation_changes[strategy] = np.array([s.allocations[strategy] for s in solutions])
bottom = np.zeros(len(x))
w = x[1] - x[0]
for s in strategies:
# assert x.shape == allocation_changes[s].shape, f"mismatch for {s}, {x.shape} vs {allocation_changes[s].shape}"
ax.bar(x, allocation_changes[s], color=colors[s], label=s, bottom=bottom, width=w)
bottom += allocation_changes[s]
if legend:
ax.legend(framealpha=1, fontsize="xx-small", loc="lower left")
ax.margins(x=0, y=0)
ax.set_xticklabels([int(i) for i in ax.get_xticks()], rotation = 45)
ax.set_xlabel(xlabel)
ax.set_ylabel('Allocations (Hectares)')
def plot_constrained_allocations():
min_habs = range(0, 200)
solutions_given_habs = []
for min_hab in min_habs:
solutions_given_habs.append(solve(set_min_habitats=min_hab))
min_yield = range(100000, 200000, 1000)
solutions_given_yield = []
for min_y in min_yield:
solutions_given_yield.append(solve(set_min_yield=min_y))
fig, axes = plt.subplots(2, 2, figsize=(4.5, 5), width_ratios=[1, 0.25])
fig.suptitle("Only Eucalyptus Satisfies Stricter Constraints")
allocation_chart(min_habs, solutions_given_habs, ax=axes[0, 0], xlabel="Minimum Habitats", legend=False)
allocation_chart(min_yield, solutions_given_yield, ax=axes[1, 0], xlabel="Minimum Yield", legend=False)
axes[0, 1].axis('off')
axes[1, 1].axis('off')
axes[0, 1].legend(legend_content[:-2], legend_names[:-2], fontsize=8, frameon=False)
plt.tight_layout()
|
leo-ware/forest-model
|
model/make_plots.py
|
make_plots.py
|
py
| 1,944 |
python
|
en
|
code
| 0 |
github-code
|
6
|
38047305072
|
from flask import Flask, jsonify, render_template
import psutil
import subprocess
app = Flask(__name__)
def get_gpu_usage():
result = subprocess.check_output("nvidia-smi --query-gpu=utilization.gpu --format=csv,noheader,nounits", shell=True)
    gpu_usage = float(result.decode("utf-8").strip())  # check_output returns bytes; decode before parsing
return gpu_usage
@app.route('/')
def home():
return render_template('index.html')
@app.route('/system_info')
def system_info():
info = {
"cpu_usage": psutil.cpu_percent(),
"ram_usage": psutil.virtual_memory().percent,
"disk_usage": psutil.disk_usage('/').percent,
"network_info": psutil.net_io_counters(pernic=True),
"gpu_usage": get_gpu_usage()
}
return jsonify(info)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000)
|
agbld/webserver_for_system_infos
|
app.py
|
app.py
|
py
| 784 |
python
|
en
|
code
| 0 |
github-code
|
6
|
31788083835
|
f = open("marks.txt", "rt")
for line in f.readlines():
parts = line.strip().split(",")
if len(parts) < 2:
continue
#print(parts)
name = parts[0]
marks = [int(v) for v in parts[1:] if v.isdigit()]
#print(marks)
total = sum(marks)
# total = sum(map(int, parts[1:]))
print(f"{name:15} {total:3} {total/len(marks):5.2f}")
f.close()
|
srikanthpragada/PYTHON_18_JULY_2022
|
demo/libdemo/marks_list.py
|
marks_list.py
|
py
| 377 |
python
|
en
|
code
| 0 |
github-code
|
6
|
30364393271
|
import os.path
import shutil
import sys
import tempfile
import textwrap
import testfixtures
from okonomiyaki.file_formats import EggMetadata, PackageInfo
from okonomiyaki.utils.test_data import NOSE_1_3_4_RH5_X86_64
from okonomiyaki._cli import main
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
class TestMain(unittest.TestCase):
maxDiff = None
def setUp(self):
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tempdir)
def test_spec_depend(self):
# Given
egg = NOSE_1_3_4_RH5_X86_64
r_output = textwrap.dedent("""\
metadata_version = '1.3'
name = 'nose'
version = '1.3.4'
build = 1
arch = 'amd64'
platform = 'linux2'
osdist = 'RedHat_5'
python = '2.7'
python_tag = 'cp27'
abi_tag = 'cp27m'
platform_tag = 'linux_x86_64'
packages = []
""")
# When
with testfixtures.OutputCapture() as capture:
main(["spec-depend", egg])
# Then
self.assertMultiLineEqual(capture.output.getvalue(), r_output)
def test_pkg_info(self):
# Given
egg = NOSE_1_3_4_RH5_X86_64
r_output = PackageInfo.from_egg(egg).to_string()
# When
with testfixtures.OutputCapture() as capture:
main(["pkg-info", egg])
# Then
self.assertMultiLineEqual(capture.output.getvalue(), r_output)
def test_summary(self):
# Given
egg = NOSE_1_3_4_RH5_X86_64
r_output = textwrap.dedent("""\
Extends the Python Unittest module with additional disocvery and running
options
""")
# When
with testfixtures.OutputCapture() as capture:
main(["summary", egg])
# Then
self.assertMultiLineEqual(capture.output.getvalue(), r_output)
def test_no_pkg_info(self):
# Given
path = os.path.join(
self.tempdir, os.path.basename(NOSE_1_3_4_RH5_X86_64)
)
m = EggMetadata.from_egg(NOSE_1_3_4_RH5_X86_64)
m._pkg_info = None
m.dump(path)
# When/Then
with testfixtures.OutputCapture() as capture:
with self.assertRaises(SystemExit) as exc:
main(["pkg-info", path])
if sys.version_info < (2, 7):
code = exc.exception
else:
code = exc.exception.code
self.assertEqual(code, -1)
capture.compare("No PKG-INFO")
|
enthought/okonomiyaki
|
okonomiyaki/_cli/tests/test_cli.py
|
test_cli.py
|
py
| 2,580 |
python
|
en
|
code
| 2 |
github-code
|
6
|
36466168685
|
#!/usr/bin/env python
import utils
import gzip
import argparse
from pysam import TabixFile
import numpy as np
import glob
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('-r',
dest='rate_dir',
required=True,
help='Path to directory containing rate files')
args = parser.parse_args()
return args
def main():
args = get_args()
regions = []
first_file = True
file_i = 0
for rate_file in glob.glob(args.rate_dir + '*.probe.coverage_rate.bed.gz'):
with gzip.open(rate_file,'rt') as f:
line_i = 0
for l in f:
A = l.rstrip().split('\t')
                interval = utils.Interval(chrom=A[0],
                                          start=int(A[1]),
                                          end=int(A[2]),
                                          data=A[3:] if len(A) > 3 else None)
                if interval.start == interval.end:
                    continue
if first_file:
regions.append([interval])
else:
regions[line_i].append(interval)
line_i += 1
first_file = False
file_i += 1
for region in regions:
depths = []
for interval in region:
depths.append(float(interval.data[1]))
print('\t'.join([str(x) for x in [region[0].chrom,
region[0].start,
region[0].end,
region[0].data[0],
np.mean(depths),
np.std(depths)]]))
if __name__ == '__main__': main()
|
ryanlayerlab/layer_lab_chco
|
bin/get_regions_zscores.py
|
get_regions_zscores.py
|
py
| 2,039 |
python
|
en
|
code
| 1 |
github-code
|
6
|
8692787672
|
from strong.models import Project, Images
from rest_framework import serializers
class ImagesSerializers(serializers.HyperlinkedModelSerializer):
project_id = serializers.PrimaryKeyRelatedField(queryset=Project.objects.all(),source='project.id')
class Meta:
model = Images
fields = ('project_id', 'image')
def create(self, validated_data):
subject = Images.objects.create(parent=validated_data['project']['id'], child_name=validated_data['image'])
        return subject
class ProjectSerializers(serializers.ModelSerializer):
images = ImagesSerializers(many=True, read_only=True)
class Meta:
model = Project
fields = ('type_of_project', 'description', 'images')
# fields = "__all__"
|
urielcookies/RESTFUL_API
|
strong/serializers.py
|
serializers.py
|
py
| 753 |
python
|
en
|
code
| 0 |
github-code
|
6
|
38861064997
|
import sys
sys.setrecursionlimit(10000)
def check_array(N):
return [[False for _ in range(10)]for i in range(N)]
N, K = map(int, input().split())
board = [list(input()) for i in range(N)]
ck1 = check_array(N)
ck2 = check_array(N)
dx, dy = [0, 1, 0, -1], [1, 0, -1, 0]
def countDFS(x, y):
ck1[x][y] = True
ret = 1
for i in range(4):
nx, ny = x + dx[i], y+dy[i]
if nx < 0 or ny < 0 or nx >= N or ny >= 10:
continue
elif ck1[nx][ny] or board[x][y] != board[nx][ny]:
continue
ret += countDFS(nx, ny)
return ret
def removeDFS(x, y, num):
ck2[x][y] = True
board[x][y] = '0'
for i in range(4):
nx, ny = x+dx[i], y+dy[i]
if nx < 0 or ny < 0 or nx >= N or ny >= 10:
continue
elif ck2[nx][ny] or num != board[nx][ny]:
continue
removeDFS(nx, ny, num)
def down():
for i in range(10):
temp = []
for j in range(N):
if board[j][i] != '0':
temp.append(board[j][i])
for j in range(N-len(temp)):
board[j][i] = '0'
for j in range(N-len(temp), N):
board[j][i] = temp[j-(N-len(temp))]
while True:
exit = False
ck1 = check_array(N)
ck2 = check_array(N)
for i in range(N):
for j in range(10):
if board[i][j] == '0' or ck1[i][j]:
continue
res = countDFS(i, j)
if res >= K:
removeDFS(i, j, board[i][j])
exit = True
    # If exit is still False, there was nothing to remove, so end the while loop
if not exit:
break
    # Otherwise, drop the remaining blocks down and repeat
down()
for i in board:
print(''.join(i))
'''
[Code Review]
1. When breaking out of loops, I keep forgetting to set up flag variables such as ck or exit to track existence or visited state before exiting.
2. Even though this approach has solved quite a few problems, I still don't use it regularly, which comes down to lack of practice.
3. For practice, write this again from scratch (without looking) as MooyoMooyo_2.py!
'''
|
minhee0327/Algorithm
|
python/BOJ/12_탐색/16768_MooyoMooyo.py
|
16768_MooyoMooyo.py
|
py
| 2,117 |
python
|
en
|
code
| 0 |
github-code
|
6
|
34508758020
|
# https://www.geeksforgeeks.org/find-zeroes-to-be-flipped-so-that-number-of-consecutive-1s-is-maximized/
# https://leetcode.com/problems/max-consecutive-ones-iii/discuss/1304346/Simple-Solution-w-Explanation-or-Sliding-Window-Approach-with-Comments
# https://leetcode.com/problems/max-consecutive-ones-iii/discuss/278322/Easy-to-understand-Python-solution
# https://leetcode.com/problems/max-consecutive-ones-iii/discuss/432952/Python-sliding-window-9-lines
# https://leetcode.com/discuss/interview-question/algorithms/125017/amazon-max-consecutive-ones
def longest_ones(nums, k):
n ,ans, l = len(nums), 0,0
for r in range(n):
if nums[r] == 0:
if k == 0:
while nums[l]!= 0:
l = l + 1
l = l + 1
else:
k = k-1
ans = max(ans, r-l+1)
return ans
if __name__ == "__main__":
a = [1,1,1,0,0,0,1,1,1,1,0,0,1,0,1,1,1]
k =2
print(longest_ones(a,k))
|
ved93/deliberate-practice-challenges
|
code-everyday-challenge/n189_maximize_number_of_ones.py
|
n189_maximize_number_of_ones.py
|
py
| 997 |
python
|
en
|
code
| 0 |
github-code
|
6
|
73416501948
|
"""
Selection Sort, inefficient swapping sort. Practiced for understanding
algorithmic design, however, python sorting functions are the way to go here.
"""
def selectionSort(L):
"""Assumes that L is a list of elements that can be compared using >.
Sorts L in ascending order """
suffixStart = 0
while suffixStart != len(L):
# look at each element in suffix
for i in range(suffixStart, len(L)):
if L[i] < L[suffixStart]:
# swap position of elements
L[suffixStart], L[i] = L[i], L[suffixStart]
suffixStart += 1
return L
lst = [1,56,4,3,3,2,15,13,5]
print(selectionSort(lst))
|
AndreiBratkovski/Training
|
MIT-Guttag/selectionSort.py
|
selectionSort.py
|
py
| 598 |
python
|
en
|
code
| 1 |
github-code
|
6
|
21833792754
|
#!/usr/bin/env python3
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/..')
from test_framework.test_framework import BitcoinTestFramework
BLOCKS = 100
TXS = 100
class TXFlood(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
self.nodes[0].createwallet('miner')
addr = self.nodes[0].getnewaddress()
self.generatetoaddress(self.nodes[0], 200, addr)
for b in range(BLOCKS):
for t in range(TXS):
txid = self.nodes[0].sendtoaddress(address=addr, amount=0.001)
print(f"sending tx {t}/{TXS}: {txid}")
block = self.generate(self.nodes[0], 1)
print(f"generating block {b}/{BLOCKS}: {block}")
if __name__ == '__main__':
TXFlood().main()
|
pinheadmz/warnet-scenarios
|
scenarios/tx-flood.py
|
tx-flood.py
|
py
| 869 |
python
|
en
|
code
| 0 |
github-code
|
6
|
20755734857
|
import pandas as pd
import logging as lg
import pickle
lg.basicConfig(filename='data_test_automation.log', level=lg.INFO, format='%(asctime)s %(name)-12s %(levelname)-8s %('
'message)s', datefmt='%m-%d %H:%M',
filemode='w')
def automated(a):
"""This function takes the input of the test data file location, and performs all the data processing done
on the test data set.
For logs check the data_test_automation.log file in your system"""
try:
lg.warning("user gave the input path/file as:"+' '+str(a))
df=pd.read_excel(a)
lg.warning("data successfully loaded from the file/path"+' '+str(a))
lg.info("starting all the pre-processing done for the train dataset")
df.dropna(inplace=True)
lg.warning("successfully dropped all null values in the given dataset")
def change_into_datetime(col):
df[col]=pd.to_datetime(df[col])
for i in ['Date_of_Journey','Dep_Time', 'Arrival_Time']:
change_into_datetime(i)
lg.info("successfully changed the required columns into datetime format")
df['journey_day']=df['Date_of_Journey'].dt.day
lg.info("successfully extracted day from Date_of_journey and creating a separate column for day")
df['journey_month']=df['Date_of_Journey'].dt.month
lg.info("successfully extracted month from Date_of_Journey and creating a separate column for month")
def extract_hour(data,col):
data[col+'_hour']=data[col].dt.hour
def extract_min(data,col):
data[col+'_min']=data[col].dt.minute
def drop_col(data,col):
data.drop(col,axis=1,inplace=True)
extract_hour(df,'Dep_Time')
lg.info("successfully extracted hours from Dep_Time and dumped the data into new column Dep_Time_hour")
extract_min(df,'Dep_Time')
lg.info("successfully extracted minutes from Dep_Time and dumped the data into new column Dep_Time_min")
drop_col(df,'Dep_Time')
lg.warning("dropping the original Dep_Time column as we extracted the values form that column")
extract_hour(df,'Arrival_Time')
lg.info("successfully extracted hours from Arrival_Time and dumped the data into new column Arrival_Time_hour")
extract_min(df,'Arrival_Time')
lg.info("successfully extracted min from Arrival_Time and dumped the data into new column Arrival_Time_min")
drop_col(df,'Arrival_Time')
lg.warning("dropping the original Arrival_Time column as we extracted the values form that column")
duration = list(df["Duration"])
for i in range(len(duration)):
if len(duration[i].split()) != 2:
if "h" in duration[i]:
duration[i] = duration[i].strip() + " 0m"
else:
duration[i] = "0h " + duration[i]
duration_hours = []
duration_mins = []
for i in range(len(duration)):
duration_hours.append(int(duration[i].split(sep = "h")[0]))
duration_mins.append(int(duration[i].split(sep = "m")[0].split()[-1]))
df["Duration_hours"] = duration_hours
lg.info("successfully extracted hours from Duration column and dumped the data into new column Duration_hours")
df["Duration_mins"] = duration_mins
lg.info("successfully extracted minutes from Duration column and dumped the data into new column Duration_mins")
df.drop(["Date_of_Journey","Duration","Additional_Info"], inplace=True,axis=1)
lg.warning("dropping the Date_of_Journey, Duration, Additional_Info columns as we extracted the required "
"information")
Airline=pd.get_dummies(df['Airline'],drop_first=True)
lg.info("creating dummy variables for Airline and dropping the first dummy column")
source=pd.get_dummies(df['Source'],drop_first=True)
lg.info("creating dummy variables for Source and dropping the first dummy column")
destination=pd.get_dummies(df['Destination'],drop_first=True)
lg.info("creating dummy variables for Destination and dropping the first dummy column")
        stops_map = {'non-stop': 0, '2 stops': 2, '1 stop': 1, '3 stops': 3, '4 stops': 4}
        df['Total_Stops'] = df['Total_Stops'].map(stops_map)
        lg.info("successfully mapped the Total_Stops column to 0,1,2,3,4 respectively")
df=pd.concat([df, Airline, source, destination], axis = 1)
lg.warning("concatenating all the newly created columns into the main dataframe")
df.drop(["Airline", 'Source', 'Destination','Route'],inplace=True,axis=1)
lg.warning("dropping the categorical columns as we dummy encoded them")
df['Trujet']=0
lg.info("adding an extra column as this feature is not there in our test dataset")
model = open('flight_rf.pkl','rb')
forest = pickle.load(model)
lg.info("loading our test model for prediction")
y_prediction = forest.predict(df)
lg.info("processing the prediction")
a=pd.DataFrame(y_prediction)
lg.info("dumping all our predicted values into a dataframe and showing the results")
print(a)
return a
except Exception as e:
lg.warning("error occurred during execution, which is:"+' '+str(e))
return "error occurs is:"+' '+str(e)
a=input(str("give the file path or file name:"))
automated(a)
|
InduMouliMahamkali/flightfareprediction
|
pre-processing and modeling/automated_model_test.py
|
automated_model_test.py
|
py
| 5,520 |
python
|
en
|
code
| 3 |
github-code
|
6
|
16601473639
|
import socket
import serial
import sqlite3
import select
import time
import datetime
HEADERSIZE = 10
running_on_pie = False # pie or windows
if running_on_pie:
host = '192.168.1.10'
pos = '192.168.1.10'
win1 = '192.168.1.11'
win2 = '192.168.1.12'
conn = sqlite3.connect('/home/sysop/pos/order.db')
robot = serial.Serial('/dev/ttyUSB0', 19200)
else:
host = '192.168.86.26'
pos = '192.168.86.26'
win1 = '192.168.86.26'
win2 = '192.168.86.11'
conn = sqlite3.connect('order.db')
robot = serial.Serial('COM9', 19200)
port = 12345
c = conn.cursor()
send_to_bot = False
send_to_w1 = False
send_to_w2 = False
send_to_pos = False
bot_data = ''
bot_hold = ''
old_data = ''
serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
serverSocket.bind((host, port))
serverSocket.listen(5)
sockets_list = [serverSocket]
clients = {}
print('Listening for connections on {}:{}...'.format(host, port))
def un_start():
command = "/usr/bin/sudo /sbin/shutdown -r now"
import subprocess
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
output = process.communicate()[0]
print(output)
def log_it(note):
if running_on_pie:
f = open("/home/sysop/pos/log.txt", "a+")
else:
f = open("log.txt", "a+")
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%d-%m-%Y %H:%M:%S')
info = st + ' > '
info += note
info += '\n\r'
f.write(info)
def create_table():
c.execute('CREATE TABLE IF NOT EXISTS donut(donutID int, drink int, topping int, orderNUM int,'
' pay int)')
def data_test(a1):
c.execute("SELECT * FROM donut WHERE orderNUM=:a1", {"a1": str(a1)})
data1 = c.fetchall()
if data1:
return data1
def data_delete(a1):
c.execute("DELETE FROM donut WHERE orderNUM=:a1", {"a1": str(a1)})
conn.commit()
# Handles message receiving
def receive_message(client_socket1):
try:
message_header = client_socket1.recv(HEADERSIZE)
if not len(message_header):
return False
message_length = int(message_header.decode('utf-8').strip())
return {'header': message_header, 'data': client_socket1.recv(message_length)}
except Exception as e:
print(e)
return False
def order_process(order):
if len(order) == 4:
xa = str(data_test(order))
if xa != 'None':
xa = xa.strip('[]()')
xa = xa.replace(" ", "")
xa = xa.split(",")
dm = '$A' + str(xa[0])
if str(xa[1]) == '0':
dm += '1000#'
else:
dm += str(xa[1])
dm += '#'
else:
dm = xa
return dm
create_table()
while True:
read_sockets, _, exception_sockets = select.select(sockets_list, [], sockets_list)
for notified_socket in read_sockets:
if notified_socket == serverSocket:
client_socket, client_address = serverSocket.accept()
# Client should send his name right away, receive it
user = receive_message(client_socket)
# If False - client disconnected before he sent his name
if user is False:
continue
# Add accepted socket to select.select() list
sockets_list.append(client_socket)
# Also save username and username header
clients[client_socket] = user
print('Accepted connection from {}, username: {}'.format(client_address, user['data'].decode('utf-8')))
log_it('Accepted connection from {}, username: {}'.format(client_address, user['data'].decode('utf-8')))
else:
# Receive message
message = receive_message(notified_socket)
# If False, client disconnected, cleanup
if message is False:
print('Closed connection from: {}'.format(clients[notified_socket]['data'].decode('utf-8')))
log_it('Closed connection from: {}'.format(clients[notified_socket]['data'].decode('utf-8')))
# Remove from list for socket.socket()
sockets_list.remove(notified_socket)
# Remove from our list of users
del clients[notified_socket]
continue
# Get user by notified socket, so we will know who sent the message
user = clients[notified_socket]
data = message["data"].decode("utf-8")
line = ''
if data != 'ready':
if data != old_data:
print('Received message from {}: {}'.format(user["data"].decode("utf-8"), data))
log_it('Received message from {}: {}'.format(user["data"].decode("utf-8"), data))
old_data = data
if user["data"] == 'm1'.encode("utf-8"):
# start the order
if data == 'A order In':
message2_header = '{:10}'.format(len(data))
message['header'] = message2_header.encode("utf-8")
message['data'] = data.encode("utf-8")
send_to_w1 = True
# pos info
if user["data"] == 'pos'.encode("utf-8"):
robot.write(data.encode("utf-8"))
time.sleep(1)
line_in = robot.readline()
line = line_in.decode("utf-8")
line = line.rstrip()
if line[0] == '$':
message2_header = '{:10}'.format(len(line))
message['header'] = message2_header.encode("utf-8")
message['data'] = line.encode("utf-8")
print(line.encode("utf-8"))
send_to_pos = True
# window A data and processing
if user["data"] == 'w1'.encode("utf-8"):
if len(data) == 4:
if data == '0000':
un_start()
message2 = order_process(data)
bot_hold = message2
time.sleep(1)
message2_header = '{:10}'.format(len(message2))
message['header'] = message2_header.encode("utf-8")
message['data'] = message2.encode("utf-8")
send_to_w1 = True
if len(data) == 5:
if data == 'start':
robot.write(bot_hold.encode("utf-8"))
time.sleep(1)
if data == 'ready':
line_in = robot.readline()
line = line_in.decode("utf-8")
line = line.rstrip()
# if line == 'end':
message2_header = '{:10}'.format(len(line))
message['header'] = message2_header.encode("utf-8")
message['data'] = line.encode("utf-8")
send_to_w1 = True
# main com arduino
# if robot.in_waiting > 0:
# line_in = robot.readline()
# line = line_in.decode("utf-8")
# line = line.rstrip()
#
# if len(line) > 1:
#
# if line == 'end' and user["data"] == 'w1'.encode("utf-8"):
# message2_header = '{:10}'.format(len(line))
# message['header'] = message2_header.encode("utf-8")
# message['data'] = line.encode("utf-8")
# send_to_w1 = True
# if line[0] == '$' and user["data"] == 'pos'.encode("utf-8"):
# message2_header = '{:10}'.format(len(line))
# message['header'] = message2_header.encode("utf-8")
# message['data'] = line.encode("utf-8")
# send_to_pos = True
# Iterate over connected clients and broadcast message
for client_socket in clients:
# sent it
the_ip = client_socket.getpeername()[0]
if the_ip == win1 and send_to_w1:
client_socket.send(user['header'] + user['data'] + message['header'] + message['data'])
print(user['header'] + user['data'] + message['header'] + message['data'])
send_to_w1 = False
if the_ip == win2 and send_to_w2:
client_socket.send(user['header'] + user['data'] + message['header'] + message['data'])
send_to_w2 = False
if the_ip == pos and send_to_pos:
client_socket.send(user['header'] + user['data'] + message['header'] + message['data'])
print(user['header'] + user['data'] + message['header'] + message['data'])
send_to_pos = False
line = ''
# It's not really necessary to have this, but will handle some socket exceptions just in case
for notified_socket in exception_sockets:
# Remove from list for socket.socket()
sockets_list.remove(notified_socket)
# Remove from our list of users
del clients[notified_socket]
|
RG11rant/donuts
|
server.py
|
server.py
|
py
| 9,334 |
python
|
en
|
code
| 1 |
github-code
|
6
|
30282625342
|
from dataclasses import dataclass, replace
from typing import Any
from uuid import UUID
from topics.domain.repositories.topic_repository import TopicRepository
from topics.domain.usecases.base import Usecase
@dataclass(kw_only=True)
class UpdateTopicRequest:
id: UUID
content: str | None = None
discussed: bool | None = None
@dataclass(kw_only=True)
class UpdateTopicUsecase(Usecase[UpdateTopicRequest, None]):
topic_repository: TopicRepository
def handle(self, request: UpdateTopicRequest) -> None:
topic = self.topic_repository.get(request.id)
updated_fields: dict[str, Any] = {}
if request.content is not None:
updated_fields["content"] = request.content
if request.discussed is not None:
updated_fields["discussed"] = request.discussed
self.topic_repository.update(replace(topic, **updated_fields))
|
cbenavid/topics
|
src/topics/domain/usecases/topic/update_topic.py
|
update_topic.py
|
py
| 893 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5949670885
|
import time
dic_function_time = {}
def store_time(function):
"""Décorateur qui stocke le nombre de secondes écoulées
entre le début et la fin de l'exécution de la fonction.
Un décorateur est une fonction qui prend une autre fonction (ou classe) en paramètre
pour modifier son comportement lors de son exécution."""
def modified_function(*args, **kwargs):
"""On est dans la fonction modifiée qui a pour but de
calculer et stocker le temps d'exécution de 'function' dans le dictionnaire.
'function' est bien accessible dans ce bloc étant
dans la définition de 'store_time'.
:param args: tuple des paramètres non nommés (arguments).
:param kwargs: dictionnaire des paramètres nommés (key word arguments).
"""
t1 = time.time()
value_returned = function(*args, **kwargs)
t2 = time.time()
elapsed_time = t2 - t1
if function not in dic_function_time:
dic_function_time[function] = [elapsed_time]
else:
dic_function_time[function].append(elapsed_time)
        # Inside 'modified_function', return the value returned by 'function' (2)
return value_returned
    # Inside 'store_time', return the function 'modified_function' (1)
return modified_function
"""
NUMEROS = ORDRE EXECUTION
Les décorateurs ne sont pas magiques, dans notre cas, considérons le cas suivant :
@store_time
def function(a, b, c):
return a, b, c
Le décorateur ne fait rien d'autre que ça : modified_function = store_time(function)
(Vu que store_time retourne 'modified_function')
function est en fait remplacée par modified_function et puisqu'on a défini plus haut
'def modified_function(*args, **kwargs)' on peut lui passer les paramètres a b et c comme si c'était 'function'.
Donc dans ce cas à chaque fois qu'on fait function(1, 2, 3) on exécute en fait store_time(function)(1, 2, 3)
c'est à dire modified_function(1, 2, 3) (Vu que store_time retourne 'modified_function')
On calcule le temps d'exécution de 'function' en pensant bien à récupérer la sortie (value_returned) de 'function'
et afin de ne pas altérer son fonctionnement on retourne value, c'est à dire ce qu'aurait dû retourner 'function'
à la fin de son exécution.
Donc à la fin on a juste ajouté une fonctionnalité à notre fonction avec notre fonction 'store_time'.
==> C'est le but d'un décorateur
Si on veut ajouter des paramètres à store_time il faut rajouter une couche de fonction :
"""
def store_time2(min_time):
def decorator(function):
def modified_function(*args_function, **kwargs_function):
t1 = time.time()
value_returned = function(*args_function, **kwargs_function)
t2 = time.time()
elapsed_time = t2 - t1
if elapsed_time >= min_time:
if function not in dic_function_time:
dic_function_time[function] = [elapsed_time]
else:
dic_function_time[function].append(elapsed_time)
            # Inside 'modified_function', return the value returned by 'function' (3)
return value_returned
        # Inside 'decorator', return the function 'modified_function' for later use (2)
return modified_function
    # Inside 'store_time2', return our decorator function (1)
return decorator
"""
Cela revient alors au même que de faire modified_function = store_time2(1)(function)
(Uniquement les durées de chaque exécution >= 1 seront stockées)
Puis si on exécute store_time2(1)(function)(1, 2, 3) c'est à dire : decorator(function)(1, 2, 3),
(en gardant à l'esprit qu'on a accès à min_time puisqu'on est toujours dans la définition de 'store_time2')
c'est à a dire modified_function(1, 2, 3).
"""
|
JeremyRozier/SawImageProject
|
version1/decorators.py
|
decorators.py
|
py
| 3,861 |
python
|
fr
|
code
| 0 |
github-code
|
6
|
47533751
|
from typing import List
class Solution:
def maxUncrossedLines(self, nums1: List[int], nums2: List[int]) -> int:
# dp table
dp = [[0] * (len(nums2)+1) for _ in range(len(nums1)+1)]
# initialize
# pass
# traverse dp table
for i in range(1, len(nums1)+1):
for j in range(1, len(nums2)+1):
if nums1[i-1] == nums2[j-1]:
dp[i][j] = dp[i-1][j-1] + 1
else:
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
return dp[-1][-1]
if __name__ == "__main__":
nums1 = [1,4,2]
nums2 = [1,2,4]
s = Solution()
assert s.maxUncrossedLines(nums1, nums2) == 2
|
code-cp/leetcode
|
solutions/1035/main.py
|
main.py
|
py
| 689 |
python
|
en
|
code
| 0 |
github-code
|
6
|
29944774792
|
import os
import sys
sys.path.insert(1, os.path.join(sys.path[0], 'utils'))
import numpy as np
import pandas as pd
import argparse
import h5py
import librosa
from scipy import signal
import matplotlib.pyplot as plt
import time
import csv
import random
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from multiprocessing import cpu_count
from utilities import read_audio, create_folder, read_meta
import config
from tqdm import tqdm
# Global flags and variables.
PLOT_FEATURES = False
class LogMelExtractor():
def __init__(self, sample_rate, window_size, overlap, mel_bins):
self.window_size = window_size
self.overlap = overlap
# Loading hamming window and Mel-filters.
self.ham_win = np.hamming(window_size)
self.melW = librosa.filters.mel(sr=sample_rate,
n_fft=window_size,
n_mels=mel_bins,
fmin=50.,
fmax=sample_rate // 2).T
# transform: Assumes a numpy array representing raw audio-signal.
def transform(self, audio):
ham_win = self.ham_win
window_size = self.window_size
overlap = self.overlap
# Compute a spectrogram with consecutive Fourier transforms.
        [f, t, x] = signal.spectrogram(
audio,
window=ham_win,
nperseg=window_size,
noverlap=overlap,
detrend=False,
return_onesided=True,
mode='magnitude')
x = x.T
# Applying mel-filters on sequence of fourier transforms.
x = np.dot(x, self.melW)
# Applying log on mel-filters.
x = np.log(x + 1e-8)
x = x.astype(np.float32)
return x
def calculate_logmel(audio_path, sample_rate, feature_extractor, n=-1):
# Read audio (first 4 seconds only).
(audio, fs) = read_audio(audio_path, target_fs=sample_rate)
# Extract feature
feature = feature_extractor.transform(audio)
return feature, n
def calculate_features(args):
n_jobs = cpu_count()
executor = ProcessPoolExecutor(max_workers=n_jobs)
futures = []
print("Using {} workers in parallel.".format(n_jobs))
# Arguments.
dataset_dir = args.dataset_dir
workspace = args.workspace
features_type = args.mode
features_file_name = args.features_file_name
# Parameters for feature extraction.
metadata_delimiter = config.metadata_delimiter
sample_rate = config.sample_rate
window_size = config.window_size
overlap = config.overlap
seq_len = config.seq_len
mel_bins = config.mel_bins
    # Displaying arguments and parameters.
print("Arguments and Parameters:")
print("Dataset Directory: {}".format(dataset_dir))
print("Workspace: {}".format(workspace))
print("Sample Rate: {}".format(sample_rate))
print("Window Size: {}".format(window_size))
print("Overlapping Frames: {}".format(overlap))
print("Sequence Length: {}".format(seq_len)) # Dimension of feature corresponding to each audio file: (seq_len, mel_bins)
print("Mel Bins: {}".format(mel_bins))
# Paths
audio_dir = os.path.join(dataset_dir, 'audio')
meta_csv = os.path.join(dataset_dir, 'metadata', 'UrbanSound8K.csv')
hdf5_path = os.path.join(workspace, 'features', features_type, features_file_name)
# Displaying paths.
print("Reading audio from: {}".format(audio_dir))
print("Reading meatadata file form: {}".format(meta_csv))
print("Saving the extracted features at: {}".format(hdf5_path))
create_folder(os.path.dirname(hdf5_path))
# Feature extractor
feature_extractor = LogMelExtractor(sample_rate=sample_rate,
window_size=window_size,
overlap=overlap,
mel_bins=mel_bins)
audio_names, fs_IDs, start_times, end_times, saliences, folds, class_IDs, classes = read_meta(meta_csv, metadata_delimiter)
# Create hdf5 file
hf = h5py.File(hdf5_path, 'w')
    # Initialising hdf5 file to store audios/labels of all folds.
for fold_id in range(1, 11):
hf.create_dataset(
name='features_fold{}'.format(fold_id),
shape=(0, seq_len, mel_bins),
maxshape=(None, seq_len, mel_bins),
dtype=np.float32)
hf.create_dataset(
name='labels_fold{}'.format(fold_id),
shape=(0, 1),
maxshape=(None, 1),
dtype=np.float32)
# To remember number of audio files processed in each fold.
fold_count = [0] * 11
for (n, audio_name) in enumerate(audio_names):
# Calculate feature.
audio_path = os.path.join(audio_dir, 'fold{}'.format(folds[n]), audio_name)
futures.append(executor.submit(partial(calculate_logmel, audio_path, sample_rate, feature_extractor, n)))
for future in tqdm(futures):
if future.result() is not None:
feature, n = future.result()
hf['features_fold{}'.format(folds[n])].resize((fold_count[folds[n]] + 1, seq_len, mel_bins))
hf['features_fold{}'.format(folds[n])][fold_count[folds[n]]] = feature
hf['labels_fold{}'.format(folds[n])].resize((fold_count[folds[n]] + 1, 1))
hf['labels_fold{}'.format(folds[n])][fold_count[folds[n]]] = class_IDs[n]
fold_count[folds[n]] += 1
# Plot log-Mel for debug.
if PLOT_FEATURES:
plt.matshow(feature.T, origin='lower', aspect='auto', cmap='jet')
plt.show()
hf.close()
# Displaying total files processed from each fold.
print("Files Processed from each fold:")
for fold_id in range(1, 11):
print("Fold {}: {} files.".format(fold_id, fold_count[fold_id]))
# USAGE: python features.py logmel --dataset_dir=$DATASET_DIR --workspace=$WORKSPACE
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='')
subparsers = parser.add_subparsers(dest='mode') # Different modes can be added to extract different type of features.
parser_logmel = subparsers.add_parser('logmel')
parser_logmel.add_argument('--dataset_dir', type=str, required=True) # Path to the UrbanSound8K folder.
parser_logmel.add_argument('--workspace', type=str, required=True) # Directory where extracted features, model and logs of experiments are stored.
parser_logmel.add_argument('--features_file_name', type=str, required=True) # logmel-features.h5
args = parser.parse_args()
if args.mode == 'logmel':
calculate_features(args)
else:
raise Exception('Incorrect arguments!')
|
iamjanvijay/Background-Sound-Classification-in-Speech-Audio-Segments
|
utils/features.py
|
features.py
|
py
| 7,026 |
python
|
en
|
code
| 4 |
github-code
|
6
|
36766561062
|
'''
Let M be the maximum score. Every score was rescaled as score/M*100.
If the highest score is 70 and the math score was 50, the math score becomes 50/70*100 = 71.43.
Write a program that computes the new average after recalculating Sejun's grades this way.
3
40 80 60
75.0
'''
from sys import stdin
n = int(input())
score_list = list(map(int, stdin.readline().split()))
max_score = max(score_list)
new_score_list = []
for score in score_list:
new_score_list.append(score / max_score * 100)
print(sum(new_score_list) / n)
|
jiyoung-dev/Algorithm
|
Baekjoon/단계별(Python)/5단계_1차원배열/b1546_평균.py
|
b1546_평균.py
|
py
| 593 |
python
|
ko
|
code
| 0 |
github-code
|
6
|
70952127868
|
import re
book1=open("Book1.txt",'r')
book2=open("Book2.txt",'r')
book3=open("Book3.txt",'r')
line = book1.read() + " " + book2.read() + " " + book3.read()
nn = line.split()
longest_word = max(nn, key=len)
print (longest_word)
|
inwk6312fall2017/programming-task-final-bhagi162
|
task1c.py
|
task1c.py
|
py
| 243 |
python
|
en
|
code
| 0 |
github-code
|
6
|
35543810797
|
from django.urls import path
from . import views
app_name='cv'
urlpatterns = [
path('curriculo', views.index, name='index'),
path('curriculo/dados-pessoais', views.cadastrar_ou_aletarar_foto_e_objetivo, name='editar_dados'),
path('curriculo/educacao', views.cadastrar_educacao, name='educacao'),
path('curriculo/educacao/<id>/excluir', views.excluir_educacao, name='excluir_educacao'),
path('curriculo/experiencia', views.cadastrar_experiencia, name='experiencia'),
path('curriculo/experiencia/<id>/excluir', views.excluir_experiencia, name='excluir_experiencia'),
path('curriculo/visualizar', views.curriculo, name='curriculo'),
]
|
smctinf/casa_do_trabalhador
|
curriculo/urls.py
|
urls.py
|
py
| 662 |
python
|
pt
|
code
| 0 |
github-code
|
6
|
13442824529
|
import pygame, os
from modules.entitysets._puresensor import PureSensor
from imageload import loadImage
from button import Button
from menustate import MenuState
from staticimage import StaticImage
from gridrounding import gridRound
from selectionbox import SelectionBox
from label import Label
class RemoveSensorButton( Button ):
image = loadImage("remove.png", 2 )
def __init__( self, menu=None ):
Button.__init__( self, None, None, menu )
self.rect = self.image.get_rect()
self.rect.topleft = ( 54, 24 )
def push( self, clickKey, click ):
if "up" in clickKey:
self.parentState.toggleRemove()
class SnapToGridButton( Button ):
image = loadImage( "gridbutton.png", 2 )
def __init__( self, menu=None ):
Button.__init__( self, None, None, menu )
self.rect = self.image.get_rect()
self.rect.topleft = ( 24, 24 )
def push( self, clickKey, click ):
if "up" in clickKey:
self.parentState.toggleSnapToGrid()
class SensorEditButton( Button ):
image = loadImage( "sensoreditbutton.png", 2 )
rect = image.get_rect()
rect.topleft = ( 24, 144 )
def __init__( self, menu=None ):
Button.__init__( self, None, None, menu )
def push( self, clickKey, click ):
if "up" in clickKey:
aBoundEditState = SensorEditState( self.parentState.menu )
self.parentState.menu.loadMenuState( aBoundEditState )
class SensorEditState( MenuState ):
def __init__( self, menu, sprites=[] ):
MenuState.__init__( self, menu, sprites )
self.sprites = [self.fileNameLabel, self.miniMap]
self.buttons = []
self.panel = StaticImage(loadImage( "devmenu.png", 2 ), (10, 10))
self.addSprite( self.panel )
self.snapToGridButton = SnapToGridButton( self )
self.addButton( self.snapToGridButton )
self.removeButton = RemoveSensorButton( self )
self.addButton( self.removeButton )
self.gridButtonSelectionBox = None
self.removeButtonSelectionBox = None
self.addingMode = True
self.removingMode = False
self.curGrabbedSens = None
self.curStart = None
self.gridX = 40
self.gridY = 40
self.snapToGrid = False
self.whereEntWasGrabbed = None
def toggleSnapToGrid( self ):
self.snapToGrid = not self.snapToGrid
if self.gridButtonSelectionBox is None:
self.gridButtonSelectionBox = SelectionBox( self.snapToGridButton.rect, self )
self.addSprite( self.gridButtonSelectionBox )
else:
self.removeSprite( self.gridButtonSelectionBox )
self.gridButtonSelectionBox = None
self.menu.loadMenuState( self )
def toggleRemove( self ):
self.removingMode = not self.removingMode
if self.removeButtonSelectionBox is None:
self.removeButtonSelectionBox = SelectionBox( self.removeButton.rect, self )
self.addSprite( self.removeButtonSelectionBox )
else:
self.removeSprite( self.removeButtonSelectionBox )
self.removeButtonSelectionBox = None
self.menu.loadMenuState( self )
def getPressedSensor( self, point ):
"""See which sensor is at this point"""
escape = False
for eachSpriteList in ( eachGroup.sprites() for eachGroup in self.menu.playState.groups ):
for eachSprite in [ sprite for sprite in eachSpriteList if sprite.pureSensor]:
if eachSprite.rect.collidepoint( point ):
return eachSprite
def update( self, dt, click, clickKey, curMousePos=None ):
MenuState.update( self, dt, click, clickKey, curMousePos )
playState = self.menu.playState
curMousePos = curMousePos[0]-playState.panX, curMousePos[1]-playState.panY
if self.snapToGrid:
curMousePos = gridRound( curMousePos, self.gridX, self.gridY, trueRounding=True )
else:
curMousePos = curMousePos
if self.curStart is not None:
self.menu.playState.lineVisualiser.devMenuLineGroups = [ [ self.curStart, ( self.curStart[0], curMousePos[1] ) ],
[ ( self.curStart[0], curMousePos[1] ), curMousePos ], [ curMousePos, ( curMousePos[0], self.curStart[1] ) ], [ ( curMousePos[0], self.curStart[1] ), self.curStart ] ]
self.menu.playState.lineVisualiser.devMenuLineGroups = [ [ (each[0]+playState.panX, each[1]+playState.panY) for each in eachLine ] for eachLine in self.menu.playState.lineVisualiser.devMenuLineGroups ]
self.menu.playState.lineVisualiser.flush = True
self.menu.playState.lineVisualiser.renderLines = True
self.menu.playState.lineVisualiser.renderPhysicsLines = True
self.menu.playState.lineVisualiser.forceNoRender = True
if click is not None:
            if clickKey == 'mouse1down' and self.curStart is None:
self.curStart = curMousePos
            elif clickKey == 'mouse1up':
#ADD SENSOR HERE
destPoint = min( self.curStart[0], curMousePos[0] ), min( self.curStart[1], curMousePos[1] )
w = abs( self.curStart[0] - curMousePos[0] )
h = abs( self.curStart[1] - curMousePos[1] )
if w != 0 or h != 0:
destPoint = destPoint[0] + w/2, destPoint[1] + h/2
destGroup = getattr( self.menu.playState, PureSensor.playStateGroup )
PureSensor( pos=destPoint, group=destGroup, width=w, height=h )
self.curStart = None
            elif clickKey == 'mouse3down':
self.curGrabbedSens = self.getPressedSensor( (curMousePos[0]+playState.panX, curMousePos[1]+playState.panY) )
if self.curGrabbedSens is not None:
entPos = self.curGrabbedSens.getPosition()
self.whereEntWasGrabbed = curMousePos[0] - entPos[0], curMousePos[1] - entPos[1]
            elif clickKey == 'mouse3up':
pickedSensor = self.getPressedSensor( (curMousePos[0]+playState.panX, curMousePos[1]+playState.panY) )
if pickedSensor is not None:
if self.removingMode:
pickedSensor.kill()
self.curGrabbedSens = None
self.whereEntWasGrabbed = None
elif curMousePos is not None:
if self.curGrabbedSens is not None:
curEnt = self.curGrabbedSens
newPos = curMousePos[0]-self.whereEntWasGrabbed[0], curMousePos[1]-self.whereEntWasGrabbed[1]
curEnt.setPosition( newPos )
|
Occuliner/ThisHackishMess
|
modules/menuentries/sensoredit.py
|
sensoredit.py
|
py
| 6,709 |
python
|
en
|
code
| 2 |
github-code
|
6
|
31373127129
|
# encoding=utf8
import datefinder
from datetime import datetime
import sys
import csv
import boto3
from data import Data
from PIL import Image
import pytesseract
import cv2
import os
import re
class TestData:
"""docstring for TestData"""
@staticmethod
def get():
data = Data()
data.set('Filename', 'in.jpg')
data.set('Extracted text', 'abc xxx def')
return data
@staticmethod
def extract(filename):
"""
with open('/home/ubuntu/date-extraction-from-image/credentials.csv', 'r') as input:
next(input)
reader = csv.reader(input)
for line in reader:
access_key_id = line[2]
secret_access_key = line[3]
"""
access_key_id = 'AKIA6LRPMXT6S5TPPDIO'
secret_access_key = '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'
client = boto3.client('rekognition',
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key, region_name='us-east-1')
UPLOADED_FILE = '/tmp/'+filename
filename = "{}.png|jpeg|jpg".format(os.getpid())
with open(UPLOADED_FILE, 'rb') as source_image:
source_bytes = source_image.read()
response = client.detect_text(Image={'Bytes': source_bytes})
text = str(" ".join(re.findall(r"[a-z0-9\/\-\.\,]+", str(response), flags=re.I))).strip().title()
text = re.sub(r"([a-z]+)([0-9]+)", r"\1 \2", text, flags=re.I)
text = re.sub(r"([0-9]+)([a-z]+)", r"\1 \2", text, flags=re.I)
l1=[]
date=[]
opt = dict()
date_reg_exp2 = re.compile(r'detectedtext\s*[a-zA-Z0-9\s]{0,30}((?:(?:0[1-9]|1[0-9]|2[0-9]|3[0-1])(?:\D)(?:0[1-9]|1[0-2])(?:\D)(?:(?:19[7-9]\d|20\d{2})|\d{2}))|(?:(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|(Nov|Dec)(?:ember)?)(?:\D)(?:0[1-9]|1[0-9]|2[0-9]|3[0-1])(?:\D)(?:(?:19[7-9]\d|20\d{2})|\d{2}))|(?:(?:0[1-9]|1[0-2])(?:\D)(?:0[1-9]|1[0-9]|2[0-9]|3[0-1])(?:\D)(?:(?:19[7-9]\d|20\d{2})|\d{2}))|(?:(?:0[1-9]|1[0-9]|2[0-9]|3[0-1])(?:\D)(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|(Nov|Dec)(?:ember)?)(?:\D)(?:(?:19[7-9]\d|20\d{2})|\d{2})))',flags=re.I)
line = re.search(date_reg_exp2, str(text))
if line:
l1 =list(filter(None,line.groups()))
newDate = [ x for x in datefinder.find_dates(l1[0]) ][0]
date=re.split('[- / . ' ' ]',l1[0])
#opt["date"] = [date[2] + "/" + date[1] + "/" + date[0]]
opt["Date"] = [ newDate.strftime("%Y - %m - %d") ]
return opt
else:
opt['date'] = "date is not present"
return opt
# os.remove(filename)
# return text
|
prasadbiradar/date-extraction-from-images
|
testdata.py
|
testdata.py
|
py
| 2,928 |
python
|
en
|
code
| 0 |
github-code
|
6
|
25849126163
|
import random
import time
from colorama import Back, Fore, init
from SudokuF import *
from SudokuT import *
from Menus import *
from Ahorcado import *
lop = 0
while lop == 0:
menuprincipal()
opcionprincipal = input(Fore.BLUE + "[4] Finalizar: " + Fore.RESET)
if opcionprincipal == "Fernando": #Easter egg
newgame()
if opcionprincipal == "Endgame": #Easter egg
creditoss()
exit()
if opcionprincipal == "Ayuda": #Easter egg
jueguito()
if opcionprincipal == "Mario":
from mario_level_1 import *
if __name__ == '__main__':
main()
pg.quit()
#sys.exit()
numbers2 = opcionprincipal.split()
while len(numbers2) != 1 or not numbers2[0].isdigit() or int(numbers2[0]) > 4 or int(numbers2[0]) < 1:
print(Fore.RED + "...Opción incorrecta. Intente nuevamente." + Fore.RESET)
opcionprincipal = input(Fore.BLUE + "[4] Finalizar: " + Fore.RESET)
numbers2 = opcionprincipal.split()
opcionprincipal = int(numbers2[0])
loop = True
while loop == True:
if opcionprincipal == 1:
name = input(Fore.MAGENTA + "Nombre del jugador: " + Fore.RESET)
name_tablero = input(Fore.LIGHTMAGENTA_EX +"Nombre del tablero: " + Fore.RESET)
tablero = tableros() #[[1,3,4,5,8,9,2,6,7],[8,5,7,2,6,3,1,9,4],[9,6,2,1,4,7,8,3,5],[2,9,3,7,1,8,4,5,6],[5,4,1,3,2,6,7,8,9],[7,8,6,9,5,4,3,2,1],[4,7,9,8,3,5,6,1,2],[6,2,8,4,9,1,5,7,3],[3,1,5,6,7,2,9,4,0]]
imprimeTablero(tablero, name_tablero)
tablerverificar = []
for i in range(9):
tablerverificar.append([])
for j in range(9):
tablerverificar[i].append(tablero[i][j])
while not tableroCompleto(tablero):
numbers = input("Ingrese fila columna cifra: ")
numbers = numbers.split()
while verificardigitosvalidos(numbers) == False:
numbers = input("Ingrese fila columna cifra: ")
numbers = numbers.split()
x = int(numbers[0])
y = int(numbers[1])
d = int(numbers[2])
if verificarjugadavalida(tablero, x, y, d, tablerverificar) == True:
tablero[x-1][y-1] = d
imprimeTablero(tablero, name_tablero)
if final(tablero, name, name_tablero) == True:
loop = False
elif opcionprincipal == 2:
print("Desea cargar un tablero propio?")
print("[1] Si")
opcion = (input("[2] Cargar Tableros predefinidos: "))
while opcion != "1" and opcion != "2":
print(Fore.RED + "Opción incorrecta. Intente nuevamente." + Fore.RESET)
opcion = (input("[2] Cargar Tableros predefinidos: "))
opcion = int(opcion)
if opcion == 1:
vertablerosguardados()
if opcion == 2:
print()
alltableros()
numero = input("Ingrese el número del tablero que desea ver/jugar: ")
while not numero.isdigit() or int(numero) > 10 or int(numero) < 1:
print(Fore.RED + "Opción incorrecta. Intente nuevamente." + Fore.RESET)
numero = input("Ingrese el número del tablero que desea ver/jugar: ")
numero = int(numero)
tablero = selecionartablero(numero)
imprimeTablero(tablero, " ")
print("Desea jugar este tablero?:")
print("[1] Si")
opcion = (input("[2] Regresar: "))
opcion = opcion.split()
while len(opcion) != 1 or not opcion[0].isdigit() or int(opcion[0]) > 2 or int(opcion[0]) < 1:
print(Fore.RED + "...Opción incorrecta. Intente nuevamente." + Fore.RESET)
opcion = (input("[2] Regresar: "))
opcion = opcion.split()
opcion = int(opcion[0])
            # verify that the entered option is valid
if opcion == 1:
tablerverificar = []
for i in range(9):
tablerverificar.append([])
for j in range(9):
tablerverificar[i].append(tablero[i][j])
imprimeTablero(tablero, name_tablero = "")
while not tableroCompleto(tablero):
numbers = input("Ingrese fila columna cifra: ")
numbers = numbers.split()
while verificardigitosvalidos(numbers) == False:
numbers = input("Ingrese fila columna cifra: ")
numbers = numbers.split()
x = int(numbers[0])
y = int(numbers[1])
d = int(numbers[2])
if verificarjugadavalida(tablero, x, y, d, tablerverificar) == True:
tablero[x-1][y-1] = d
imprimeTablero(tablero, name_tablero = "")
creditosfinales()
print(Fore.MAGENTA + "Felicidades, has ganado el juego" + Fore.RESET)
loop = False
elif opcion == 2:
print("volviendo al menu principal")
loop = False
elif opcionprincipal == 3:
print("Selecione la dificultad del tablero")
print(Fore.GREEN + "[1] Fácil" + Fore.RESET)
print(Fore.YELLOW+ "[2] Medio" + Fore.RESET)
print(Fore.RED + "[3] Difícil"+ Fore.RESET)
opcion = (input("[4] Regresar: "))
while opcion != "1" and opcion != "2" and opcion != "3" and opcion != "4":
print(Fore.RED + "Opción incorrecta. Intente nuevamente." + Fore.RESET)
opcion = (input("[4] Regresar: "))
opcion = int(opcion)
tablero = tablerodificultad(opcion)
name = input(Fore.MAGENTA + "Nombre del jugador: " + Fore.RESET)
name_tablero = input(Fore.LIGHTMAGENTA_EX +"Nombre del tablero: " + Fore.RESET)
imprimeTablero(tablero, name_tablero)
tablerverificar = []
for i in range(9):
tablerverificar.append([])
for j in range(9):
tablerverificar[i].append(tablero[i][j])
while not tableroCompleto(tablero):
numbers = input("Ingrese fila columna cifra: ")
numbers = numbers.split()
while verificardigitosvalidos(numbers) == False:
numbers = input("Ingrese fila columna cifra: ")
numbers = numbers.split()
x = int(numbers[0])
y = int(numbers[1])
d = int(numbers[2])
if verificarjugadavalida(tablero, x, y, d, tablerverificar) == True:
tablero[x-1][y-1] = d
imprimeTablero(tablero, name_tablero)
if final(tablero, name, name_tablero) == True:
loop = False
elif opcionprincipal == 4:
print(Fore.RED + "Gracias por jugar."+ Fore.RESET)
exit()
|
K23NO/Soduko
|
Sudoku.py
|
Sudoku.py
|
py
| 7,562 |
python
|
es
|
code
| 0 |
github-code
|
6
|
40176574464
|
import re
import urllib.parse
from bs4 import BeautifulSoup
from . import url
def name(soup):
title_bar = soup.find('div', {"class": "titleBar"})
name = title_bar.h1.text
#logger.debug("parse_brewery_name: name: {}".format(name))
return name
def beers(soup):
baContent = soup.find("div", {"id":"ba-content"})
#logger.debug("parse_beers_from_brewery: ba-content: {}".format(baContent))
sortable_table = soup.find("table", {"class": "sortable"})
if not sortable_table:
return []
t_body = sortable_table.tbody
this_brewery_beers = []
for row in t_body:
cols = row.find_all('td')
beer = {}
# Get Link and Name from 1st col
#a = cols[0].find('a', href=re.compile('/beer/profile/'))
a = cols[0].a
link = a['href']
beer['id'] = url.beer_id(link)
# NOTE The brewery ID is extracted from the link to every beer because
# some places list beers that redirect to different breweries.
beer['brewery_id'] = url.brewery_id(link)
beer['name'] = a.text
# Get Style from 2nd col
beer['style'] = a = cols[1].a.text
# Get ABV from 3rd col
abv_text = cols[2].text
try:
beer['abv'] = float(abv_text)
except:
beer['abv'] = None
# Get Ratings from 4th col
ratings_text = cols[3].text
try:
beer['ratings'] = float(ratings_text)
except:
beer['ratings'] = None
# Get Score from 5th col
score_text = cols[4].text
try:
beer['score'] = float(score_text)
except:
beer['score'] = None
this_brewery_beers.append(beer)
return this_brewery_beers
|
JohnMcAninley/beer-goggles
|
scraper/src/parse/brewery.py
|
brewery.py
|
py
| 1,516 |
python
|
en
|
code
| 0 |
github-code
|
6
|
21527705720
|
from .models import Order, Customer
from django.http import HttpResponse
def cartData(request):
try:
customer = request.user.customer
except:
device = request.COOKIES['device']
customer, created = Customer.objects.get_or_create(device=device)
order, created = Order.objects.get_or_create(customer=customer, complete=False)
items = order.orderitem_set.all()
counterCartItems = order.get_amount_of_items_cart
cartItems = order.get_cart_items
return {'cartItems': cartItems, 'counterCartItems': counterCartItems, 'customer': customer, 'order': order, 'items':items}
|
SonnyTopG/Ecommerce-Website
|
website/shop/utils.py
|
utils.py
|
py
| 618 |
python
|
en
|
code
| 0 |
github-code
|
6
|
4975361004
|
def is_pent(n):
quad = (1 + (1+24 * n)**(1/2))/6
if quad == int(quad):
return True
return False
def is_hex(n):
quad = (1 + (1+ 8 * n)**(1/2))/4
if quad == int(quad):
return True
return False
stop = False
i = 286
while not stop:
tn = i * (i+1) / 2
if is_hex(tn):
if is_pent(tn):
stop = True
print('Tn: ',tn)
i+=1
print(is_hex(7906276))
|
colinmiller94/Project_Euler
|
e45.py
|
e45.py
|
py
| 427 |
python
|
en
|
code
| 0 |
github-code
|
6
|
15974434638
|
# Api Agenda Lionx
from src.infrastructures.mongo.mongo_infrastructure import MongoInfrastructure
# Third party
from decouple import config
from pymongo.cursor import Cursor
from pymongo.collection import InsertOneResult, UpdateResult
class MongoRepository:
def __init__(self):
self.mongo_client = MongoInfrastructure.get_client()
self.database = self.mongo_client.get_database(config("DATABASE_NAME"))
self.collection = self.database.get_collection(config("COLLECTION_NAME"))
def get_all_contacts(self) -> Cursor:
remove_pymongo_id = {"_id": 0}
contacts_cursor = self.collection.find({}, remove_pymongo_id)
return contacts_cursor
def get_contact_by_id(self, id) -> dict:
filter_by_id = {"contact_id": id, "situation": "active"}
remove_pymongo_id = {"_id": 0}
contact = self.collection.find_one(filter_by_id, remove_pymongo_id)
return contact
def get_contacts_by_first_letters(self, letters) -> Cursor:
regex_first_letters_contact = {"$regex": f"^{letters}", "$options": "i"}
filter_by_first_name_letters = {"firstName": regex_first_letters_contact, "situation": "active"}
remove_pymongo_id = {"_id": 0}
contacts_cursor = self.collection.find(filter_by_first_name_letters, remove_pymongo_id)
return contacts_cursor
def register_contact(self, new_contact) -> InsertOneResult:
insert_result = self.collection.insert_one(new_contact)
return insert_result
def update_contact(self, edited_contact, id) -> UpdateResult:
filter_by_id = {"contact_id": id}
new_values = {"$set": edited_contact}
update_result = self.collection.update_one(filter_by_id, new_values)
return update_result
def soft_delete_contact(self, id) -> UpdateResult:
filter_by_id = {"contact_id": id}
field_to_update = {"$set": {"situation": "deactivated"}}
update_result = self.collection.update_one(filter_by_id, field_to_update)
return update_result
|
vinireeis/api_agenda_lionx
|
src/repositories/mongo/repository.py
|
repository.py
|
py
| 2,050 |
python
|
en
|
code
| 1 |
github-code
|
6
|
7368403785
|
#!/usr/bin/env python2.6
# -*- coding: utf-8 -*-
# mainframe.py
# Pomodoro
#
# Created by Roman Rader on 22.06.11.
# New BSD License 2011 Antigluk https://github.com/antigluk/Pomodoro
"""
Contains main frame of application.
"""
import wx
from state import PomodoroStateProxy as PomodoroState
from NotificationCenter.NotificationCenter import NotificationCenter
import logging
logging.getLogger('Pomodoro')
class MainFrameController(wx.Frame):
"""Main frame of Pomodoro"""
def __init__(self):
wx.Frame.__init__(
self,
None,
-1,
'Pomodoro it!',
style=wx.BORDER_DEFAULT | wx.STAY_ON_TOP,
size=(220, 120),
)
state = PomodoroState()
self.__state_dict = {
state.StateNoState: {'bs': '...'},
state.StateInPomodoro: {'bs': u"Отменить..."},
state.StateInRest: {'bs': u"Отдыхайте!"},
state.StateWaitingPomodoro: {'bs': u"Начать помидору"},
state.StateWaitingRest: {'bs': u"Начать отдых"},
state.StatePomodoroKilled: {'bs': u"Начать помидору"},
}
self.buildFrame()
self.updateUI()
self.makeMenu()
self.Show(False)
NotificationCenter().addObserver(self,self.onDBUpdate,"dbUpdated")
NotificationCenter().addObserver(self,self.onUpdateUI,"updateUI")
def buildFrame(self):
self.panel = wx.Panel(self)
self.txt = wx.StaticText(self.panel, pos=(10, 10),
label='Pomodoro!')
self.times_l = wx.StaticText(self.panel, pos=(120, 10),
label=u"0 помидор")
self.timer_ctrl = wx.TextCtrl(self.panel, pos=(10, 30),
size=(200, -1), style=wx.TE_READONLY | wx.TE_CENTER)
self.start_button = wx.Button(self.panel, pos=(20, 70), label=''
, size=(170, -1))
self.start_button.Bind(wx.EVT_BUTTON, self.bClick)
def onUpdateUI(self, event):
self.updateUI()
def updateUI(self):
        #TODO: check whether the window is visible; otherwise do not update
#TODO: remove this ugly method
state = PomodoroState()
self.timer_ctrl.SetValue(state.text)
self.start_button.SetLabel(self.__state_dict[state.active]['bs'])
self.txt.SetLabel(state.caption)
self.times_l.SetLabel(u"%d помидор" % state.GetTodayCount())
def bClick(self, m):
logging.debug("Toggle state called from menu")
self.controller.toggleState()
def onExit(self,m):
logging.debug("Quit called from menu")
self.controller.quit()
def makeMenu(self):
self.menuBar = wx.MenuBar()
self.filemenu = wx.Menu()
self.pomodmenu = wx.Menu()
item = self.filemenu.Append(wx.ID_ANY, "Hide")
self.Bind(wx.EVT_MENU, self.hideFrame, item)
item = self.filemenu.Append(wx.ID_ANY, "Toggle pomodoro")
self.Bind(wx.EVT_MENU, self.bClick, item)
self.filemenu.AppendSeparator()
item = self.filemenu.Append(wx.ID_EXIT, "&Quit", "quit")
self.Bind(wx.EVT_MENU, self.onExit, id=wx.ID_EXIT)
item = self.pomodmenu.Append(wx.ID_ANY, "All", "List of pomodoros")
self.Bind(wx.EVT_MENU, self.showListOfPomodoros, item)
item = self.pomodmenu.Append(wx.ID_ANY, "Statistics", "Statistics")
self.Bind(wx.EVT_MENU, self.showStatistics, item)
self.menuBar.Append(self.filemenu, "&File")
self.menuBar.Append(self.pomodmenu, "&Pomodors")
self.SetMenuBar(self.menuBar)
def onDBUpdate(self, obj):
pass
def hideFrame(self, m):
logging.debug("Hide frame called from menu")
self.Show(False)
def showListOfPomodoros(self, m):
logging.debug("Show list of pomodors called from menu")
self.controller.showListOfPomodoros()
def showStatistics(self, m):
logging.debug("Show statistics of pomodors called from menu")
self.controller.showStatistics()
|
rrader/Pomodoro
|
pomodoro/mainframe.py
|
mainframe.py
|
py
| 4,124 |
python
|
en
|
code
| 1 |
github-code
|
6
|
17600865196
|
#encoding:UTF-8
import urllib
import urllib.request
import json
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif'] = ['SimHei']
import matplotlib.lines as mlines
import numpy as np
import time
data = urllib.request.urlopen('https://stationdata.wunderground.com/cgi-bin/stationdata?iconsize=3&width=2048&height=2048&maxage=3600&format=json&maxstations=100000&rf_filter=1&minlat=38&minlon=-85&maxlat=48&maxlon=-73').read()
record = data.decode('UTF-8')
record = record.replace('},}', '}}')
a = open("/Users/hsw/Desktop/CA_region_rawdata.txt", "w+")
a.write(record)
a.close()
data = json.loads(record)
#print(data)
print(data['conds'])
preprocess = data['conds']
station = []
T = []
lats = []
lons = []
for o in preprocess:
id = str(o)
station.append(preprocess[id]['id'])
T.append(5/9*(float(preprocess[id]['tempf'])-32))
lats.append(float(preprocess[id]['lat']))
lons.append(float(preprocess[id]['lon']))
# ============================================ # plot
# ============================================initialize the plot
plt.figure(figsize=(11, 8), dpi=120)
axes = plt.subplot(111)
# set up map projection with
# use low resolution coastlines.
map = Basemap(llcrnrlon=-87, llcrnrlat=38, urcrnrlon=-73, urcrnrlat=48, \
rsphere=(6378137.00, 6356752.3142), \
resolution='i', projection='merc', \
lat_0=40., lon_0=-20., lat_ts=20.)
# draw coastlines, country boundaries, fill continents.
map.drawcoastlines(linewidth=0.25)
map.drawcountries(linewidth=0.25)
# draw the edge of the map projection region (the projection limb)
map.drawmapboundary(fill_color='#87CEFA')#689CD2
# draw lat/lon grid lines every 30 degrees.
#map.drawmeridians(np.arange(0, 360, 10))
map.drawmeridians(np.arange(0, 360, 10),labels=[0,0,0,1],fontsize=10)
#map.drawparallels(np.arange(-90, 90, 10))
map.drawparallels(np.arange(-90, 90, 10),labels=[1,0,0,0],fontsize=10)
# Fill continent with a different color
map.fillcontinents(color='#FFFFFF', lake_color='#EEEEEE', zorder=0)
# ============================================draw the stations and data
# compute native map projection coordinates of lat/lon grid.
x, y = map(lons, lats)
max_T = max(T)
# Plot each station in a loop.
# Set some parameters
size_factor = 100.0
x_offset = 20.0
y_offset = -20.0
rotation = 0
temp=0
f = open("/Users/hsw/Desktop/CA_region_Tdata.txt", "w+")
f.close()
#draw station point
analyze = ''
for i, j, k, l in zip(x, y, T, station):
temp = temp+1
size = size_factor * k / max_T
if k <= -10.0 and k >= -100.0:
cs1 = map.scatter(i, j, s=15, marker='o', color='#00008F')
if -10 < k and k <= -5:
cs2 = map.scatter(i, j, s=15, marker='o', color='#00009F')
if -5 < k and k <= -2:
cs3 = map.scatter(i, j, s=15, marker='o', color='#0000FF')
if -2 < k and k <= 2:
cs4 = map.scatter(i, j, s=15, marker='o', color='#006FFF')
if 2 < k and k <= 6:
cs5 = map.scatter(i, j, s=15, marker='o', color='#00BFFF')
if 6 <= k and k <= 10:
cs5 = map.scatter(i, j, s=15, marker='o', color='#00FFFF')
if 10 <= k and k <= 14:
cs5 = map.scatter(i, j, s=15, marker='o', color='#4FFFAF')
if 14 <= k and k <= 18:
cs5 = map.scatter(i, j, s=15, marker='o', color='#7FF77F')
if 18 <= k and k <= 22:
cs5 = map.scatter(i, j, s=15, marker='o', color='#FFFF00')
if 22 <= k and k <= 26:
cs5 = map.scatter(i, j, s=15, marker='o', color='#FFBF00')
if 26 <= k and k <= 30:
cs5 = map.scatter(i, j, s=15, marker='o', color='#FF6F00')
if 30 <= k and k <= 35:
cs5 = map.scatter(i, j, s=15, marker='o', color='#FF0000')
if 35 < k and k <= 100:
cs6 = map.scatter(i, j, s=15, marker='o', color='#7F0000')
#if k != 9999:
# plt.text(i, j, str(k) + '°C', rotation=rotation, fontsize=10)
f = open("/Users/hsw/Desktop/CA_region_Tdata.txt", "a+")
f.write(' Station:'+ l + ' Temperature:' + str(k) + '\n')
f.close()
title = 'Temperature Distribution for Toronto and Nearby Regions\n' + 'Data updated: ' + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time() - 8 * 60 * 60)) + ' UTC\nData source: wunderground weather, plotted by @Louis_He'
# ============================================#define legends
a = mlines.Line2D([], [], color='#7F0000', marker='o',
markersize=5, label='>35°C',ls='')
b = mlines.Line2D([], [], color='#FF0000', marker='o',
markersize=5, label='>30°C',ls='')
c = mlines.Line2D([], [], color='#FF6F00', marker='o',
markersize=5, label='26~30°C',ls='')
d = mlines.Line2D([], [], color='#FFBF00', marker='o',
markersize=5, label='22~26°C',ls='')
e = mlines.Line2D([], [], color='#FFFF00', marker='o',
markersize=5, label='18~22°C',ls='')
f = mlines.Line2D([], [], color='#7FF77F', marker='o',
markersize=5, label='14~18°C',ls='')
g = mlines.Line2D([], [], color='#4FFFAF', marker='o',
markersize=5, label='10~14°C',ls='')
h = mlines.Line2D([], [], color='#00FFFF', marker='o',
markersize=5, label='6~10°C',ls='')
i = mlines.Line2D([], [], color='#00BFFF', marker='o',
markersize=5, label='2~6°C',ls='')
j = mlines.Line2D([], [], color='#006FFF', marker='o',
markersize=5, label='-2~2°C',ls='')
k = mlines.Line2D([], [], color='#0000FF', marker='o',
markersize=5, label='-5~-2°C',ls='')
l = mlines.Line2D([], [], color='#00009F', marker='o',
markersize=5, label='-10~-5°C',ls='')
m = mlines.Line2D([], [], color='#00008F', marker='o',
markersize=5, label='<-10°C',ls='')
plt.legend(handles=[b, c, d, e, f, g, h, i, j, k, l, m])
plt.title(title)
save = '/Users/hsw/Desktop/CA_region_Tsample.png'
plt.savefig(save, dpi=120)
|
Louis-He/weather_map
|
wunderground_weather.py
|
wunderground_weather.py
|
py
| 6,022 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74903206266
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
from collections import deque
from typing import List, Optional
class Solution:
def levelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
if not root:
return []
queue = deque([root])
result = []
while queue:
levelLength = len(queue)
levelResult = []
for i in range(levelLength):
current_node = queue.popleft()
levelResult.append(current_node.val)
if current_node.left:
queue.append(current_node.left)
if current_node.right:
queue.append(current_node.right)
result.append(levelResult)
return result
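# Illustrative usage sketch (added; on LeetCode, TreeNode and the typing names
# are supplied by the judge at runtime). With the TreeNode stub above
# uncommented, the tree [3, 9, 20, null, null, 15, 7] would be handled as:
#
#   root = TreeNode(3, TreeNode(9), TreeNode(20, TreeNode(15), TreeNode(7)))
#   Solution().levelOrder(root)  # -> [[3], [9, 20], [15, 7]]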
|
eungang3/Leetcode
|
binary-tree-level-order-traversal/binary-tree-level-order-traversal.py
|
binary-tree-level-order-traversal.py
|
py
| 973 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71836229628
|
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_insertar(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(570, 518)
self.verticalLayout = QtWidgets.QVBoxLayout(Form)
self.verticalLayout.setObjectName("verticalLayout")
self.groupBox = QtWidgets.QGroupBox(Form)
self.groupBox.setFlat(True)
self.groupBox.setObjectName("groupBox")
self.verticalLayout_9 = QtWidgets.QVBoxLayout(self.groupBox)
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.frame_11 = QtWidgets.QFrame(self.groupBox)
self.frame_11.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_11.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_11.setObjectName("frame_11")
self.horizontalLayout_5 = QtWidgets.QHBoxLayout(self.frame_11)
self.horizontalLayout_5.setSizeConstraint(QtWidgets.QLayout.SetMinimumSize)
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.verticalLayout_9.addWidget(self.frame_11)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
self.verticalLayout_9.addItem(spacerItem)
self.frame_12 = QtWidgets.QFrame(self.groupBox)
self.frame_12.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_12.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_12.setObjectName("frame_12")
self.verticalLayout_10 = QtWidgets.QVBoxLayout(self.frame_12)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.label_12 = QtWidgets.QLabel(self.frame_12)
self.label_12.setFrameShadow(QtWidgets.QFrame.Plain)
self.label_12.setAlignment(QtCore.Qt.AlignCenter)
self.label_12.setObjectName("label_12")
self.verticalLayout_10.addWidget(self.label_12)
self.verticalLayout_9.addWidget(self.frame_12)
self.frame_13 = QtWidgets.QFrame(self.groupBox)
self.frame_13.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_13.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_13.setObjectName("frame_13")
self.horizontalLayout_6 = QtWidgets.QHBoxLayout(self.frame_13)
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.frame_14 = QtWidgets.QFrame(self.frame_13)
self.frame_14.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_14.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_14.setObjectName("frame_14")
self.verticalLayout_11 = QtWidgets.QVBoxLayout(self.frame_14)
self.verticalLayout_11.setObjectName("verticalLayout_11")
self.label_13 = QtWidgets.QLabel(self.frame_14)
self.label_13.setObjectName("label_13")
self.verticalLayout_11.addWidget(self.label_13)
self.label_14 = QtWidgets.QLabel(self.frame_14)
self.label_14.setObjectName("label_14")
self.verticalLayout_11.addWidget(self.label_14)
self.label_15 = QtWidgets.QLabel(self.frame_14)
self.label_15.setObjectName("label_15")
self.verticalLayout_11.addWidget(self.label_15)
self.horizontalLayout_6.addWidget(self.frame_14)
self.frame_15 = QtWidgets.QFrame(self.frame_13)
self.frame_15.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_15.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_15.setObjectName("frame_15")
self.verticalLayout_12 = QtWidgets.QVBoxLayout(self.frame_15)
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.text_act1 = QtWidgets.QLineEdit(self.frame_15)
self.text_act1.setObjectName("text_act1")
self.verticalLayout_12.addWidget(self.text_act1)
self.text_act2 = QtWidgets.QLineEdit(self.frame_15)
self.text_act2.setObjectName("text_act2")
self.verticalLayout_12.addWidget(self.text_act2)
self.text_act3 = QtWidgets.QLineEdit(self.frame_15)
self.text_act3.setObjectName("text_act3")
self.verticalLayout_12.addWidget(self.text_act3)
self.horizontalLayout_6.addWidget(self.frame_15)
self.frame_16 = QtWidgets.QFrame(self.frame_13)
self.frame_16.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_16.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_16.setObjectName("frame_16")
self.verticalLayout_13 = QtWidgets.QVBoxLayout(self.frame_16)
self.verticalLayout_13.setObjectName("verticalLayout_13")
self.label_16 = QtWidgets.QLabel(self.frame_16)
self.label_16.setObjectName("label_16")
self.verticalLayout_13.addWidget(self.label_16)
self.label_17 = QtWidgets.QLabel(self.frame_16)
self.label_17.setObjectName("label_17")
self.verticalLayout_13.addWidget(self.label_17)
self.label_18 = QtWidgets.QLabel(self.frame_16)
self.label_18.setObjectName("label_18")
self.verticalLayout_13.addWidget(self.label_18)
self.horizontalLayout_6.addWidget(self.frame_16)
self.frame_17 = QtWidgets.QFrame(self.frame_13)
self.frame_17.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_17.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_17.setObjectName("frame_17")
self.verticalLayout_14 = QtWidgets.QVBoxLayout(self.frame_17)
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.text_act4 = QtWidgets.QLineEdit(self.frame_17)
self.text_act4.setObjectName("text_act4")
self.verticalLayout_14.addWidget(self.text_act4)
self.text_act5 = QtWidgets.QLineEdit(self.frame_17)
self.text_act5.setObjectName("text_act5")
self.verticalLayout_14.addWidget(self.text_act5)
self.text_act6 = QtWidgets.QLineEdit(self.frame_17)
self.text_act6.setObjectName("text_act6")
self.verticalLayout_14.addWidget(self.text_act6)
self.horizontalLayout_6.addWidget(self.frame_17)
self.verticalLayout_9.addWidget(self.frame_13)
self.frame_18 = QtWidgets.QFrame(self.groupBox)
self.frame_18.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_18.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_18.setObjectName("frame_18")
self.verticalLayout_15 = QtWidgets.QVBoxLayout(self.frame_18)
self.verticalLayout_15.setObjectName("verticalLayout_15")
self.label_19 = QtWidgets.QLabel(self.frame_18)
self.label_19.setAlignment(QtCore.Qt.AlignCenter)
self.label_19.setObjectName("label_19")
self.verticalLayout_15.addWidget(self.label_19)
self.verticalLayout_9.addWidget(self.frame_18)
self.frame_19 = QtWidgets.QFrame(self.groupBox)
self.frame_19.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_19.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_19.setObjectName("frame_19")
self.horizontalLayout_7 = QtWidgets.QHBoxLayout(self.frame_19)
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.label_20 = QtWidgets.QLabel(self.frame_19)
self.label_20.setObjectName("label_20")
self.horizontalLayout_7.addWidget(self.label_20)
self.text_estado = QtWidgets.QLineEdit(self.frame_19)
self.text_estado.setObjectName("text_estado")
self.horizontalLayout_7.addWidget(self.text_estado)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem1)
self.verticalLayout_9.addWidget(self.frame_19)
self.verticalLayout.addWidget(self.groupBox)
self.frame_10 = QtWidgets.QFrame(Form)
self.frame_10.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_10.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_10.setObjectName("frame_10")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.frame_10)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem2)
self.btn_guardar = QtWidgets.QPushButton(self.frame_10)
self.btn_guardar.setObjectName("btn_guardar")
self.horizontalLayout_4.addWidget(self.btn_guardar)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem3)
self.verticalLayout.addWidget(self.frame_10)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Insertar datos"))
self.groupBox.setTitle(_translate("Form", "Insertar datos de entrenamiento"))
self.label_12.setText(_translate("Form", "Para cada actividad, introduce \"A\" si aprobo, \"R\" si reprobo o \"NP\" si no presento."))
self.label_13.setText(_translate("Form", "Actividad 1:"))
self.label_14.setText(_translate("Form", "Actividad 2:"))
self.label_15.setText(_translate("Form", "Actividad 3:"))
self.label_16.setText(_translate("Form", "Actividad 4:"))
self.label_17.setText(_translate("Form", "Actividad 5:"))
self.label_18.setText(_translate("Form", "Actividad 6:"))
self.label_19.setText(_translate("Form", "Introduce \"SI\" o \"NO\""))
self.label_20.setText(_translate("Form", "Aprobo: "))
self.text_estado.setPlaceholderText(_translate("Form", "Ingrese \"SI\" o \"NO\""))
self.btn_guardar.setText(_translate("Form", "Guardar datos"))
|
JoseVale99/simulador_prediccion_desemepe-o
|
view/insertar.py
|
insertar.py
|
py
| 9,728 |
python
|
en
|
code
| 0 |
github-code
|
6
|
170942993
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from rp_ui_harness import RequestPolicyTestCase
from marionette import expectedFailure
from marionette_driver import Wait
import re
msg = ("Marionette -- test_logging_error_detection -- "
"this test is expected to fail")
class ErrorDetectionTests(object):
################
# Test Methods #
################
def test_normal_error(self, n=1):
self.error_triggerer.trigger_error(
"error", "backgroundscript", msg=msg)
self._do_checks(
n,
r'^console.error:\s+\[RequestPolicy\] ' + msg + '$')
def test_reference_error(self, n=1):
self.error_triggerer.trigger_error(
"ReferenceError", "backgroundscript")
self._do_checks(
n,
(
r"^JavaScript error: "
r"chrome://rpcontinued/content/ui-testing/services\.js, "
r"line [0-9]+: ReferenceError: "
)
)
def test_reference_error_in_promise_chain(self, n=1):
self.error_triggerer.trigger_error(
"ReferenceError:Promise", "backgroundscript")
self._do_checks(
n,
(
r"^JavaScript error: "
r"chrome://rpcontinued/content/ui-testing/services\.js, "
r"line [0-9]+: ReferenceError: "
)
)
##########################
# Private Helper Methods #
##########################
def _do_checks(self, n, message_regexp):
raise NotImplementedError
class ErrorDetectionTestCase(RequestPolicyTestCase):
expected_error = False
def setUp(self):
super(ErrorDetectionTestCase, self).setUp()
self.gecko_log.start_ignoring_errors(expected=self.expected_error)
def tearDown(self):
try:
self.gecko_log.stop_ignoring_errors()
finally:
super(ErrorDetectionTestCase, self).tearDown()
class TestGeckoLog(ErrorDetectionTests, ErrorDetectionTestCase):
def setUp(self):
super(TestGeckoLog, self).setUp()
self._assert_n_errors(0)
##########################
# Private Helper Methods #
##########################
def _do_checks(self, n, message_regexp):
self._assert_n_errors(n)
self._assert_error(message_regexp)
def _get_error_lines_including_ignored_errors(self):
return self.gecko_log.get_error_lines_of_current_test(
return_ignored_as_well=True)
def _get_error_lines(self):
return self.gecko_log.get_error_lines_of_current_test()
def _assert_n_errors(self, n):
Wait(self.marionette).until(
lambda _: (
len(self._get_error_lines_including_ignored_errors()) == n
)
)
self.assertEqual(0, len(self._get_error_lines()))
def _assert_error(self, message_regexp):
error_lines = self._get_error_lines_including_ignored_errors()
line = error_lines[-1]
self.assertTrue(
re.search(message_regexp, line),
msg=("String \"" + line + "\" matched!"))
class TestFailureOnTearDown(ErrorDetectionTests, ErrorDetectionTestCase):
expected_error = True
@expectedFailure
def tearDown(self):
super(TestFailureOnTearDown, self).tearDown()
##########################
# Private Helper Methods #
##########################
# Explicitly do *not* perform checks in _do_checks(), to test if the
# TestRunner's tearDown fn waits long enough to detect all logging errors.
def _do_checks(self, n, message_regexp):
pass
|
RequestPolicyContinued/requestpolicy
|
tests/marionette/rp_puppeteer/tests-quick/test_error_detection.py
|
test_error_detection.py
|
py
| 3,791 |
python
|
en
|
code
| 253 |
github-code
|
6
|
73652353149
|
# You are given two axis-aligned rectangles on a 2D plane; compute and return
# the total area covered by the two rectangles.
# Each rectangle is described by its bottom-left and top-right corner coordinates:
# The first rectangle is defined by its bottom-left corner (ax1, ay1) and top-right corner (ax2, ay2).
# The second rectangle is defined by its bottom-left corner (bx1, by1) and top-right corner (bx2, by2).
class Solution(object):
def computeArea(self, ax1, ay1, ax2, ay2, bx1, by1, bx2, by2):
"""
:type ax1: int
:type ay1: int
:type ax2: int
:type ay2: int
:type bx1: int
:type by1: int
:type bx2: int
:type by2: int
:rtype: int
"""
lengthX = max(min(ax2, bx2) - max(ax1, bx1), 0)
lengthY = max(min(ay2, by2) - max(ay1, by1), 0)
area1 = (ax2 - ax1) * (ay2 - ay1)
area2 = (bx2 - bx1) * (by2 - by1)
return area1 + area2 - lengthX * lengthY
ax1 = -3
ay1 = 0
ax2 = 3
ay2 = 4
bx1 = 0
by1 = -1
bx2 = 9
by2 = 2
a = Solution()
print(a.computeArea(ax1, ay1, ax2, ay2, bx1, by1, bx2, by2))
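# Worked check of the example above (added for illustration): the overlap spans
# x in [max(-3, 0), min(3, 9)] = [0, 3] and y in [max(0, -1), min(4, 2)] = [0, 2],
# so lengthX = 3, lengthY = 2 and the overlap area is 3 * 2 = 6.
# area1 = 6 * 4 = 24, area2 = 9 * 3 = 27, hence 24 + 27 - 6 = 45 is printed.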
|
xxxxlc/leetcode
|
array/computeArea.py
|
computeArea.py
|
py
| 1,096 |
python
|
zh
|
code
| 0 |
github-code
|
6
|
16791541041
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 23 12:32:47 2022
@author: maksi
"""
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from sklearn.datasets import load_digits
from keras.models import Sequential
from keras.layers import Dense
from tensorflow.keras.optimizers import Adam
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
data = load_digits()
X = data.data
y = data.target
y = pd.Categorical(y)
y = pd.get_dummies(y).values
class_num = y.shape[1]
model = Sequential()
model.add(Dense(64, input_shape = (X.shape[1],), activation = 'relu'))
model.add(Dense(64, activation = 'relu'))
model.add(Dense(64, activation = 'relu'))
model.add(Dense(class_num, activation = 'softmax'))
learning_rate = 0.0001
model.compile(optimizer=Adam(learning_rate), loss='categorical_crossentropy', metrics=['accuracy'])
model.summary()
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2)
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)
model.fit(X_train, y_train, batch_size=32, epochs=100, validation_data=(X_test, y_test), verbose=2)
historia = model.history.history
floss_train = historia['loss']
floss_test = historia['val_loss']
acc_train = historia['accuracy']
acc_test = historia['val_accuracy']
fig,ax = plt.subplots(1,2, figsize=(20,10))
epochs = np.arange(0, 100)
ax[0].plot(epochs, floss_train, label = 'floss_train')
ax[0].plot(epochs, floss_test, label = 'floss_test')
ax[0].set_title('Loss functions')
ax[0].legend()
ax[1].set_title('Accuracy')
ax[1].plot(epochs, acc_train, label = 'acc_train')
ax[1].plot(epochs, acc_test, label = 'acc_test')
ax[1].legend()
|
makspervov/Podstawy-SI-Python
|
lab5/lab5_zad2.py
|
lab5_zad2.py
|
py
| 1,785 |
python
|
en
|
code
| 0 |
github-code
|
6
|
30459355654
|
#!/usr/bin/python3
"""
Prints a square of a given size using the '#' character.
"""
def print_square(size):
"""Prints a square with a given size"""
    if type(size) is not int:
        raise TypeError("size must be an integer")
    if size < 0:
        raise ValueError("size must be >= 0")
for i in range(size):
for j in range(size):
print("#", end="")
print()
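# Example (added for illustration):
# >>> print_square(2)
# ##
# ##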
|
Ethan-23/holbertonschool-higher_level_programming
|
0x07-python-test_driven_development/4-print_square.py
|
4-print_square.py
|
py
| 440 |
python
|
en
|
code
| 0 |
github-code
|
6
|
11278711462
|
"""
Here we test a basic strategy that includes an indicator and FX rate movements.
We'll start with a $100K AUD-denominated portfolio and buy 100 shares of SPY
only if the VIX < 26.
Also, buying in SPY will make us short USD.
Generate funding trades, to be executed the day after we buy SPY, so that
we aren't short USD.
For the sake of testing we'll focus on the dates 1st Sep -> 1st Oct.
Points to note:
- We'll buy 100 shares of SPY @337.11 on Sep 14th VIX=25.85
- Hold until Oct 1st when SPY=337.04, AUDUSD=0.7167
- Because our portfolio is denominated in AUD we need to calculate AUD prices.
- So buying SPY at 337.11 (Sep 14th price) / 0.729682 (fx 15th) = AUD 462
- And holding to a value of 337.04 / 0.716651 = AUD 470.30
- PNL will be $8.30 (= 470.30 - 462.00) for each of 100 shares purchased.
"""
from datetime import date
import pandas as pd
from pxtrade import Trade
from pxtrade.assets import reset, Stock, Cash, FxRate, Portfolio
from pxtrade.backtest import Backtest
from pxtrade.strategy import Strategy
from pxtrade.events.yahoo import load_yahoo_prices
from pxtrade.compliance import Compliance, UnitLimit
from pxtrade.history import History
def test_buy_spy_with_indicator():
# create your stock and portfolio
reset()
spy = Stock("SPY", currency_code="USD")
aud = Cash("AUD")
usd = Cash("USD")
audusd = FxRate("AUDUSD")
portfolio = Portfolio("AUD")
starting_value = 1e5 # start with $100K AUD
portfolio.transfer(aud, starting_value)
# impose a compliance rule so we are unable to
# hold more than 100 shares.
portfolio.compliance = Compliance().add_rule(UnitLimit(spy, 100))
# define a strategy to buy 100 shares of SPY
# if we are short USD then also fund this shortfall with AUD
class BuySpyWithIndicator(Strategy):
def show(self, trades):
if len(trades) == 0:
return
print(backtest.datetime)
print("^VIX: ", backtest.get_indicator("^VIX"))
print("AUDUSD: ", audusd.rate)
print("SPY: ", spy.price)
for trade in trades:
print(trade)
print("-------")
def generate_trades(self):
trades = list()
usd_holding = portfolio.get_holding_units("USD")
if usd_holding < 0:
trades.append(Trade(portfolio, usd, int(-usd_holding) + 1))
if backtest.get_indicator("^VIX") >= 26:
# don't buy any spy, just fund usd (if required)
self.show(trades)
return trades
trades.append(Trade(portfolio, spy, 100))
self.show(trades)
return trades
# create your backtest instance
backtest = Backtest(BuySpyWithIndicator())
history = History(
portfolios=portfolio,
backtest=backtest,
)
# load price events from yahoo for spy, audusd, vix
start_date = date(2020, 9, 1)
end_date = date(2020, 10, 1)
load_yahoo_prices(
[spy, audusd, "^VIX"],
backtest,
start_date=start_date,
end_date=end_date,
)
# run the backtest and check pnl
backtest.run()
df = history.get()
# print(portfolio)
# print(audusd.rate)
print(backtest.datetime)
print(df)
# Note that when running on windows the last FX rate we get from yahoo
    # is on the 30 Sep AUDUSD = 0.716651. However, running on linux we get
# a price from 1 Oct of AUDUSD = 0.718288.
# This looks to be an issue with the yahoo api, but it has implications
# for our assertions around portfolio value.
starting_aud_price = 462 # this has not changed
ending_aud_price = 337.04 / audusd.rate
expected_pnl = (ending_aud_price - starting_aud_price) * 100
expected_value = starting_value + expected_pnl
assert round(portfolio.value, -1) == round(expected_value, -1)
start_date = pd.Timestamp(start_date)
end_date = pd.Timestamp(end_date)
assert int(df.at[start_date, "Portfolio"]) == int(starting_value)
assert round(df.at[end_date, "Portfolio"], -1) == round(expected_value, -1)
assert round(df.at[pd.Timestamp(date(2020, 9, 14)), "^VIX"], 2) == 25.85
|
simongarisch/pxtrade
|
tests/test_strategy2.py
|
test_strategy2.py
|
py
| 4,212 |
python
|
en
|
code
| 2 |
github-code
|
6
|
17282581755
|
from pynput.keyboard import Listener, Key
import time
from threading import Thread
from canvas import Canvas
from pedal import Pedal
from snake import Snake
from ball import Ball
import os
def on_press(key):
if hasattr(key, 'char'): # Write the character pressed if available
print(key.char)
    elif key == Key.up:  # Arrow up pressed: steer the snake up
        print('up')
        snake.up()
    elif key == Key.down:  # Arrow down pressed: steer the snake down
        print('down')
        snake.down()
    elif key == Key.left:  # Arrow left pressed: steer the snake left
        print('left')
        snake.left()
    elif key == Key.right:  # Arrow right pressed: steer the snake right
        print('right')
        snake.right()
canvas = Canvas(15, 50)
snake = Snake(10, 5)
snake.left()
pedal = Pedal(1, 7)
ball = Ball(8,7)
gameover = False
while not gameover:
with Listener(on_press=on_press) as ls:
def time_out(period_sec: int):
time.sleep(period_sec) # Listen to keyboard for period_sec seconds
ls.stop()
os.system('cls' if os.name == 'nt' else 'clear')
Thread(target=time_out, args=(0.5,)).start()
ls.join()
#move entities
ball.collision(canvas)
ball.move()
snake.move()
pedal.move(canvas)
canvas.clear()
canvas.createBorder()
    if snake.detectColission(canvas) or ball.getgameOver():
gameover = True
#print entities
canvas = snake.drawSnake(canvas)
canvas = pedal.print(canvas)
canvas = ball.print(canvas)
canvas.print()
print("your score: ",ball.score)
os.system('cls' if os.name == 'nt' else 'clear')
print("Loser your score was: ",ball.score)
|
devbit-algorithms/snakepong-snaka69
|
gameLoop.py
|
gameLoop.py
|
py
| 1,665 |
python
|
en
|
code
| 0 |
github-code
|
6
|
13918838862
|
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
import time
from collections import defaultdict
from datetime import datetime
from odoo import api, models
class ManagementDashboard(models.Model):
_name = 'management.dashboard'
_description = "Project Management Dashboard"
@api.model
def get_task_chart_data(self):
"""
        Map each task stage to its colour, name and task count.
        Returns:
            dictionary -- stage name as key and details (count, color, name) as value
"""
task_obj = self.env['project.task']
task_rec = task_obj.search([])
groups = defaultdict(list)
for obj in task_rec:
groups[obj.stage_id].append(obj)
result = {}
for rec in groups.items():
result.update({
rec[0].name: {
'count': len(rec[1]),
'color': rec[0].color,
'name': rec[0].name,
}})
return result
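    # Illustrative shape of the mapping returned above (stage names and colour
    # indexes below are made-up examples):
    #   {'In Progress': {'count': 3, 'color': 4, 'name': 'In Progress'},
    #    'Done': {'count': 7, 'color': 10, 'name': 'Done'}}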
@api.model
def get_config_values(self):
"""
To get the Colour value for dashboard.
Using sudo env to bypass the access right.
Returns:
dictionary -- Dictionary of config colours.
"""
result = self.env['res.config.settings'].sudo().get_values()
return {
'color_close_state': result.get('color_close_state', False),
'color_control_state': result.get('color_control_state', False),
'color_execute_state': result.get('color_execute_state', False),
'color_init_state': result.get('color_init_state', False),
'color_plan_state': result.get('color_plan_state', False),
'card_header_color': result.get('card_header_color', False),
}
@api.model
def get_color_code(self, project):
# 0:Green, 1:Orange, 2:Red
open_task = 0
open_issue = 0
spent_budget = 0
pending_invoice = 0
# ('date_start', '>=', self._context.get('start_date')),
# ('date_start', '<=', self._context.get('end_date')),
project_tasks = self.env['project.task'].search(
[('project_id', '=', project['id']),
('stage_id.name',
'not in',
("Done", "Completed", "Approval",
"Canceled", "Closure", "Release",
"Implementation")),
('date_end', '=', False)])
today_date = datetime.strptime(
time.strftime('%Y-%m-%d'), '%Y-%m-%d').date()
for task in project_tasks:
if task.schedule_date:
schedule_date = datetime.strptime(
str(task.schedule_date), '%Y-%m-%d %H:%M:%S').date()
daysdiff = (schedule_date - today_date).days
if daysdiff <= 1:
open_task = 2
if daysdiff <= 7 and daysdiff > 1 and open_task != 2:
open_task = 1
if daysdiff > 7 and open_task not in (2, 1):
open_task = 0
project_issues = self.env['helpdesk.ticket'].with_context(
view_project_issues=1).search([
('stage_id.closed', '!=', True),
('project_id.id', '=', project['id']),
('closed_date', '=', False)])
for issue in project_issues or []:
            if ((int(issue.ticket_aging) > 30 and issue.priority == '0') or
(int(issue.ticket_aging) > 10 and issue.priority == '1') or
(int(issue.ticket_aging) > 2 and issue.priority in
('2', '3'))):
open_issue = 2
if ((int(issue.ticket_aging) > 10 and int(issue.ticket_aging) <= 30 and
issue.priority == '0') or
(int(issue.ticket_aging) > 2 and int(issue.ticket_aging) <= 10 and
issue.priority == '1') or
(int(issue.ticket_aging) > 0 and int(issue.ticket_aging) <= 2 and
issue.priority in ('2', '3')) and
open_issue != 2):
open_issue = 1
if ((int(issue.ticket_aging) <= 10 and issue.priority == '0') or
(int(issue.ticket_aging) <= 2 and issue.priority == '1') or
(int(issue.ticket_aging) == 0 and issue.priority in
('2', '3')) and
open_issue not in (2, 1)):
open_issue = 0
budget = 0
if project['spent_budget'] > 0 and project['actual_budget'] > 0:
budget = ((project['spent_budget'] - project['actual_budget'])/project['actual_budget']) * 100
if budget > 30:
spent_budget = 2
elif budget > 10 and budget <= 30:
spent_budget = 1
elif budget <= 10:
spent_budget = 0
invoices = self.env['timesheet.invoice'].search([
('project_id', '=', project['id']),
('state', 'in', ('draft', 'confirm', 'pre-approved'))])
for inv in invoices or []:
if int(inv.timesheet_inv_age) > 30:
pending_invoice = 2
elif int(inv.timesheet_inv_age) > 10 and int(inv.timesheet_inv_age) < 30 and pending_invoice != 2:
pending_invoice = 1
elif int(inv.timesheet_inv_age) < 10 and pending_invoice not in (2, 1):
pending_invoice = 0
return {
'spent_budget': spent_budget,
'pending_invoice': pending_invoice,
'open_task': open_task,
'open_issue': open_issue}
@api.model
def get_treeview_id(self, view):
return self.env.ref(view).id,
|
onesteinbv/ProjectManagement
|
management_dashboard/models/management_dashboard.py
|
management_dashboard.py
|
py
| 5,708 |
python
|
en
|
code
| 1 |
github-code
|
6
|
30059046061
|
from pyswarms.base.base_discrete import DiscreteSwarmBase
import numpy as np
from scipy.spatial import cKDTree
class PerezPSO(DiscreteSwarmBase):
def assertions(self):
"""Assertion method to check various inputs.
Raises
------
KeyError
When one of the required dictionary keys is missing.
ValueError
When the number of neighbors is not within the range
:code:`[0, n_particles]`.
When the p-value is not in the list of values :code:`[1,2]`.
"""
super(PerezPSO, self).assertions()
if not all(key in self.options for key in ('k', 'p')):
raise KeyError('Missing either k or p in options')
if not 0 <= self.k <= self.n_particles:
            raise ValueError('No. of neighbors must be between 0 and no. of '
                             'particles.')
        if self.p not in [1, 2]:
            raise ValueError('p-value should either be 1 (for L1/Minkowski) '
                             'or 2 (for L2/Euclidean).')
def __init__(self, n_particles, dimensions, alpha, options, velocity_clamp=None):
"""Initializes the swarm.
Attributes
----------
n_particles : int
number of particles in the swarm.
dimensions : int
number of dimensions in the space.
velocity_clamp : tuple (default is :code:`None`)
a tuple of size 2 where the first entry is the minimum velocity
and the second entry is the maximum velocity. It
sets the limits for velocity clamping.
options : dict with keys :code:`{'c1', 'c2', 'k', 'p'}`
a dictionary containing the parameters for the specific
optimization technique
* c1 : float
cognitive parameter
* c2 : float
social parameter
* w : float
inertia parameter
* k : int
number of neighbors to be considered. Must be a
positive integer less than :code:`n_particles`
* p: int {1,2}
the Minkowski p-norm to use. 1 is the
sum-of-absolute values (or L1 distance) while 2 is
the Euclidean (or L2) distance.
"""
# Initialize logger
# self.logger = logging.getLogger(__name__)
binary = False
# Assign k-neighbors and p-value as attributes
self.k, self.p = options['k'], options['p']
# Initialize parent class
super(PerezPSO, self).__init__(n_particles, dimensions, binary,
options, velocity_clamp)
# Invoke assertions
self.assertions()
# Initialize the resettable attributes
self.reset()
# Set initial glo
self.glo = np.full((1, self.dimensions), np.inf)
self.glo_cost = np.inf
self.y = np.full(self.n_particles, 0)
self.alpha = alpha
self.loc_pos = 0
def optimize(self, objective_func, iters, print_step=1, verbose=1):
"""Optimizes the swarm for a number of iterations.
Performs the optimization to evaluate the objective
function :code:`f` for a number of iterations :code:`iter.`
Parameters
----------
objective_func : function
objective function to be evaluated
iters : int
number of iterations
print_step : int (the default is 1)
amount of steps for printing into console.
verbose : int (the default is 1)
verbosity setting.
Returns
-------
tuple
the local best cost and the local best position among the
swarm.
"""
for i in range(iters):
# Compute cost for current position and personal best
current_cost = objective_func(self.pos)
# Obtain the indices of the best position for each
# neighbour-space, and get the local best cost and
# local best positions from it.
nmin_idx = self._get_neighbors(current_cost) # get index of loc for each neighborhood of the cur position
self.best_cost = current_cost[nmin_idx] # the loc optimum cost for each particle
cost_abs = np.abs(current_cost)
loc_min = cost_abs.min()
if loc_min < np.abs(self.glo_cost):
pos_min_index = np.where(cost_abs == loc_min)[0][0] # index of pos min
self.glo = self.pos[pos_min_index]
self.glo_cost = current_cost[pos_min_index]
del loc_min, cost_abs
# Get the local min realative to each point
self.loc_pos = self.pos[nmin_idx]
self.y = self._get_y(self.loc_pos)
# Perform position velocity update
self._update_velocity() # must be called first
self._update_position()
care = r"""
Iter: {}
glo: {}, {}
Cur_cost: {}
loc_pos: {}
nmin_idx: {}
y: {}
velocity: {}
position: {}
""".format(i, self.glo, self.glo_cost, current_cost, self.loc_pos,
nmin_idx, self.y, self.velocity, self.pos)
if i % print_step == 0:
print(care + "\n\n")
if all_eq(self.pos):
break
if self.glo_cost == 0:
break
# Obtain the final best_cost and the final best_position
# final_best_cost_arg = np.argmin(self.best_cost)
# final_best_cost = np.min(self.best_cost)
# final_best_pos = self.best_pos[final_best_cost_arg]
return self.glo_cost, self.glo
def _get_neighbors(self, pbest_cost):
"""Helper function to obtain the best position found in the
neighborhood. This uses the cKDTree method from :code:`scipy`
to obtain the nearest neighbours
Parameters
----------
pbest_cost : numpy.ndarray of size (n_particles, )
the cost incurred at the historically best position. Will be used
for mapping the obtained indices to its actual cost.
Returns
-------
array of size (n_particles, ) dtype=int64
indices containing the best particles for each particle's
neighbour-space that have the lowest cost
"""
# Use cKDTree to get the indices of the nearest neighbors
tree = cKDTree(self.pos)
_, idx = tree.query(self.pos, p=self.p, k=self.k)
# Map the computed costs to the neighbour indices and take the
# argmin. If k-neighbors is equal to 1, then the swarm acts
# independently of each other.
if self.k == 1:
# The minimum index is itself, no mapping needed.
best_neighbor = pbest_cost[idx][:, np.newaxis].argmin(axis=1)
else:
idx_min = pbest_cost[idx].argmin(axis=1)
best_neighbor = idx[np.arange(len(idx)), idx_min]
return best_neighbor
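    # Illustrative note (added): with k == 3, `idx` has shape (n_particles, 3);
    # pbest_cost[idx] maps each neighbour index to its cost, argmin picks the
    # cheapest neighbour per row, and the fancy indexing turns that back into
    # absolute particle indices. With k == 1 each particle is its own best.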
def _update_velocity(self):
"""Updates the velocity matrix of the swarm.
This method updates the attribute :code:`self.velocity` of
the instantiated object. It is called by the
:code:`self.optimize()` method.
"""
# Define the hyperparameters from options dictionary
c1, c2, w = self.options['c1'], self.options['c2'], self.options['w']
# Compute for cognitive and social terms
cognitive = (c1 * np.random.uniform(0, 1) * (-1 - self.y))
social = (c2 * np.random.uniform(0, 1)
* (1 - self.y))
temp_velocity = (w * self.velocity) + cognitive + social
# Create a mask to clamp the velocities
if self.velocity_clamp is not None:
# Create a mask depending on the set boundaries
min_velocity, max_velocity = self.velocity_clamp[0], \
self.velocity_clamp[1]
_b = np.logical_and(temp_velocity >= min_velocity,
temp_velocity <= max_velocity)
# Use the mask to finally clamp the velocities
self.velocity = np.where(~_b, self.velocity, temp_velocity)
else:
self.velocity = temp_velocity
def _update_position(self):
"""Updates the position matrix of the swarm.
This method updates the attribute :code:`self.pos` of
the instantiated object. It is called by the
:code:`self.optimize()` method.
"""
del self.pos
next_pos = np.random.randint(-2000, 2000, size=self.swarm_size)
_decision = self.y + self.velocity
# print("des: {}".format(_decision))
# mext_pos = np.where(_decision > self.alpha, self.glo, next_pos)
# next_pos = np.where(_decision < self.alpha, self.loc_pos, next_pos)
for i in range(self.n_particles):
if _decision[i] > self.alpha:
next_pos[i] = self.glo
elif _decision[i] < -self.alpha:
next_pos[i] = self.loc_pos[i]
self.pos = next_pos
def _get_y(self, loc):
_y = np.array([])
for i in range(self.n_particles):
if np.array_equal(self.glo, self.pos[i]):
_y = np.concatenate((_y, [1]))
elif np.array_equal(loc[i], self.pos[i]):
_y = np.concatenate((_y, [-1]))
else:
_y = np.concatenate((_y, [0]))
return _y
def _sigmoid(self, x):
"""Helper sigmoid function.
Inputs
------
x : numpy.ndarray
Input vector to compute the sigmoid from
Returns
-------
numpy.ndarray
Output sigmoid computation
"""
        return 1 / (1 + np.exp(-x))
def all_eq(position):
first = position[0]
for x in position:
if not np.array_equal(x, first):
return False
return True
if __name__ == "__main__":
    record_holder = np.fromstring("-1251 -555 -1024 1119 273 -1101 1728 -1835 3 1968 1375 139 -1051 -547 -1531 298", dtype=int, sep=" ")  # -16047022661760
# print(record_holder)
from Particle import majic_func as obj_func
from pyswarms.utils.environments import PlotEnvironment
file = open("best_record.txt", "a")
for loop in range(10):
test = PerezPSO(12345, 16, 0.3, {"k": 10, 'c1': 0.8, 'c2': 0.2, 'w': 0.75, 'p': 2})
test.pos = np.random.randint(-2000, 2000, size=test.swarm_size)
test.velocity = np.full(test.n_particles, 0)
# test.pos[0] = record_holder
# print(test.pos)
proposed = test.optimize(obj_func, iters=200, print_step=50)
file.write("{}: {}\n\n".format(proposed[0], proposed[1]))
file.close()
|
Ninalgad/PerezSwarm
|
base_discrete.py
|
base_discrete.py
|
py
| 10,842 |
python
|
en
|
code
| 1 |
github-code
|
6
|
33584979695
|
__author__ = 'Vivek'
def nextPermutation(A):
"""
    :param A: list of integers
    :return: the numerically next greater permutation of those integers
"""
i = -1
last = False
for k in range(len(A)-1) :
if A[k] < A[k+1] :
i = k
if i == -1 :
last = True
j = -1
if not last :
for k in range(len(A)) :
if k > i and A[k] > A[i] :
j = k
A[i], A[j] = A[j], A[i]
temp = A[i+1:]
temp.reverse()
temp1 = A[:i+1]
temp1.extend(temp)
return temp1
if last :
A.reverse()
return A
print(nextPermutation([1,4,3,2])) # will return [2, 1, 3, 4]
print(nextPermutation([1,2,3,4])) # Will return [1, 2, 4, 3]
|
viveksyngh/InterviewBit
|
Arrays/NEXTPERM.py
|
NEXTPERM.py
|
py
| 740 |
python
|
en
|
code
| 3 |
github-code
|
6
|
11859412516
|
import logging
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, List, Union
from pandas import DataFrame, to_datetime
from tabulate import tabulate
from freqtrade.constants import (DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT,
Config)
from freqtrade.data.metrics import (calculate_cagr, calculate_csum, calculate_market_change,
calculate_max_drawdown)
from freqtrade.misc import decimals_per_coin, file_dump_joblib, file_dump_json, round_coin_value
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
logger = logging.getLogger(__name__)
def store_backtest_stats(
recordfilename: Path, stats: Dict[str, DataFrame], dtappendix: str) -> None:
"""
Stores backtest results
:param recordfilename: Path object, which can either be a filename or a directory.
Filenames will be appended with a timestamp right before the suffix
while for directories, <directory>/backtest-result-<datetime>.json will be used as filename
:param stats: Dataframe containing the backtesting statistics
:param dtappendix: Datetime to use for the filename
"""
if recordfilename.is_dir():
filename = (recordfilename / f'backtest-result-{dtappendix}.json')
else:
filename = Path.joinpath(
recordfilename.parent, f'{recordfilename.stem}-{dtappendix}'
).with_suffix(recordfilename.suffix)
# Store metadata separately.
file_dump_json(get_backtest_metadata_filename(filename), stats['metadata'])
del stats['metadata']
file_dump_json(filename, stats)
latest_filename = Path.joinpath(filename.parent, LAST_BT_RESULT_FN)
file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})
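# Illustrative example (added; the paths below are made up): with
# recordfilename=Path("user_data/backtest_results") (a directory) and
# dtappendix="2020-01-01_12-00-00", the stats are written to
# user_data/backtest_results/backtest-result-2020-01-01_12-00-00.json and the
# LAST_BT_RESULT_FN marker file is updated to point at that filename.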
def store_backtest_signal_candles(
recordfilename: Path, candles: Dict[str, Dict], dtappendix: str) -> Path:
"""
Stores backtest trade signal candles
:param recordfilename: Path object, which can either be a filename or a directory.
Filenames will be appended with a timestamp right before the suffix
while for directories, <directory>/backtest-result-<datetime>_signals.pkl will be used
as filename
:param stats: Dict containing the backtesting signal candles
:param dtappendix: Datetime to use for the filename
"""
if recordfilename.is_dir():
filename = (recordfilename / f'backtest-result-{dtappendix}_signals.pkl')
else:
filename = Path.joinpath(
recordfilename.parent, f'{recordfilename.stem}-{dtappendix}_signals.pkl'
)
file_dump_joblib(filename, candles)
return filename
def _get_line_floatfmt(stake_currency: str) -> List[str]:
"""
Generate floatformat (goes in line with _generate_result_line())
"""
return ['s', 'd', '.2f', '.2f', f'.{decimals_per_coin(stake_currency)}f',
'.2f', 'd', 's', 's']
def _get_line_header(first_column: str, stake_currency: str,
direction: str = 'Entries') -> List[str]:
"""
Generate header lines (goes in line with _generate_result_line())
"""
return [first_column, direction, 'Avg Profit %', 'Cum Profit %',
f'Tot Profit {stake_currency}', 'Tot Profit %', 'Avg Duration',
'Win Draw Loss Win%']
def generate_wins_draws_losses(wins, draws, losses):
if wins > 0 and losses == 0:
wl_ratio = '100'
elif wins == 0:
wl_ratio = '0'
else:
wl_ratio = f'{100.0 / (wins + draws + losses) * wins:.1f}' if losses > 0 else '100'
return f'{wins:>4} {draws:>4} {losses:>4} {wl_ratio:>4}'
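# Worked example (added for illustration): generate_wins_draws_losses(6, 1, 3)
# returns '   6    1    3 60.0' -- 6 wins out of 10 closed trades, each field
# right-aligned in a 4-character column.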
def _generate_result_line(result: DataFrame, starting_balance: int, first_column: str) -> Dict:
"""
Generate one result dict, with "first_column" as key.
"""
profit_sum = result['profit_ratio'].sum()
# (end-capital - starting capital) / starting capital
profit_total = result['profit_abs'].sum() / starting_balance
return {
'key': first_column,
'trades': len(result),
'profit_mean': result['profit_ratio'].mean() if len(result) > 0 else 0.0,
'profit_mean_pct': result['profit_ratio'].mean() * 100.0 if len(result) > 0 else 0.0,
'profit_sum': profit_sum,
'profit_sum_pct': round(profit_sum * 100.0, 2),
'profit_total_abs': result['profit_abs'].sum(),
'profit_total': profit_total,
'profit_total_pct': round(profit_total * 100.0, 2),
'duration_avg': str(timedelta(
minutes=round(result['trade_duration'].mean()))
) if not result.empty else '0:00',
# 'duration_max': str(timedelta(
# minutes=round(result['trade_duration'].max()))
# ) if not result.empty else '0:00',
# 'duration_min': str(timedelta(
# minutes=round(result['trade_duration'].min()))
# ) if not result.empty else '0:00',
'wins': len(result[result['profit_abs'] > 0]),
'draws': len(result[result['profit_abs'] == 0]),
'losses': len(result[result['profit_abs'] < 0]),
}
def generate_pair_metrics(pairlist: List[str], stake_currency: str, starting_balance: int,
results: DataFrame, skip_nan: bool = False) -> List[Dict]:
"""
Generates and returns a list for the given backtest data and the results dataframe
:param pairlist: Pairlist used
:param stake_currency: stake-currency - used to correctly name headers
:param starting_balance: Starting balance
:param results: Dataframe containing the backtest results
    :param skip_nan: Skip pairs whose results are all NaN (e.g. only left-open trades)
:return: List of Dicts containing the metrics per pair
"""
tabular_data = []
for pair in pairlist:
result = results[results['pair'] == pair]
if skip_nan and result['profit_abs'].isnull().all():
continue
tabular_data.append(_generate_result_line(result, starting_balance, pair))
# Sort by total profit %:
tabular_data = sorted(tabular_data, key=lambda k: k['profit_total_abs'], reverse=True)
# Append Total
tabular_data.append(_generate_result_line(results, starting_balance, 'TOTAL'))
return tabular_data
def generate_tag_metrics(tag_type: str,
starting_balance: int,
results: DataFrame,
skip_nan: bool = False) -> List[Dict]:
"""
Generates and returns a list of metrics for the given tag trades and the results dataframe
:param starting_balance: Starting balance
:param results: Dataframe containing the backtest results
    :param skip_nan: Skip pairs whose results are all NaN (e.g. only left-open trades)
:return: List of Dicts containing the metrics per pair
"""
tabular_data = []
if tag_type in results.columns:
for tag, count in results[tag_type].value_counts().items():
result = results[results[tag_type] == tag]
if skip_nan and result['profit_abs'].isnull().all():
continue
tabular_data.append(_generate_result_line(result, starting_balance, tag))
# Sort by total profit %:
tabular_data = sorted(tabular_data, key=lambda k: k['profit_total_abs'], reverse=True)
# Append Total
tabular_data.append(_generate_result_line(results, starting_balance, 'TOTAL'))
return tabular_data
else:
return []
def generate_exit_reason_stats(max_open_trades: int, results: DataFrame) -> List[Dict]:
"""
Generate small table outlining Backtest results
:param max_open_trades: Max_open_trades parameter
:param results: Dataframe containing the backtest result for one strategy
:return: List of Dicts containing the metrics per Sell reason
"""
tabular_data = []
for reason, count in results['exit_reason'].value_counts().items():
result = results.loc[results['exit_reason'] == reason]
profit_mean = result['profit_ratio'].mean()
profit_sum = result['profit_ratio'].sum()
profit_total = profit_sum / max_open_trades
tabular_data.append(
{
'exit_reason': reason,
'trades': count,
'wins': len(result[result['profit_abs'] > 0]),
'draws': len(result[result['profit_abs'] == 0]),
'losses': len(result[result['profit_abs'] < 0]),
'profit_mean': profit_mean,
'profit_mean_pct': round(profit_mean * 100, 2),
'profit_sum': profit_sum,
'profit_sum_pct': round(profit_sum * 100, 2),
'profit_total_abs': result['profit_abs'].sum(),
'profit_total': profit_total,
'profit_total_pct': round(profit_total * 100, 2),
}
)
return tabular_data
def generate_strategy_comparison(bt_stats: Dict) -> List[Dict]:
"""
Generate summary per strategy
:param bt_stats: Dict of <Strategyname: DataFrame> containing results for all strategies
:return: List of Dicts containing the metrics per Strategy
"""
tabular_data = []
for strategy, result in bt_stats.items():
tabular_data.append(deepcopy(result['results_per_pair'][-1]))
# Update "key" to strategy (results_per_pair has it as "Total").
tabular_data[-1]['key'] = strategy
tabular_data[-1]['max_drawdown_account'] = result['max_drawdown_account']
tabular_data[-1]['max_drawdown_abs'] = round_coin_value(
result['max_drawdown_abs'], result['stake_currency'], False)
return tabular_data
def generate_edge_table(results: dict) -> str:
floatfmt = ('s', '.10g', '.2f', '.2f', '.2f', '.2f', 'd', 'd', 'd')
tabular_data = []
headers = ['Pair', 'Stoploss', 'Win Rate', 'Risk Reward Ratio',
'Required Risk Reward', 'Expectancy', 'Total Number of Trades',
'Average Duration (min)']
for result in results.items():
if result[1].nb_trades > 0:
tabular_data.append([
result[0],
result[1].stoploss,
result[1].winrate,
result[1].risk_reward_ratio,
result[1].required_risk_reward,
result[1].expectancy,
result[1].nb_trades,
round(result[1].avg_trade_duration)
])
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(tabular_data, headers=headers,
floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")
def _get_resample_from_period(period: str) -> str:
if period == 'day':
return '1d'
if period == 'week':
return '1w'
if period == 'month':
return '1M'
raise ValueError(f"Period {period} is not supported.")
def generate_periodic_breakdown_stats(trade_list: List, period: str) -> List[Dict[str, Any]]:
results = DataFrame.from_records(trade_list)
if len(results) == 0:
return []
results['close_date'] = to_datetime(results['close_date'], utc=True)
resample_period = _get_resample_from_period(period)
resampled = results.resample(resample_period, on='close_date')
stats = []
for name, day in resampled:
profit_abs = day['profit_abs'].sum().round(10)
wins = sum(day['profit_abs'] > 0)
draws = sum(day['profit_abs'] == 0)
loses = sum(day['profit_abs'] < 0)
stats.append(
{
'date': name.strftime('%d/%m/%Y'),
'profit_abs': profit_abs,
'wins': wins,
'draws': draws,
'loses': loses
}
)
return stats
def generate_trading_stats(results: DataFrame) -> Dict[str, Any]:
""" Generate overall trade statistics """
if len(results) == 0:
return {
'wins': 0,
'losses': 0,
'draws': 0,
'holding_avg': timedelta(),
'winner_holding_avg': timedelta(),
'loser_holding_avg': timedelta(),
}
winning_trades = results.loc[results['profit_ratio'] > 0]
draw_trades = results.loc[results['profit_ratio'] == 0]
losing_trades = results.loc[results['profit_ratio'] < 0]
holding_avg = (timedelta(minutes=round(results['trade_duration'].mean()))
if not results.empty else timedelta())
winner_holding_avg = (timedelta(minutes=round(winning_trades['trade_duration'].mean()))
if not winning_trades.empty else timedelta())
loser_holding_avg = (timedelta(minutes=round(losing_trades['trade_duration'].mean()))
if not losing_trades.empty else timedelta())
return {
'wins': len(winning_trades),
'losses': len(losing_trades),
'draws': len(draw_trades),
'holding_avg': holding_avg,
'holding_avg_s': holding_avg.total_seconds(),
'winner_holding_avg': winner_holding_avg,
'winner_holding_avg_s': winner_holding_avg.total_seconds(),
'loser_holding_avg': loser_holding_avg,
'loser_holding_avg_s': loser_holding_avg.total_seconds(),
}
def generate_daily_stats(results: DataFrame) -> Dict[str, Any]:
""" Generate daily statistics """
if len(results) == 0:
return {
'backtest_best_day': 0,
'backtest_worst_day': 0,
'backtest_best_day_abs': 0,
'backtest_worst_day_abs': 0,
'winning_days': 0,
'draw_days': 0,
'losing_days': 0,
'daily_profit_list': [],
}
daily_profit_rel = results.resample('1d', on='close_date')['profit_ratio'].sum()
daily_profit = results.resample('1d', on='close_date')['profit_abs'].sum().round(10)
worst_rel = min(daily_profit_rel)
best_rel = max(daily_profit_rel)
worst = min(daily_profit)
best = max(daily_profit)
winning_days = sum(daily_profit > 0)
draw_days = sum(daily_profit == 0)
losing_days = sum(daily_profit < 0)
daily_profit_list = [(str(idx.date()), val) for idx, val in daily_profit.items()]
return {
'backtest_best_day': best_rel,
'backtest_worst_day': worst_rel,
'backtest_best_day_abs': best,
'backtest_worst_day_abs': worst,
'winning_days': winning_days,
'draw_days': draw_days,
'losing_days': losing_days,
'daily_profit': daily_profit_list,
}
def generate_strategy_stats(pairlist: List[str],
strategy: str,
content: Dict[str, Any],
min_date: datetime, max_date: datetime,
market_change: float
) -> Dict[str, Any]:
"""
:param pairlist: List of pairs to backtest
:param strategy: Strategy name
:param content: Backtest result data in the format:
{'results: results, 'config: config}}.
:param min_date: Backtest start date
:param max_date: Backtest end date
:param market_change: float indicating the market change
:return: Dictionary containing results per strategy and a strategy summary.
"""
results: Dict[str, DataFrame] = content['results']
if not isinstance(results, DataFrame):
return {}
config = content['config']
max_open_trades = min(config['max_open_trades'], len(pairlist))
start_balance = config['dry_run_wallet']
stake_currency = config['stake_currency']
pair_results = generate_pair_metrics(pairlist, stake_currency=stake_currency,
starting_balance=start_balance,
results=results, skip_nan=False)
enter_tag_results = generate_tag_metrics("enter_tag", starting_balance=start_balance,
results=results, skip_nan=False)
exit_reason_stats = generate_exit_reason_stats(max_open_trades=max_open_trades,
results=results)
left_open_results = generate_pair_metrics(
pairlist, stake_currency=stake_currency, starting_balance=start_balance,
results=results.loc[results['exit_reason'] == 'force_exit'], skip_nan=True)
daily_stats = generate_daily_stats(results)
trade_stats = generate_trading_stats(results)
best_pair = max([pair for pair in pair_results if pair['key'] != 'TOTAL'],
key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
worst_pair = min([pair for pair in pair_results if pair['key'] != 'TOTAL'],
key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
winning_profit = results.loc[results['profit_abs'] > 0, 'profit_abs'].sum()
losing_profit = results.loc[results['profit_abs'] < 0, 'profit_abs'].sum()
profit_factor = winning_profit / abs(losing_profit) if losing_profit else 0.0
backtest_days = (max_date - min_date).days or 1
strat_stats = {
'trades': results.to_dict(orient='records'),
'locks': [lock.to_json() for lock in content['locks']],
'best_pair': best_pair,
'worst_pair': worst_pair,
'results_per_pair': pair_results,
'results_per_enter_tag': enter_tag_results,
'exit_reason_summary': exit_reason_stats,
'left_open_trades': left_open_results,
# 'days_breakdown_stats': days_breakdown_stats,
'total_trades': len(results),
'trade_count_long': len(results.loc[~results['is_short']]),
'trade_count_short': len(results.loc[results['is_short']]),
'total_volume': float(results['stake_amount'].sum()),
'avg_stake_amount': results['stake_amount'].mean() if len(results) > 0 else 0,
'profit_mean': results['profit_ratio'].mean() if len(results) > 0 else 0,
'profit_median': results['profit_ratio'].median() if len(results) > 0 else 0,
'profit_total': results['profit_abs'].sum() / start_balance,
'profit_total_long': results.loc[~results['is_short'], 'profit_abs'].sum() / start_balance,
'profit_total_short': results.loc[results['is_short'], 'profit_abs'].sum() / start_balance,
'profit_total_abs': results['profit_abs'].sum(),
'profit_total_long_abs': results.loc[~results['is_short'], 'profit_abs'].sum(),
'profit_total_short_abs': results.loc[results['is_short'], 'profit_abs'].sum(),
'cagr': calculate_cagr(backtest_days, start_balance, content['final_balance']),
'profit_factor': profit_factor,
'backtest_start': min_date.strftime(DATETIME_PRINT_FORMAT),
'backtest_start_ts': int(min_date.timestamp() * 1000),
'backtest_end': max_date.strftime(DATETIME_PRINT_FORMAT),
'backtest_end_ts': int(max_date.timestamp() * 1000),
'backtest_days': backtest_days,
'backtest_run_start_ts': content['backtest_start_time'],
'backtest_run_end_ts': content['backtest_end_time'],
'trades_per_day': round(len(results) / backtest_days, 2),
'market_change': market_change,
'pairlist': pairlist,
'stake_amount': config['stake_amount'],
'stake_currency': config['stake_currency'],
'stake_currency_decimals': decimals_per_coin(config['stake_currency']),
'starting_balance': start_balance,
'dry_run_wallet': start_balance,
'final_balance': content['final_balance'],
'rejected_signals': content['rejected_signals'],
'timedout_entry_orders': content['timedout_entry_orders'],
'timedout_exit_orders': content['timedout_exit_orders'],
'canceled_trade_entries': content['canceled_trade_entries'],
'canceled_entry_orders': content['canceled_entry_orders'],
'replaced_entry_orders': content['replaced_entry_orders'],
'max_open_trades': max_open_trades,
'max_open_trades_setting': (config['max_open_trades']
if config['max_open_trades'] != float('inf') else -1),
'timeframe': config['timeframe'],
'timeframe_detail': config.get('timeframe_detail', ''),
'timerange': config.get('timerange', ''),
'enable_protections': config.get('enable_protections', False),
'strategy_name': strategy,
# Parameters relevant for backtesting
'stoploss': config['stoploss'],
'trailing_stop': config.get('trailing_stop', False),
'trailing_stop_positive': config.get('trailing_stop_positive'),
'trailing_stop_positive_offset': config.get('trailing_stop_positive_offset', 0.0),
'trailing_only_offset_is_reached': config.get('trailing_only_offset_is_reached', False),
'use_custom_stoploss': config.get('use_custom_stoploss', False),
'minimal_roi': config['minimal_roi'],
'use_exit_signal': config['use_exit_signal'],
'exit_profit_only': config['exit_profit_only'],
'exit_profit_offset': config['exit_profit_offset'],
'ignore_roi_if_entry_signal': config['ignore_roi_if_entry_signal'],
**daily_stats,
**trade_stats
}
try:
max_drawdown_legacy, _, _, _, _, _ = calculate_max_drawdown(
results, value_col='profit_ratio')
(drawdown_abs, drawdown_start, drawdown_end, high_val, low_val,
max_drawdown) = calculate_max_drawdown(
results, value_col='profit_abs', starting_balance=start_balance)
# max_relative_drawdown = Underwater
(_, _, _, _, _, max_relative_drawdown) = calculate_max_drawdown(
results, value_col='profit_abs', starting_balance=start_balance, relative=True)
strat_stats.update({
'max_drawdown': max_drawdown_legacy, # Deprecated - do not use
'max_drawdown_account': max_drawdown,
'max_relative_drawdown': max_relative_drawdown,
'max_drawdown_abs': drawdown_abs,
'drawdown_start': drawdown_start.strftime(DATETIME_PRINT_FORMAT),
'drawdown_start_ts': drawdown_start.timestamp() * 1000,
'drawdown_end': drawdown_end.strftime(DATETIME_PRINT_FORMAT),
'drawdown_end_ts': drawdown_end.timestamp() * 1000,
'max_drawdown_low': low_val,
'max_drawdown_high': high_val,
})
csum_min, csum_max = calculate_csum(results, start_balance)
strat_stats.update({
'csum_min': csum_min,
'csum_max': csum_max
})
except ValueError:
strat_stats.update({
'max_drawdown': 0.0,
'max_drawdown_account': 0.0,
'max_relative_drawdown': 0.0,
'max_drawdown_abs': 0.0,
'max_drawdown_low': 0.0,
'max_drawdown_high': 0.0,
'drawdown_start': datetime(1970, 1, 1, tzinfo=timezone.utc),
'drawdown_start_ts': 0,
'drawdown_end': datetime(1970, 1, 1, tzinfo=timezone.utc),
'drawdown_end_ts': 0,
'csum_min': 0,
'csum_max': 0
})
return strat_stats
def generate_backtest_stats(btdata: Dict[str, DataFrame],
all_results: Dict[str, Dict[str, Union[DataFrame, Dict]]],
min_date: datetime, max_date: datetime
) -> Dict[str, Any]:
"""
:param btdata: Backtest data
:param all_results: backtest result - dictionary in the form:
{ Strategy: {'results: results, 'config: config}}.
:param min_date: Backtest start date
:param max_date: Backtest end date
:return: Dictionary containing results per strategy and a strategy summary.
"""
result: Dict[str, Any] = {
'metadata': {},
'strategy': {},
'strategy_comparison': [],
}
market_change = calculate_market_change(btdata, 'close')
metadata = {}
pairlist = list(btdata.keys())
for strategy, content in all_results.items():
strat_stats = generate_strategy_stats(pairlist, strategy, content,
min_date, max_date, market_change=market_change)
metadata[strategy] = {
'run_id': content['run_id'],
'backtest_start_time': content['backtest_start_time'],
}
result['strategy'][strategy] = strat_stats
strategy_results = generate_strategy_comparison(bt_stats=result['strategy'])
result['metadata'] = metadata
result['strategy_comparison'] = strategy_results
return result
###
# Start output section
###
def text_table_bt_results(pair_results: List[Dict[str, Any]], stake_currency: str) -> str:
"""
Generates and returns a text table for the given backtest data and the results dataframe
:param pair_results: List of Dictionaries - one entry per pair + final TOTAL row
:param stake_currency: stake-currency - used to correctly name headers
:return: pretty printed table with tabulate as string
"""
headers = _get_line_header('Pair', stake_currency)
floatfmt = _get_line_floatfmt(stake_currency)
output = [[
t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
t['profit_total_pct'], t['duration_avg'],
generate_wins_draws_losses(t['wins'], t['draws'], t['losses'])
] for t in pair_results]
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(output, headers=headers,
floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")
def text_table_exit_reason(exit_reason_stats: List[Dict[str, Any]], stake_currency: str) -> str:
"""
Generate small table outlining Backtest results
:param exit_reason_stats: Exit reason metrics
:param stake_currency: Stakecurrency used
:return: pretty printed table with tabulate as string
"""
headers = [
'Exit Reason',
'Exits',
'Win Draws Loss Win%',
'Avg Profit %',
'Cum Profit %',
f'Tot Profit {stake_currency}',
'Tot Profit %',
]
output = [[
t.get('exit_reason', t.get('sell_reason')), t['trades'],
generate_wins_draws_losses(t['wins'], t['draws'], t['losses']),
t['profit_mean_pct'], t['profit_sum_pct'],
round_coin_value(t['profit_total_abs'], stake_currency, False),
t['profit_total_pct'],
] for t in exit_reason_stats]
return tabulate(output, headers=headers, tablefmt="orgtbl", stralign="right")
def text_table_tags(tag_type: str, tag_results: List[Dict[str, Any]], stake_currency: str) -> str:
"""
Generates and returns a text table for the given backtest data and the results dataframe
:param tag_results: List of Dictionaries - one entry per tag + final TOTAL row
:param stake_currency: stake-currency - used to correctly name headers
:return: pretty printed table with tabulate as string
"""
if (tag_type == "enter_tag"):
headers = _get_line_header("TAG", stake_currency)
else:
headers = _get_line_header("TAG", stake_currency, 'Exits')
floatfmt = _get_line_floatfmt(stake_currency)
output = [
[
t['key'] if t['key'] is not None and len(
t['key']) > 0 else "OTHER",
t['trades'],
t['profit_mean_pct'],
t['profit_sum_pct'],
t['profit_total_abs'],
t['profit_total_pct'],
t['duration_avg'],
generate_wins_draws_losses(
t['wins'],
t['draws'],
t['losses'])] for t in tag_results]
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(output, headers=headers,
floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")
def text_table_periodic_breakdown(days_breakdown_stats: List[Dict[str, Any]],
stake_currency: str, period: str) -> str:
"""
Generate small table with Backtest results by days
:param days_breakdown_stats: Days breakdown metrics
:param stake_currency: Stakecurrency used
:return: pretty printed table with tabulate as string
"""
headers = [
period.capitalize(),
f'Tot Profit {stake_currency}',
'Wins',
'Draws',
'Losses',
]
output = [[
d['date'], round_coin_value(d['profit_abs'], stake_currency, False),
d['wins'], d['draws'], d['loses'],
] for d in days_breakdown_stats]
return tabulate(output, headers=headers, tablefmt="orgtbl", stralign="right")
def text_table_strategy(strategy_results, stake_currency: str) -> str:
"""
Generate summary table per strategy
:param strategy_results: List of dictionaries - one summary entry per strategy
:param stake_currency: stake-currency - used to correctly name headers
:return: pretty printed table with tabulate as string
"""
floatfmt = _get_line_floatfmt(stake_currency)
headers = _get_line_header('Strategy', stake_currency)
# _get_line_header() is also used for per-pair summary. Per-pair drawdown is mostly useless
# therefore we slip this column in only for strategy summary here.
headers.append('Drawdown')
# Align drawdown string on the center two space separator.
if 'max_drawdown_account' in strategy_results[0]:
drawdown = [f'{t["max_drawdown_account"] * 100:.2f}' for t in strategy_results]
else:
# Support for prior backtest results
drawdown = [f'{t["max_drawdown_per"]:.2f}' for t in strategy_results]
dd_pad_abs = max([len(t['max_drawdown_abs']) for t in strategy_results])
dd_pad_per = max([len(dd) for dd in drawdown])
drawdown = [f'{t["max_drawdown_abs"]:>{dd_pad_abs}} {stake_currency} {dd:>{dd_pad_per}}%'
for t, dd in zip(strategy_results, drawdown)]
output = [[
t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
t['profit_total_pct'], t['duration_avg'],
generate_wins_draws_losses(t['wins'], t['draws'], t['losses']), drawdown]
for t, drawdown in zip(strategy_results, drawdown)]
# Ignore type as floatfmt does allow tuples but mypy does not know that
return tabulate(output, headers=headers,
floatfmt=floatfmt, tablefmt="orgtbl", stralign="right")
def text_table_add_metrics(strat_results: Dict) -> str:
if len(strat_results['trades']) > 0:
best_trade = max(strat_results['trades'], key=lambda x: x['profit_ratio'])
worst_trade = min(strat_results['trades'], key=lambda x: x['profit_ratio'])
short_metrics = [
('', ''), # Empty line to improve readability
('Long / Short',
f"{strat_results.get('trade_count_long', 'total_trades')} / "
f"{strat_results.get('trade_count_short', 0)}"),
('Total profit Long %', f"{strat_results['profit_total_long']:.2%}"),
('Total profit Short %', f"{strat_results['profit_total_short']:.2%}"),
('Absolute profit Long', round_coin_value(strat_results['profit_total_long_abs'],
strat_results['stake_currency'])),
('Absolute profit Short', round_coin_value(strat_results['profit_total_short_abs'],
strat_results['stake_currency'])),
] if strat_results.get('trade_count_short', 0) > 0 else []
drawdown_metrics = []
if 'max_relative_drawdown' in strat_results:
# Compatibility to show old hyperopt results
drawdown_metrics.append(
('Max % of account underwater', f"{strat_results['max_relative_drawdown']:.2%}")
)
drawdown_metrics.extend([
('Absolute Drawdown (Account)', f"{strat_results['max_drawdown_account']:.2%}")
if 'max_drawdown_account' in strat_results else (
'Drawdown', f"{strat_results['max_drawdown']:.2%}"),
('Absolute Drawdown', round_coin_value(strat_results['max_drawdown_abs'],
strat_results['stake_currency'])),
('Drawdown high', round_coin_value(strat_results['max_drawdown_high'],
strat_results['stake_currency'])),
('Drawdown low', round_coin_value(strat_results['max_drawdown_low'],
strat_results['stake_currency'])),
('Drawdown Start', strat_results['drawdown_start']),
('Drawdown End', strat_results['drawdown_end']),
])
entry_adjustment_metrics = [
('Canceled Trade Entries', strat_results.get('canceled_trade_entries', 'N/A')),
('Canceled Entry Orders', strat_results.get('canceled_entry_orders', 'N/A')),
('Replaced Entry Orders', strat_results.get('replaced_entry_orders', 'N/A')),
] if strat_results.get('canceled_entry_orders', 0) > 0 else []
# Newly added fields should be ignored if they are missing in strat_results. hyperopt-show
# command stores these results and newer version of freqtrade must be able to handle old
# results with missing new fields.
metrics = [
('Backtesting from', strat_results['backtest_start']),
('Backtesting to', strat_results['backtest_end']),
('Max open trades', strat_results['max_open_trades']),
('', ''), # Empty line to improve readability
('Total/Daily Avg Trades',
f"{strat_results['total_trades']} / {strat_results['trades_per_day']}"),
('Starting balance', round_coin_value(strat_results['starting_balance'],
strat_results['stake_currency'])),
('Final balance', round_coin_value(strat_results['final_balance'],
strat_results['stake_currency'])),
('Absolute profit ', round_coin_value(strat_results['profit_total_abs'],
strat_results['stake_currency'])),
('Total profit %', f"{strat_results['profit_total']:.2%}"),
('CAGR %', f"{strat_results['cagr']:.2%}" if 'cagr' in strat_results else 'N/A'),
('Profit factor', f'{strat_results["profit_factor"]:.2f}' if 'profit_factor'
in strat_results else 'N/A'),
('Trades per day', strat_results['trades_per_day']),
('Avg. daily profit %',
f"{(strat_results['profit_total'] / strat_results['backtest_days']):.2%}"),
('Avg. stake amount', round_coin_value(strat_results['avg_stake_amount'],
strat_results['stake_currency'])),
('Total trade volume', round_coin_value(strat_results['total_volume'],
strat_results['stake_currency'])),
*short_metrics,
('', ''), # Empty line to improve readability
('Best Pair', f"{strat_results['best_pair']['key']} "
f"{strat_results['best_pair']['profit_sum']:.2%}"),
('Worst Pair', f"{strat_results['worst_pair']['key']} "
f"{strat_results['worst_pair']['profit_sum']:.2%}"),
('Best trade', f"{best_trade['pair']} {best_trade['profit_ratio']:.2%}"),
('Worst trade', f"{worst_trade['pair']} "
f"{worst_trade['profit_ratio']:.2%}"),
('Best day', round_coin_value(strat_results['backtest_best_day_abs'],
strat_results['stake_currency'])),
('Worst day', round_coin_value(strat_results['backtest_worst_day_abs'],
strat_results['stake_currency'])),
('Days win/draw/lose', f"{strat_results['winning_days']} / "
f"{strat_results['draw_days']} / {strat_results['losing_days']}"),
('Avg. Duration Winners', f"{strat_results['winner_holding_avg']}"),
('Avg. Duration Loser', f"{strat_results['loser_holding_avg']}"),
('Rejected Entry signals', strat_results.get('rejected_signals', 'N/A')),
('Entry/Exit Timeouts',
f"{strat_results.get('timedout_entry_orders', 'N/A')} / "
f"{strat_results.get('timedout_exit_orders', 'N/A')}"),
*entry_adjustment_metrics,
('', ''), # Empty line to improve readability
('Min balance', round_coin_value(strat_results['csum_min'],
strat_results['stake_currency'])),
('Max balance', round_coin_value(strat_results['csum_max'],
strat_results['stake_currency'])),
*drawdown_metrics,
('Market change', f"{strat_results['market_change']:.2%}"),
]
return tabulate(metrics, headers=["Metric", "Value"], tablefmt="orgtbl")
else:
start_balance = round_coin_value(strat_results['starting_balance'],
strat_results['stake_currency'])
stake_amount = round_coin_value(
strat_results['stake_amount'], strat_results['stake_currency']
) if strat_results['stake_amount'] != UNLIMITED_STAKE_AMOUNT else 'unlimited'
message = ("No trades made. "
f"Your starting balance was {start_balance}, "
f"and your stake was {stake_amount}."
)
return message
def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency: str,
backtest_breakdown=[]):
"""
Print results for one strategy
"""
# Print results
print(f"Result for strategy {strategy}")
table = text_table_bt_results(results['results_per_pair'], stake_currency=stake_currency)
if isinstance(table, str):
print(' BACKTESTING REPORT '.center(len(table.splitlines()[0]), '='))
print(table)
if (results.get('results_per_enter_tag') is not None
or results.get('results_per_buy_tag') is not None):
# results_per_buy_tag is deprecated and should be removed 2 versions after short golive.
table = text_table_tags(
"enter_tag",
results.get('results_per_enter_tag', results.get('results_per_buy_tag')),
stake_currency=stake_currency)
if isinstance(table, str) and len(table) > 0:
print(' ENTER TAG STATS '.center(len(table.splitlines()[0]), '='))
print(table)
exit_reasons = results.get('exit_reason_summary', results.get('sell_reason_summary'))
table = text_table_exit_reason(exit_reason_stats=exit_reasons,
stake_currency=stake_currency)
if isinstance(table, str) and len(table) > 0:
print(' EXIT REASON STATS '.center(len(table.splitlines()[0]), '='))
print(table)
table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency)
if isinstance(table, str) and len(table) > 0:
print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '='))
print(table)
for period in backtest_breakdown:
days_breakdown_stats = generate_periodic_breakdown_stats(
trade_list=results['trades'], period=period)
table = text_table_periodic_breakdown(days_breakdown_stats=days_breakdown_stats,
stake_currency=stake_currency, period=period)
if isinstance(table, str) and len(table) > 0:
print(f' {period.upper()} BREAKDOWN '.center(len(table.splitlines()[0]), '='))
print(table)
table = text_table_add_metrics(results)
if isinstance(table, str) and len(table) > 0:
print(' SUMMARY METRICS '.center(len(table.splitlines()[0]), '='))
print(table)
if isinstance(table, str) and len(table) > 0:
print('=' * len(table.splitlines()[0]))
print()
def show_backtest_results(config: Config, backtest_stats: Dict):
stake_currency = config['stake_currency']
for strategy, results in backtest_stats['strategy'].items():
show_backtest_result(
strategy, results, stake_currency,
config.get('backtest_breakdown', []))
if len(backtest_stats['strategy']) > 1:
# Print Strategy summary table
table = text_table_strategy(backtest_stats['strategy_comparison'], stake_currency)
print(f"{results['backtest_start']} -> {results['backtest_end']} |"
f" Max open trades : {results['max_open_trades']}")
print(' STRATEGY SUMMARY '.center(len(table.splitlines()[0]), '='))
print(table)
print('=' * len(table.splitlines()[0]))
print('\nFor more details, please look at the detail tables above')
def show_sorted_pairlist(config: Config, backtest_stats: Dict):
if config.get('backtest_show_pair_list', False):
for strategy, results in backtest_stats['strategy'].items():
print(f"Pairs for Strategy {strategy}: \n[")
for result in results['results_per_pair']:
if result["key"] != 'TOTAL':
print(f'"{result["key"]}", // {result["profit_mean"]:.2%}')
print("]")
|
robcaulk/freqai
|
freqtrade/optimize/optimize_reports.py
|
optimize_reports.py
|
py
| 41,632 |
python
|
en
|
code
| 42 |
github-code
|
6
|
71601601789
|
def tabung(b, c):
# Cylinder: volume = pi * r^2 * t; surface area = 2 * pi * r * (r + t)
volume = 22/7 * b * b * c
luas = 2 * 22/7 * b * (b + c)
return volume, luas
def balok(a, b, c):
volume = a * b * c
return volume
def main():
ma = input("Enter which formula to use (tabung, balok): ")
if ma == "tabung":
b = float(input("Enter the radius: "))
c = float(input("Enter the height: "))
volume, luas = tabung(b, c)
print(f"volume: {volume}")
print(f"luas: {luas}")
elif ma == "balok":
a = float(input("length: "))
b = float(input("width: "))
c = float(input("height: "))
volume = balok(a, b, c)
print(f"volume: {volume}")
else:
print("No such formula.")
if __name__ == "__main__":
main()
|
MErlanggaa/Tugas
|
Python/tugas/menghitungtabungdanbalok.py
|
menghitungtabungdanbalok.py
|
py
| 733 |
python
|
id
|
code
| 0 |
github-code
|
6
|
41457469135
|
"""
flaskr.utils.db
~~~~~~~~~~~~~~~
Utilities for database operations.
"""
import sqlite3
from typing import List, Optional
from datetime import datetime, timezone
from flask import g
from flask import current_app
from flaskr.utils.node import Node
def convert_timestamp(t):
return datetime.fromisoformat(t.decode()).replace(tzinfo=timezone.utc).timestamp()
# Register the converter
sqlite3.register_converter("timestamp", convert_timestamp)
def get_db():
"""Connect to the application's configured database. The connection
is unique for each request and will be reused if this is called
again.
"""
if "db" not in g:
g.db = sqlite3.connect(
current_app.config["DATABASE"], detect_types=sqlite3.PARSE_DECLTYPES
)
g.db.row_factory = sqlite3.Row
return g.db
def close_db(e=None):
"""If this request connected to the database, close the
connection.
"""
db = g.pop("db", None)
if db is not None:
db.close()
def init_db(app):
"""Clear existing data and create new tables."""
with app.app_context():
db = get_db()
with app.open_resource("schema.sql") as f:
db.executescript(f.read().decode("utf8"))
def init_app(app):
"""Register database functions with the Flask app. This is called by
the application factory.
"""
app.teardown_appcontext(close_db)
# modified from https://github.com/matthiask/django-tree-queries/blob/8863c5237f32585cc5ddc21041231155cb806149/tree_queries/compiler.py#L120
CTE = """WITH RECURSIVE __tree(tree_depth,
tree_path,
tree_ordering,
tree_pk) AS (
SELECT
0 tree_depth,
printf("%s ", id) tree_path,
printf(" %020s ", id) tree_ordering,
T.id tree_pk
FROM comment T
WHERE T.parent_id IS NULL
UNION ALL
SELECT
__tree.tree_depth + 1,
__tree.tree_path || printf("%s ", T.id),
__tree.tree_ordering || printf("%020s ", T.id),
T.id
FROM comment T
JOIN __tree ON T.parent_id = __tree.tree_pk
)
SELECT
comment.id,
comment.parent_id,
comment.post_id,
comment.body, comment.created,
comment.author_id, user.username as author_name,
-- __tree.tree_depth
__tree.tree_path
FROM __tree
JOIN comment ON comment.id=__tree.tree_pk
JOIN user ON user.id=comment.author_id
WHERE comment.post_id=?
-- AND instr(__tree.tree_path, '3') !=0
ORDER BY __tree.tree_ordering;
"""
def get_all_comments(post_id: int) -> List[Node]:
root_nodes = Node.build_from_cte_rows(get_db().execute(CTE, (post_id, )))
return list(root_nodes)
def put_in_child(parent: dict, child: dict):
if 'children' in parent:
parent['children'].append(child)
else:
parent['children'] = [child]
def get_post(post_id, with_comments=True) -> Optional[dict]:
"""Get a post and its author by id.
:param post_id: id of post to get
:param with_comments: if return with comments
:return: the post with author information
"""
post = (
get_db()
.execute(
"SELECT p.id, title, body, created, author_id, username"
" FROM post p JOIN user u ON p.author_id = u.id"
" WHERE p.id = ?",
(post_id,),
)
.fetchone()
)
if post is None:
return None
if with_comments:
comments = get_all_comments(post_id=post['id'])
return dict(post) | dict(comments=comments)
else:
return dict(post)
def get_all_posts():
db = get_db()
rst = []
for post in db.execute(
"SELECT p.id, title, body, created, author_id, username as author_name"
" FROM post p JOIN user u ON p.author_id = u.id"
" ORDER BY p.created DESC, p.id DESC"
):
rst.append(dict(post) | dict(comments=get_all_comments(post["id"])))
return rst
|
MioYvo/unlimited-level-messages
|
backend/flaskr/utils/db.py
|
db.py
|
py
| 3,870 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12830935610
|
class Solution:
def isSubsequence(self, s: str, t: str) -> bool:
if len(s) > len(t):
return False
if len(s) == 0:
return True
index_s = 0
for char_t in t:
if char_t == s[index_s]:
index_s += 1
if index_s == len(s):
return True
return False
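# Example: "abcc" is not a subsequence of "ahbgdc" (t contains a single 'c'),
# so the call below prints False.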
print(Solution().isSubsequence("abcc", "ahbgdc"))
|
theRobertSan/LeetCode-Solutions-Python
|
392.py
|
392.py
|
py
| 426 |
python
|
en
|
code
| 1 |
github-code
|
6
|
42012547996
|
# python 3 has different package names
try: from urlparse import urlparse
except ImportError: from urllib.parse import urlparse
from collections import defaultdict
def _new_collection():
""" Collection data type is
{path: {method: (ResponseClass,) }}
So e.g. a POST request to http://venmo.com/feed is stored as
{'/feed': {'POST': (ResponseClass,)}}
the ResponseClass will have had the constructor partially applied
with the specified stubbed data, so after finding it we finish
instantiating with the request we received and return it.
Why? So the request attribute on the response is the request that
was made, not just the matching criteria in the stub
"""
return defaultdict(lambda: defaultdict(lambda: ()))
class RequestCollection(object):
_requests = _new_collection()
@classmethod
def add(cls, request, response):
parsed = urlparse(request.url)
cls._requests[parsed.path][request.method] = \
cls._requests[parsed.path][request.method] + (response,)
@classmethod
def find(cls, request):
parsed = urlparse(request.url)
responses = cls._requests[parsed.path][request.method]
if len(responses) > 0:
head = responses[0]
cls._requests[parsed.path][request.method] = \
cls._requests[parsed.path][request.method][1:] + (head,)
else:
head = None
return head
@classmethod
def remove(cls, request):
parsed = urlparse(request.url)
del cls._requests[parsed.path][request.method]
@classmethod
def reset(cls):
cls._requests = _new_collection()
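# Minimal usage sketch (request is any object exposing .url and .method, as
# produced by this package's stub helpers; the names here are illustrative):
#
#   RequestCollection.add(request, response)
#   stubbed = RequestCollection.find(request)  # rotates matching stubs round-robin
#   RequestCollection.reset()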
|
venmo/tornado-stub-client
|
tornado_stub_client/collection.py
|
collection.py
|
py
| 1,717 |
python
|
en
|
code
| 9 |
github-code
|
6
|
29542340141
|
import re
from collections import defaultdict
from string import Template
from odoo import _
from odoo.exceptions import MissingError
DEFAULT_REFERENCE_SEPARATOR = ""
PLACE_HOLDER_4_MISSING_VALUE = "/"
class ReferenceMask(Template):
pattern = r"""\[(?:
(?P<escaped>\[) |
(?P<named>[^\]]+?)\] |
(?P<braced>[^\]]+?)\] |
(?P<invalid>)
)"""
def extract_token(s):
pattern = re.compile(r"\[([^\]]+?)\]")
return set(pattern.findall(s))
def sanitize_reference_mask(product, mask):
tokens = extract_token(mask)
attribute_names = set()
for line in product.attribute_line_ids:
attribute_names.add(line.attribute_id.name)
if not tokens.issubset(attribute_names):
raise MissingError(
_("Found unrecognized attribute name in " '"Partcode Template"')
)
def get_rendered_default_code(product, mask):
product_attrs = defaultdict(str)
reference_mask = ReferenceMask(mask)
for value in product.product_template_attribute_value_ids:
if value.attribute_id.code:
product_attrs[value.attribute_id.name] += value.attribute_id.code
if value.product_attribute_value_id.code:
product_attrs[
value.attribute_id.name
] += value.product_attribute_value_id.code
all_attrs = extract_token(mask)
missing_attrs = all_attrs - set(product_attrs.keys())
missing = dict.fromkeys(missing_attrs, PLACE_HOLDER_4_MISSING_VALUE)
product_attrs.update(missing)
default_code = reference_mask.safe_substitute(product_attrs)
return default_code
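# e.g. (hypothetical codes) a mask like "[Color][Size]" with rendered attribute
# codes {"Color": "RD", "Size": "XL"} produces the default_code "RDXL";
# attributes missing on the variant fall back to "/".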
def render_default_code(product, mask):
sanitize_reference_mask(product, mask)
product.default_code = get_rendered_default_code(product, mask)
|
odoonz/odoonz-addons
|
product_code_builder/models/helper_methods.py
|
helper_methods.py
|
py
| 1,828 |
python
|
en
|
code
| 14 |
github-code
|
6
|
39993181023
|
import os
import numpy as np
import matplotlib.pyplot as plt
import cv2
import open3d as o3d
def mkdirs(path):
try:
os.makedirs(path)
except:
pass
class Saver(object):
def __init__(self, save_dir):
self.idx = 0
self.save_dir = os.path.join(save_dir, "results")
if not os.path.exists(self.save_dir):
mkdirs(self.save_dir)
def save_as_point_cloud(self, depth, rgb, path, mask=None):
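# Equirectangular back-projection: each pixel (row, col) maps to spherical
# angles (Theta, Phi), the depth value is the radial distance, and the result
# is converted to Cartesian X, Y, Z with Y as the polar axis.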
h, w = depth.shape
Theta = np.arange(h).reshape(h, 1) * np.pi / h + np.pi / h / 2
Theta = np.repeat(Theta, w, axis=1)
Phi = np.arange(w).reshape(1, w) * 2 * np.pi / w + np.pi / w - np.pi
Phi = -np.repeat(Phi, h, axis=0)
X = depth * np.sin(Theta) * np.sin(Phi)
Y = depth * np.cos(Theta)
Z = depth * np.sin(Theta) * np.cos(Phi)
if mask is None:
X = X.flatten()
Y = Y.flatten()
Z = Z.flatten()
R = rgb[:, :, 0].flatten()
G = rgb[:, :, 1].flatten()
B = rgb[:, :, 2].flatten()
else:
X = X[mask]
Y = Y[mask]
Z = Z[mask]
R = rgb[:, :, 0][mask]
G = rgb[:, :, 1][mask]
B = rgb[:, :, 2][mask]
XYZ = np.stack([X, Y, Z], axis=1)
RGB = np.stack([R, G, B], axis=1)
pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(XYZ)
pcd.colors = o3d.utility.Vector3dVector(RGB)
o3d.io.write_point_cloud(path, pcd)
def save_samples(self, rgbs, gt_depths, pred_depths, depth_masks=None):
"""
Saves samples
"""
rgbs = rgbs.cpu().numpy().transpose(0, 2, 3, 1)
depth_preds = pred_depths.cpu().numpy()
gt_depths = gt_depths.cpu().numpy()
if depth_masks is None:
depth_masks = gt_depths != 0
else:
depth_masks = depth_masks.cpu().numpy()
for i in range(rgbs.shape[0]):
self.idx = self.idx+1
mkdirs(os.path.join(self.save_dir, '%04d'%(self.idx)))
cmap = plt.get_cmap("rainbow_r")
depth_pred = cmap(depth_preds[i][0].astype(np.float32)/10)
depth_pred = np.delete(depth_pred, 3, 2)
path = os.path.join(self.save_dir, '%04d' % (self.idx) ,'_depth_pred.jpg')
cv2.imwrite(path, (depth_pred * 255).astype(np.uint8))
depth_gt = cmap(gt_depths[i][0].astype(np.float32)/10)
depth_gt = np.delete(depth_gt, 3, 2)
depth_gt[..., 0][~depth_masks[i][0]] = 0
depth_gt[..., 1][~depth_masks[i][0]] = 0
depth_gt[..., 2][~depth_masks[i][0]] = 0
path = os.path.join(self.save_dir, '%04d' % (self.idx), '_depth_gt.jpg')
cv2.imwrite(path, (depth_gt * 255).astype(np.uint8))
path = os.path.join(self.save_dir, '%04d'%(self.idx) , '_pc_pred.ply')
self.save_as_point_cloud(depth_preds[i][0], rgbs[i], path)
path = os.path.join(self.save_dir, '%04d'%(self.idx) , '_pc_gt.ply')
self.save_as_point_cloud(gt_depths[i][0], rgbs[i], path, depth_masks[i][0])
rgb = (rgbs[i] * 255).astype(np.uint8)
path = os.path.join(self.save_dir, '%04d'%(self.idx) , '_rgb.jpg')
cv2.imwrite(path, rgb[:,:,::-1])
|
zhijieshen-bjtu/PanoFormer
|
PanoFormer/saver.py
|
saver.py
|
py
| 3,346 |
python
|
en
|
code
| 79 |
github-code
|
6
|
43229119277
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("shop", "0005_auto_20150527_1127"),
]
operations = [
migrations.AlterField(
model_name="order",
name="key",
field=models.CharField(max_length=40, db_index=True),
),
]
|
stephenmcd/cartridge
|
cartridge/shop/migrations/0006_auto_20150916_0459.py
|
0006_auto_20150916_0459.py
|
py
| 345 |
python
|
en
|
code
| 696 |
github-code
|
6
|
22083785095
|
def sieve_of_primes(n):
# n = 20
sev=[True for x in range(n+1)]
sev[0] = sev[1] = False
p=2
while (p*p<=n):
if sev[p] == True:
for i in range(p*p,n+1,p):
sev[i] = False
p += 1
return sev
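# Example: in sieve_of_primes(10), indices 2, 3, 5 and 7 are the ones left
# True among 2..10.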
hours,part=input().split()
hours = int(hours)
part = int(part)
div=hours//part
sev = sieve_of_primes(500)
# print(sev)
count=0
for x in range(1,div+1):
flg=0
for y in range(part):
if not sev[x+y*div]:
flg=1
break
if flg==0:
count+=1
print(count)
|
vamshipv/code-repo
|
TCS Codevita/primetime.py
|
primetime.py
|
py
| 453 |
python
|
en
|
code
| 0 |
github-code
|
6
|
33359783594
|
from unittest import TestCase
import unittest
# from unittest.mock import patch, Mock
# import csv
# from flask import request, jsonify
import requests
# import sys
#
# sys.path.insert(0, '../../src')
class TestLoadDailyReports(TestCase):
# def setUp(self):
# self.app = app.
def test_load_data_success(self):
f = open("tests/routes/01-01-2021.csv", "rb")
file = f.read()
url = 'https://covid-monitor-61.herokuapp.com/daily_reports/data'
r = requests.post(url, data=file, headers={"Content-Type": "text/csv"})
f.close()
self.assertEqual(r.status_code, 200)
def test_query_data_success(self):
url = 'https://covid-monitor-61.herokuapp.com/daily_reports/cases'
body = {"return_type": "json",
"types": ["Confirmed", "Deaths", "Active"],
"locations":
[
{"Country/Region": "Belgium"},
{"Country/Region": "Canada", "Province/State": "Ontario"},
{"Country/Region": "Australia",
"Province/State": "Queensland",
"Combined_Key": "Australian Capital Territory, Australia"}
]
}
r = requests.post(url, json=body, headers={"Content-Type": "application/json"})
print(r.json())
self.assertEqual(r.status_code, 200)
if __name__ == '__main__':
unittest.main()
|
shin19991207/CSC301-A2
|
tests/routes/test_daily_reports.py
|
test_daily_reports.py
|
py
| 1,472 |
python
|
en
|
code
| 0 |
github-code
|
6
|
13504549380
|
#For a random choice we need to import the random module.
import random
customers = ['Jimmy', 'kim', 'John', 'Stacie']
#Choose one customer at random and store the value as winner.
winner = random.choice(customers)
#Set a variable named flavor to the text vanilla.
flavor = 'vanilla'
#Print "Congratulations", the name of the randomly chosen winner,
#and "you have won an ice cream sundae!".
print('Congratulations ' + winner + ' you have won an ice cream sundae!')
#Set another variable named prompt to the text Would you like a cherry on top?
prompt = 'Would you like a cherry on top? '
#Read wants_cherry from user input; the user is shown the prompt above.
wants_cherry = input(prompt)
#Set a variable named order that combines the flavor (vanilla) with the word sundae.
order = flavor + ' sundae '
#If the user answers yes, append " with a cherry on top" to the order.
if (wants_cherry == 'yes'):
order = order + ' with a cherry on top'
#Print: One <order> for <winner> coming right up...
print('One ' + order + ' for ' + winner +
' coming right up...')
|
mainurrasel/Learn-To-Code
|
Assignments/ch1/pg-11/iceCream_writing_python_pg_11.py.py
|
iceCream_writing_python_pg_11.py.py
|
py
| 1,151 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40787879761
|
import datetime
from time import sleep
motionTimeOutSeconds = 5
lastMotionTime = datetime.datetime.now()
def motionTimedOut():
myNow = datetime.datetime.now()
deltaTime = (myNow - lastMotionTime).total_seconds()
if deltaTime > motionTimeOutSeconds:
print('Motion timed out after {0} seconds'.format(deltaTime))
return True
return False
sleep(2)
# bTime = datetime.datetime.now()
# deltaT = (bTime-lastMotionTime).total_seconds()
# print(deltaT)
if motionTimedOut():
print('Motion timeout test of 2 seconds failed!')
exit(-1)
print('No timeout after 2 seconds.')
lastMotionTime = datetime.datetime.now()
sleep(6)
if motionTimedOut():
print('Motion timeout test is working after 6 seconds')
|
mrncmoose/smart_controller
|
pi-code/thermalPreTest.py
|
thermalPreTest.py
|
py
| 816 |
python
|
en
|
code
| 3 |
github-code
|
6
|
19601192171
|
#-*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from blog.models import Mypost, MainPage
from blog.forms import CreateForms
# Create your views here.
def index(request):
all_posts = Mypost.objects.all()
maintext = MainPage.objects.all()
# print('all_posts_all')
# print(all_posts)
context = {'maintext': maintext}
return render(request, 'index.html', context)
def BlogList(request):
all_posts = Mypost.objects.all()
# print('all_posts_all')
# print(all_posts)
context = {'all_posts': all_posts}
return render(request, 'blogs_list.html', context)
def PostDitail(request, pk):
post_ditail = Mypost.objects.all().filter(pk=pk)
print('post_ditail')
print(post_ditail[0])
return render(request, 'blog_ditail.html', {'post_ditail': post_ditail[0]})
def CreatePost(request):
if request.method == 'POST':
form = CreateForms(request.POST, request.FILES)
if form.is_valid():
create_post = form.save(commit=False)
create_post.author = request.user
create_post.save()
return redirect('blog:ditail', pk=create_post.pk)
else:
form = CreateForms()
return render(request, 'create.html', {'form': form})
def EditPost(request, pk):
# edit_post1 = get_object_or_404(Mypost, pk=pk)
edit_post = Mypost.objects.filter(pk=pk).get()
if request.method == 'POST':
form = CreateForms(request.POST, request.FILES, instance=edit_post)
# print('form')
# print(form)
if form.is_valid():
edit = form.save(commit=False)
# edit.author = request.user
# print('edit.author')
# print(edit.author)
edit.save()
return redirect('blog:ditail', pk=edit.pk)
else:
form = CreateForms(instance=edit_post)
return render(request, 'edit.html', {'form': form})
def DelPost(request, pk):
del_post = Mypost.objects.get(pk=pk)
if request.method == 'POST':
form = CreateForms(request.POST, instance=del_post)
if form.is_valid():
del_post = form.save(commit=False)
del_post.delete()
return redirect('blog:blogs_list')
else:
form = CreateForms(instance=del_post)
return render(request, 'delete.html', {'form': form})
# def PagePostApi(request):
#
# if request.method == 'POST':
# form = CreateForms(request.POST)
# if form.is_valid():
# create_post = form.save(commit=False)
# create_post.author = request.user
# create_post.save()
# return redirect('blog:ditail', pk=create_post.pk)
# else:
# form = CreateForms
# return render(request, 'index.html', {'form': form} )
def ApiBlogjs(request):
return render(request, 'apiblogjs.html', )
def ApiCreateblogjs(request):
return render(request, 'apicreateblogjs.html',)
def ApiDetailblogjs(request, pk):
post_ditail = Mypost.objects.all().filter(pk=pk)
return render(request, 'apidetailblogjs.html',{'post_ditail': post_ditail[0]})
def ApiEditblogjs(request, pk):
post_ditail = Mypost.objects.all().filter(pk=pk)
return render(request, 'apieditblogjs.html',{'post_ditail': post_ditail[0]})
### ResctJS #####
def AllPostsReact(request):
return render(request, 'react_post/allreact.html', )
def CreatePostReact(request):
return render(request, 'react_post/createpostreact.html',)
def DetailPostReact(request, pk):
post_ditail = Mypost.objects.all().filter(pk=pk)
return render(request, 'react_post/detailreact.html',{'post_ditail': post_ditail[0]})
def EditPostReact(request, pk):
post_ditail = Mypost.objects.all().filter(pk=pk)
return render(request, 'react_post/editpostreact.html',{'post_ditail': post_ditail[0]})
|
drhtka/forms_urls_drf
|
blog/views.py
|
views.py
|
py
| 3,836 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5694314611
|
import torch
import torch.nn as nn
class Decoder(nn.Module):
def __init__(self):
super(Decoder, self).__init__()
self.reduce_dim_5 = nn.Conv2d(2048, 256, kernel_size=(1, 1), stride=1, padding=0)
self.reduce_dim_4 = nn.Conv2d(1024, 256, kernel_size=(1, 1), stride=1, padding=0)
self.reduce_dim_3 = nn.Conv2d(512, 256, kernel_size=(1, 1), stride=1, padding=0)
self.reduce_dim_2 = nn.Conv2d(256, 256, kernel_size=(1, 1), stride=1, padding=0)
self.double_conv_5 = self._make_dobule_conv(256, 128)
self.double_conv_4 = self._make_dobule_conv(256, 128)
self.double_conv_3 = self._make_dobule_conv(256, 128)
self.double_conv_2 = self._make_dobule_conv(256, 128)
def _up_add(self, x, y):
(_, _, H, W) = y.size()
x_up = nn.functional.interpolate(x, size=(H, W), mode='nearest')  # upsample is deprecated
return x_up + y
def _make_dobule_conv(self, in_dims, out_dims):
conv_layer = nn.Sequential(
nn.Conv2d(in_dims, out_dims, kernel_size=(3, 3), stride=1, padding=1),
nn.BatchNorm2d(out_dims),
nn.ReLU(),
nn.Conv2d(out_dims, out_dims, kernel_size=(3, 3), stride=1, padding=1),
nn.BatchNorm2d(out_dims),
nn.ReLU()
)
return conv_layer
def forward(self, c2, c3, c4, c5):
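# FPN-style top-down pathway: each backbone stage (c2..c5) is first reduced
# to 256 channels, coarser maps are upsampled and added to finer ones, and
# each merged map is then smoothed by a double 3x3 conv block.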
m5 = self.reduce_dim_5(c5)
m4 = self._up_add(m5, self.reduce_dim_4(c4))
m3 = self._up_add(m4, self.reduce_dim_3(c3))
m2 = self._up_add(m3, self.reduce_dim_2(c2))
m5 = self.double_conv_5(m5)
m4 = self.double_conv_4(m4)
m3 = self.double_conv_3(m3)
m2 = self.double_conv_2(m2)
return m5, m4, m3, m2
|
dmdm2002/FPN
|
Model/TopDown.py
|
TopDown.py
|
py
| 1,762 |
python
|
en
|
code
| 2 |
github-code
|
6
|
31102190444
|
from distutils.core import setup
import sys
sys.path.insert(1, "lib")
from maxicom.strings import *
files = ["glade/*"]
setup(name = PACKAGE,
version = VERSION,
description = DESCRIPTION,
author = AUTHOR,
author_email = AUTHOR_EMAIL,
url = URL,
license = LICENSE,
package_dir = {'': 'lib'},
packages = ['maxicom'],
package_data = {'maxicom' : files },
scripts = ["maxicom"],
)
|
gsmcmullin/maxicom
|
setup.py
|
setup.py
|
py
| 420 |
python
|
en
|
code
| 1 |
github-code
|
6
|
8352946523
|
from setuptools import find_packages, setup
with open("./README.md") as fp:
description = fp.read()
setup(
name="pyC8",
version="1.1.1",
description="Python SDK for Macrometa Global Data Mesh",
long_description=description,
long_description_content_type="text/markdown",
author="Macrometa",
author_email="[email protected]",
url="https://www.macrometa.com",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=["requests==2.25.1", "six", "websocket-client==0.57.0"],
tests_require=["pytest", "mock", "flake8"],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Information Technology",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Topic :: Documentation :: Sphinx",
],
)
|
Macrometacorp/pyC8
|
setup.py
|
setup.py
|
py
| 1,074 |
python
|
en
|
code
| 6 |
github-code
|
6
|
33349139375
|
from data_test import DataGarbageCollector
from conftest import *
def test_function(setup_teardown):
node_name = "k3d-p3"
ip_address = get_ip(node_name)
setup_sessions_metadata(node_name)
setup_node_metadata(node_name)
con = Connection(node_name='k3d-p3', ip=ip_address)
session = 'marco'
assert DataGarbageCollector.test_before == con.get(
'session-offloading-manager?command=test-function&type=session&value=' + session)
assert DataGarbageCollector.garbage_collector == con.get('session-offloading-manager-garbage-collector?command=garbage-collector&deletePolicy=forced&sessionId=' + session)
assert DataGarbageCollector.test_after == con.get(
'session-offloading-manager?command=test-function&type=session&value=' + session)
|
leonardobarilani/edge-computing-thesis
|
FaaS/openfaas-offloading-session/tests/test_garbage_collector.py
|
test_garbage_collector.py
|
py
| 785 |
python
|
en
|
code
| 4 |
github-code
|
6
|
34327083443
|
import pickle
import re
from pathlib import Path
from typing import List
from IPython.display import display
import os.path as op
from datetime import datetime
import pandas as pd
from tqdm.notebook import tqdm
from matplotlib import pyplot as plt
from sklearn.metrics import (
accuracy_score,
balanced_accuracy_score,
f1_score,
matthews_corrcoef,
precision_score,
recall_score,
)
from src.helpers.mylogger import get_handler
import logging
handler = get_handler()
log = logging.getLogger(__name__)
log.handlers[:] = []
log.addHandler(handler)
log.setLevel(logging.DEBUG)
class EntryNotFoundError(Exception):
pass
class BidirectionalEntriesFoundError(Exception):
pass
class MultipleEntryFoundError(Exception):
pass
class ClassLabels:
DISRUPTIVE = "Disruptive"
NON_DISRUPTIVE = "Non-disruptive"
NOT_AVAILABLE = "N/A"
def unzip_res_range(res_range):
"""
Converts ranges in the form [2-210] or [3-45,47A,47B,51-67] into lists of strings including all numbers in
these ranges, in order.
Items are of type <str>.
"""
res_ranges = res_range.strip()[1:-1].split(',')
index_list = []
for r in res_ranges:
if re.match('.+-.+', r):
a, b = r.split('-')
index_list += [str(n) for n in range(int(a), int(b) + 1)]
else:
index_list.append(r)
return index_list
# print(unzip_res_range("[95-96,98-100,102-103,122,262,266-267,270,273,294]"))
def get_mutation_position(mutation):
return mutation[1:-1]
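# e.g. get_mutation_position("R273H") -> "273"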
class CancerValidation:
def __init__(self, interfaces_data_path):
self.interfaces_data = self.load_data(interfaces_data_path)
@staticmethod
def load_data(data_path):
interfaces_data = pd.read_csv(
data_path, sep="\t", usecols=["P1", "P2", "P1_IRES", "P2_IRES"]
)
interfaces_data["P1_IRES"] = interfaces_data["P1_IRES"].apply(lambda x: unzip_res_range(x))
interfaces_data["P2_IRES"] = interfaces_data["P2_IRES"].apply(lambda x: unzip_res_range(x))
return interfaces_data
def check(self, protein: str, mutation: str, interactor: str):
# print(f"Checking ..\n"
# f"> PROTEIN: {protein} \n"
# f"> MUTATION: {mutation} \n"
# f"> INTERACTOR: {interactor}")
try:
data, res = self._get_entry(protein, interactor)
except EntryNotFoundError:
return ClassLabels.NOT_AVAILABLE
mut_pos = get_mutation_position(mutation)
if mut_pos in res:
return ClassLabels.DISRUPTIVE
else:
return ClassLabels.NON_DISRUPTIVE
@staticmethod
def _handle_check_duplicated_entries(data, p_ires) -> List[int]:
"""
Checks if all entries of given data duplicated. Each cell contains list in it.
If all entries are duplicated entries, then we have no problem, just get the res.
"""
# display(data)
# In order for us to check if the entries are duplicated, we'll have to
# convert list item in the cells to tuple. Otherwise, we get the following error:
# TypeError: unhashable type: 'list'
data_tuple = data[["P1_IRES", "P2_IRES"]].applymap(lambda x: tuple(x))
# Extract the unique protein id on each side.
if p_ires == "P1_IRES":
[p1] = data["P1"].unique()
[p2] = data["P2"].unique()
elif p_ires == "P2_IRES":
[p2] = data["P1"].unique()
[p1] = data["P2"].unique()
else:
raise ValueError(f"Illegal argument provided for parameter `p_ires`: {p_ires}")
# check if all entries are duplicated
if data_tuple.duplicated(keep=False).all():
log.warning(
f"Multiple entries but they were duplicated. PROTEIN: {p1}, INTERACTOR: {p2}"
)
[p_res] = data_tuple[p_ires].unique()
p_res = list(p_res)
return p_res
else:
log.error("MultipleEntryError with following data: ")
display(data)
p_res_list = data[p_ires].tolist()
p_res = sorted(
set([item for sublist in p_res_list for item in sublist])
)
log.error(F"Returned RES: {p_res}")
return p_res
# data.to_csv("ERROR_data.csv", index=False)
# raise MultipleEntryFoundError
def _get_entry(self, protein, interactor):
a_b_interface_data = self.interfaces_data[
(self.interfaces_data["P1"] == protein) &
(self.interfaces_data["P2"] == interactor)
]
b_a_interface_data = self.interfaces_data[
(self.interfaces_data["P1"] == interactor) &
(self.interfaces_data["P2"] == protein)
]
# Both of them contains entry -- this is an unlikely situation, unless there is problem with the text file..
if len(a_b_interface_data) != 0 and len(b_a_interface_data) != 0:
raise BidirectionalEntriesFoundError
# First data contains entry and the second one is empty
elif len(a_b_interface_data) != 0 and len(b_a_interface_data) == 0:
if len(a_b_interface_data) != 1:
p1_res = self._handle_check_duplicated_entries(a_b_interface_data, "P1_IRES")
else:
[p1_res] = a_b_interface_data["P1_IRES"]
return a_b_interface_data, p1_res
# First data is empty and the second one contains entry
elif len(a_b_interface_data) == 0 and len(b_a_interface_data) != 0:
if len(b_a_interface_data) != 1:
p2_res = self._handle_check_duplicated_entries(b_a_interface_data, "P2_IRES")
else:
[p2_res] = b_a_interface_data["P2_IRES"]
return b_a_interface_data, p2_res
# Both of them are empty
else:
raise EntryNotFoundError
def validate(
self,
tcga_type: str,
tcga_data: pd.DataFrame,
):
tcga_data_validation = tcga_data.copy()
validation_results = []
for index, row in tqdm(
tcga_data_validation.iterrows(),
total=len(tcga_data_validation)
):
protein = row["UniProt_ID"]
mutation = row["Mutation"]
interactor = row["Interactor_UniProt_ID"]
valid_label = self.check(
protein=protein, mutation=mutation, interactor=interactor
)
# print(f">> RESULT: {valid_label}")
validation_results.append(valid_label)
tcga_data_validation["Validation"] = validation_results
tcga_data_validation["Validation"].value_counts().plot(
kind="bar", title=f"{tcga_type} Validation Results"
)
plt.show()
tcga_data_validation_processed = process_validation_data(tcga_data_validation)
tcga_data_validation_processed["Validation"].value_counts().plot(
kind="bar", title=f"{tcga_type} Validation Processed Results"
)
plt.show()
metrics_data = get_scoring_metrics(tcga_data_validation_processed)
num_entries = len(tcga_data_validation)
counts = tcga_data_validation_processed["Validation"].value_counts().to_dict()
num_disruptive = counts[0]
num_non_disruptive = counts[1]
metrics_data.insert(0, "TCGA", tcga_type)
metrics_data.insert(1, "#_Entries", num_entries)
metrics_data.insert(2, "#_Disruptive", num_disruptive)
metrics_data.insert(3, "#_Non_disruptive", num_non_disruptive)
return {
"data_validation": tcga_data_validation,
"data_validation_processed": tcga_data_validation_processed,
"metrics_data": metrics_data,
}
@staticmethod
def validate_single_class(
tcga_type: str,
output_already_calculated: dict,
single_class: int,
):
"""
Requires the positions to be already calculated.
"""
tcga_data_validation = output_already_calculated["data_validation"]
print(f"Using the class {single_class} only.")
tcga_data_validation = tcga_data_validation[
tcga_data_validation["Prediction"] == single_class
].copy()
tcga_data_validation_processed = process_validation_data(tcga_data_validation)
metrics_data = get_scoring_metrics(tcga_data_validation_processed)
num_entries = len(tcga_data_validation)
counts = tcga_data_validation_processed["Validation"].value_counts().to_dict()
num_disruptive = counts[0]
num_non_disruptive = counts[1]
metrics_data.insert(0, "TCGA", tcga_type)
metrics_data.insert(1, "#_Entries", num_entries)
metrics_data.insert(2, "#_Disruptive", num_disruptive)
metrics_data.insert(3, "#_Non_disruptive", num_non_disruptive)
return {
"data_validation": tcga_data_validation,
"data_validation_processed": tcga_data_validation_processed,
"metrics_data": metrics_data,
}
@staticmethod
def extract_output_dict(name, dict_obj):
folder_path = "outputs"
Path(f"{folder_path}").mkdir(parents=True, exist_ok=True)
current_date = datetime.now().strftime("%Y-%m-%d")
file_name = f"{name}_{current_date}.pickle"
file_path = op.join(folder_path, file_name)
if op.exists(file_path):
raise FileExistsError("File already exists")
pickle.dump(dict_obj, open(file_path, "wb"))
print("Object extracted successfully.")
@staticmethod
def load_output_dict(pickle_path):
obj_loaded = pickle.load(open(pickle_path, "rb"))
return obj_loaded
def test_entry_not_found(df, p, i):
a_b = df[
(df["P1"] == p) &
(df["P2"] == i)
]
b_a = df[
(df["P1"] == i) &
(df["P2"] == p)
]
assert len(a_b) == len(b_a) == 0
def get_scoring_metrics(tcga_validation_data):
y_true = tcga_validation_data["Validation"]
y_pred = tcga_validation_data["Prediction"]
metrics_data = pd.DataFrame(
[
accuracy_score(y_true, y_pred),
balanced_accuracy_score(y_true, y_pred),
f1_score(y_true, y_pred),
precision_score(y_true, y_pred),
recall_score(y_true, y_pred),
matthews_corrcoef(y_true, y_pred),
],
index=["ACCURACY", "BALANCED_ACCURACY", "F1", "PRECISION", "RECALL", "MATTHEWS_COR"]
).T
return metrics_data
def process_validation_data(tcga_data: pd.DataFrame):
"""
Process the validation data.
1. Drop N/A entries
2. Convert Labels as follows:
DISRUPTIVE → 0
NON_DISRUPTIVE → 1
3. Convert its type to int.
"""
tcga_processed = tcga_data[tcga_data["Validation"] != "N/A"].copy()
tcga_processed["Validation"] = tcga_processed["Validation"].replace(
{
ClassLabels.DISRUPTIVE: 0,
ClassLabels.NON_DISRUPTIVE: 1,
}
)
tcga_processed = tcga_processed.astype({"Validation": "int"})
return tcga_processed
|
ibrahimberb/Predicting-Mutation-Effects
|
src/dev/CancerValidation/A1/utils.py
|
utils.py
|
py
| 11,283 |
python
|
en
|
code
| 0 |
github-code
|
6
|
4818420912
|
import numpy as np
# This script contains all the functions required to fit a linear regression
# model with gradient descent. Run gradient_Descent and it returns the w vector,
# b and the cost history that model your data; the model can then be applied to
# new data for prediction.
def compute_pderivatives(listX, listY, listW, b):
m = listX.shape[0]
n = listX.shape[1]
listPDerivatives = np.zeros(n)
for i in range(n):
pDerivative = 0
for j in range(m):
pDerivative += (np.dot(listW,listX[j]) + b - listY[j]) * (listX[j][i])
listPDerivatives[i] = pDerivative
pDerivativeb = 0
for i in range(m):
pDerivativeb += (np.dot(listW,listX[i]) + b - listY[i])
pDerivativeb = pDerivativeb/m
listPDerivatives = listPDerivatives/m
return listPDerivatives, pDerivativeb
def costFunc(listX, listY, listW, b, lamb):
cost1 = 0.0
cost2 = 0.0
m = listX.shape[0]
for i in range(m):
cost1 += ((np.dot(listX[i],listW) + b) - listY[i])**2
cost1 = cost1/(2 *listX.shape[0])
for i in range(listW.shape[0]):
cost2 += (listW[i] ** 2)
cost2 = (cost2 * lamb) / (2 * m)  # regularization term: lambda/(2m) * sum(w^2)
return cost1+cost2
def gradient_Descent(listX, listY, listW, b, alpha, lamb):
costHistory = np.zeros(100000)
m = listX.shape[0]
for i in range(100000):
cost = costFunc(listX, listY, listW, b, lamb)
costHistory[i] = cost
listPDerivatives, pDerivativeB = compute_pderivatives(listX, listY, listW, b)
listW = listW - alpha * (lamb/m) * listW - alpha * listPDerivatives
b = b - alpha * pDerivativeB
return listW,b,costHistory
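# Hypothetical usage sketch (feature matrix X of shape (m, n), targets y of shape (m,)):
#   w0 = np.zeros(X.shape[1])
#   w, b, history = gradient_Descent(X, y, w0, 0.0, alpha=0.01, lamb=0.1)
#   predictions = X @ w + b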
|
slama0077/ML-Packages
|
RLinearDescent.py
|
RLinearDescent.py
|
py
| 1,664 |
python
|
en
|
code
| 0 |
github-code
|
6
|
48709281
|
from typing import *
class Solution:
@staticmethod
def gcd(a, b):
while b:
a, b = b, a%b
return a
def dfs(self, nums, opt, mask):
# mask: 0 unused, 1 used
n = len(nums)
ans = 0
if mask == (1 << n) - 1:  # all numbers paired
return ans
if self.memo[mask] > 0:
return self.memo[mask]
for i in range(n):
if mask & (1 << i) == 0:
for j in range(i+1, n):
if mask & (1 << j) == 0:
ans = max(ans, opt * self.gcd(nums[i], nums[j]) + self.dfs(nums, opt+1, mask | (1<<i) | (1<<j)))
self.memo[mask] = ans
return ans
def maxScore(self, nums: List[int]) -> int:
n = len(nums)
self.memo = [0] * (1<<n)
self.dfs(nums, 1, 0)
return self.memo[0]
if __name__ == "__main__":
s = Solution()
nums = [3,4,6,8]
assert s.maxScore(nums) == 11
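# Pairing (3, 6) at operation 1 and (4, 8) at operation 2 scores
# 1 * gcd(3, 6) + 2 * gcd(4, 8) = 3 + 8 = 11.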
|
code-cp/leetcode
|
solutions/1799/main.py
|
main.py
|
py
| 984 |
python
|
en
|
code
| 0 |
github-code
|
6
|
29528131446
|
from flask import Flask, request
app = Flask(__name__)
@app.route('/')
def home():
return "TP Florian Marques"
@app.route('/means', methods=['GET'])
def meanOfList():
values = request.args.getlist('int', type=int)
if len(values) == 0:
return "Given list is empty"
else:
return "Mean of the list is : {}".format(sum(values)/len(values))
|
MarquesFlorian/python_server_testing_florian_marques
|
app.py
|
app.py
|
py
| 362 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12486639819
|
import multiprocessing
import time
import hashlib
memory = list(range(30_000_000))
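# Note: this large module-level list makes process start-up cost visible; with
# the "spawn" start method each child re-imports the module and rebuilds it.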
def function(name):
for i in range(10):
print("Current:", name, i)
time.sleep(1)
def slow_function(name):
for i in range(10):
print("Current:", name, i)
for j in range(300_000):
hashlib.md5(str(j).encode("utf-8")).hexdigest()
if __name__ == "__main__":
for i in range(3):
name = chr(97 + i)
multiprocessing.Process(target=slow_function, args=(name, )).start()
|
tt-n-walters/21-tuesday-python
|
core/multiple_processes.py
|
multiple_processes.py
|
py
| 524 |
python
|
en
|
code
| 0 |
github-code
|
6
|
43082395961
|
n, nums = input().split(" ", 1)
n = int(n)
nums = list(map(int, nums.split(" ")))
# dynamic programming
def max_sum_increasing_subsequence(nums, n):
maximum = 0
dp = [0 for _ in range(n)]
for i in range(n):
dp[i] = nums[i]
for i in range(n):
for j in range(i):
if nums[i] > nums[j] and dp[i] < dp[j] + nums[i]:
dp[i] = dp[j] + nums[i]
for i in range(n):
if maximum < dp[i]:
maximum = dp[i]
return maximum
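# Worked example: nums = [1, 101, 2, 3, 100] -> dp grows to [1, 102, 3, 6, 106];
# the best increasing subsequence by sum is 1 + 2 + 3 + 100 = 106.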
print(max_sum_increasing_subsequence(nums, n))
|
chaosdevil/leetcode-problem-solving
|
dynamic_programming/maximum_sum_increasing_subsequence.py
|
maximum_sum_increasing_subsequence.py
|
py
| 547 |
python
|
en
|
code
| 0 |
github-code
|
6
|
16644551299
|
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
def ex_deal(df_Int, df_ex):
columns = ['顺序', '氮素', '频率', '刈割']
df_Int = pd.concat([df_Int, pd.DataFrame(columns=columns)])
for item in range(df_Int.shape[0]):
for jtem in range(df_ex.shape[0]):
if int(df_Int.iloc[item, 0]) == int(df_ex.iloc[jtem, 1]):
df_Int.loc[item, '顺序'] = df_ex.iloc[jtem, 1]
df_Int.loc[item, '氮素'] = df_ex.iloc[jtem, 2]
df_Int.loc[item, '频率'] = df_ex.iloc[jtem, 3]
df_Int.loc[item, '刈割'] = df_ex.iloc[jtem, 4]
# df_Int.drop([0, 19], inplace=True)
return df_Int
def MF(g, N="氮素",year=2010):
# if year==2008:
# if N == "刈割":
# g=
# if N == "频率":
#
# else:
if N == "刈割":
if g == 0.0:
g = "nm"
else:
g = "m"
if N == "频率":
if g == 2.0:
g = "l"
elif g == 0.0:
g = "nan"
else:
g = "h"
return g
def loop_chain_nan(year, gb, D, N="氮素"):
# if year == 2008:
# g = MF(g_[0], N,year=2008)
# D["loop"][g], D["nan"][g], D["chain"][g] = 0, 0, 0
# for g_ in gb:
# print(g_[1][3])
# for item in g_[1][3]:
# if item == 0: # chain
# D["chain"][g] += 1
# elif item == -0.15:
# D["nan"][g] += 1
# print(2008,item)
# else: # 环
# D["loop"][g] += 1
if year == 2009:
for g_ in gb:
g = MF(g_[0], N)
# if g!=0.0:
D["loop"][g],D["nan"][g], D["chain"][g] = 0, 0, 0
for item in g_[1][3]:
if item == 0: # chain
D["chain"][g] += 1
elif item == -0.15:
D["nan"][g] += 1
else: # loop
D["loop"][g] += 1
elif year > 2009:
for g_ in gb:
g = MF(g_[0], N)
for item in g_[1][3]:
if item == 0:
D["chain"][g] += 1
elif item == -0.15:
D["nan"][g] += 1
else:
D["loop"][g] += 1
return D
def main():
path = "C:/Users/97899/Desktop/N/"
df_ex = pd.read_excel(path + "实验处理_ex.xls")
ind = np.linspace(2008, 2020, 13)
D = {}
D["loop"], D["chain"], D["nan"] = {}, {}, {}
for year in ind:
df_cir = pd.read_excel(path + "Network/circle20.xls", sheet_name=str(int(year)))
df_cir = ex_deal(df_cir, df_ex)
gb = df_cir.groupby("氮素")
D = loop_chain_nan(year, gb, D)
gm = df_cir.groupby("刈割")
D = loop_chain_nan(year, gm, D, "刈割")
gf = df_cir.groupby("频率")
D = loop_chain_nan(year, gf, D, "频率")
print(D)
net_loop = []
net_chain = []
net_nan = []
'''氮素'''
for key in D["loop"].keys():
sum_ = D["loop"][key] + D["chain"][key] + D["nan"][key]
print(key,sum_)
net_loop.append(D["loop"][key] / sum_)
net_chain.append(D["chain"][key] / sum_)
net_nan.append(D["nan"][key] / sum_)
print("非竞争", len(net_nan), "链", len(net_chain), "环", len(net_loop))
labels = ['N=0', 'N=1', 'N=2', 'N=3', 'N=5', 'N=10', 'N=15', 'N=20', 'N=50']
width = 0.5 # the width of the bars: can also be len(x) sequence
net = (np.array(net_loop) + np.array(net_chain)).tolist()
print("竞争主导", len(net))
plt.xticks(fontsize=15)
plt.yticks(fontsize=15)
ax1 = plt.subplot(212)
print(net_loop[:9],net_chain[:9],net_nan[:9])
ax1.bar(labels, net_loop[:9], width, label='ICN', color="darkcyan")
ax1.bar(labels, net_chain[:9], width, bottom=net_loop[:9], label='TCN', color="turquoise")
ax1.bar(labels, net_nan[:9], width, bottom=net[:9], label='SCS', color="yellow")
ax1.set_ylabel('Ratio', fontdict={"size": 20})
ax1.set_xlabel('N addition rater'"$(gNm^{-2}year^{-1})$", fontdict={"size": 15})
width2 = 0.4
'''刈割'''
label_2 = ['No-Mowing', 'Mowing']
ax2 = plt.subplot(221)
ax2.bar(label_2, net_loop[9:11], width2, label='ICN', color="darkcyan")
ax2.bar(label_2, net_chain[9:11], width2, bottom=net_loop[9:11], label='TCN', color="turquoise")
ax2.bar(label_2, net_nan[9:11], width2, bottom=net[9:11], label='SCS', color="yellow")
ax2.set_ylabel('Ratio', fontdict={"size": 20})
ax2.set_xlabel('Mowing', fontdict={"size": 15})
'''频率'''
label_3 = ['Zero','Low (Twice)', 'High (Monthly)']
ax3 = plt.subplot(222) # 222
ax3.bar(label_3, net_loop[11:14], width2, label='ICN', color="darkcyan")
ax3.bar(label_3, net_chain[11:14], width2, bottom=net_loop[11:14], label='TCN', color="turquoise")
ax3.bar(label_3, net_nan[11:14], width2, bottom=net[11:14], label='SCS', color="yellow") # [11:13]
# ax3.set_ylabel('Ratio', fontdict={"size": 15})
ax3.set_xlabel('Frequency', fontdict={"size": 15})
ax3.legend(ncol=1, bbox_to_anchor=(1.2, 1), fontsize=13)
plt.show()
plt.savefig(path+'Figure/bar_distribution.png')
main()
|
QingqingSun-Bao/GitResp2
|
micro/Fig10_bar_distribution.py
|
Fig10_bar_distribution.py
|
py
| 5,270 |
python
|
en
|
code
| 0 |
github-code
|
6
|
27933308570
|
import time
from clusterbot import ClusterBot, activate_logger
# Print confirmation about sent Slack messages
activate_logger()
# Debug mode
# activate_logger('DEBUG')
# Send a message to the default user specified in your config files.
bot = ClusterBot()
message_id = bot.send("Starting example script.")
# Reply to the default user message from above (opens a new Thread).
bot.reply(message_id, "Waiting for 5s.")
# Wait 5 seconds.
time.sleep(5)
# Reply again.
bot.reply(message_id, "5s have passed.")
# Send a message to someone else (not default user)
message_id = bot.send(
"Hi Denis. I started using ClusterBot :tada:", user_name="Denis Alevi"
)
# Reply to that message (message_id has to belong to a message exchanged with
# ``user_name``)
bot.reply(message_id, "And I ran the example script!", user_name="Denis Alevi")
# Upload a file to your slack chat
message_id = bot.upload(file_name="README.md", message="Upload of README.md")
# Update or append to a previously send message
message_id = bot.reply(message_id, "An answer to this file")
bot.update(message_id, "An updated answer to this figure, deleting the previous one.")
bot.append(message_id, "Another update, which is appended and keeps the previous one")
# Initialize and update a progress bar
bot.init_pbar(10, title="This is a progress bar with a title")
for i in range(10):
message_new = bot.update_pbar()
time.sleep(1)
|
sprekelerlab/slack-clusterbot
|
example_script.py
|
example_script.py
|
py
| 1,406 |
python
|
en
|
code
| 2 |
github-code
|
6
|
23907002609
|
import numpy as np
import pandas as pd
import scipy.spatial.distance as spd
import scipy.stats as sps
import sklearn.model_selection as skm
import sklearn.metrics as skmetrics
import matplotlib.pyplot as plt
import seaborn as sb
from hw1_modules import *
# read data from CSV to array
data = np.array(pd.read_csv("train.csv").values)
#separate values and labels into separate arrays
values = data[:,1:]
labels = data[:,0]
#convert labels array to vertical, 2d array of one column
labels = np.expand_dims(labels, axis=1)
#initialize confusion matrix
cf = np.zeros((10, 10), dtype=int)
#initialize cumulative accuracy
accuracy = 0
#set number of folds
number_folds = 3
#set k for k neighbors
k_neighbors = 3
#create kfold iterating object
kf = skm.KFold(n_splits=number_folds)
for train_idx, test_idx in kf.split(values, labels):
print("Dividing data")
#subset data using indexes generated by kfold object
train_data = values[train_idx]
test_data = values[test_idx]
train_labels = labels[train_idx]
test_lables = labels[test_idx]
#run one iteration of testing with knn
print("Testing data")
predicted_labels = knn_predict_class(train_data, train_labels, test_data, k_neighbors)
print("Accuracy for this run" + str(sum(predicted_labels == test_lables)/len(test_lables)))
#cumulative accuracy
accuracy += sum(predicted_labels == test_lables)/len(test_lables)
#add this run's confusion values to cumulative confusion matrix
cf = cf + skmetrics.confusion_matrix(test_lables, predicted_labels)
#calculate average accuracy from cumulative
accuracy = accuracy/number_folds
print(accuracy)
#Create and display plot for confusion matrix
ax = sb.heatmap(cf, annot=True, fmt="d")
ax.set(xlabel="Predicted Label", ylabel="True Label")
plt.show()
|
terry99999/M_hw1
|
knn.py
|
knn.py
|
py
| 1,791 |
python
|
en
|
code
| 0 |
github-code
|
6
|
32474300219
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'signup', views.ProfileViewSet)
router.register(r'add_animal', views.AddAnimalViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^login/', views.login),
]
|
stoic1979/pashu_palak_sahayak
|
api/urls.py
|
urls.py
|
py
| 439 |
python
|
en
|
code
| 1 |
github-code
|
6
|
5070831792
|
import pandas as pd
import matplotlib.pyplot as plt
# Load the CSV file and set the type of the date column
reviews = pd.read_csv("branch_reviews.csv", index_col=0, parse_dates=["date"])
# Display a histogram of the number of reviews by date
reviews["date"].hist()
#plt.show()
# Create a new dataframe from reviews with the average rating for each date
reviews_by_date = reviews.groupby("date").mean()
# Plot a 30-day moving average rating per date
reviews_by_date[90:].rolling(30).mean().plot()
#plt.show()
# Print the number of reviews by rating
print(reviews["rating"].value_counts())
# Describe reviews
print(reviews.describe())
|
carsten-hohnke/review_analysis
|
analyze_reviews.py
|
analyze_reviews.py
|
py
| 638 |
python
|
en
|
code
| 0 |
github-code
|
6
|
70047131707
|
import xml.etree.ElementTree as ET
import pandas as pd
import numpy as np
import cv2 as cv
def draw_label(path):
tree = ET.parse(path)
img_out = np.zeros(shape=(1024, 1280))
img_list_x = []
img_list_y = []
for elem in tree.iterfind('object'):
mylist_x = []
mylist_y = []
# print(elem.tag, elem.attrib)
for elem_1 in elem.iterfind('polygon/pt'):
object_x = elem_1.find("x").text
object_y = elem_1.find("y").text
x = int(object_x)
y = 1024 - int(object_y)
if x < 0:
x = 0
if x > 1279:
x = 1279
if y < 0:
y = 0
if y > 1023:
y = 1023
mylist_x.append(x)
mylist_y.append(y)
img_list_x.append(x)
img_list_y.append(y)
img_out.itemset((y, x), 255)
mylist = list(zip(mylist_x, mylist_y))
pts = np.array(mylist, np.int32)
        cv.polylines(img_out, [pts], True, (255, 255, 255), 2)  # draw the polygon outline
        cv.fillPoly(img_out, [pts], (255, 255, 255))  # fill the polygon interior
    Alllist = list(zip(img_list_x, img_list_y))  # collect all annotation points
# cv.imwrite('./picture/label.png', img_out)
return img_out
def getlabel(path):
img1 = draw_label(path)
list_out = np.zeros(shape=(1024, 1280))
for i in range(img1.shape[0]):
for j in range(img1.shape[1]):
if img1[i, j] == 255:
list_out[i, j] = 1
return list_out
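# Note: the nested loops above are equivalent to the vectorized expression
# (img1 == 255).astype(float), which would run considerably faster.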
|
Bagpip/-HSI-
|
label_test.py
|
label_test.py
|
py
| 1,589 |
python
|
en
|
code
| 0 |
github-code
|
6
|
10164344104
|
import sys
import os
def print_usage():
print("Usage:")
print("stringer.py 1 <file> <string_to_count>")
print(" 2 <file> <string_to_remove>")
if __name__ == "__main__":
if len(sys.argv) < 2 or sys.argv[1] == "-h":
print_usage()
exit(1)
if (sys.argv[1] == '1'):
print(f"Counting '{sys.argv[3]}' in {os.path.join(os.getcwd(), sys.argv[2])}")
occurrences = 0
with open(sys.argv[2]) as input_file:
for line in input_file:
                occurrences += line.count(sys.argv[3])
print(f"There are {occurrences} occurrences")
|
twono/PythonUtils
|
stringer.py
|
stringer.py
|
py
| 646 |
python
|
en
|
code
| 0 |
github-code
|
6
|
14400222496
|
#!/usr/bin/env python3.8
"""
Given two rectangles, determine if they overlap. The rectangles are defined as a Dictionary, for example:
r1 = {
# x and y coordinates of the bottom-left corner of the rectangle
'x': 2 , 'y': 4,
# Width and Height of rectangle
'w':5,'h':12}
"""
import copy
def overlap(coord1, dim1, coord2, dim2):
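    # 1-D interval overlap: intervals [coord1, coord1 + dim1] and
    # [coord2, coord2 + dim2] intersect where the larger start meets the
    # smaller end; if the start exceeds the end there is no overlap.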
greater = max(coord1, coord2)
smaller = min(coord1+dim1, coord2+dim2)
if greater > smaller:
return None, None
return greater, smaller - greater
def rectOverlap(R_1, R_2):
x_overlap, x_dim = overlap(R_1['x'], R_1['w'], R_2['x'], R_2['w'])
y_overlap, y_dim = overlap(R_1['y'], R_1['h'], R_2['y'], R_2['h'])
    if x_dim is None or y_dim is None:
        print("no overlap")
        return
    if x_dim == 0 or y_dim == 0:
        if x_dim or y_dim:
            print('side overlap at', x_overlap, x_dim, y_overlap, y_dim)
        else:
            print("point overlap at", x_overlap, y_overlap)
        return
    print(x_overlap, x_dim, y_overlap, y_dim)
r1 = {'x': 2, 'y': 4, 'w':5, 'h':12}
r2 = {'x': 1, 'y': 5, 'w':7, 'h':14}
rectOverlap(r1, r2)
r1 = {'x': 5, 'y': 5, 'w':5, 'h':5}
r2 = {'x': 1, 'y': 5, 'w':4, 'h':14}
rectOverlap(r1, r2)
r1 = {'x': 5, 'y': 4, 'w':5, 'h':5}
r2 = {'x': 1, 'y': 1, 'w':4, 'h':3}
rectOverlap(r1, r2)
r1 = {'x': 5, 'y': 4, 'w':5, 'h':5}
r2 = {'x': 1, 'y': 1, 'w':4, 'h':2}
rectOverlap(r1, r2)
|
dnootana/Python
|
Interview/RectangleOverlap.py
|
RectangleOverlap.py
|
py
| 1,356 |
python
|
en
|
code
| 0 |
github-code
|
6
|
854208264
|
import core.modules
import core.modules.module_registry
from core.modules.vistrails_module import Module, ModuleError
import numpy
import scipy
import scipy.ndimage
from Array import *
from Matrix import *
class ArrayImaging(object):
my_namespace = 'numpy|imaging'
class ExtractRGBAChannel(ArrayImaging, Module):
""" Extract a single color channel from an array representing an
RGBA type image. This will return a 2D array with the single channel
specified as the scalar elements """
def compute(self):
im = self.get_input("Image").get_array()
chan = self.get_input("Channel")
ar = im[:,:,chan]
out = NDArray()
out.set_array(ar)
self.set_output("Output Array", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Image", (NDArray, 'Image Array'))
reg.add_input_port(cls, "Channel", (basic.Integer, 'Channel'))
reg.add_output_port(cls, "Output Array", (NDArray, 'Output Array'))
class GaussianGradientMagnitude(ArrayImaging, Module):
""" Calculate the Gradient Magnitude of an input NDArray using gaussian derivatives.
The standard-deviation of the Gaussian filter are given for each axis as a sequence
or as a single number, in which case the filter will be isotropic. """
def compute(self):
im = self.get_input("Image")
sigma = self.get_input_list("Sigmas")
if len(sigma) <= 1:
sigma = sigma[0]
der = scipy.ndimage.gaussian_gradient_magnitude(im.get_array(), sigma)
out = NDArray()
out.set_array(der)
self.set_output("Output Array", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Image", (NDArray, 'Image Array'))
reg.add_input_port(cls, "Sigmas", (basic.Float, 'Standard Deviations'))
reg.add_output_port(cls, "Output Array", (NDArray, 'Output Array'))
class JointHistogram(ArrayImaging, Module):
""" Calculate the Joint Histogram of 2 inputs. The inputs can be of arbitrary dimension,
but must be equivalently sized. """
def compute(self):
in_x = self.get_input("Array One").get_array()
in_y = self.get_input("Array Two").get_array()
size_x = self.get_input("Bins X")
size_y = self.get_input("Bins Y")
take_log = True
if self.has_input("Log10"):
take_log = self.get_input("Log10")
out_ar = numpy.zeros((size_x, size_y))
min_x = in_x.min()
max_x = in_x.max() - min_x
min_y = in_y.min()
max_y = in_y.max() - min_y
in_x = in_x.flatten()
in_y = in_y.flatten()
for i in xrange(in_x.size):
x_cor = int(((in_x[i] - min_x)/max_x) * (size_x - 1))
y_cor = int(((in_y[i] - min_y)/max_y) * (size_y - 1))
out_ar[x_cor,y_cor] += 1.0
if take_log:
out_ar = out_ar + 1.0
out_ar = scipy.log(out_ar)
out = NDArray()
out_ar = out_ar.transpose()
out_ar = out_ar[::-1]
out.set_array(out_ar)
self.set_output("Joint Histogram", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Array One", (NDArray, 'X Axis Input'))
reg.add_input_port(cls, "Array Two", (NDArray, 'Y Axis Input'))
reg.add_input_port(cls, "Log10", (basic.Boolean, 'Use Log of Histogram'), True)
reg.add_input_port(cls, "Bins X", (basic.Integer, 'Number of X Bins'))
reg.add_input_port(cls, "Bins Y", (basic.Integer, 'Number of Y Bins'))
reg.add_output_port(cls, "Joint Histogram", (NDArray, 'Joint Histogram'))
class GaussianSmooth(ArrayImaging, Module):
""" Smooth the Input array with a multi-dimensional gaussian kernel.
The standard-deviation of the Gaussian filter are given for each axis as a sequence
or as a single number, in which case the filter will be isotropic. """
def compute(self):
im = self.get_input("Input Array")
sigma = self.get_input_list("Sigmas")
if len(sigma) <= 1:
sigma = sigma[0]
der = scipy.ndimage.gaussian_filter(im.get_array(), sigma)
out = NDArray()
out.set_array(der)
self.set_output("Output Array", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Input Array", (NDArray, 'Image Array'))
reg.add_input_port(cls, "Sigmas", (basic.Float, 'Standard Deviations'))
reg.add_output_port(cls, "Output Array", (NDArray, 'Output Array'))
class MedianFilter(ArrayImaging, Module):
""" Smooth the Input array with a multi-dimensional median filter. """
def compute(self):
im = self.get_input("Input Array")
k_size = self.get_input("Size")
der = scipy.ndimage.median_filter(im.get_array(), size=k_size)
out = NDArray()
out.set_array(der)
self.set_output("Output Array", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Input Array", (NDArray, 'Image Array'))
reg.add_input_port(cls, "Size", (basic.Integer, 'Kernel Size'))
reg.add_output_port(cls, "Output Array", (NDArray, 'Output Array'))
class ImageDifference(ArrayImaging, Module):
""" Calculate the difference between two input images. """
def compute(self):
im = self.get_input("Input 1")
im2 = self.get_input("Input 2")
da_ar = im.get_array() - im2.get_array()
da_ar = numpy.abs(da_ar)
out = NDArray()
out.set_array(da_ar)
self.set_output("Output", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Input 1", (NDArray, 'Image Array'))
reg.add_input_port(cls, "Input 2", (NDArray, 'Image Array'))
reg.add_output_port(cls, "Output", (NDArray, 'Output Array'))
class ImageNormalize(ArrayImaging, Module):
""" Move the range of the image to [0,1] """
def compute(self):
im = self.get_input("Input")
im_max = im.get_array().max()
im_ar = im.get_array() / im_max
out = NDArray()
out.set_array(im_ar)
self.set_output("Output", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Input", (NDArray, 'Image Array'))
reg.add_output_port(cls, "Output", (NDArray, 'Output Array'))
class SobelGradientMagnitude(ArrayImaging, Module):
""" Use n-dimensional sobel kernels to compute the gradient magnitude
of an image """
def compute(self):
im = self.get_input("Input").get_array()
mag = numpy.zeros(im.shape)
for i in xrange(im.ndim):
kern = scipy.ndimage.sobel(im, axis=i)
mag += kern*kern
out = NDArray()
out.set_array(numpy.sqrt(mag))
self.set_output("Output", out)
@classmethod
def register(cls, reg, basic):
reg.add_module(cls, namespace=cls.my_namespace)
reg.add_input_port(cls, "Input", (NDArray, 'Image Array'))
reg.add_output_port(cls, "Output", (NDArray, 'Output Array'))
|
VisTrails/VisTrails
|
contrib/NumSciPy/Imaging.py
|
Imaging.py
|
py
| 7,502 |
python
|
en
|
code
| 100 |
github-code
|
6
|
29580890531
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, api
class ProductTemplate(models.Model):
_inherit = 'product.template'
@api.multi
def open_related_partnerinfo(self):
result = self._get_act_window_dict(
'product_pricelist_partnerinfo.pricelist_partnerinfo_action')
result['domain'] = "[('product_tmpl_id', 'in', " + str(self.ids) + ")]"
return result
class ProductProduct(models.Model):
_inherit = 'product.product'
@api.multi
def open_related_partnerinfo(self):
self.ensure_one()
result = self.product_tmpl_id._get_act_window_dict(
'product_pricelist_partnerinfo.pricelist_partnerinfo_action')
result['domain'] = ("[('product_tmpl_id', '=', " +
str(self.product_tmpl_id.id) + ")]")
return result
|
odoomrp/odoomrp-utils
|
product_pricelist_partnerinfo/models/product.py
|
product.py
|
py
| 1,677 |
python
|
en
|
code
| 36 |
github-code
|
6
|
23497624891
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import sys
BUFSIZE = 1024
def start_client(address):
tcpCliSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcpCliSock.connect(address)
while True:
data = raw_input('> ')
if not data:
break
tcpCliSock.send(data)
data = tcpCliSock.recv(BUFSIZE)
if not data:
break
print(data)
if __name__ == '__main__':
host = '127.0.0.1'
port = 1234
if len(sys.argv) == 2:
host = sys.argv[1]
elif len(sys.argv) == 3:
host = sys.argv[1]
port = int(sys.argv[2])
start_client((host, port))
|
Furzoom/learnpython
|
app/test/tsTclnt.py
|
tsTclnt.py
|
py
| 675 |
python
|
en
|
code
| 0 |
github-code
|
6
|
31533830486
|
x_house_height = float(input())
y_side_wall_lenght = float(input())
h_triangle_side_height = float(input())
window_side = 1.5
door_side_a = 1.2
door_side_b = 2
window_area = window_side * window_side
windows_total_area = window_area * 2
door_area = door_side_a * door_side_b
side_wall_area = x_house_height * y_side_wall_lenght
side_walls_area = side_wall_area * 2 - windows_total_area
front_wall_area = x_house_height * x_house_height
front_back_walls_area = front_wall_area * 2 - door_area
roof_side_wall_area = x_house_height * y_side_wall_lenght
roof_side_walls_area = roof_side_wall_area * 2
roof_front_side_area = x_house_height * h_triangle_side_height / 2
roof_front_back_sides_area = roof_front_side_area * 2
walls_total_area = side_walls_area + front_back_walls_area
roof_total_area = roof_side_walls_area + roof_front_back_sides_area
green_paint = walls_total_area / 3.4
red_paint = roof_total_area / 4.3
print(f'{green_paint:.2f}')
print(f'{red_paint:.2f}')
|
iliyan-pigeon/Soft-uni-Courses
|
programming_basics_python/first_steps_more_exercises/house_painting.py
|
house_painting.py
|
py
| 970 |
python
|
en
|
code
| 0 |
github-code
|
6
|
31366334882
|
import torch
import torch.nn as nn
from GAWWN.tools.config import cfg
from GAWWN.tools.tools import replicate
class keyMulD(nn.Module):
def __init__(self):
super(keyMulD, self).__init__()
self.ndf = cfg.GAN.NDF
self.nt_d = cfg.TEXT.TXT_FEATURE_DIM
self.keypoint_dim = cfg.KEYPOINT.DIM
self.conv = nn.Sequential(
nn.Conv2d(self.nt_d + self.ndf * 2, self.ndf * 2, 3, 1, 1),
nn.BatchNorm2d(self.ndf * 2),
nn.LeakyReLU(0.2, True)
)
def forward(self, imgGlobal, prep_txt_d, locs):
prep_txt_d = replicate(prep_txt_d, 2, self.keypoint_dim) # (bs, nt_d, 16)
prep_txt_d = replicate(prep_txt_d, 3, self.keypoint_dim) # (bs, nt_d, 16, 16)
imgTextGlobal = torch.cat((imgGlobal, prep_txt_d), 1) # (bs, nt_d + ndf * 2, 16, 16)
imgTextGlobal = self.conv(imgTextGlobal) # (bs, ndf * 2, 16, 16)
# loc (bs, num_elt, keypoint_dim, keypoint_dim)
locs = torch.sum(locs, 1) # (bs, keypoint_dim, keypoint_dim)
locs = torch.clamp(locs, 0, 1)
locs = replicate(locs, 1, self.ndf * 2)
x = imgTextGlobal * locs
return x
class regionD(nn.Module):
def __init__(self):
super(regionD, self).__init__()
self.ndf = cfg.GAN.NDF
self.num_elt = cfg.KEYPOINT.NUM_ELT
self.F_KeyMulD = keyMulD()
self.conv = nn.Sequential(
nn.Conv2d(self.ndf * 2 + self.num_elt, self.ndf * 2, 1),
nn.BatchNorm2d(self.ndf * 2),
nn.LeakyReLU(0.2, True),
nn.Conv2d(self.ndf * 2, self.ndf, 2)
)
self.LReLU = nn.LeakyReLU(0.2, True)
def forward(self, imgGlobal, prep_txt_d, locs):
keyMul = self.F_KeyMulD(imgGlobal, prep_txt_d, locs)
x = torch.cat((keyMul, locs), 1) # (bs, ngf * 2 + num_elt, 16, 16)
x = x.contiguous()
x = self.conv(x)
x = x.mean(3)
x = x.mean(2)
x = self.LReLU(x)
return x
class globalD(nn.Module):
def __init__(self):
super(globalD, self).__init__()
self.ndf = cfg.GAN.NDF
self.nt_d = cfg.TEXT.TXT_FEATURE_DIM
self.convGlobal = nn.Sequential(
nn.Conv2d(self.ndf * 2, self.ndf * 4, 4, 2, 1),
nn.BatchNorm2d(self.ndf * 4),
nn.LeakyReLU(0.2, True),
nn.Conv2d(self.ndf * 4, self.ndf * 8, 4, 2, 1),
nn.BatchNorm2d(self.ndf * 8),
nn.LeakyReLU(0.2, True)
)
self.conv = nn.Sequential(
nn.Conv2d(self.ndf * 8 + self.nt_d, self.ndf * 4, 1),
nn.BatchNorm2d(self.ndf * 4),
nn.LeakyReLU(0.2, True),
nn.Conv2d(self.ndf * 4, self.ndf, 4),
nn.BatchNorm2d(self.ndf),
nn.LeakyReLU(0.2, True)
)
def forward(self, imgGlobal, prep_txt_d):
img = self.convGlobal(imgGlobal) # (bs, ndf * 8, 4, 4)
txtGlobal = replicate(prep_txt_d, 2, 4) # (bs, nt_d, 4)
txtGlobal = replicate(txtGlobal, 3, 4) # (bs, nt_d, 4, 4)
imgTxtGlobal = torch.cat((img, txtGlobal), 1) # (bs, nt_d + ndf * 8, 4 ,4)
imgTxtGlobal = imgTxtGlobal.contiguous()
imgTxtGlobal = self.conv(imgTxtGlobal) # (bs, ndf, 1, 1)
imgTxtGlobal = imgTxtGlobal.view(-1, self.ndf)
return imgTxtGlobal
class Dis(nn.Module):
def __init__(self):
super(Dis, self).__init__()
self.ndf = cfg.GAN.NDF
self.nt = cfg.TEXT.TXT_EMBEDDING_DIM
self.nt_d = cfg.TEXT.TXT_FEATURE_DIM
self.prep_txtD = nn.Sequential(
nn.Linear(self.nt, self.nt_d),
nn.LeakyReLU(0.2, True)
)
self.imgGlobalD = nn.Sequential(
nn.Conv2d(3, self.ndf, 4, 2, 1),
nn.LeakyReLU(0.2, True),
nn.Conv2d(self.ndf, self.ndf, 4, 2, 1),
nn.LeakyReLU(0.2, True),
nn.Conv2d(self.ndf, self.ndf * 2, 4, 2, 1),
nn.BatchNorm2d(self.ndf * 2),
nn.LeakyReLU(0.2, True),
nn.Conv2d(self.ndf * 2, self.ndf * 2, 3, 1, 1),
nn.BatchNorm2d(self.ndf * 2),
nn.LeakyReLU(0.2, True)
)
self.F_regionD = regionD()
self.F_globalD = globalD()
self.judge = nn.Sequential(
nn.Linear(self.ndf * 2, self.ndf),
nn.BatchNorm1d(self.ndf),
nn.LeakyReLU(0.2, True),
nn.Linear(self.ndf, 1),
nn.Sigmoid()
)
def forward(self, img, txt, locs):
prep_txt_d = self.prep_txtD(txt)
image_Global = self.imgGlobalD(img)
region_d = self.F_regionD(image_Global, prep_txt_d, locs)
global_d = self.F_globalD(image_Global, prep_txt_d)
x = torch.cat((region_d, global_d), 1)
x = self.judge(x)
return x
|
LosSherl/GAWWN.Pytorch
|
GAWWN/model/discriminator.py
|
discriminator.py
|
py
| 4,867 |
python
|
en
|
code
| 0 |
github-code
|
6
|
27535721148
|
import json
from web3 import Web3
from decimal import Decimal
from router import*
import time
# add blockchain connection information
cronos_mainnet_rpc = "ws://rpc.vvs.finance/"
w3 = Web3(Web3.WebsocketProvider(cronos_mainnet_rpc, websocket_timeout= 6000))
ERC20ABI = json.load(open('./erc20_abi.abi'))
#getSelector("swapExactTokensForTokens(uint256,uint256,address[],address,uint256)")= 0x38ed1739
#getSelector("swapExactETHForTokens(uint256 amountOutMin, address[] path, address to, uint256 deadline)")= 0x7ff36ab5
#getSelector("swapExactTokensForETH(uint256,uint256,address[],address,uint256)")= 0x18cbafe5
mycontract = '0x109C48345e84459C658e79e806F6DdB236DbDD26'
# multilswap = Web3.toChecksumAddress(mycontract)
# multilswap_abi = json.loads()
# multilswap_contract = w3.eth.contract(address = multilswap, abi= multilswap_abi)
# amountIn = optimalAmount
def dataswap_encode(contract, amountIn, amountOut, path, mycontract):
deadline = 1000
dataswap = contract.encodeABI(fn_name="swapExactTokensForTokens", args=[amountIn,amountOut, path, mycontract,deadline])
return dataswap
def dataswap(route,tokenIn, tokenOut, amountIn, mycontract):
# route = trade['route']
tos = []
tos = [t['router:'] for t in route]
data = []
_tokenInapproveaddr = []
n= 0
for pair in route:
if pair['router:'] == '0x145863Eb42Cf62847A6Ca784e6416C1682b1b2Ae':
contract = VVS_ROUTER_CONTRACT
elif pair['router:'] == '0x145677FC4d9b8F19B5D56d1820c48e0443049a30':
contract = MMF_ROUTER_CONTRACT
elif pair['router:'] == '0xcd7d16fB918511BF7269eC4f48d61D79Fb26f918':
contract = CRONA_ROUTER_CONTRACT
elif pair['router:'] == '0x5bFc95C3BbF50579bD57957cD074fa96a4d5fF9F':
contract = CYBORG_ROUTER_CONTRACT
        if pair['token0']['address'] == tokenIn['address']:
            tokenOut = pair['token1']
        else:
            tokenOut = pair['token0']
        path = [tokenIn['address'], tokenOut['address']]
        _tokenInapproveaddr.append(tokenIn['address'])
        amountOut_list = contract.functions.getAmountsOut(amountIn, path).call()
        amountOut = amountOut_list[1]
        print('amountout:', n, amountOut)
        encode = dataswap_encode(contract, amountIn, amountOut, path, mycontract)
        data.append(encode)
        tokenIn = tokenOut
        amountIn = amountOut
        n += 1
print('profit:', amountIn - 50*pow(10,18))
return tos, data, _tokenInapproveaddr
|
Galahad091/My-arb-on-fantom
|
test/encode_data.py
|
encode_data.py
|
py
| 2,926 |
python
|
en
|
code
| 0 |
github-code
|
6
|
10426113272
|
from measurements.models import Location, Station, SourceType, Network
from django.contrib.gis.geos import Point
import requests
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
import pandas as pd
import re
IOC = "http://www.ioc-sealevelmonitoring.org/station.php?code={}"
stations = (
('Trieste', 'TR22'),
('Venice', 'VE19'),
('Ancona', 'AN15'),
('S. Benedetto Del Tronto', 'SB36'),
('Stari Grad', 'stari'),
('Vela Luka', 'vela'),
('Sobra', 'sobr'),
('Otranto', 'OT15'),
('Kerkyra, Corfu', 'corf'),
('Crotone', 'CR08'),
('Le Castella', 'lcst'),
('Itea', 'itea'),
('Panormos', 'pano'),
('Aigio', 'aigi'),
('Katakolo', 'kata'),
# ('Kyparissia', 'kypa'),
)
ioc_source, created = SourceType.objects.get_or_create(code='ioc')
ioc_network, created = Network.objects.get_or_create(code='ioc')
# IOC stations
for label, code in stations:
r = requests.get(IOC.format(code))
# print(r.text)
    soup = BeautifulSoup(r.text, 'html.parser')
for elem in soup(text='Latitude '):
lat = float(elem.find_next('td').contents[0])
for elem in soup(text='Longitude '):
lon = float(elem.find_next('td').contents[0])
# print(lon, lat)
l, created = Location.objects.get_or_create(label=label)
l.geo = Point(lon, lat)
l.save()
# print(label, l)
s, created = Station.objects.get_or_create(code=code,
label=label,
source=ioc_source,
network=ioc_network,
location=l)
|
CNR-ISMAR/ecoads
|
scripts/import_station_locations.py
|
import_station_locations.py
|
py
| 1,738 |
python
|
en
|
code
| 0 |
github-code
|
6
|
2048923412
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
sort.Sort
Sort algorithm.
'''
class Sort(object):
__CUTOFF = 5
def _median3(arr, beg, end):
mid = (beg + end) // 2
if arr[beg] > arr[mid]:
arr[beg], arr[mid] = arr[mid], arr[beg]
if arr[mid] > arr[end]:
arr[mid], arr[end] = arr[end], arr[mid]
if arr[beg] > arr[mid]:
arr[beg], arr[mid] = arr[mid], arr[beg]
arr[mid], arr[end-1] = arr[end-1], arr[mid]
return arr[end-1]
def _qsort(arr, low, high, cmp_obj):
if high - low < Sort.__CUTOFF:
Sort._isort(arr, low, high, cmp_obj)
else:
pivot = Sort._median3(arr, low, high)
lbeg = low + 1
rend = high - 2
while True:
while arr[lbeg] < pivot:
lbeg += 1
while arr[rend] > pivot:
rend -= 1
if lbeg < rend:
arr[lbeg], arr[rend] = arr[rend], arr[lbeg]
lbeg += 1
rend -= 1
else:
arr[lbeg], arr[high-1] = arr[high-1], arr[lbeg]
break
Sort._qsort(arr, low, lbeg - 1, cmp_obj)
Sort._qsort(arr, lbeg + 1, high, cmp_obj)
def _isort(arr, low, high, cmp_obj):
i = low + 1
while i <= high:
tmp = arr[i]
j = i - 1
while j >= low:
if tmp < arr[j]:
arr[j+1] = arr[j]
else:
break
j -= 1
            arr[j+1] = tmp  # note: insert at j+1, not j
i += 1
def _shsort(arr, low, high, cmp_obj):
step = (high - low + 1) // 2
while step > 0:
i = step
while i <= high:
tmp = arr[i]
j = i - step
while j >= low:
if tmp < arr[j]:
arr[j+step] = arr[j]
else:
break
j -= step
                arr[j+step] = tmp  # note: insert at j+step, not j
i += 1
step //= 2
def _perc_down(arr, pos, cnt, cmp_obj):
        '''Used for heap adjustment'''
lchild = lambda x: 2 * x + 1
tmp = arr[pos]
p = pos
while lchild(p) < cnt:
lc = lchild(p)
rc = lc + 1
min_child = rc if rc < cnt and arr[rc] < arr[lc] else lc
if tmp > arr[min_child]:
arr[p] = arr[min_child]
p = min_child
else:
break
arr[p] = tmp
def _swim_up(arr, pos, cmp_obj):
        '''Used for heap adjustment'''
root = lambda x: x//2-1 if x%2 == 0 else x//2
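        # Both branches reduce to the usual parent index, (x - 1) // 2.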
tmp = arr[pos]
p = pos
while root(p) >= 0:
min_child = root(p)
if tmp < arr[min_child]:
arr[p] = arr[min_child]
p = min_child
else:
break
arr[p] = tmp
def make_heap(arr, cmp_obj):
cnt = len(arr)
pos = cnt // 2 - 1
# From the last node which has children,
# to the first node, do perc_down
while pos >= 0:
Sort._perc_down(arr, pos, cnt, cmp_obj)
pos -= 1
def _merge(arr, beg, mid, end, cmp_obj):
i = 0
lbeg = beg
rbeg = mid
rend = end
aux = [None] * (end - beg + 1)
while lbeg < mid and rbeg < end + 1:
if arr[lbeg] < arr[rbeg]:
aux[i] = arr[lbeg]
lbeg += 1
else:
aux[i] = arr[rbeg]
rbeg += 1
i += 1
while lbeg < mid:
aux[i] = arr[lbeg]
i += 1
lbeg += 1
while rbeg < end + 1:
aux[i] = arr[rbeg]
i += 1
rbeg += 1
# copy sorted data back
i = 0
while beg <= end:
arr[beg] = aux[i]
beg += 1
i += 1
def _msort(arr, beg, end, cmp_obj):
if end - beg + 1 < Sort.__CUTOFF:
Sort._isort(arr, beg, end, cmp_obj)
else:
Sort._msort(arr, beg, (beg+end)//2, cmp_obj)
Sort._msort(arr, (beg+end)//2+1, end, cmp_obj)
Sort._merge(arr, beg, (beg+end)//2+1, end, cmp_obj)
def is_sorted(arr, cmp_obj = None):
ret = True
i = 1
while i < len(arr):
if arr[i] < arr[i-1]:
ret = False
break
i += 1
return ret
def qsort(arr, cmp_obj=None):
Sort._qsort(arr, 0, len(arr)-1, cmp_obj)
def isort(arr, cmp_obj=None):
Sort._isort(arr, 0, len(arr)-1, cmp_obj)
def shsort(arr, cmp_obj=None):
Sort._shsort(arr, 0, len(arr)-1, cmp_obj)
def msort(arr, cmp_obj=None):
Sort._msort(arr, 0, len(arr)-1, cmp_obj)
def hsort(arr, cmp_obj=None):
        Sort.make_heap(arr, cmp_obj)
i = len(arr) - 1
while i > 0:
arr[0], arr[i] = arr[i], arr[0]
Sort._perc_down(arr, 0, i, cmp_obj)
i -= 1
if __name__ == '__main__':
import random
arr = random.sample(range(500), 10)
arr = arr * 3
print("before sort: %s" %arr)
Sort.shsort(arr)
print("after shsort: %s" %arr)
assert Sort.is_sorted(arr)
Sort.msort(arr)
print("after msort: %s" %arr)
assert Sort.is_sorted(arr)
    Sort.hsort(arr)
    print("after hsort: %s" %arr)
    assert Sort.is_sorted(arr)
    Sort.qsort(arr)
    assert Sort.is_sorted(arr)
    print("after qsort: %s" %arr)
|
zhencliu/learning_python
|
sort/Sort.py
|
Sort.py
|
py
| 5,879 |
python
|
en
|
code
| 0 |
github-code
|
6
|
23055488423
|
"""
Creation:
Author: Martin Grunnill
Date: 2022-11-01
Description: Getting prevalence data for world cup teams.
"""
import copy
import pandas as pd
import datetime
schedule_df = pd.read_csv('data_extraction/Fifa 2022 Group stages matches with venue capacity.csv')
covid_data = pd.read_csv('https://covid.ourworldindata.org/data/owid-covid-data.csv')
population_df = pd.read_csv('data_extraction/Population estimates world bank.csv',header=2, index_col='Country Name') # downloaded from https://data.worldbank.org/indicator/SP.POP.TOTL https://api.worldbank.org/v2/en/indicator/SP.POP.TOTL?downloadformat=csv
# need to change covid_data to datetime type
covid_data.date = pd.to_datetime(covid_data.date)
date_to = datetime.datetime(2022, 11, 18)
covid_data = covid_data[covid_data.date<=date_to]
#%%
# select data for only countries in the world cup
countries = set(schedule_df['Team A'].unique().tolist() +
schedule_df['Team B'].unique().tolist())
# looking at the data set (https://covid.ourworldindata.org/data/owid-covid-data.csv) new cases smoothed
# for England and Wales is pooled under United Kingdom
proxies = copy.deepcopy(countries)
proxies.add('United Kingdom')
proxies.remove('England')
proxies.remove('Wales')
covid_data = covid_data[covid_data.location.isin(proxies)]
# sense check to make sure we have selected the right places
selected_proxies = covid_data.location.unique()
assert len(proxies) == len(selected_proxies)
#%% Selecting most recent available data for new_cases_smoothed
# remove missing data
covid_data.new_cases_smoothed = covid_data.new_cases_smoothed.replace({0:None})
covid_data = covid_data[pd.notnull(covid_data.new_cases_smoothed)]
prevelance_records = []
for country in countries:
if country in ['England', 'Wales']:
proxy = 'United Kingdom'
else:
proxy = country
    # select the proxy
location_data = covid_data[covid_data.location==proxy]
# latest date for which we have information
latest_date = location_data.date.max()
latest_date_data = location_data[location_data.date==latest_date]
cases_smoothed = latest_date_data.new_cases_smoothed.iloc[0]
if proxy=='South Korea':
population = population_df.loc['Korea, Rep.', '2021']
elif proxy == 'Iran':
population = population_df.loc['Iran, Islamic Rep.', '2021']
else:
population = population_df.loc[proxy,'2021']
entry = {'country': country,
'proxy': proxy,
'date': latest_date,
'case_prevalence': cases_smoothed/population,
}
prevelance_records.append(entry)
prevelance_df = pd.DataFrame(prevelance_records)
#%% Adding data on infection to detection ratio
# Getting data frame
# location of zip file downloaded from https://ghdx.healthdata.org/sites/default/files/record-attached-files/HME_COVID_19_IES_2019_2021_RATIOS.zip
zip_file = 'data_extraction/HME_COVID_19_IES_2019_2021_RATIOS.zip'
# read file
detection_raio_df = pd.read_csv(zip_file)
# Selecting detections/infections
detection_raio_df.measure_name.unique()
detection_raio_df = detection_raio_df[detection_raio_df.measure_name=='Cumulative infection-detection ratio']
# change date to date
detection_raio_df['date'] = pd.to_datetime(detection_raio_df['date'])
detection_raio_df = detection_raio_df[detection_raio_df.date==detection_raio_df.date.max()]
detection_raio_df.location_name = detection_raio_df.location_name.replace({'USA':'United States','UK':'United Kingdom'})
values_list = ['value_mean','value_lower','value_upper']
# Change percent to raw number
for column in values_list:
detection_raio_df[column] = detection_raio_df[column]/100
detection_raio_df.metric_name = 'raw'
# invert values so they are now infections/detected cases.
for column in values_list:
detection_raio_df[column] = detection_raio_df[column]**-1
to_merge = detection_raio_df[detection_raio_df.location_name.isin(proxies)]
to_merge = to_merge[['location_name']+values_list]
prevelance_df = prevelance_df.merge(to_merge, left_on='proxy', right_on='location_name')
prevelance_df.rename(columns={'value_lower': 'ratio_upper',
'value_mean': 'ratio_mean',
'value_upper': 'ratio_lower'},
inplace=True)
prevelance_df['infection_prevalence_lower'] = prevelance_df.case_prevalence*prevelance_df.ratio_lower
prevelance_df['infection_prevalence_mean'] = prevelance_df.case_prevalence*prevelance_df.ratio_mean
prevelance_df['infection_prevalence_upper'] = prevelance_df.case_prevalence*prevelance_df.ratio_upper
# host min and max
host_min = prevelance_df[prevelance_df.country=='Qatar']['infection_prevalence_lower'].tolist()[0]
host_max = prevelance_df[prevelance_df.country=='Qatar']['infection_prevalence_upper'].tolist()[0]
# Everyone else's min and max
visior_min = prevelance_df[prevelance_df.country!='Qatar']['infection_prevalence_lower'].min()
visior_max = prevelance_df[prevelance_df.country!='Qatar']['infection_prevalence_upper'].max()
|
LIAM-COVID-19-Forecasting/Modelling-Disease-Mitigation-at-Mass-Gatherings-A-Case-Study-of-COVID-19-at-the-2022-FIFA-World-Cup
|
Running_and_analysing_simulations/parameters/data_extraction/getting_prevelance_data.py
|
getting_prevelance_data.py
|
py
| 5,030 |
python
|
en
|
code
| 0 |
github-code
|
6
|
37009381909
|
class Solution:
def minCostClimbingStairs(self, cost) -> int:
"""
dp[i]表示登上第i个阶梯所花费的体力值。
登上楼顶所花费的体力值为0,
所以我们要登上n层阶梯的阶梯顶部,则要求dp[n+1]
"""
n = len(cost)
if n < 1:
return 0
dp = [0] * (n+1)
cost.append(0)
dp[0] = cost[0]
dp[1] = cost[1]
for i in range(2, n+1):
dp[i] = min(dp[i-1], dp[i-2]) + cost[i]
return dp[n]
s = Solution()
print(s.minCostClimbingStairs([]))
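# Canonical check: with cost = [10, 15, 20] the cheapest way up is to start
# on the 15 and take two steps, so the expected answer is 15.
assert s.minCostClimbingStairs([10, 15, 20]) == 15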
|
wangluolin/Algorithm-Everyday
|
dp/746-爬楼梯.py
|
746-爬楼梯.py
|
py
| 591 |
python
|
en
|
code
| 0 |
github-code
|
6
|
1118355565
|
'''02-05-2021 Baekjoon Algorithm
Step-by-step problem solving - Stage 12
Language - Python'''
# 2751
# Given N numbers, write a program that sorts them in ascending order.
'''
N = int(input())
num = []
for _ in range(0, N):
num.append(int(input()))
num.sort()
for i in range(0, N):
print(num[i])
#################################################################################
## Time limit exceeded -> fixed by submitting the code below via PyPy; unsure what differs from the code below ##
#################################################################################
N = int(input())
nums = []
for i in range(N):
nums.append(int(input()))
nums = sorted(nums)
for i in range(N):
print(nums[i])
'''
# 10989
# Given N numbers, write a program that sorts them in ascending order.
N = int(input())
num = []
for _ in range(0, N):
num.append(int(input()))
num = sorted(num)
for i in range(0, N):
print(num[i])
|
shiningnight93/Baekjoon_Algorithm
|
02-05-2021.py
|
02-05-2021.py
|
py
| 1,068 |
python
|
ko
|
code
| 0 |
github-code
|
6
|
38315691636
|
# declare the long_words function with two parameters, n and str
def long_words(n, str):
    # declare an empty list
    word_len=[]
    # build a new list by splitting str on spaces (each space-separated word becomes one list element)
    txt=str.split(" ")
    for x in txt:
        # if the length of string x in list txt is greater than n, append x to the end of word_len
        if len(x)>n:
            word_len.append(x)
    # return the value of the word_len list
    return word_len
# call long_words with n = 3 and str = "The quick brown fox jumps over the lazy dog", then print the result
print(long_words(3, "The quick brown fox jumps over the lazy dog"))
|
lananh104/chepcode_ham.split-
|
chepcode.py
|
chepcode.py
|
py
| 743 |
python
|
vi
|
code
| 0 |
github-code
|
6
|
23125697422
|
import os
import sys
sys.path.append("..")
import taobaoTry.taobaoTryUtils
from task.logUtils import logUtils
class taobaoTryTask:
def enum(**enums):
return type('Enum', (), enums)
taskType = enum(JingXuan=1, All=2)
mTaskTypeFor = taskType.All
taobaoTryTaskLockFile = ".." + os.path.sep + "lockFile" + os.path.sep + "taobaoTry.lock"
def __init__(self,taskTypeFor=taskType.All):
        taobaoTryTask.mTaskTypeFor = taskTypeFor
if(taskTypeFor==taobaoTryTask.taskType.JingXuan):
taobaoTryTask.taobaoTryTaskLockFile = ".." + os.path.sep + "lockFile" + os.path.sep + "taobaoJingXuanTry.lock"
else:
taobaoTryTask.taobaoTryTaskLockFile = ".." + os.path.sep + "lockFile" + os.path.sep + "taobaoAllTry.lock"
if (os.path.exists(taobaoTryTask.taobaoTryTaskLockFile)):
            if (taskTypeFor == taobaoTryTask.taskType.JingXuan):
                logUtils.info("Featured-type lock file already exists, about to exit")
            else:
                logUtils.info("All-type lock file already exists, about to exit")
os._exit(0)
else:
# os.mknod('.lock')
            if (taskTypeFor == taobaoTryTask.taskType.JingXuan):
                logUtils.info("Featured type: creating lock file")
            else:
                logUtils.info("All type: creating lock file")
open(taobaoTryTask.taobaoTryTaskLockFile, "w")
if(taskTypeFor==taobaoTryTask.taskType.JingXuan):
self.actionJingXuanTask()
else:
self.actionAllTask()
def __del__(self):
if (os.path.exists(taobaoTryTask.taobaoTryTaskLockFile)):
os.remove(taobaoTryTask.taobaoTryTaskLockFile)
            if (taobaoTryTask.mTaskTypeFor == taobaoTryTask.taskType.JingXuan):
                logUtils.info("Featured type: exiting program")
            else:
                logUtils.info("All type: exiting program")
    def actionAllTask(self):
        logUtils.info("actionAllTask: all")
        taobaoTry.taobaoTryUtils.taobaoTryUtils().handlePcTryList(None, 0, 1)  # a negative value for the second argument collects featured items only; 0 collects everything
    def actionJingXuanTask(self):
        logUtils.info("actionJingXuanTask: featured")
        taobaoTry.taobaoTryUtils.taobaoTryUtils().handlePcTryList(None, -1, 1)  # a negative value for the second argument collects featured items only; 0 collects everything
# taobaoTryTask(taobaoTryTask.taskType.JingXuan).actionJingXuanTask()
taobaoTryTask(taobaoTryTask.taskType.JingXuan)
|
tudousiji/pachong
|
taobaoTry/taobaoTryTask.py
|
taobaoTryTask.py
|
py
| 2,503 |
python
|
en
|
code
| 3 |
github-code
|
6
|
19521127011
|
from panda3d.core import CollisionNode, CollisionTube, CollisionBox, AmbientLight, Vec4, DirectionalLight
from FreedomCampaignGame.comm_with_server import ClientLogObject
client_logger = ClientLogObject().client_logger
class GameMap():
def __init__(self, render, load_model_fun):
self.render = render
        # Load the environment model: this loads environment.egg from the
        # Models folder and returns a handle to that model
        self.load_model = load_model_fun
        self.scene = self.load_model("Models/Environment/environment")
        # Reparent the model so that it gets rendered.
        self.scene.reparent_to(self.render)
        client_logger.info("Generating map obstacles...")
        self.set_map_solid()
        client_logger.info("Loading ambient and directional lights...")
self.load_light_all()
def load_light_all(self):
        # Add ambient light; an AmbientLight object is a node
self.ambient_light = AmbientLight("ambient light")
self.ambient_light.set_color(Vec4(0.2, 0.2, 0.2, 1))
self.ambient_light_node_path = self.render.attach_new_node(self.ambient_light)
        # A light affects every node below the node it is attached to; to light the whole scene, set the light on render
self.render.set_light(self.ambient_light_node_path)
        # Add a directional light whose direction can be specified
self.directional_light = DirectionalLight("directional light")
self.directional_light_node_path = self.render.attach_new_node(self.directional_light)
        # For a figure facing you, H turns it left, P tips it sideways (clockwise), and R pitches it forward toward you; changing HPR moves where the light falls
self.directional_light_node_path.setHpr(45, -45, 0)
self.render.set_light(self.directional_light_node_path)
        # Enable the shader generator by calling setShaderAuto on the NodePath it should affect; here it is applied to render
self.render.setShaderAuto()
def set_box_solid(self, size=(0, 1, 1, 1), show=True):
        # size: the purpose of the first 0 is unclear (the official docs do not explain it); the last three values are length, width and height
box_solid = CollisionBox(size[0], size[1], size[2], size[3])
box_node = CollisionNode("box")
box_node.add_solid(box_solid)
box = self.render.attach_new_node(box_node)
if show:
box.show()
return box
def set_tube_solid(self, size=(0, 0, 0, 0, 0, 0, 0.4), show=True):
        # A tube is defined by its start point, end point and radius; this defines a circular tube from (-8,0,0) to (8,0,0) with radius 0.4
set_tube_solid = CollisionTube(size[0], size[1], size[2], size[3], size[4], size[5], size[6])
wall_node = CollisionNode("wall")
wall_node.add_solid(set_tube_solid)
wall = self.render.attach_new_node(wall_node)
if show:
wall.show()
return wall
def set_map_solid(self):
        # Place one large tube
# wall = self.set_tube_solid(size=(-2.0, 0, 0, 2.0, 0, 0, 0.2))
# wall.setY(-3)
        # Use boxes to build a staircase
box = self.set_box_solid(size=(0, 1, 1.5, 0.2))
box.setX(-2)
box.setZ(0.2)
box = self.set_box_solid(size=(0, 1, 1.5, 0.4))
box.setX(-3)
box.setZ(0.4)
box = self.set_box_solid(size=(0, 1, 1.5, 0.6))
box.setX(-4)
box.setZ(0.6)
        # Use boxes for walls; these are the two walls on either side of the door
box = self.set_box_solid(size=(0, 3.65, 0.1, 1.5))
box.setY(8.1)
box.setX(-4.3)
box.setZ(1.5)
box = self.set_box_solid(size=(0, 3.65, 0.1, 1.5))
box.setY(8.1)
box.setX(4.3)
box.setZ(1.5)
        # The door: its threshold
box = self.set_box_solid(size=(0, 0.65, 0.1, 0.25))
box.setY(8.2)
box.setZ(0.25)
        # Top of the door frame
box = self.set_box_solid(size=(0, 0.65, 0.1, 0.2))
box.setY(8.1)
box.setZ(2.04)
box = self.set_box_solid(size=(0, 8, 0.1, 1.5))
box.setY(-8.1)
box.setZ(1.5)
box = self.set_box_solid(size=(0, 0.1, 8, 1.5))
box.setX(8.1)
box.setZ(1.5)
box = self.set_box_solid(size=(0, 0.1, 8, 1.5))
box.setX(-8.1)
box.setZ(1.5)
|
optimjiang/my_3d_game
|
game_map.py
|
game_map.py
|
py
| 4,327 |
python
|
zh
|
code
| 0 |
github-code
|
6
|
29432219565
|
'''
Tried a two-pointer approach first, but it failed
'''
from typing import List

class Solution:
def maxSubArray(self, nums: List[int]) -> int:
sum_num = 0
        answer = float('-inf')
for i in range(len(nums)):
sum_num += nums[i]
answer = max(answer, sum_num)
if sum_num < 0:
sum_num = 0
return answer
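# Kadane's algorithm: keep extending the running sum and reset it to 0 once it
# drops below zero. For example, with nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
# the best subarray is [4, -1, 2, 1] and the method returns 6.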
|
mintaewon/coding_leetcode
|
0921/P21_taewon.py
|
P21_taewon.py
|
py
| 379 |
python
|
en
|
code
| 0 |
github-code
|
6
|
41708064158
|
import glob
import math
import os
import sys
import random
import numpy as np
import pandas as pd
import tensorflow as tf
from tqdm import tqdm
from model.siamese.config import cfg
tqdm.pandas()
"""
Files have to be stored in a structure:
main_folder/
1/
0030.jpg
1080.jpg
...
2/
2400.jpg
...
14/
8800.jpg
...
This structure is going to extract images for 3 classes [1,2,14].
"""
AUTOTUNE = tf.data.experimental.AUTOTUNE
class DataGenerator(tf.keras.utils.Sequence):
def __init__(
self,
folder_path=cfg.TRAIN.DATA_PATH,
file_ext="jpg",
debug=False,
training=True,
exclude_aug=False,
step_size=1
):
"""
Args:
folder_path: string ## Path to folder with video frames
file_ext: string | List[str] (optional) looking for files with this extension
debug: boolean (optional) should generator display any warnings?
"""
self.images = None
self.debug = debug
self.data_path = folder_path
self.batch_size = cfg.TRAIN.BATCH_SIZE
self.shuffle = True
self.training = training
self.step_size = step_size
if not os.path.isdir(folder_path):
print(
"Images folder path {} does not exist. Exiting...".format(folder_path)
)
sys.exit()
images = []
for class_dir in os.scandir(folder_path):
if type(file_ext) is str:
file_ext = [file_ext]
files = []
for ext in file_ext:
pattern = '*'
if exclude_aug:
pattern = '*_*'
files.extend(glob.glob(f"{class_dir.path}/{pattern}.{ext}"))
for i, file in enumerate(sorted(files)):
images.append((file, class_dir.name))
self.org_images = images[::self.step_size]
batched = self.batch_images()
self.images = pd.DataFrame(batched, columns=["path", "label"])
print(
f'Found {len(self.images)} files for {len(self.images["label"].unique())} unique classes'
)
def __len__(self):
return math.ceil(len(self.images) / cfg.TRAIN.BATCH_SIZE)
def add_dataset(self, dataset):
"""
Args:
dataset: List[path, label]
Returns:
"""
self.org_images = self.org_images + dataset
batched = self.batch_images()
self.images = pd.DataFrame(batched, columns=["path", "label"])
print(
f'Found {len(self.images)} files for {len(self.images["label"].unique())} unique classes'
)
def batch_images(self):
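        # Shuffle the image pool, then emit two consecutive samples per class
        # in round-robin order, so every batch contains a pair from each class
        # (presumably to supply positive pairs for the siamese loss).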
images = self.org_images.copy()
random.shuffle(images)
images = pd.DataFrame(images, columns=["path", "label"])
low_class_count = min(images["label"].value_counts())
unique_classes = images["label"].unique()
class_dfs = {}
for class_id in unique_classes:
class_dfs[str(class_id)] = (
images[images["label"] == class_id]
.sample(frac=1)
.reset_index(drop=True)
)
batched = []
for i in range(0, low_class_count - 1, 2):
for class_id in unique_classes:
rows = class_dfs[str(class_id)].loc[[i, i + 1], :]
batched.append(rows.to_numpy())
batched = np.array(batched)
batched = batched.reshape(
(batched.shape[0] * batched.shape[1], batched.shape[2])
)
return batched
@staticmethod
def process_image(image_path, to_input=False):
"""
Args:
image_path: string
to_input: boolean - should image be wrapped into input shape (1, 224, 224, 3)
Returns:
((cfg.NN.INPUT_SIZE, cfg.NN.INPUT_SIZE, 3), class)
"""
image = tf.keras.preprocessing.image.load_img(
image_path, target_size=(cfg.NN.INPUT_SIZE, cfg.NN.INPUT_SIZE)
)
image = tf.keras.preprocessing.image.img_to_array(image)
image = np.expand_dims(image, axis=0)
        image = tf.keras.applications.mobilenet_v2.preprocess_input(image)
if to_input:
return image
return image[0]
@staticmethod
def process_label(label):
"""
Args:
label: string
Returns:
int
"""
return int(label)
def get_dataset(self):
"""
Returns:
tf.Dataset
"""
target = (
self.images.pop("label")
.progress_map(DataGenerator.process_label)
.to_numpy()
)
images = (
self.images.pop("path").progress_map(DataGenerator.process_image).to_numpy()
)
reshaped_images = np.concatenate(images).reshape(
(
images.shape[0],
images[1].shape[0],
images[1].shape[1],
images[1].shape[2],
)
)
ds = tf.data.Dataset.from_tensor_slices((reshaped_images, target))
ds = ds.cache()
ds = ds.batch(cfg.TRAIN.BATCH_SIZE)
ds = ds.prefetch(buffer_size=cfg.TRAIN.BATCH_SIZE)
return ds
def on_epoch_end(self):
if self.training:
batched = self.batch_images()
self.images = pd.DataFrame(batched, columns=["path", "label"])
def __getitem__(self, item):
        images = self.images.iloc[
            item * cfg.TRAIN.BATCH_SIZE : (item + 1) * cfg.TRAIN.BATCH_SIZE
        ].copy()
target = images.pop("label").map(DataGenerator.process_label).to_numpy()
images = images.pop("path").map(DataGenerator.process_image).to_numpy()
reshaped_images = np.concatenate(images).reshape(
(
images.shape[0],
images[1].shape[0],
images[1].shape[1],
images[1].shape[2],
)
)
return reshaped_images, target
|
burnpiro/farm-animal-tracking
|
data/data_generator.py
|
data_generator.py
|
py
| 6,118 |
python
|
en
|
code
| 24 |
github-code
|
6
|
29522073566
|
import os
import random
import sys
import yaml
import numpy as np
with open("config.yml", 'r') as ymlfile:
cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
ymlfile.close()
if not cfg['use_gpu']:
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
seed = cfg['seed']
os.environ['PYTHONHASHSEED'] = str(seed)
random.seed(seed)
np.random.seed(seed)
import tensorflow as tf
from tensorflow.keras.datasets.fashion_mnist import load_data
from tensorflow.keras.utils import to_categorical
tf.compat.v1.set_random_seed(seed)
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
tf.compat.v1.disable_eager_execution() # To solve the speed problem of TF2
# Deprecated in tf2
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
config.intra_op_parallelism_threads = 4
config.inter_op_parallelism_threads = 4
tf.compat.v1.keras.backend.set_session(tf.compat.v1.Session(graph=tf.compat.v1.get_default_graph(), config=config))
from utils.deepnetwork import DeepNetwork
from utils.tracker import Tracker
def fashion_mnist(params):
tracker = Tracker(seed, 'fashion_mnist.h5')
# Load dataset
(x_train, y_train), (x_test, y_test) = load_data()
# Preprocessing
# Reshape data as dataset is grayscaled
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2], 1)
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], 1)
    # Convert labels into categorical
n_classes = params['n_classes']
y_train = to_categorical(y_train, n_classes)
y_test = to_categorical(y_test, n_classes)
# Normalize images values
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
# Create model
model = DeepNetwork.build((28, 28, 1), params)
# Train model
model.fit(x_train, y_train,
batch_size=params['batch_size'],
epochs=params['n_epochs'],
validation_data=(x_test, y_test),
shuffle=True)
# Evaluate performance
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# Save model
tracker.save_model(model)
if __name__ == "__main__":
fashion_mnist(cfg['train'])
|
emarche/Fashion-MNIST
|
main.py
|
main.py
|
py
| 2,261 |
python
|
en
|
code
| 0 |
github-code
|
6
|
44717248733
|
# Quick and dirty utility to get coordinates for transforming a view into
# a bird's eye view. Useful for OCR where the camera is in a fixed position
# viewing a flat plane.
import cv2
import numpy as np
def onTrackbarChange(trackbarValue):
pass
def order_points(pts):
# initialize a list of coordinates that will be ordered
# such that the first entry in the list is the top-left,
# the second entry is the top-right, the third is the
# bottom-right, and the fourth is the bottom-left
rect = np.zeros((4, 2), dtype = "float32")
# the top-left point will have the smallest sum, whereas
# the bottom-right point will have the largest sum
s = pts.sum(axis = 1)
rect[0] = pts[np.argmin(s)]
rect[2] = pts[np.argmax(s)]
# now, compute the difference between the points, the
# top-right point will have the smallest difference,
# whereas the bottom-left will have the largest difference
diff = np.diff(pts, axis = 1)
rect[1] = pts[np.argmin(diff)]
rect[3] = pts[np.argmax(diff)]
# return the ordered coordinates
return rect
def expandPerspective(rect, width, height):
'''Expand the perspective out to the image limits
by finding intersection using point-slope form'''
# Constants
x = 0
y = 1
# Convert coordinate system
rect[:,1] *= -1
(tl, tr, br, bl) = rect
# Find the slope of each of the 4 lines
slopeTop = (tr[y]-tl[y]) / (tr[x]-tl[x])
slopeBottom = (br[y]-bl[y]) / (br[x]-bl[x])
slopeLeft = (tl[y]-bl[y]) / (tl[x]-bl[x])
slopeRight = (tr[y]-br[y]) / (tr[x]-br[x])
# Assign new points based on image size
pointRight = width,0
pointTop = 0,0
pointBottom = width, height * -1.0
pointLeft = 0, height* -1.0
# Find where the new expanded lines intersect using point slope form
    def intersection(m1, m2, x1, x2, y1, y2, orig):
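        # Point-slope form: y = m1*(x - x1) + y1 and y = m2*(x - x2) + y2.
        # Setting the two equal and solving for x gives
        #   x = ((m2*x2 - m1*x1) - (y2 - y1)) / (m2 - m1)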
        try:
            x = ((m2*x2 - m1*x1) - (y2 - y1)) / (m2 - m1)
            #y = ((-1.0*m1*y2 + m1*m2*x2 + y1*m2 )-(m1*m2*x1))/(m2-m1)
            y = m1*(x - x1) + y1
            x = round(x)
            y = round(y)
        except Exception:
            return orig
        return x, y
    new_tr = intersection(slopeTop, slopeRight, pointTop[x], pointRight[x], pointTop[y], pointRight[y], tr)
    new_tl = intersection(slopeTop, slopeLeft, pointTop[x], pointLeft[x], pointTop[y], pointLeft[y], tl)
    new_br = intersection(slopeBottom, slopeRight, pointBottom[x], pointRight[x], pointBottom[y], pointRight[y], br)
    new_bl = intersection(slopeBottom, slopeLeft, pointBottom[x], pointLeft[x], pointBottom[y], pointLeft[y], bl)
# Convert coordinate system back
    new_rect = np.array([new_tl, new_tr, new_br, new_bl], dtype="float32")
    new_rect[:, 1] *= -1
return new_rect
# Derived from https://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example
def four_point_transform(image, pts):
# Unpack points
rect = pts
(tl, tr, br, bl) = rect
# compute the width of the new image, which will be the
# maximum distance between bottom-right and bottom-left
# x-coordinates or the top-right and top-left x-coordinates
widthA = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
widthB = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
maxWidth = max(int(widthA), int(widthB))
# compute the height of the new image, which will be the
# maximum distance between the top-right and bottom-right
# y-coordinates or the top-left and bottom-left y-coordinates
heightA = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
heightB = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
maxHeight = max(int(heightA), int(heightB))
# now that we have the dimensions of the new image, construct
# the set of destination points to obtain a "birds eye view",
# (i.e. top-down view) of the image, again specifying points
# in the top-left, top-right, bottom-right, and bottom-left
# order
dst = np.array([
[0, 0], #tl
[maxWidth - 1, 0], #tr
[maxWidth - 1, maxHeight - 1], #br
[0, maxHeight - 1]], #bl
dtype = "float32")
# Move image to positive coordinates
min_x = round(abs(np.min(rect[:,0])))
min_y = round(abs(np.min(rect[:,1])))
T = np.matrix( [[ 1 , 0 , min_x], # Get min x
[ 0 , 1 , min_y ], # Get min y
[ 0 , 0 , 1 ]],
dtype = "float32")
# compute the perspective transform matrix and then apply it
M = cv2.getPerspectiveTransform(rect, dst)
warped = cv2.warpPerspective(image, T * M , (maxWidth + min_x , maxHeight + min_y), borderMode=cv2.BORDER_TRANSPARENT)
# return the warped image
return warped
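# Editor's note on four_point_transform above: warpPerspective is driven by the single
# composed homography T * M -- the perspective map M followed by the translation T that
# shifts the min x/y of the expanded quad back into positive pixel coordinates.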
# Open Image
img = cv2.imread('img\\example1.jpeg')
# Open windows for control, original image, and result
cv2.namedWindow('Control', cv2.WINDOW_AUTOSIZE)
cv2.namedWindow('Main', cv2.WINDOW_AUTOSIZE)
cv2.namedWindow('Birds Eye', cv2.WINDOW_AUTOSIZE)
# Track bars for coordinates
cv2.createTrackbar( 'X L Bot', 'Control', 0, img.shape[1], onTrackbarChange )
cv2.createTrackbar( 'Y L Bot', 'Control', img.shape[0], img.shape[0], onTrackbarChange )
cv2.createTrackbar( 'X L Top', 'Control', 0, img.shape[1], onTrackbarChange )
cv2.createTrackbar( 'Y L Top', 'Control', 0, img.shape[0], onTrackbarChange )
cv2.createTrackbar( 'X R Top', 'Control', img.shape[1], img.shape[1], onTrackbarChange )
cv2.createTrackbar( 'Y R Top', 'Control', 0, img.shape[0], onTrackbarChange )
cv2.createTrackbar( 'X R Bot', 'Control', img.shape[1], img.shape[1], onTrackbarChange )
cv2.createTrackbar( 'Y R Bot', 'Control', img.shape[0], img.shape[0], onTrackbarChange )
# Loop
while(1):
# Get Track Bar positions
    pts = np.array([
        (cv2.getTrackbarPos('X L Bot', 'Control'), cv2.getTrackbarPos('Y L Bot', 'Control')),
        (cv2.getTrackbarPos('X L Top', 'Control'), cv2.getTrackbarPos('Y L Top', 'Control')),
        (cv2.getTrackbarPos('X R Top', 'Control'), cv2.getTrackbarPos('Y R Top', 'Control')),
        (cv2.getTrackbarPos('X R Bot', 'Control'), cv2.getTrackbarPos('Y R Bot', 'Control')),
    ], dtype = "int32")
# Draw the perspective
imgConnectedPoints = cv2.polylines(img.copy(), [pts], isClosed = True, color = (0,255,0), thickness = 3)
cv2.imshow('Main',imgConnectedPoints)
# Draw the transformed bird's eye view
warped = four_point_transform(img, expandPerspective(order_points(pts), img.shape[1], img.shape[0]))
cv2.imshow('Birds Eye',warped)
# Exit
if cv2.waitKey(1)==27:
exit(0)
cv2.destroyAllWindows()
|
hellkrusher/BirdsEyePerspectiveTransformationUtility
|
BirdsEyePerspectiveTransformationUtility.py
|
BirdsEyePerspectiveTransformationUtility.py
|
py
| 6,413 |
python
|
en
|
code
| 5 |
github-code
|
6
|
74142812668
|
import sys
from execute_query import *
if __name__ == "__main__":
q_values = [1]
if len(sys.argv) != 4:
print("3 arguments expected : input_file_rep input_file_model output_file")
else:
db_rep = '../generated_dbs/' + sys.argv[1]
db_model = sys.argv[2]
output_file = sys.argv[3]
result = ''
result += 'FO : ' + db_rep + '/X' + db_model + '\n'
for i in q_values:
n = str(i+1)
db_file = db_rep + '/' + n + db_model
fo_script = 'q' + n + 'fo.lp'
# result += str(run_fo_query(db_file, fo_script)[0]) + ', '
print("DONE FO : ", i)
result += '\n'
result += 'GT : ' + db_rep + '/X' + db_model + '\n'
for i in q_values:
n = str(i+1)
db_file = db_rep + '/' + n + db_model
module_name = 'gtq' + n
module = __import__(module_name)
result += str(execute_gt_query(db_file, module.get_script, module.query, module.tmp_file)[0]) + ', '
print("DONE GT : ", i)
result += '\n'
result += '--------------------------------------- \n'
f = open(output_file, 'a')
f.write(result)
f.close()
|
YacineSahli/KRR
|
queries/run_queries.py
|
run_queries.py
|
py
| 1,295 |
python
|
en
|
code
| 0 |
github-code
|
6
|
73832000827
|
import numpy as np
import pandas as pd
import scipy.ndimage as nd
from skimage import io as skio
import sys
import getopt
def usage():
print("""
Usage : python3 gen_stacked_tif.py < -i mask.lst>
< -a anno.txt>
< -o output prefix>
[ -b binsize default 20]
""")
def main(argv):
########################
# no args equal to -h
if len(argv) == 0 :
usage()
sys.exit(0)
########################
# default values
mask_lst = ''
annof = ''
binsize = 20
prefix = ''
########################
# parse args
try:
opts, args = getopt.getopt(argv,"hi:o:a:b:",["help"])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(0)
elif opt in ("-i" ):
mask_lst = arg
elif opt in ("-o" ):
prefix = arg
elif opt in ("-a" ):
annof = arg
elif opt in ("-b" ):
binsize = int(arg)
infos = pd.read_csv(mask_lst,sep=' ',header=None)
infos.columns = ['filename','zvalue']
infos['zvalue'] = infos['zvalue'].astype(int)
infos['zvalue'] = infos['zvalue'] - 1 # start from 0
slices = {}
annos = pd.read_csv(annof,sep=',',header=0)
for i , row in infos.iterrows():
cellmask = np.loadtxt(row['filename'],dtype=int)
y, x = np.nonzero(cellmask)
tmp_draw = pd.DataFrame()
tmp_draw['x'] = x
tmp_draw['y'] = y
tmp_draw['cell'] = cellmask[y,x]
cellmask[y,x] = 0
tmp_draw= tmp_draw.set_index('cell')
slice_anno = annos[ annos['slice_id'] == int(row['zvalue']+1) ].copy()
slice_anno = slice_anno.set_index('cell_id')
tmp_draw['anno'] = slice_anno['anno_id']
tmp_draw = tmp_draw[tmp_draw['anno']!='NA'].copy()
cellmask[tmp_draw['y'],tmp_draw['x']] = 100
#pharynx = tmp_draw[tmp_draw['anno']=='c21']
#gut = tmp_draw[tmp_draw['anno']=='c1']
#neural = tmp_draw[tmp_draw['anno']=='c33']
#cellmask[pharynx['y'],pharynx['x']] = 150
#cellmask[gut['y'],gut['x']] = 200
#cellmask[neural['y'],neural['x']] = 250
h,w = cellmask.shape
affine = np.matrix(np.array([[1.0/binsize,0,0],[0,1.0/binsize,0],[0,0,1]]))
binimage = nd.affine_transform(cellmask.T,affine.I,output_shape=(int(w/binsize),int(h/binsize)),order=0)
slices[row['zvalue']] = binimage.T
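        # Editor's note: `affine` scales coordinates by 1/binsize, so its inverse is fed
        # to nd.affine_transform, which maps every output pixel back to the full-resolution
        # mask; with order=0 (nearest neighbour) a (H, W) label mask becomes roughly
        # (H/binsize, W/binsize) without inventing new label values.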
H = int(h/binsize)
W = int(w/binsize)
zmax = infos['zvalue'].max()
image_buff = np.zeros((zmax+1,H,W),dtype='uint8')
for x in slices:
image_buff[x,:,:] = slices[x]
skio.imsave(f'{prefix}.tif',image_buff)
if __name__ == "__main__":
main(sys.argv[1:])
|
BGI-Qingdao/4D-BioReconX
|
Preprocess/meshgen/gen_stacked_tif.py
|
gen_stacked_tif.py
|
py
| 2,922 |
python
|
en
|
code
| 4 |
github-code
|
6
|
11119557677
|
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the timeInWords function below.
def timeInWords(h, m):
time = ''
words = ['','one','two','three','four','five','six','seven','eight','nine','ten','eleven','twelve', 'thirteen','fourteen','fifteen','sixteen','seventeen','eighteen','nineteen','twenty']
if m == 1:
time = words[m] + ' minute past ' + words[h]
elif m == 0:
time = words[h] + " o' clock"
elif m == 15:
time = "quarter past " + words[h]
elif m == 30:
time = "half past " + words[h]
    elif m == 45:
        time = "quarter to " + words[h % 12 + 1]
    elif m <= 20:
        time = words[m] + ' minutes past ' + words[h]
    elif m < 30:
        time = words[-1] + ' ' + words[m % 10] + ' minutes past ' + words[h]
    else:
        rem = 60 - m
        nxt = words[h % 12 + 1]
        if rem == 1:
            time = words[rem] + ' minute to ' + nxt
        elif rem <= 20:
            time = words[rem] + ' minutes to ' + nxt
        else:
            time = words[-1] + ' ' + words[rem % 10] + ' minutes to ' + nxt
return time
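# Illustrative checks (editor's note): timeInWords(5, 47) -> 'thirteen minutes to six',
# timeInWords(5, 28) -> 'twenty eight minutes past five', timeInWords(5, 0) -> "five o' clock".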
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
h = int(input())
m = int(input())
result = timeInWords(h, m)
fptr.write(result + '\n')
fptr.close()
|
lamanhasanli/challengers-club-adventure
|
MS-WarmUp/The_Time_in_Words.py
|
The_Time_in_Words.py
|
py
| 1,213 |
python
|
en
|
code
| 0 |
github-code
|
6
|
14837094680
|
import frappe
import json
# items are passed in from the client; map each customer part number (ref code) to its item code
@frappe.whitelist()
def so_refcode_to_itemcode():
#提取js传入参数
ao_items = json.loads(frappe.form_dict.get("items"))
customer_name = frappe.form_dict.get("customer")
#获取js传入的全部客户料号(非重复)
s_ref_code = {r.get("customer_item_code") for r in ao_items}
    # look up the item code for every customer part number in the Item Customer Detail table
item_code_map = dict(
frappe.get_all("Item Customer Detail",
filters = {"customer_name":customer_name,
"ref_code":("in", s_ref_code)
},
fields = ["ref_code", "parent"],
as_list = 1
))
# frappe.msgprint(customer_name+"--"+str(s_ref_code)+"--"+str(items))
    # return a dict mapping each row name to its resolved item code
result = {r.get("name"):item_code_map.get(r.get("customer_item_code")) for r in ao_items}
frappe.response["message"] = result
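# Editor's usage sketch (illustrative, not from the original module): a client script
# would call this endpoint roughly as
#   frappe.call({
#       method: "yhen.api.sales_order.so_refcode_to_itemcode",
#       args: {customer: cur_frm.doc.customer, items: JSON.stringify(rows)},
#   })
# where each row carries {name, customer_item_code}; the response message then maps
# every row name to its item code.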
|
cwlong1987/yhen
|
yhen/api/sales_order.py
|
sales_order.py
|
py
| 866 |
python
|
en
|
code
| 0 |
github-code
|
6
|
42909743578
|
# Jin Yang 260724904
import dicts_utils as utilsd
import board_utils as utilsb
import random
# takes as input a dictionary representing the rack of a player; Displays one line containing
# the letter that're on the rack using upper case.
def display_rack(r):
""" (dict) -> NoneType
Displays one line containing the letter(s) that are in the dictionary using upper case.
>>> display_rack({'a':2, 'f':1, 'g':2, 'o':1, 'z':1})
A A F G G O Z
>>> display_rack({'h': 1, 'e': 1, 'l': 2, 'o': 1})
H E L L O
>>> display_rack({'h': 1})
H
"""
    r_list = utilsd.flatten_dict(r)
    print(' '.join(r_list).upper())
# takes as input a dictionary representing the rack of a player, and a string; Returns True if all
# characters in the input string are available on the rack, and if so, removes those letters from
# the rack; Otherwise, return False and doesn't modify the rack.
def has_letters(rack, word):
""" (dict, str) -> bool
Returns true if all characters in the input string are available in the dictionary rack, and if
so, removes those letters from the rack. Otherwise, returns false and doesn't modify the rack.
>>> r = {'a':2, 'c':1, 't':1, 'i':2, 'r':1}
>>> has_letters(r, 'cat')
True
>>> r == {'a':1, 'i':2, 'r':1}
True
    >>> r = {'a':2, 'f':1, 'g':2, 'o':1, 'z':1}
    >>> has_letters(r, 'goof')
False
>>> r == {'a':2, 'f':1, 'g':2, 'o':1, 'z':1}
True
>>> has_letters(r, 'fog')
True
>>> r == {'a': 2, 'g': 1, 'z': 1}
True
"""
d_occur = utilsd.count_occurrences(word)
if utilsd.subtract_dicts(rack, d_occur) == True:
return True
else:
return False
def get_num_rack(rack):
""" (dict) -> int
Returns the number of characters in the dictionary (which maps single str characters to an int
representing its count)
>>> b = {'a':1, 'e':2, 'h':1, 'l':2, 'n':1, 'p':2, 's':3, 't':2, 'z':1}
>>> get_num_rack(b)
15
>>> b = {'a':2, 'f':1, 'g':2, 'o':1, 'z':1}
>>> get_num_rack(b)
7
>>> b = {'a': 2, 'g': 1, 'z': 1}
>>> get_num_rack(b)
4
"""
num = 0
for key in rack:
num += rack[key]
return num
# takes as input 2 dictionaries (one representing player's rack, the other the pool of letters,
# respectively), and a positive integer n; function draws letters at random from the pool and adds them
# to the rack until there are either n letters on the rack or no more letters in the pool; Doesn't return
# anything, May modify both input dictionaries
def refill_rack(rack, pool, n):
"""(dict, dict, int) -> None
>>> random.seed(5)
>>> r1 = {'a':2, 'k':1}
>>> b = {'a':1, 'e':2, 'h':1, 'l':2, 'n':1, 'p':2, 's':3, 't':2, 'z':1}
>>> refill_rack(r1, b, 7)
>>> r1
{'a': 2, 'k': 1, 's': 1, 'l': 1, 't': 1, 'n': 1}
>>> refill_rack(r1, b, 16)
>>> r1
{'a': 3, 'k': 1, 's': 3, 'l': 2, 't': 2, 'n': 1, 'z': 1, 'e': 2, 'h': 1}
>>> r1 = {'a': 2, 'g': 1, 'z': 1}
>>> b = {'a':2, 'f':1, 'g':2, 'o':1, 'z':1}
    >>> refill_rack(r1, b, 4)
    >>> r1
    {'a': 2, 'g': 1, 'z': 1}
"""
# get list of characters in pool
#p_list = utilsd.flatten_dict(pool)
# choose random character from pool
#rand_char = random.choice(p_list)
# modify p_list to not pick rand_char again
#p_list.remove(rand_char)
# get total number of rand_char that's in the rack
#rand_char_in_rack = utilsd.get_word_score(rand_char, rack)
# get total number of letters in rack and pool
num_pool = get_num_rack(pool)
num_rack = get_num_rack(rack)
count = n - num_rack
while count > 0 and num_pool != 0:
p_list = utilsd.flatten_dict(pool)
rand_char = random.choice(p_list)
p_list.remove(rand_char)
rand_char_in_rack = utilsd.get_word_score(rand_char, rack)
if rand_char_in_rack == 0:
rack[rand_char] = 1
else:
rack[rand_char] += 1
pool[rand_char] -= 1
if pool[rand_char] == 0:
pool.pop(rand_char)
count -= 1
num_pool -= 1
num_rack += 1
# takes as input a list of strings, a dictionary mapping letters to integers representing
# the number of points each letter's worth, and a dictionary representing valid words
# (same format as the one returned by dicts_utils.create_scrabble_dict);
# Returns the score obtained by summing together the score of each word from the input list
# if any of the words in the list isn't valid, TOTAL score should be 0
def compute_score(list_words, dict_points, dict_valid):
"""(list, dict, dict) -> int
Returns the score obtained by summing together the score of each word from the input list (score
is obtained from the values of the dictionary dict_points). If any of the words in the list
isn't valid (decided by dict_valid), total score should be 0.
>>> v = {'a':1, 'p':3, 'h':2}
>>> w = ['aa', 'qi', 'za', 'cat', 'can', 'cow', 'dog', 'dad', 'hippo', 'umami', 'uncle']
>>> d = utilsd.create_scrabble_dict(w)
>>> compute_score(['hippo', 'aa'], v, d)
10
>>> v = {'c':3, 'f':1, 'j':6, 'd':2}
>>> w = ['aa', 'qi', 'za', 'cat', 'can', 'cow', 'dog', 'dad', 'hippo', 'umami', 'uncle']
>>> d = utilsd.create_scrabble_dict(w)
    >>> compute_score(['cow', 'hello'], v, d)
    0
    >>> compute_score(['dog', 'uncle', 'can'], v, d)
    8
    >>> compute_score(['here', 'now', 'rat'], v, d)
0
"""
is_valid = utilsd.is_valid_word_list(list_words, dict_valid)
score = 0
if is_valid == True:
        for word in list_words:
            for char in word:
                # letters missing from dict_points are simply worth 0 points
                if char in dict_points:
                    score += dict_points[char]
return score
# takes as input a two-dimensional list representing the board, a string representing
# the letters the player wants to add to the board, 2 integers representing the row and col
# number (respectively) of the starting square, and a string indicating the direction to
# take when placing the letters on the board (either 'down' or 'right');
# 1) adds the letters received as input to the board given a starting position, and a direction;
# 2) returns a list of words created by adding those letters to the board;
# the list will contain the main word as well as any hook word generated; order of the elements
# doesn't matter; Note: input list should be modified, unless the direction provided is not equal
# to neither 'down' nor 'right';
# in such case, returns an empty list; ASSUME the starting square is empty and the provided letters
# will fit the board
def place_tiles(board, letters, row, col, direction):
""" (list of list, str, int, int, str) -> list
Adds the letters received as input to the board given a starting position at row/col number, and a
direction ('down' or 'right'). Returns a list of words created by adding those letters to the board.
The list will contain the main word, as well as any hook word generated; order of the elements doesn't
matter. The input list is modified unless the direction provided isn't 'down' nor 'right'; in such a case,
returns an empty list. Assume the starting square is empty and the provided letters will fit the board.
>>> b = [['c','a','t',' '], [' ',' ',' ',' '], [' ',' ',' ',' '], [' ',' ',' ',' '], [' ',' ',' ',' ']]
>>> place_tiles(b, 'rain', 1, 2, 'down')
['train']
>>> words = place_tiles(b, 'mt', 2, 1, 'right')
>>> words
['mat']
>>> words2 = place_tiles(b, 'sa', 0, 3, 'down')
>>> words2.sort()
>>> words2
['cats', 'ra', 'sat']
"""
# deep copy of the original (input) board
copy_ori_board = []
for ele in board:
ori_board = []
for n in ele:
ori_board.append(n)
copy_ori_board.append(ori_board)
board_speci = []
if direction not in ['down', 'right']:
return board_speci
row_speci = row
if direction == 'down':
while row_speci < len(board):
board_speci += board[row_speci][col]
row_speci += 1
row_speci = row
if utilsb.fit_on_board(board_speci, letters, 0) == True:
for char in range(len(letters)):
if board[row_speci][col] == ' ':
board[row_speci][col] = letters[char]
row_speci += 1
else:
                board[row_speci + 1][col] = letters[char]
row_speci += 1
col_speci = col
if direction == 'right':
while col_speci < len(board[0]):
board_speci += board[row][col_speci]
col_speci += 1
col_speci = col
if utilsb.fit_on_board(board_speci, letters, 0) == True:
for char in range(len(letters)):
if board[row][col_speci] == ' ':
board[row][col_speci] = letters[char]
col_speci += 1
else:
board[row][col_speci + 1] = letters[char]
col_speci += 1
# get a list of str from column col
ori_col_word = utilsb.get_vertical_axis(copy_ori_board, col)
new_col_word = utilsb.get_vertical_axis(board, col)
# compare words in the list of list of str in each row and col, add word to an empty str
# if word isn't in the original board (before adding letters), and add the str to a new list;
# return the list
new_board_list = []
new_str = ''
if new_col_word != ori_col_word:
for c in range(len(ori_col_word)):
if utilsb.find_word(new_col_word, c) != utilsb.find_word(ori_col_word, c):
if utilsb.find_word(new_col_word, c) not in new_str:
if len(utilsb.find_word(new_col_word, c)) > 1:
new_str += utilsb.find_word(new_col_word, c)
new_board_list.append(new_str)
for w in range(len(board)):
if board[w] == copy_ori_board[w]:
continue
else:
for el in range(len(board[w])):
if len(utilsb.find_word(board[w], el)) > 1:
if utilsb.find_word(board[w], el) != utilsb.find_word(copy_ori_board[w], el):
if utilsb.find_word(board[w], el) not in new_board_list:
new_board_list.append(utilsb.find_word(board[w], el))
return new_board_list
# takes as input a list representing the board, a dictionary representing the player's rack,
# a string representing the letters the player wants to place, two integers representing the row and column
# number (respectively) of the starting square on the board, and a string representing the direction (either
# 'down' or 'right'; if direction isn't 'down' or 'right', return an empty string (terminate right away).
# Othwerise, checks if this is a valid move:
# Is there enough space on the board to place those letters? Does the player actually have those letters on the
# rack? If so, then the letters are placed on the board, and a list of words created by performing the move
# is returned; Otherwise, if letters don't fit on the board, function raises an IndexError
# If they fit but player doesn't have those letters on their rack, function raises a ValueError;
# if an Error is raised, neither the board nor rack is modified; Otherwise, letters are removed from the rack
# and placed on the board.
def make_a_move(board, rack, letters, row, col, direction):
""" (list, dict, str, int, int, str) -> list
    If the string indicating direction isn't 'down' or 'right', returns an empty list. If there's enough
space on the board to place the letters, and player actually has those letters on the rack, then
letters are placed on the board and a list of words created by performing the move is returned.
    Otherwise, if the letters don't fit on the board, function raises an IndexError. If they fit but
player doesn't have those letters on their rack, function raises a ValueError; if an Error is raised,
neither the board nor rack is modified; Otherwise, letters are removed from the rack and placed on the board.
>>> b = [['c','a','t',' '], [' ',' ',' ',' '], [' ',' ',' ',' '], [' ',' ',' ',' '], [' ',' ',' ',' ']]
>>> r = {'a':3, 't':2, 'c':1, 'r':1, 'i':1, 'n':1}
>>> make_a_move(b, r, 'rain', 1, 2, 'down')
['train']
>>> r == {'a':2, 't':2, 'c':1}
True
>>> words = make_a_move(b, r, 'mt', 2, 1, 'right')
Traceback (most recent call last):
    ValueError: You do not have those letters on your rack!
>>> r == {'a':2, 't':2, 'c':1}
True
>>> make_a_move(b, r, 'cat', 3, 1, 'down')
Traceback (most recent call last):
IndexError: There is not enough space to place the specified letters on the board.
>>> r == {'a':2, 't':2, 'c':1}
True
>>> make_a_move(b, r, 'cat', 2, 1, 'down')
['cat', 'ca', 'ai', 'tn']
>>> r == {'a':1, 't':1}
True
"""
board_speci = []
dict_letters_occur = utilsd.count_occurrences(letters)
if direction not in ['down', 'right']:
return board_speci
row_speci = row
if direction == 'down':
while row_speci < len(board):
board_speci += board[row_speci][col]
row_speci += 1
if utilsb.fit_on_board(board_speci, letters, 0) == True:
if utilsd.subtract_dicts(rack, dict_letters_occur) == True:
made_move = place_tiles(board, letters, row, col, direction)
return made_move
else:
raise ValueError('You do not have those letters on your rack!')
else:
raise IndexError('There is not enough space to place the specified letters on the board.')
col_speci = col
if direction == 'right':
while col_speci < len(board[0]):
board_speci += board[row][col_speci]
col_speci += 1
if utilsb.fit_on_board(board_speci, letters, 0) == True:
if utilsd.subtract_dicts(rack, dict_letters_occur) == True:
made_move = place_tiles(board, letters, row, col, direction)
return made_move
else:
raise ValueError('You do not have those letters on your rack!')
else:
raise IndexError('There is not enough space to place the specified letters on the board.')
|
jinyang10/Scrabble
|
scrabble_utils.py
|
scrabble_utils.py
|
py
| 15,287 |
python
|
en
|
code
| 0 |
github-code
|
6
|
2108921101
|
import sys
from requests import get
from io import BytesIO
import sqlite3
from PIL import Image
from data.PYTHON_files.main import Ui_MainWindow
from data.PYTHON_files.load_image import Ui_Form
from data.PYTHON_files.description import Ui_Form_Desk
from data.PYTHON_files.effects import *
from PyQt5.QtCore import Qt
from PyQt5 import QtCore
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.QtWidgets import QFileDialog
WINDOW_SIZE = (1238, 859)
IMAGE_FRAME_SIZE = (895, 775)
class LoadImage(QMainWindow, Ui_Form):
def __init__(self, parent):
super(LoadImage, self).__init__(parent)
self.setupUi(self)
self.initUI()
def initUI(self):
WINDOW_SIZE = (601, 323)
self.setFixedSize(*WINDOW_SIZE)
class ShowDescription(QMainWindow, Ui_Form_Desk):
def __init__(self, parent):
super(ShowDescription, self).__init__(parent)
self.setupUi(self)
self.initUI()
def initUI(self):
WINDOW_SIZE = (770, 640)
self.setFixedSize(*WINDOW_SIZE)
self.text = ""
con = sqlite3.connect("data/DB_files/filters_db.sqlite")
cur = con.cursor()
result = cur.execute("SELECT * FROM filters").fetchall()
for elem in result:
self.text += "{} - {}\n\n".format(*elem)
con.close()
def show(self, text=None):
self.textBrowser.clear()
if not text:
text = self.text
self.textBrowser.append(text)
super().show()
class MainWindow(QMainWindow, Ui_MainWindow):
def __init__(self):
super().__init__()
self.setupUi(self)
self.initUI()
def initUI(self):
self.open_url_form = LoadImage(self)
self.show_description = ShowDescription(self)
self.setFixedSize(*WINDOW_SIZE)
self.history = []
self.filters_history = []
self.sliders_history = []
self.image_index = 0
self.image_PIL = None
self.scaled_size = [None, None]
# open description form
self.actionfilters_information.triggered.connect(
lambda: self.show_description.show()
)
# open filters history form
self.actionfilters_history.triggered.connect(self.show_filters_history)
# open appliacation info form
self.actionapplication_info.triggered.connect(self.show_application_info)
# file open/save
self.actionopen.triggered.connect(self.load_image)
self.actionsave.triggered.connect(self.save_image)
self.actionopen_from_URL.triggered.connect(self.load_from_url_form)
self.open_url_form.load_url_btn.clicked.connect(self.load_from_url)
# theme
self.action_darktheme.triggered.connect(self.set_dark_theme)
self.action_lighttheme.triggered.connect(self.set_light_theme)
# connecting preset buttons
self.btns_preset = [
self.btn_preset_1,
self.btn_preset_2,
self.btn_preset_3,
self.btn_preset_4,
self.btn_preset_5,
self.btn_preset_6,
self.btn_preset_7,
self.btn_preset_8,
self.btn_preset_9,
self.btn_preset_10,
]
for btn in self.btns_preset:
btn.clicked.connect(self.set_presets)
# connecting special effects buttons
self.btn_box_blur.clicked.connect(self.set_box_blur)
self.gaussian_blur.clicked.connect(self.set_gaussian_blur)
self.btn_unsharp_mask.clicked.connect(self.set_unsharp_mask)
self.btn_stereo.clicked.connect(self.set_stereo)
self.btn_square_effect.clicked.connect(self.set_square_effect)
self.btn_black_and_white.clicked.connect(self.set_black_and_white)
self.btn_negative.clicked.connect(self.set_negative)
# connecting back/reset buttons
        self.btn_reset.clicked.connect(self.reset_image)
self.btn_back.clicked.connect(self.previous_image)
# connecting sliders
self.red_slider.valueChanged.connect(
self.change_channels(self.red_slider, (1, 0, 0))
)
self.green_slider.valueChanged.connect(
self.change_channels(self.green_slider, (0, 1, 0))
)
self.blue_slider.valueChanged.connect(
self.change_channels(self.blue_slider, (0, 0, 1))
)
self.red_slider.sliderReleased.connect(
self.apply_channel_changes(self.red_slider, (1, 0, 0))
)
self.green_slider.sliderReleased.connect(
self.apply_channel_changes(self.green_slider, (0, 1, 0))
)
self.blue_slider.sliderReleased.connect(
self.apply_channel_changes(self.blue_slider, (0, 0, 1))
)
self.rgb_sliders = [self.red_slider, self.green_slider, self.blue_slider]
self.alpha_slider.valueChanged.connect(self.change_transparency)
self.statusbar = self.statusBar()
# load theme from .txt file
theme = self.load_theme()
if theme == "dark":
self.set_dark_theme()
else:
self.set_light_theme()
def show_application_info(self):
with open("data/TXT_files/info.txt", "r", encoding="utf-8") as file1:
data = file1.read().strip()
self.show_description.textBrowser.clear()
self.show_description.show(data)
def set_filter_history(self):
con = sqlite3.connect("data/DB_files/history_db.sqlite")
cur = con.cursor()
text = " ".join(self.filters_history)
text_length = len(
" ".join([i[0] for i in cur.execute("SELECT * FROM history").fetchall()])
+ text
)
if text:
cur.execute(
"""
INSERT INTO history(effects) VALUES(?)
""",
(text,),
).fetchall()
if text_length > 600:
result = cur.execute(
"""
SELECT effects FROM history
"""
).fetchone()[0]
cur.execute(
"""
DELETE FROM history
WHERE effects = ?
""",
(result,),
).fetchall()
con.commit()
con.close()
def show_filters_history(self):
con = sqlite3.connect("data/DB_files/history_db.sqlite")
cur = con.cursor()
result = cur.execute("SELECT * FROM history").fetchall()
text = ""
self.show_description.textBrowser.clear()
for i, line in enumerate(result, start=1):
text += f"{i}) {line[0]}\n\n"
self.show_description.show(text)
con.close()
def keyPressEvent(self, event):
mod = int(event.modifiers())
key = event.key()
if mod == Qt.ControlModifier:
if key == Qt.Key_O:
self.load_image()
if key == Qt.Key_S:
self.save_image()
def change_transparency(self):
if self.check_if_image_opened(): # check for image opened
return
self.image_PIL = transparensy(self.image_PIL, self.sender().value())
self.update_image()
def change_channels(self, slider, chan):
def inner():
if self.check_if_image_opened(): # check for image opened
return
val = slider.value()
rgb = tuple(map(lambda n: val if n == 1 else 50, chan))
self.image.setPixmap(
convert_to_qt(channels(self.image_PIL.resize(self.scaled_size), rgb))
)
return inner
def apply_channel_changes(self, slider, chan):
def inner():
if self.check_if_image_opened(): # check for image opened
return
val = slider.value()
rgb = tuple(map(lambda n: val if n == 1 else 50, chan))
self.image_PIL = channels(self.image_PIL, rgb)
self.update_image()
self.alpha_slider.setValue(
255
) # we cannot change RGB-channel with changed alpha channel
return inner
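    # Editor's note: change_channels and apply_channel_changes are closure factories --
    # each call returns an `inner` function bound to one slider and one RGB mask, so a
    # single implementation can be connected to all three colour-channel Qt signals.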
def set_box_blur(self):
if self.check_if_image_opened(): # check for image opened
return
        radius = self.spin_box_blur_raduis.value()
        self.image_PIL = box_blur(self.image_PIL, radius)
self.update_image()
self.filters_history.append("Box Blur")
def set_gaussian_blur(self):
if self.check_if_image_opened(): # check for image opened
return
        radius = self.spin_gaussian_blur_raduis.value()
        self.image_PIL = gaussian_blur(self.image_PIL, radius)
self.update_image()
self.filters_history.append("Gaussian Blur")
def set_unsharp_mask(self):
if self.check_if_image_opened(): # check for image opened
return
        radius = self.unsharp_mask_raduis_spin.value()
        percent = self.unsharp_mask_percent_spin.value()
        threshold = self.unsharp_mask_threshold_spin.value()
        self.image_PIL = unsharp_mask(self.image_PIL, radius, percent, threshold)
self.update_image()
self.filters_history.append("Unsharp Mask")
def set_stereo(self):
if self.check_if_image_opened(): # check for image opened
return
delta = self.stereo_delta_spin.value()
self.image_PIL = stereo_effect(self.image_PIL, delta)
self.update_image()
self.filters_history.append("Stereo")
def set_square_effect(self):
if self.check_if_image_opened(): # check for image opened
return
self.alpha_slider.setValue(255)
area = self.square_effect_area_spin.value()
self.image_PIL = lightest_pixel_effect(self.image_PIL, area)
self.update_image()
self.filters_history.append("Square Effect")
def set_black_and_white(self):
if self.check_if_image_opened(): # check for image opened
return
self.image_PIL = black_and_white_effect(self.image_PIL)
self.update_image()
self.filters_history.append("Black And White")
def set_negative(self):
if self.check_if_image_opened(): # check for image opened
return
self.image_PIL = negative_effect(self.image_PIL)
self.update_image()
self.filters_history.append("Negative")
def set_presets(self):
if self.check_if_image_opened(): # check for image opened
return
self.image_PIL = preset_filters(self.image_PIL, filters[self.sender().text()])
self.update_image()
self.filters_history.append(self.sender().text())
def convert_image(self, image):
self.filters_history = []
self.history = []
self.sliders_history = []
        # shrink the image if it does not fit inside the frame
width, height = image.size
scale1 = scale2 = 1
if width > IMAGE_FRAME_SIZE[0]:
scale1 = IMAGE_FRAME_SIZE[0] / width
if height > IMAGE_FRAME_SIZE[1]:
scale2 = IMAGE_FRAME_SIZE[1] / height
scale = scale1 if scale1 < scale2 else scale2
self.scaled_size = (int(width * scale), int(height * scale))
# self.image_PIL = self.image_PIL.resize(self.scaled_size)
self.origin_image = self.image_PIL.copy()
self.history.append(self.origin_image)
# ________________________________________
self.image.move(0, 0)
self.image.setAlignment(QtCore.Qt.AlignCenter)
self.update_image()
        self.reset_image()
def load_image(self):
filename = QFileDialog.getOpenFileName(
self,
"Choose photo",
"",
"Pictures (*.png *.jpg);; Pictures (*.png);; Pictures (*.jpg)",
)[0].strip()
if not filename:
return
# filename = "/home/anchous/Pictures/waves.png"
self.image_PIL = Image.open(filename)
self.convert_image(self.image_PIL)
def save_image(self):
filename = QFileDialog.getSaveFileName(
self, "Save photo", "", "Pictures (*.png);; Pictures (*.jpg)"
)[0].strip()
if not filename:
self.statusbar.showMessage("Error: No filename", 5000)
return
if not self.image_PIL:
self.statusbar.showMessage("Error: No image", 5000)
return
# if the filename is incorrect, request it again
try:
self.image_PIL.save(filename)
except Exception:
self.statusbar.showMessage("Error: Incorrect filename", 5000)
filename = QFileDialog.getSaveFileName(
self,
"Save photo",
f"{filename.split('.')[0]}.png",
"Pictures (*.png);; Pictures (*.jpg)",
)[0].strip()
self.image_PIL.save(filename)
self.set_filter_history()
def load_from_url_form(self):
self.open_url_form.show()
def load_from_url(self):
try:
url = self.open_url_form.url_text.toPlainText()
response = get(url)
self.image_PIL = Image.open(BytesIO(response.content))
self.convert_image(self.image_PIL)
self.open_url_form.url_text.setPlainText("")
self.open_url_form.close()
except Exception:
self.statusbar.showMessage("Error: Incorrect url", 5000)
return
def update_image(self):
self.history.append(self.image_PIL)
self.sliders_history.append(
(
self.alpha_slider.value(),
self.red_slider.value(),
self.green_slider.value(),
self.blue_slider.value(),
)
)
self.image_index += 1
self.image.setPixmap(convert_to_qt(self.image_PIL.resize(self.scaled_size)))
def previous_image(self):
if self.image_index > 0:
del self.history[self.image_index :]
del self.sliders_history[self.image_index :]
self.image_index -= 1
self.image_PIL = self.history[self.image_index]
self.alpha_slider.setValue(self.sliders_history[-1][0])
self.red_slider.setValue(self.sliders_history[-1][1])
self.green_slider.setValue(self.sliders_history[-1][2])
self.blue_slider.setValue(self.sliders_history[-1][3])
# updating image without history logging
self.image.setPixmap(convert_to_qt(self.image_PIL.resize(self.scaled_size)))
    def reset_image(self):
if self.check_if_image_opened(): # check for image opened
return
self.image_PIL = self.origin_image.copy()
for sl in self.rgb_sliders:
sl.setValue(50)
self.alpha_slider.setValue(255)
self.update_image()
self.image_index = 0
self.history = [self.image_PIL]
self.sliders_history = [(255, 50, 50, 50)]
def set_dark_theme(self):
self.setStyleSheet("background-color: #353535;\ncolor: #dddddd;")
self.frame.setStyleSheet("background-color: #282828;")
self.set_theme("dark")
def set_light_theme(self):
self.setStyleSheet("background-color: #dddddd;\ncolor: #202020;")
self.frame.setStyleSheet("background-color: #cccccc;")
self.set_theme("light")
def check_if_image_opened(self):
try:
return bool(self.image_PIL) is False # check if image opened
except AttributeError:
return True
def set_theme(self, theme):
with open("data/TXT_files/theme.txt", "w", encoding="UTF-*") as file1:
file1.write(theme)
def load_theme(self):
with open("data/TXT_files/theme.txt", "r", encoding="UTF-8") as file1:
theme = file1.read().strip()
return theme
def except_hook(cls, exception, traceback):
sys.__excepthook__(cls, exception, traceback)
if __name__ == "__main__":
app = QApplication(sys.argv)
ex = MainWindow()
ex.show()
sys.excepthook = except_hook
sys.exit(app.exec_())
|
Programmer-Anchous/Effects-program
|
run.py
|
run.py
|
py
| 16,243 |
python
|
en
|
code
| 0 |
github-code
|
6
|
32124422720
|
from pages.courses.register_courses_page import RegisterCoursesPage
from utilities.teststatus import TestStatus
import unittest
import pytest
import time
@pytest.mark.usefixtures("oneTimeSetUp", "setUp")
class RegisterCoursesTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def classSetup(self, oneTimeSetUp):
self.courses = RegisterCoursesPage(self.driver)
self.ts = TestStatus(self.driver)
@pytest.mark.run(order=1)
def test_invalidEnrollment(self):
"""
1. Call required methods from the page class to perform the test
2. Enter course name
3. Select course
4. Enroll in course
5. Verify error message
6. Test Status.markFinal()
"""
self.courses.enterCourseName("JavaScript")
self.courses.selectCourseToEnroll("JavaScript for beginners")
self.courses.enrollCourse("5241810401821657", "1123", "123")
time.sleep(5)
result2 = self.courses.verifyEnrollFailed()
        self.ts.markFinal("test_invalidEnrollment", result2, "Enrollment failed...!")
|
badekarganesh04/selenium-python-framework
|
tests/courses/register_courses_tests.py
|
register_courses_tests.py
|
py
| 1,090 |
python
|
en
|
code
| 0 |
github-code
|
6
|
15387648798
|
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs  # the samples_generator submodule was removed in scikit-learn 0.24
import numpy as np
def sigmoid(x):
return 1.0 / (1 + np.exp(-x))
def dataset():
(X, y) = make_blobs(n_samples=250, n_features=2, centers=2,
cluster_std=1.05, random_state=20)
X = np.c_[np.ones((X.shape[0])), X]
return X,y
def initialize_weights(p):
return np.random.uniform(size = p)
def make_predictions(X,W,link):
return link(X.dot(W))
def cross_entropy(y,preds):
y = np.array([y])
return -np.sum(y*np.log(preds)+(1-y)*np.log(1-preds))/y.shape[0]
def compute_gradient(preds,X,y,cost=cross_entropy,link = sigmoid):
y = np.array([y])
if cost == cross_entropy and link == sigmoid:
return X.T.dot(preds-y)/y.shape[0]
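# Editor's note (sketch of the math behind compute_gradient): with the sigmoid link
# p = sigmoid(Xw) and cross-entropy L = -(1/n) * sum(y*log(p) + (1-y)*log(1-p)),
# the derivative simplifies to dL/dw = (1/n) * X^T (p - y), because the sigmoid's
# derivative p*(1-p) cancels against the denominators of the log terms.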
def sgd(X,y,cost = cross_entropy,link=sigmoid,
alpha = 0.01,eps = 0.0001, maxit = 1000):
W = initialize_weights(X.shape[1])
n = X.shape[0]
ind = np.random.permutation(np.arange(n))
X = X[ind]
y = y[ind]
i = 0
losses = []
preds = make_predictions(X[i:i+1,:],W,link)
losses.append(cost(y[i],preds))
it = 0
while True:
it += 1
print("Iteration n.{}".format(it))
gradient = compute_gradient(preds,X[i:i+1,:],y[i],cost,link)
W -= alpha*gradient
i = i + 1
if i == n:
ind = np.random.permutation(np.arange(n))
X = X[ind]
y = y[ind]
i = 0
preds = make_predictions(X[i:i+1,:],W,link)
l_new = cost(y[i],preds)
losses.append(l_new)
if it == maxit:
break
"""if it > 250 and abs(l_new-losses[it-250])<eps:
break"""
return W,losses
#generate the data
X,y = dataset()
# plot the points
plt.scatter(X[:, 1], X[:, 2], marker="o", c=y)
theta,losses = sgd(X,y,cost = cross_entropy,link = sigmoid,
alpha = 0.01,eps = 0.0001, maxit = 1000)
Y = (-theta[0] - (theta[1] * X)) / theta[2]
plt.plot(X, Y, "r-")
# construct a figure that plots the loss over time
fig = plt.figure()
plt.plot(np.arange(0, len(losses)), losses)
fig.suptitle("Training Loss")
plt.xlabel("Epoch #")
plt.ylabel("Loss")
plt.show()
|
nickruggeri/Machine_Learning
|
AdaGrad, ADAM and AMSGrad/Codes/my_sgd.py
|
my_sgd.py
|
py
| 2,198 |
python
|
en
|
code
| 2 |
github-code
|
6
|
13485135608
|
#!/usr/bin/python3
def search_replace(my_list, search, replace):
    newList = [replace if item == search else item for item in my_list]
    return newList
|
phiweCode/alx-higher_level_programming
|
0x04-python-more_data_structures/1-search_replace.py
|
1-search_replace.py
|
py
| 274 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72465379387
|
from django import forms
from .models import Meme
class MemeForm(forms.ModelForm):
    class Meta:
model = Meme
fields = ('description','category','meme_img')
widgets = {
'description': forms.TextInput(attrs={
'class': 'field',
'placeholder': 'Enter Description'
}),
'category': forms.Select(choices=model.CATEGORY_CHOICES, attrs={
'class': 'choice-control',
'placeholder': 'Choose category',
}),
'meme_img': forms.FileInput(attrs={
'class': 'upload-control',
'placeholder': 'Choose file',
})
}
|
omroczkowski/h8gag
|
meme/forms.py
|
forms.py
|
py
| 701 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12657002972
|
# level: medium
# solution: backtracking
class Solution(object):
def combinationSum(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
candidates.sort()
# print candidates
res = []
self.backtracking(res, target, 0, [], candidates)
return res
def backtracking(self, res, target, start, combination, candidates):
if target < 0: return
if target == 0:
res.append(combination)
return
for i in range(start, len(candidates)):
# print combination, target-candidate
self.backtracking(res, target - candidates[i], i, combination + [candidates[i]], candidates)
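# Editor's usage sketch: each candidate may be reused without limit, which is why the
# recursive call passes `i` (not `i + 1`) as the next start index.
if __name__ == "__main__":
    print(Solution().combinationSum([2, 3, 6, 7], 7))  # expected [[2, 2, 3], [7]]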
|
PouringRain/leetcode
|
39.py
|
39.py
|
py
| 764 |
python
|
en
|
code
| 1 |
github-code
|
6
|
71611302909
|
import json
import open3d as o3d
import numpy as np
import os
import trimesh
import zipfile
from tqdm import tqdm
import matplotlib.pyplot as plt
plt.style.use('bmh')
default_color = [0,0.5,1]
cube = np.array([
[0,0,0], [1,0,0], [1,1,0], [0,1,0],
[0,0,1], [1,0,1], [1,1,1], [0,1,1],
])
'''plt figure'''
def plt_show_save(data, title, save_path=None, xname='', bins=50):
plt.cla()
plt.figure(figsize=(12,9))
if type(data) == dict:
plt.bar(data.keys(), data.values())
# plt.xticks(rotation=90)
else:
plt.hist(data, bins=bins)
plt.title(title)
plt.ylabel('value')
plt.xlabel(xname)
if save_path is not None:
plt.savefig(save_path)
else:
plt.show()
def get_pcd(pc, color=default_color):
pc = np.array(pc)
pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(pc)
    pcd.paint_uniform_color(color)  # the default colouring is a rainbow gradient, so apply one explicit colour
return pcd
def get_o3d_FOR(origin=[0, 0, 0],size=0.1):
mesh_frame = o3d.geometry.TriangleMesh.create_coordinate_frame(size=size)
mesh_frame.translate(origin)
return(mesh_frame)
def show_pcds(pcds, wname='Open3D', FOR=0.1):
if FOR:
pcds.append(get_o3d_FOR(size = FOR))
o3d.visualization.draw_geometries(pcds, width=800, height=800, window_name=wname)
def csv2box(csv_path):
obb_info = np.loadtxt(open(csv_path, 'r'),delimiter = ",") # (5,4)
center = obb_info[0,:3]
dirs = 0.5 * (obb_info[2:,:3] * obb_info[2:,-1].reshape(3,1) )
val = cube*2 - 1
vec = np.matmul(val, dirs) # (8,3)@(3,3)
corner = center.reshape(1,3) + vec
return corner,dirs
def add_thickness(pc, direction, scale):
direction = direction / np.linalg.norm(direction)
noise = np.random.normal(0, scale, (pc.shape[0],1))
return pc + noise * direction.reshape(1,3)
def PCA(data, sort=True):
average_data = np.mean(data,axis=0)
decentration_matrix = data - average_data
H = np.dot(decentration_matrix.T,decentration_matrix)
eigenvectors,eigenvalues,eigenvectors_T = np.linalg.svd(H)
if sort:
sort = eigenvalues.argsort()[::-1]
eigenvalues = eigenvalues[sort]
eigenvectors = eigenvectors[:, sort]
return eigenvalues, eigenvectors
def box_from_pc(pc, color=default_color, aabb=False, return_corner=True):
pcd = get_pcd(pc)
box = pcd.get_axis_aligned_bounding_box() if aabb else \
pcd.get_oriented_bounding_box()
if return_corner:
corner = np.array(box.get_box_points())
return corner
else:
box.color = color
return box
def box_from_corner(corner, color=default_color, aabb=False):
corner = np.asarray(corner)
box = o3d.geometry.AxisAlignedBoundingBox() if aabb else \
o3d.geometry.OrientedBoundingBox()
box = box.create_from_points(o3d.utility.Vector3dVector(corner))
box.color = color
return box
def box2cen_dir(box:np.ndarray):
centers = np.zeros((6,3))
sorted_box = sort_pts(box)
v1 = sorted_box[1]-sorted_box[0]
v2 = sorted_box[3]-sorted_box[0]
cos = v1@v2 / (np.linalg.norm(v1) * np.linalg.norm(v2))
if abs(cos) < 0.001:
tmp = sorted_box[3].copy()
sorted_box[3] =sorted_box[4]
sorted_box[4] = tmp
# 0246, 0145
centers[0] = sorted_box[:4].mean(axis=0)
centers[1] = sorted_box[[0,2,4,6]].mean(axis=0)
centers[2] = sorted_box[[0,1,4,5]].mean(axis=0)
centers[3:] = 2 * box.mean(0).reshape(1,3) - centers[:3]
return centers
def box2dir(box:np.ndarray):
sorted_box = np.array(sorted(box, key = lambda x:x[0]) )
dirs3 = sorted_box[1:4] - sorted_box[0].reshape(1,-1)
cos = cosine(dirs3, dirs3).flatten()
idx = np.argmin(cos)
if cos[idx]<1e-3:
d1 = idx//3
d2 = idx%3
left_dir = np.cross(dirs3[d1], dirs3[d2])
return np.vstack([dirs3[d1], dirs3[d2], left_dir])
else:
return None
def aabb_dirs(pc):
mins = pc.min(0)
maxs = pc.max(0)
dirs = np.eye(3,3) * (maxs-mins).reshape(1,3) / 2
center = (mins + maxs) / 2
corners = center.reshape(1,3) + (cube*2-1)@dirs
return corners, dirs
def obb_2dirs(pc, axis, return_corner=True):
else_axis = [0,1,2]
else_axis.pop(axis)
sub_pc = pc[:,else_axis]
cov_pts = np.cov(sub_pc, y=None, rowvar=False, bias=True)
v, vect = np.linalg.eig(cov_pts)
tvect = vect.T
rpts = np.dot(sub_pc, np.linalg.inv(tvect))
mina = np.min(rpts, 0)
maxa = np.max(rpts, 0)
diff = (maxa - mina)*0.5
center = mina + diff
corners = center.reshape(-1,2) + np.array([
[-1,-1], [1,-1], [1,1], [-1,1]
]) * diff.reshape(-1,2)
corners = np.dot(corners, tvect) # (4,2)
axis_pc = pc[:, axis]
axis_min,axis_max = axis_pc.min(), axis_pc.max()
cor1 = np.insert(corners, axis, axis_min, axis=1)
cor2 = np.insert(corners, axis, axis_max, axis=1)
corners = np.vstack([cor1,cor2])
center = corners.mean(0)
dirs = (corners[[1,3,4]] - corners[0].reshape(1,3))/2
if return_corner:
return corners, dirs
else:
return center, dirs
def obb_adjust(pc:np.ndarray, fix_dir:np.array, ori_dir:np.array):
'''ori_dir should be [0,0,1] or [0,1,0] or [1,0,0]'''
axis = np.argmax(ori_dir)
fix_dir = fix_dir / np.linalg.norm(fix_dir)
ori_dir = ori_dir / np.linalg.norm(ori_dir)
cro = np.cross(ori_dir, fix_dir)
cos = ori_dir@fix_dir
if abs(cos)>0.99:
return obb_2dirs(pc, axis, True)
vx = np.array([
[0, -cro[2], cro[1]],
[cro[2], 0, -cro[0]],
[-cro[1], cro[0], 0 ]
])
rot_w = np.eye(3,3) + vx + np.matmul(vx,vx) / (1+cos)
rot_verse = np.linalg.inv(rot_w)
rot_pc = np.matmul(pc, rot_verse.T)
center, dirs = obb_2dirs(rot_pc, axis, False)
# dirs[-1][:2] = 0
# dirs[-1,-1] = rot_pc[:,axis].max() - rot_pc[:,axis].min()
cen = center.reshape(-1,3)
dirs = np.matmul(dirs, rot_w.T)
box = (cube*2 - 1)@dirs + cen@rot_w.T
return box, dirs
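# Editor's note on obb_adjust above: rot_w is Rodrigues' rotation formula in matrix
# form, R = I + [v]_x + [v]_x^2 / (1 + cos(theta)), where v = ori_dir x fix_dir and
# [v]_x is its skew-symmetric cross-product matrix; R rotates ori_dir onto fix_dir,
# the cloud is rotated into that frame, boxed axis-aligned, and the box rotated back.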
def pts2pts_dis(pts1,pts2):
diff = pts1.reshape((-1, 1, 3)) - pts2.reshape((1, -1, 3))
distance = (diff**2).reshape((-1,3)).sum(axis=-1)
return distance
def sort_pts(box):
uniques = []
for i in range(3):
uni = np.unique(box[:,i]).shape[0]
uniques.append(uni<8) # and uni//2==0
if sum(uniques)==0: uniques[0] = True
sorted_box = np.array(sorted(box, key = lambda x:x[uniques].sum()))
return sorted_box
def pc2mesh(pts):
pts = np.asarray(pts)
pcd = get_pcd(pts)
pcd.estimate_normals()
distances = pcd.compute_nearest_neighbor_distance()
avg_dist = np.mean(distances)
radius = 1.5 * avg_dist
mesh = o3d.geometry.TriangleMesh.create_from_point_cloud_ball_pivoting(
pcd, o3d.utility.DoubleVector([radius, radius * 2]), )
# return np.asarray(mesh.triangles)
return mesh
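# Editor's note on pc2mesh above: the ball-pivoting radius is a heuristic, 1.5x the
# mean nearest-neighbour distance, so the pivoting ball can bridge typical point
# spacing; passing the pair (radius, 2*radius) lets a larger ball fill sparser patches.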
def pc_from_mesh(obj_path, npoints):
mesh = o3d.io.read_triangle_mesh(obj_path)
pts = mesh.sample_points_uniformly(number_of_points=npoints)
return np.array(pts.points)
def load_mesh(obj_path):
return trimesh.load(obj_path, 'obj', force='mesh')
def merge_mesh(meshs):
merged_mesh = trimesh.util.concatenate(meshs)
return merged_mesh
def write_mesh(mesh, path, normal=False, color=False):
o3d.io.write_triangle_mesh(
path, mesh, write_vertex_normals=normal, write_vertex_colors=color
)
def gen_meshs(obj_folder, hier_tree, npoints=1024):
all_node_mesh = {}
for node in hier_tree:
id_ = node['id']
if 'children' in node.keys():
sub_mesh = gen_meshs(obj_folder, node['children'], npoints)
all_node_mesh = {**all_node_mesh, **sub_mesh}
child_mesh = [sub_mesh[me['id']] for me in node['children']]
node_mesh = merge_mesh(child_mesh)
all_node_mesh[id_] = node_mesh
else:
meshs = []
for obj_name in node['objs']:
obj_path = os.path.join(obj_folder, obj_name+'.obj')
mesh = load_mesh(obj_path)
meshs.append(mesh)
if len(meshs)>1:
meshs = merge_mesh(meshs)
else:
meshs = meshs[0]
all_node_mesh[id_] = meshs
return all_node_mesh
def get_leaves(tree, only=None, flatten=False, pop_child=True):
leaf_parts = []
for node in tree:
data = node[only] if only is not None else node
if 'children' not in node.keys():
leaf_parts.append(data)
else:
node_list = get_leaves(node['children'], only, flatten) # [{...},] with parent+children idx
leaf_parts.extend(node_list)
if flatten:
if only == None:
data = data.copy()
if pop_child:data.pop('children')
leaf_parts.append(data)
return leaf_parts
def hier2graphic(hier_tree, parent_id=-1, depth=0):
all_nodes = {}
for node in hier_tree:
renode = {
'name': node['name'],
'objs': node['objs'] if 'objs' in node.keys() else [],
'parent': parent_id,
'depth': depth,
'box': node['box'] if 'box' in node.keys() else [],
'brother':[],
'children_id': [],
'leaves': get_leaves([node], 'id'),
}
if 'children' in node.keys():
children_nodes = hier2graphic(node['children'], node['id'], depth+1)
all_nodes = {**all_nodes, **children_nodes}
renode['children_id'] = [i['id'] for i in node['children']]
all_nodes[node['id']] = renode
for child in renode['children_id']:
all_nodes[child]['brother'] = renode['children_id'][:]
all_nodes[child]['brother'].remove(child)
return all_nodes
def update_mopara(hash_hier, ids=[0]):
main_child = ids[:]
for key in ids:
if hash_hier[key]['children_id'] != []:
tree, mochild = update_mopara(hash_hier, hash_hier[key]['children_id'] )
hash_hier = {**hash_hier, **tree}
mopara = {'jointData':{}, 'joint':'', 'motype':''}
node = hash_hier[mochild]
if 'ref' in node.keys() and key!=0:
mopara['jointData'] = node['jointData']
mopara['joint'] = node['joint']
if 'motype' in node.keys(): mopara['motype'] = node['motype']
refs = node['ref'][:]
for idx,ref in enumerate(refs):
while(hash_hier[ref]['depth'] > hash_hier[key]['depth']):
ref = hash_hier[ref]['parent']
refs[idx] = ref
mopara['ref'] = list(set(refs))
for ref in mopara['ref']:
if ref in main_child and ref != key:
main_child.remove(key)
break
hash_hier[key] = {**hash_hier[key], **mopara}
elif 'ref' in hash_hier[key].keys():
refs = hash_hier[key]['ref']
for idx,ref in enumerate(refs):
while(hash_hier[ref]['depth'] > hash_hier[key]['depth']):
ref = hash_hier[ref]['parent']
hash_hier[key]['ref'][idx] = ref
hash_hier[key]['ref'] = list(set(hash_hier[key]['ref']))
for ref in hash_hier[key]['ref']:
if ref in main_child and ref != key:
main_child.remove(key)
break
return hash_hier, main_child[0]
def gen_graph(hier_tree, mobi):
'''
    Convert the hierarchy tree into a graph.
'''
hash_hier = hier2graphic(hier_tree)
for idx,node in enumerate(mobi):
# mobi[idx]['ids'] = [i['id'] for i in node['parts']]
mopara = {'jointData':{}, 'joint':'', 'motype':''}
if node['jointData'] != {}:
mopara['jointData'] = node['jointData']
mopara['joint'] = node['joint']
if 'motype' in node.keys(): mopara['motype'] = node['motype']
if node['parent'] != -1 and 'parts' in mobi[node['parent']].keys():
ref = [j['id'] for j in mobi[node['parent']]['parts']]
mopara['ref'] = ref
for sub_node in node['parts']:
sub_id = sub_node['id']
hash_hier[sub_id] = {**hash_hier[sub_id], **mopara}
graph, _ = update_mopara(hash_hier)
statics = {}
for key in graph.keys():
if 'ref' in graph[key].keys():
refs = graph[key]['ref'][:]
for ref in refs:
if graph[key]['parent'] != graph[ref]['parent'] or ref == key:
graph[key]['ref'].remove(ref)
if graph[key]['ref'] == []:
graph[key].pop('ref')
for key in graph.keys():
node = graph[key]
graph[key]['edges'] = {
'children':{},
'space':{}
}
for child in graph[key]['children_id']:
graph[key]['edges']['children'][child] = ''
brothers = graph[key]['brother'][:]
if 'ref' in graph[key].keys():
for bro in brothers:
if bro in graph[key]['ref']:
graph[key]['edges']['space'][bro] = 'motion'
else:
graph[key]['edges']['space'][bro] = 'none'
graph[key].pop('ref')
else:
for bro in brothers:
graph[key]['edges']['space'][bro] = 'none' if 'ref' in graph[bro].keys() else 'fixed'
return graph, statics
def ref_count(graph):
for key in graph.keys():
edges = graph[key]['edges']['space']
refs = [r for r in edges.keys() if edges[r]=='motion']
graph[key]['refs'] = refs
invalids, child_allref, expect = reduce_ref(graph)
return invalids, expect
def reduce_ref(graph, node_key='0'):
ref_child = set()
all_invalid = 0
flgs = 0
for child in graph[node_key]['children_id']:
child = str(child)
if graph[child]['refs'] == []:
ref_child.add(int(child))
if graph[child]['children_id'] != []:
invalid, flg, expect = reduce_ref(graph, child)
all_invalid += invalid if flg else invalid-1
flgs += 1-flg
children_allref = False
if len(ref_child)==0 and graph[node_key]['brother'] == []:
children_allref = True
elif len(ref_child) and ref_child == set(graph[node_key]['children_id']) \
and not flgs:
children_allref = True
all_invalid += len(ref_child)
# print('%s invalids:%d'%(node_key, all_invalid))
return all_invalid, children_allref, (flgs if flgs else 1)
'''direction, angle, pos, ...'''
def cosine(dirs, vec, abs=True):
vec = vec.reshape(-1,3)
vec = vec / np.linalg.norm(vec, axis=-1).reshape(-1,1) # (1-n, 3) -> (1-n,) or val
    mul_res = dirs @ vec.T  # (n_dirs, 3) @ (3, n_vec) -> (n_dirs, n_vec)
cos = mul_res / np.linalg.norm(dirs, axis=-1).reshape(-1,1)
if abs: cos = np.abs(cos)
return cos
def cross(dirs, vec, abs=False):
# vec = vec / np.linalg.norm(vec)
cro = np.cross(dirs, vec)
cro = cro / np.linalg.norm(cro, axis=-1)
if abs:
cro = np.abs(cro)
return cro
def motion_pos(direction, gt_pos, pos):
direction= direction / np.linalg.norm(direction)
cro = np.cross(pos - gt_pos.reshape(1,3), direction)
dis = np.abs(np.linalg.norm(cro, axis=-1))
min_idx = np.argmin(dis)
return min_idx, dis[min_idx]
def read_json(json_path):
return json.load(open(json_path, 'r'))
def get_boolseg(seg:np.ndarray, mov_idx):
mov_idx = np.array(mov_idx).reshape((-1,1)) # (n,1), and seg(1,N)
return ( seg.reshape((1,-1)) == mov_idx ).sum(axis=0) == 1.
if __name__ == '__main__':
pass
|
GengxinLiu/SWMP
|
Extern/tools/mobility_tool.py
|
mobility_tool.py
|
py
| 14,370 |
python
|
en
|
code
| 4 |
github-code
|
6
|