seq_id
stringlengths 7
11
| text
stringlengths 156
1.7M
| repo_name
stringlengths 7
125
| sub_path
stringlengths 4
132
| file_name
stringlengths 4
77
| file_ext
stringclasses 6
values | file_size_in_byte
int64 156
1.7M
| program_lang
stringclasses 1
value | lang
stringclasses 38
values | doc_type
stringclasses 1
value | stars
int64 0
24.2k
⌀ | dataset
stringclasses 1
value | pt
stringclasses 1
value |
---|---|---|---|---|---|---|---|---|---|---|---|---|
73357295548
|
from .MainDataToICS import MainDataToICS
from .WebJWC import WebJWC
import time
import os
from hashlib import md5
import random
import json
def getData(id, password):
    """Drive the JWC web client through the full scrape sequence.

    Starts the browser driver, logs in, fetches the page body and parses
    it, printing a TOPO* progress marker before each stage.
    :param id: Student account id.
    :param password: Account password.
    """
    session = WebJWC(id, password)
    # Each stage prints its marker, runs, then pauses one second.
    for marker, stage in (('TOPO1', session.runDriver),
                          ('TOPO2', session.loginIn),
                          ('TOPO3', session.getBody)):
        print(marker)
        stage()
        time.sleep(1)
    print('TOPO4')
    session.dataInBs4()
    print('TOPO4')  # the same marker is printed twice in the original flow
    session.close()
def makeIcs(id, year, month, day):
    """Generate the ICS file for a user and report the courses that failed.

    :param id: Student account id.
    :param year, month, day: Semester start date passed to the converter.
    :return: A human-readable report string listing the failed imports.
    """
    converter = MainDataToICS(id, year, month, day)
    failures = converter.makeIcs()
    # Render each failed entry as "key:value " lines, blank line between entries.
    pieces = []
    for entry in failures:
        for key, value in entry.items():
            pieces.append('%s:%s \n' % (key, value))
        pieces.append('\n')
    body = ''.join(pieces)
    return '导入失败数:%d\n' % len(failures) + '请手动导入以下课程:\n%s' % body
def makeApi(id):
    """Publish the user's ICS file under an unguessable, MD5-derived name.

    Looks up (or creates) a per-user random salt in user.json, hashes
    id+salt, and copies the user's ICS file to the api folder under the
    hashed name.
    :param id: Student account id (also the ICS file base name).
    :return: The hex digest used as the public file name.
    """
    with open('./CQUClassICS/res/jsonData/user.json', 'r', encoding='utf-8') as fp:
        SQ = json.load(fp)
    if id not in SQ[0].keys():
        # First request for this user: assign a random salt and persist it.
        SQ[0][id] = str(random.randint(1, 1 << 16))
        with open('./CQUClassICS/res/jsonData/user.json', 'w', encoding='utf-8') as fp:
            json.dump(SQ, fp, ensure_ascii=False)
    with open('./CQUClassICS/res/icsData/%s.ics' % id, 'rb') as fp:
        data = fp.read()
    md5v = md5()
    md5v.update((id + SQ[0][id]).encode('utf8'))
    ids = md5v.hexdigest()
    # BUG FIX: the original used open(...).write(data) and never closed the
    # handle; use a context manager so the file is flushed and closed.
    with open('./CQUClassICS/res/api/%s.ics' % ids, 'wb') as out:
        out.write(data)
    return ids
def test():
    """Debug helper: print absolute paths to check the working directory."""
    for target in ('Event.py', ''):
        print(os.path.abspath(target))
|
CQU-CSA/CQUScheduleCalendar
|
DjangoICS/CQUClassICS/src/MainICS.py
|
MainICS.py
|
py
| 1,500 |
python
|
en
|
code
| 0 |
github-code
|
6
|
3705027331
|
import bitstring
def shift_check(filename):
    """Try decoding *filename* at each of the 8 possible bit offsets.

    Skips the first three bytes, then for every bit offset 0..7 takes a
    200-byte window and prints it if it decodes as UTF-8, otherwise
    reports the offset as non-ASCII. Used to recover byte-aligned text
    from a bit-shifted capture.
    :param filename: Path of the binary capture file.
    """
    # FIX: close the file deterministically instead of open/close by hand.
    with open(filename, 'rb') as f:
        bits_array = bitstring.BitArray(bitstring.Bits(f))
    skip = 8 * 3  # skip the first three bytes of the capture
    for k in range(8):
        start = k + skip
        stop = start + 200 * 8  # a 200-byte window at this bit offset
        byte_data = bits_array[start:stop].bytes
        try:
            print("offset {}".format(k))
            print(byte_data.decode('utf-8'))
        # FIX: the bare except also swallowed KeyboardInterrupt/SystemExit;
        # only a failed decode should be reported here.
        except UnicodeDecodeError:
            print("Not ascii at offset {}".format(k))
# Script entry point: scan the capture file for readable text at each bit offset.
if __name__ == "__main__":
    shift_check("out.txt")
|
tj-oconnor/spaceheroes_ctf
|
forensics/forensics-rf-math/solve/shifty.py
|
shifty.py
|
py
| 583 |
python
|
en
|
code
| 13 |
github-code
|
6
|
41509638995
|
import math
def get_delta_color(c_from, c_to, step):
    """Return the per-channel increment for moving from *c_from* towards
    *c_to* in roughly *step* iterations.

    :param c_from: Starting (r, g, b) color.
    :param c_to: Target (r, g, b) color.
    :param step: Desired number of animation steps.
    :return: An (dr, dg, db) tuple of non-negative increments.
    """
    deltas = []
    for chan in range(3):
        # Absolute channel gap (ceil mirrors the original arithmetic).
        gap = math.ceil(c_from[chan] - c_to[chan]) \
            if c_from[chan] > c_to[chan] else math.ceil(c_to[chan] - c_from[chan])
        deltas.append(math.ceil(gap / step) if gap != 0 else 0)
    return deltas[0], deltas[1], deltas[2]
def get_correct_chanel(chanel):
    """Clamp a single color channel into the valid 0..255 range."""
    return max(0, min(255, chanel))
def get_correct_color(color):
    """Clamp each channel of *color* to 0..255.

    :param color: Sequence of (up to) three channel values.
    :return: A 3-tuple; missing channels default to 0.
    """
    clamped = [0, 0, 0]
    for index, channel in enumerate(color):
        # Inline clamp (same rule as get_correct_chanel).
        clamped[index] = 255 if channel > 255 else (0 if channel < 0 else channel)
    return clamped[0], clamped[1], clamped[2]
def get_valid_next_color(value, default_value, direction):
    """Advance one step without overshooting the target channel value.

    :param value: Candidate next channel value (already stepped).
    :param default_value: The target channel value to converge on.
    :param direction: True when counting up, False when counting down.
    :return: The candidate, snapped to the target on overshoot and
        clamped to 0..255.
    """
    overshot = value > default_value if direction else value < default_value
    result = default_value if overshot else value
    # Inline clamp (same rule as get_correct_chanel).
    if result > 255:
        return 255
    if result < 0:
        return 0
    return result
def next_color(c_from, c_to, d_r_col, d_g_col, d_b_col):
    """Take one animation step from *c_from* towards *c_to*.

    Each channel is stepped independently by its delta, moving in
    whichever direction closes the gap; equal channels step "up" by
    their delta (which is 0 in that case).
    :return: The next (r, g, b) tuple.
    """
    deltas = (d_r_col, d_g_col, d_b_col)
    stepped = []
    for chan in range(3):
        if c_from[chan] > c_to[chan]:
            stepped.append(get_valid_next_color(c_from[chan] - deltas[chan], c_to[chan], False))
        else:
            stepped.append(get_valid_next_color(c_from[chan] + deltas[chan], c_to[chan], True))
    return stepped[0], stepped[1], stepped[2]
def animate_color(c_from, c_to, step):
    """Infinite generator of colors moving from *c_from* towards *c_to*.

    NOTE: one step is computed before the loop and then advanced again
    before the first yield, so the first yielded color is two steps in
    (this mirrors the original control flow exactly).
    """
    d_r, d_g, d_b = get_delta_color(c_from, c_to, step)
    current = next_color(c_from, c_to, d_r, d_g, d_b)
    while True:
        current = next_color(current, c_to, d_r, d_g, d_b)
        yield current
def animate_array_colors(colors):
    """Infinite generator that ping-pongs through the palette *colors*.

    Walks forward through consecutive color pairs, yielding every
    intermediate color produced by animate_color (step=30), then walks
    backwards when the end is reached, forever.

    NOTE(review): each intermediate color is compared with ``!=`` against
    colors[end_index] — animate_color yields tuples, so the palette
    entries must be tuples too or the inner loop never terminates;
    confirm with callers.
    """
    direction = True  # True = walking forward through the list
    from_index = 0
    end_index = 1
    while True:
        current_generator = animate_color(colors[from_index], colors[end_index], 30)
        current_color = next(current_generator)
        # Drain the per-pair generator until the target color is reached.
        while current_color != colors[end_index]:
            current_color = next(current_generator)
            yield current_color
        if direction:
            from_index += 1
            end_index += 1
            if end_index >= len(colors):
                # Reached the end of the palette: turn around.
                direction = False
                from_index = end_index - 1
                end_index = from_index - 1
        else:
            from_index -= 1
            end_index -= 1
            if end_index <= 0:
                # Back at the start: walk forwards again.
                direction = True
                from_index = 0
                end_index = 1
|
memchenko/x-max-tree
|
services/Color.py
|
Color.py
|
py
| 2,964 |
python
|
en
|
code
| 0 |
github-code
|
6
|
24199924707
|
class Solution:
    """LeetCode 695: maximum area of an island in a 0/1 grid."""

    def maxAreaOfIsland(self, grid):
        """
        Args:
            grid: list[list[int]] of 0 (water) / 1 (land) cells
        Return:
            int: area of the largest 4-connected island (0 if none)
        """
        self.grid = grid
        rows, cols = len(grid), len(grid[0])
        self.visited = [[False] * cols for _ in range(rows)]
        best = 0
        for r in range(rows):
            for c in range(cols):
                best = max(best, self.dfs(r, c))
        return best

    def dfs(self, i, j):
        """Area of the island containing (i, j); 0 for water, visited
        cells, or out-of-bounds coordinates."""
        outside = i < 0 or i >= len(self.grid) or j < 0 or j >= len(self.grid[0])
        if outside or not self.grid[i][j] or self.visited[i][j]:
            return 0
        self.visited[i][j] = True
        # Current cell plus the four 4-connected neighbours.
        return 1 + sum(self.dfs(i + di, j + dj)
                       for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)))
if __name__ == "__main__":
grid = [[1,1],[1,0]]
print(Solution().maxAreaOfIsland(grid))
|
AiZhanghan/Leetcode
|
code/695. 岛屿的最大面积.py
|
695. 岛屿的最大面积.py
|
py
| 1,091 |
python
|
en
|
code
| 0 |
github-code
|
6
|
18385696956
|
# Import the libraries
import cv2
import os
import numpy as np
class Auxiliary(object):
    """
    Class that provides some auxiliary functions for image handling
    (loading, grayscale conversion, resizing) and image-path discovery.
    """

    def __init__(self, size_x=100, size_y=100, interpolation=cv2.INTER_CUBIC):
        """
        Set the default values for the image size and the interpolation method.
        Available interpolation methods provided by OpenCV: INTER_CUBIC,
        INTER_AREA, INTER_LANCZOS4, INTER_LINEAR, INTER_NEAREST
        :param size_x: Set the default image width (default = 100).
        :param size_y: Set the default image height (default = 100).
        :param interpolation: Set the default interpolation method (default cv2.INTER_CUBIC).
        """
        self.size_x = size_x
        self.size_y = size_y
        self.interpolation = interpolation
        # Declare all supported file extensions (lower-case, without the dot)
        self.supported_files = ["png", "jpg", "jpeg"]

    def set_default_size(self, size_x, size_y):
        """
        Set the default size. Non-positive values are ignored.
        :param size_x: Image width.
        :param size_y: Image height.
        """
        if size_x > 0:
            self.size_x = size_x
        if size_y > 0:
            self.size_y = size_y

    def get_default_size(self):
        """
        Get the default image size defined (default is 100x100).
        :return: A (width, height) tuple.
        """
        return self.size_x, self.size_y

    def get_interpolation_method_name(self):
        """
        Get the selected interpolation method name.
        :return: A string containing the interpolation method name.
        :raises NameError: If the configured method is not a known one.
        """
        # Map the known OpenCV constants to their printable names.
        known_methods = {
            cv2.INTER_CUBIC: "cv2.INTER_CUBIC",
            cv2.INTER_AREA: "cv2.INTER_AREA",
            cv2.INTER_LANCZOS4: "cv2.INTER_LANCZOS4",
            cv2.INTER_LINEAR: "cv2.INTER_LINEAR",
            cv2.INTER_NEAREST: "cv2.INTER_NEAREST",
        }
        try:
            return known_methods[self.interpolation]
        except KeyError:
            # FIX: removed the unreachable ``return ""`` that followed this raise.
            raise NameError("Invalid interpolation method name")

    @staticmethod
    def calc_accuracy(recognized_images, total_face_images):
        """
        Calculates the accuracy (percentage) using the formula:
        acc = (recognized_images / total_face_images) * 100
        :param recognized_images: The number of recognized face images.
        :param total_face_images: The number of total face images.
        :return: The accuracy (0.0 when total_face_images is zero).
        """
        try:
            return (float(recognized_images) /
                    float(total_face_images)) * 100.0
        except ZeroDivisionError:
            return 0.0

    @staticmethod
    def write_text_file(content, file_name):
        """
        Write the content to a text file based on the file name.
        :param content: The content as a string.
        :param file_name: The file name (e.g. home/user/test.txt)
        """
        # FIX: use a context manager so the handle is closed even on error.
        with open(file_name, "w") as text_file:
            text_file.write(content)

    @staticmethod
    def is_grayscale(image):
        """
        Check if an image is in grayscale.
        :param image: The image.
        :return: True if the image is in grayscale.
        """
        if len(image.shape) <= 2:
            # Single-channel images are grayscale by definition.
            return True
        h, w = image.shape[:2]  # rows, cols
        # BUG FIX: the original indexed image[i, j] with i over the width and
        # j over the height (NumPy images are indexed [row, col]), which fails
        # on non-square images. It also used ``p[0] != p[1] != p[2]``, a
        # chained comparison that misses pixels where p[0] != p[1] but
        # p[1] == p[2].
        for row in range(h):
            for col in range(w):
                p = image[row, col]
                if not (p[0] == p[1] == p[2]):
                    return False
        return True

    @staticmethod
    def to_grayscale(image):
        """
        Convert an image to grayscale.
        :param image: The image.
        :return: The image in grayscale, or None for invalid input.
        """
        if image is None:
            print("Invalid Image: Could not convert to grayscale")
            return None
        return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    @staticmethod
    def load_image(path):
        """
        Load an image based on the path passed by parameter.
        :param path: The path to the image file.
        :return: The image object (None if OpenCV could not read it).
        """
        return cv2.imread(path)

    @staticmethod
    def save_image(file_name, image):
        """
        Save an image based on the fileName passed by parameter.
        :param file_name: The file name.
        :param image: The image.
        """
        cv2.imwrite(file_name, image)

    @staticmethod
    def resize_image(image, size_x, size_y, interpolation_method):
        """
        Resize an image.
        :param image: The image object.
        :param size_x: The image width.
        :param size_y: The image height.
        :param interpolation_method: The interpolation method.
        :return: The resized image, or -1 on invalid input.
        """
        if image is None:
            print("Invalid Image: Could not be resized")
            return -1
        # BUG FIX: ``rows, cols = image.shape`` raised ValueError for color
        # images (their shape has three elements); unpack only the first two.
        rows, cols = image.shape[:2]
        if rows <= 0 or cols <= 0:
            print("Invalid Image Sizes: Could not be resized")
            return -1
        return cv2.resize(image, (size_x, size_y),
                          interpolation=interpolation_method)

    def preprocess_image(self, path):
        """
        Preprocess an image. Load an image, convert to grayscale and resize it.
        :param path: The image path.
        :return: The preprocessed image, or None if it could not be loaded.
        """
        # Load the image
        image = self.load_image(path)
        if image is None:
            print("Could not load the image:", path)
            return None
        # Convert to grayscale
        image = self.to_grayscale(image)
        # Resize the image to the configured default size
        image = self.resize_image(
            image, self.size_x, self.size_y, self.interpolation)
        # Return the processed image
        return image

    @staticmethod
    def concatenate_images(left_image, right_image):
        """
        Concatenate two images side by side (horizontally) and returns a new one.
        :param left_image: The image that should be put to the left.
        :param right_image: The image that should be put to the right.
        :return: The new concatenated image, or None if the shapes differ.
        """
        try:
            return np.concatenate((left_image, right_image), axis=1)
        except ValueError:
            return None

    @staticmethod
    def _get_extension(file_name):
        """Return the file extension without the leading dot ('' if none)."""
        return os.path.splitext(file_name)[1].lstrip(".")

    def extract_images_paths(self, path):
        """
        Extract all paths for each supported image in a directory (recursive).
        :param path: The directory path.
        :return: A list with all image file paths.
        """
        paths = []
        # In the path folder search for all files in all directories
        for dir_name, dir_names, file_names in os.walk(path):
            for file_name in file_names:
                # BUG FIX: ``file_name.split(".")[1]`` raised IndexError for
                # extensionless files and picked the wrong segment for names
                # containing several dots; use the real extension instead.
                if self._get_extension(file_name) in self.supported_files:
                    paths.append(os.path.join(dir_name, file_name))
        return paths

    @staticmethod
    def extract_files_paths(path):
        """
        Extract all paths for all files type.
        :param path: The directory path.
        :return: A list with all paths for all files.
        """
        paths = []
        # In the path folder search for all files in all directories
        for dir_name, dir_names, file_names in os.walk(path):
            for file_name in file_names:
                # Creates the filePath joining the directory and file names
                paths.append(os.path.join(dir_name, file_name))
        return paths

    def load_all_images_for_train(self, train_path):
        """
        Load all images for training.
        File names follow the pattern: subjectID_imageNumber.png
        :param train_path: The train path.
        :return: Three lists with the images, labels and file names.
        """
        images = []
        labels = []
        file_name = []
        # extract_images_paths already filters by supported extension.
        for file_path in self.extract_images_paths(train_path):
            # FIX: use os.path.basename instead of split("/") so the code
            # also works with OS-specific path separators.
            temp_name = os.path.basename(file_path)
            subject_id = int(temp_name.split("_")[0])
            images.append(self.preprocess_image(file_path))
            labels.append(subject_id)
            file_name.append(temp_name.split(".")[0])
        return images, labels, file_name

    def load_all_images_for_test(self, test_path):
        """
        Load all images for test.
        IMPORTANT: file names follow the pattern imageNumber_subjectID.png,
        which differs from the pattern used by the training set.
        :param test_path: The test path.
        :return: Three lists with the images, labels and file names, or
                 (None, None, None) if any image failed to load.
        """
        images = []
        labels = []
        file_name = []
        for file_path in self.extract_images_paths(test_path):
            temp_name = os.path.basename(file_path)
            subject_id = int(temp_name.split("_")[1].split(".")[0])
            image = self.preprocess_image(file_path)
            if image is None:
                return None, None, None
            images.append(image)
            labels.append(subject_id)
            file_name.append(temp_name.split(".")[0])
        return images, labels, file_name
|
kelvins/Reconhecimento-Facial
|
FaceRecognition/classes/auxiliary.py
|
auxiliary.py
|
py
| 9,896 |
python
|
en
|
code
| 20 |
github-code
|
6
|
2246571162
|
# Test: AC configures a wrong image file for the AP; the AP can download the
# image but must not upgrade. All code below is left byte-identical; only the
# comments were translated to English.
testname = 'apconfiguration_2.2.2.20'
avoiderror(testname)
printTimer(testname,'Start','Ac config wrong image file for ap. Ap can download image, but can not upgrade')
###############################################################################
# Step 1
# Operation:
#   On AC1, point AP1_image_type at the wrong image file (one for a different image type)
#   On AC1, point AP2_image_type at the wrong image file (one for a different image type)
#   Start the upgrade of AP1 from AC1
# Expectation:
#   Neither AP1 nor AP2 upgrades
################################################################################
printStep(testname,'Step 1',
          'config ap1_image_type upgrade to wrong image_type file',
          'config ap2_image_type upgrade to wrong image_type file',
          'upgrade ap1',
          'ap1 and ap2 should not upgrade')
res1=res2=1
# operate
# Record the current firmware versions of AP1 and AP2
ap1_version = Get_ap_version(ap1, Ap1cmdtype)
ap2_version = Get_ap_version(ap2, Ap2cmdtype)
# Configure the image files for AP1 and AP2 on AC1
# (AP2 is configured before AP1 on purpose: if both share the same image
#  type, AP1's configuration overwrites AP2's)
EnterWirelessMode(switch1)
SetCmd(switch1, 'wireless ap download image-type',ap2_image_type, wrong_imagetype_ftpupgrade_path)
SetCmd(switch1, 'wireless ap download image-type',ap1_image_type, wrong_imagetype_ftpupgrade_path)
# Start the upgrade of AP1
# NOTE(review): the leading space in ' wireless ap download start' is kept
# as-is — confirm the CLI tolerates it.
EnterEnableMode(switch1)
SetCmd(switch1,' wireless ap download start',ap1mac)
IdleAfter(60)
# AP1's upgrade should end in Failure
res1 = CheckSutCmd(switch1, 'show wireless ap download',
                   check=[(ap1mac, 'Failure')],
                   retry=20, interval=5, waitflag=False,IC=True)
# AP2 must not appear in the download list at all
res2 = CheckSutCmdWithNoExpect(switch1, 'show wireless ap download',
                               check=[(ap2mac)],
                               retry=1, waitflag=False,IC=True)
# AC1 should still manage both AP1 and AP2
res3=CheckSutCmd(switch1,'show wireless ap status',
                 check=[(ap1mac,'Managed','Success'),(ap2mac,'Managed','Success')],
                 retry=20,interval=5,waitflag=False,IC=True)
# Verify that neither AP1 nor AP2 was upgraded
ApLogin(ap1)
res4 = check_apversion_after_upgrade(ap1, Ap1cmdtype, ap1_version)
res5 = check_apversion_after_upgrade(ap2, Ap2cmdtype, ap2_version)
# result
printCheckStep(testname, 'Step 1',res1,res2,res3,res4,res5)
###############################################################################
# Step 2
# Operation:
#   On AC1, point AP1_image_type at the ap1_standby_build image file
#   On AC1, point AP2_image_type at the ap2_standby_build image file
#   Start the upgrade of AP1 from AC1
# Expectation:
#   AP1 upgrades successfully
#   AP2 does not upgrade
################################################################################
printStep(testname,'Step 2',
          'config ap1_image_type upgrade to ap1_standby_build',
          'config ap2_image_type upgrade to ap2_standby_build',
          'upgrade ap1',
          'ap1 upgrade successfully',
          'ap2 should not upgrade')
res1=res2=1
# operate
# Configure the image files for AP1 and AP2 on AC1
# (AP2 before AP1, same overwrite reasoning as in Step 1)
EnterWirelessMode(switch1)
SetCmd(switch1, 'wireless ap download image-type',ap2_image_type, ap2_ftpupgrade_standby_path)
SetCmd(switch1, 'wireless ap download image-type',ap1_image_type, ap1_ftpupgrade_standby_path)
# Start the upgrade of AP1
EnterEnableMode(switch1)
SetCmd(switch1,' wireless ap download start',ap1mac)
# Wait for the upgrade to complete
IdleAfter(ftp_ap_upgrade_time)
ac_wait_download_finish(switch1)
# check
# Verify that AC1 manages AP1 and AP2 again
res1=CheckSutCmd(switch1,'show wireless ap status',
                 check=[(ap1mac,'Managed','Success'),(ap2mac,'Managed','Success')],
                 retry=20,interval=5,waitflag=False,IC=True)
# Verify AP1 was upgraded to the standby build
ApLogin(ap1)
res2 = check_apversion_after_upgrade(ap1, Ap1cmdtype, ap1_standby_buildnum)
# Verify AP2 was not upgraded
res3 = check_apversion_after_upgrade(ap2, Ap2cmdtype, ap2_version)
# result
printCheckStep(testname, 'Step 2',res1,res2,res3)
###############################################################################
# Step 3
# Operation:
#   On AC1, point AP1_image_type at the ap1_current_build image file and upgrade AP1
# Expectation:
#   AP1 upgrades successfully and ends up on the expected version
################################################################################
printStep(testname,'Step 3',
          'config ap1 upgrade image to ap1_current_build',
          'upgrade ap1',
          'ap1 upgrade successfully')
# operate
EnterWirelessMode(switch1)
SetCmd(switch1, 'wireless ap download image-type',ap1_image_type, ap1_ftpupgrade_current_path)
EnterEnableMode(switch1)
SetCmd(switch1,' wireless ap download start',ap1mac)
# Wait for the upgrade to complete
IdleAfter(ftp_ap_upgrade_time)
ac_wait_download_finish(switch1)
# check
res1 = CheckSutCmd(switch1,'show wireless ap status',
                   check=[(ap1mac,'Managed','Success'),(ap2mac,'Managed','Success')],
                   retry=20,interval=5,waitflag=False,IC=True)
ApLogin(ap1)
res2 = check_apversion_after_upgrade(ap1, Ap1cmdtype, ap1_current_buildnum)
# result
printCheckStep(testname, 'Step 3',res1,res2)
################################################################################
# Step 4
# Operation:
#   Restore the default configuration
################################################################################
printStep(testname, 'Step 4',
          'Recover initial config')
# operate
EnterWirelessMode(switch1)
SetCmd(switch1, 'no wireless ap download image-type',ap1_image_type)
SetCmd(switch1, 'no wireless ap download image-type',ap2_image_type)
# end
printTimer(testname, 'End')
|
guotaosun/waffirm
|
autoTests/module/apconfiguration/apconfiguration_2.2.2.20_ONE.py
|
apconfiguration_2.2.2.20_ONE.py
|
py
| 5,667 |
python
|
en
|
code
| 0 |
github-code
|
6
|
35914457874
|
class Solution(object):
    # @param nestedList a list, each element in the list
    # can be a list or integer, for example [1,2,[1,2]]
    # @return {int[]} a list of integer
    def flatten(self, nestedList: list) -> list:
        """Flatten an arbitrarily nested list of integers, preserving order.

        BUG FIX: the original emptied every nested sub-list in place via
        ``front.pop()``, destructively mutating the caller's input. This
        version pushes the children without modifying them.
        """
        import collections
        stack = collections.deque([nestedList])
        result = []
        while stack:
            front = stack.popleft()
            if isinstance(front, list):
                # Re-push children at the front, preserving their order.
                for item in reversed(front):
                    stack.appendleft(item)
            else:
                result.append(front)
        return result
|
Super262/LintCodeSolutions
|
data_structures/stack/problem0022.py
|
problem0022.py
|
py
| 575 |
python
|
en
|
code
| 1 |
github-code
|
6
|
30827683895
|
import os
import logging
from novelwriter.enum import nwItemLayout, nwItemClass
from novelwriter.error import formatException
from novelwriter.common import isHandle, sha256sum
logger = logging.getLogger(__name__)
class NWDoc():
    """Wrapper around a single document file (``<handle>.nwd``) in the
    project's content folder. Handles reading, atomic writing via a temp
    file, deleting, and parsing of the ``%%~`` meta data lines at the top
    of the file. A sha256 of the file is kept so concurrent on-disk edits
    can be detected before overwriting.
    """

    def __init__(self, theProject, theHandle):
        self.theProject = theProject

        # Internal Variables
        self._theItem = None    # The currently open item
        self._docHandle = None  # The handle of the currently open item
        self._fileLoc = None    # The file location of the currently open item
        self._docMeta = {}      # The meta data of the currently open item
        self._docError = ""     # The latest encountered IO error
        self._prevHash = None   # Previous sha256sum of the document file
        self._currHash = None   # Latest sha256sum of the document file

        # Only accept a syntactically valid handle; otherwise the
        # instance stays empty and evaluates to False.
        if isHandle(theHandle):
            self._docHandle = theHandle

        if self._docHandle is not None:
            self._theItem = self.theProject.projTree[theHandle]

        return

    def __repr__(self):
        return f"<NWDoc handle={self._docHandle}>"

    def __bool__(self):
        # Truthy only when the handle was accepted and resolves to an item
        return self._docHandle is not None and bool(self._theItem)

    ##
    #  Class Methods
    ##

    def readDocument(self, isOrphan=False):
        """Read the document specified by the handle set in the
        constructor, capturing potential file system errors and parse
        meta data. If the document doesn't exist on disk, return an
        empty string. If something went wrong, return None.
        """
        self._docError = ""
        if self._docHandle is None:
            logger.error("No document handle set")
            return None

        if self._theItem is None and not isOrphan:
            logger.error("Unknown novelWriter document")
            return None

        docFile = self._docHandle+".nwd"
        logger.debug("Opening document: %s", docFile)

        docPath = os.path.join(self.theProject.projContent, docFile)
        self._fileLoc = docPath

        theText = ""
        self._docMeta = {}
        # Snapshot the on-disk hash so writeDocument can later detect
        # concurrent modifications of the file.
        self._prevHash = sha256sum(docPath)

        if os.path.isfile(docPath):
            try:
                with open(docPath, mode="r", encoding="utf-8") as inFile:
                    # Check the first <= 10 lines for metadata
                    for i in range(10):
                        inLine = inFile.readline()
                        if inLine.startswith(r"%%~"):
                            self._parseMeta(inLine)
                        else:
                            # First non-meta line is document text: keep it
                            # and stop scanning for meta data.
                            theText = inLine
                            break

                    # Load the rest of the file
                    theText += inFile.read()

            except Exception as exc:
                self._docError = formatException(exc)
                return None

        else:
            # The document file does not exist, so we assume it's a new
            # document and initialise an empty text string.
            logger.debug("The requested document does not exist")
            return ""

        return theText

    def writeDocument(self, docText, forceWrite=False):
        """Write the document specified by the handle attribute. Handle
        any IO errors in the process. Returns True if successful, False
        if not.
        """
        self._docError = ""
        if self._docHandle is None:
            logger.error("No document handle set")
            return False

        self.theProject.ensureFolderStructure()

        docFile = self._docHandle+".nwd"
        logger.debug("Saving document: %s", docFile)

        docPath = os.path.join(self.theProject.projContent, docFile)
        docTemp = os.path.join(self.theProject.projContent, docFile+"~")

        # Refuse to overwrite a file that changed on disk since it was
        # read, unless the caller explicitly forces the write.
        if self._prevHash is not None and not forceWrite:
            self._currHash = sha256sum(docPath)
            if self._currHash is not None and self._currHash != self._prevHash:
                logger.error("File has been altered on disk since opened")
                return False

        # DocMeta Line
        if self._theItem is None:
            docMeta = ""
        else:
            docMeta = (
                f"%%~name: {self._theItem.itemName}\n"
                f"%%~path: {self._theItem.itemParent}/{self._theItem.itemHandle}\n"
                f"%%~kind: {self._theItem.itemClass.name}/{self._theItem.itemLayout.name}\n"
            )

        # Write to a temp file first so a failed write cannot corrupt
        # the existing document.
        try:
            with open(docTemp, mode="w", encoding="utf-8") as outFile:
                outFile.write(docMeta)
                outFile.write(docText)
        except Exception as exc:
            self._docError = formatException(exc)
            return False

        # If we're here, the file was successfully saved, so we can
        # replace the temp file with the actual file
        try:
            os.replace(docTemp, docPath)
        except OSError as exc:
            self._docError = formatException(exc)
            return False

        # Update the hash snapshot to match what is now on disk.
        self._prevHash = sha256sum(docPath)
        self._currHash = self._prevHash

        return True

    def deleteDocument(self):
        """Permanently delete a document source file and related files
        from the project data folder.
        """
        self._docError = ""
        if self._docHandle is None:
            logger.error("No document handle set")
            return False

        # Remove both the document file and its temp-file sibling.
        chkList = [
            os.path.join(self.theProject.projContent, f"{self._docHandle}.nwd"),
            os.path.join(self.theProject.projContent, f"{self._docHandle}.nwd~"),
        ]
        for chkFile in chkList:
            if os.path.isfile(chkFile):
                try:
                    os.unlink(chkFile)
                    logger.debug("Deleted: %s", chkFile)
                except Exception as exc:
                    self._docError = formatException(exc)
                    return False

        return True

    ##
    #  Getters
    ##

    def getFileLocation(self):
        """Return the file location of the current document.
        """
        return self._fileLoc

    def getCurrentItem(self):
        """Return a pointer to the currently open NWItem.
        """
        return self._theItem

    def getMeta(self):
        """Parse the document meta tag and return the name, parent,
        class and layout meta values.
        """
        theName = self._docMeta.get("name", "")
        theParent = self._docMeta.get("parent", None)
        theClass = self._docMeta.get("class", None)
        theLayout = self._docMeta.get("layout", None)
        return theName, theParent, theClass, theLayout

    def getError(self):
        """Return the last recorded exception.
        """
        return self._docError

    ##
    #  Internal Functions
    ##

    def _parseMeta(self, metaLine):
        """Parse a line from the document starting with the characters
        %%~ that may contain meta data.
        """
        if metaLine.startswith("%%~name:"):
            self._docMeta["name"] = metaLine[8:].strip()

        elif metaLine.startswith("%%~path:"):
            # Expected form: %%~path: <parentHandle>/<itemHandle>
            metaVal = metaLine[8:].strip()
            metaBits = metaVal.split("/")
            if len(metaBits) == 2:
                if isHandle(metaBits[0]):
                    self._docMeta["parent"] = metaBits[0]
                if isHandle(metaBits[1]):
                    self._docMeta["handle"] = metaBits[1]

        elif metaLine.startswith("%%~kind:"):
            # Expected form: %%~kind: <nwItemClass>/<nwItemLayout>
            metaVal = metaLine[8:].strip()
            metaBits = metaVal.split("/")
            if len(metaBits) == 2:
                if metaBits[0] in nwItemClass.__members__:
                    self._docMeta["class"] = nwItemClass[metaBits[0]]
                if metaBits[1] in nwItemLayout.__members__:
                    self._docMeta["layout"] = nwItemLayout[metaBits[1]]

        else:
            logger.debug("Ignoring meta data: '%s'", metaLine.strip())

        return

# END Class NWDoc
|
vaelue/novelWriter
|
novelwriter/core/document.py
|
document.py
|
py
| 7,928 |
python
|
en
|
code
| null |
github-code
|
6
|
37530932561
|
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status
from curriculum.serializers.curriculum_serializers import SubjectLevelListSerializer, SubjectLevelSerializer, SubjectLevelWriteSerializer
from rest_framework.exceptions import NotFound
from rest_framework.views import APIView
from curriculum.models import SubjectLevel
#
# SUBJECT LEVEL VIEWS
#
class SubjectLevelList(APIView):
    """
    List all SubjectLevels, or create a new one.
    """

    def get(self, request, school_pk=None, format=None):
        """Return SubjectLevels, optionally filtered by school (URL kwarg)
        and by ``subject`` / ``level`` query parameters."""
        subject_levels = SubjectLevel.objects.all()
        if school_pk:
            subject_levels = subject_levels.filter(
                subject__school__id=school_pk)
        subject = request.query_params.get('subject', None)
        level = request.query_params.get('level', None)
        if subject:
            subject_levels = subject_levels.filter(subject_id=subject)
        if level:
            subject_levels = subject_levels.filter(level_id=level)
        serializer = SubjectLevelListSerializer(subject_levels, many=True)
        return Response(serializer.data)

    def post(self, request, format=None):
        """Create a SubjectLevel from the request body; 201 on success,
        400 with validation errors otherwise."""
        serializer = SubjectLevelWriteSerializer(data=request.data)
        if serializer.is_valid():
            new_subject_level = serializer.save()
            # Re-serialize with the read serializer so the response matches GET.
            new_serializer = SubjectLevelListSerializer(new_subject_level)
            return Response(new_serializer.data, status=status.HTTP_201_CREATED)
        # BUG FIX: the original returned ``new_serializer.errors`` here, but
        # ``new_serializer`` is unbound when validation fails, so invalid
        # input raised NameError (HTTP 500) instead of returning a 400.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class SubjectLevelDetail(APIView):
    """
    Retrieve, update or delete a SubjectLevel.
    """

    def get_object(self, subject_level_pk):
        """Fetch the SubjectLevel by primary key or raise NotFound (404)."""
        try:
            return SubjectLevel.objects.get(id=subject_level_pk)
        except SubjectLevel.DoesNotExist:
            raise NotFound(detail="Object with this ID not found.")

    def get(self, request, subject_level_pk, format=None):
        """Read a single entry."""
        serializer = SubjectLevelSerializer(self.get_object(subject_level_pk))
        return Response(serializer.data)

    def put(self, request, subject_level_pk, format=None):
        """Fully replace an entry; 400 with errors on invalid input."""
        instance = self.get_object(subject_level_pk)
        serializer = SubjectLevelWriteSerializer(instance, data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data)

    # Partially update a specific entry by primary key
    def patch(self, request, subject_level_pk):
        """Apply a partial update; 400 with errors on invalid input."""
        instance = self.get_object(subject_level_pk)
        serializer = SubjectLevelWriteSerializer(
            instance, data=request.data, partial=True)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data)

    def delete(self, request, subject_level_pk, format=None):
        """Remove an entry; always 204 when the object exists."""
        self.get_object(subject_level_pk).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
|
markoco14/student-mgmt
|
curriculum/views/subject_level_views.py
|
subject_level_views.py
|
py
| 3,238 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5503956648
|
# https://www.hackerrank.com/challenges/swap-case/problem
def swap_case(s):
    """Return *s* with uppercase letters lowered and everything else uppered.

    Mirrors the original logic exactly: characters that are not uppercase
    (including digits and punctuation) are passed through ``str.upper``.
    """
    return "".join(c.lower() if c.isupper() else c.upper() for c in s)
# Read one line from stdin and echo it with the case of each letter swapped.
string = input()
print(swap_case(string))
|
Nikit-370/HackerRank-Solution
|
Python/swap-case.py
|
swap-case.py
|
py
| 282 |
python
|
en
|
code
| 10 |
github-code
|
6
|
73798071227
|
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.http import HttpResponse, HttpResponseRedirect, QueryDict
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth import authenticate, login, logout
from django.views.generic import View, TemplateView
from django.contrib.sessions.models import Session
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.core.mail import send_mail
from maracay.backEnd import backStart, profileBackend, filterProducts, adminSite
from django.shortcuts import render
from django.core.cache import cache
from django.conf import settings
from threading import Thread
from maracay.models import Tools, Profile as ProfileDB, PurchaseConfirmation, TokenPassword, PagosImagenes, purchaseHistory, Product, DolarBolivar
from maracay import get_client_ip, config, formatoBolivares
import json,random, string
from django.contrib import admin
import os
from maracay.sendinblue import sendinblue_send
from django.core.files.storage import FileSystemStorage
import base64
from datetime import datetime
import os,stat
from django.core.files.base import ContentFile
import xlrd
from maracay.task import help_form,forgot_pass
# Create your views here.
class GoogleVerificacion(TemplateView):
    """Serve the static Google Search Console site-verification page."""

    def get(self, request, *args, **kwargs):
        verification_template = 'market/googlebebc5688f09bbff0.html'
        return render(request, verification_template, {})
#Main Class
class Maracay(TemplateView):
    """Storefront index view: renders the paginated product listing."""
    template_name = 'market/index.html'

    # Index page
    def get(self, request, *args, **kwargs):
        """Render the product listing, 12 products per page."""
        _allproducts = backStart(request)
        _allproducts.get()
        if 'pagination' not in request.GET:
            data = _allproducts.response_data
            data['code'] = _allproducts.code
            contact_list = data['cantTotal']
            paginator = Paginator(contact_list, 12)  # Show 12 products per page
            page = request.GET.get('page')
            contacts = paginator.get_page(page)
            formatoBolivares(contacts)
            direction = '/static/images/upload/imagesp/'
            return render(request, 'market/index.html',{'direction':direction,'contacts':contacts,'data':json.dumps(data['data'])})
        # NOTE(review): when 'pagination' IS present in request.GET this view
        # falls through and returns None (an HTTP 500 in Django) — the JSON
        # branch below was disabled; confirm whether that path is still used.
        '''else:
            print ("22222")
            data = _allproducts.response_data
            data['code'] = _allproducts.code
            contact_list = data['cantTotal']
            paginator = Paginator(contact_list, 10) # Show 25 contacts per page
            page = request.GET.get('page')
            contacts = paginator.get_page(page)
            dataAll = {'contacts':contacts}
            return HttpResponse(json.dumps(dataAll, cls=DjangoJSONEncoder), content_type='application/json')'''

    # POST is accepted but intentionally does nothing.
    def post(self, request, *args, **kwargs):
        pass
class Account(View):
    """Account page: shows profile data for authenticated users and the
    registration form for anonymous visitors."""

    def get(self, request, *args, **kwargs):
        # FIX: use the canonical authentication check instead of comparing
        # str(request.user) against the literal 'AnonymousUser', which is
        # fragile and breaks for a real user with that username.
        if request.user.is_authenticated:
            _accountData = profileBackend(request)
            _accountData.accountData()
            data = _accountData.response_data
            return render(request, 'market/account.html', {'data':data['data']})
        else:
            # Anonymous visitor: show the registration form
            return render(request, 'market/register.html', {})
class Login(View):
    """POST-only JSON login endpoint.

    Rejects the attempt when the browser already holds an active session.
    The commented-out blocks are a disabled per-IP throttling scheme,
    kept for reference.
    """

    def __init__(self):
        # Keys that must be present in every login POST
        self.requireds = ['email', 'password', 'csrfmiddlewaretoken']

    def post(self, request, *args, **kwargs):
        # __ip = get_client_ip(request)
        # Reject requests missing any required field
        for key in self.requireds:
            if not key in request.POST.keys():
                return HttpResponse(status=400, content_type='application/json')
        for session in Session.objects.filter(session_key=request.session.session_key):
            if session:
                # Login refused: the user already has an active session
                return HttpResponse(json.dumps({'code':400,'message':'Ya tiene una sesiòn activa'}, cls=DjangoJSONEncoder), content_type='application/json')
        # if cache.get('cache_ip__%s'%__ip):
        #     return HttpResponse(json.dumps({'code':400,'message':'Debe esperar 5 minutos'}, cls=DjangoJSONEncoder), content_type='application/json')
        user = authenticate(username=request.POST['email'], password=request.POST['password'])
        if user:
            # NOTE(review): cache.clear() wipes the ENTIRE cache on every
            # successful login, not just this client's throttle entries —
            # confirm this is intended.
            cache.clear()
            login(request, user)
            return HttpResponse(json.dumps({'code':200}, cls=DjangoJSONEncoder), content_type='application/json')
        else:
            return HttpResponse(json.dumps({'code':400,'message':'Intento fallido'}, cls=DjangoJSONEncoder), content_type='application/json')
        #
        # Disabled per-IP throttling (kept for reference):
        # __cache_count_error = cache.get('cache_error__%s'%__ip)
        # __cache_exist = cache.get('cache_ip__%s'%__ip)
        # if __cache_exist:
        #     return HttpResponse(json.dumps({'code':400,'message':'Debe esperar 5 minutos'}, cls=DjangoJSONEncoder), content_type='application/json')
        # else:
        #     if __cache_count_error:
        #         if __cache_count_error == 1:
        #             cache.set('cache_error__%s'%__ip,1+1,60)
        #             return HttpResponse(json.dumps({'code':400,'message':'Segundo intento fallido'}, cls=DjangoJSONEncoder), content_type='application/json')
        #         elif __cache_count_error == 2:
        #             cache.set('cache_ip__%s'%__ip,__ip,300)
        #             return HttpResponse(json.dumps({'code':400,'message':'Tercer intento fallido/Debe esperar 5 minutos'}, cls=DjangoJSONEncoder), content_type='application/json')
        #     else:
        #         cache.set('cache_error__%s'%__ip,1,60)
        #         return HttpResponse(json.dumps({'code':400,'message':'Primer intento fallido'}, cls=DjangoJSONEncoder), content_type='application/json')
class Logout(View):
    """Log the user out, then re-render the storefront index (mirrors Maracay.get)."""

    def get(self, request, *args, **kwargs):
        logout(request)
        _allproducts = backStart(request)
        _allproducts.get()
        if 'pagination' not in request.GET:
            data = _allproducts.response_data
            data['code'] = _allproducts.code
            contact_list = data['cantTotal']
            paginator = Paginator(contact_list, 12)  # 12 products per page
            page = request.GET.get('page')
            contacts = paginator.get_page(page)
            formatoBolivares(contacts)  # attach bolívar-formatted prices
            direction = '/static/images/upload/imagesp/'
            return render(request, 'market/index.html',{'direction':direction,'contacts':contacts,'data':json.dumps(data['data'])})
        # NOTE(review): when 'pagination' IS present this view returns None,
        # which Django rejects -- confirm the AJAX path never reaches here.
class Profile(View):
    """User-profile endpoint: POST creates an account, PUT updates profile data."""

    def get(self, request, *args, **kwargs):
        # NOTE(review): GET only prints and returns None (not a valid Django
        # response) -- this looks like a stub.
        print ("Profile")
    # User creation (registration).
    def post(self, request, *args, **kwargs):
        """Create a user through profileBackend, then log the new user in."""
        _newUser = profileBackend(request)
        _newUser.post()
        data = _newUser.response_data
        data['code'] = _newUser.code
        user = authenticate(username=request.POST['email'], password=request.POST['password'])
        if user:login(request, user)
        return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')

    def put(self, request, *args, **kwargs):
        """Update either the User name fields or the ProfileDB address fields.

        POST['flagProfileonly'] == 'false' updates name/lastname/phone/rif;
        any other value updates direction/localphone/reference.
        """
        # Django does not parse PUT bodies; rebuild POST from the raw stream.
        request.POST=QueryDict(request.read())
        try:
            data = {'code':200}
            if request.POST['flagProfileonly'] == 'false':
                dataUser = User.objects.get(pk=int(request.POST['user']))
                dataUser.first_name=request.POST['name']
                dataUser.last_name=request.POST['lastname']
                dataProfile = ProfileDB.objects.get(user=dataUser.id)
                dataProfile.phone=request.POST['phone']
                dataProfile.rif=request.POST['rif']
                dataUser.save()
                dataProfile.save()
            else:
                dataProfile = ProfileDB.objects.get(user=User.objects.get(pk=int(request.POST['user'])))
                dataProfile.direction=request.POST['direction']
                dataProfile.localphone=request.POST['localphone']
                dataProfile.reference=request.POST['reference']
                dataProfile.save()
            return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
        except Exception as e:
            print ("Profile",e)
            data = {'code':500}
            return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
#Seccion de Administrador
def AllProductsAdminTable(request):
    """Return every product as JSON for the admin table.

    NOTE(review): the superuser/authentication check is commented out below,
    so this endpoint is currently open to anonymous users -- confirm before
    deploying.
    """
    # if str(request.user) != 'AnonymousUser' and request.user.is_superuser==True:
    _allproductstable = adminSite(request)
    _allproductstable.allProductsTable()
    data = _allproductstable.response_data
    print("data",data)
    # data = {"a":"a"}
    return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
    # else:
    #     return render(request, 'market/adminIndex.html', {})
class ControlAdmin(View):
    """Admin dashboard: GET renders the management tabs, POST ingests Excel uploads."""

    def get(self, request, *args, **kwargs):
        """Render the management page for superusers.

        The active tab is chosen by a query-string key taken from
        productos/cotizacion/precios/inventario; 'cotizacion' is the default.
        """
        try:
            # Only authenticated superusers may see the management page.
            if str(request.user) != 'AnonymousUser' and request.user.is_superuser==True:
                _allproductsfilter = adminSite(request)
                _allproductsfilter.dataProductUser()
                lista_template = ['productos','cotizacion','precios','inventario']
                data = _allproductsfilter.response_data
                data['code'] = _allproductsfilter.code
                contact_list = data['cantTotal']
                # (a pagination draft was removed from the active path)
                flag = False
                direction = '/static/images/upload/imagesp/'
                # Mark which tab was requested; fall back to 'cotizacion'.
                for value in lista_template:
                    if value in request.GET:
                        flag=True
                        data[value]=True
                if not flag:
                    data['cotizacion']=True
                return render(request, 'market/admintemplates/adminGestion.html', {'valores':data,'direction':direction,'data':data['data'],'flag':'all'})
            else:
                # Not an admin: show the admin login page.
                return render(request, 'market/admintemplates/adminIndex.html', {})
        except Exception as e:
            # NOTE(review): swallowing here makes the view return None on error.
            print("ControlAdmin get",e)

    def post(self, request, *args, **kwargs):
        """Ingest a base64-encoded Excel workbook.

        With POST['flag'] set: read the 'INVENTARIO' sheet and report products
        with stock <= 5. Otherwise: read the 'CALCULADOR' sheet and
        create/update products and their bolívar prices. The uploaded file is
        removed from MEDIA_ROOT on every path.
        """
        try:
            archivo = request.POST.get('archivo')
            nombre_archivo = request.POST.get('nombre_archivo')
            # The upload arrives as a data URI: "data:<mime>;base64,<payload>".
            format, imgstr = archivo.split(';base64,')
            ext = format.split('/')[-1]
            data = ContentFile(base64.b64decode(imgstr))
            localtion_save = settings.MEDIA_ROOT
            fs = FileSystemStorage(location=localtion_save)
            fs.save(nombre_archivo, data)
            # Open the saved workbook with xlrd.
            documento = xlrd.open_workbook(settings.MEDIA_ROOT+'/'+nombre_archivo)
            sheet_excel = documento.sheet_names()
            if request.POST.get('flag'):
                if 'INVENTARIO' in sheet_excel:
                    data = {"code":200,"mensaje":"Subido Correctamente"}
                    inventariocritico = []
                    inventariocritico_return = []
                    lista_productos_inventario = documento.sheet_by_index(sheet_excel.index('INVENTARIO'))
                    # NOTE(review): iterates a fixed 100 rows instead of
                    # lista_productos_inventario.nrows -- shorter sheets raise
                    # (swallowed by the outer except) and longer sheets are cut off.
                    for i in range(100): #
                        if i !=0 and i>=3:
                            fila = lista_productos_inventario.row(i) #
                            # Column 5 holds the stock; xlrd cell reprs look like "number:12.0".
                            stock = int(float(str(fila[5]).split("number:")[1]))
                            if stock <=5:
                                inventariocritico.append([str(fila[1]).split("text:"),str(fila[2]).split("number:"),str(fila[5]).split("number:")])
                    for value in inventariocritico:
                        nombre_producto = value[0][1].replace("'","")
                        cantidad_en_stock_del_producto = round(float(value[2][1]),2)
                        inventariocritico_return.append({"producto":nombre_producto,"stockcritico":cantidad_en_stock_del_producto})
                    print("borrar excel del sistema ")
                    data = {"code":200,"mensaje":"Critico","data":inventariocritico_return}
                    os.remove(settings.MEDIA_ROOT+'/'+nombre_archivo)
                return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
            else:
                if 'CALCULADOR' in sheet_excel:
                    lista_productos_precios_venta = documento.sheet_by_index(sheet_excel.index('CALCULADOR'))
                    listafinal = []
                    listafinalreal = []
                    # Skip the header row; keep name (col 1), price (col 2), category (col 5).
                    for i in range(lista_productos_precios_venta.nrows): #
                        if i !=0:
                            fila = lista_productos_precios_venta.row(i) #
                            listafinal.append([str(fila[1]).split("text:"),str(fila[2]).split("number:"),str(fila[5]).split("number:")])
                    for product_precio in listafinal:
                        nombre_producto = product_precio[0][1].replace("'","")
                        precio_producto = round(float(product_precio[1][1]),2)
                        categoria = round(float(product_precio[2][1]))
                        try:
                            # Existing product: refresh USD and Bs prices.
                            producto_para_actualizar = Product.objects.get(name=nombre_producto)
                            producto_para_actualizar.price = precio_producto
                            producto_para_actualizar.pricebs = round((float(precio_producto)*float(DolarBolivar.objects.get().bolivar)),2)
                            producto_para_actualizar.save()
                        except Exception as e:
                            if categoria != 0:
                                # Unknown product with a valid category: create it.
                                print("No existe y lo creo")
                                actualizado = Product.objects.create(
                                    name=nombre_producto,
                                    price=precio_producto,
                                    category=categoria,
                                    pricebs=round((float(precio_producto)*float(DolarBolivar.objects.get().bolivar)),2))
                                actualizado.save()
                            else:
                                # Category 0 is treated as "not a real product row".
                                print("salta porque no es categoria valida")
                else:
                    data = {"code":500,"mensaje":"Error Verifique el archivo subido"}
                print("borrar excel del sistema ")
                os.remove(settings.MEDIA_ROOT+'/'+nombre_archivo)
                # NOTE(review): this unconditionally overwrites `data`, so the
                # 500 set just above can never be returned -- confirm intent.
                data = {"code":200,"mensaje":"Subido Correctamente"}
                return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
        except Exception as e1:
            # Best-effort cleanup of the uploaded file, then report the error.
            print("borrar excel del sistema error")
            try:
                os.remove(settings.MEDIA_ROOT+'/'+nombre_archivo)
                data = {"code":500,"error":"BackEnd "+str(e1)}
                return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
            except Exception as e:
                data = {"code":500,"error":"BackEnd "+str(e1)}
                return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
#Fin de la Seccion de Administrador
def Conditions(request):
    """Static terms-and-conditions page."""
    template = 'market/conditions.html'
    return render(request, template, {})
def Help(request):
    """Static help page."""
    template = 'market/help.html'
    return render(request, template, {})
def We(request):
    """Static about-us page."""
    template = 'market/we.html'
    return render(request, template, {})
def Places(request):
    """Static delivery-areas page."""
    template = 'market/places.html'
    return render(request, template, {})
def Payment(request):
    """Static payment-methods page."""
    template = 'market/payment.html'
    return render(request, template, {})
def Delivery(request):
    """Static delivery-information page."""
    template = 'market/delivery.html'
    return render(request, template, {})
####CARRITO DE COMPRAS#####
def CartShopping(request):
    """Render the shopping cart; prefill contact fields for authenticated users."""
    if str(request.user) != 'AnonymousUser':
        try:
            dataUser = User.objects.get(email=request.user)
            return render(request, 'market/cartshopping.html', {
                'name':dataUser.first_name,
                'apellido':dataUser.last_name,
                'phone':dataUser.user_profile.phone,
                'direction':dataUser.user_profile.direction,
                'rif':dataUser.user_profile.rif,
                'localphone':dataUser.user_profile.localphone,
                'reference':dataUser.user_profile.reference,
                'code':200
            })
        except Exception as e:
            # Missing user/profile data: fall back to an empty cart page.
            print ("CartShopping",e)
            return render(request, 'market/cartshopping.html', {})
    else:
        return render(request, 'market/cartshopping.html', {})
#Section Filters
def AllProducts(request):
    """Full catalogue listing, paginated 10 per page, with Bs-formatted prices."""
    backend = filterProducts(request)
    backend.allProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/allProducts.html',
                  {'all': 1, 'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def ViveresProducts(request):
    """'Víveres' category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.viveresProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/viveresProducts.html',
                  {'viveres': 1, 'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def ChucheriasProducts(request):
    """'Chucherías' category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.chucheriasProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/chucheriaProducts.html',
                  {'chucherias': 1, 'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def FrigorificoProducts(request):
    """'Frigorífico' category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.frigorificoProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/frigorificoProducts.html',
                  {'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def EnlatadosProducts(request):
    """'Enlatados' category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.enlatadosProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/enlatadosProducts.html',
                  {'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def CharcuteriaProducts(request):
    """'Charcutería' category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.charcuteriaProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/charcuteriaProducts.html',
                  {'charcuteria': 1, 'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def CarnesProducts(request):
    """'Carnes' category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.carnesProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/carnesProducts.html',
                  {'carne': 1, 'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
def PersonalesProducts(request):
    """Personal-care category listing, paginated 10 per page, Bs-formatted prices."""
    backend = filterProducts(request)
    backend.personalesProductsFilter()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/personalesProducts.html',
                  {'personales': 1, 'direction': direction, 'contacts': contacts,
                   'data': json.dumps(data['data'])})
#Section Filter Prodcuts Admin
def AllProductsAdmin(request):
    """Admin listing of all products; anonymous users get the admin login page."""
    if str(request.user) == 'AnonymousUser':
        return render(request, 'market/adminIndex.html', {})
    backend = adminSite(request)
    backend.dataProductUser()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/adminGestion.html',
                  {'direction': direction, 'data': contacts, 'flag': 'all'})
def ViveresProductsAdmin(request):
    """Admin 'víveres' listing; anonymous users get the admin login page."""
    if str(request.user) == 'AnonymousUser':
        return render(request, 'market/adminIndex.html', {})
    backend = adminSite(request)
    backend.viveresProductsFilterAdmin()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/adminGestion.html',
                  {'direction': direction, 'data': contacts, 'flag': 'vive'})
def FrigorificoProductsAdmin(request):
    """Admin 'frigorífico' listing; anonymous users get the admin login page."""
    if str(request.user) == 'AnonymousUser':
        return render(request, 'market/adminIndex.html', {})
    backend = adminSite(request)
    backend.frigorificoProductsFilterAdmin()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/adminGestion.html',
                  {'direction': direction, 'data': contacts, 'flag': 'frigo'})
def EnlatadosProductsAdmin(request):
    """Admin 'enlatados' listing; anonymous users get the admin login page."""
    if str(request.user) == 'AnonymousUser':
        return render(request, 'market/adminIndex.html', {})
    backend = adminSite(request)
    backend.enlatadosProductsFilterAdmin()
    data = backend.response_data
    data['code'] = backend.code
    paginator = Paginator(data['cantTotal'], 10)  # 10 products per page
    contacts = paginator.get_page(request.GET.get('page'))
    formatoBolivares(contacts)  # attach bolívar-formatted prices to page items
    direction = '/static/images/upload/imagesp/'
    # Dropped the unused local `dataAll` from the original.
    return render(request, 'market/adminGestion.html',
                  {'direction': direction, 'data': contacts, 'flag': 'enla'})
#Caja
def CartOrder(request):
    """Checkout page: prefill the buyer's data.

    If the logged-in user has no usable profile the session is dropped and
    the storefront index is rendered instead of the checkout.
    """
    data = {}
    if str(request.user) != 'AnonymousUser':
        try:
            dataUser = User.objects.get(email=request.user)
            data = {
                'user':dataUser.id,
                'name':dataUser.first_name,
                'email':dataUser.email,
                'apellido':dataUser.last_name,
                'phone':dataUser.user_profile.phone,
                'direction':dataUser.user_profile.direction,
                'rif':dataUser.user_profile.rif,
                'localphone':dataUser.user_profile.localphone,
                'reference':dataUser.user_profile.reference,
                'code':200
            }
        except Exception as e:
            # Stale session or missing profile: log out and fall back to the index.
            logout(request)
            _allproducts = backStart(request)
            _allproducts.get('all')
            data = _allproducts.response_data
            data['code'] = _allproducts.code
            return render(request, 'market/index.html',{'data':data['data'][0] if data['data'] else {} })
    return render(request, 'market/order.html',data)
#confirmacioncompra
def ConfimationOrder(request):
    """Purchase-confirmation page: show the user's latest order with totals."""
    if str(request.user) == 'AnonymousUser':
        return render(request, 'market/registerLogin.html', {})
    try:
        dataUser = ProfileDB.objects.get(user__email=request.user)
        data = {
            'user':dataUser.user.id,
            'name':dataUser.user.first_name,
            'email':dataUser.user.email,
            'code':200,
            'costoenvio':dataUser.costoenvio,
            'compra':[],
            'tipoPago':'',
        }
        # The latest confirmation row supplies the order code; fetch all its lines.
        compra = PurchaseConfirmation.objects.filter(user=dataUser.user).last()
        allProducts = PurchaseConfirmation.objects.filter(code=compra.code)
        totalGeneral=0
        for value in allProducts:
            data['code'] = value.code  # NOTE(review): overwrites the 200 set above
            data['compra'].append({
                'name':value.product.name,
                'price':"$"+str(value.product.price)+' / '+str(value.cant_product),
                'image':'/static/images/upload/imagesp/'+value.product.name_image,
                'total':"$"+str(round(float(value.product.price)*int(value.cant_product),2)),
            })
            totalGeneral = totalGeneral+(float(value.product.price)*int(value.cant_product))
        # Payment details recorded in the purchase history for this order code.
        for value2 in purchaseHistory.objects.filter(code_purchase=compra.code):
            data['lugarpago'] = value2.lugarpago
            data['moneda'] = value2.moneda
            data['tipoPago'] = value2.payment_type
            data['totalenmodena'] = value2.total
        data['totalGeneral'] = round(totalGeneral,2)
        data['totalCompleto'] =round(data['totalGeneral']+data['costoenvio'],2)
        if data['moneda'] == 'Bs':
            # Re-format 1234567.89 -> "1.234.567,89" (Venezuelan thousands/decimal).
            data['totalenmodena']="{:,.2f}".format(float(data['totalenmodena'])).replace(","," ")
            data['totalenmodena']=data['totalenmodena'].replace(".",",")
            data['totalenmodena']=data['totalenmodena'].replace(" ",".")
        return render(request, 'market/confirmationOrder.html',data)
    except Exception as e:
        # NOTE(review): on failure this returns None, which Django rejects.
        print("ConfimationOrder",e)
#envio de formulario de ayuda
def HelpForm(request):
    """Queue a help-form e-mail (Celery task), validating the purchase code first."""
    try:
        # Before queueing, make sure the referenced purchase code exists.
        codigo = request.POST.get('codigo')
        if codigo:
            try:
                PagosImagenes.objects.get(codigo_compra=codigo)
            except Exception as e:
                print("codigo invalido",e)
                data = {'code':500,"error":"Código invalido"}
                return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
        kwargs_ = {
            "asunto": request.POST.get('asunto'),
            "email": request.POST.get('email'),
            "mensaje": request.POST.get('mensaje'),
            "imagen": request.POST.get('imagen'),
            "nombre_imagen": request.POST.get('nombre_imagen'),
            "codigo": request.POST.get('codigo'),
            "origin":request.headers['Origin'],
        }
        extension = request.POST.get('extension')
        if extension:
            # "image/png" -> ".png"
            extension = '.'+extension.split("/")[1]
            kwargs_["extension"] = extension
        envio_email = help_form.delay(kwargs_)
    except Exception as e:
        # NOTE(review): failures here are swallowed and the caller still
        # receives the 200 below -- confirm this best-effort behavior is wanted.
        print("HelpForm",e)
    data = {'code':200}
    return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
def CartOrderEntrega(request):
    """Persist the current cart as a purchase and report success/failure as JSON."""
    if str(request.user) == 'AnonymousUser':
        return render(request, 'market/registerLogin.html', {})
    backend = backStart(request)
    backend.guardaCompra()
    if backend.code != 500:
        payload = {'code': 200}
    else:
        payload = {'code': 500, 'message': 'Error al procesar su compra'}
    return HttpResponse(json.dumps(payload, cls=DjangoJSONEncoder),
                        content_type='application/json')
#pagina de recuperacion de clave
def Restore(request):
    """Password-recovery request page."""
    context = {}
    return render(request, 'market/restore.html', context)
#envio de recuperacion de clave
def Forgot(request):
    """Generate (or refresh) a password-reset token for POST['email'] and queue
    the recovery e-mail.

    Returns JSON {'code': 200} on success, {'code': 500, 'message': ...} when
    the e-mail does not exist (or anything else fails).
    """
    try:
        dataUser = User.objects.get(email=request.POST['email'])
        # Random 30-char token embedded in the reset link.
        def ran_gen(size, chars=string.ascii_uppercase + string.digits):
            return ''.join(random.choice(chars) for x in range(size))
        tokenCode = ran_gen(30,"abcdefghijkLmnNopqrstuvwxyz0123456789*")
        try:
            # Reuse the existing token row for this user, if any.
            token = TokenPassword.objects.get(user=dataUser)
            token.token = tokenCode
        except Exception as e:
            dataToke = {'token':tokenCode,'user':dataUser}
            token = TokenPassword(**dataToke)
        # Fix: save unconditionally. The original's indentation suggested
        # save() ran only when a *new* row was created, so refreshed tokens
        # were e-mailed but never persisted and the link could never match.
        token.save()
        kwargs_ = {
            "email":str(dataUser.email),
            "uriab":request.build_absolute_uri(),
            "token":token.token
        }
        envio_email_forgot = forgot_pass.delay(kwargs_)
        data = {'code':200}
        return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
    except Exception as e:
        print (e)
        data = {'code':500,'message':'Email no existe'}
        return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
def ForgotMail(request):
    """Reset-link landing page: valid token -> password form, otherwise 404 page."""
    if 'token' not in request.GET:
        return render(request, 'market/error404.html', {})
    try:
        TokenPassword.objects.get(token=request.GET.get('token'))
    except Exception:
        return render(request, 'market/error404.html', {})
    return render(request, 'market/forgotPasswordFinal.html', {'token': request.GET['token']})
def Detail(request):
    """Product detail page; requires a ?code= query parameter."""
    if 'code' not in request.GET:
        error = {'code': 500, 'message': 'Codigo invalido'}
        return HttpResponse(json.dumps(error, cls=DjangoJSONEncoder),
                            content_type='application/json')
    backend = backStart(request)
    backend.detailProducts()
    data = backend.response_data
    direction = '/static/images/upload/imagesp/'
    return render(request, 'market/detailProduct.html',
                  {'direction': direction, 'data': data['data'], 'data2': data['data2'][0]})
def Register(request):
    """Stand-alone registration page (flag=1 puts the template in register mode)."""
    context = {'flag': 1}
    return render(request, 'market/register.html', context)
def SendEmailClient(request):
    """Resend the registration e-mail for the account given by POST['email'].

    Always returns a JSON HttpResponse; the original returned None (invalid
    for a Django view) when the e-mail was missing or an exception occurred.
    """
    try:
        email = request.POST.get("email")
        if email:
            dataUser = User.objects.get(email=request.POST['email'])
            sendinblue_send('registro',dataUser.email,dataUser.first_name,dataUser.last_name,None)
            data = {'code':200,'message':''}
            return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
        # No e-mail supplied: report the failure explicitly.
        data = {'code':500,'message':'Email requerido'}
        return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
    except Exception as e:
        print("SendEmailClient",e)
        data = {'code':500,'message':'Error'}
        return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
|
alfonsoolavarria/cm
|
maracay/views.py
|
views.py
|
py
| 34,284 |
python
|
en
|
code
| 0 |
github-code
|
6
|
31484686923
|
import torch
from models.conformer.activation import GLU, Swish
class DepthWiseConvolution(torch.nn.Module):
    """Depth-wise 1-D convolution over (batch, time, channels) input."""

    def __init__(self, in_channels, kernel_size, stride, padding):
        super(DepthWiseConvolution, self).__init__()
        # groups=in_channels makes each channel convolve with its own kernel.
        self.conv = torch.nn.Conv1d(
            in_channels, in_channels, kernel_size, stride, padding,
            groups=in_channels)

    def forward(self, x):
        # Conv1d expects (batch, channels, time); transpose in and back out.
        transposed = x.permute(0, 2, 1)
        convolved = self.conv(transposed)
        return convolved.permute(0, 2, 1)
class PointWiseConvolution(torch.nn.Module):
    """1x1 (point-wise) convolution mixing channels; input is (batch, time, channels)."""

    def __init__(self, in_channels, out_channels, stride=1):
        super(PointWiseConvolution, self).__init__()
        self.conv = torch.nn.Conv1d(in_channels, out_channels, 1, stride, 0)

    def forward(self, x):
        # Conv1d expects (batch, channels, time); transpose in and back out.
        transposed = x.permute(0, 2, 1)
        mixed = self.conv(transposed)
        return mixed.permute(0, 2, 1)
class Permute(torch.nn.Module):
    """Module wrapper around Tensor.permute so it can sit inside nn.Sequential."""

    def __init__(self, dims):
        super(Permute, self).__init__()
        self.dims = dims  # axis order forwarded verbatim to permute()

    def forward(self, x):
        return x.permute(*self.dims)
class ConvolutionModule(torch.nn.Module):
    """Conformer convolution module.

    Pipeline: LayerNorm -> point-wise conv (expand) + GLU -> depth-wise conv
    -> BatchNorm -> Swish -> point-wise conv -> dropout.
    Input and output shape: (batch, time, d_model).
    """

    def __init__(self, d_model, dropout, kernel_size=3):
        super(ConvolutionModule, self).__init__()
        self.conv = torch.nn.Sequential(
            torch.nn.LayerNorm(d_model),
            # Expand to 2*d_model so GLU can gate and halve back to d_model.
            PointWiseConvolution(d_model, 2 * d_model),
            GLU(),
            # int(kernel_size / 2) padding keeps the sequence length for odd
            # kernel sizes; even kernels would change it -- assumed odd, verify.
            DepthWiseConvolution(d_model, kernel_size, 1, int(kernel_size / 2)),
            # BatchNorm1d expects (batch, channels, time): permute around it.
            Permute((0, 2, 1)),
            torch.nn.BatchNorm1d(d_model),
            Permute((0, 2, 1)),
            Swish(),
            PointWiseConvolution(d_model, d_model),
            torch.nn.Dropout(dropout),
        )

    def forward(self, x):
        return self.conv(x)
|
m-koichi/ConformerSED
|
src/models/conformer/convolution.py
|
convolution.py
|
py
| 1,709 |
python
|
en
|
code
| 25 |
github-code
|
6
|
14807526088
|
import time
import multiprocessing
def work():
    """Print a progress message ten times, pausing 0.2 s between prints."""
    for _ in range(10):
        print("工作中...")
        time.sleep(0.2)
if __name__ == '__main__':
    work_process = multiprocessing.Process(target=work)
    # Daemonize the worker so it is killed when the main process exits.
    work_process.daemon=True
    # 程序等待1秒
    work_process.start()
    # Main process waits 1 second, then exits (terminating the daemon worker).
    time.sleep(1)
    print("程序结束")
|
kids0cn/leetcode
|
Python语法/python多线程多进程/4.守护进程.py
|
4.守护进程.py
|
py
| 333 |
python
|
en
|
code
| 0 |
github-code
|
6
|
7357205248
|
import requests
import json
import nestConfig
#AWS Constants
url = nestConfig.get_URL()
query = '''
mutation Mutation($id: String!) {
checkIn(id: $id) {
code
message
}
}
'''
def checkIn(nestID):
    """POST the check-in mutation for *nestID* to the backend.

    Always returns None; raises if the server answers with a non-200 status.
    Network errors are swallowed and also return None.
    """
    #Ensure nest is connected to the backend
    content = json.dumps({'id':nestID}) #Assign nest name to be checked
    # NOTE(review): `variables` is sent as a JSON *string* (double-encoded);
    # many GraphQL servers expect an object -- confirm the backend accepts this.
    try:
        res = requests.post(url, json={'query': query, 'variables': content})
    except Exception as error:
        return None
    if res.status_code == 200:
        print(res.status_code)
    else:
        raise Exception("Query failed to run by returning code of {}.".format(res.text))
    return None
|
EzequielRosario/ImperiumBinarium-Files
|
NestFunctions/HourlyCheckIn.py
|
HourlyCheckIn.py
|
py
| 642 |
python
|
en
|
code
| 0 |
github-code
|
6
|
16475584397
|
import sqlite3
from sqlite3 import Error
class Data():
    """SQLite persistence layer for the file/directory forensic inventory.

    Every statement now binds user-supplied values with ``?`` placeholders so
    names containing quotes can neither break the SQL nor inject into it (the
    original built statements with %-formatting). Identifier-position
    arguments (the ORDER BY column) cannot be bound and are still
    interpolated; callers must pass trusted column names there.

    Mutating methods return None on success, or the caught error.
    Select methods return the fetched rows, or the error message.
    """
    # Last error / last result captured by the most recent operation.
    __error = None
    __result = None

    def __init__(self, db):
        """Open *db* (a path or ':memory:'); the connection may be shared
        across threads (check_same_thread=False)."""
        try:
            self.con = sqlite3.connect(db, check_same_thread = False)
            self.cur = self.con.cursor()
        except Error as e:
            print(e)

    def clean_db(self):
        """Delete all file/directory rows and reset their AUTOINCREMENT counters."""
        try:
            for stmt in ("DELETE FROM file;",
                         "DELETE FROM SQLITE_SEQUENCE WHERE name='file';",
                         "DELETE FROM directory;",
                         "DELETE FROM SQLITE_SEQUENCE WHERE name='directory';"):
                self.cur.execute(stmt)
                self.con.commit()
        except Error as e:
            self.__error = e
        return self.__error

    def create_tables(self):
        """Create the schema.

        Also creates `sub_directory`, which insert_sub_dir and the
        sub-directory selects depend on but which the original schema never
        defined (those methods could not work before).
        """
        tb_directory = 'CREATE TABLE "directory" ("id_directory" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, "name" TEXT)'
        tb_sub_directory = 'CREATE TABLE "sub_directory" ("id_sub_directory" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, "id_directory" INTEGER NOT NULL, "name" TEXT)'
        tb_evidence = 'CREATE TABLE "evidence" ("case_number" INTEGER, "examiner_name" TEXT, "description" TEXT, "note" TEXT)'
        tb_pull_log = 'CREATE TABLE "pull_log" ("id_log" INTEGER PRIMARY KEY AUTOINCREMENT, "file" TEXT, "from" TEXT, "to" TEXT, "md5_source" TEXT, "sha1_source" TEXT, "date" TEXT )'
        tb_file = 'CREATE TABLE "file" ("id_file" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, "id_directory" INTEGER NOT NULL, "name" TEXT, "permision" TEXT, "date" TEXT, "Size" REAL)'
        try:
            self.cur.execute(tb_directory)
            self.cur.execute(tb_sub_directory)
            self.cur.execute(tb_file)
            self.cur.execute(tb_evidence)
            self.cur.execute(tb_pull_log)
        except Error as e:
            self.__error = e
        return self.__error

    def insert_log_pull(self, file, from_path, to_path, md5_source, sha1_source, date):
        """Record one file-pull operation in the audit log."""
        try:
            self.cur.execute(
                'INSERT INTO pull_log (`file`, `from`, `to`, md5_source, sha1_source, `date`) VALUES (?,?,?,?,?,?)',
                (file, from_path, to_path, md5_source, sha1_source, date))
            self.con.commit()
        except Error as e:
            self.__error = e
        return self.__error

    def insert_evidence(self, case_number, examiner_name, description, note):
        """Store the evidence header row for the case."""
        try:
            self.cur.execute(
                'INSERT INTO evidence (case_number, examiner_name, description, note) VALUES (?,?,?,?)',
                (case_number, examiner_name, description, note))
            self.con.commit()
        except Error as e:
            self.__error = e
        return self.__error

    def select_evidence(self):
        """Return the first evidence row, or None if the table is empty."""
        try:
            self.cur.execute("SELECT * from evidence")
            self.__result = self.cur.fetchone()
            return self.__result
        except Error as e:
            print(e)

    def select_pull_log(self):
        """Return every row of the pull audit log."""
        try:
            self.cur.execute("SELECT * from pull_log")
            self.__result = self.cur.fetchall()
            return self.__result
        except Error as e:
            print(e)

    def select_all_data(self, order):
        """Return (dir name, dir id, file name, permission, size, date) rows,
        sorted DESC by *order* (a trusted column expression)."""
        try:
            query = ('SELECT directory.name as loc, directory.id_directory, file.name as file,'
                     ' file.permision, file.Size, file.date'
                     ' FROM directory, file'
                     ' WHERE directory.id_directory=file.id_directory'
                     ' ORDER BY ' + order + ' DESC')  # identifier: cannot be bound
            self.cur.execute(query)
            self.__result = self.cur.fetchall()
            return self.__result
        except Exception as e:
            self.__error = e.args[0]
            return self.__error

    def select_by_extention(self, ext, order):
        """Rows whose file name contains *ext*, sorted DESC by *order*."""
        try:
            query = ('SELECT directory.name as loc, directory.id_directory, file.name as file,'
                     ' file.permision, file.Size, file.date'
                     ' FROM directory, file'
                     ' WHERE directory.id_directory=file.id_directory AND file.name LIKE ?'
                     ' ORDER BY ' + order + ' DESC')
            self.cur.execute(query, ('%' + ext + '%',))
            self.__result = self.cur.fetchall()
            return self.__result
        except Exception as e:
            self.__error = e.args[0]
            return self.__error

    def insert_dir(self, dir):
        """Insert a directory row; its id autoincrements from 1."""
        try:
            self.cur.execute('INSERT INTO `directory` (`name`) VALUES (?)', (dir,))
            self.con.commit()
        except Exception as e:
            self.__error = e.args[0]
        return self.__error

    def insert_sub_dir(self, id_dir, name):
        """Insert a sub-directory row attached to directory *id_dir*."""
        try:
            self.cur.execute('INSERT INTO `sub_directory` (`id_directory`, `name`) VALUES (?, ?)', (id_dir, name))
            self.con.commit()
        except Exception as e:
            self.__error = e.args[0]
        return self.__error

    def insert_file(self, id_dir, name, permision, date, size):
        """Insert a file row attached to directory *id_dir*."""
        try:
            self.cur.execute(
                'INSERT INTO `file` (`id_directory`, `name`, `permision`, `date`, `size`) VALUES (?, ?, ?, ?, ?)',
                (id_dir, name, permision, date, size))
            self.con.commit()
        except Exception as e:
            self.__error = e.args[0]
        return self.__error

    def select_name_by_id_dir(self, id_dir):
        """Return the sub-directory names under directory *id_dir*."""
        try:
            self.cur.execute('SELECT `name` FROM sub_directory WHERE id_directory = ?', (id_dir,))
            self.__result = self.cur.fetchall()
            return self.__result
        except Exception as e:
            self.__error = e.args[0]
            return self.__error

    def select_name_dir_subDir(self, id_dir):
        """Return (directory name, sub-directory name) pairs for *id_dir*."""
        try:
            self.cur.execute(
                'SELECT directory.`name`, sub_directory.name FROM sub_directory, `directory`'
                ' WHERE sub_directory.id_directory=directory.id_directory AND directory.id_directory = ?',
                (id_dir,))
            self.__result = self.cur.fetchall()
            return self.__result
        except Exception as e:
            self.__error = e.args[0]
            return self.__error

    def select_id_dir_by_name(self, name):
        """Return the id(s) of directories named *name*."""
        try:
            self.cur.execute('SELECT `id_directory` FROM directory WHERE name = ?', (name,))
            self.__result = self.cur.fetchall()
            return self.__result
        except Exception as e:
            self.__error = e.args[0]
            return self.__error

    def search(self, key, order):
        """Rows whose file name, file date or directory name contains *key*.

        Fix: the three OR terms are now parenthesized with the join predicate
        ANDed in front; the original let the ORs bypass the join condition,
        matching cross-join rows.
        """
        try:
            pattern = '%' + key + '%'
            query = ('SELECT directory.name as loc, directory.id_directory, file.name as file,'
                     ' file.permision, file.Size, file.date'
                     ' FROM directory, file'
                     ' WHERE directory.id_directory=file.id_directory'
                     ' AND (file.name LIKE ? OR file.date LIKE ? OR directory.name LIKE ?)'
                     ' GROUP BY id_file'
                     ' ORDER BY ' + order + ' DESC')
            self.cur.execute(query, (pattern, pattern, pattern))
            self.__result = self.cur.fetchall()
            return self.__result
        except Exception as e:
            self.__error = e.args[0]
            return self.__error
|
madePersonal/Android_forensic_tools
|
Data.py
|
Data.py
|
py
| 6,992 |
python
|
en
|
code
| 0 |
github-code
|
6
|
42480209825
|
import pandas as pd
def listaProdutos(tabela_produtos, n):
    """Return product descriptors from *tabela_produtos*.

    n == 0 -> unique "Produto-Marca-Método_Compra" strings (first occurrence wins)
    n == 1 -> one "Produto-Marca-Método_Venda" string per row (duplicates kept)
    n == 2 -> per row, [Produto, Marca, Método_Venda, Valor_Venda, Método_Compra,
              Valor_Compra] obtained by '-'-splitting the joined fields
    """
    resultado = []
    for _, linha in tabela_produtos.iterrows():
        if n == 0:
            chave = linha.Produto + "-" + linha.Marca + "-" + linha.Método_Compra
            if chave not in resultado:
                resultado.append(chave)
        elif n == 1:
            resultado.append(linha.Produto + "-" + linha.Marca + "-" + linha.Método_Venda)
        elif n == 2:
            campos = (linha.Produto + '-' + linha.Marca + '-' + linha.Método_Venda + '-' +
                      str(linha.Valor_Venda) + '-' + linha.Método_Compra + '-' + str(linha.Valor_Compra))
            resultado.append(campos.split('-'))
    return resultado
def listaProdutosV(tabela_compras):
    """Return one "Produto-Marca-Método" string per row of *tabela_compras*."""
    return [linha.Produto + '-' + linha.Marca + "-" + str(linha.Método)
            for _, linha in tabela_compras.iterrows()]
def listaProdutos1(tabela_produtos, idx, quant, id, n):
    """Describe the purchase entry at row *idx* as a list of '-'-split fields.

    n == 0 -> [Produto, Marca, Método_Compra, quant, unit cost, total cost]
    n == 1 -> same, prefixed with str(id)
    any other n -> []
    """
    y = []
    if n == 0 or n == 1:
        prefixo = "" if n == 0 else str(id) + "-"
        descricoes = []
        for _, linha in tabela_produtos.iterrows():
            custo = float(linha.Valor_Compra)
            descricoes.append(prefixo + linha.Produto + "-" + linha.Marca + "-" +
                              linha.Método_Compra + "-" + str(quant) + "-" +
                              str(custo) + "-" + str(custo * float(quant)))
        y = descricoes[int(idx)].split("-")
    return y
def numVenda(tabela_vendas):
    """Return the next sale number as a string (last NumVenda + 1), or '' when empty."""
    proximo = ''
    for _, venda in tabela_vendas.iterrows():
        # Only the value computed from the last row survives the loop.
        proximo = str(int(venda.NumVenda) + 1)
    return proximo
def listaProdutosV1(tabela_produtos, idx, quant, id):
    """Sale counterpart of listaProdutos1: describe the sale entry at row *idx*.

    Returns [id, Produto, Marca, Método_Venda, quant, unit price, total price]
    as '-'-split strings.
    """
    descricoes = []
    for _, linha in tabela_produtos.iterrows():
        preco = float(linha.Valor_Venda)
        descricoes.append(str(id) + "-" + linha.Produto + "-" + linha.Marca + "-" +
                          linha.Método_Venda + "-" + str(quant) + "-" +
                          str(preco) + "-" + str(preco * float(quant)))
    return descricoes[int(idx)].split("-")
# Perfeito
def writerE(tabelas, path):
    """Persist the seven workbook tables to *path*, one fixed-name sheet each."""
    nomes = ['Métodos', 'Produtos', 'P_Vendas', 'Vendas', 'P_Compras', 'Compras', 'Estoque']
    with pd.ExcelWriter(path) as writer:
        for i, nome in enumerate(nomes):
            # Indexing (not zip) keeps the original IndexError when fewer than 7 tables are given.
            tabelas[i].to_excel(writer, sheet_name=nome, index=False)
def listaMetodos(tabela_metodos):
    """Return the Método column values as a plain list."""
    return [linha.Método for _, linha in tabela_metodos.iterrows()]
def listarVC(tabela_vendas):
    """Return one [ID, Data, QItens, Valor_Total] list per sale row (raw values)."""
    linhas = []
    for _, venda in tabela_vendas.iterrows():
        linhas.append([venda.ID, venda.Data, venda.QItens, venda.Valor_Total])
    return linhas
def listarBusca(tabelas, id, quant, data, vT, janela):
    """Filter sales (tabelas[3]) and purchases (tabelas[5]) and push the matches
    into the GUI tables janela['-TBHV-'] / janela['-TBHC-'] via .Update().

    A row matches when any of *id*, *quant*, *data*, *vT* equals (as strings) the
    row's ID, QItens, Data or Valor_Total.  With all four filters empty, every
    row of both tables is shown.
    """
    vendaP = []
    compraP = []
    if id == '' and quant == '' and data == '' and vT == '':
        # No filters: show everything.
        for index, rows in tabelas[3].iterrows():
            vendaP.append([str(rows.ID), rows.Data, str(rows.QItens), str(rows.Valor_Total)])
        for index, rows in tabelas[5].iterrows():
            compraP.append([str(rows.ID), rows.Data, str(rows.QItens), str(rows.Valor_Total)])
        janela['-TBHV-'].Update(values=vendaP)
        janela['-TBHC-'].Update(values=compraP)
        return
    for index, rows in tabelas[3].iterrows():
        if (str(id) == str(rows.ID) or str(quant) == str(rows.QItens)
                or str(data) == str(rows.Data) or str(vT) == str(rows.Valor_Total)):
            vendaP.append([str(rows.ID), rows.Data, str(rows.QItens), str(rows.Valor_Total)])
    # BUG FIX: the purchases loop previously appended the stale previous match (or
    # an empty list) for every non-matching row, because `compra` was neither
    # guarded nor reset like `venda` was.  It now mirrors the sales loop.
    for index, rows in tabelas[5].iterrows():
        if (str(id) == str(rows.ID) or str(quant) == str(rows.QItens)
                or str(data) == str(rows.Data) or str(vT) == str(rows.Valor_Total)):
            compraP.append([str(rows.ID), rows.Data, str(rows.QItens), str(rows.Valor_Total)])
    janela['-TBHV-'].Update(values=vendaP)
    janela['-TBHC-'].Update(values=compraP)
    return
def listaEstoque(tabela_estoque):
    """Return one [Produto, Marca, Método, Quantidade] list per stock row."""
    linhas = []
    for _, item in tabela_estoque.iterrows():
        # Join-then-split is kept on purpose: values containing '-' split exactly
        # like they always did.
        campos = (item.Produto + '-' + item.Marca + '-' + item.Método + '-' + str(item.Quantidade)).split('-')
        linhas.append(campos)
    return linhas
def listarID(tabelaID):
    """Return the next free ID: last row's ID + 1 (1 when the table is empty)."""
    ultimo = 0
    for _, linha in tabelaID.iterrows():
        ultimo = linha.ID
    return int(ultimo) + 1
|
jcromeck/ProjectPetShop
|
Funções.py
|
Funções.py
|
py
| 5,917 |
python
|
pt
|
code
| 0 |
github-code
|
6
|
9388340974
|
"""Functions and constants used in several modules of the gtphipsi package.
This module exports the following functions:
- get_name_from_badge (badge)
- get_all_big_bro_choices ()
- create_user_and_profile (form_data)
- log_page_view (request, name)
This module exports the following constant definitions:
- REFERRER
"""
import logging
from django.conf import settings
from django.contrib.auth.models import Group, Permission
from gtphipsi.brothers.bootstrap import INITIAL_BROTHER_LIST
from gtphipsi.brothers.models import User, UserProfile, VisibilitySettings
log = logging.getLogger('django.request')
# The literal name of the HTTP Referrer header. The typo below in 'referrer' is intentional.
REFERRER = 'HTTP_REFERER'
def get_name_from_badge(badge):
    """Return a brother's first and last name given his badge number, assuming he doesn't have an account."""
    if 0 < badge < len(INITIAL_BROTHER_LIST):
        return INITIAL_BROTHER_LIST[badge][1]
    return None
def get_all_big_bro_choices():
    """Return a list of tuples (badge, name) of all possible big brothers."""
    # FIX: the local was named ``list`` (shadowing the builtin) and aliased the
    # imported INITIAL_BROTHER_LIST, so every call appended profile tuples to the
    # shared module-level constant — breaking len()-based badge checks elsewhere.
    # Work on a copy instead.
    choices = list(INITIAL_BROTHER_LIST)
    for profile in UserProfile.objects.filter(badge__gte=len(INITIAL_BROTHER_LIST)).order_by('badge'):
        tup = (profile.badge, profile.common_name())
        if tup not in choices:
            choices.append(tup)
    return choices
def create_user_and_profile(form_data):
    """Create and save a new User and UserProfile from the cleaned_data dictionary of a UserForm instance."""
    status = form_data['status']
    # create and save the User instance
    user = User.objects.create_user(form_data['username'], form_data['email'], form_data['password'])
    user.first_name = form_data['first_name']
    user.last_name = form_data['last_name']
    # Status 'A' means alumnus; everyone else is treated as an undergraduate
    # for permission-group purposes.
    _create_user_permissions(user, status != 'A', form_data['make_admin'])
    user.save()
    # create and save the UserProfile instance
    public, chapter = _create_visibility_settings()
    # big_brother is stored as an int — presumably the big brother's badge
    # number (see get_all_big_bro_choices); TODO confirm against the model.
    profile = UserProfile.objects.create(user=user, middle_name=form_data['middle_name'], suffix=form_data['suffix'],
                                         nickname=form_data['nickname'], badge=form_data['badge'], status=status,
                                         big_brother=int(form_data['big_brother']), major=form_data['major'],
                                         hometown=form_data['hometown'], current_city=form_data['current_city'],
                                         phone=form_data['phone'], initiation=form_data['initiation'],
                                         graduation=form_data['graduation'], dob=form_data['dob'],
                                         public_visibility=public, chapter_visibility=chapter)
    profile.save()
def log_page_view(request, name):
    """Log a view to the specified page (view), including information about the client viewing the page.

    Python 2 code: uses dict.iteritems() and unicode(). Secrets (passwords,
    CSRF tokens, admin keys) are deliberately excluded from the logged POST data.
    """
    method = request.method
    path = request.path
    if method == 'POST':
        # Render the POST payload as a brace-delimited key/value string.
        post = ', POST Data: { '
        for key, value in request.POST.iteritems():
            if key not in ['csrfmiddlewaretoken', 'password', 'confirm', 'old_pass', 'secret_key', 'admin_password']:
                post += '%s: \'%s\', ' % (key, unicode(value))
        post += '}'
    else:
        post = ''
    if request.user.is_authenticated():
        profile = request.user.get_profile()
        client_string = ' User: %s (%s ... %d),' % (request.user.username, profile.common_name(), profile.badge)
    else:
        client_string = ''
    # The User-Agent header may be absent (e.g. some bots/CLI clients).
    if 'HTTP_USER_AGENT' in request.META:
        user_agent = request.META['HTTP_USER_AGENT']
    else:
        user_agent = '<not supplied>'
    log.debug('[%s]%s Request: %s %s%s, User Agent: %s' % (name, client_string, method, path, post, user_agent))
## ============================================= ##
## ##
## Private Functions ##
## ##
## ============================================= ##
def _get_or_create_group(name, permission_codenames):
    """Return the named auth group, creating it with the given permissions on first use."""
    group, created = Group.objects.get_or_create(name=name)
    if created:
        group.permissions = Permission.objects.filter(codename__in=permission_codenames)
        group.save()
    return group


def _create_user_permissions(user, undergrad, admin):
    """Add a new user to the appropriate permissions group(s).

    Undergraduates and alumni are mutually exclusive; 'Administrators' is an
    additional group granted on top of either.
    """
    # Previously the get-or-create-group logic was copy-pasted three times;
    # it is now factored into _get_or_create_group.
    if undergrad:
        user.groups.add(_get_or_create_group('Undergraduates', settings.UNDERGRADUATE_PERMISSIONS))
    else:
        user.groups.add(_get_or_create_group('Alumni', settings.ALUMNI_PERMISSIONS))
    if admin:
        user.groups.add(_get_or_create_group('Administrators', settings.ADMINISTRATOR_PERMISSIONS))
def _create_visibility_settings():
    """Create default visibility settings for a new profile: everything hidden
    publicly, everything visible to the chapter."""
    field_names = ('full_name', 'big_brother', 'major', 'hometown', 'current_city',
                   'initiation', 'graduation', 'dob', 'phone', 'email')
    public_visibility = VisibilitySettings.objects.create(**{field: False for field in field_names})
    public_visibility.save()
    chapter_visibility = VisibilitySettings.objects.create(**{field: True for field in field_names})
    chapter_visibility.save()
    return public_visibility, chapter_visibility
|
will2dye4/gtphipsi
|
common.py
|
common.py
|
py
| 5,867 |
python
|
en
|
code
| 2 |
github-code
|
6
|
39620320183
|
from Folder_de_Testes.base import Fox_HEIGHT, Fox_WIDTH
import pygame
import random
#Parametros gerais
WIDTH = 880
HEIGHT = 400
gravity = 1
def randon_sizes_for_walls(xpos):
    """Build a (bottom wall, inverted top wall) pair at *xpos* with a random gap."""
    protection = 200  # vertical gap reserved between the two walls
    altura = random.randint(200, 400)
    bottom = Wall(False, xpos, altura)
    top = Wall(True, xpos, HEIGHT - altura - protection)
    return (bottom, top)
class Fox(pygame.sprite.Sprite):
    """Player sprite: a fox with gravity, a flap ("pulo") and a 3-frame walk cycle."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        count_fox = 0
        Fox_WIDTH = 170
        Fox_HEIGHT = 100
        self.gravity = 1
        # Walk-cycle frames, all scaled to the same size.
        Fox1 = pygame.image.load('Folder_de_Testes/assets/img/raposa 1.png').convert_alpha()
        Fox1 = pygame.transform.scale(Fox1, (Fox_WIDTH, Fox_HEIGHT))
        Fox2 = pygame.image.load('Folder_de_Testes/assets/img/raposa2.png').convert_alpha()
        Fox2 = pygame.transform.scale(Fox2, (Fox_WIDTH, Fox_HEIGHT))
        Fox3 = pygame.image.load('Folder_de_Testes/assets/img/raposa 3.png').convert_alpha()
        Fox3 = pygame.transform.scale(Fox3, (Fox_WIDTH, Fox_HEIGHT))
        # Separate frame used while moving upwards (after a flap).
        self.flying_one = pygame.image.load('Folder_de_Testes/assets/img/raposafinal.png').convert_alpha()
        self.flying_one = pygame.transform.scale(self.flying_one, (100, 100))
        self.images = [Fox1,Fox2,Fox3]
        self.count_fox = count_fox          # frames since the last walk-animation swap
        self.image = Fox1
        self.rect = self.image.get_rect()
        self.rect.centerx = WIDTH / 4
        self.rect.bottom = HEIGHT - 100
        self.speedy = 1                     # vertical speed; positive = falling
        self.now_on_windon = 0              # index of the current walk frame
        self.speed_modifier = 0.0           # slowly decreases, weakening each flap over time

    def update(self):
        # Vertical motion with gravity plus a damping term proportional to speed.
        self.rect.y += self.speedy
        self.speedy += self.gravity + 0.1 * (-self.speedy)
        self.mask = pygame.mask.from_surface(self.image)
        self.count_fox += 1
        #print(self.speed_modifier)
        if self.speed_modifier > -12:
            self.speed_modifier -= 0.0024
        # Advance the walk cycle every 10 frames, but only while on/below the floor.
        if self.count_fox >= 10 and self.rect.bottom > HEIGHT:
            self.now_on_windon = (self.now_on_windon + 1) % 3
            self.image = self.images[self.now_on_windon]
            self.count_fox = 0
        elif self.speedy <0 :
            self.image = self.flying_one
        #print(self.speedy)
        #print(self.count_fox)
        # Keep the fox within the screen.
        if self.rect.bottom > HEIGHT:
            self.rect.bottom = HEIGHT
            #self.speedy = 1
            #game = False
        if self.rect.top < 0:
            self.rect.top = 0

    def pulo(self):
        # Flap: jolt upwards, progressively weaker as speed_modifier decays.
        self.speedy = -16 + self.speed_modifier
# Module-level singletons: one Fox sprite in its own group, shared by the game loop.
fox_group = pygame.sprite.Group()
fox = Fox()
fox_group.add(fox)
class Wall_meteor_fisic(pygame.sprite.Sprite):
    """Obstacle sprite that drifts right-to-left and respawns at the right edge."""

    def __init__(self, img):
        # Parent (Sprite) constructor.
        pygame.sprite.Sprite.__init__(self)
        Wall_WIDTH = 50
        self.image = img
        self.rect = self.image.get_rect()
        # Spawn at the right edge, at a random height, with a random leftward speed.
        self.rect.x = (WIDTH-Wall_WIDTH)
        self.rect.y = random.randint(10,300)
        self.speedx = random.randint(-5, -3)
        # NOTE: two dead ``Wall_HEIGHT = random.randint(50, 250)`` assignments were
        # removed; this shifts the shared random sequence but has no functional effect.

    def update(self):
        # Drift left each frame.
        self.rect.x += self.speedx
        Wall_WIDTH = 50
        # Once fully off-screen, respawn on the right with a fresh position/speed.
        if self.rect.top > HEIGHT or self.rect.right < 0 or self.rect.left > WIDTH:
            self.rect.x = (WIDTH-Wall_WIDTH)
            self.rect.y = random.randint(10,300)
            self.speedx = random.randint(-5, -3)
class Invible_wall(pygame.sprite.Sprite):
    """Invisible sprite wrapper around an image (used only for its rect/collisions).

    FIX: the class called ``pygame.sprite.Sprite.__init__`` without actually
    subclassing Sprite — inconsistent with every other sprite class in this file
    and unusable inside sprite Groups.  It now inherits Sprite.
    """

    def __init__(self, img):
        pygame.sprite.Sprite.__init__(self)
        self.image = img
        self.rect = self.image.get_rect()
class Wall(pygame.sprite.Sprite):
    """Tree obstacle placed at *posx*; *inversal* flips it to hang from the top."""

    def __init__(self, inversal, posx, posy):
        # Parent (Sprite) constructor.
        pygame.sprite.Sprite.__init__(self)
        wall_HEIGHT = 80
        wall_WIDTH = 80
        self.image = pygame.image.load('Folder_de_Testes/assets/img/Tree.png').convert_alpha()
        self.image = pygame.transform.scale(self.image, (wall_WIDTH, wall_HEIGHT))
        self.rect = self.image.get_rect()
        self.rect[0] = posx
        if inversal:
            # BUG FIX: was ``pygame.transaform.flip`` (typo), which raised
            # AttributeError whenever an inverted wall was created.
            self.image = pygame.transform.flip(self.image, False, True)
            self.rect[1] = (self.rect[3] - posy)
        else:
            self.rect[1] = HEIGHT - posy
        self.mask = pygame.mask.from_surface(self.image)
        self.speedx = random.randint(-5, -3)

    def update(self):
        # Drift left each frame.
        self.rect[0] += self.speedx
class Coin(pygame.sprite.Sprite):
    """Collectible coin scrolling right-to-left along the bottom of the screen."""

    def __init__(self):
        # Parent (Sprite) constructor.
        pygame.sprite.Sprite.__init__(self)
        coin_HEIGHT = 50
        coin_WIDTH = 50
        self.image = pygame.image.load('Folder_de_Testes/assets/img/coin.png').convert_alpha()
        self.mask = pygame.mask.from_surface(self.image)
        self.rect = self.image.get_rect()
        # Spawn at the right edge, resting on the bottom of the screen.
        self.rect.x = (WIDTH-coin_WIDTH)
        self.rect.y = (HEIGHT - coin_HEIGHT)
        self.speedx = random.randint(-5, -3)
        # Unused here, but the draw is kept: removing it would shift the RNG stream.
        METEOR_HEIGHT = random.randint(50, 250)

    def update(self):
        # Draw a fresh respawn height every frame (only used when respawning).
        METEOR_HEIGHT = random.randint(50, 250)
        self.rect.x += self.speedx
        coin_WIDTH = 50
        # Once fully off-screen, respawn on the right at the random height
        # with a fresh speed.
        if self.rect.top > HEIGHT or self.rect.right < 0 or self.rect.left > WIDTH:
            self.rect.x = (WIDTH-coin_WIDTH)
            self.rect.y = (HEIGHT - METEOR_HEIGHT)
            self.speedx = random.randint(-5, -3)
class Predator(pygame.sprite.Sprite):
    """Piranha hazard scrolling right-to-left at a random height."""

    def __init__(self):
        # Parent (Sprite) constructor.
        pygame.sprite.Sprite.__init__(self)
        coin_HEIGHT = 50
        coin_WIDTH = 50
        self.image = pygame.image.load('Folder_de_Testes/assets/img/piranha.png').convert_alpha()
        self.image = pygame.transform.scale(self.image, (coin_WIDTH, coin_HEIGHT))
        self.mask = pygame.mask.from_surface(self.image)
        self.rect = self.image.get_rect()
        # Spawn at the right edge, at a random height.
        self.rect.x = (WIDTH-coin_WIDTH)
        self.rect.y = random.randint(10, 300)
        self.speedx = random.randint(-5, -3)
        # Unused here, but the draw is kept: removing it would shift the RNG stream.
        METEOR_HEIGHT = random.randint(50, 250)

    def update(self):
        # Draw a fresh respawn height every frame (only used when respawning).
        METEOR_HEIGHT = random.randint(50, 250)
        self.rect.x += self.speedx
        coin_WIDTH = 50
        # Once fully off-screen, respawn on the right at the random height
        # with a fresh speed.
        if self.rect.top > HEIGHT or self.rect.right < 0 or self.rect.left > WIDTH:
            self.rect.x = (WIDTH-coin_WIDTH)
            self.rect.y = (HEIGHT - METEOR_HEIGHT)
            self.speedx = random.randint(-5, -3)
|
RodrigoAnciaes/Flying_Fox_game
|
Folder_de_Testes/personagens.py
|
personagens.py
|
py
| 7,008 |
python
|
en
|
code
| 0 |
github-code
|
6
|
44295661280
|
import numpy as np
from lib import EulerUtils as eu
# Problem 36 solution!
def checkIfNumberIsPalindromeInBothBases(number):
    """Return True when *number* is a palindrome both in base 10 and in base 2."""
    numberString = str(number)
    baseTwoString = "{0:b}".format(number)
    # Return the condition directly instead of the if/else True/False pattern.
    return eu.isPalindrome(numberString) and eu.isPalindrome(baseTwoString)
# Sum every number below one million that is palindromic in base 10 and base 2
# (Project Euler 36).  Renamed from ``sum`` so the builtin is not shadowed.
total = sum(x for x in range(1000000) if checkIfNumberIsPalindromeInBothBases(x))
print (total)
|
Renoh47/ProjectEuler
|
project euler python/problem36.py
|
problem36.py
|
py
| 426 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71971273469
|
import logging
from kubernetes import client
from kubernetes.client.models.v1_resource_requirements import V1ResourceRequirements
from kubeflow.fairing.constants import constants
logger = logging.getLogger(__name__)
def get_resource_mutator(cpu=None, memory=None, gpu=None, gpu_vendor='nvidia'):
    """The mutator for getting the resource setting for pod spec.

    The useful example:
    https://github.com/kubeflow/fairing/blob/master/examples/train_job_api/main.ipynb

    :param cpu: Limits and requests for CPU resources (Default value = None)
    :param memory: Limits and requests for memory, in GB (Default value = None)
    :param gpu: Limits for GPU (Default value = None)
    :param gpu_vendor: Default value is 'nvidia', also can be set to 'amd'.
    :returns: object: The mutator function for setting cpu and memory in pod spec.
    """
    def _resource_mutator(kube_manager, pod_spec, namespace): #pylint:disable=unused-argument
        if cpu is None and memory is None and gpu is None:
            return
        if pod_spec.containers and len(pod_spec.containers) >= 1:
            # All cloud providers specify their instance memory in GB, so the
            # user gives GB and we convert to the Gi unit that K8s needs.
            limits = {}
            if cpu:
                limits['cpu'] = cpu
            if memory:
                memory_gib = "{}Gi".format(round(memory/1.073741824, 2))
                limits['memory'] = memory_gib
            if gpu:
                limits[gpu_vendor + '.com/gpu'] = gpu
            if pod_spec.containers[0].resources:
                # BUG FIX: the old code reset ``limits`` only when it was already
                # truthy, so when ``resources.limits`` was None the per-key
                # assignment below raised TypeError.  Resetting unconditionally
                # keeps the previous "replace" semantics and handles None too.
                pod_spec.containers[0].resources.limits = {}
                for k, v in limits.items():
                    pod_spec.containers[0].resources.limits[k] = v
            else:
                pod_spec.containers[0].resources = V1ResourceRequirements(limits=limits)
    return _resource_mutator
def mounting_pvc(pvc_name, pvc_mount_path=constants.PVC_DEFAULT_MOUNT_PATH):
    """The function has been deprecated, please use `volume_mounts`.

    Kept only as a backward-compatible shim that logs a warning and delegates.
    """
    logger.warning("The function mounting_pvc has been deprecated, \
        please use `volume_mounts`")
    return volume_mounts('pvc', pvc_name, mount_path=pvc_mount_path)
def volume_mounts(volume_type, volume_name, mount_path, sub_path=None):
    """The function for pod_spec_mutators to mount volumes.

    :param volume_type: supported types: 'secret', 'config_map' and 'pvc'
    :param volume_name: the name of the volume
    :param mount_path: path the volume mounts to
    :param sub_path: sub-path the volume mounts to (Default value = None)
    :returns: object: mutator function that mounts the volume into the pod
    """
    mount_name = str(constants.DEFAULT_VOLUME_NAME) + volume_name

    def _volume_mounts(kube_manager, pod_spec, namespace): #pylint:disable=unused-argument
        mount = client.V1VolumeMount(
            name=mount_name, mount_path=mount_path, sub_path=sub_path)
        existing_mounts = pod_spec.containers[0].volume_mounts
        if existing_mounts:
            existing_mounts.append(mount)
        else:
            pod_spec.containers[0].volume_mounts = [mount]
        if volume_type == 'pvc':
            source = client.V1PersistentVolumeClaimVolumeSource(claim_name=volume_name)
            volume = client.V1Volume(name=mount_name, persistent_volume_claim=source)
        elif volume_type == 'secret':
            volume = client.V1Volume(
                name=mount_name,
                secret=client.V1SecretVolumeSource(secret_name=volume_name))
        elif volume_type == 'config_map':
            volume = client.V1Volume(
                name=mount_name,
                config_map=client.V1ConfigMapVolumeSource(name=volume_name))
        else:
            raise RuntimeError("Unsupport type %s" % volume_type)
        existing_volumes = pod_spec.volumes
        if existing_volumes:
            existing_volumes.append(volume)
        else:
            pod_spec.volumes = [volume]
    return _volume_mounts
def add_env(env_vars):
    """The function for pod_spec_mutators to add custom environment vars.

    :param env_vars: dict of custom environment vars.
    :returns: object: mutator function that adds the vars to the pod's first container.
    """
    def _add_env(kube_manager, pod_spec, namespace): #pylint:disable=unused-argument
        new_envs = [client.V1EnvVar(name=k, value=v) for k, v in env_vars.items()]
        if pod_spec.containers and len(pod_spec.containers) >= 1:
            if pod_spec.containers[0].env:
                pod_spec.containers[0].env.extend(new_envs)
            else:
                pod_spec.containers[0].env = new_envs
    return _add_env
def get_node_selector(node_selector):
    """This function for pod_spec_mutators designates a node selector.

    :param node_selector: dict of selection constraints (None = no-op)
    :return: object: mutator function that sets the node selector on the pod spec
    """
    def _node_selector(kube_master, pod_spec, namespace): #pylint:disable=unused-argument
        if node_selector is not None and pod_spec.containers and len(pod_spec.containers) >= 1:
            pod_spec.node_selector = node_selector
    return _node_selector
|
kubeflow/fairing
|
kubeflow/fairing/kubernetes/utils.py
|
utils.py
|
py
| 5,342 |
python
|
en
|
code
| 336 |
github-code
|
6
|
70007062267
|
from typing import Any, Dict
import os
import json
import httpx
from odt.config import PipeConfig
_TEMPFILENAME = "lgbm_tmp_model.txt"
class ODTManager:
    """HTTP client that pushes pipeline config and model updates to an ODT server."""

    def __init__(self, server_host: str) -> None:
        # Base URL (scheme + host[:port]) of the ODT inference server.
        self.server_host = server_host

    def update_config(self, config: PipeConfig):
        """POST the pipeline config as JSON; raise unless the server answers 200."""
        # serialization from pydantic with .json method doesn't work internally,
        # so round-trip through a string to get plain JSON data
        json_data = json.loads(config.json())
        r = httpx.post(f"{self.server_host}/config", json=json_data)
        if r.status_code != 200:
            raise Exception(f"Something went wrong updating the config, status code {r.status_code}")
        print("Updating config succeeded!")

    def update_model(self, model: Any):
        """Serialize *model* to a temp file, POST its text, then clean up."""
        model.save_model(_TEMPFILENAME)
        with open(_TEMPFILENAME, "r") as f:
            in_mem_model: str = f.read()
        os.remove(_TEMPFILENAME)
        r = httpx.post(f"{self.server_host}/model", content=bytes(in_mem_model, encoding='utf-8'))
        if r.status_code != 200:
            raise Exception(f"Something went wrong updating the model, status code {r.status_code}")
        print("Updating model succeeded!")

    def update_config_and_model(self, config: PipeConfig, model: Any):
        """Convenience wrapper: push the config first, then the model."""
        self.update_config(config)
        self.update_model(model)

    def get_prediction(self, data: Dict[str, Any]) -> float:
        """POST one feature dict and return the server's prediction."""
        r = httpx.post(f"{self.server_host}/predict", json=data)
        return r.json()
|
Tsoubry/fast-lightgbm-inference
|
rust-transformer/python/odt/manage.py
|
manage.py
|
py
| 1,506 |
python
|
en
|
code
| 0 |
github-code
|
6
|
3480167544
|
import json
import boto3
from smart_open import smart_open, codecs
from ConfigParser import ConfigParser
import psycopg2
def publish_message(producerInstance, topic_name, key, value):
    "Function to send messages to the specific topic"
    try:
        producerInstance.produce(topic_name, key=key, value=value)
        producerInstance.flush()
    except Exception as ex:
        print('Exception in publishing message')
        print(str(ex))
    else:
        print('Message published successfully.')
def config(filename='database.ini', section='postgresql'):
    """Read *section* of the INI file *filename* and return its options as a dict.

    Raises Exception when the section is missing.
    """
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
    return {option: value for option, value in parser.items(section)}
def insert_data(finaldict,tablename):
    """Bulk-insert a list of row dicts into *tablename* via psycopg2.

    Column names come from the FIRST dict's keys; every dict is assumed to have
    the same keys in the same order — TODO confirm at the call sites.
    Errors are printed, not raised; the connection is always closed.
    """
    conn = None
    try:
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor
        curs = conn.cursor()
        # One "%s" placeholder per ROW: psycopg2's mogrify renders each values
        # tuple as a parenthesized record, producing a multi-row VALUES clause.
        query = curs.mogrify("INSERT INTO {} ({}) VALUES {}".format(
            tablename,
            ', '.join(finaldict[0].keys()),
            ', '.join(["%s"] * len(finaldict))
        ), [tuple(v.values()) for v in finaldict])
        print(query)
        curs.execute(query)
        conn.commit()
        curs.close()
    except (Exception, psycopg2.DatabaseError) as error:
        # Best-effort: log and swallow so one bad batch doesn't stop the run.
        print(error)
    finally:
        if conn is not None:
            conn.close()
def get_event_files(tableprefix):
    """List the S3 objects in the GDELT bucket whose key starts with *tableprefix*."""
    return list(my_bucket.objects.filter(Prefix=tableprefix))
# Connect to S3 and open one sample file of each GDELT table type as a
# UTF-8-decoded stream (smart_open/codecs wrap the raw boto3 body).
client = boto3.client('s3')
resource = boto3.resource('s3')
my_bucket = resource.Bucket('gdelt-sample-data')
events_files = get_event_files("events")
gkg_files = get_event_files("gkg")
mentions_files = get_event_files("mentions")
gkg_obj = codecs.getreader('utf-8')(gkg_files[0].get()['Body'])
event_obj = codecs.getreader('utf-8')(events_files[0].get()['Body'])
mention_obj = codecs.getreader('utf-8')(mentions_files[0].get()['Body'])
# Column names for the GDELT 2.0 "events" table (61 tab-separated fields).
events_columns = ['GlobalEventID', 'Day', 'MonthYear', 'Year', 'FractionDate',
                  'Actor1Code', 'Actor1Name', 'Actor1CountryCode',
                  'Actor1KnownGroupCode', 'Actor1EthnicCode',
                  'Actor1Religion1Code', 'Actor1Religion2Code',
                  'Actor1Type1Code', 'Actor1Type2Code', 'Actor1Type3Code',
                  'Actor2Code', 'Actor2Name', 'Actor2CountryCode',
                  'Actor2KnownGroupCode', 'Actor2EthnicCode',
                  'Actor2Religion1Code', 'Actor2Religion2Code',
                  'Actor2Type1Code', 'Actor2Type2Code', 'Actor2Type3Code',
                  'IsRootEvent', 'EventCode', 'EventBaseCode',
                  'EventRootCode', 'QuadClass', 'GoldsteinScale',
                  'NumMentions', 'NumSources', 'NumArticles', 'AvgTone',
                  'Actor1Geo_Type', 'Actor1Geo_Fullname',
                  'Actor1Geo_CountryCode', 'Actor1Geo_ADM1Code',
                  'Actor1Geo_ADM2Code', 'Actor1Geo_Lat', 'Actor1Geo_Long',
                  'Actor1Geo_FeatureID', 'Actor2Geo_Type',
                  'Actor2Geo_Fullname', 'Actor2Geo_CountryCode',
                  'Actor2Geo_ADM1Code', 'Actor2Geo_ADM2Code',
                  'Actor2Geo_Lat', 'Actor2Geo_Long', 'Actor2Geo_FeatureID',
                  'ActionGeo_Type', 'ActionGeo_Fullname',
                  'ActionGeo_CountryCode', 'ActionGeo_ADM1Code',
                  'ActionGeo_ADM2Code', 'ActionGeo_Lat', 'ActionGeo_Long',
                  'ActionGeo_FeatureID', 'DATEADDED', 'SOURCEURL']
# Column names for the GDELT "gkg" table (27 tab-separated fields).
gkg = ["recordid","date" , "srccollectionidentifier","srccommonname","documentid","counts","countsv1","themes","enhancedthemes",
       "locations", "enhancedlocation","persons","enhancedpersons","organizations","enhancedorganizations","tone","enhanceddates",
       "gcam","sharingimage","relatedimages", "socialimageembeds", "socialvideoembeds", "quotations", "allnames", "amounts","translationinfo",
       "extrasxml"]
# Column names for the GDELT "mentions" table (14 tab-separated fields).
mentions = ["GlobalEventID","EventTimeDate","MentionTimeDate","MentionType","MentionSourceName","MentionIdentifier","SentenceID",
            "Actor1CharOffset","Actor2CharOffset","ActionCharOffset","InRawText","Confidence","MentionDocLen","MentionDocTone"]
# Parse each stream into a list of row dicts (one key per column name), keeping
# only lines with the expected field count, then insert in 1000-row batches.
# NOTE(review): this is Python 2 code — ``.encode("utf-8")`` keeps values as
# byte strings; under Python 3 it would turn them into bytes objects.
gkg_finaldict=[]
for record in gkg_obj:
    features = record.strip().split("\t")
    if(len(features)==27):
        tmpDict = dict()
        tmpDict = dict({gkg[i]:features[i].encode("utf-8") for i in range(len(gkg))})
        gkg_finaldict.append(tmpDict)
for i in range(0,len(gkg_finaldict),1000):
    insert_data(gkg_finaldict[i:i+1000],"public.gkg")
events_finaldict = []
event_finaldict=[]
for record in event_obj:
    features = record.strip().split("\t")
    if(len(features)==61):
        tmpDict = dict()
        tmpDict = dict({events_columns[i]: features[i].encode("utf-8") for i in range(len(events_columns))})
        event_finaldict.append(tmpDict)
for i in range(0,len(event_finaldict),1000):
    insert_data(event_finaldict[i:i+1000],"public.events")
mentions_finaldict=[]
for record in mention_obj:
    features = record.strip().split("\t")
    print(record)
    if(len(features)==14):
        tmpDict = dict()
        tmpDict = dict({mentions[i]: features[i].encode("utf-8") for i in range(len(mentions))})
        mentions_finaldict.append(tmpDict)
for i in range(0,len(mentions_finaldict),1000):
    insert_data(mentions_finaldict[i:i+1000],"public.mentions")
|
vikash4281/Corpus-Callosum
|
Ingestion/Streaming.py
|
Streaming.py
|
py
| 5,581 |
python
|
en
|
code
| 0 |
github-code
|
6
|
9756160768
|
from __future__ import print_function, absolute_import, division
import numpy as np
def pad_sequences(sequences, maxlen=None, dtype='int32',
                  padding='pre', truncating='pre', value=0.):
    '''
    Pad each sequence to the same length:
    the length of the longest sequence.

    If maxlen is provided, any sequence longer than maxlen is truncated
    to maxlen. Truncation happens off either the beginning (default) or
    the end of the sequence.

    Supports post-padding and pre-padding (default).

    Parameters:
    -----------
    sequences: list of lists where each element is a sequence
    maxlen: int, maximum length
    dtype: type to cast the resulting sequence.
    padding: 'pre' or 'post', pad either before or after each sequence.
    truncating: 'pre' or 'post', remove values from sequences larger than
        maxlen either in the beginning or in the end of the sequence
    value: float, value to pad the sequences to the desired value.

    Returns:
    -------
    x: numpy array with dimensions (number_of_sequences, maxlen)

    Raises:
    ------
    ValueError: if padding or truncating is neither 'pre' nor 'post'.

    Example:
    -------
        > pad_sequences([[1,2,3],
                         [1,2],
                         [1,2,3,4]], maxlen=3, padding='post', truncating='pre')
        > [[1,2,3],
           [1,2,0],
           [2,3,4]]
    '''
    lengths = [len(s) for s in sequences]
    nb_samples = len(sequences)
    if maxlen is None:
        # np.max of an empty list would raise; an empty batch pads to width 0.
        maxlen = np.max(lengths) if lengths else 0
    x = (np.ones((nb_samples, maxlen)) * value).astype(dtype)
    for idx, s in enumerate(sequences):
        if len(s) == 0:
            continue  # empty list was found
        if truncating == 'pre':
            trunc = s[-maxlen:]
        elif truncating == 'post':
            trunc = s[:maxlen]
        else:
            # BUG FIX: the message previously interpolated ``padding``.
            raise ValueError("Truncating type '%s' not understood" % truncating)
        if padding == 'post':
            x[idx, :len(trunc)] = trunc
        elif padding == 'pre':
            x[idx, -len(trunc):] = trunc
        else:
            raise ValueError("Padding type '%s' not understood" % padding)
    return x
|
trungnt13/odin_old
|
odin/features/text.py
|
text.py
|
py
| 2,108 |
python
|
en
|
code
| 2 |
github-code
|
6
|
17661406387
|
from collections import defaultdict, deque
from enum import Enum
def read(filename):
    """Parse *filename* into a list of (opcode, args-tuple) instructions."""
    with open(filename) as f:
        parsed = [line.strip().split(' ') for line in f]
    return [(fields[0], tuple(fields[1:])) for fields in parsed]
def isint(exp):
    """True when *exp* parses as an integer literal."""
    try:
        int(exp)
    except ValueError:
        return False
    return True
def val(exp, regs):
    """Resolve *exp*: a literal integer, or else the named register's value."""
    return int(exp) if isint(exp) else regs[exp]
class State(Enum):
    """Outcome of a single Program.step() call."""
    ENDED = 1    # pc ran off the instruction list
    STUCK = 2    # 'rcv' with an empty input queue; pc unchanged
    RUNNING = 3  # instruction executed normally
class Program(object):
    """One Duet program (Advent of Code 2017, day 18 part 2).

    Registers default to 0; register 'p' starts as the program id.  'snd'
    appends to *outq*, 'rcv' pops from *inq* (returning STUCK, without
    advancing pc, when the queue is empty).  snd_count tracks part 2's answer.
    """

    def __init__(self, id, insts, inq, outq):
        self.regs = defaultdict(int)
        self.regs['p'] = id
        self.pc = 0
        self.insts = insts
        self.inq = inq
        self.outq = outq
        self.snd_count = 0

    def step(self):
        """Execute one instruction and report the resulting State."""
        if not (0 <= self.pc < len(self.insts)):
            return State.ENDED
        op, args = self.insts[self.pc]
        if op == 'snd':
            # Send: push the value onto the partner's queue.
            self.outq.append(val(args[0], self.regs))
            self.pc += 1
            self.snd_count += 1
            return State.RUNNING
        elif op == 'set':
            self.regs[args[0]] = val(args[1], self.regs)
            self.pc += 1
            return State.RUNNING
        elif op == 'add':
            self.regs[args[0]] += val(args[1], self.regs)
            self.pc += 1
            return State.RUNNING
        elif op == 'mul':
            self.regs[args[0]] *= val(args[1], self.regs)
            self.pc += 1
            return State.RUNNING
        elif op == 'mod':
            self.regs[args[0]] = self.regs[args[0]] % val(args[1], self.regs)
            self.pc += 1
            return State.RUNNING
        elif op == 'rcv':
            # Receive: block (STUCK, pc unchanged) until input is available.
            if len(self.inq) == 0:
                return State.STUCK
            else:
                self.regs[args[0]] = self.inq.popleft()
                self.pc += 1
                return State.RUNNING
        elif op == 'jgz':
            # Jump by offset when the first operand is greater than zero.
            x = val(args[0], self.regs)
            if x > 0:
                self.pc += val(args[1], self.regs)
            else:
                self.pc += 1
            return State.RUNNING
        # NOTE(review): an unknown opcode falls through and returns None.
def process(prog_a, prog_b, nsteps=100000):
    """Step both programs in lockstep for at most *nsteps* iterations.

    Improvement over the original (which ignored the returned states and
    always burned all *nsteps* iterations): stop as soon as neither
    program made progress — both ENDED or STUCK on an empty queue —
    since from that point no further state change is possible.
    """
    for _ in range(nsteps):
        res_a = prog_a.step()
        res_b = prog_b.step()
        if res_a is not State.RUNNING and res_b is not State.RUNNING:
            break  # deadlock or joint termination; extra steps are no-ops
queue_a = deque()
queue_b = deque()
insts = read('input-18.txt')
prog_a = Program(0, insts, queue_a, queue_b)
prog_b = Program(1, insts, queue_b, queue_a)
process(prog_a, prog_b)
print(prog_b.snd_count)
|
pdhborges/advent-of-code
|
2017/18.py
|
18.py
|
py
| 2,447 |
python
|
en
|
code
| 0 |
github-code
|
6
|
36021205025
|
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from sendQueries import SendQueriesHandler
from ResponseHandler import ResponseHandler
class HomeHandler(webapp.RequestHandler):
def get(self):
self.response.out.write("Hello!")
appRoute = webapp.WSGIApplication( [
('/', HomeHandler),
('/response', ResponseHandler),
('/sendQueries', SendQueriesHandler),
], debug=True)
def main():
run_wsgi_app(appRoute)
if __name__ == '__main__':
main()
|
stolksdorf/lifetracker
|
web/home.py
|
home.py
|
py
| 508 |
python
|
en
|
code
| 1 |
github-code
|
6
|
20785922085
|
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('reviews.views',
url(r'^$', 'home', name='home'),
url(r'^courses/$', 'courses', name='courses'),
url(r'^courses/find/$', 'find_course', name='find_course'),
url(r'^courses/search/$', 'search', name='search'),
url(r'^courses/add/$', 'add_course', name='add_course'),
url(r'^courses/(?P<course_id>\d+)/$', 'course', name="course"),
url(r'^courses/(?P<course_id>\d+)/choose_class/$', 'choose_class_to_review', name='choose_class'),
url(r'^courses/(?P<class_id>\d+)/review/$', 'review_course', name="review_course"),
url(r'^courses/(?P<class_id>\d+)/review/(?P<review_id>\d+)/edit/$', 'review_course', name="edit_review"),
url(r'^courses/(?P<course_id>\d+)/edit/$', 'edit_course', name="edit_course"),
url(r'^depts/$', 'departments', name='departments'),
url(r'^depts/(?P<dept_abb>.+)/$', 'department', name='department'),
url(r'^instructors/$', 'instructors', name='instructors'),
url(r'^instructors/add/$', 'add_instructor', name='add_instructor'),
url(r'^instructors/(?P<instructor_id>\d+)/$', 'instructor', name='instructor'),
url(r'^tags/$', 'tags', name='tags'),
url(r'^tags/(?P<tag_name>\w+)/$', 'tag', name='tag'),
url(r'^allreviews/$', 'reviews', name='reviews'),
url(r'^students/$', 'students', name='students'),
url(r'^students/(?P<student_id>\d+)/$', 'student', name='student'),
url(r'^login/$', 'login', name='login'),
url(r'^logout/$', 'logout_page', name='logout'),
)
urlpatterns += patterns('',
url(r'^admin/', include(admin.site.urls)),
)
|
aldeka/ClassShare
|
classshare/urls.py
|
urls.py
|
py
| 1,746 |
python
|
en
|
code
| 3 |
github-code
|
6
|
37892985042
|
class Solution:
    def isValidSudoku(self, board: List[List[str]]) -> bool:
        """Return True when no row, column or 3x3 box contains a
        duplicate digit; '.' cells are ignored."""
        seen_rows = [set() for _ in range(9)]
        seen_cols = [set() for _ in range(9)]
        seen_boxes = [set() for _ in range(9)]
        for r in range(9):
            for c in range(9):
                digit = board[r][c]
                if digit == ".":
                    continue
                box = (r // 3) * 3 + c // 3
                if (digit in seen_rows[r]
                        or digit in seen_cols[c]
                        or digit in seen_boxes[box]):
                    return False
                seen_rows[r].add(digit)
                seen_cols[c].add(digit)
                seen_boxes[box].add(digit)
        return True
|
johnrhimawan/LeetCode-Solution
|
Medium/valid-sudoku.py
|
valid-sudoku.py
|
py
| 644 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40291222328
|
"""
Purpose - A concordance
Author - Vivek T S
Date - 12/12/2018
"""
def find(text, target):
    """Return the index of the first occurrence of *target* in *text*,
    or -1 when absent (naive sliding-window scan)."""
    window = len(target)
    for start in range(len(text) - window + 1):
        if text[start:start + window] == target:
            return start
    return -1
def concordanceEntry(target):
textFile = open('Mobydick.txt','r',encoding='utf-8')
lineNumber=1
for line in textFile:
found = find(line,target)
if found >= 0:
print(lineNumber,(80-found)*' ',line)
lineNumber=lineNumber+1
def main():
textFile = open('Mobydick.txt','r',encoding='utf-8')
text = textFile.read()
textFile.close()
word = input('Search for : ')
while word != 'q':
#index = find(text,word)
concordanceEntry(word)
#print(word,'at index',index)
word = input('Search for :')
main()
|
vivekworks/learning-to-code
|
4. Discovering Computer Science/Python/Chapter 6 - Text, Documents & DNA/concordance.py
|
concordance.py
|
py
| 731 |
python
|
en
|
code
| 0 |
github-code
|
6
|
12903245570
|
import faust
import uuid
app = faust.App(
'greetings',
broker='kafka://localhost:9092',
)
class Greeting(faust.Record, serializer='json', isodates=True):
message: str
uuid: str
greetings_topic = app.topic('greetings', value_type=Greeting)
@app.agent(greetings_topic)
async def get_greetings(greetings):
"""Receives the message and prints the greeting in the logger
"""
async for greeting in greetings:
print(greeting.message)
print(greeting.uuid)
@app.timer(5)
async def produce():
for i in range(100):
await get_greetings.send(value={
"message": f'hello from {i}',
"uuid": uuid.uuid1()
})
if __name__ == '__main__':
app.main()
|
tyao117/faust-fastapi
|
faust_hello_world.py
|
faust_hello_world.py
|
py
| 747 |
python
|
en
|
code
| 0 |
github-code
|
6
|
25508690525
|
#!/usr/bin/env python3
import requests
import os
url = 'http://localhost/upload/'
path = os.getcwd() + '/supplier-data/images/'
only_jpeg = []
for file in os.listdir(path):
name, ext = os.path.splitext(file)
if ext == '.jpeg':
only_jpeg.append(os.path.join(path,file))
for jpeg in only_jpeg:
with open(jpeg, 'rb') as opened:
r = requests.post(url, files={'file': opened})
|
paesgus/AutomationTI_finalproject
|
supplier_image_upload.py
|
supplier_image_upload.py
|
py
| 393 |
python
|
en
|
code
| 0 |
github-code
|
6
|
7781848764
|
import imutils
import cv2
import numpy as np
class DistanceCalculator:
def __init__(self, distance_ref, width_ref, pixels):
self.distance_ref = distance_ref
self.width_ref = width_ref
self.focal_ref = (pixels*distance_ref)/width_ref
def find_object(self, original):
"""
find object that went to calculate camera-object distance
we can applay a mask to take only region of intrest
but here we applay only max contour detection
"""
# convert the image to grayscale, blur it, and detect edges
gray = cv2.cvtColor(original, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (5, 5), 0)
edged = cv2.Canny(gray, 35, 125)
# find the contours in the edged image and keep the largest one
cnts = cv2.findContours(
edged.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
cv2.drawContours(original, cnts, -1, (0, 0, 255))
if len(cnts):
c = max(cnts, key=cv2.contourArea)
return cv2.minAreaRect(c)
else:
return (0, 0), (self.width_ref, 0), 0
def _calc_distance(self, pixels):
""" real distance"""
return (self.width_ref*self.focal_ref)/pixels
def calc_distance(self, original):
""" calculate camera-object distance """
# applay rectangle max area filter and draw contours
(x, y), (width, height), angle = self.find_object(original=original)
print("distance %d" % self._calc_distance(width))
box = cv2.boxPoints(((x, y), (width, height), angle))
box = np.int0(box)
cv2.drawContours(original, [box], -1, (0, 255, 0), 2)
cv2.putText(original, "%.2f cm" % (self._calc_distance(
width)), (2, 506), cv2.FONT_HERSHEY_SIMPLEX, 2.0, (0, 255, 0), 2)
|
tarekbrahmi/Open-cv-project
|
MyProjects/distance-calculator/example2/DistanceCalculator.py
|
DistanceCalculator.py
|
py
| 1,870 |
python
|
en
|
code
| 0 |
github-code
|
6
|
34183851752
|
# def solution(s):
# q, r = divmod(len(s), 2)
# if r : q -= 1
# result_list = []
# for n in range(1, q+1):
# cur = 0
# result_str = ''
# print(n)
# while cur < len(s):
# cur_str = s[cur:cur + n]
# count = 1
# for i in range(cur + n, len(s)+1, n):
# next_str = s[i:i+n]
# if cur_str == next_str:
# count +=1
# else:
# if count == 1 :
# result_str += s[cur]
# cur +=1
# else:
# result_str += str(count) + cur_str
# cur = i
# break
# print(result_str)
# result_list.append(result_str)
# print(result_list)
# answer = 0
# return answer
# def solution(s):
# # q, r = divmod(len(s), 2)
# # if r : q -= 1
# count_list = []
# for n in range(1, len(s)//2):
# before_str = s[0:n]
# flag = False
# count = n
# if before_str == s[n:2*n]:
# flag = True
# count = 0
# for i in range(n, len(s), n):
# cur_str = s[i:i+n]
# print(before_str, cur_str)
# if before_str == cur_str:
# flag = True
# else:
# if flag:
# count += n+1
# else:
# count += len(before_str)
# before_str = cur_str
# flag = False
# print(n, count)
# print()
# if flag:
# count += n+1
# else:
# count += len(before_str)
# count_list.append(count)
# print(count_list)
# return
def solution(s):
    """Return the length of the shortest run-length compression of *s*.

    Every chunk size from 1 to len(s)//2 is tried; runs of identical
    chunks collapse to "<count><chunk>", a run of one stays as-is.
    """
    best = len(s)
    for size in range(1, len(s) // 2 + 1):
        compressed = ''
        run = 1
        for start in range(0, len(s), size):
            chunk = s[start:start + size]
            following = s[start + size:start + 2 * size]
            if chunk == following:
                run += 1
            else:
                compressed += chunk if run == 1 else str(run) + chunk
                run = 1
        best = min(best, len(compressed))
    return best
return min_length
test_case = [
("aabbaccc", 7),
("ababcdcdababcdcd", 9),
("abcabcdede", 8),
("abcabcabcabcdededededede", 14),
("xababcdcdababcdcd", 17),
("xxxxxxxxxxyyy", 5),
("a", 1)
]
for s, answer in test_case:
print(solution(s), answer)
|
study-for-interview/algorithm-study
|
hanjo/개인용/programmers/카카오/L2_문자열압축/solution.py
|
solution.py
|
py
| 2,617 |
python
|
en
|
code
| 8 |
github-code
|
6
|
28193366899
|
from datetime import datetime, time
import sys
from time import sleep
import datefunc
def choose_date(now):
    """Prompt the user for a counter type and return the target datetime.

    Options: 1 -> next payday, 2 -> vacation, 3 -> end of working day.
    Any other input prints a message and exits the process.
    """
    datefunc.clear_terminal()
    option = input("Choose counter:\n 1 - time to pay,\n 2 - time to vacation,\n 3 - time to end of working day \n")
    # BUG FIX: the original line ended with a stray backslash, which
    # joined this call with the following `if` into one invalid
    # statement (a SyntaxError that prevented the module from loading).
    datefunc.clear_terminal()
    if option == '1' or option == 1:
        return datefunc.time_to_pay(now)
    if option == '2' or option == 2:
        return datefunc.time_to_vacation()
    if option == '3' or option == 3:
        return datefunc.time_end_workingday()
    else:
        print('fuck yourself')
        sys.exit()
def main():
now = datetime.now()
# print(now.today().weekday())
req = choose_date(now)
while req>now:
print("%dd %dh %dm %ds" % datefunc.daysHoursMinutesSecondsFromSeconds(datefunc.dateDiffInSeconds(now, req)))
datefunc.clear_terminal()
now = datetime.now()
print("Thank you")
if __name__ == "__main__":
main()
|
NikitaTymofeiev-dev/simpleApp
|
main.py
|
main.py
|
py
| 1,046 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40787987363
|
## import libraries
from tkinter import *
from gtts import gTTS
from playsound import playsound
################### Initialized window####################
root = Tk()
root.geometry('350x300')
root.resizable(0,0)
root.config(bg = 'light yellow')
root.title('DataFlair - TEXT_TO_SPEECH')
##heading
Label(root, text = 'HELIGA TEKST' , font='arial 20 bold' , bg ='white smoke').pack()
Label(root, text ='DataFlair' , font ='arial 15 bold', bg = 'blue').pack(side = BOTTOM)
#label
Label(root, text ='Sisesta Tekst', font ='arial 15 bold', bg ='white').place(x=20,y=60)
##text variable
Msg = StringVar()
#Entry
entry_field = Entry(root,textvariable =Msg, width ='50')
entry_field.place(x=20 , y=100)
###################define function##############################
def Tekst():
Message = entry_field.get()
speech = gTTS(text = Message, lang ='et', slow = True)
speech.save('DataFlair.mp3')
playsound('DataFlair.mp3')
def Exit():
root.destroy()
def Reset():
Msg.set("")
#Button
Button(root, text = "ESITA" , font = 'arial 15 bold', command = Tekst, bg = 'light blue', width =6).place(x=25, y=140)
Button(root,text = 'VÄLJU',font = 'arial 15 bold' , command = Exit, bg = 'green').place(x=100,y=140)
Button(root, text = 'UUESTI', font='arial 15 bold', command = Reset, bg = 'yellow' ).place(x=175 , y =140)
#infinite loop to run program
root.mainloop()
|
program444/HELIGA-TEKST-
|
Text-to-Speech.py
|
Text-to-Speech.py
|
py
| 1,453 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71168432827
|
'''
This project is a GUI calculator for a high yield savings account.
The GUI will display 4 input boxes. An intial deposit, monthly deposit, APY yield, and years to calculate
The result will be a number at the end of the year, as well as a graph that displays the growth of the account.
Possible extras could include a bar graph or just numbers that display how much of the final amount was the initial, monthly deposit,
or interest earned.
'''
#Imports
import tkinter as tk
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
# Make tkinter window with classname and size
m = tk.Tk(className="high yield savings calculator")
m.attributes('-fullscreen', True)
# Create canvas to draw and do animations
canvas = tk.Canvas(m, width=m.winfo_screenwidth(), height=m.winfo_screenheight(), bg="white")
canvas.create_line(0, 120, m.winfo_screenwidth(), 120, fill="black", width=2)
canvas.pack(fill="both", expand=True)
title = tk.Label(m, text="High Yield Savings Calculator", font=("Mistral 60 bold"), bg="white")
title.pack()
screen_width = m.winfo_screenwidth()
center, quarter = screen_width // 2, screen_width // 1.5
title.place(x=center, y=18, anchor="n")
initial_var, monthly_var, APY_var, years_var = tk.StringVar(), tk.StringVar(), tk.StringVar(), tk.StringVar()
def calculate(initial, monthly, APY, years):
    """Estimate a savings balance after *years*.

    Reproduces the original model exactly: each of the years*12 monthly
    deposits is credited interest of monthly * (APY/100) * years, i.e.
    interest for the full horizon regardless of when it was deposited.
    Returns (total balance, total interest, total deposits).
    """
    rate = APY / 100
    deposits_total = (monthly * 12) * years
    months = int(years * 12)
    horizon = years
    interest = 0
    for _ in range(0, months):
        interest += monthly * rate * horizon
    balance = initial + deposits_total + interest
    return balance, interest, deposits_total
total_bal = None
error_msg = None
piegraph = None
def display_total_balance(total, contribution_interest, initial, total_monthly):
global total_bal
if total_bal:
total_bal.config(text='Total balance is $' + str(total))
else:
total_bal = tk.Label(m, text='Total balance is $' + str(total), fg='green', font=('Modern', 40), bg="white")
total_bal.place(x=quarter, y=165, anchor='n')
display_pie_graph(initial, total_monthly, contribution_interest)
def display_pie_graph(initial, total_monthly, contribution_interest):
global piegraph
# Make canvas where we can draw plots and graph
fig = Figure(figsize=(6, 4), dpi=130)
# Make subplot so we have place to plot our pie graph
subplot = fig.add_subplot(111)
# Prepare the data for the pie chart
labels = ['Initial', 'Contributions', 'Interest']
sizes = [initial, total_monthly, contribution_interest]
explode = (0.1, 0.1, 0.1) # Separation of our pie datas
colors = ('yellow', 'cyan', 'green')
wp = {'linewidth': 0.5, 'edgecolor': "red"}
# Create the pie chart
wedges, texts, autotexts = subplot.pie(sizes,
autopct='%1.1f%%',
explode=explode,
shadow=True,
colors=colors,
startangle=90,
wedgeprops=wp,
textprops=dict(color="black"))
subplot.axis('equal') # Equal aspect ratio ensures the pie is circular
# Make legend, 1st and 2nd are location, 3rd and 4th are size
subplot.legend(wedges, labels,
title="Entries",
bbox_to_anchor=(0.18, 1.1))
# Create a FigureCanvasTkAgg widget that binds the graph in the Tkinter window
piegraph = FigureCanvasTkAgg(fig, master=m)
piegraph.draw()
# Place the graph in the Tkinter window
piegraph.get_tk_widget().place(x=quarter, y=290, anchor='n')
def remove_pie_graph():
global piegraph
if piegraph:
piegraph.get_tk_widget().destroy()
def display_error_message():
global error_msg
if error_msg:
error_msg.config(text='Please enter a valid number')
else:
error_msg = tk.Label(m, text='Please enter a valid number', fg='red', font=('Georgia', 20), anchor='center', bg="white")
error_msg.place(x=center, y=165, anchor='n')
def remove_widgets():
global total_bal, error_msg
if total_bal:
total_bal.destroy()
total_bal = None
if error_msg:
error_msg.destroy()
error_msg = None
remove_pie_graph()
def submit():
remove_widgets()
try:
initial = float(initial_var.get())
monthly = float(monthly_var.get())
APY = float(APY_var.get())
years = int(years_var.get())
if initial < 0 or monthly < 0 or APY < 0 or years < 0:
raise ValueError
# Calculate the total balance
total, contribution_interest, total_monthly = calculate(initial, monthly, APY, years)
# Display the total balance
display_total_balance(total, contribution_interest, initial, total_monthly)
except ValueError:
# Display the error message
display_error_message()
def main():
# Label the questions
initial_question = tk.Label(m, text='Initial Deposit:', font=('Georgia', 20), anchor='n', bg="white")
monthly_question = tk.Label(m, text='Monthly Deposit:', font=('Georgia', 20), anchor='n', bg="white")
APY_question = tk.Label(m, text='APY:', font=('Georgia', 20), anchor='n', bg="white")
years_question = tk.Label(m, text='Years to calculate:', font=('Georgia', 20), anchor='n', bg="white")
# Place the questions
initial_question.place(x=8, y=170)
monthly_question.place(x=8, y=275)
APY_question.place(x=8, y=380)
years_question.place(x=8, y=485)
# Make the input box
initial_box = tk.Entry(m, textvariable=initial_var, width=20, font=('Arial 22'), borderwidth=2, highlightthickness=2)
monthly_box = tk.Entry(m, textvariable=monthly_var, width=20, font=('Arial 22'), borderwidth=2, highlightthickness=2)
APY_box = tk.Entry(m, textvariable=APY_var, width=20, font=('Arial 22'), borderwidth=2, highlightthickness=2)
years_box = tk.Entry(m, textvariable=years_var, width=20, font=('Arial 22'), borderwidth=2, highlightthickness=2)
# Place the input boxes
initial_box.place(x=10, y=220)
monthly_box.place(x=10, y=315)
APY_box.place(x=10, y=420)
years_box.place(x=10, y=525)
#Make and place the button
button = tk.Button(text="$Calculate$", width=12, height=5, bg="white", fg="green", font = ('Castellar 20 bold'), anchor = 'center', command = submit, borderwidth=0, highlightthickness=0)
button.place(x=10, y=600)
m.mainloop()
main()
|
MaxC1880/HYSAcalculator
|
HYSAcalculator.py
|
HYSAcalculator.py
|
py
| 6,918 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71601372029
|
class Greeter:
    """Greets a person by name, printing ages offset from a base of 30."""

    def __init__(self, meno):
        self._meno = meno
        self._vek = 30

    def pozdrav(self):
        """Print two greeting lines for every even offset in 0..9."""
        for offset in range(0, 10):
            if offset % 2 != 0:
                continue
            print("Ahoj {0}, mas {1} rokov. Vitaj na PSA v 2023".format(self._meno, self._vek + offset))
            print("Ahoj", self._meno, "mas ", self._vek + offset, "rokov.Vitaj znovu na PSA v 2023")
meno = input("Zadak svoje meno")
greet = Greeter(meno)
greet.pozdrav()
|
Merlinkooo/CviceniePondelok
|
cv1Triedy.py
|
cv1Triedy.py
|
py
| 466 |
python
|
hr
|
code
| 0 |
github-code
|
6
|
38486654704
|
from datetime import datetime, timezone, timedelta
def stem(label: str, blacklist: list):
    '''
    Strip blacklisted terms from a space-separated event label.

    Inputs:
    - label: single label to stem
    - blacklist: list of terms to exclude from the label
    Return: stemmed label (remaining terms re-joined with spaces)
    '''
    kept = [term for term in label.split(' ') if term not in blacklist]
    return ' '.join(kept)
def time_dif(x: tuple, interval: str):
    '''
    Calculate the difference between two points in time.

    Inputs:
    - x: tuple of two datetime objects (start, end)
    - interval: return unit; accepted values: 'd', 'h', 's'
    Return: difference in whole days, hours or seconds
    Raises: ValueError for any other interval value
    '''
    res = time_wrap(x[0], x[1])
    days = res.days
    hours = res.seconds//60//60
    seconds = res.seconds
    # BUG FIX: the original compared strings with `is`, which tests
    # object identity, not equality — it only worked by CPython's
    # small-string interning and emits a SyntaxWarning on 3.8+.
    if interval == 'd':
        return days
    elif interval == 'h':
        return hours + (days * 24)
    elif interval == 's':
        return seconds + (days * 24 * 60 * 60)
    # Previously an unrecognised interval silently returned None.
    raise ValueError("interval must be one of 'd', 'h', 's', got %r" % (interval,))
def number_of_non_workdays(start, end):
    '''
    Count weekend days (Saturday/Sunday) between two points in time,
    inclusive of both endpoints.

    Input:
    - start: datetime object
    - end: datetime object
    Return:
    int: number of weekend days
    '''
    # datetime.weekday(): Monday == 0 ... Sunday == 6, so > 4 is Sat/Sun.
    weekend_days = 0
    day = start
    while day <= end:
        if day.weekday() > 4:
            weekend_days += 1
        day = day + timedelta(days=1)
    return weekend_days
def time_wrap(start: datetime, end: datetime, s_hour = 8, e_hour = 18):
    '''
    Return the temporal difference between two points in time, adjusted
    to a working schedule of s_hour..e_hour on weekdays.

    Input:
    - start: datetime object
    - end: datetime object
    - s_hour: hour the workday starts (default 8)
    - e_hour: hour the workday ends (default 18)
    Return:
    - timedelta covering only in-schedule time
    '''
    # worktime after start event
    # NOTE(review): e_time is naive but start is stripped of tzinfo only
    # on the next line — an aware *start* would make this subtraction
    # raise TypeError; confirm callers always pass naive datetimes.
    e_time = datetime(start.year, start.month, start.day, e_hour)
    start = start.replace(tzinfo=None)
    t1 = (e_time - start).seconds
    # worktime before end event
    end = end.replace(tzinfo=None)
    # NOTE(review): s_time is built from *start*'s date, not *end*'s —
    # looks suspicious when start and end fall on different days; also
    # `.seconds` (not total_seconds()) wraps negative deltas. Confirm.
    s_time = datetime(start.year, start.month, start.day, s_hour)
    t3 = (end - s_time).seconds
    # calculate days between start and end exclusive non-working days
    days_total = (end - start).days
    non_workingdays = number_of_non_workdays(start, end)
    working_days = days_total - non_workingdays
    if working_days > 1:
        working_days -= 1 # consider only complete day in between
        total_hours_between = (e_hour - s_hour) * working_days
        # convert into seconds
        t2 = total_hours_between * 60 * 60
    else:
        # in this case, there is no full working day between start and end
        t2 = 0
    total_dif = t1 + t2 + t3
    return timedelta(seconds=total_dif)
|
bptlab/bpi-challenge-2020
|
src/util.py
|
util.py
|
py
| 2,829 |
python
|
en
|
code
| 4 |
github-code
|
6
|
23917961666
|
from langchain.document_loaders import WebBaseLoader
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Chroma
import os
from langchain.chat_models import JinaChat
from langchain.prompts.chat import (
ChatPromptTemplate,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
)
from langchain.chains import RetrievalQAWithSourcesChain
from langchain.llms import AI21
# create a new instance of chatbot and saves it as a JSON file
def createNewBot(name, fileType, path, url):
loader = None
if fileType == 'web':
loader = WebBaseLoader(url)
elif fileType == 'doc':
loader = PyPDFLoader(path)
data = loader.load()
text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
all_splits = text_splitter.split_documents(data)
embeddings = HuggingFaceEmbeddings()
persistentDir = "bots/" + name + "/vectorstore/"
vectorstore = Chroma.from_documents(documents=all_splits, embedding=embeddings, persist_directory=persistentDir)
# print(vectorstore)
# jina_api_key = os.environ['JINA_API_KEY']
# chat = JinaChat(temperature=0, jinachat_api_key=jina_api_key)
# chat = ChatAnyscale(model_name='meta-llama/Llama-2-7b-chat-hf', temperature=1.0, anyscale_api_key=os.environ["ANYSCALE_API_KEY"])
chat = AI21(ai21_api_key=os.getenv("AI21_API_KEY"))
# memory = ConversationSummaryMemory(llm=chat,memory_key="chat_history",return_messages=True)
retriever = vectorstore.as_retriever()
template = (
r"""You are a helpful English speaking assistant. Use the following pieces of context to answer the users question. If you cannot find the answer from the pieces of context, just say that you don't know, don't try to make up an answer.
----------------
{context}
"""
)
system_message_prompt = SystemMessagePromptTemplate.from_template(template)
human_template = "{question}"
human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)
chat_prompt = ChatPromptTemplate.from_messages(
[system_message_prompt, human_message_prompt]
)
# finalChain = ConversationalRetrievalChain.from_llm(chat, retriever=retriever, memory = memory, combine_docs_chain_kwargs={'prompt': chat_prompt})
finalChain = RetrievalQAWithSourcesChain.from_chain_type(chat, retriever=retriever)
# print(finalChain.retriever)
# SAVING DOESNT WORK OUT BECAUSE LANGCHAIN HAS YET TO SUPPORT THIS
chainSaveFolder = "bots/" + name + '/'
botSavePath = chainSaveFolder + name + '.json'
finalChain.save(botSavePath)
# retrieverSavePath = chainSaveFolder + name + '_retriever.json'
# with open(retrieverSavePath, "w") as f:
# # json.dump(finalChain.retriever.to_json(), f, indent = 2)
# json.dump(vectorstore, f, indent = 2)
return finalChain
|
luongthang0105/rag-cla
|
create_bot.py
|
create_bot.py
|
py
| 2,864 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74977721787
|
import logging
import psycopg2
from dipper.sources.Source import Source
LOG = logging.getLogger(__name__)
class PostgreSQLSource(Source):
"""
Class for interfacing with remote Postgres databases
"""
files = {}
def __init__(
self,
graph_type,
are_bnodes_skolemized,
data_release_version=None,
name=None,
ingest_title=None,
ingest_url=None,
ingest_logo=None,
ingest_description=None,
license_url=None,
data_rights=None,
file_handle=None
):
super().__init__(
graph_type=graph_type,
are_bnodes_skized=are_bnodes_skolemized,
data_release_version=data_release_version,
name=name,
ingest_title=ingest_title,
ingest_url=ingest_url,
ingest_logo=ingest_logo,
ingest_description=ingest_description,
license_url=license_url,
data_rights=data_rights,
file_handle=file_handle)
# used downstream but handled in Source
# globaltt = self.globaltt
# globaltcid = self.globaltcid
# all_test_ids = self.all_test_ids
def fetch_from_pgdb(self, tables, cxn, limit=None):
"""
Will fetch all Postgres tables from the specified database
in the cxn connection parameters.
This will save them to a local file named the same as the table,
in tab-delimited format, including a header.
:param tables: Names of tables to fetch
:param cxn: database connection details
:param limit: A max row count to fetch for each table
:return: None
"""
con = None
try:
con = psycopg2.connect(
host=cxn['host'], database=cxn['database'], port=cxn['port'],
user=cxn['user'], password=cxn['password'])
cur = con.cursor()
for tab in tables:
LOG.info("Fetching data from table %s", tab)
self._getcols(cur, tab)
query = ' '.join(("SELECT * FROM", tab))
countquery = ' '.join(("SELECT COUNT(*) FROM", tab))
if limit is not None:
query = ' '.join((query, "LIMIT", str(limit)))
countquery = ' '.join((countquery, "LIMIT", str(limit)))
cur.execute(countquery)
tablerowcount = cur.fetchone()[0]
outfile = '/'.join((self.rawdir, tab))
# download the file
LOG.info("COMMAND:%s", query)
outputquery = "COPY ({0}) TO STDOUT WITH DELIMITER AS '\t' CSV HEADER"\
.format(query)
with open(outfile, 'w') as tsvfile:
cur.copy_expert(outputquery, tsvfile)
filerowcount = self.file_len(outfile)
if (filerowcount - 1) < tablerowcount:
raise Exception(
"Download from {} failed, {} != {}"
.format(cxn['host'] + ':' + cxn['database'],
(filerowcount - 1), tablerowcount))
if (filerowcount - 1) > tablerowcount:
LOG.warning(
"Fetched from %s more rows in file (%s) than reported "
"in count(%s)",
cxn['host'] + ':' + cxn['database'],
(filerowcount - 1), tablerowcount)
finally:
if con:
con.close()
def fetch_query_from_pgdb(self, qname, query, con, cxn, limit=None):
"""
Supply either an already established connection, or connection parameters.
The supplied connection will override any separate cxn parameter
:param qname: The name of the query to save the output to
:param query: The SQL query itself
:param con: The already-established connection
:param cxn: The postgres connection information
:param limit: If you only want a subset of rows from the query
:return:
"""
if con is None and cxn is None:
raise ValueError("ERROR: you need to supply connection information")
if con is None and cxn is not None:
con = psycopg2.connect(
host=cxn['host'], database=cxn['database'], port=cxn['port'],
user=cxn['user'], password=cxn['password'])
outfile = '/'.join((self.rawdir, qname))
cur = con.cursor()
# wrap the query to get the count
countquery = ' '.join(("SELECT COUNT(*) FROM (", query, ") x"))
if limit is not None:
countquery = ' '.join((countquery, "LIMIT", str(limit)))
cur.execute(countquery)
tablerowcount = cur.fetchone()[0]
# download the file
LOG.debug("COMMAND:%s", query)
outputquery = \
"COPY ({0}) TO STDOUT WITH DELIMITER AS '\t' CSV HEADER".format(query)
with open(outfile, 'w') as tsvfile:
cur.copy_expert(outputquery, tsvfile)
# Regenerate row count to check integrity
filerowcount = self.file_len(outfile)
if (filerowcount-1) < tablerowcount:
raise Exception(
"Download from {} failed, {} != {}"
.format(cxn['host'] + ':' + cxn['database'],
(filerowcount-1), tablerowcount))
if (filerowcount-1) > tablerowcount:
LOG.warning(
"Fetched from %s more rows in file (%s) than reported in count(%s)",
cxn['host'] + ':'+cxn['database'], (filerowcount-1), tablerowcount)
@staticmethod
def _getcols(cur, table):
"""
Will execute a pg query to get the column names for the given table.
:param cur:
:param table:
:return:
"""
query = ' '.join(("SELECT * FROM", table, "LIMIT 0")) # for testing
cur.execute(query)
colnames = [desc[0] for desc in cur.description]
LOG.info("COLS (%s): %s", table, colnames)
# abstract
def fetch(self, is_dl_forced=False):
"""
abstract method to fetch all data from an external resource.
this should be overridden by subclasses
:return: None
"""
raise NotImplementedError
def parse(self, limit):
"""
abstract method to parse all data from an external resource,
that was fetched in fetch() this should be overridden by subclasses
:return: None
"""
raise NotImplementedError
|
monarch-initiative/dipper
|
dipper/sources/PostgreSQLSource.py
|
PostgreSQLSource.py
|
py
| 6,689 |
python
|
en
|
code
| 53 |
github-code
|
6
|
70416777467
|
from multiprocessing import Value, Queue, Process
from config import config
from spider.HtmlCrawl import IpCrawl
from usable.usable import usable
from db.db_select import save_data
def startProxyCrawl(queue,db_proxy_num):
crawl = IpCrawl(queue,db_proxy_num)
crawl.run()
def validator(queue1,queue2):
pass
if __name__ == "__main__":
DB_PROXY_NUM = Value('i', 0)
q1 = Queue(maxsize=config.TASK_QUEUE_SIZE)
q2 = Queue()
p1 = Process(target=startProxyCrawl, args=(q1, DB_PROXY_NUM))
p2 = Process(target=usable, args=(q1, q2))
p3 = Process(target=save_data, args=(q2, DB_PROXY_NUM))
p1.start()
p2.start()
p3.start()
p1.join()
p2.join()
p3.join()
|
queenswang/IpProxyPool
|
proxyspider.py
|
proxyspider.py
|
py
| 703 |
python
|
en
|
code
| 0 |
github-code
|
6
|
27161606711
|
# Written by RF
while True:
s1=float(input("What is the length of side one in cm?"))
s2=float(input("What is the length of side two in cm?"))
A=(s1*s2)
print("The area is", A, "cm^2")
while True:
answer = str(input('Anything else? (y/n): '))
if answer in ('y', 'n'):
break
print("invalid input.")
if answer == 'y':
continue
else:
print("Godspeed")
break
|
GustavMH29/Python
|
Code/Math/Surface Area/Area Square.py
|
Area Square.py
|
py
| 443 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5962399898
|
from aoc_helpers.perf_helpers import *
from aoc_helpers.input_helpers import *
from aoc_helpers.collection_helpers import *
from aoc_helpers.test_helpers import *
from collections import defaultdict
from collections import Counter
import string
import time
from pprint import pprint
from itertools import cycle
class Marble:
def __init__(self, value):
self.value = value
self.next_marble = self
self.prev_marble = self
@timeit
def get_solution(num_players, last_marble_value):
current_marble = Marble(0)
head_marble = current_marble
scores = [0 for _ in range(num_players)]
current_player = 0
for i in range_inclusive(1, last_marble_value):
# pr = []
# pr_marble = head_marble
# for idx in range(i):
# pr.append(pr_marble.value)
# pr_marble = pr_marble.next_marble
# print("Value: {0}".format(pr))
#
# link = []
# pr_marble = head_marble
# for idx in range(i):
# pr_marble = pr_marble.next_marble
# link.append(pr_marble.next_marble.value)
# print("Links: {0}".format(link))
if i % 23 == 0:
scores[current_player] += i
seven_prev = current_marble.prev_marble.prev_marble.prev_marble.prev_marble.prev_marble.prev_marble.prev_marble
scores[current_player] += seven_prev.value
seven_prev.prev_marble.next_marble = seven_prev.next_marble
seven_prev.next_marble.prev_marble = seven_prev.prev_marble
current_marble = seven_prev.next_marble
else:
m1 = current_marble.next_marble
m2 = current_marble.next_marble.next_marble
new_marble = Marble(i)
new_marble.next_marble = m2
m2.prev_marble = new_marble
new_marble.prev_marble = m1
m1.next_marble = new_marble
current_marble = new_marble
current_player = (current_player + 1) % num_players
return max(scores)
test(get_solution(9, 25), 32)
test(get_solution(10, 1618), 8317)
test(get_solution(13, 7999), 146373)
test(get_solution(17, 1104), 2764)
test(get_solution(21, 6111), 54718)
test(get_solution(30, 5807), 37305)
test(get_solution(428, 70825), 398502)
print(get_solution(428, 7082500))
|
colejd/AdventOfCode2018
|
day_09/day09part2.py
|
day09part2.py
|
py
| 2,316 |
python
|
en
|
code
| 0 |
github-code
|
6
|
19362201808
|
def ternary_search(list1, item):
    """Ternary search over an ascending sorted list.

    Each iteration splits the remaining range into three parts, giving
    O(log3(n)) comparisons.  Returns an index of ``item``, or None when
    the item is absent.
    """
    low, high = 0, len(list1) - 1
    while low <= high:
        third = (high - low) // 3
        mid1 = low + third
        mid2 = high - third
        if list1[mid1] == item:
            return mid1
        if list1[mid2] == item:
            return mid2
        if item < list1[mid1]:
            # Item can only be in the first third.
            high = mid1 - 1
        elif item > list1[mid2]:
            # Item can only be in the last third.
            low = mid2 + 1
        else:
            # Item can only be in the middle third.
            low, high = mid1 + 1, mid2 - 1
    return None
if __name__=="__main__":
array=input("Enter the list of numbers(separated by commas):")
list1=sorted(list(map(int,array.split(sep=","))))
item=int(input("Enter the item you want to find:"))
print(ternary_search(list1,item))
|
AnkitM18-tech/Data-Structures-And-Algorithms
|
Algorithms/Searching Algorithms/Ternary Search.py
|
Ternary Search.py
|
py
| 1,130 |
python
|
en
|
code
| 1 |
github-code
|
6
|
43190945536
|
from .object_tracking_visualizer_name import ObjectTrackingVisualizerName
class ObjectTrackingVisualizerFactory():
    """Factory mapping a visualizer name onto a visualizer instance."""

    # Bug fix: ``create`` took no ``self`` parameter, so calling it on an
    # instance would have passed the instance as ``visualizer_name``.
    # Declaring it a @staticmethod keeps class-level calls working and
    # makes instance-level calls correct too.
    @staticmethod
    def create(visualizer_name="TrackingVisualizer"):
        """Instantiate the visualizer registered under ``visualizer_name``.

        Raises NotImplementedError for names with no implementation.
        """
        if visualizer_name == ObjectTrackingVisualizerName.tracking_visualizer.value:
            # Imported lazily so the factory module stays importable even
            # when the visualization dependencies are absent.
            from .visualization.tracking_visualizer import TrackingVisualizer
            return TrackingVisualizer()
        else:
            msg = "model_name is {}, but not implemented".format(
                visualizer_name)
            raise NotImplementedError(msg)
|
hampen2929/inferencia
|
inferencia/task/object_tracking/object_tracking/visualization/object_tracking_visualizer_factory.py
|
object_tracking_visualizer_factory.py
|
py
| 531 |
python
|
en
|
code
| 0 |
github-code
|
6
|
29041072051
|
from flask import Flask, jsonify
from datetime import datetime
import requests
from flask import request
app = Flask(__name__)
# In-memory store of received log lines.  NOTE(review): lost on restart and
# presumably not safe under multi-worker deployments -- confirm deployment.
logs = []
@app.route("/list", methods=["POST"])
# NOTE(review): the function name shadows the builtin ``list``; harmless here
# because it is only referenced through the route decorator.
def list():
    """Accept one raw-body log line via POST and append it to ``logs``."""
    r = request.data.decode("utf-8")
    logs.append(r)
    return jsonify(success=True)
@app.route("/usage.log")
def home():
    """Render every collected log line as a <br>-separated HTML page."""
    return "<br>".join(logs)
if __name__ == "__main__":
    app.run()
|
maciejgrosz/containers_network_communication
|
loggerservice/loggerservice.py
|
loggerservice.py
|
py
| 390 |
python
|
en
|
code
| 0 |
github-code
|
6
|
18158243961
|
def findRoot(f, a, b, epsilon):
    """Find one root of ``f`` on [a, b] by recursive bisection.

    Returns the midpoint of the first bracketing interval narrower than
    ``epsilon`` (or an endpoint/midpoint that is an exact zero), or None
    when no sign change exists in the interval.
    """
    m = (a + b) / 2
    # Evaluate f once per point; the original re-evaluated it per comparison.
    fa, fb, fm = f(a), f(b), f(m)
    # Stopping criterion: interval small enough, or exact root at the midpoint.
    if abs(b - a) <= epsilon or fm == 0:
        return m
    # An endpoint may already be a root.
    if fa == 0:
        return a
    if fb == 0:
        return b
    # Recurse into whichever half brackets a sign change.
    if (fa < 0 < fm) or (fm < 0 < fa):
        return findRoot(f, a, m, epsilon)
    if (fm < 0 < fb) or (fb < 0 < fm):
        return findRoot(f, m, b, epsilon)
    # No sign change anywhere: nothing to bracket (explicit, was implicit).
    return None
def findAllRoots(f, a, b, epsilon):
    """Return approximate locations of the sign-change roots of ``f`` on [a, b].

    Every sub-interval that brackets a sign change is bisected until it is
    narrower than ``epsilon``; its midpoint is then reported.  Exact zeros
    found at endpoints/midpoints of visited intervals are reported as well,
    so the result may contain near-duplicates.
    """
    m = (a + b) / 2
    # Base case: interval small enough -- its midpoint stands in for the root.
    if abs(b - a) <= epsilon:
        return [m]
    # Collect any exact zeros at the probe points (same order as before: a, b, m).
    roots = [p for p in (a, b, m) if f(p) == 0]
    # Descend into each half that brackets a strict sign change.
    if f(a) < 0 < f(m) or f(m) < 0 < f(a):
        roots.extend(findAllRoots(f, a, m, epsilon))
    if f(m) < 0 < f(b) or f(b) < 0 < f(m):
        roots.extend(findAllRoots(f, m, b, epsilon))
    return roots
|
Saquith/WISB256
|
Opdracht4/bisection.py
|
bisection.py
|
py
| 1,119 |
python
|
en
|
code
| 0 |
github-code
|
6
|
7368325823
|
#! /usr/bin/env python
def geokar_Ax(n, x, y):
    """Cross-section area by the trapezoid (shoelace) formula.

    n: number of polygon vertices.  x and y must hold n + 1 coordinates,
    with the first vertex repeated at the end to close the polygon.
    """
    total = sum((x[i + 1] + x[i]) * (y[i + 1] - y[i]) for i in range(n))
    return 0.5 * total
def main():
# Vnos podatkov
print("Vnos podatkov ...")
n = int(input("Podaj število točk: "))
x = []
y = []
for i in range(n):
# 1. primer: vsaka koordinata posebej
# xi = float(input("x({}): ".format(i + 1)))
# yi = float(input("y({}): ".format(i + 1)))
# x.append(xi)
# y.append(yi)
# 2. primer: podajanje x in y koordinate točke skupaj
vrstica = input("Točka {}: ".format(i + 1))
besede = vrstica.split()
x.append(float(besede[0]))
y.append(float(besede[1]))
x.append(x[0])
y.append(y[0])
# Izračun ploščine prereza
print()
print("Izračun ...")
Ax = geokar_Ax(n, x, y)
# Izpis rezultatov
print()
print("Izpis ...")
print("Ax = {:.3f}".format(Ax))
if __name__ == "__main__": main()
|
matevzdolenc/matevzdolenc.github.io
|
python/src/015/geokar.py
|
geokar.py
|
py
| 1,002 |
python
|
sl
|
code
| 4 |
github-code
|
6
|
27215126875
|
import pytest
from hbutils.system import telnet, wait_for_port_online
@pytest.mark.unittest
class TestSystemNetworkTelnet:
def test_telnet(self):
assert telnet('127.0.0.1', 35127)
assert telnet('127.0.0.1', 35128)
assert not telnet('127.0.0.1', 35129, timeout=1.0)
def test_wait_for_port_online(self):
wait_for_port_online('127.0.0.1', 35127)
wait_for_port_online('127.0.0.1', 35128)
with pytest.raises(TimeoutError):
wait_for_port_online('127.0.0.1', 35129, timeout=2.0, interval=0.1)
|
HansBug/hbutils
|
test/system/network/test_telnet.py
|
test_telnet.py
|
py
| 559 |
python
|
en
|
code
| 7 |
github-code
|
6
|
3232593391
|
import logging
class LogDB:
    """Small log helper: writes timestamped messages to a file and keeps
    per-file log messages in memory.

    Attributes:
        fileName: path of the tracker log file on disk.
        loglist:  unused placeholder list (kept for compatibility).
        files:    mapping of tracked file names, set via update_files().
        final:    in-memory mapping of file name -> list of log messages.
    """

    def __init__(self, fileName):
        self.fileName = fileName
        self.loglist = []
        self.files = None
        self.final = {}

    def log(self, message=None):
        """Append a timestamped message to the on-disk log file.

        NOTE: logging.basicConfig only takes effect on its first call per
        process; subsequent calls are silently ignored.
        """
        FORMAT = '%(asctime)s %(message)s'
        logging.basicConfig(format=FORMAT, filename=self.fileName)
        logging.warning(message)

    def show_tracker_logs(self):
        """Print every line of the on-disk log file."""
        # Fix: the original rebound the handle without closing it (leak).
        with open(self.fileName) as f:
            for line in f:
                print(line)

    def update_files(self, files_seeder):
        """Replace the mapping of tracked files."""
        self.files = files_seeder

    def log_file(self, fileName):
        """Print the tracked entry for ``fileName`` if it exists."""
        if fileName in self.files:
            print(self.files[fileName])
        else:
            print(f'{fileName} not found')

    def add_logs2file(self, fileName, logmsg):
        """Append ``logmsg`` to the in-memory log list for ``fileName``.

        Bug fix: the original tested membership in ``self.files`` with an
        inverted condition, raising KeyError for unknown files and wiping
        previously collected messages for known ones.
        """
        self.final.setdefault(fileName, []).append(logmsg)

    def logs_of_the_file(self, fileName):
        """Print the tracked entry and the collected log messages for a file."""
        if fileName in self.files:
            print(self.files[fileName])
            if fileName in self.final:
                print(self.final[fileName])
            else:
                print('No log yet')
        else:
            print(f'{fileName} not found')

    def all_logs(self):
        """Print collected logs for every tracked file."""
        for fileName in self.files:
            self.logs_of_the_file(fileName)
|
reza2002801/Torrent
|
logDB.py
|
logDB.py
|
py
| 1,524 |
python
|
en
|
code
| 0 |
github-code
|
6
|
43279150633
|
from django.contrib import admin
from django.urls import path
from . import views
app_name = 'task'
urlpatterns=[
# path('', views.index, name='index')
path('', views.TasksView.as_view(), name='index'),
path('addtask/', views.add_task, name='addtask'),
path('remover/', views.remove_all_task, name='rm_task'),
path('rm/<int:task_pk>', views.remove_1_task, name='rm'),
path('done/<int:task_pk>', views.done_task, name='done')
]
|
eh97979/Task-manager
|
task_project/task/urls.py
|
urls.py
|
py
| 456 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71924390587
|
from setuptools import setup, find_packages
from os.path import join
name = 'menhir.simple.livesearch'
version = '0.1'
readme = open("README.txt").read()
history = open(join("docs", "HISTORY.txt")).read()
setup(name = name,
version = version,
description = 'Dolmen simple extension : livesearch',
long_description = readme[readme.find('\n\n'):] + '\n' + history,
keywords = 'Grok Zope3 CMS Dolmen',
author = 'Souheil Chelfouh',
author_email = '[email protected]',
url = 'http://tracker.trollfot.org/',
download_url = 'http://pypi.python.org/pypi/menhir.simple.livesearch',
license = 'GPL',
packages=find_packages('src', exclude=['ez_setup']),
package_dir={'': 'src'},
namespace_packages = ['menhir', 'menhir.simple'],
include_package_data = True,
platforms = 'Any',
zip_safe = True,
install_requires=[
'setuptools',
'grok',
'dolmen.app.layout',
'dolmen.app.search',
'hurry.jquery',
'megrok.resource',
'zope.component',
'zope.interface',
],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Grok',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
trollfot/menhir.simple.livesearch
|
setup.py
|
setup.py
|
py
| 1,479 |
python
|
en
|
code
| 0 |
github-code
|
6
|
20538458179
|
# https://leetcode.com/problems/last-stone-weight/
"""
Time complexity:- O(N logN)
Space Complexity:- O(N)
"""
import heapq
from typing import List
class Solution:
    def lastStoneWeight(self, stones: List[int]) -> int:
        """Smash the two heaviest stones together until at most one remains.

        A max-heap is simulated by pushing negated weights onto heapq's
        min-heap.  Time O(n log n), space O(n).
        """
        heap = [-weight for weight in stones]
        heapq.heapify(heap)
        # Stop when one stone is left, or when every stone has weight 0
        # (the most negative entry sits at heap[0]).
        while len(heap) > 1 and heap[0] != 0:
            first = heapq.heappop(heap)
            second = heapq.heappop(heap)
            # Difference of negated weights is itself non-positive.
            heapq.heappush(heap, first - second)
        # Negate back to recover the surviving stone's weight.
        return -heap[0]
|
Amit258012/100daysofcode
|
Day60/last_stone_weight.py
|
last_stone_weight.py
|
py
| 844 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5449618648
|
def longestCommonPrefix(self, strs: list[str]) -> str:
    """Return the longest prefix shared by every string in ``strs``.

    Starts from the shortest string (no prefix can be longer) and trims
    it from the right until it prefixes every other string; returns ""
    when nothing is shared.
    """
    prefix = min(strs, key=len)
    for word in strs:
        # Shrink the candidate until it matches the start of this word.
        while not word.startswith(prefix):
            prefix = prefix[:-1]
    return prefix
# A function to find the longest common prefix string amongst an array of strings.
# If there is no common prefix, return an empty string "".
# Input: strs = ["flower","flow","flight"]
# Output: "fl"
# Input: strs = ["dog","racecar","car"]
# Output: ""
# Explanation: There is no common prefix among the input strings.
|
jvm-coder/Hacktoberfest2022_aakash
|
Python/longest_common_prefix.py
|
longest_common_prefix.py
|
py
| 523 |
python
|
en
|
code
| 47 |
github-code
|
6
|
5469847519
|
import os
import numpy as np
from datetime import datetime
import time
from Utils import _add_loss_summaries
from model import *
#from augmentation import pre_process_image
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 367
NUM_EXAMPLES_PER_EPOCH_FOR_TEST = 101
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 1
TEST_ITER = 200 # ceil(NUM_EXAMPLES_PER_EPOCH_FOR_TEST / TRAIN_BATCH_SIZE)
# =========== This function converts prediction to image ===========================
def color_image(image, num_classes=11):
import matplotlib as mpl
import matplotlib.cm
norm = mpl.colors.Normalize(vmin=0., vmax=num_classes)
mycm = mpl.cm.get_cmap('Set1')
return mycm(norm(image))
def train(total_loss, global_step):
""" fix lr """
lr = INITIAL_LEARNING_RATE
loss_averages_op = _add_loss_summaries(total_loss)
# Compute gradients.
with tf.control_dependencies([loss_averages_op]):
opt = tf.train.AdamOptimizer(lr)
grads = opt.compute_gradients(total_loss)
apply_gradient_op = opt.apply_gradients(grads, global_step=global_step)
# Add histograms for trainable variables.
for var in tf.trainable_variables():
tf.summary.histogram(var.op.name, var)
# Add histograms for gradients.
for grad, var in grads:
if grad is not None:
tf.summary.histogram(var.op.name + '/gradients', grad)
# Track the moving averages of all trainable variables.
variable_averages = tf.train.ExponentialMovingAverage(
MOVING_AVERAGE_DECAY, global_step)
variables_averages_op = variable_averages.apply(tf.trainable_variables())
with tf.control_dependencies([apply_gradient_op, variables_averages_op]):
train_op = tf.no_op(name='train')
return train_op
def training():
# should be changed if your model stored by different convention
startstep = 801 #if not is_finetune else int(FLAGS.finetune.split('-')[-1])
image_filenames, label_filenames = get_filename_list(path_train)
val_image_filenames, val_label_filenames = get_filename_list(path_val)
with tf.Graph().as_default():
train_data_node = tf.placeholder( tf.float32, shape=[TRAIN_BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_DEPTH])
train_labels_node = tf.placeholder(tf.int64, shape=[TRAIN_BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, 1])
phase_train = tf.placeholder(tf.bool, name='phase_train')
global_step = tf.Variable(0, trainable=False)
# For CamVid
images, labels = CamVidInputs(image_filenames, label_filenames, TRAIN_BATCH_SIZE)
print ('Camvid:', images, '===000===', labels)
val_images, val_labels = CamVidInputs(val_image_filenames, val_label_filenames, TRAIN_BATCH_SIZE)
# Build a Graph that computes the logits predictions from the inference model.
loss, eval_prediction = inference(train_data_node, train_labels_node, TRAIN_BATCH_SIZE, phase_train)
# Build a Graph that trains the model with one batch of examples and updates the model parameters.
train_op = train(loss, global_step)
saver = tf.train.Saver(tf.global_variables())
summary_op = tf.summary.merge_all()
with tf.Session() as sess:
# Build an initialization operation to run below.
try:
print("Trying to restore last checkpoint from ", path_ckpt, " ...")
# Use TensorFlow to find the latest checkpoint - if any.
last_chk_path = tf.train.latest_checkpoint(checkpoint_dir=path_ckpt)
print ('last chkr point:', last_chk_path)
# Try and load the data in the checkpoint.
saver.restore(sess, save_path=last_chk_path)
# If we get to this point, the checkpoint was successfully loaded.
print("Restored checkpoint from:", last_chk_path)
except:
# If the above failed for some reason, simply
# initialize all the variables for the TensorFlow graph.
print("Failed to restore checkpoint. Initializing variables instead.")
sess.run(tf.global_variables_initializer())
# Start the queue runners.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
# Summery placeholders
summary_writer = tf.summary.FileWriter(path_train, sess.graph)
average_pl = tf.placeholder(tf.float32)
acc_pl = tf.placeholder(tf.float32)
iu_pl = tf.placeholder(tf.float32)
average_summary = tf.summary.scalar("test_average_loss", average_pl)
acc_summary = tf.summary.scalar("test_accuracy", acc_pl)
iu_summary = tf.summary.scalar("Mean_IU", iu_pl)
for step in range(train_iteration):
image_batch ,label_batch = sess.run([images, labels])
# since we still use mini-batches in validation, still set bn-layer phase_train = True
#print ('Batch:', image_batch, ' ----0000---', label_batch)
#image_batch_a = pre_process_image (image_batch, True)
feed_dict = {
train_data_node: image_batch,
train_labels_node: label_batch,
phase_train: True
}
start_time = time.time()
#print ('Step:', step)
_, loss_value = sess.run([train_op, loss], feed_dict=feed_dict)
duration = time.time() - start_time
assert not np.isnan(loss_value), 'Model diverged with loss = NaN'
if (step<50):
print ('Step:',step)
if step % 100 == 0:
num_examples_per_step = TRAIN_BATCH_SIZE
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), step, loss_value,
examples_per_sec, sec_per_batch))
# eval current training batch pre-class accuracy
pred = sess.run(eval_prediction, feed_dict=feed_dict)
per_class_acc(pred, label_batch)
if step % val_iter == 0:
print("start validating.....")
total_val_loss = 0.0
hist = np.zeros((NUM_CLASSES, NUM_CLASSES))
for test_step in range(TEST_ITER):
val_images_batch, val_labels_batch = sess.run([val_images, val_labels])
_val_loss, _val_pred = sess.run([loss, eval_prediction], feed_dict={
train_data_node: val_images_batch,
train_labels_node: val_labels_batch,
phase_train: True
})
total_val_loss += _val_loss
hist += get_hist(_val_pred, val_labels_batch)
print("val loss: ", total_val_loss / TEST_ITER)
acc_total = np.diag(hist).sum() / hist.sum()
iu = np.diag(hist) / (hist.sum(1) + hist.sum(0) - np.diag(hist))
test_summary_str = sess.run(average_summary, feed_dict={average_pl: total_val_loss / TEST_ITER})
acc_summary_str = sess.run(acc_summary, feed_dict={acc_pl: acc_total})
iu_summary_str = sess.run(iu_summary, feed_dict={iu_pl: np.nanmean(iu)})
print_hist_summery(hist)
print(" end validating.... ")
summary_str = sess.run(summary_op, feed_dict=feed_dict)
summary_writer.add_summary(summary_str, step)
summary_writer.add_summary(test_summary_str, step)
summary_writer.add_summary(acc_summary_str, step)
summary_writer.add_summary(iu_summary_str, step)
# Save the model checkpoint periodically.
if step % save_model_itr == 0 or (step + 1) == train_iteration:
checkpoint_path = os.path.join(path_ckpt, 'model.ckpt')
saver.save(sess, checkpoint_path, global_step=global_step)
coord.request_stop()
coord.join(threads)
# --------------------------------------------------------
training()
|
mohbattharani/Segmentation_
|
SegNet/train.py
|
train.py
|
py
| 7,704 |
python
|
en
|
code
| 0 |
github-code
|
6
|
28339877749
|
from itertools import product
k,m = list(map(int,input().split()))
arr = []
cart_prod = []
maxS=0
for _ in range(k):
lstN = list(map(int,input().split()[1:]))
arr.append(lstN)
cart_prod = list(product(*arr))
for elem in cart_prod:
sum1=0
for i in elem:
sum1+=i**2
if sum1%m>maxS:
maxS = sum1%m
print(maxS)
|
t3chcrazy/Hackerrank
|
maximize-it.py
|
maximize-it.py
|
py
| 358 |
python
|
en
|
code
| 0 |
github-code
|
6
|
4785885470
|
from collections import defaultdict, deque
n = int(input())
d = defaultdict(list)
for i in range(1, n):
l = list(map(int, input().split()))
now = 1
for j in range(i+1, n+1):
d[i].append((j, l[now-1]))
d[j].append((i, l[now-1]))
now += 1
print(d)
s = set()
max = 0
def dfs(now, flg, visited):
global max
if visited[now-1] == 1:
return
print(now, s)
visited[now-1] = 1
if flg == 0:
s.add(now)
for next in d[now]:
flg ^= 1
dfs(next[0], flg, visited)
dfs(1, 0, [0]*n)
print()
|
K5h1n0/compe_prog_new
|
abc318/d/main.py
|
main.py
|
py
| 570 |
python
|
en
|
code
| 0 |
github-code
|
6
|
30172144350
|
import pandas as pd
from models import DataPoint, db
INDENTATION = 4
DB_NAME = "data_point"
class DBHandler:
    """Singleton wrapper around the weather app's database session.

    Runs pandas-backed SQL queries against the ``data_point`` table.

    SECURITY NOTE(review): every query below interpolates ``longitude``
    and ``latitude`` directly into SQL via f-strings.  If those values
    can come from user input this is an SQL-injection vector -- they
    should be passed as bound parameters.  Flagged, not changed here.
    """

    # Class-level singleton slot (name-mangled to _DBHandler__instance).
    __instance = None

    @staticmethod
    def get_instance(weather_app):
        """Static access method: create the singleton on first use, then
        always return the same instance."""
        if DBHandler.__instance is None:
            DBHandler(weather_app)
        return DBHandler.__instance

    def __init__(self, weather_app):
        """Virtually private constructor; raises if called a second time.

        Binds the shared ``db`` object to the Flask-style ``weather_app``.
        """
        if DBHandler.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            DBHandler.__instance = self
            db.init_app(weather_app)

    def get_db(self):
        """Return the module-level database handle."""
        return db

    def check_if_row_exists_by_location(self, longitude, latitude):
        """Return True iff at least one data point exists at (longitude, latitude)."""
        df = pd.read_sql(f"SELECT * FROM data_point WHERE EXISTS (SELECT * FROM data_point WHERE latitude={latitude} AND longitude={longitude})", db.session.bind)
        if df.empty:
            return False
        return True

    def query_db_by_location(self, longitude, latitude):
        """Return all data points at the location as a JSON records string."""
        df = pd.read_sql(f"SELECT * "
                         f"FROM {DB_NAME} WHERE longitude={longitude} AND latitude={latitude}", db.session.bind)
        return df.to_json(indent=INDENTATION, date_format='iso', orient='records')

    def query_db_summarize_location(self, longitude, latitude):
        """Return min/max/avg of temperature and precipitation at the
        location as a JSON records string (single summary row)."""
        df = pd.read_sql(f"SELECT "
                         f"MIN(temperature) AS min_temperature, "
                         f"MIN(precipitation) AS min_precipitation, "
                         f"MAX(temperature) AS max_temperature, "
                         f"MAX(precipitation) AS max_precipitation, "
                         f"AVG(temperature) AS avg_temperature, "
                         f"AVG(precipitation) AS avg_precipitation "
                         f"FROM {DB_NAME} WHERE longitude={longitude} AND latitude={latitude}", db.session.bind)
        return df.to_json(indent=INDENTATION, orient='records')
|
MayDruyan/weather-service
|
db_handler.py
|
db_handler.py
|
py
| 1,935 |
python
|
en
|
code
| 0 |
github-code
|
6
|
71992464828
|
import json
# Read both JSON inputs as UTF-8 (the data contains Vietnamese text).
with open('input1.json', 'r', encoding='utf-8') as file1, open('input2.json', 'r', encoding='utf-8') as file2:
    data1 = json.load(file1)
    data2 = json.load(file2)

# Find pairs of differently-named keys whose values match across the files.
common_key_value_pairs = []
for key1, value1 in data1.items():
    for key2, value2 in data2.items():
        if value1 == value2 and key1 != key2:
            common_key_value_pairs.append((key2, key1, value1))

# Number the matches: {1: [key_from_file2, key_from_file1, shared_value], ...}
output_data = {}
for count, (key1, key2, value1) in enumerate(common_key_value_pairs, start=1):
    output_data[count] = [key1, key2, value1]

# Fix: write UTF-8 and keep non-ASCII characters readable; the original
# wrote with the platform default encoding and \uXXXX escapes.
with open('output.json', 'w', encoding='utf-8') as output_file:
    json.dump(output_data, output_file, ensure_ascii=False, indent=2)
|
mminhlequang/python_tools
|
key_have_same_value/main.py
|
main.py
|
py
| 1,139 |
python
|
vi
|
code
| 0 |
github-code
|
6
|
8743120221
|
import pandas as pd #pandas是强大的分析结构化数据的工具集 as是赋予pandas别名
from matplotlib import pyplot as plt #2D绘图库,通过这个库将数据绘制成各种2D图形(直方图,散点图,条形图等)
#全国哪一个城市地铁线最多
def subline_count():
df1 = df.iloc[:, :-1] #筛选前三列 df是下面main读取的
df2 = df1.drop_duplicates(subset=['city', 'subwayline']) # 去重
# drop_duplicates是pandas里面的函数 subset用来指定特定的列,不填参数就默认所有列
df3 = df2['city'].value_counts() #pandas里面的value_counts()函数可以对Series里面每个值进行计数并排序
df3.plot.bar() #bar条形图
plt.savefig("城市地铁数量排行榜.png")
plt.show() #将处理后的数据显示出来
print(df3)
if __name__=='__main__' :
df = pd.read_csv('subway.csv', encoding='utf-8') #读取subway.csv文件,并制定字符集的类型
plt.rcParams['font.sans-serif'] = 'fangsong' #font.sans-serif就是修改字体,后面是仿宋字体
#rcParams可以修改默认属性,包括窗体大小,每英寸的点数,线颜色,样式,坐标轴,坐标和网络属性,文本,字体等
subline_count() #运行函数
|
rlxy/python
|
爬虫/数据分析/城市地铁数量排行榜/analysis.py
|
analysis.py
|
py
| 1,315 |
python
|
zh
|
code
| 0 |
github-code
|
6
|
19580309816
|
import pytest
from torch.optim import RMSprop as _RMSprop
from neuralpy.optimizer import RMSprop
@pytest.mark.parametrize(
"learning_rate, alpha, eps, weight_decay, momentum, centered",
[
(-6, 0.001, 0.001, 0.001, 0.001, False),
(False, 0.001, 0.001, 0.001, 0.001, False),
("invalid", 0.001, 0.001, 0.001, 0.001, False),
(0.0, False, 0.001, 0.001, 0.001, False),
(0.001, False, 0.001, 0.001, 0.001, False),
(0.001, "", 0.001, 0.001, 0.001, False),
(0.001, 0.001, False, 0.001, 0.001, False),
(0.001, 0.001, -6, 0.001, 0.001, False),
(0.001, 0.001, 0.2, True, 0.001, False),
(0.001, 0.001, 0.2, "", 0.001, False),
(0.001, 0.001, 0.2, 0.32, False, False),
(0.001, 0.001, 0.2, 0.32, "invalid", False),
(0.001, 0.001, 0.2, 0.32, 0.32, 3),
(0.001, 0.001, 0.2, 0.32, 0.32, "invalid"),
],
)
def test_rmsprop_should_throw_value_error(
learning_rate, alpha, eps, weight_decay, momentum, centered
):
with pytest.raises(ValueError):
RMSprop(
learning_rate=learning_rate,
alpha=alpha,
eps=eps,
weight_decay=weight_decay,
momentum=momentum,
centered=centered,
)
# Possible values that are valid
learning_rates = [0.001, 0.1]
alphas = [0.2, 1.0]
epses = [0.2, 1.0]
momentums = [0.32]
weight_decays = [0.32]
centeredes = [False, True]
@pytest.mark.parametrize(
"learning_rate, alpha, eps, weight_decay, momentum, centered",
[
(learning_rate, alpha, eps, weight_decay, momentum, centered)
for learning_rate in learning_rates
for alpha in alphas
for eps in epses
for weight_decay in weight_decays
for momentum in momentums
for centered in centeredes
],
)
def test_rmsprop_get_layer_method(
learning_rate, alpha, eps, weight_decay, momentum, centered
):
x = RMSprop(
learning_rate=learning_rate,
alpha=alpha,
eps=eps,
weight_decay=weight_decay,
momentum=momentum,
centered=centered,
)
details = x.get_optimizer()
assert isinstance(details, dict) is True
assert issubclass(details["optimizer"], _RMSprop) is True
assert isinstance(details["keyword_arguments"], dict) is True
assert details["keyword_arguments"]["lr"] == learning_rate
assert details["keyword_arguments"]["alpha"] == alpha
assert details["keyword_arguments"]["eps"] == eps
assert details["keyword_arguments"]["momentum"] == momentum
assert details["keyword_arguments"]["weight_decay"] == weight_decay
assert details["keyword_arguments"]["centered"] == centered
def test_rmsprop_get_layer_method_without_parameter():
x = RMSprop()
details = x.get_optimizer()
assert isinstance(details, dict) is True
assert issubclass(details["optimizer"], _RMSprop) is True
assert isinstance(details["keyword_arguments"], dict) is True
assert details["keyword_arguments"]["lr"] == 0.001
assert details["keyword_arguments"]["alpha"] == 0.99
assert details["keyword_arguments"]["eps"] == 1e-08
assert details["keyword_arguments"]["momentum"] == 0.0
assert details["keyword_arguments"]["weight_decay"] == 0.0
assert details["keyword_arguments"]["centered"] is False
|
imdeepmind/NeuralPy
|
tests/neuralpy/optimizer/test_rmsprop.py
|
test_rmsprop.py
|
py
| 3,352 |
python
|
en
|
code
| 78 |
github-code
|
6
|
25273022270
|
x, y = map(int, input().split())
N = int(input())
# 가로, 세로 절단면을 저장할 리스트를 생성하고 처음과 끝 값을 저장
x_cut = [0, x]
y_cut = [0, y]
# 가로, 세로 절단면을 입력받고 해당 리스트에 저장
for _ in range(N):
d, cut = map(int, input().split())
if d:
x_cut.append(cut)
else:
y_cut.append(cut)
x_max = 1
y_max = 1
# 연산을 용이하게 하기 위해서 내림차순으로 정렬
x_cut.sort(reverse=True)
y_cut.sort(reverse=True)
len_x_cut = len(x_cut)
len_y_cut = len(y_cut)
# 가로, 세로 절단 리스트를 돌면서 최대 차이 확인
for i in range(len_x_cut-1):
diff = x_cut[i] - x_cut[i+1]
if diff > x_max:
x_max = diff
for i in range(len_y_cut-1):
diff = y_cut[i] - y_cut[i+1]
if diff > y_max:
y_max = diff
# 확인된 최대 차이의 곱을 출력
print(x_max*y_max)
|
powerticket/algorithm
|
Baekjoon/event/2628_com.py
|
2628_com.py
|
py
| 895 |
python
|
ko
|
code
| 0 |
github-code
|
6
|
42565773392
|
import time
class AutoMail():
    """Rate-limited mail notifier: sends at most one detection email per
    ``timeout`` window.

    mail_provider: object exposing connect() / send() / disconnect().
    mail_from, mail_to: sender and recipient addresses.
    timeout: seconds that must elapse before another email may be sent.
    """

    def __init__(self, mail_provider, mail_from, mail_to, timeout):
        self.mail_provider = mail_provider
        self.mail_from = mail_from
        self.mail_to = mail_to
        self.timeout = timeout
        # Collected images; currently unused by process() (see add_image).
        self.images_bank = []
        # Timestamp of the last sent email; None means "may send now".
        self.start_time = None

    def add_image(self, img):
        # Stores an image for later use; not consumed anywhere in this class.
        self.images_bank.append(img)

    def process(self, img):
        """Send a detection email carrying ``img`` unless one was already
        sent within the last ``timeout`` seconds."""
        # Launch timer and avoid sending email during `timeout` seconds.
        if self.start_time is None:
            self.start_time = time.time()
            #self.add_image(img)
            self.mail_provider.connect()
            self.mail_provider.send("AutoMail - Detection", self.mail_from, self.mail_to, "Catched something...", img)
            self.mail_provider.disconnect()
        elapsed_time = time.time() - self.start_time
        # Reset once the window has passed so the next detection sends again.
        if elapsed_time > self.timeout:
            self.start_time = None
|
Malik-Fleury/RaspPi_SuperVision
|
Program/Mail/AutoMail.py
|
AutoMail.py
|
py
| 920 |
python
|
en
|
code
| 0 |
github-code
|
6
|
43348614388
|
from scrabzl import Word, Dictionary
import unicodedata
def strip_accents(text):
    """Return ``text`` with all accents/diacritics removed (ASCII only)."""
    try:
        # Python 2 compatibility: decode byte strings to unicode.
        text = unicode(text, 'utf-8')
    except NameError:
        # Python 3: str is already unicode, nothing to decode.
        pass
    # Decompose characters (NFD), then drop the combining marks by
    # round-tripping through ASCII with errors ignored.
    decomposed = unicodedata.normalize('NFD', text)
    stripped = decomposed.encode('ascii', 'ignore').decode("utf-8")
    return str(stripped)
def no_special_chars(word):
    """True when ``word`` contains no apostrophe, space, dot or hyphen."""
    return not any(ch in word for ch in ("'", ' ', '.', '-'))
def create_dictionaries(dictionary_path, max_word_length, language):
words = []
with open(dictionary_path, 'r') as f:
for word in f.readlines():
word = strip_accents(word).upper().strip()
if (
len(word) > 1 and len(word) <= max_word_length and
no_special_chars(word)
):
words.append(Word(word))
words = tuple(sorted(set(words)))
dictionary = Dictionary(words)
dictionary.dump(language=language)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Create dictionaries.')
parser.add_argument('dictionary_path', metavar='dictionary-path', type=str,
help='Path to a dictionary txt file containing one word per line')
parser.add_argument('dictionary_name', metavar='dictionary-name', type=str,
help='Name of the dictionary')
parser.add_argument('--max-word-length', type=int, default=7,
help='Maximum word length of the words in the dictionary (default: 7)')
args = parser.parse_args()
create_dictionaries(args.dictionary_path, args.max_word_length, args.dictionary_name)
|
charleswilmot/scrabzl
|
src/create_dictionary.py
|
create_dictionary.py
|
py
| 1,733 |
python
|
en
|
code
| 0 |
github-code
|
6
|
5309109260
|
def search_matrix(matrix, target):
    """Search a matrix whose rows and columns are sorted ascending.

    Staircase search starting at the top-right corner: O(rows + cols).
    Returns True iff ``target`` is present.
    """
    if not matrix:
        return False
    row, col = 0, len(matrix[0]) - 1
    while row < len(matrix) and col >= 0:
        value = matrix[row][col]
        if value == target:
            return True
        if target < value:
            # Everything below in this column is even larger: step left.
            col -= 1
        else:
            # Everything left in this row is even smaller: step down.
            row += 1
    return False
def search_matrix2(matrix, target):
    """Brute-force membership test: scan each row for ``target``."""
    for row in matrix:
        if target in row:
            return True
    return False
|
louisuss/Algorithms-Code-Upload
|
Python/Tips/BinarySearch/2d_matrix.py
|
2d_matrix.py
|
py
| 518 |
python
|
ko
|
code
| 0 |
github-code
|
6
|
34320404112
|
import urllib.request
def get_url(digikeyID):
with urllib.request.urlopen('https://www.digikey.com/products/en?keywords=' + digikeyID) as url:
s = url.read()
s = str(s)
s = s.split('\\n')
for line in s:
if "lnkDatasheet" in line:
line = line.strip(' ').split(' ')
urlLine = line[3]
url = urlLine.strip('href=').strip('"')
return url
def url_assoc(fkey, mfg, fout):
f = open(fkey, "r")
m = open(mfg, "r")
w = open(fout, "w")
count = -1
for line in f:
count += 1
url = get_url(line)
if line == None or url == None:
print (count)
for i, item in enumerate(m):
if i == count:
url = get_url(item)
break
if url == None:
w.write('\n')
else:
w.write(item[:-1] + '-->-->--' + url + '\n')
continue
w.write(line[:-1] + '-->-->--' + url + '\n')
if '277-2205-ND' in line:
break
f.close()
w.close()
fin = 'digiIDs.txt'
fmfg = 'mfgpn.txt'
fout = 'IDs.txt'
url_assoc(fin, fmfg, fout)
|
aschwarz22/work_arr
|
prac/url.py
|
url.py
|
py
| 1,181 |
python
|
en
|
code
| 0 |
github-code
|
6
|
35945080718
|
import json
import csv
filename = 'data/predictions/test_prediction_RD_15_0.00003_4_finnum_5_bertuncase.csv'
j = 0
predictions = []
with open(filename, 'r') as csvfile:
datareader = csv.reader(csvfile)
for row in datareader:
j += 1
if j == 1: continue
new_row = []
new_row += [row[0]]
new_row += [row[1].replace('[', '').replace(']', '').split(",")]
for i, i_number in enumerate(new_row[1]):
try:
new_row[1][i] = int(i_number)
except:
new_row[1][i] = new_row[1][i].replace("'","").replace("'","").replace(" ","")
new_row += [row[2].replace('[', '').replace(']', '').split(",")]
for i, i_number in enumerate(new_row[2]):
try:
new_row[2][i] = int(i_number)
except:
new_row[2][i] = new_row[2][i].replace("'","").replace("'","").replace(" ","")
print(new_row)
predictions += [new_row]
with open('data/predictions/test_prediction_RD_15_0.00003_4_finnum_5_bertuncase.json','w') as f:
json.dump(predictions, f)
|
MikeDoes/ETH_NLP_Project
|
predictions_to_json.py
|
predictions_to_json.py
|
py
| 1,144 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74055844987
|
import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F
from collections import namedtuple
from .set2set import Set2Vec
ReadoutConfig = namedtuple(
'ReadoutConfig', [
'hidden_dim',
'readout_hidden_dim',
'mode',
'target_dim',
]
)
class Readout(nn.Module):
def __init__(self, config):
super().__init__()
self.config = config
self.classify = (self.config.mode == 'clf')
self.hidden_dim = config.hidden_dim
self.target_dim = config.target_dim
self.readout_hidden_dim = config.readout_hidden_dim
self.activation = nn.LeakyReLU
def forward(self, G):
pass
class DTNNReadout(Readout):
def __init__(self, config):
super().__init__(config)
net = nn.Sequential(
nn.Linear(self.hidden_dim, self.readout_hidden_dim),
self.activation(),
nn.BatchNorm1d(self.readout_hidden_dim),
nn.Linear(self.readout_hidden_dim, self.target_dim),
)
self.net = net
def forward(self, h):
bs, gd, dd = (s for s in h.size())
x = h.view(-1, dd)
x = self.net(x)
x = x.view(bs, gd, -1)
x = x.sum(1)
return x
class FullyConnectedReadout(Readout):
def __init__(self, config):
super().__init__(config)
net = nn.Sequential(
nn.Linear(self.hidden_dim, self.readout_hidden_dim),
self.activation(),
nn.BatchNorm1d(self.readout_hidden_dim),
nn.Linear(self.readout_hidden_dim, self.target_dim),
)
self.net = net
def forward(self, h):
x = torch.mean(h, 1)
x = self.net(x)
return x
class SetReadout(Readout):
def __init__(self, config):
super().__init__(config)
self.set2vec = Set2Vec(self.hidden_dim, self.target_dim, config.readout_hidden_dim)
def forward(self, h):
x = self.set2vec(h)
return x
class VCNReadout(Readout):
def __init__(self, config):
super().__init__(config)
self.module_list = nn.ModuleList()
for target in self.target_names:
self.module_list.append(nn.Linear(self.hidden_dim, target.dim))
def forward(self, G):
h_dict = {v: G.node[v]['hidden'] for v in G.nodes()}
out = {}
for i, target in enumerate(self.target_names):
out[target.name] = self.module_list[i](h_dict[target.name])
return out
class VertexReadout(Readout):
def __init__(self, config):
super().__init__(config)
net = nn.Sequential(
nn.Linear(self.hidden_dim, self.readout_hidden_dim),
self.activation(),
nn.BatchNorm2d(self.readout_hidden_dim),
nn.Linear(self.readout_hidden_dim, self.target_dim),
)
self.net = net
def forward(self, h):
bs, gd, dd = (s for s in h.size())
x = h.view(-1, dd)
x = self.net(x)
x = x.view(bs, gd, -1)
return x
def make_readout(readout_config):
if readout_config.function == 'fully_connected':
return FullyConnectedReadout(readout_config.config)
elif readout_config.function == 'dtnn':
return DTNNReadout(readout_config.config)
elif readout_config.function == 'vcn':
return VCNReadout(readout_config.config)
elif readout_config.function == 'vertex':
return VertexReadout(readout_config.config)
elif readout_config.function == 'set':
return SetReadout(readout_config.config)
else:
raise ValueError("Unsupported readout function! ({})".format(readout_config.function))
|
isaachenrion/gcn
|
models/mpnn/readout/readout.py
|
readout.py
|
py
| 3,763 |
python
|
en
|
code
| 0 |
github-code
|
6
|
38470272604
|
import collections
def flatten_path(nested, parent_key=()):
items = []
for k, v in nested.items():
new_key = parent_key + (k,)
if isinstance(v, collections.abc.MutableMapping):
items.extend(flatten_path(v, new_key).items())
else:
items.append((new_key, v))
return dict(items)
def flatten(nested, sep='.'):
return {sep.join(k): v for k, v in flatten_path(nested).items()}
|
BRGM/inept
|
inept/utils.py
|
utils.py
|
py
| 439 |
python
|
en
|
code
| 1 |
github-code
|
6
|
27773482180
|
import threading
from sqlalchemy import Column, UnicodeText, Integer
from telepyrobot.db import BASE, SESSION
from telepyrobot.utils.msg_types import Types
class Notes(BASE):
__tablename__ = "self_notes"
user_id = Column(Integer, primary_key=True)
name = Column(UnicodeText, primary_key=True)
value = Column(UnicodeText, nullable=False)
msgtype = Column(Integer, default=Types.TEXT)
file_id = Column(UnicodeText)
file_ref = Column(UnicodeText)
def __init__(self, user_id, name, value, msgtype, file_id, file_ref):
"""initializing db"""
self.user_id = user_id
self.name = name
self.value = value
self.msgtype = msgtype
self.file_id = file_id
self.file_ref = file_ref
def __repr__(self):
"""get db message"""
return f"<Note {self.name}>"
Notes.__table__.create(checkfirst=True)
INSERTION_LOCK = threading.RLock()
SELF_NOTES = {}
# Types of message
# TEXT = 1
# DOCUMENT = 2
# PHOTO = 3
# VIDEO = 4
# STICKER = 5
# AUDIO = 6
# VOICE = 7
# VIDEO_NOTE = 8
# ANIMATION = 9
# ANIMATED_STICKER = 10
# CONTACT = 11
def save_note(user_id, note_name, note_data, msgtype, file_id=None, file_ref=None):
global SELF_NOTES
with INSERTION_LOCK:
prev = SESSION.query(Notes).get((user_id, note_name))
if prev:
SESSION.delete(prev)
note = Notes(
user_id,
note_name,
note_data,
msgtype=int(msgtype),
file_id=file_id,
file_ref=file_ref,
)
SESSION.add(note)
SESSION.commit()
if not SELF_NOTES.get(user_id):
SELF_NOTES[user_id] = {}
SELF_NOTES[user_id][note_name] = {
"value": note_data,
"type": msgtype,
"file_id": file_id,
"file_ref": file_ref,
}
def get_note(user_id, note_name):
if not SELF_NOTES.get(user_id):
SELF_NOTES[user_id] = {}
return SELF_NOTES[user_id].get(note_name)
def get_all_notes(user_id):
if not SELF_NOTES.get(user_id):
SELF_NOTES[user_id] = {}
return None
allnotes = list(SELF_NOTES[user_id])
allnotes.sort()
return allnotes
def get_num_notes(user_id):
try:
num_notes = SESSION.query(Notes).count()
return num_notes
finally:
SESSION.close()
def rm_note(user_id, note_name):
global SELF_NOTES
with INSERTION_LOCK:
note = SESSION.query(Notes).get((user_id, note_name))
if note:
SESSION.delete(note)
SESSION.commit()
SELF_NOTES[user_id].pop(note_name)
return True
else:
SESSION.close()
return False
def __load_all_notes():
global SELF_NOTES
getall = SESSION.query(Notes).distinct().all()
for x in getall:
if not SELF_NOTES.get(x.user_id):
SELF_NOTES[x.user_id] = {}
SELF_NOTES[x.user_id][x.name] = {
"value": x.value,
"type": x.msgtype,
"file_id": x.file_id,
"file_ref": x.file_ref,
}
__load_all_notes()
|
Divkix/TelePyroBot
|
telepyrobot/db/notes_db.py
|
notes_db.py
|
py
| 3,140 |
python
|
en
|
code
| 40 |
github-code
|
6
|
9633991773
|
import os
import pandas
FILES = [
"../.data/accidents_2005_to_2007.csv",
"../.data/accidents_2009_to_2011.csv",
"../.data/accidents_2012_to_2014.csv",
]
def preprocess_accident_data():
for csv_file in FILES:
df = pandas.read_csv(csv_file)
data = df[[
'Date', 'Day_of_Week', 'Time', '1st_Road_Class', 'Road_Type', 'Speed_limit',
'Junction_Control', '2nd_Road_Class', 'Pedestrian_Crossing-Human_Control',
'Pedestrian_Crossing-Physical_Facilities', 'Light_Conditions', 'Weather_Conditions',
'Road_Surface_Conditions', 'Special_Conditions_at_Site', "Urban_or_Rural_Area"]]
labels = df[['Police_Force', 'Accident_Severity', 'Number_of_Vehicles', 'Number_of_Casualties', ]]
with open(os.path.splitext(csv_file)[0] + "_data.csv", "w") as data_file:
data.to_csv(data_file, sep=",", encoding="utf-8")
with open(os.path.splitext(csv_file)[0] + "_labels.csv", "w") as labels_file:
labels.to_csv(labels_file, sep=",", encoding="utf-8")
|
mustafa-cosar/ceng562
|
src/preprocess.py
|
preprocess.py
|
py
| 1,057 |
python
|
en
|
code
| 0 |
github-code
|
6
|
21296048306
|
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 31 22:15:47 2020
@author: Sinki
"""
class Event:
'''
活动
'''
def __init__(self,event):
'''
根据event生成新的对象
{
"name": "初心纪念活动",
"start": "201101",
"end": "201121",
"gacha": [1,2,3]
}
'''
self.event=event
self.name=event['name']
self.start=event['start']
self.end=event['end']
self.gacha=event['gacha']
self.type=event['type']
self.get=event['get']
def show(self):
print('活动名:',self.name)
print('活动类型:',self.type)
print('开始日期:',self.start)
print('结束日期:',self.end)
if self.got():
print('已在',self.get,'集齐!')
def updateTag(self):
self.event['get']=self.get
return self.event
def got(self):
if self.get=='999999':
return False
return True
|
NingChenhui/Mirror
|
event.py
|
event.py
|
py
| 1,093 |
python
|
en
|
code
| 0 |
github-code
|
6
|
10887625874
|
from Korisnik import *
class Sluzbenik(Korisnik):
def __init__(self, korisnicko_ime, lozinka, id, sektor):
super().__init__(korisnicko_ime, lozinka)
self.id = id
self.sektor = sektor
@staticmethod
def prijava(niz): #posto je staticka metoda, ne mora da ima self za parametar
kor_ime = input("Unesite korisnicko ime: ")
loz = input("Unesite lozinku: ")
for element in niz: #mogao sam ubaciti i .lower() ili .upper()
if(kor_ime == element.korisnicko_ime and loz == element.lozinka):
id = input("Unesite id: ")
id = id.upper()
if(id == element.id):
print("Uspenso ste se prijavili.")
return True
print("Pogresno ste uneli podatke. Pokusajte ponovo.")
return False
|
marko-smiljanic/vezbanje-strukture-podataka
|
vezbanje-strukture-podataka/V1_z2_korisnici/Sluzbenik.py
|
Sluzbenik.py
|
py
| 867 |
python
|
sl
|
code
| 0 |
github-code
|
6
|
36942726510
|
from PIL import Image
myImg = Image.open('Image1.jpg')
newImg = myImg.convert('L')
print("Do you want your ", myImg, "converted to GRY?")
print("Type: y or n")
answer = str(input("y or n?: "))
if answer == "y":
newImg.show()
newImg.save('Image1_Grayscale.jpg')
if answer == "n":
myImg.show()
|
Sir-Lance/CS1400
|
EX7-3.py
|
EX7-3.py
|
py
| 304 |
python
|
en
|
code
| 0 |
github-code
|
6
|
7169957044
|
def solve_maze(n, instructions):
dx = [0, 1, 0, -1]
dy = [1, 0, -1, 0]
x = y = dir = 0
grid = [[0] * 100 for i in range(100)]
for i in range(n):
if instructions[i] == 'F':
x += dx[dir]
y += dy[dir]
grid[x][y] = 1
elif instructions[i] == 'L':
dir = (dir + 1) % 4
elif instructions[i] == 'R':
dir = (dir + 3) % 4
left = top = bottom = right = 0
for i in range(100):
for j in range(100):
if grid[i][j] == 1:
left = min(left, j)
right = max(right, j)
top = min(top, i)
bottom = max(bottom, i)
for i in range(top, bottom + 1):
for j in range(left, right + 1):
if grid[i][j] == 1:
print(".", end="")
else:
print("#", end="")
print("")
if __name__ == "__main__":
n = int(input().strip())
instructions = input().strip()
solve_maze(n, instructions)
|
Competitions-And-Hackathons/Cos-Pro-1tier-python
|
BOJ/구현/c미로만들기.py
|
c미로만들기.py
|
py
| 1,033 |
python
|
en
|
code
| 0 |
github-code
|
6
|
32644474997
|
import mgear.core.pyqt as gqt
from mgear.vendor.Qt import QtCore, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(200, 133)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.sections_label = QtWidgets.QLabel(Dialog)
self.sections_label.setObjectName("sections_label")
self.horizontalLayout.addWidget(self.sections_label)
self.sections_spinBox = QtWidgets.QSpinBox(Dialog)
self.sections_spinBox.setMinimum(1)
self.sections_spinBox.setMaximum(999)
self.sections_spinBox.setProperty("value", 3)
self.sections_spinBox.setObjectName("sections_spinBox")
self.horizontalLayout.addWidget(self.sections_spinBox)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.direction_label = QtWidgets.QLabel(Dialog)
self.direction_label.setObjectName("direction_label")
self.horizontalLayout_2.addWidget(self.direction_label)
self.direction_comboBox = QtWidgets.QComboBox(Dialog)
self.direction_comboBox.setObjectName("direction_comboBox")
self.direction_comboBox.addItem("")
self.direction_comboBox.addItem("")
self.direction_comboBox.addItem("")
self.direction_comboBox.addItem("")
self.direction_comboBox.addItem("")
self.direction_comboBox.addItem("")
self.horizontalLayout_2.addWidget(self.direction_comboBox)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.spacing_label = QtWidgets.QLabel(Dialog)
self.spacing_label.setObjectName("spacing_label")
self.horizontalLayout_3.addWidget(self.spacing_label)
self.spacing_doubleSpinBox = QtWidgets.QDoubleSpinBox(Dialog)
self.spacing_doubleSpinBox.setDecimals(4)
self.spacing_doubleSpinBox.setMaximum(999.99)
self.spacing_doubleSpinBox.setProperty("value", 1.0)
self.spacing_doubleSpinBox.setObjectName("spacing_doubleSpinBox")
self.horizontalLayout_3.addWidget(self.spacing_doubleSpinBox)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.verticalLayout_2.addLayout(self.verticalLayout)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout_2.addWidget(self.buttonBox)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(gqt.fakeTranslate("Dialog", "Dialog", None, -1))
self.sections_label.setText(gqt.fakeTranslate("Dialog", "Sections Number", None, -1))
self.direction_label.setText(gqt.fakeTranslate("Dialog", "Direction", None, -1))
self.direction_comboBox.setItemText(0, gqt.fakeTranslate("Dialog", "X", None, -1))
self.direction_comboBox.setItemText(1, gqt.fakeTranslate("Dialog", "Y", None, -1))
self.direction_comboBox.setItemText(2, gqt.fakeTranslate("Dialog", "Z", None, -1))
self.direction_comboBox.setItemText(3, gqt.fakeTranslate("Dialog", "-X", None, -1))
self.direction_comboBox.setItemText(4, gqt.fakeTranslate("Dialog", "-Y", None, -1))
self.direction_comboBox.setItemText(5, gqt.fakeTranslate("Dialog", "-Z", None, -1))
self.spacing_label.setText(gqt.fakeTranslate("Dialog", "Spacing", None, -1))
|
mgear-dev/mgear4
|
release/scripts/mgear/shifter/component/chain_guide_initializer_ui.py
|
chain_guide_initializer_ui.py
|
py
| 4,345 |
python
|
en
|
code
| 209 |
github-code
|
6
|
26416473947
|
# -*- coding: UTF-8 -*-
from flask import Flask
from flask import request
from flask import json
import requests
app = Flask(__name__)
# http://blog.luisrei.com/articles/flaskrest.html
@app.route('/oslh2b', methods=['POST'])
def oslh2b():
if request.method == 'POST':
json_headers = request.headers
data = json.loads(request.data)
destination_url = data["destination_url"]
data.pop("destination_url", None)
json_data = json.dumps(data)
r = requests.post(destination_url, data=json_data, headers=json_headers)
data = {}
data["body"] = json.loads(r.text)
data["headers"] = r.headers
return str(data)
def config2dict(request_data):
'''
Convert a lot of lines with two strings per line in a dictionary
OS_AUTH_URL http://openstack-vcenter:5000/v3
OS_PROJECT_ID 9d7812704e104a208603c5d0481bd952
OS_PROJECT_NAME admin
OS_USER_DOMAIN_NAME default
OS_USERNAME admin
OS_PASSWORD admin
OS_REGION_NAME RegionOne
name prueba
'''
configuration = {}
for line in request_data.splitlines():
if len(line.split()) == 2:
configuration[line.split()[0]] = line.split()[1]
return(configuration)
def get_auth_token(config):
headers = {}
headers["Content-Type"] = 'application/json'
data = """
{
"auth": {
"identity": {
"methods": [
"password"
],
"password": {
"user": {
"name": "%s",
"password": "%s",
"domain": {
"name": "%s"
}
}
}
},
"scope": {
"project": {
"id": "%s",
"domain": {
"name": "%s"
}
}
}
}
}
""" % (config["OS_USERNAME"],
config["OS_PASSWORD"],
config["OS_USER_DOMAIN_NAME"],
config["OS_PROJECT_ID"],
config["OS_USER_DOMAIN_NAME"])
#print data
headers["Content-Type"] = 'application/json'
#
r = requests.post(config["OS_AUTH_URL"] + "/auth/tokens",
data=data, headers=headers)
token = json.loads(r.text)
token_id = r.headers["X-Subject-Token"]
#print json.dumps(token, indent=4)
#print (token_id)
return (token, token_id)
def get_endpoint(token, endpoint_type, interface_type):
url = ""
for i in range(len(token["token"]["catalog"])):
if (token["token"]["catalog"][i]["type"] == endpoint_type):
for j in range(len(token["token"]["catalog"][i]["endpoints"])):
if (token["token"]["catalog"][i]["endpoints"][j]["interface"] == interface_type):
url = token["token"]["catalog"][i]["endpoints"][j]["url"]
return (url)
def create_network(token, token_id, env_name):
# Redes:
# - guardamos red con salida pública
# - creamos red y subred privada
# - creamos puerto en subred privada
# - creamos router en subred privada y pública
# - asignamos puerto a router
network_url = get_endpoint(token, "network", "public")
headers = {}
headers["Content-Type"] = 'application/json'
headers["X-Auth-Token"] = token_id
r = requests.get(network_url + "/v2.0/networks", headers=headers)
#r = requests.post(network_url + "/v2.0/networks",headers=headers,data=data)
#print r.text
networks = json.loads(r.text)
#print json.dumps(networks, indent=4)
# # Obtenemos el network_id de la red de publica
public_network = {}
for network in networks["networks"]:
if network["router:external"]:
public_network = network
print ((json.dumps(public_network, indent=4)))
# public_network_id = pretty_response["network"]["id"]
# Creamos la red de la instancia
private_net_name = env_name + "_net"
data = """
{
"network": {
"name": "%s",
"admin_state_up": true
}
}
""" % private_net_name
r = requests.post(network_url + "/v2.0/networks",
headers=headers, data=data)
private_net = json.loads(r.text)
print ((json.dumps(private_net, indent=4)))
# Creamos la subred de la instancia
# subnetwork_url = "http://openstack.paradigmadigital.com:9696/v2.0/subnets"
private_subnet_name = env_name + "_subnet"
data = """
{
"subnet": {
"name": "%s",
"ip_version": 4,
"network_id": "%s",
"cidr": "172.17.235.0/24",
"gateway_ip": "172.17.235.1",
"allocation_pools": [
{
"start": "172.17.235.10",
"end": "172.17.235.100"
}
],
"enable_dhcp": "true"
}
}
""" % (private_subnet_name, private_net["network"]["id"])
r = requests.post(network_url + "/v2.0/subnets", headers=headers, data=data)
private_subnet = json.loads(r.text)
print ((json.dumps(private_subnet, indent=4)))
# Creamos un router para dar salida a la red publica hacia el exterior
# routers_url = "http://openstack.paradigmadigital.com:9696/v2.0/routers"
router_name = env_name + "_router"
data = """
{
"router": {
"name": "%s",
"external_gateway_info": {
"network_id": "%s"
}
}
}
""" % (router_name, public_network["id"])
r = requests.post(network_url + "/v2.0/routers", headers=headers, data=data)
external_router = json.loads(r.text)
print ((json.dumps(external_router, indent=4)))
# Conectamos el router público con la red privada
# add_router_interface_url = routers_url + "/" + external_router_id +
# "/add_router_interface"
data = """
{
"subnet_id": "%s"
}
""" % private_subnet["subnet"]["id"]
r = requests.put(network_url + "/v2.0/routers/" + external_router["router"]["id"] + "/add_router_interface",
headers=headers, data=data)
external_router_connections = json.loads(r.text)
print ((json.dumps(external_router_connections, indent=4)))
network_env = {}
network_env["public"] = public_network
network_env["private_net"] = private_net["network"]
network_env["private_subnet"] = private_subnet["subnet"]
network_env["external_router"] = external_router["router"]
return (network_env)
def create_server(token, token_id, env):
server_env = {}
headers = {}
headers["Content-Type"] = 'application/json'
headers["X-Auth-Token"] = token_id
#print ((json.dumps(token, indent=4)))
name = env["name"] + "_computer"
#image = "a9f3ef90-da4f-47f4-b05a-c8180b3bda60"
image = "78eb8e56-d6b5-424d-9a94-f92e02c498f7"
flavor = "2"
#print ((json.dumps(env, indent=4)))
data = """
{
"server" : {
"name" : "%s",
"imageRef" : "%s",
"flavorRef" : "%s",
"availability_zone": "nova",
"security_groups": [
{
"name": "default"
}
],
"networks": [
{
"uuid": "%s"
}
]
}
}
""" % (name, image, flavor, env["network"]["private_net"]["id"])
compute_url = get_endpoint(token, "compute", "public")
r = requests.post(compute_url + "/servers", headers=headers, data=data)
#print (r.text)
server_env = json.loads(r.text)
#network_url = get_endpoint(token, "network", "public")
#print ((json.dumps(server_env, indent=4)))
return (server_env["server"])
def dict2config(dictio):
config = ""
for key in list(dictio.keys()):
config = config + str(key) + " " + str(dictio[key]) + "\n"
return (config)
@app.route('/create_computer_mock', methods=['POST'])
def create_computer_mock():
data = """
router_id 647a4c8f-f055-461d-bd91-b7c224f4acd9
server_id 00633113-acfc-41fc-8b23-88d2e84c1a90
name prueba
OS_USERNAME admin
subnet_id ca2cfacb-de0a-4705-a334-9a4cbb709f41
OS_PROJECT_ID 9d7812704e104a208603c5d0481bd952
OS_REGION_NAME RegionOne
OS_USER_DOMAIN_NAME default
OS_AUTH_URL http://openstack-vcenter:5000/v3
OS_PROJECT_NAME admin
OS_PASSWORD admin
net_id bee1007e-1289-4c75-9dd5-dbe11a3fdba5
"""
return (data)
@app.route('/create_computer', methods=['POST'])
def create_computer():
'''
create_computer
This creates a server and returns a list of net_id, subnet_id, router_id,
server_id and console_url.
Console url can be configured in a media prim.
The other data can be used to delete the server.
'''
if request.method == 'POST':
env = {}
config = {}
config = config2dict(request.data)
env["name"] = config["name"]
token, token_id = get_auth_token(config)
env["network"] = create_network(token, token_id, env["name"])
config["net_id"] = env["network"]["private_net"]["id"]
config["router_id"] = env["network"]["external_router"]["id"]
config["subnet_id"] = env["network"]["private_subnet"]["id"]
env["server"] = create_server(token, token_id, env)
config["server_id"] = env["server"]["id"]
return dict2config(config)
def delete_server(token, token_id, server_id):
headers = {}
headers["Content-Type"] = 'application/json'
headers["X-Auth-Token"] = token_id
compute_url = get_endpoint(token, "compute", "public")
requests.delete(compute_url + "/servers/" + server_id, headers=headers)
return
def delete_network(token, token_id, net_id, subnet_id, router_id):
headers = {}
headers["Content-Type"] = 'application/json'
headers["X-Auth-Token"] = token_id
network_url = get_endpoint(token, "network", "public")
#r = requests.put(network_url + "/v2.0/routers/" + external_router["router"]["id"] + "/add_router_interface",
#headers=headers, data=data)
#r = requests.post(network_url + "/v2.0/routers", headers=headers, data=data)
#r = requests.post(network_url + "/v2.0/subnets", headers=headers, data=data)
return
@app.route('/delete_computer', methods=['POST'])
def delete_computer():
'''
delete_computer
This deletes a computer and returns 200 if ok
'''
if request.method == 'POST':
config = {}
config = config2dict(request.data)
token, token_id = get_auth_token(config)
delete_server(token, token_id, config["server_id"])
delete_network(token, token_id,
config["net_id"], config["subnet_id"],
config["router_id"])
def get_console(token, token_id, server):
headers = {}
headers["Content-Type"] = 'application/json'
headers["X-Auth-Token"] = token_id
data = """
{
"os-getVNCConsole": {
"type": "novnc"
}
}
"""
#data = """
#{
#"os-getSPICEConsole": {
#"type": "spice-html5"
#}
#}
#"""
compute_url = get_endpoint(token, "compute", "public")
r = requests.post(compute_url + "/servers/" + server["id"] + "/action",
headers=headers, data=data)
print ((r.text))
console_env = json.loads(r.text)
print ((json.dumps(console_env, indent=4)))
return (console_env["console"])
@app.route('/get_console_url', methods=['POST'])
def get_console_url():
if request.method == 'POST':
env = {}
config = {}
config = config2dict(request.data)
env["name"] = config["name"]
token, token_id = get_auth_token(config)
env["server"] = {}
env["server"]["id"] = config["server_id"]
env["console"] = get_console(token, token_id, env["server"])
return ("console_url " + env["console"]["url"])
if __name__ == '__main__':
app.run()
|
elmanytas/osl-computer
|
ansible-flask/roles/flaskapp/files/flaskapp/flaskapp/__init__.py
|
__init__.py
|
py
| 12,045 |
python
|
en
|
code
| 2 |
github-code
|
6
|
73016495228
|
from tkinter import *
from tkinter import ttk
import sqlite3
import time
#--------------------------------------
# DEFININDO MODULO HORA E DATA
#--------------------------------------
time = time.localtime()
hour = ('{}:{}'.format(time[3], time[4]))
date = ('{}/{}/{}'.format(time[0], time[1], time[2]))
#--------------------------------------
# GESTOR DE BANCO DE DADOS
#--------------------------------------
con = sqlite3.connect('database.db')
c = con.cursor()
sql = 'SELECT * FROM Users WHERE user = ?'
c.execute('CREATE TABLE IF NOT EXISTS Users(user text, passw text, cargo text)')
c.execute("""
CREATE TABLE IF NOT EXISTS Clientes(data text, cargo text, user, name text, cpf text, tel text,email text)
""")
#--------------------------------------
# QUERRY DE LOGIN
#--------------------------------------
def login(user, passw):
c.execute(sql, (user,))
auth = c.fetchone()
if auth == None:
return False
else:
if (user, passw) == (auth[0], auth[1]):
return True
else:
return False
#--------------------------------------
# QUERRY DE CARGO
#--------------------------------------
def cargo(user):
c.execute(sql, (user,))
global auth
auth = c.fetchone()
return auth[2]
#--------------------------------------
# CADASTRO DE CLIENTES
#--------------------------------------
def cadastro():
#--------------------------------------
# GESTOR DE INFORMAÇÃO
#--------------------------------------
def get():
clt = 'INSERT INTO Clientes(data, cargo, user, name, cpf, tel, email) VALUES (?,?,?,?,?,?,?)'
data = "{} {}".format(date, hour)
user = auth[0]
cargo = auth[2]
name = et_name.get()
cpf = et_cpf.get()
tel = et_tel.get()
email = et_email.get()
c.execute(clt,(data, cargo, user, name, cpf, tel, email),)
con.commit()
root = Tk()
cad = LabelFrame(root, text='Cadastro')
root.title("S4U® CADASTRO")
Label(cad, text='Nome').grid(row=0, column=0)
Label(cad, text='CPF').grid(row=1, column=0)
Label(cad, text='Telefone').grid(row=2, column=0)
Label(cad, text='E-Mail').grid(row=3, column=0)
et_name = Entry(cad)
et_cpf = Entry(cad)
et_tel = Entry(cad)
et_email = Entry(cad)
et_name.grid(row=0, column=1)
et_cpf.grid(row=1, column=1)
et_tel.grid(row=2, column=1)
et_email.grid(row=3, column=1)
cad.grid(row=0, columnspan=4)
Button(root, text='Salvar', command=get).grid(row=1, column=0, sticky=W+E)
Button(root, text='Cadastrar Equipamento').grid(row=1, column=1, sticky=W+E)
Button(root, text='Limpar').grid(row=1, column=2, sticky=W+E)
Button(root, text='Sair').grid(row=1, column=3, sticky=W+E)
root.mainloop()
#--------------------------------------
# GESTOR DE CONSULTA
#--------------------------------------
def consulta():
#--------------------------------------
# BUSCANDO CLIENTES
#--------------------------------------
def refresh():
for clear in treeview.get_children():
treeview.delete(clear)
c.execute('SELECT * FROM Clientes')
for sql_cliente in c.fetchall():
treeview.insert('', 0, text=sql_cliente[3], values=(sql_cliente[5], sql_cliente[6]))
def busca(event):
for item in treeview.selection():
item_text = treeview.item(item, "text")
sql_busca = 'SELECT * FROM Clientes WHERE name = ?'
for sql_consulta in c.execute(sql_busca, (item_text,)):
lb_tempo['text'] = sql_consulta[0]
lb_user['text'] = (sql_consulta[1].title(), sql_consulta[2].title())
lb_name['text'] = sql_consulta[3].title()
lb_cpf['text'] = sql_consulta[4]
lb_tel['text'] = sql_consulta[5]
lb_email['text'] = sql_consulta[6].title()
root = Tk()
root.title('S4U® CONSULTA')
consult = LabelFrame(root, text='Consulta')
Label(consult, text='Data: ').grid(row=0, column=0, sticky=E)
Label(consult, text='Funcionario: ').grid(row=1, column=0, sticky=E)
Label(consult, text='Nome: ').grid(row=2, column=0, sticky=E)
Label(consult, text='CPF: ').grid(row=3, column=0, sticky=E)
Label(consult, text='Telefone: ').grid(row=4, column=0, sticky=E)
Label(consult, text='E-Mail: ').grid(row=5, column=0, sticky=E)
#--------------------------------------
# EXIBIR INFORMAÇÕES
#--------------------------------------
lb_tempo = Label(consult, text='')
lb_user = Label(consult, text='')
lb_name = Label(consult, text='')
lb_cpf = Label(consult, text='')
lb_tel = Label(consult, text='')
lb_email = Label(consult, text='')
lb_tempo.grid(row=0, column=1, sticky=W)
lb_user.grid(row=1, column=1, sticky=W)
lb_name.grid(row=2, column=1, sticky=W)
lb_cpf.grid(row=3, column=1, sticky=W)
lb_tel.grid(row=4, column=1, sticky=W)
lb_email.grid(row=5, column=1, sticky=W)
consult.grid(row=0, columnspan=4, sticky=W+E)
#--------------------------------------
# INTERFACE GRAFICA DE BUSCA
#--------------------------------------
Label(root, text='Pesquisar:').grid(row=1, column=0, sticky=E)
Button(root, text='Pesquisar').grid(row=1, column=2, sticky=W+E)
Button(root, text='Buscar', command=refresh).grid(row=1, column=3, sticky=W+E)
et_busca = Entry(root)
treeview = ttk.Treeview(root, columns=('#0', '#1'))
treeview.heading('#0', text='Nome')
treeview.heading('#1', text='Telefone')
treeview.heading('#2', text='E-Mail')
treeview.bind("<<TreeviewSelect>>", busca)
et_busca.grid(row=1, column=1, sticky=W+E)
treeview.grid(row=2, columnspan=4, sticky=W+E)
refresh()
root.mainloop()
|
S4UDeveloper/MDI
|
DB/Database.py
|
Database.py
|
py
| 5,792 |
python
|
en
|
code
| 1 |
github-code
|
6
|
36339047472
|
import csv
from datetime import datetime
import random
Header=["Time","Sample number","Temperature","Humidity","Sensor response", "PM response", "Temperature MFC"]
dataLine=["","","","","","",""]
with open('main.csv','w') as main:
csv_writer=csv.writer(main, delimiter=",")
csv_writer.writerow(Header)
#csv_writer.writerow(lined)
i=0
while i < 10000:
dataLine[0]=datetime.now()
dataLine[1]=i
dataLine[2]=random.randint(0, 40)
dataLine[3]=random.randint(15, 90)
dataLine[4]=random.randint(0, 100)
dataLine[5]=random.randint(0, 100)
dataLine[6]=random.randint(0, 40)
csv_writer.writerow(dataLine)
i=i+1
#with open('main.csv','r') as main:
# csv_reader=csv
#for ligne in csv_writer:
# print(ligne)
|
Virgile-Colrat/YFA-Project_python_interface
|
Sources/testcs.py
|
testcs.py
|
py
| 723 |
python
|
en
|
code
| 0 |
github-code
|
6
|
22755470032
|
from collections import namedtuple
import time
from .utils import (
client_array_operation,
make_valid_data,
create_host_urn,
create_resource_arn,
create_hash,
set_required_access_v2,
transformation,
ipaddress_to_urn
)
from .registry import RegisteredResourceCollector
from schematics import Model
from schematics.types import StringType, ModelType, ListType, BooleanType
InstanceData = namedtuple("InstanceData", ["instance", "instance_type"])
class Tag(Model):
Key = StringType(required=True)
Value = StringType(required=True)
class Subnet(Model):
SubnetId = StringType(required=True)
Tags = ListType(ModelType(Tag), default=[])
AvailabilityZone = StringType(required=True)
VpcId = StringType(required=True)
class Vpc(Model):
VpcId = StringType(required=True)
IsDefault = BooleanType(default=False)
Tags = ListType(ModelType(Tag), default=[])
class SecurityGroup(Model):
GroupName = StringType(default="UKNOWN")
GroupId = StringType(required=True)
VpcId = StringType()
class VpnGateway(Model):
class VpnGatewayVpcAttachment(Model):
VpcId = StringType(required=True)
State = StringType(default="UNKNOWN")
VpnGatewayId = StringType(required=True)
VpcAttachments = ListType(ModelType(VpnGatewayVpcAttachment), default=[])
class InstanceType(Model):
InstanceType = StringType(required=True)
Hypervisor = StringType(default="")
class Instance(Model):
class InstanceState(Model):
Name = StringType(required=True)
class SecurityGroup(Model):
GroupId = StringType(required=True)
InstanceId = StringType(required=True)
InstanceType = StringType(required=True)
State = ModelType(InstanceState)
Tags = ListType(ModelType(Tag), default=[])
PrivateIpAddress = StringType()
PublicDnsName = StringType()
PublicIpAddress = StringType()
SubnetId = StringType()
VpcId = StringType()
SecurityGroups = ListType(ModelType(SecurityGroup), default=[])
class RunInstances(Model):
class ResponseElements(Model):
class InstancesSet(Model):
class RunInstance(Model):
instanceId = StringType(required=True)
items = ListType(ModelType(RunInstance), required=True)
instancesSet = ModelType(InstancesSet, required=True)
responseElements = ModelType(ResponseElements, required=True)
class Ec2InstanceCollector(RegisteredResourceCollector):
API = "ec2"
API_TYPE = "regional"
COMPONENT_TYPE = "aws.ec2"
def __init__(self, location_info, client, agent):
RegisteredResourceCollector.__init__(self, location_info, client, agent)
self.instance_types = {}
def process_all(self, filter=None):
if not filter or "instances" in filter:
self.process_instances()
if not filter or "security_groups" in filter:
self.process_security_groups()
if not filter or "vpcs" in filter:
self.process_vpcs()
if not filter or "subnets" in filter:
self.process_subnets()
if not filter or "vpn_gateways" in filter:
self.process_vpn_gateways()
@set_required_access_v2("ec2:DescribeInstanceTypes")
def collect_instance_type(self, instance_type):
# Items never change, only added, safe to hold in memory
if instance_type not in self.instance_types:
instance_type_data = self.client.describe_instance_types(InstanceTypes=[instance_type]).get(
"InstanceTypes", []
)
if instance_type_data:
self.instance_types[instance_type] = instance_type_data[0]
return self.instance_types.get(instance_type, {})
def collect_instance(self, instance_data):
instance_type = instance_data.get("InstanceType", "")
instance_type_data = self.collect_instance_type(instance_type) or {}
return InstanceData(instance=instance_data, instance_type=instance_type_data)
def collect_instances(self, **kwargs):
for reservation in client_array_operation(
self.client,
"describe_instances",
"Reservations",
Filters=[
{
"Name": "instance-state-code", # Don't return terminated instances
"Values": [
"0", # pending
"16", # running
"32", # shutting-down
"64", # stopping
"80", # stopped
],
}
],
**kwargs
):
for instance_data in reservation.get("Instances", []):
yield self.collect_instance(instance_data)
@set_required_access_v2("ec2:DescribeInstances")
def process_instances(self, **kwargs):
for data in self.collect_instances(**kwargs):
self.process_instance(data)
def process_some_instances(self, ids):
self.process_instances(InstanceIds=ids)
@transformation()
def process_instance_type(self, data):
instance_type = InstanceType(data, strict=False)
instance_type.validate()
return instance_type
@transformation()
def process_instance(self, data):
instance = Instance(data.instance, strict=False)
instance.validate()
self.agent.event(
{
"timestamp": int(time.time()),
"event_type": "ec2_state",
"msg_title": "EC2 instance state",
"msg_text": instance.State.Name,
"host": instance.InstanceId,
"tags": ["state:" + instance.State.Name],
}
)
output = make_valid_data(data.instance)
urns = [
create_host_urn(instance.InstanceId),
create_resource_arn(
"ec2",
self.location_info.Location.AwsRegion,
self.location_info.Location.AwsAccount,
"instance",
instance.InstanceId,
),
]
if not instance.Tags:
output["Tags"] = []
output["Tags"].append({"Key": "host", "Value": instance.InstanceId})
output["Tags"].append({"Key": "instance-id", "Value": instance.InstanceId})
if instance.PrivateIpAddress:
urns.append(ipaddress_to_urn(instance.PrivateIpAddress, instance.VpcId))
output["Tags"].append({"Key": "private-ip", "Value": instance.PrivateIpAddress})
if instance.PublicDnsName:
urns.append(create_host_urn(instance.PublicDnsName))
output["Tags"].append({"Key": "fqdn", "Value": instance.PublicDnsName})
if instance.PublicIpAddress:
urns.append(create_host_urn(instance.PublicIpAddress))
output["Tags"].append({"Key": "public-ip", "Value": instance.PublicIpAddress})
output["URN"] = urns
if data.instance_type: # Don't run if instance type not found
instance_type = self.process_instance_type(data.instance_type)
output["isNitro"] = instance_type.Hypervisor == "nitro"
# Map the subnet and if not available then map the VPC
if instance.SubnetId:
self.emit_relation(instance.InstanceId, instance.SubnetId, "uses-service", {})
elif instance.VpcId: # pragma: no cover
self.emit_relation(instance.InstanceId, instance.VpcId, "uses-service", {})
for security_group in instance.SecurityGroups:
self.emit_relation(instance.InstanceId, security_group.GroupId, "uses-service", {})
self.emit_component(instance.InstanceId, "instance", output)
def collect_security_groups(self, **kwargs):
for security_group in client_array_operation(self.client,
"describe_security_groups",
"SecurityGroups",
**kwargs):
yield security_group
@set_required_access_v2("ec2:DescribeSecurityGroups")
def process_security_groups(self, **kwargs):
for security_group_data in self.collect_security_groups(**kwargs):
self.process_security_group(security_group_data)
@transformation()
def process_security_group(self, data):
security_group = SecurityGroup(data, strict=False)
security_group.validate()
output = make_valid_data(data)
output["Version"] = create_hash(output)
output["Name"] = security_group.GroupName
output["URN"] = [
create_resource_arn(
"ec2",
self.location_info.Location.AwsRegion,
self.location_info.Location.AwsAccount,
"security-group",
security_group.GroupId,
)
]
if security_group.VpcId: # pragma: no cover
self.emit_relation(security_group.VpcId, security_group.GroupId, "has-resource", {})
self.emit_component(security_group.GroupId, "security-group", output)
def collect_vpcs(self):
for vpc in client_array_operation(self.client, "describe_vpcs", "Vpcs"):
yield vpc
@set_required_access_v2("ec2:DescribeVpcs")
def process_vpcs(self):
for vpc_data in self.collect_vpcs():
self.process_vpc(vpc_data)
@transformation()
def process_vpc(self, data):
vpc = Vpc(data, strict=False)
vpc.validate()
output = make_valid_data(data)
# construct a name
vpc_name = vpc.VpcId
name_tag = [tag for tag in vpc.Tags if tag.Key == "Name"]
if vpc.IsDefault:
vpc_name = "default"
elif len(name_tag) > 0:
vpc_name = name_tag[0].Value
output["Name"] = vpc_name
# add a URN
output["URN"] = [
create_resource_arn(
"ec2", self.location_info.Location.AwsRegion, self.location_info.Location.AwsAccount, "vpc", vpc.VpcId
)
]
self.emit_component(vpc.VpcId, "vpc", output)
def collect_subnets(self):
for subnet in client_array_operation(self.client, "describe_subnets", "Subnets"):
yield subnet
@set_required_access_v2("ec2:DescribeSubnets")
def process_subnets(self):
for subnet_data in self.collect_subnets():
self.process_subnet(subnet_data)
@transformation()
def process_subnet(self, data):
subnet = Subnet(data, strict=False)
subnet.validate()
output = make_valid_data(data)
# construct a name
subnet_name = subnet.SubnetId
name_tag = [tag for tag in subnet.Tags if tag.Key == "Name"]
if len(name_tag) > 0:
subnet_name = name_tag[0].Value
if subnet.AvailabilityZone: # pragma: no cover
subnet_name = "{}-{}".format(subnet_name, subnet.AvailabilityZone)
output["Name"] = subnet_name
# add a URN
output["URN"] = [
create_resource_arn(
"ec2",
self.location_info.Location.AwsRegion,
self.location_info.Location.AwsAccount,
"subnet",
subnet.SubnetId,
)
]
self.emit_component(subnet.SubnetId, "subnet", output)
self.emit_relation(subnet.SubnetId, subnet.VpcId, "uses-service", {})
def collect_vpn_gateways(self):
for vpn_gateway in client_array_operation(self.client,
"describe_vpn_gateways",
"VpnGateways",
Filters=[{"Name": "state", "Values": ["pending", "available"]}]
):
yield vpn_gateway
@set_required_access_v2("ec2:DescribeVpnGateways")
def process_vpn_gateways(self):
for vpn_gateway_data in self.collect_vpn_gateways():
self.process_vpn_gateway(vpn_gateway_data)
@transformation()
def process_vpn_gateway(self, data):
vpn_gateway = VpnGateway(data, strict=False)
vpn_gateway.validate()
output = make_valid_data(data)
output["Name"] = vpn_gateway.VpnGatewayId
self.emit_component(vpn_gateway.VpnGatewayId, "vpn-gateway", output)
for vpn_attachment in vpn_gateway.VpcAttachments:
if vpn_attachment.State == "attached":
self.emit_relation(vpn_gateway.VpnGatewayId, vpn_attachment.VpcId, "uses-service", {})
    @transformation()
    def process_batch_instances(self, event, seen):
        """Process every instance id named in a CloudTrail RunInstances-shaped event.

        Ids already present in *seen* are skipped; *seen* is updated in place so
        the same id is not reprocessed by a later event in the batch.
        """
        data = RunInstances(event, strict=False)
        data.validate()
        instance_ids = [
            instance.instanceId
            for instance in data.responseElements.instancesSet.items
            if instance.instanceId not in seen
        ]
        self.process_instances(InstanceIds=instance_ids)
        seen.update(set(instance_ids))
    def process_state_notification(self, event, seen):
        """Handle an instance state-change notification event.

        Terminated instances are removed from the topology; any other state
        triggers a refresh of that single instance. *seen* deduplicates ids
        within one event batch and is updated in place.
        """
        instance_id = event.get("instance-id", "")
        if instance_id not in seen:
            seen.add(instance_id)
            if event.get("state") == "terminated":
                self.agent.delete(instance_id)
            else:
                self.process_instances(InstanceIds=[instance_id])
def process_one_instance(self, instance_id):
self.process_instances(InstanceIds=[instance_id])
def process_one_security_group(self, security_group_id):
self.process_security_groups(GroupIds=[security_group_id])
EVENT_SOURCE = "ec2.amazonaws.com"
CLOUDTRAIL_EVENTS = [
{"event_name": "RunInstances", "processor": process_batch_instances},
{"event_name": "StartInstances", "processor": process_batch_instances},
{"event_name": "StopInstances", "processor": process_batch_instances},
{"event_name": "TerminateInstances", "processor": process_batch_instances},
{"event_name": "InstanceStateChangeNotification", "processor": process_state_notification},
{"event_name": "AttachVolume", "path": "responseElements.instanceId", "processor": process_one_instance},
{"event_name": "DetachVolume", "path": "responseElements.instanceId", "processor": process_one_instance},
{
"event_name": "ModifyInstanceAttribute",
"path": "requestParameters.instanceId",
"processor": process_one_instance,
},
{
"event_name": "RevokeSecurityGroupIngress",
"path": "requestParameters.groupId",
"processor": process_one_security_group,
},
{
"event_name": "AuthorizeSecurityGroupIngress",
"path": "requestParameters.groupId",
"processor": process_one_security_group,
},
]
|
StackVista/stackstate-agent-integrations
|
aws_topology/stackstate_checks/aws_topology/resources/ec2.py
|
ec2.py
|
py
| 14,997 |
python
|
en
|
code
| 1 |
github-code
|
6
|
34714688235
|
import argparse
import torch
import torch.utils.data
import src.utils as utils
from src.utils import alphabet
from src.utils import strLabelConverterForAttention as converter
import src.dataset as dataset
import model
parser = argparse.ArgumentParser()
parser.add_argument('--testList', default='label/test_label.txt')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=2)
parser.add_argument('--batchSize', type=int, default=32, help='input batch size')
parser.add_argument('--cuda', action='store_true', help='enables cuda', default=True)
parser.add_argument('--gpuid', type=int, default=0, help='which GPU to use')
parser.add_argument('--height', type=int, default=32, help='the height of the input image to network')
parser.add_argument('--width', type=int, default=208, help='the width of the input image to network')
parser.add_argument('--encoder', type=str, default='', help="path to encoder (to continue training)")
parser.add_argument('--decoder', type=str, default='', help='path to decoder (to continue training)')
parser.add_argument('--loadModelEpoch', type=int, default=0, help='load model from epoch n to continue training, override the previous two')
opt = parser.parse_args()
if opt.cuda:
torch.cuda.set_device(opt.gpuid)
def predict(encoder, decoder, criterion, batchsize, dataset, workers=2):
    """Greedy attention decoding over *dataset*; prints each prediction and the
    final character-level accuracy (EOS tokens included).

    encoder/decoder -- trained CNN+BiLSTM encoder and attention decoder; both are
                       frozen and switched to eval mode here.
    criterion       -- NLL loss, used only to report an average loss.
    batchsize       -- DataLoader batch size; the final batch may be smaller.
    """
    # Inference only: freeze both networks.
    for e, d in zip(encoder.parameters(), decoder.parameters()):
        e.requires_grad = False
        d.requires_grad = False
    encoder.eval()
    decoder.eval()
    data_loader = torch.utils.data.DataLoader(dataset, shuffle=False, batch_size=batchsize, num_workers=workers)
    iterator = iter(data_loader)
    n_correct = 0  # correctly predicted characters (including EOS)
    n_total = 0    # total target characters (including EOS)
    n_current = 0  # running sample index, display only
    loss_avg = utils.averager()
    EOS_TOKEN = 1  # label id marking end of sequence
    for _ in range(len(data_loader)):
        data = next(iterator)  # BUGFIX: iterator.next() is Python 2 only
        cpu_images, cpu_texts = data
        b = cpu_images.size(0)  # actual batch size; < batchsize on the last batch
        image = torch.FloatTensor(b, 3, 1, 1)
        image = image.cuda()
        utils.loadData(image, cpu_images)
        target_variable = converter(alphabet).encode(cpu_texts)
        target_variable = target_variable.cuda()
        encoder_outputs = encoder(image)  # CNN + BiLSTM feature extraction
        decoder_input = target_variable[0].cuda()  # decoding starts from the SOS row
        decoder_hidden = decoder.initHidden(b).cuda()
        loss = 0.0
        # BUGFIX: size per-sample bookkeeping by b, not batchsize, so a short
        # final batch cannot index out of range below.
        decoded_words = [[] for _ in range(b)]
        decoded_labels = [[] for _ in range(b)]
        flag = [True] * b
        for di in range(1, target_variable.shape[0]):  # up to the longest target string
            decoder_output, decoder_hidden, decoder_attention = decoder(decoder_input, decoder_hidden, encoder_outputs)
            loss += criterion(decoder_output, target_variable[di])  # one character per step
            topv, topi = decoder_output.data.topk(1)
            ni = topi.squeeze()
            decoder_input = ni
            for count in range(b):
                if flag[count]:
                    if ni[count] == EOS_TOKEN:
                        decoded_words[count].append('<EOS>')
                        decoded_labels[count].append(EOS_TOKEN)
                        flag[count] = False
                    else:
                        decoded_words[count].append(converter(alphabet).decode(ni[count]))
                        decoded_labels[count].append(ni[count])
        loss_avg.add(loss)
        for count in range(b):
            n_total += len(cpu_texts[count]) + 1  # EOS included
            for pred, target in zip(decoded_labels[count], target_variable[1:, count]):
                if pred == target:
                    n_correct += 1
            texts = cpu_texts[count]
            print('%d Pred:%-20s, GT: %-20s' % (n_current, decoded_words[count], texts))
            n_current += 1
    accuracy = n_correct / float(n_total)
    print('Loss: %f, Accuracy: %f' % (loss_avg.val(), accuracy))
if __name__ == '__main__':
test_dataset = dataset.listDataset(list_file=opt.testList, transform=dataset.resizeNormalize((opt.width, opt.height)))
nclass = len(alphabet) + 3
nc = 1
criterion = torch.nn.NLLLoss()
encoder = model.encoder(opt.height, nc=nc, nh=256)
decoder = model.decoder(nh=256, nclass=nclass, dropout_p=0.1)
if opt.encoder:
print('loading pretrained encoder model from %s' % opt.encoder)
encoder.load_state_dict(torch.load(opt.encoder))
if opt.decoder:
print('loading pretrained decoder model from %s' % opt.decoder)
decoder.load_state_dict(torch.load(opt.decoder))
if opt.loadModelEpoch > 0:
encoder_path = 'model/encoder_%d.pth' % opt.loadModelEpoch
print('loading pretrained encoder model from %s' % encoder_path)
encoder.load_state_dict(torch.load(encoder_path))
decoder_path = 'model/decoder_%d.pth' % opt.loadModelEpoch
print('loading pretrained decoder model from %s' % decoder_path)
decoder.load_state_dict(torch.load(decoder_path))
if opt.cuda:
encoder.cuda()
decoder.cuda()
criterion = criterion.cuda()
print("Testing:")
predict(encoder, decoder, criterion, opt.batchSize, dataset=test_dataset)
|
WANGPeisheng1997/HandwrittenTextRecognition
|
cnn+lstm+attention/test.py
|
test.py
|
py
| 5,492 |
python
|
en
|
code
| 0 |
github-code
|
6
|
24423662765
|
#! /usr/bin/env python3
from typing import Any, Dict
import rospy
import dynamic_reconfigure.server
from example_package_with_dynamic_reconfig.cfg import ExampleDynamicParametersConfig
def dynamic_reconfigure_callback(config: Dict[str, Any], level: Any) -> Dict[str, Any]:
    """dynamic_reconfigure server callback: accept every parameter update unchanged.

    config -- requested parameter values; returning them as-is accepts the update.
    level  -- bitmask of changed parameter groups (unused here).
    """
    return config
if __name__ == "__main__":
    try:
        # Quiet node: only warnings and above are logged.
        rospy.init_node("package_with_dynamic_reconfig", log_level=rospy.WARN)
        # The server invokes the callback with current values on start and on
        # every reconfigure request.
        dynamic_reconfigure_srv = dynamic_reconfigure.server.Server(ExampleDynamicParametersConfig,
                                                                    dynamic_reconfigure_callback)
        rospy.spin()
    except rospy.ROSInterruptException:
        rospy.loginfo("Shutting down.")
|
keivanzavari/dynamic-reconfigure-editor
|
example/example_package_with_dynamic_reconfig/src/example_package_with_dynamic_reconfig/node.py
|
node.py
|
py
| 710 |
python
|
en
|
code
| 0 |
github-code
|
6
|
35362077083
|
import random
def hello():
print("------------------------------")
print(" X and O ")
print("------------------------------")
print(" начнем игру ")
print(" люди против машин ")
print("------------------------------")
print(" формат ввода: x y ")
print(" x - номер строки ")
print(" y - номер столбца ")
print("--------------------1.0-by-ZAV")
def bye():
print()
print("------------------------------")
print(" Спасибо за игру ")
print(" до новых встреч ")
print("--------------------1.0-by-ZAV")
def xando():
    """Print the current 3x3 board with row and column indices."""
    print()
    print(" | 0 | 1 | 2 | ")
    print(" --------------- ")
    for i, line in enumerate(field):
        line_str = " " + str(i) + " | " + ' | '.join(line) + " |" # sorry it's not an f-string
        print(line_str)
    print(" --------------- ")
def refield():
win_line[0] = field[0]
win_line[1] = field[1]
win_line[2] = field[2]
for i in range(3):
win_line[3][i] = field[i][0]
win_line[4][i] = field[i][1]
win_line[5][i] = field[i][2]
win_line[6][i] = field[i][i]
win_line[7][i] = field[i][~i]
def h_con():
    """Human move: keep prompting for "x y" coordinates until a valid empty cell
    is named, then place an "X" there and refresh the win-line views."""
    while True:
        cords = input(" Введите x y: ").split()
        if len(cords) != 2: # expects exactly 2 tokens (a negative number cannot be entered this way)
            print("Что-то пошло не так! :( ")
            print("Пожалуйста введите 2 координаты!")
            continue
        x, y = cords
        if not (x.isdigit()) or not (y.isdigit()):
            print("Что-то пошло не так! :( ")
            print("Пожалуйста введите числа!")
            continue
        x, y = int(x), int(y)
        if x > 2 or y > 2: # negative values are impossible to enter: isdigit() rejects "-"
            print("Что-то пошло не так! :( ")
            print("Пожалуйста введите координаты в диапозоне 0...2")
            continue
        if field[x][y] != " ":
            print("Извените :( ")
            print("Поле уже занято!")
            continue
        field[x][y] = "X"
        refield()
        break
def reline(line: int):
if 0 <= line <= 2:
field[line] = win_line[line]
if line == 3:
for i in range(3):
field[i][0] = win_line[3][i]
if line == 4:
for i in range(3):
field[i][1] = win_line[4][i]
if line == 5:
for i in range(3):
field[i][2] = win_line[5][i]
if line == 6:
for i in range(3):
field[i][i] = win_line[6][i]
if line == 7:
for i in range(3):
field[i][~i] = win_line[7][i]
refield()
def ii_con():
i_xod = 0
for i in range(8):
if win_line[i] == [" ", "0", "0"]:
win_line[i][0] = "0"
i_xod = 1
reline(i)
break
if win_line[i] == ["0", " ", "0"]:
win_line[i][1] = "0"
i_xod = 2
reline(i)
break
if win_line[i] == ["0", "0", " "]:
win_line[i][2] = "0"
i_xod = 3
reline(i)
break
if win_line[i] == [" ", "X", "X"]:
win_line[i][0] = "0"
i_xod = 4
reline(i)
break
if win_line[i] == ["X", " ", "X"]:
win_line[i][1] = "0"
i_xod = 5
reline(i)
break
if win_line[i] == ["X", "X", " "]:
win_line[i][2] = "0"
i_xod = 6
reline(i)
break
if i_xod == 0:
while True:
x = random.randint(0, 2)
y = random.randint(0, 2)
if field[x][y] == " ":
field[x][y] = "0"
refield()
break
def check_win():
    """Return True (announcing the winner) if any of the 8 win lines holds three
    identical marks; otherwise return False."""
    for i in range(8):
        if win_line[i] == ["X", "X", "X"]:
            print()
            print("  !!! ПОБЕДА !!! ")
            print(" Игрок победил компьютер !!! ")
            return True
        if win_line[i] == ["0", "0", "0"]:
            print()
            print("  !!! ПОБЕДА !!! ")
            print(" Компьютер победил игрока !!! ")
            return True
    return False
# тело программы
hello()
field = [[" "] * 3 for i in range(3)]
win_line = [[" "] * 3 for j in range(8)]
xando()
con = 0
while True:
con += 1
if con % 2 == 1:
print(" Ход Игрока!")
h_con()
else:
print(" Ход Компьютера!")
ii_con()
xando()
if check_win():
break
if con == 9:
print()
print("Поздравляю Ничья!")
break
bye()
|
SanSvin/X-and-0
|
Xand0 1.py
|
Xand0 1.py
|
py
| 5,446 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72059016508
|
from random import randint
# Guessing-game skeleton: ask for a guess each round until the player quits,
# then print the (never incremented) score.
play = True
ronde = 1  # round counter ("ronde" is Dutch for round)
bomb = randint(1,1)  # NOTE(review): always 1 and never compared with the guess -- logic looks unfinished
score = 0
while play == True:
    geuss = input("Ronde "+str(ronde)+": Op welk getal denkt U dat de bom ligt ")
    ronde = ronde + 1
    nextRound = input("Wilt nu naar ronde "+str(ronde)+" (Y/N)? ").lower()
    if nextRound == "n":
        play = False
        print(score)
|
Th0mas05/leren-programmeren
|
Klassikale opdrachten/sweeper.py
|
sweeper.py
|
py
| 352 |
python
|
nl
|
code
| 0 |
github-code
|
6
|
27391300473
|
# flake8: NOQA;
import os
import sys
from collections.abc import Generator
import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient
current: str = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(os.path.join(current, "src"))
from database import Database
from main import create_app
@pytest.fixture(scope="session")
def database() -> Generator:
    """Session-wide Database instance pointed at TEST_DATABASE_URL."""
    database = Database(database_url=os.getenv("TEST_DATABASE_URL"))
    yield database
@pytest.fixture
def app(database) -> Generator:
    """FastAPI app with its DB container overridden; the schema is created before
    each test and dropped after it."""
    app: FastAPI = create_app()
    app.container.db.override(database)
    database.create_database()
    yield app
    database.drop_database()
@pytest.fixture
def client(app) -> Generator:
    """HTTP test client bound to the app fixture (runs startup/shutdown events)."""
    with TestClient(app) as client:
        yield client
@pytest.fixture
def db_session(database):
    """Expose the shared Database's session attribute."""
    return database.session
|
ebysofyan/dcentric-health-hometest
|
chatroom-backend/tests/conftest.py
|
conftest.py
|
py
| 868 |
python
|
en
|
code
| 0 |
github-code
|
6
|
21903304999
|
from tkinter import *
import math
class Calculator:
    '''Simple Tkinter calculator window: one entry field plus control buttons.'''

    def get_and_replace(self):
        '''Read the entry field and translate display operators (÷, x) into / and *.'''
        self.expression = self.e.get()
        TRANS = self.expression.maketrans({
            '÷':'/','x':'*'
        })
        # BUGFIX: str.translate returns a new string; the result was discarded before.
        self.expression = self.expression.translate(TRANS)

    def compute(self):
        ''' Compute equation on pressing the equals button'''
        self.get_and_replace()
        try:
            # NOTE: eval() on user input is acceptable only for a toy calculator.
            self.value = eval(self.expression)
        except:
            self.e.delete(0,END)
            self.e.insert(0,'Invalid Input!')
        else:
            self.e.delete(0,END)
            self.e.insert(0,self.value)

    def square_root(self):
        ''' Method to find square-root '''
        self.get_and_replace()
        try:
            # Use built-in function eval() to evaluate the expression
            self.value = eval(self.expression)
        # BUGFIX: `except SyntaxError or NameError` only caught SyntaxError;
        # a tuple is needed to catch both exception types.
        except (SyntaxError, NameError):
            self.e.delete(0,END)
            self.e.insert(0,'Invalid Input!')
        else:
            self.sqrt_val = math.sqrt(self.value)
            self.e.delete(0,END)
            self.e.insert(0,self.sqrt_val)

    def __init__(self,master):
        """ Constructor: set the window title, build the entry field and buttons. """
        master.title('Calculator')
        master.geometry()
        self.e = Entry(master) # Input Field
        self.e.grid(row=0,column=0,columnspan=6,pady=3)
        self.e.focus_set() #Sets focus on the input field
        #Generating buttons
        Button(master,text='=',width=11,height=3).grid(row=4,column=4,columnspan=2)
        Button(master,text='AC',width=5,height=3).grid(row=1,column=4)
        # BUGFIX: 'C' was gridded into the same cell as 'AC', hiding it; place it beside.
        Button(master,text='C',width=5,height=3).grid(row=1, column=5)
root = Tk()
obj = Calculator(root)
root.mainloop()
|
RahulKeluskar/Calculator
|
interface.py
|
interface.py
|
py
| 1,876 |
python
|
en
|
code
| 0 |
github-code
|
6
|
38746906775
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    """Detect a cycle in a singly linked list by remembering visited nodes."""

    # Track the node objects themselves (identity), not their values: values may
    # repeat, but a node object appears twice only if the list loops back.
    def hasCycle(self, head: "ListNode") -> bool:
        """Return True iff the list starting at *head* contains a cycle.

        O(n) time, O(n) extra space. The annotation is a string (forward
        reference) because ListNode is defined by the judge, not this module.
        """
        if head is None or head.next is None:
            # Empty list or a single terminated node cannot form a cycle.
            return False
        visited = set()  # BUGFIX/perf: set membership is O(1) vs O(n) for a list
        curr = head
        while curr is not None:
            if curr in visited:
                return True
            visited.add(curr)
            curr = curr.next
        # BUGFIX: the original fell off the end and returned None implicitly.
        return False
|
HiteshKhandelwal901/120DaysOfLeetCode
|
day4_question141.py
|
day4_question141.py
|
py
| 674 |
python
|
en
|
code
| 0 |
github-code
|
6
|
73111312187
|
from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter, NLTKTextSplitter
import glob
import os
from transformers import AutoModel, AutoTokenizer
from dotenv import load_dotenv
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import Chroma
load_dotenv()
llm_model_name_or_path = os.environ.get("LLM_MODEL_NAME_OR_PATH")
embedding_model_name_or_path = os.environ.get("EMBEDDING_MODEL_NAME_OR_PATH")
vectorstore_persist_directory = os.environ.get("VECTORSTORE_PERSIST_DIRECTORY")
# tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True)
embedding = HuggingFaceEmbeddings(model_name=embedding_model_name_or_path)
text_splitter = CharacterTextSplitter()
file_paths = glob.glob("./source_documents/**/*.txt", recursive=True)
documents = []
for file_path in file_paths:
print(f"{file_path}: Loading")
loader = TextLoader(file_path, autodetect_encoding=True)
docs = loader.load()
print(f"{file_path}: Splitting")
# text_splitter = CharacterTextSplitter.from_huggingface_tokenizer(tokenizer=tokenizer)
# text_splitter = NLTKTextSplitter()
docs = text_splitter.split_documents(docs)
documents.extend(docs)
# page_contents = []
# page_metadatas = []
# for document in texts:
# page_contents.append(document.page_content)
# page_metadatas.append(document.metadata)
# vectors = embedding.embed_documents(texts=page_contents)
print(f"(ALL): Embedding and saving")
db = Chroma(persist_directory=vectorstore_persist_directory, embedding_function=embedding)
db.add_documents(documents=documents)
db.persist()
print(f"(ALL): Done")
|
shaunxu/try-langchain
|
injest.py
|
injest.py
|
py
| 1,703 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17657067303
|
from tkinter import *
import pygame
from tkinter import filedialog
import time
from mutagen.mp3 import MP3
import random
from AudioFile import AudioFile, Song, Podcast
from Exceptions import *
from Playlist import Playlist
from Artist import Artist
from User import User
from LastFmConnection import LastFmConnection
from GUI import GUI
from tkinter import simpledialog
from tkinter import StringVar
from tkinter.scrolledtext import ScrolledText
from urllib.request import urlopen
from suds.client import Client
from PIL import Image, ImageTk
import requests
from io import BytesIO
root = Tk()
root.title('Play Mode')
root.iconbitmap('D:\\computer\\cs@aut\\term2\\AP\\music player\\icons\\title-icon.ico')
root.geometry("500x350")
pygame.mixer.init()
songs_list = []
n_shuffle = None
class MusicPlayer():
_audioList = []
_masterPlaylistName = "Main Library"
def __init__(self):
self._playlists = []
self.currentPlaylist = None
self.currentSong = None
self.currentUser = None
self._audioList.append(Song(None, "Darude Sandstorm", rating=2))
self._audioList.append(Song(None, "Baby Dont Hurt Me", rating=1))
self._audioList.append(Song(None, "I Want To Break Free", rating=4))
self.newPlaylist(self._masterPlaylistName, self._audioList)
self.importSongWithREST("Sweet Mountain River", "Monster Truck")
self.importSongWithREST("Aural Psynapse", "deadmau5")
self.importSongWithREST("Piano Man", "Billy Joel")
self.importSongWithREST("Best Of You", "Foo Fighters")
self.importSongWithREST("One More Time", "Daft Punk")
self.gui = GUI()
self.gui.startGUI(self)
def loadUserInformation(self):
self._audioList = self.currentUser.allAudioFiles
self._playlists = self.currentUser.allPlaylists
self.gui.updatePlaylistBox()
def saveUserInformation(self):
if (self.currentUser != None):
self.currentUser.saveUser(self._audioList, self._playlists)
self.gui.displayMessage("User saved as: " + self.currentUser.name)
else:
self.gui.displayMessage("You must first load or create a new user!")
def newPlaylist(self, name:str = None, songs:list = None):
newPlaylist = Playlist(name)
if (songs != None):
for s in songs:
newPlaylist.addAudio(s)
self._playlists.append(newPlaylist)
print("DEBUG: playlist created:" + newPlaylist.name)
def newSong(self, response):
if (response != None):
newSong = Song(None, response[0])
if (response[1] != ''):
newSong.artist = Artist(response[1])
if (response[2] != '' and int(response[2]) > 0 and int(response[2]) <= 5):
newSong.rating = int(response[2])
self.addAudioToMasterList(newSong)
self.gui.focusMasterPlaylist()
else:
self.gui.displayMessage("Incorrect or Missing Song Information!")
def getPlaylist(self, getN:str):
for p in self._playlists:
if (p.name == getN):
return p
raise NotFoundException("Playlist not found.")
def getAudio(self, sName:str, detail = None):
for s in self._audioList:
if (s.name == sName):
if (detail == None):
return s
elif (type(s) is Song and s.artist.name == str(detail)):
return s
elif (type(s) is Podcast and s.episode == int(detail)):
return s
raise NotFoundException("Audio not found.")
def deleteAudio(self, audio:AudioFile):
for p in self._playlists:
for s in p.songList:
if (s == audio):
p.songList.remove(s)
self._audioList.remove(audio)
self.gui.displayMessage("Song Deleted!")
def addAudioToMasterList(self, audio:AudioFile):
self._audioList.append(audio)
self.getPlaylist(self._masterPlaylistName).addAudio(audio)
def savePlaylistXML(self):
root = ET.Element("root")
for song in self.currentPlaylist.songList:
song.addXML(root)
print(ET.tostring(root, encoding='utf8').decode('utf8'))
tree = ET.ElementTree(root)
tree.write((self.currentPlaylist.name + ".xml"))
self.gui.displayMessage("Playlist successfully exported!")
def loadPlaylistXML(self, name):
try:
self.getPlaylist(name)
self.gui.displayMessage("Playlist already created with that name.")
except NotFoundException:
playlistTree = ET.parse(name + ".xml")
root = playlistTree.getroot()
newPlaylist = Playlist(name)
for child in root:
try:
song = self.getAudio(child[0].text, child[2].text)
newPlaylist.addAudio(song)
except NotFoundException:
song = self.newSong([child[0].text, child[2].text, child[1].text])
self.addAudioToMasterList(song)
newPlaylist.addAudio(self.getAudio(child[0].text, child[2].text))
self._playlists.append(newPlaylist)
print("DEBUG: playlist created:" + newPlaylist.name)
self.gui.updatePlaylistBox()
self.gui.displayMessage("Playlist " + name + " successfully imported!")
def importSongWithREST(self, songTitle, songArtist):
try:
c = LastFmConnection()
details = c.getSongDetails(songTitle, songArtist)
except LastFMException as e:
return ( "Error: LastFM error code " + str(e.code) )
except GenericConnectionException:
return ("Error: Unable to establish connection..")
newSong = Song(details[0], details[1], Artist(details[2]))
self.addAudioToMasterList(newSong)
return ("Song successfully imported!")
@property
def playlists(self):
return self._playlists
@playlists.setter
def playlists(self, playlists:str):
self._playlists = playlists
@property
def audioList(self):
return self._playlists
@audioList.setter
def audioList(self, audioList:str):
self._audioList = audioList
@property
def masterPlaylistName(self):
return self._masterPlaylistName
@masterPlaylistName.setter
def masterPlaylistName(self, masterPlaylistName:str):
self._masterPlaylistName = masterPlaylistName
mp = MusicPlayer()
class Song(object):
    """Plain record of a song's title, artist and genre, with read accessors."""

    def __init__(self, title, artist, genre):
        # Store all three descriptors in one unpacking assignment.
        self.title, self.artist, self.genre = title, artist, genre

    def get_title(self):
        """Return the song title."""
        return self.title

    def get_artist(self):
        """Return the performing artist."""
        return self.artist

    def get_genre(self):
        """Return the genre label."""
        return self.genre
def choose_directory():
global folder_selected
folder_selected = filedialog.askdirectory()
def add_song():
song = filedialog.askopenfilename(initialdir='audio/', title="Choose A Song", filetypes=(("mp3 Files", "*.mp3"), ("WAV Files","*.WAV"),))
song = song.replace(folder_selected, "")
song = song.replace("/", "")
song = song.replace(".mp3", "")
song_box.insert(END, song)
songs_list.append(song)
def add_many_songs():
songs = filedialog.askopenfilenames(initialdir='audio/', title="Choose A Song", filetypes=(("mp3 Files", "*.mp3"), ))
for song in songs:
song = song.replace(folder_selected, "")
song = song.replace("/", "")
song = song.replace(".mp3", "")
songs_list.append(song)
song_box.insert(END, song)
def play_time():
    """Refresh the status bar once per second with elapsed time vs song length."""
    current_time = pygame.mixer.music.get_pos() / 1000
    converted_current_time = time.strftime('%M:%S', time.gmtime(current_time))
    song = song_box.get(ACTIVE)
    # BUGFIX: the path separator between the folder and the file name was missing.
    song = folder_selected + '/' + song + '.mp3'
    song_mut = MP3(song)
    # BUGFIX: mutagen exposes the duration as `info.length` (lowercase), not `.Length`.
    song_length = song_mut.info.length
    converted_song_length = time.strftime('%M:%S', time.gmtime(song_length))
    status_bar.config(text= f'Time Elapsed: {converted_current_time} of {converted_song_length} ')
    # Re-schedule through the Tk event loop; the original's direct play_time()
    # call recursed/ran before status_bar existed and has been removed.
    status_bar.after(1000, play_time)
def play():
song = song_box.get(ACTIVE)
song = folder_selected + '/' + song + '.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops=0)
def play_my_song(song):
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops=0)
def stop():
pygame.mixer.music.stop()
song_box.selection_clear(ACTIVE)
status_bar.config(text='')
def next_song():
next_one = song_box.curselection()
next_one = next_one[0]+1
song = song_box.get(next_one)
song = folder_selected + '/' + song + '.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops=0)
song_box.selection_clear(0, END)
song_box.activate(next_one)
song_box.selection_set(next_one, last=None)
def previous_song():
next_one = song_box.curselection()
next_one = next_one[0]-1
song = song_box.get(next_one)
song = folder_selected + '/' + song + '.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops=0)
song_box.selection_clear(0, END)
song_box.activate(next_one)
song_box.selection_set(next_one, last=None)
global paused
paused = False
def delete_song():
song_box.delete(ANCHOR)
pygame.mixer.music.stop()
def delete_all_songs():
song_box.delete(0, END)
pygame.mixer.music.stop()
def pause(is_paused):
global paused
paused = is_paused
if paused:
pygame.mixer.music.unpause()
paused = False
else:
pygame.mixer.music.pause()
paused = True
def shuffle():
    """Play the queued songs in random order; a new song starts immediately."""
    # when you click on shuffle button a new song will play immediately
    play()
    if not songs_list:
        return  # nothing queued yet
    for _ in range(len(songs_list)):
        # BUGFIX: random.randint's upper bound is inclusive, so
        # len(songs_list)+1 indexed past the end of the list (IndexError).
        songs_number = random.randint(0, len(songs_list) - 1)
        song = folder_selected + '/' + songs_list[songs_number] + '.mp3'
        play_my_song(song)
def repeat():
song = song_box.get(ACTIVE)
song = folder_selected + '/' + song + '.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(-1)
def division_by_artist():
pass
#in joda krdnst ke mikha
def your_playlist():
pass
# Queue listbox showing the songs of the selected folder.
song_box = Listbox(root, bg="black", fg="red", width=60, selectbackground="red", selectforeground="black")
song_box.pack(pady=20)

# Player control icons.
back_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/back.png')
stop_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/stop.png')
play_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/play.png')
pause_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/pause.png')
next_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/next.png')
shuffle_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/shuffle.png')
repeat_img = PhotoImage(file='D:/computer/cs@aut/term2/AP/music player/icons/repeat.png')

# Row of transport-control buttons.
controls_frame = Frame(root, pady=40)
controls_frame.pack()
back_button = Button(controls_frame, image=back_img, borderwidth=0, command=previous_song)
next_button = Button(controls_frame, image=next_img, borderwidth=0, command=next_song)
play_button = Button(controls_frame, image=play_img, borderwidth=0, command=play)
pause_button = Button(controls_frame, image=pause_img, borderwidth=0, command=lambda: pause(paused))
stop_button = Button(controls_frame, image=stop_img, borderwidth=0, command=stop)
shuffle_button = Button(controls_frame, image=shuffle_img, borderwidth=0, command=shuffle)
repeat_button = Button(controls_frame, image=repeat_img, borderwidth=0, command=repeat)
back_button.grid(row=0, column=0, padx=10, pady=10)
stop_button.grid(row=0, column=1, padx=10, pady=10)
play_button.grid(row=0, column=2, padx=10, pady=10)
pause_button.grid(row=0, column=3, padx=10, pady=10)
next_button.grid(row=0, column=4, padx=10, pady=10)
shuffle_button.grid(row=0, column=5, padx=10, pady=10)
repeat_button.grid(row=0, column=6, padx=10, pady=10)

# Menu bar.
my_menu = Menu(root)
root.config(menu=my_menu)

# Directory chooser.
choose_directory_menu = Menu(my_menu)
my_menu.add_cascade(label="Directory", menu=choose_directory_menu)
choose_directory_menu.add_command(label="Choose Directory", command=choose_directory)

# Add songs to the queue.
add_song_menu = Menu(my_menu)
my_menu.add_cascade(label="Add Songs", menu=add_song_menu)
add_song_menu.add_command(label="Add A Song To Queue", command=add_song)
add_song_menu.add_command(label="Add Many Songs To Queue", command=add_many_songs)

# Remove songs from the queue.
remove_song_menu = Menu(my_menu)
my_menu.add_cascade(label="remove Songs", menu=remove_song_menu)
remove_song_menu.add_command(label="delete A Song from Queue", command=delete_song)
remove_song_menu.add_command(label="delete All Songs from Queue", command=delete_all_songs)

# Status bar at the bottom of the window.
status_bar = Label(root, text='', bd=1, relief=GROOVE, anchor=E)
status_bar.pack(fill=X, side=BOTTOM, ipady=2)

# Artists menu.
# BUG FIX: this cascade previously attached add_song_menu instead of its own
# menu; the variable name also shadowed the division_by_artist() function.
artists_menu = Menu(my_menu)
my_menu.add_cascade(label="Artists", menu=artists_menu)
artists_menu.add_command(label="Artists", command=Song)

# Favourite-music menu.
# BUG FIX: same problem -- the cascade pointed at add_song_menu and the
# variable shadowed the your_playlist() function.
fav_music_menu = Menu(my_menu)
my_menu.add_cascade(label="fav music", menu=fav_music_menu)
fav_music_menu.add_command(label="select you're fav music", command=MusicPlayer)

root.mainloop()
|
ydamirkol/music-player
|
play mode3.py
|
play mode3.py
|
py
| 13,889 |
python
|
en
|
code
| 0 |
github-code
|
6
|
6713641650
|
"""
Utilities for dictionaries of xy tuple values.
"""
from __future__ import print_function, division

import math
import random
from collections import defaultdict
def center(pos, dimensions):
    """Translate every position in-place so the bounding box of all points
    is centered within `dimensions` = (width, height)."""
    xs = [p[0] for p in pos.values()]
    ys = [p[1] for p in pos.values()]
    shift_x = dimensions[0] / 2. - (max(xs) + min(xs)) / 2
    shift_y = dimensions[1] / 2. - (max(ys) + min(ys)) / 2
    for key, (x, y) in pos.items():
        pos[key] = (x + shift_x, y + shift_y)
def scale_offset(pos, scale=1, dx=0, dy=0):
    """Scale each position by `scale` then translate by (dx, dy), in place.

    Values are replaced with two-element lists [x, y].
    """
    for key, (px, py) in pos.items():
        pos[key] = [px * scale + dx, py * scale + dy]
def fix_overlapping(pos, r=10):
    """Jitter positions that collide on the same integer grid cell.

    Positions are bucketed by their int-truncated (x, y); every member of a
    bucket holding more than one key is nudged by a random offset in [0, r)
    on each axis, in place.  The RNG is seeded with a fixed constant so the
    result is reproducible.
    """
    random.seed(0xDABBAD00)
    buckets = defaultdict(set)
    rand = random.random
    for key, point in pos.items():
        buckets[(int(point[0]), int(point[1]))].add(key)
    for bucket_keys in buckets.values():
        if len(bucket_keys) < 2:
            continue
        for key in bucket_keys:
            x, y = pos[key]
            pos[key] = (x + rand() * r, y + rand() * r)
def get_center(pos):
    """Return the centroid (mean x, mean y) of all positions in `pos`.

    BUG FIX: the original body referenced an undefined ``self`` (leftover
    from a method, iterating ``self.world.bodies``) and raised NameError on
    every call.  This version computes the centroid of the `pos` dict it is
    actually given.
    """
    cx = sum(p[0] for p in pos.values()) / len(pos)
    cy = sum(p[1] for p in pos.values()) / len(pos)
    return cx, cy
def rotate(pos, angle):
    """Return a new dict with every position rotated by `angle` radians
    about the centroid of all positions (`pos` itself is not modified).

    BUG FIX: the original called an undefined ``rotate_point`` helper and
    never used ``angle`` at all; this version applies the standard 2-D
    rotation matrix about the centroid.
    """
    cx = sum(p[0] for p in pos.values()) / len(pos)
    cy = sum(p[1] for p in pos.values()) / len(pos)
    cos_a = math.cos(angle)
    sin_a = math.sin(angle)
    rotated = {}
    for ID, (x, y) in pos.items():
        dx = x - cx
        dy = y - cy
        rotated[ID] = (cx + dx * cos_a - dy * sin_a,
                       cy + dx * sin_a + dy * cos_a)
    return rotated
|
joel-simon/evo_floorplans
|
floor_plans/pos_utils.py
|
pos_utils.py
|
py
| 1,635 |
python
|
en
|
code
| 84 |
github-code
|
6
|
34508602000
|
def merge(left, right):
    """Merge two already-sorted lists into one sorted list.

    Stable: on ties the element from `left` is taken first.  Cleanup of the
    original: removed the unused counter ``k``, the commented-out
    pre-allocation, and the two always-true tail conditions (``i <=
    len(left)`` can never be false after the loop) -- the tail ``extend``
    calls are simply unconditional now.
    """
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # At most one of these slices is non-empty.
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
def merge_sort(a):
    """Sort `a` ascending with top-down merge sort and return the result.

    The input list is not mutated (sub-lists are built by slicing); lists of
    length <= 1 are returned as-is.
    """
    if len(a) <= 1:
        return a
    half = len(a) // 2
    return merge(merge_sort(a[:half]), merge_sort(a[half:]))
if __name__ == "__main__":
    # Demo: sort a sample list and print the result.
    a= [7, 2, 1, 3, 8, 4, 9, 1, 2, 6]
    print(merge_sort(a))
|
ved93/deliberate-practice-challenges
|
code-everyday-challenge/n136_repeat_program.py
|
n136_repeat_program.py
|
py
| 839 |
python
|
en
|
code
| 0 |
github-code
|
6
|
28194386524
|
from __future__ import print_function, division
import os
import time
import random
import numpy as np
from base import BaseModel
from replay_memory import ReplayMemory
from utils import save_pkl, load_pkl
import tensorflow as tf
import matplotlib.pyplot as plt
class Agent(BaseModel):
    """DQN agent that assigns a resource block (RB) and a power level to every
    V2V link in the simulated vehicular environment.

    Classic DQN ingredients are visible below: an online "prediction" network,
    a periodically synchronised target network, an experience-replay memory,
    an epsilon-greedy exploration policy, and (when ``double_q`` is True)
    double-Q learning targets.  Built on a TensorFlow 1.x graph/session API.
    """

    def __init__(self, config, environment, sess):
        """Store hyper-parameters, create the replay memory and build the networks."""
        self.sess = sess
        self.weight_dir = 'weight'
        self.record_dir = 'record'
        self.recordfile_name = ''
        self.now_time = time.strftime("%m-%d-%H-%M",time.localtime(time.time()))
        self.env = environment
        model_dir = './Model/a.model'
        self.memory = ReplayMemory(model_dir)
        self.max_step = 100000
        # The number of RBs and the number of vehicles.
        self.RB_number = 20
        self.num_vehicle = len(self.env.vehicles)
        # The following two variables are used to store the transmission power
        # and channel selection of each V2V link.
        # One is used for testing, the other for training.
        self.action_all_with_power = np.zeros([self.num_vehicle, 3, 2],
                                              dtype='int32')  # actions taken by V2V links, with power
        self.action_all_with_power_training = np.zeros([self.num_vehicle, 3, 2],
                                                       dtype='int32')
        self.reward = []
        # Settings related to the learning rate (exponentially decayed, floored).
        self.learning_rate = 0.01  # 0.01
        self.learning_rate_minimum = 0.0001
        self.learning_rate_decay = 0.96
        self.learning_rate_decay_step = 500000
        # Every 100 steps, update the target-Q network.
        self.target_q_update_step = 100  # 100
        # Discount factor.
        self.discount = 0.5
        self.double_q = True
        self.build_dqn()
        # The number of V2V links.
        self.V2V_number = 3 * len(self.env.vehicles)  # every vehicle communicates with 3 neighbors
        self.training = True

    # Stores the transmit power and channel selected by each V2V link
    # in the evaluation <"action"> matrix.
    def merge_action(self, idx, action):
        """Decode flat `action` (0..59) into (RB index, power level) for link
        `idx` = (vehicle, neighbour) and store it in the evaluation table."""
        self.action_all_with_power[idx[0], idx[1], 0] = action % self.RB_number
        self.action_all_with_power[idx[0], idx[1], 1] = int(np.floor(action / self.RB_number))

    def get_state(self, idx):
        """Build the observation vector for V2V link `idx` = (vehicle, neighbour).

        Concatenates (each rescaled to roughly unit range): the V2I channel,
        the V2V interference, the V2V channel, the two eavesdropper channels,
        and a 0/1 map of which RBs the neighbours currently occupy.
        """
        # ===============================
        #  Get State from the environment
        # ===============================
        vehicle_number = len(self.env.vehicles)
        V2V_channel = (self.env.V2V_channels_with_fastfading[idx[0], self.env.vehicles[idx[0]].destinations[idx[1]],
                       :] - 80) / 60
        V2I_channel = (self.env.V2I_channels_with_fastfading[idx[0], :] - 80) / 60
        Eve_channel_I = (self.env.Eve_channels_with_fastfading_I[idx[0], :] - 80) / 60
        Eve_channel_V = (self.env.Eve_channels_with_fastfading_V[idx[0], self.env.vehicles[idx[0]].destinations[idx[1]],
                         :] - 80) / 60
        V2V_interference = (-self.env.V2V_Interference_all[idx[0], idx[1], :] - 60) / 60
        # <"NeiSelection"> represents RB occupation by neighbouring links.
        NeiSelection = np.zeros(self.RB_number)
        for i in range(3):
            for j in range(3):
                if self.training:
                    NeiSelection[self.action_all_with_power_training[self.env.vehicles[idx[0]].neighbors[i], j, 0]] = 1
                else:
                    NeiSelection[self.action_all_with_power[self.env.vehicles[idx[0]].neighbors[i], j, 0]] = 1
        # Also mark the RBs chosen by this vehicle's other two links.
        for i in range(3):
            if i == idx[1]:
                continue
            if self.training:
                if self.action_all_with_power_training[idx[0], i, 0] >= 0:
                    NeiSelection[self.action_all_with_power_training[idx[0], i, 0]] = 1
            else:
                if self.action_all_with_power[idx[0], i, 0] >= 0:
                    NeiSelection[self.action_all_with_power[idx[0], i, 0]] = 1
        # State = V2I_channel, V2V_interference, V2V_channel, Eve_channel_I, Eve_channel_V, NeiSelection
        return np.concatenate((V2I_channel, V2V_interference, V2V_channel, Eve_channel_I, Eve_channel_V, NeiSelection))

    def predict(self, s_t, step, test_ep=False):
        """Epsilon-greedy action selection for state `s_t`.

        The exploration probability anneals as 1/(step/1e6 + 1); when
        `test_ep` is True the greedy (argmax-Q) action is always taken.
        """
        # ==========================
        #  Select actions
        # ==========================
        ep = 1 / (step / 1000000 + 1)
        # Random (exploration) or greedy (exploitation) selection.
        if random.random() < ep and test_ep is False:  # epsilon balances exploration and exploitation
            # Each number in 0..59 represents one (RB, power) choice.
            action = np.random.randint(60)  # 20 RBs x 3 power levels
        else:
            action = self.q_action.eval({self.s_t: [s_t]})[0]
        return action

    # Collects data for training and periodically trains a mini-batch.
    def observe(self, prestate, state, reward, action):
        """Store one transition in replay memory; every 50 steps train a
        mini-batch, and near every `target_q_update_step`-th step sync the
        target network."""
        # -----------
        # Collect Data for Training and Experience replay
        # ---------
        self.memory.add(prestate, state, reward, action)  # add state, action and reward to the memory
        # print(self.step)
        if self.step > 0:
            if self.step % 50 == 0:
                # print('Training')
                self.q_learning_mini_batch()  # train a mini batch
                # self.save_weight_to_pkl()
            if self.step % self.target_q_update_step == self.target_q_update_step - 1:
                # print("Update Target Q network:")
                self.update_target_q_network()  # update the target-Q network parameters

    def save_record(self, record_content):
        """Append `record_content` to a per-run results file under `record_dir`.

        The file name encodes the vehicle count, run start time, and which DQN
        variants (double / dueling) are enabled.

        NOTE(review): ``self.dueling_q`` is read here but never assigned in
        ``__init__``; unless ``BaseModel`` defines it, the first call raises
        AttributeError -- confirm.
        """
        if not os.path.exists(self.record_dir):
            os.makedirs(self.record_dir)
        if(self.recordfile_name == ''):
            if(self.double_q == True and self.dueling_q == True):
                self.recordfile_name = "double_q&dueling_q"
            else:
                if(self.double_q == True):
                    self.recordfile_name = "double_q"
                else:
                    if(self.dueling_q == True):
                        self.recordfile_name = "dueling_q"
                    else:
                        self.recordfile_name = "normal_q"
        with open(os.path.join(self.record_dir, "V-num-%d_%s-%s.txt" % \
                (self.num_vehicle, self.now_time, self.recordfile_name)), 'a') as f:
            f.write(record_content)

    # Main training loop with periodic evaluation.
    def train(self):
        """Run the training loop (40000 steps).

        Every 2000 steps the environment is re-randomised; every 2000 steps
        the greedy policy is also evaluated over several fresh games and the
        mean V2V/V2I efficiency and security rate are printed and logged.
        """
        num_game, self.update_count, ep_reward = 0, 0, 0.
        total_reward, self.total_loss, self.total_q = 0., 0., 0.
        max_avg_ep_reward = 0
        ep_reward, actions = [], []
        mean_big = 0
        number_big = 0
        mean_not_big = 0
        number_not_big = 0
        print(self.num_vehicle)
        #!Step1: Start a new simulation environment
        self.env.new_random_game(self.num_vehicle)  # episode
        for self.step in (range(0, 40000)):  # need more configuration
            #!Step2: Begin training; the total number of steps is 40000
            # initialize some variables
            if self.step == 0:
                num_game, self.update_count, ep_reward = 0, 0, 0.
                total_reward, self.total_loss, self.total_q = 0., 0., 0.
                ep_reward, actions = [], []
            # Restart a new simulation environment
            if (self.step % 2000 == 1):
                self.env.new_random_game(self.num_vehicle)
            print(self.step)
            state_old = self.get_state([0, 0])
            # print("state", state_old)
            self.training = True
            for k in range(1):
                for i in range(len(self.env.vehicles)):
                    for j in range(3):
                        #!Step3: Get training data for each pair of V2V links and train
                        # Includes <"state_old, state_new, reward_train, action">
                        # Note: the mini-batch training happens inside <"observe">
                        state_old = self.get_state([i, j])
                        action = self.predict(state_old, self.step)
                        # self.merge_action([i,j], action)
                        self.action_all_with_power_training[i, j, 0] = action % self.RB_number
                        self.action_all_with_power_training[i, j, 1] = int(np.floor(action / self.RB_number))
                        reward_train = self.env.act_for_training(self.action_all_with_power_training, [i, j])
                        state_new = self.get_state([i, j])
                        self.observe(state_old, state_new, reward_train, action)
            if (self.step % 2000 == 0) and (self.step > 0):
                #!Step4: Testing (greedy policy, fresh games)
                self.training = False
                number_of_game = 10
                if (self.step % 10000 == 0) and (self.step > 0):
                    number_of_game = 50
                if (self.step == 38000):
                    number_of_game = 100
                V2V_Eifficency_list = np.zeros(number_of_game)
                V2I_Eifficency_list = np.zeros(number_of_game)
                V2V_security_rate_list = np.zeros(number_of_game)
                for game_idx in range(number_of_game):
                    self.env.new_random_game(self.num_vehicle)
                    test_sample = 200
                    Eifficency_V2V = []
                    Eifficency_V2I = []
                    Security_rate = []
                    print('test game idx:', game_idx)
                    for k in range(test_sample):
                        action_temp = self.action_all_with_power.copy()
                        for i in range(len(self.env.vehicles)):
                            self.action_all_with_power[i, :, 0] = -1
                            # Serve this vehicle's links in order of remaining time budget.
                            sorted_idx = np.argsort(self.env.individual_time_limit[i, :])
                            for j in sorted_idx:
                                state_old = self.get_state([i, j])
                                action = self.predict(state_old, self.step, True)
                                self.merge_action([i, j], action)
                            if i % (len(self.env.vehicles) / 10) == 1:  # add 10
                                action_temp = self.action_all_with_power.copy()
                                V2V_reward, V2I_reward, V2V_security_rate = self.env.act_asyn(action_temp)
                                Eifficency_V2V.append(np.sum(V2V_reward))
                                Eifficency_V2I.append(np.sum(V2I_reward))
                                Security_rate.append(np.sum(V2V_security_rate))
                        # print("actions", self.action_all_with_power)
                    V2V_Eifficency_list[game_idx] = np.mean(np.asarray(Eifficency_V2V))
                    V2I_Eifficency_list[game_idx] = np.mean(np.asarray(Eifficency_V2I))
                    V2V_security_rate_list[game_idx] = np.mean(np.asarray(Security_rate))
                    # print("action is", self.action_all_with_power)
                    # print('failure probability is, ', percent)
                # print('action is that', action_temp[0,:])
                #!Step5: Save weight parameters and log the evaluation results
                self.save_weight_to_pkl()
                print('The number of vehicle is ', len(self.env.vehicles))
                print('Mean of the V2V Eifficency is that ', np.mean(V2V_Eifficency_list))
                print('Mean of the V2I Eifficency is that ', np.mean(V2I_Eifficency_list))
                print('Mean of V2V Security Rate is that ', np.mean(V2V_security_rate_list))
                self.save_record("V2V Efficiency: %f \tV2I Efficiency: %f\tSecurity Rate: %f\tCompound Efficiency: %f\tStep : %d\n" % \
                        (np.mean(V2V_Eifficency_list),np.mean(V2I_Eifficency_list),\
                         np.mean(V2V_security_rate_list)/self.num_vehicle,\
                         0.1 * np.mean(V2I_Eifficency_list) + 0.9 * np.mean(V2V_Eifficency_list), self.step))
                # print('Test Reward is ', np.mean(test_result))

    def q_learning_mini_batch(self):
        """Sample a batch from replay memory and run one optimiser step.

        Targets: r + discount * Q_target(s', argmax_a Q_online(s', a)) when
        double-Q is enabled, otherwise r + discount * max_a Q_target(s', a).
        """
        # Train the DQN model.
        s_t, s_t_plus_1, action, reward = self.memory.sample()
        t = time.time()
        if self.double_q:  # double Q learning
            pred_action = self.q_action.eval({self.s_t: s_t_plus_1})
            q_t_plus_1_with_pred_action = self.target_q_with_idx.eval({self.target_s_t: s_t_plus_1,
                                                                       self.target_q_idx: [[idx, pred_a] for idx, pred_a
                                                                                           in enumerate(pred_action)]})
            target_q_t = self.discount * q_t_plus_1_with_pred_action + reward
        else:
            q_t_plus_1 = self.target_q.eval({self.target_s_t: s_t_plus_1})
            max_q_t_plus_1 = np.max(q_t_plus_1, axis=1)
            target_q_t = self.discount * max_q_t_plus_1 + reward
        _, q_t, loss, w = self.sess.run([self.optim, self.q, self.loss, self.w],
                                        {self.target_q_t: target_q_t, self.action: action, self.s_t: s_t,
                                         self.learning_rate_step: self.step})  # train the network
        print('loss is ', loss)
        self.total_loss += loss
        self.total_q += q_t.mean()
        self.update_count += 1

    def build_dqn(self):
        """Build the TF1 graph: online net, target net, copy ops and optimizer."""
        # --- Building the DQN -------
        self.w = {}
        self.t_w = {}
        initializer = tf.truncated_normal_initializer(0, 0.02)
        activation_fn = tf.nn.relu
        n_hidden_1 = 500
        n_hidden_2 = 250
        n_hidden_3 = 120
        n_input = 120
        n_output = 60
        # The DQN network weights and biases.
        def encoder(x):
            """Four fully-connected ReLU layers: 120 -> 500 -> 250 -> 120 -> 60."""
            weights = {
                'encoder_h1': tf.Variable(tf.truncated_normal([n_input, n_hidden_1], stddev=0.1)),
                'encoder_h2': tf.Variable(tf.truncated_normal([n_hidden_1, n_hidden_2], stddev=0.1)),
                'encoder_h3': tf.Variable(tf.truncated_normal([n_hidden_2, n_hidden_3], stddev=0.1)),
                'encoder_h4': tf.Variable(tf.truncated_normal([n_hidden_3, n_output], stddev=0.1)),
                'encoder_b1': tf.Variable(tf.truncated_normal([n_hidden_1], stddev=0.1)),
                'encoder_b2': tf.Variable(tf.truncated_normal([n_hidden_2], stddev=0.1)),
                'encoder_b3': tf.Variable(tf.truncated_normal([n_hidden_3], stddev=0.1)),
                'encoder_b4': tf.Variable(tf.truncated_normal([n_output], stddev=0.1)),
            }
            layer_1 = tf.nn.relu(tf.add(tf.matmul(x, weights['encoder_h1']), weights['encoder_b1']))
            layer_2 = tf.nn.relu(tf.add(tf.matmul(layer_1, weights['encoder_h2']), weights['encoder_b2']))
            layer_3 = tf.nn.relu(tf.add(tf.matmul(layer_2, weights['encoder_h3']), weights['encoder_b3']))
            layer_4 = tf.nn.relu(tf.add(tf.matmul(layer_3, weights['encoder_h4']), weights['encoder_b4']))
            return layer_4, weights
        # Online network, used for prediction.
        with tf.variable_scope('prediction'):
            self.s_t = tf.placeholder('float32', [None, n_input])
            self.q, self.w = encoder(self.s_t)
            self.q_action = tf.argmax(self.q, dimension=1)
        # Target network, used to compute target-Q values.
        with tf.variable_scope('target'):
            self.target_s_t = tf.placeholder('float32', [None, n_input])
            self.target_q, self.target_w = encoder(self.target_s_t)
            self.target_q_idx = tf.placeholder('int32', [None, None], 'output_idx')
            self.target_q_with_idx = tf.gather_nd(self.target_q, self.target_q_idx)
        # Assign ops that copy online weights into the target network.
        with tf.variable_scope('pred_to_target'):
            self.t_w_input = {}
            self.t_w_assign_op = {}
            for name in self.w.keys():
                print('name in self w keys', name)
                self.t_w_input[name] = tf.placeholder('float32', self.target_w[name].get_shape().as_list(), name=name)
                self.t_w_assign_op[name] = self.target_w[name].assign(self.t_w_input[name])
        def clipped_error(x):
            """Huber-style loss: squared error below 1, absolute error above.

            The try/except handles older TF versions where tf.select existed
            instead of tf.where.
            """
            try:
                return tf.select(tf.abs(x) < 1.0, 0.5 * tf.square(x), tf.abs(x) - 0.5)
            except:
                return tf.where(tf.abs(x) < 1.0, 0.5 * tf.square(x), tf.abs(x) - 0.5)
        # Optimizer: mean-squared TD error, RMSProp with decayed learning rate.
        with tf.variable_scope('optimizer'):
            self.target_q_t = tf.placeholder('float32', None, name='target_q_t')
            self.action = tf.placeholder('int32', None, name='action')
            action_one_hot = tf.one_hot(self.action, n_output, 1.0, 0.0, name='action_one_hot')
            q_acted = tf.reduce_sum(self.q * action_one_hot, reduction_indices=1, name='q_acted')
            self.delta = self.target_q_t - q_acted
            self.global_step = tf.Variable(0, trainable=False)
            self.loss = tf.reduce_mean(tf.square(self.delta), name='loss')
            self.learning_rate_step = tf.placeholder('int64', None, name='learning_rate_step')
            self.learning_rate_op = tf.maximum(self.learning_rate_minimum,
                                               tf.train.exponential_decay(self.learning_rate, self.learning_rate_step,
                                                                          self.learning_rate_decay_step,
                                                                          self.learning_rate_decay, staircase=True))
            self.optim = tf.train.RMSPropOptimizer(self.learning_rate_op, momentum=0.95, epsilon=0.01).minimize(
                self.loss)
        tf.initialize_all_variables().run()
        self.update_target_q_network()

    def update_target_q_network(self):
        """Copy every online-network weight into the target network."""
        for name in self.w.keys():
            self.t_w_assign_op[name].eval({self.t_w_input[name]: self.w[name].eval()})

    # These two functions save and load the weight parameters.
    def save_weight_to_pkl(self):
        """Pickle each online-network weight tensor into `weight_dir`."""
        if not os.path.exists(self.weight_dir):
            os.makedirs(self.weight_dir)
        for name in self.w.keys():
            save_pkl(self.w[name].eval(), os.path.join(self.weight_dir, "%s.pkl" % name))

    def load_weight_from_pkl(self):
        """Load pickled weights into the online network, then sync the target net."""
        with tf.variable_scope('load_pred_from_pkl'):
            self.w_input = {}
            self.w_assign_op = {}
            for name in self.w.keys():
                self.w_input[name] = tf.placeholder('float32')
                self.w_assign_op[name] = self.w[name].assign(self.w_input[name])
        for name in self.w.keys():
            self.w_assign_op[name].eval({self.w_input[name]: load_pkl(os.path.join(self.weight_dir, "%s.pkl" % name))})
        self.update_target_q_network()
|
BandaidZ/OptimizationofSEandEEBasedonDRL
|
agent.py
|
agent.py
|
py
| 18,757 |
python
|
en
|
code
| 13 |
github-code
|
6
|
12657115952
|
# level: medium
# Approach: breadth-first (level-order) traversal; compute the maximum width
# of each level of the binary tree and take the overall maximum.
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from Queue import Queue
class Solution(object):
    """LeetCode 662 -- maximum width of a binary tree, via BFS with
    heap-style position labels written into each node's `val`."""

    def widthOfBinaryTree(self, root):
        """
        :type root: TreeNode
        :rtype: int

        Overwrites each node's `val` with its position index in the tree
        (root = 1; left child = 2*pos - 1, right child = 2*pos), then for
        every level computes rightmost - leftmost + 1 over the children it
        produced, returning the maximum.  NOTE: destroys the original node
        values, and assumes `root` is not None (root.val is set immediately).
        """
        maxdis = 1
        q = Queue()
        # if root.left == root.right == None: return 1
        root.val = 1
        q.put(root)
        while not q.empty():
            # Smallest / largest position index among this level's children.
            left, right = float("inf"), float("-inf")
            n = q.qsize()
            for i in range(n):
                node = q.get()
                if node.left != None:
                    node.left.val = node.val * 2 - 1
                    if node.left.val > right:
                        right = node.left.val
                    if node.left.val < left:
                        left = node.left.val
                    q.put(node.left)
                if node.right != None:
                    node.right.val = node.val * 2
                    if node.right.val < left:
                        left = node.right.val
                    if node.right.val > right:
                        right = node.right.val
                    q.put(node.right)
            # A childless last level leaves left=inf / right=-inf, so
            # right - left + 1 is -inf and never wins the max().
            maxdis = max(maxdis, right - left + 1)
        return maxdis
if __name__ == '__main__':
    ans = Solution()
    # NOTE(review): widthOfBinaryTree expects a TreeNode root, but a plain
    # list is passed here, so this demo call raises AttributeError as written.
    print(ans.widthOfBinaryTree([1,3,2,5,3,None,9]))
|
PouringRain/leetcode
|
662.py
|
662.py
|
py
| 1,534 |
python
|
en
|
code
| 1 |
github-code
|
6
|
20910138172
|
from constants import *
from game.scripting.action import Action
class DrawBricksAction(Action):
    """Script action that renders every brick in the cast via the video service."""

    def __init__(self, video_service):
        """Remember the video service used for all drawing calls."""
        self._video_service = video_service

    def execute(self, cast, script, callback):
        """Draw each brick: its debug bounding rectangle (when enabled),
        then the current animation frame at the brick's position."""
        for brick in cast.get_actors(BRICK_GROUP):
            body = brick.get_body()
            if brick.is_debug():
                self._video_service.draw_rectangle(body.get_rectangle(), PURPLE)
            frame = brick.get_animation().next_image()
            self._video_service.draw_image(frame, body.get_position())
|
Abstact/CSE210-projects
|
Group_Work/Pong/batter/game/scripting/draw_bricks_action.py
|
draw_bricks_action.py
|
py
| 720 |
python
|
en
|
code
| 2 |
github-code
|
6
|
18256028081
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# For each test case, print the array reordered by alternately taking the
# next unused element from the front and from the back.
t = int(input())
for _ in range(t):
    n = int(input())
    values = [int(tok) for tok in input().split()]
    lo, hi = 0, n - 1
    take_front = True
    result = []
    for _ in range(n):
        if take_front:
            result.append(values[lo])
            lo += 1
        else:
            result.append(values[hi])
            hi -= 1
        take_front = not take_front
    print(*result)
|
clarinet758/codeforces
|
round/round0676-700/round0690/a1.py
|
a1.py
|
py
| 356 |
python
|
en
|
code
| 0 |
github-code
|
6
|
1586622740
|
import tkinter as tk
from tkinter import END, ttk, messagebox
from tkinter.font import BOLD
import util.generic as utl
import mysql.connector
class MasterPanel:
    """Tkinter admin window for managing user records in the MySQL table
    `proyecto_accesos.usuarios`: an add/delete form on the left and a
    scrollable table of all records on the right."""

    def conectar_bd(self):
        """Open and return a new connection to the MySQL database.

        NOTE(review): host/user/password are hard-coded here; move them to
        configuration or environment variables before shipping.
        """
        # Connect to the database.
        conexion = mysql.connector.connect(
            host='192.168.100.9',
            user='remote',
            password='Briza_3121',
            database='proyecto_accesos'
        )
        return conexion

    def agregar_usuario(self):
        """Insert a new user taken from the form entries, clear the form and
        redraw the records table."""
        identificador_agregar = self.identificador.get()
        nombre_agregar = self.nombre.get()
        apellido_p_agregar = self.apellido_p.get()
        apellido_m_agregar = self.apellido_m.get()
        matricula_agregar = self.matricula.get()
        tipo_usuario_agregar = self.tipo_usuario.get()
        contrasena_agregar = self.contrasena.get()
        # Check whether any input field is empty.
        # TODO (from original note): entering an identifier equal to an
        # existing one raises a DB error; an error message should be shown instead.
        if not identificador_agregar or not nombre_agregar or not apellido_p_agregar or not apellido_m_agregar or not matricula_agregar or not tipo_usuario_agregar or not contrasena_agregar:
            messagebox.showerror(
                message="Por favor ingrese la información requerida", title="Error")
            return
        conexion = self.conectar_bd()
        cursor = conexion.cursor()
        # Parameterized INSERT (safe against SQL injection).
        query = "INSERT INTO proyecto_accesos.usuarios(identificador,nombre,apellido_p,apellido_m,matricula,tipo_usuario,contraseña) VALUES (%s, %s, %s, %s, %s, %s, %s)"
        cursor.execute(query, (identificador_agregar, nombre_agregar, apellido_p_agregar,apellido_m_agregar, matricula_agregar, tipo_usuario_agregar, contrasena_agregar))
        if cursor.rowcount > 0:
            mensaje = "Registro agregado con éxito"
        else:
            mensaje = "No se ha podido agregar el registro"
        messagebox.showinfo(title="Agregado de registro", message=mensaje)
        # Clear the entry widgets after writing the data to the database.
        self.identificador.delete(0, END)
        self.nombre.delete(0, END)
        self.apellido_p.delete(0, END)
        self.apellido_m.delete(0, END)
        self.matricula.delete(0, END)
        self.tipo_usuario.delete(0, END)
        self.contrasena.delete(0, END)
        conexion.commit()
        cursor.close()
        conexion.close()
        # Redraw the records table from scratch.
        self.eliminar_celdas()
        self.celdas = []
        conexion = self.conectar_bd()
        cursor = conexion.cursor()
        cursor.execute(
            'SELECT * FROM proyecto_accesos.usuarios ORDER BY nombre')
        registros = cursor.fetchall()
        # NOTE(review): rows start at grid row i here, while __init__ starts
        # at row i+1 -- confirm which layout is intended.
        for i, fila in enumerate(registros):
            for j, valor in enumerate(fila):
                celda = tk.Label(self.frame_contenido, text=str(
                    valor), font=('Times', 14), fg="black", bg='#fcfcfc')
                celda.grid(row=i, column=j, padx=55, pady=10)
                self.celdas.append(celda)
        conexion.commit()  # original note: if this stops working, remove this line
        cursor.close()
        conexion.close()

    def eliminar_usuario(self):
        """Delete the user whose identifier is in the form entry, clear the
        form and redraw the records table."""
        identificador_eliminar = self.identificador.get()
        if not identificador_eliminar:
            messagebox.showerror(
                message="Por favor ingrese el identificador que desea eliminar.", title="Error")
            return
        conexion = self.conectar_bd()
        cursor = conexion.cursor()
        # Parameterized DELETE (safe against SQL injection).
        query = "DELETE FROM proyecto_accesos.usuarios WHERE identificador = %s"
        cursor.execute(query, (identificador_eliminar,))
        if cursor.rowcount > 0:
            mensaje = "Registro eliminado con éxito"
        else:
            mensaje = "No se ha encontrado ningún registro con el identificador especificado"
        messagebox.showinfo(title="Eliminación de registro", message=mensaje)
        # Clear the entry widgets after the database operation.
        self.identificador.delete(0, END)
        self.nombre.delete(0, END)
        self.apellido_p.delete(0, END)
        self.apellido_m.delete(0, END)
        self.matricula.delete(0, END)
        self.tipo_usuario.delete(0, END)
        self.contrasena.delete(0, END)
        conexion.commit()
        cursor.close()
        conexion.close()
        # Redraw the records table from scratch.
        self.eliminar_celdas()
        self.celdas = []
        conexion = self.conectar_bd()
        cursor = conexion.cursor()
        cursor.execute(
            'SELECT * FROM proyecto_accesos.usuarios ORDER BY nombre')
        registros = cursor.fetchall()
        for i, fila in enumerate(registros):
            for j, valor in enumerate(fila):
                celda = tk.Label(self.frame_contenido, text=str(
                    valor), font=('Times', 14), fg="black", bg='#fcfcfc')
                celda.grid(row=i, column=j, padx=55, pady=10)
                self.celdas.append(celda)
        conexion.commit()  # original note: if this stops working, remove this line
        cursor.close()
        conexion.close()

    def __init__(self):
        """Build the main window (admin form on the left, scrollable records
        table on the right), load all users, and start the Tk main loop."""
        self.ventana = tk.Tk()
        self.ventana.title('Administración de Usuarios')
        w, h = self.ventana.winfo_screenwidth(), self.ventana.winfo_screenheight()
        self.ventana.geometry("%dx%d+0+0" % (w, h))
        self.ventana.config(bg='#fcfcfc')
        self.ventana.resizable(width=True, height=True)
        # Admin frame (left panel).
        frame_admin = tk.Frame(self.ventana, bd=0, width=500,relief=tk.SOLID, padx=10, pady=10, bg='#fa5c5c')
        frame_admin.pack(side="left", expand=tk.NO, fill=tk.Y)
        title = tk.Label(frame_admin, text="Administración de Usuarios", font=(
            'Times', 20), fg="white", bg='#fa5c5c', pady=20, padx=120)
        title.pack(side="top", expand=tk.YES, fill=tk.X, anchor="n")
        # Labels and entry widgets of the admin form.
        # Identifier.
        etiqueta_identificador = tk.Label(frame_admin, text="Identificador:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_identificador.pack(fill=tk.X, padx=20, pady=10)
        self.identificador = ttk.Entry(
            frame_admin, font=('Times', 14), width=20)
        self.identificador.pack(fill=None, padx=20, pady=10, anchor="w")
        # First name.
        etiqueta_nombre = tk.Label(frame_admin, text="Nombre:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_nombre.pack(fill=tk.X, padx=20, pady=10)
        self.nombre = ttk.Entry(frame_admin, font=('Times', 14), width=20)
        self.nombre.pack(fill=None, padx=20, pady=10, anchor="w")
        # Paternal surname.
        etiqueta_apellido_p = tk.Label(frame_admin, text="Apellido Paterno:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_apellido_p.pack(fill=tk.X, padx=20, pady=10)
        self.apellido_p = ttk.Entry(frame_admin, font=('Times', 14), width=20)
        self.apellido_p.pack(fill=None, padx=20, pady=10, anchor="w")
        # Maternal surname.
        etiqueta_apellido_m = tk.Label(frame_admin, text="Apellido Materno:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_apellido_m.pack(fill=tk.X, padx=20, pady=10)
        self.apellido_m = ttk.Entry(frame_admin, font=('Times', 14), width=20)
        self.apellido_m.pack(fill=None, padx=20, pady=10, anchor="w")
        # Student ID / account number.
        etiqueta_matricula = tk.Label(frame_admin, text="Número de cuenta:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_matricula.pack(fill=tk.X, padx=20, pady=10)
        self.matricula = ttk.Entry(frame_admin, font=('Times', 14), width=20)
        self.matricula.pack(fill=None, padx=20, pady=10, anchor="w")
        # User type.
        etiqueta_tipo_usuario = tk.Label(frame_admin, text="Tipo de Usuario:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_tipo_usuario.pack(fill=tk.X, padx=20, pady=10)
        self.tipo_usuario = ttk.Entry(
            frame_admin, font=('Times', 14), width=20)
        self.tipo_usuario.pack(fill=None, padx=20, pady=10, anchor="w")
        # Password.
        etiqueta_contrasena = tk.Label(frame_admin, text="Contraseña:", font=(
            'Times', 14), fg="white", bg='#fa5c5c', anchor="w")
        etiqueta_contrasena.pack(fill=tk.X, padx=20, pady=10)
        self.contrasena = ttk.Entry(frame_admin, font=('Times', 14), width=20)
        self.contrasena.pack(fill=None, padx=20, pady=10, anchor="w")
        # "Add" button.
        agregar = tk.Button(frame_admin, text="Agregar", font=(
            'Times', 15), bd=0, bg='#fcfcfc', width=15, command=self.agregar_usuario)
        agregar.pack(side=tk.LEFT, fill=None, padx=15, pady=100)
        agregar.bind("<Return>", (lambda event: self.agregar_usuario()))
        # "Delete" button.
        eliminar = tk.Button(frame_admin, text="Eliminar", font=(
            'Times', 15), bd=0, bg='#fcfcfc', width=15, command=self.eliminar_usuario)
        eliminar.pack(side=tk.RIGHT, fill=None, padx=15, pady=100)
        # Table frame (right panel).
        frame_tabla = tk.Frame(
            self.ventana, bd=0, relief=tk.SOLID, bg='#fcfcfc')
        frame_tabla.pack(side="left", expand=tk.YES, fill=tk.BOTH)
        # Top part of the table frame (title + header row).
        frame_tabla_top = tk.Frame(
            frame_tabla, height=50, bd=0, relief=tk.SOLID, bg="#fcfcfc")
        frame_tabla_top.pack(side="top", fill=tk.X)
        title = tk.Label(frame_tabla_top, text="Registros", font=(
            'Times', 25), fg="black", bg='#fcfcfc', pady=20)
        title.grid(row=0, column=3, padx=20, pady=5)
        # Bottom part of the table frame (scrollable records area).
        frame_tabla_buttom = tk.Frame(
            frame_tabla, height=50, bd=0, relief=tk.SOLID, bg='#fcfcfc')
        frame_tabla_buttom.pack(side="bottom", expand=tk.YES, fill=tk.BOTH)
        # Create the table area that displays the records.
        self.tabla_db = tk.Frame(frame_tabla_buttom, bg='#fcfcfc')
        self.tabla_db.pack(fill=tk.BOTH, expand=tk.YES)
        # Canvas that allows scrolling through the table records.
        self.canvas_tabla = tk.Canvas(self.tabla_db, bg="#FFFFFF")
        self.canvas_tabla.pack(side="left", fill="both", expand=True)
        # Vertical scrollbar for the canvas.
        self.scrollbar_tabla = tk.Scrollbar(
            self.tabla_db, orient="vertical", command=self.canvas_tabla.yview)
        self.scrollbar_tabla.pack(side="right", fill="y")
        # Link the canvas to the scrollbar.
        self.canvas_tabla.configure(yscrollcommand=self.scrollbar_tabla.set)
        self.canvas_tabla.bind("<Configure>", lambda e: self.canvas_tabla.configure(
            scrollregion=self.canvas_tabla.bbox("all")))
        # Frame inside the canvas that holds the record labels.
        self.frame_contenido = tk.Frame(self.canvas_tabla, bg="#FFFFFF")
        self.canvas_tabla.create_window(
            (0, 0), window=self.frame_contenido, anchor="nw")
        ### TABLE HEADER ###
        # Labels for the table header row.
        cabecera_identificador = tk.Label(frame_tabla_top, text="Identificador", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_nombre = tk.Label(frame_tabla_top, text="Nombre", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_apellido_p = tk.Label(frame_tabla_top, text="Apellido Paterno", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_apellido_m = tk.Label(frame_tabla_top, text="Apellido Materno", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_matricula = tk.Label(frame_tabla_top, text="Número de Cuenta", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_tipo_usuario = tk.Label(frame_tabla_top, text="Tipo de usuario", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_contrasena = tk.Label(frame_tabla_top, text="Contraseña", font=(
            'Times', 14, 'bold'), fg="black", bg='#fcfcfc')
        cabecera_identificador.grid(row=1, column=0, padx=55, pady=5,)
        cabecera_nombre.grid(row=1, column=1, padx=55, pady=5)
        cabecera_apellido_p.grid(row=1, column=2, padx=30, pady=5)
        cabecera_apellido_m.grid(row=1, column=3, padx=0, pady=5)
        cabecera_matricula.grid(row=1, column=4, padx=5, pady=5)
        cabecera_tipo_usuario.grid(row=1, column=5, padx=0, pady=5)
        cabecera_contrasena.grid(row=1, column=6, padx=45, pady=5)
        # Fetch all records from the table and draw them.
        self.celdas = []
        conexion = self.conectar_bd()
        cursor = conexion.cursor()
        cursor.execute(
            'SELECT * FROM proyecto_accesos.usuarios ORDER BY nombre')
        registros = cursor.fetchall()
        for i, fila in enumerate(registros):
            for j, valor in enumerate(fila):
                celda = tk.Label(self.frame_contenido, text=str(
                    valor), font=('Times', 14), fg="black", bg='#fcfcfc')
                celda.grid(row=i+1, column=j, padx=55, pady=10)
                self.celdas.append(celda)
        cursor.close()
        conexion.close()
        self.ventana.mainloop()

    # Removes all record labels from the table.
    def eliminar_celdas(self):
        """Destroy every Label currently shown in the records table."""
        for celda in self.celdas:
            celda.destroy()
|
Guuuuussss/Proyecto-Acceso-y-Pase-de-Lista
|
Interfaz/forms/form_master.py
|
form_master.py
|
py
| 13,497 |
python
|
es
|
code
| 0 |
github-code
|
6
|
23706350056
|
#!/usr/bin/env python
"""Plot sky positions onto an Aitoff map of the sky.
Usage:
%s <filename>... [--racol=<racol>] [--deccol=<deccol>] [--mjdcol=<mjdcol>] [--filtercol=<filtercol>] [--expnamecol=<expnamecol>] [--commentcol=<commentcol>] [--usepatches] [--alpha=<alpha>] [--outfile=<outfile>] [--tight] [--delimiter=<delimiter>] [--pointsize=<pointsize>]
%s (-h | --help)
%s --version
Options:
-h --help Show this screen.
--version Show version.
--racol=<racol> Column that represents RA. [default: ra]
--deccol=<deccol> Column that represents declination. [default: dec]
--mjdcol=<mjdcol> Column that represents MJD. [default: mjd]
--filtercol=<filtercol> Column that represents filter. [default: filter]
--expnamecol=<expnamecol> Column that represents exposure name.
--commentcol=<commentcol> Column that represents a comment (e.g. a survey comment, for Pan-STARRS).
--usepatches Plot patches (defined shapes), not points, e.g. ATLAS square footprints or Pan-STARRS circles mapped onto the sky.
--outfile=<outfile> Output file.
--alpha=<alpha> Transparency. [default: 0.1]
--tight Tight layout.
--delimiter=<delimiter> Delimiter to use [default: \t]
--pointsize=<pointsize> Point size [default: 0.1]
E.g.:
%s ~/atlas/dophot/small_area_fields_subset.txt --alpha=0.1 --usepatches --outfile=/tmp/test.png
"""
import sys
__doc__ = __doc__ % (sys.argv[0], sys.argv[0], sys.argv[0], sys.argv[0])
from docopt import docopt
from gkutils.commonutils import Struct, readGenericDataFile, cleanOptions, sexToDec, getMJDFromSqlDate, GalactictoJ2000, EcliptictoJ2000, getDateFromMJD, transform
import csv
import numpy as np
import matplotlib.pyplot as pl
from matplotlib import colors
import matplotlib.patches as patches
import math
# ###########################################################################################
# Main program
# ###########################################################################################
# Colors as defined in lightcurve.js
# Colors as defined in lightcurve.js
# NOTE(review): this list shadows the `colors` name brought in above by
# `from matplotlib import colors`; that import appears unused, but renaming
# either would touch every `colors[...]` reference below, so it is only
# flagged here.
# Index order matters: doPlot() maps filters g,r,i,z,y,w,c,o to indices 0-7.
colors = ["#6A5ACD", #SlateBlue
          "#008000", #Green
          "#DAA520", #GoldenRod
          "#A0522D", #Sienna
          "#FF69B4", #HotPink
          "#DC143C", #Crimson
          "#008B8B", #DarkCyan
          "#FF8C00", #Darkorange
          "#FFD700", #Gold
          "#0000FF", #Blue
          "#4B0082", #Indigo
          "#800080", #Purple
          "#A52A2A", #Brown
          "#DB7093", #PaleVioletRed
          "#708090", #SlateGray
          "#800000", #Maroon
          "#B22222", #FireBrick
          "#9ACD32", #YellowGreen
          "#FA8072", #Salmon
          "#000000"]; #Black
def doPlot(options, objects, plotNumber = 111, alpha = 0.2, minMJD = 0.0, maxMJD = 60000.0, usePatches = False):
    """Plot the sky positions in *objects* onto a Hammer-projection subplot.

    Rows are bucketed by the first character of their filter column
    (g/r/i/z/y/w/c/o) and drawn in the per-filter colours from the
    module-level ``colors`` list.  Rows outside the (minMJD, maxMJD) window
    are skipped.  The galactic and ecliptic planes are overplotted as black
    and blue dotted curves.

    Parameters:
        options: parsed command-line options; racol/deccol/mjdcol/filtercol
            name the relevant columns, and alpha/pointsize control rendering.
        objects: iterable of row dicts (e.g. from readGenericDataFile).
        plotNumber: matplotlib subplot specifier (e.g. 111).
        alpha: NOTE(review) - this parameter is never read in the body;
            ``options.alpha`` is what is actually applied throughout.
        minMJD, maxMJD: exclusive MJD window for including rows.
        usePatches: if True, draw survey footprints (circles for g/r/i/z/y/w,
            rectangles for the ATLAS c/o filters) instead of scatter points.

    Returns:
        The matplotlib.pyplot module, ready for show()/savefig().
    """
    # One (x, y) coordinate list per filter band.
    gx = []
    gy = []
    rx = []
    ry = []
    ix = []
    iy = []
    zx = []
    zy = []
    yx = []
    yy = []
    wx = []
    wy = []
    cx = []
    cy = []
    ox = []
    oy = []
    for row in objects:
        # Coordinates may be decimal degrees or sexagesimal strings.
        try:
            ra = float(row[options.racol])
        except ValueError as e:
            ra = sexToDec(row[options.racol], ra=True)
        try:
            dec = float(row[options.deccol])
        except ValueError as e:
            dec = sexToDec(row[options.deccol], ra=False)
        # Fold RA into (-180, 180] so 0h sits at the projection centre
        # (RA increases to the left per the tick labels set below).
        if ra > 180.0:
            ra = 360.0 - ra
        else:
            ra = (-1.0) * ra
        try:
            mjd = float(row[options.mjdcol])
            # Maybe we got JD, not MJD - check.
            if mjd > 2400000.5:
                mjd = mjd - 2400000.5
        except ValueError as e:
            mjd = getMJDFromSqlDate(row[options.mjdcol])
        #if dec > -9.0 and dec < -8.0:
        #if mjd > 57053: # January 31st
        #if mjd > 57174: # June 1st
        # Bucket by the first character of the filter name.
        if mjd is not None and mjd > minMJD and mjd < maxMJD:
            if row[options.filtercol][0] == 'g':
                gx.append(ra)
                gy.append(dec)
            elif row[options.filtercol][0] == 'r':
                rx.append(ra)
                ry.append(dec)
            elif row[options.filtercol][0] == 'i':
                ix.append(ra)
                iy.append(dec)
            elif row[options.filtercol][0] == 'z':
                zx.append(ra)
                zy.append(dec)
            elif row[options.filtercol][0] == 'y':
                yx.append(ra)
                yy.append(dec)
            elif row[options.filtercol][0] == 'w':
                wx.append(ra)
                wy.append(dec)
            elif row[options.filtercol][0] == 'c':
                cx.append(ra)
                cy.append(dec)
            elif row[options.filtercol][0] == 'o':
                ox.append(ra)
                oy.append(dec)
            #print (row[options.racol], row[options.deccol], row[options.expnamecol], row[options.commentcol], row[options.filtercol])
    # The projection axes work in radians - convert everything.
    degtorad = math.pi/180.
    gx = np.array(gx) * degtorad
    gy = np.array(gy) * degtorad
    rx = np.array(rx) * degtorad
    ry = np.array(ry) * degtorad
    ix = np.array(ix) * degtorad
    iy = np.array(iy) * degtorad
    zx = np.array(zx) * degtorad
    zy = np.array(zy) * degtorad
    yx = np.array(yx) * degtorad
    yy = np.array(yy) * degtorad
    wx = np.array(wx) * degtorad
    wy = np.array(wy) * degtorad
    cx = np.array(cx) * degtorad
    cy = np.array(cy) * degtorad
    ox = np.array(ox) * degtorad
    oy = np.array(oy) * degtorad
    fig2 = pl.figure(2)
    ax1 = fig2.add_subplot(plotNumber, projection="hammer")
    # Footprint sizes in radians: 5.4 deg squares and 1.4 deg radius circles
    # (see the ATLAS/PS1 comment below).
    s = 5.4 * degtorad
    r = 1.4 * degtorad
    if usePatches:
        # Square exposures for ATLAS, circular ones for PS1
        for x,y in zip(gx,gy):
            ax1.add_patch(patches.Circle((x, y), r, color=colors[0], alpha = float(options.alpha)))
        for x,y in zip(rx,ry):
            ax1.add_patch(patches.Circle((x, y), r, color=colors[1], alpha = float(options.alpha)))
        for x,y in zip(ix,iy):
            ax1.add_patch(patches.Circle((x, y), r, color=colors[2], alpha = float(options.alpha)))
        for x,y in zip(zx,zy):
            ax1.add_patch(patches.Circle((x, y), r, color=colors[3], alpha = float(options.alpha)))
        for x,y in zip(yx,yy):
            ax1.add_patch(patches.Circle((x, y), r, color=colors[4], alpha = float(options.alpha)))
        for x,y in zip(wx,wy):
            ax1.add_patch(patches.Circle((x, y), r, color=colors[5], alpha = float(options.alpha)))
        for x,y in zip(cx,cy):
            # Widen the rectangle by 1/cos(dec) to compensate for projection shrink.
            ax1.add_patch(patches.Rectangle((x-s/2.0, y-s/2.0), s/math.cos(y), s, color=colors[6], alpha = float(options.alpha)))
        for x,y in zip(ox,oy):
            ax1.add_patch(patches.Rectangle((x-s/2.0, y-s/2.0), s/math.cos(y), s, color=colors[7], alpha = float(options.alpha)))
    else:
        ax1.scatter(gx,gy, alpha=float(options.alpha), edgecolors='none', color=colors[0], s = float(options.pointsize))
        ax1.scatter(rx,ry, alpha=float(options.alpha), edgecolors='none', color=colors[1], s = float(options.pointsize))
        ax1.scatter(ix,iy, alpha=float(options.alpha), edgecolors='none', color=colors[2], s = float(options.pointsize))
        ax1.scatter(zx,zy, alpha=float(options.alpha), edgecolors='none', color=colors[3], s = float(options.pointsize))
        ax1.scatter(yx,yy, alpha=float(options.alpha), edgecolors='none', color=colors[4], s = float(options.pointsize))
        ax1.scatter(wx,wy, alpha=float(options.alpha), edgecolors='none', color=colors[5], s = float(options.pointsize))
        ax1.scatter(cx,cy, alpha=float(options.alpha), edgecolors='none', color=colors[6], s = float(options.pointsize))
        ax1.scatter(ox,oy, alpha=float(options.alpha), edgecolors='none', color=colors[7], s = float(options.pointsize))
    # Off-plot dummy points: their handles feed the (currently disabled)
    # legend calls commented out below.
    gleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[0])
    rleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[1])
    ileg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[2])
    zleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[3])
    yleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[4])
    wleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[5])
    cleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[6])
    oleg = ax1.scatter(-10,-10, alpha=1.0, edgecolors='none', color=colors[7])
    #leg = ax1.legend(loc='upper right', scatterpoints = 1, prop = {'size':6})
    #leg = ax1.legend([gleg, rleg, ileg, zleg], ['g', 'r', 'i', 'z'], loc='upper right', scatterpoints = 1, prop = {'size':6})
    #leg = ax1.legend([gleg, rleg, ileg, zleg, yleg], ['g', 'r', 'i', 'z', 'y'], loc='upper right', scatterpoints = 1, prop = {'size':6})
    #leg = ax1.legend([gleg, rleg, ileg, zleg, yleg, wleg], ['g', 'r', 'i', 'z', 'y', 'w'], loc='upper right', scatterpoints = 1, prop = {'size':4})
    #leg = ax1.legend([gleg, rleg, ileg, zleg, yleg, wleg, cleg, oleg], ['g', 'r', 'i', 'z', 'y', 'w', 'c', 'o'], loc='upper right', scatterpoints = 1, prop = {'size':4})
    #leg = ax1.legend([cleg, oleg], ['c', 'o'], loc='upper right', scatterpoints = 1, prop = {'size':4})
    #leg.get_frame().set_linewidth(0.0)
    #leg.get_frame().set_alpha(0.0)
    # Red guide lines along the equator and the central meridian.
    ax1.plot([-math.pi, math.pi], [0,0],'r-')
    ax1.plot([0,0],[-math.pi, math.pi], 'r-')
    # RA tick labels in hours, increasing leftwards with 0h at the centre.
    labels = ['10h', '8h', '6h', '4h', '2h', '0h', '22h', '20h', '18h', '16h', '14h']
    ax1.axes.xaxis.set_ticklabels(labels)
    # Plot the galactic plane
    # (sampled every 0.01 deg of galactic longitude, converted to equatorial).
    gp = []
    for l in range(0, 36000, 1):
        equatorialCoords = transform([l/100.0, 0.0], GalactictoJ2000)
        gp.append(equatorialCoords)
    ras = []
    decs = []
    for row in gp:
        ra, dec = row
        # Same RA fold as applied to the data points above.
        if ra > 180.0:
            ra = 360.0 - ra
        else:
            ra = (-1.0) * ra
        ras.append(ra)
        decs.append(dec)
    ras = np.array(ras) * degtorad
    decs = np.array(decs) * degtorad
    ax1.plot(ras,decs, 'k.', markersize=1.0)
    # Plot the ecliptic plane
    # (same sampling, using the ecliptic-to-equatorial transform).
    ep = []
    for l in range(0, 36000, 1):
        equatorialCoords = transform([l/100.0, 0.0], EcliptictoJ2000)
        ep.append(equatorialCoords)
    ras = []
    decs = []
    for row in ep:
        ra, dec = row
        if ra > 180.0:
            ra = 360.0 - ra
        else:
            ra = (-1.0) * ra
        ras.append(ra)
        decs.append(dec)
    ras = np.array(ras) * degtorad
    decs = np.array(decs) * degtorad
    ax1.plot(ras,decs, 'b.', markersize=1.0)
    #ax1.axes.yaxis.set_ticklabels([])
    # plot celestial equator
    #ax1.plot(longitude2,latitude2,'g-')
    #for i in range(0,6):
    #    ax1.text(xrad[i], yrad[i], lab[i])
    #pl.title("%s" % getDateFromMJD(maxMJD).split(' ')[0], color='b', fontsize=12)
    pl.grid(True)
    return pl
def plotHammerProjection(options, filename, objects, alpha = 0.2, minMJD = 0.0, maxMJD = 60000.0, usePatches = False):
    """Render one Hammer-projection sky plot and save or display it.

    Delegates the drawing to doPlot(), then either writes the figure to
    ``options.outfile`` (and clears it) or opens an interactive window.
    """
    print(maxMJD - 1, maxMJD)
    plot = doPlot(options, objects, plotNumber=111, alpha=alpha,
                  minMJD=minMJD, maxMJD=maxMJD, usePatches=usePatches)
    if options.tight:
        plot.tight_layout()
    if not options.outfile:
        plot.show()
    else:
        plot.savefig(options.outfile, dpi=600)
        plot.clf()
def doStats(options, filename, objects):
    """Print per-night exposure statistics for *objects*.

    First prints, for each whole MJD, the number of rows (exposures) that
    night; then the number of *distinct* exposure names per night, which
    indicates how much sky was actually covered.

    Parameters:
        options: parsed command-line options; ``mjdcol`` and ``expnamecol``
            name the MJD and exposure-name columns.
        filename: unused; kept for interface compatibility with the caller.
        objects: iterable of row dicts.
    """
    from collections import Counter
    mjds = []
    fp = {}
    for row in objects:
        # FIX: use the configured MJD column (options.mjdcol) instead of the
        # hard-coded 'mjd' key the original used - every other function in
        # this file honours --mjdcol.
        try:
            mjd = float(row[options.mjdcol])
        except ValueError as e:
            mjd = getMJDFromSqlDate(row[options.mjdcol])
        wholeMJD = int(mjd)
        mjds.append(wholeMJD)
        try:
            fp[wholeMJD].append(row[options.expnamecol])
        except KeyError as e:
            fp[wholeMJD] = [row[options.expnamecol]]
    # Count the number of exposures per night
    mjdFrequency = Counter(mjds)
    for k, v in mjdFrequency.items():
        print(k, v)
    print()
    # Now count the frequency of fpa_object per night. This will tell us how
    # much sky is ACTUALLY covered each night.
    for k, v in fp.items():
        footprintFrequency = Counter(fp[k])
        print(k, len(footprintFrequency))
def main(argv = None):
    """Entry point: parse the command line and plot each input catalogue."""
    options = Struct(**cleanOptions(docopt(__doc__, version='0.1')))

    # Earlier revisions hard-wired MJD windows for specific survey periods
    # (monthly 2016 footprint plots, the 2014-2015 restart, etc.); pass
    # minMJD/maxMJD through to plotHammerProjection if date-windowed plots
    # are needed again.  Patches (--usepatches) draw accurate ATLAS/PS1
    # footprints and should not be combined with object-position plots.
    for filename in options.filename:
        rows = readGenericDataFile(filename, delimiter=options.delimiter)
        plotHammerProjection(options, filename, rows,
                             alpha=float(options.alpha),
                             usePatches=options.usepatches)
        #doStats(options, filename, rows)


if __name__ == '__main__':
    main()
|
genghisken/gkplot
|
gkplot/scripts/skyplot.py
|
skyplot.py
|
py
| 15,360 |
python
|
en
|
code
| 0 |
github-code
|
6
|
7711698828
|
from PyPDF2 import PdfReader
def get_pdf_text(pdfs):
    """Extract and concatenate the text of every page in every PDF.

    Parameters:
        pdfs: iterable of PDF file objects/paths accepted by PdfReader.

    Returns:
        str: the text of all pages, concatenated in document order.
    """
    page_texts = []
    for document in pdfs:
        reader = PdfReader(document)
        page_texts.extend(page.extract_text() for page in reader.pages)
    return "".join(page_texts)
|
arunavabasu-03/PDFAssist
|
src/helpers/getPdf.py
|
getPdf.py
|
py
| 399 |
python
|
en
|
code
| 0 |
github-code
|
6
|
23844088819
|
log_file = open("um-server-01.txt") #the variable log-file opens the parameter of um-server01.txt as readable, because the default is to read the text you do not have to declare the the file modes
def sales_reports(log_file):
    """Print each line of the log that was recorded on a Monday.

    Parameters:
        log_file: iterable of log lines (e.g. an open file object).
    """
    for entry in log_file:
        entry = entry.rstrip()  # drop the trailing newline/whitespace
        if entry.startswith("Mon"):
            print(entry)
sales_reports(log_file) # print the Monday lines from the log
#log_file.close() #it is best practice to close the file when you are done with it
def qty_report(log_file, qty):
    """Print every log line whose quantity field (3rd column) exceeds *qty*.

    Parameters:
        log_file: iterable of log lines (e.g. an open file object).
        qty: integer threshold; lines with a larger quantity are printed.
    """
    for line in log_file:
        line = line.rstrip()  # strip trailing whitespace/newline
        # BUG FIX: the original called line.rstrip(' ') here, which returns a
        # *string*, so data[2] was the line's third character rather than its
        # third field.  split() yields the whitespace-separated fields as the
        # surrounding comments intended.
        data = line.split()
        # Guard against blank/short lines so the index cannot blow up.
        if len(data) > 2 and int(data[2]) > qty:
            print(line)
log_file = open("um-server-01.txt") # reopen: the first pass above exhausted the file iterator
qty_report(log_file, 10) # report the lines whose quantity exceeds 10
|
heimdalalr/assessment-data
|
process.py
|
process.py
|
py
| 1,410 |
python
|
en
|
code
| 0 |
github-code
|
6
|
25687922492
|
import astroid
from hypothesis import assume, given, settings, HealthCheck
from .. import custom_hypothesis_support as cs
from typing import Any, Dict, List, Set, Tuple
settings.load_profile("pyta")
@given(cs.subscript_node())
@settings(suppress_health_check=[HealthCheck.too_slow])
def test_index(node):
    """Every Index node's inferred type equals that of its wrapped value."""
    module, _ = cs._parse_text(node)
    matches = (
        idx.inf_type.getValue() == idx.value.inf_type.getValue()
        for idx in module.nodes_of_class(astroid.Index)
    )
    assert all(matches)
@given(cs.expr_node())
@settings(suppress_health_check=[HealthCheck.too_slow])
def test_expr(expr):
    """Every Expr node's inferred type equals that of its wrapped value."""
    module, _ = cs._parse_text(expr)
    matches = (
        e.inf_type.getValue() == e.value.inf_type.getValue()
        for e in module.nodes_of_class(astroid.Expr)
    )
    assert all(matches)
|
ihasan98/pyta
|
tests/test_type_inference/test_literals.py
|
test_literals.py
|
py
| 772 |
python
|
en
|
code
| null |
github-code
|
6
|
27712887237
|
from typing import List  # FIX: List was referenced in the signature but never imported


class Solution:
    def canPartition(self, nums: List[int]) -> bool:
        """Return True if nums can be split into two subsets of equal sum.

        Classic subset-sum DP over a set of reachable sums: after processing
        each number, ``reachable`` holds every subset sum achievable so far.
        Improvements over the original: the unused ``n = len(nums)`` is gone,
        sums above the target are pruned (they can never shrink), we return
        early the moment the target is reached, and the redundant
        ``True if ... else False`` is simplified.
        """
        total = sum(nums)
        # An odd total can never be split into two equal halves.
        if total % 2 != 0:
            return False
        target = total // 2
        reachable = {0}
        for num in nums:
            next_reachable = set()
            for partial in reachable:
                candidate = partial + num
                if candidate == target:
                    return True  # early exit: an equal-sum split exists
                if candidate < target:  # prune sums that overshoot the target
                    next_reachable.add(candidate)
                next_reachable.add(partial)
            reachable = next_reachable
        return target in reachable
|
jemis140/DSA_Practice
|
Dynamic Programming/Partiton_equal_sum.py
|
Partiton_equal_sum.py
|
py
| 506 |
python
|
en
|
code
| 0 |
github-code
|
6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.