hexsha
stringlengths 40
40
| size
int64 6
782k
| ext
stringclasses 7
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
237
| max_stars_repo_name
stringlengths 6
72
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
list | max_stars_count
int64 1
53k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
184
| max_issues_repo_name
stringlengths 6
72
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
list | max_issues_count
int64 1
27.1k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
184
| max_forks_repo_name
stringlengths 6
72
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
list | max_forks_count
int64 1
12.2k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 6
782k
| avg_line_length
float64 2.75
664k
| max_line_length
int64 5
782k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4e15706f4a20f000dbde4cef576b8962fdfad9d3
| 113 |
py
|
Python
|
rasa/core/training/converters/__init__.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 37 |
2019-06-07T07:39:00.000Z
|
2022-01-27T08:32:57.000Z
|
rasa/core/training/converters/__init__.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 209 |
2020-03-18T18:28:12.000Z
|
2022-03-01T13:42:29.000Z
|
rasa/core/training/converters/__init__.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 65 |
2019-05-21T12:16:53.000Z
|
2022-02-23T10:54:15.000Z
|
from rasa.core.training.converters.story_markdown_to_yaml_converter import (
StoryMarkdownToYamlConverter,
)
| 28.25 | 76 | 0.849558 |
ee3d5c500c3ceffae36f82424036f01909047b13
| 799 |
py
|
Python
|
flask/microblog-db/app/__init__.py
|
qsunny/python
|
ace8c3178a9a9619de2b60ca242c2079dd2f825e
|
[
"MIT"
] | null | null | null |
flask/microblog-db/app/__init__.py
|
qsunny/python
|
ace8c3178a9a9619de2b60ca242c2079dd2f825e
|
[
"MIT"
] | 2 |
2021-03-25T22:00:07.000Z
|
2022-01-20T15:51:48.000Z
|
flask/microblog-login/app/__init__.py
|
qsunny/python
|
ace8c3178a9a9619de2b60ca242c2079dd2f825e
|
[
"MIT"
] | null | null | null |
from flask import Flask
from config import config
from flask_sqlalchemy import SQLAlchemy
# app.config['SECRET_KEY'] = '666666'
# ... add more variables here as needed
# app.config.from_object('config')  # load the configuration file
# app.config.from_object(config[config_name])
# config[config_name].init_app(app)
db = SQLAlchemy()
def create_app(config_name):
    """Application factory: build and configure a Flask app for *config_name*."""
    flask_app = Flask(__name__)  # , static_url_path='/app/static')
    flask_app.config.from_object(config[config_name])
    config[config_name].init_app(flask_app)
    # Blueprints must be imported after db is defined, otherwise the db import fails.
    from .main import main as main_blueprint
    flask_app.register_blueprint(main_blueprint)
    # from .admin import admin as admin_blueprint
    # flask_app.register_blueprint(admin_blueprint, url_prefix='/admin')
    db.init_app(flask_app)
    return flask_app
# from app.front import routes
| 26.633333 | 66 | 0.743429 |
ee84a98b10d262f7fd66759d611baf9d0f1cf5d0
| 512 |
py
|
Python
|
python/argparse/power.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
python/argparse/power.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
python/argparse/power.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
"""Compute x**y, with output verbosity controlled by repeated -v flags."""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('x', type=int, help='the base')
parser.add_argument('y', type=int, help='the exponent')
# action='count' returns how many times the -v option was passed.
# e.g. -vv -> 2, -v -> 1; defaults to 0 (default=0)
parser.add_argument('-v', '--verbosity', action='count', default=0)
args = parser.parse_args()
answer = args.x ** args.y
if args.verbosity >= 2:
    # Highest verbosity also reports which script is running.
    print(f'Running "{__file__}"')
if args.verbosity >= 1:
    # Prefix the result with the expression; end='' keeps the answer on the same line.
    print(f'{args.x}^{args.y} == ', end='')
print(answer)
| 36.571429 | 67 | 0.660156 |
c9d5727ae103d1267d3ee7871bd00a41ee83b324
| 389 |
py
|
Python
|
utils/mixins.py
|
TheKiddos/StaRat
|
33807d73276563f636b430e1bbfcb65b645869f7
|
[
"MIT"
] | 1 |
2021-05-18T16:33:10.000Z
|
2021-05-18T16:33:10.000Z
|
utils/mixins.py
|
TheKiddos/StaRat
|
33807d73276563f636b430e1bbfcb65b645869f7
|
[
"MIT"
] | 3 |
2021-05-18T16:02:32.000Z
|
2021-05-21T15:20:12.000Z
|
utils/mixins.py
|
TheKiddos/StaRat
|
33807d73276563f636b430e1bbfcb65b645869f7
|
[
"MIT"
] | 1 |
2021-09-12T22:56:09.000Z
|
2021-09-12T22:56:09.000Z
|
from rest_framework.permissions import AllowAny
class PublicListRetrieveViewSetMixin:
    """Make the ``list`` and ``retrieve`` actions publicly accessible.

    Every other action falls back to the view set's default permission and
    authentication handling.
    """

    # Actions anyone may perform without authentication.
    allowed_actions = ['list', 'retrieve']

    def get_permissions(self):
        if self.action not in self.allowed_actions:
            return super().get_permissions()
        return [AllowAny(), ]
| 32.416667 | 102 | 0.714653 |
4eb8602e6c346eacd4ef6e909b75e4dba74d5b64
| 48 |
py
|
Python
|
src/main/python/gps/__init__.py
|
BikeAtor/WoMoAtor
|
700cc8b970dcfdd5af2f471df1a223d2a38cb1bf
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/gps/__init__.py
|
BikeAtor/WoMoAtor
|
700cc8b970dcfdd5af2f471df1a223d2a38cb1bf
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/gps/__init__.py
|
BikeAtor/WoMoAtor
|
700cc8b970dcfdd5af2f471df1a223d2a38cb1bf
|
[
"Apache-2.0"
] | null | null | null |
from .gps import GPS
from .gpsmap import GPSMap
| 16 | 26 | 0.791667 |
eeff0777cb915dbd2f27d26dfc851f53b6fd0775
| 7,039 |
py
|
Python
|
start.py
|
DestinyofYeet/antonstechbot
|
b01372431a3a2b51fb83180cf8caa1a168e294ad
|
[
"MIT"
] | 1 |
2021-04-21T09:01:26.000Z
|
2021-04-21T09:01:26.000Z
|
start.py
|
DestinyofYeet/antonstechbot
|
b01372431a3a2b51fb83180cf8caa1a168e294ad
|
[
"MIT"
] | null | null | null |
start.py
|
DestinyofYeet/antonstechbot
|
b01372431a3a2b51fb83180cf8caa1a168e294ad
|
[
"MIT"
] | null | null | null |
from consolemenu import *
from consolemenu.items import *
import os
import sys
import subprocess
import webbrowser
import time
import requests
import json
from colorama import *
import mysql.connector
VERSION = subprocess.check_output(["git", "describe", "--tags", "--always"]).decode('ascii').strip()
def browser():
    """Open the project page in the default browser and print the URL as a fallback."""
    webbrowser.open("https://git.io/antonsbot")
    # Fallback hint for users without a usable browser ("no web browser found, please visit ...").
    print("Kein Webbrowser gefunden, geh doch bitte auf https://git.io/antonsbot ")
    # Brief pause so the message stays visible.
    time.sleep(5)
def tokenchecker():
    """Validate every API key/token from config/config.json with one live request each.

    Prints a green check mark for a key the service accepts (HTTP 200) and a
    red cross otherwise, then resets the terminal colour and pauses briefly.
    """
    # Read the configuration once instead of re-opening the file for every service
    # (the original opened and parsed config/config.json four separate times).
    with open('config/config.json', 'r') as f:
        json_stuff = json.load(f)

    def _report(label, response):
        # Green check for HTTP 200, red cross for anything else.
        if response.status_code == 200:
            print(Fore.GREEN + label + " ✅")
        else:
            print(Fore.RED + label + " ❌")

    # Riot
    base_riot_url = "https://euw1.api.riotgames.com/lol/summoner/v4/summoners/by-name/DCGALAXY?api_key="
    _report("Riot Games API Key", requests.get(base_riot_url + json_stuff["riotapi"]))
    # Osu
    base_osu_url = "https://osu.ppy.sh/api/get_user_best?u=Aftersh0ock&k="
    _report("Osu API Key", requests.get(base_osu_url + json_stuff["osuapi"]))
    # Discord
    headers = {
        "Authorization": "Bot " + json_stuff["token"]
    }
    _report("Discord Token", requests.get('https://discordapp.com/api/v8/auth/login', headers=headers))
    # ipdata
    ipurl = "https://api.ipdata.co/8.8.8.8" + "?api-key=" + json_stuff["ipdata"]
    _report("ipdata API Key", requests.get(ipurl))
    print(Style.RESET_ALL)
    time.sleep(7)
def mysqlsetup():
    """Interactively create config/mysql.json plus a fresh MySQL database and log table.

    Asks for confirmation first ("j" = yes); any other answer aborts without changes.
    """
    print("MySql Setup")
    print("Für Hilfe bitte das Wiki auf Github lesen")
    print("WICHTIG!!!!")
    yesorno = input(
        "Es wird eine NEUE MySQL Datenbank UND Tabelle erzeugt, welche dann anschließend auch nach einem Neustarten vom Bot bentutzt wird!!! (j/n): ")
    if yesorno == "j":
        # Collect connection settings and persist them; the bot reads this file on startup.
        config = {"enable": True, "host": input("Host: "), "user": input("Benutzername: "),
                  "passwort": input("Dein Passwort: "), "datenbank": input("Datenbank: "),
                  "tablename": input("Name der Tablle: "), "port": input("Port: ")}
        with open("config/mysql.json", "w+") as file:
            json.dump(config, file, indent=2)
        # Re-read the file just written so the connection uses exactly the stored values.
        with open('config/mysql.json', 'r') as f:
            json_stuff = json.load(f)
        host = json_stuff["host"]
        user = json_stuff["user"]
        passwort = json_stuff["passwort"]
        datenbank = json_stuff["datenbank"]
        table_name = json_stuff["tablename"]
        port = json_stuff["port"]
        # First connect to the built-in "mysql" database just to create the new one.
        mydb = mysql.connector.connect(
            host=host,
            user=user,
            password=passwort,
            database="mysql",
            port=port)
        mycursor = mydb.cursor()
        # NOTE(review): database/table names are concatenated into SQL unquoted and
        # unvalidated — a malformed user-supplied name will break these statements.
        mycursor.execute("CREATE DATABASE " + datenbank)
        # Reconnect to the newly created database and create the message-log table.
        mydb = mysql.connector.connect(
            host=host,
            user=user,
            password=passwort,
            database=datenbank,
            port=port)
        mycursor = mydb.cursor()
        mycursor.execute(
            "CREATE TABLE " + table_name + " (time timestamp null, content text null, attachement text null, membername varchar(255) null, memberid bigint null, guildid bigint null, guildname varchar(255) null, channelid bigint null, channelname varchar(255) null, id bigint not null primary key)")
    else:
        pass
def tokens():
    """Interactively (re)create config/config.json with all bot credentials."""
    print("Wichtig: Dieses Script erstellt eine neue config.json")
    # Prompt order matters for the user; dict comprehension preserves it.
    prompts = [
        ('token', "Dein Bot Token: "),
        ('prefix', "Dein Bot Prefix: "),
        ("riotapi", "Dein Riot Games Api Token: "),
        ("osuapi", "Dein Osu Api Token: "),
        ("ipdata", "Dein ipdata.co Token: "),
    ]
    config = {key: input(prompt) for key, prompt in prompts}
    with open('config/config.json', 'w+') as file:
        json.dump(config, file, indent=2)
def mysqldisable():
    """Disable MySQL support by renaming its config file; requires a bot restart."""
    if os.path.exists("config/mysql.json"):
        os.rename("config/mysql.json", "config/disabled_mysql.json")
        print("MySQL ist nun DEAKTIVIEREN!")
        print("Du musst den Bot 1x neustarten damit die Änderung wirksam wird!")
    elif os.path.exists("config/disabled_mysql.json"):
        # Nothing to do — already renamed to the disabled file.
        print("MySQL ist bereits deaktiviert")
    else:
        # Neither file exists: point the user at the support page.
        print("Iwas ist falsch gelaufen. Hier gibt es Hilfe:")
        print("https://github.com/antonstech/antonstechbot/wiki/Support")
def mysqlenable():
    """Enable MySQL support by restoring its config file; requires a bot restart."""
    if os.path.exists("config/disabled_mysql.json"):
        os.rename("config/disabled_mysql.json", "config/mysql.json")
        print("MySQL ist nun AKTIVIERT!")
        print("Du musst den Bot 1x neustarten damit die Änderung wirksam wird!")
    elif os.path.exists("config/mysql.json"):
        # Nothing to do — the active config file is already in place.
        print("MySQL ist bereits aktiviert")
    else:
        # Neither file exists: point the user at the support page.
        print("Iwas ist falsch gelaufen. Hier gibt es Hilfe:")
        print("https://github.com/antonstech/antonstechbot/wiki/Support")
def run_bot():
    """Launch bot.py with the platform-appropriate Python 3 command."""
    launch_cmd = "py -3 bot.py" if sys.platform == "win32" else "python3 bot.py"
    os.system(launch_cmd)
def update_bot():
    """Run update.py with the platform-appropriate Python 3 command."""
    update_cmd = "py -3 update.py" if sys.platform == "win32" else "python3 update.py"
    os.system(update_cmd)
# Build the interactive console menu and block on it.
menu = ConsoleMenu(f"antonstechbot Version {VERSION} by antonstech",
                   "https://git.io/antonsbot")
# NOTE: item variables are suffixed with _item where they previously shadowed
# the functions of the same name defined above (mysqlsetup, mysqldisable, ...).
starten = FunctionItem("Bot starten", run_bot)
config_item = FunctionItem("Config bearbeiten", tokens)
updaten = FunctionItem("Bot Updaten", update_bot)
tokencheck = FunctionItem("Token-Checker", tokenchecker)
infos = FunctionItem("Infos über den Bot&Code", browser)
mysqlsetup_item = FunctionItem("MySQL Setup", mysqlsetup)
mysqldisable_item = FunctionItem("MySQL deaktivieren", mysqldisable)
mysqlenable_item = FunctionItem("MySQL aktivieren", mysqlenable)
submenu = ConsoleMenu("MySQL Menü")
mysqlmenu = SubmenuItem("MySQL Menü", submenu, menu)
updatemenu = ConsoleMenu("Menü um Sachen zu updaten")
updateeee = SubmenuItem("Updaten", updatemenu, menu)
pipupdate = CommandItem("pip Module updaten", "pip install --upgrade --force-reinstall -r requirements.txt")
menu.append_item(starten)
menu.append_item(config_item)
menu.append_item(mysqlmenu)
menu.append_item(updateeee)
menu.append_item(tokencheck)
menu.append_item(infos)
submenu.append_item(mysqlsetup_item)
submenu.append_item(mysqldisable_item)
submenu.append_item(mysqlenable_item)
updatemenu.append_item(updaten)
updatemenu.append_item(pipupdate)
menu.show()
| 36.471503 | 298 | 0.641568 |
e11e4871477974dc938efcc467ec95e8585f0631
| 25,317 |
py
|
Python
|
src/Sephrasto/UI/CharakterInfo.py
|
Ilaris-Tools/Sephrasto
|
8574a5b45da8ebfa5f69a775066fd3136da1c718
|
[
"MIT"
] | 1 |
2022-02-02T16:15:59.000Z
|
2022-02-02T16:15:59.000Z
|
src/Sephrasto/UI/CharakterInfo.py
|
Ilaris-Tools/Sephrasto
|
8574a5b45da8ebfa5f69a775066fd3136da1c718
|
[
"MIT"
] | 1 |
2022-01-14T11:04:19.000Z
|
2022-01-14T11:04:19.000Z
|
src/Sephrasto/UI/CharakterInfo.py
|
lukruh/Sephrasto
|
8574a5b45da8ebfa5f69a775066fd3136da1c718
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'CharakterInfo.ui'
#
# Created by: PyQt5 UI code generator 5.15.6
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    def setupUi(self, Form):
        """Build the widget tree for the CharakterInfo form (auto-generated by pyuic5).

        WARNING: this file is regenerated from CharakterInfo.ui; edits here will
        be lost — change the .ui file instead.
        """
        Form.setObjectName("Form")
        Form.resize(974, 721)
        # Root grid layout of the form.
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setContentsMargins(20, 20, 20, 20)
        self.gridLayout.setHorizontalSpacing(20)
        self.gridLayout.setVerticalSpacing(10)
        self.gridLayout.setObjectName("gridLayout")
        # Column placed at grid (0, 1): settings group box plus EP summary.
        self.verticalLayout_4 = QtWidgets.QVBoxLayout()
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.labelEinstellungen = QtWidgets.QLabel(Form)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.labelEinstellungen.setFont(font)
        self.labelEinstellungen.setObjectName("labelEinstellungen")
        self.verticalLayout_4.addWidget(self.labelEinstellungen)
        # groupBox_3: character settings (checkboxes, house rules, sheet/rule options).
        self.groupBox_3 = QtWidgets.QGroupBox(Form)
        self.groupBox_3.setTitle("")
        self.groupBox_3.setObjectName("groupBox_3")
        self.gridLayout_5 = QtWidgets.QGridLayout(self.groupBox_3)
        self.gridLayout_5.setContentsMargins(20, 20, 20, 20)
        self.gridLayout_5.setObjectName("gridLayout_5")
        self.checkReq = QtWidgets.QCheckBox(self.groupBox_3)
        self.checkReq.setChecked(True)
        self.checkReq.setObjectName("checkReq")
        self.gridLayout_5.addWidget(self.checkReq, 1, 0, 1, 2)
        self.comboHausregeln = QtWidgets.QComboBox(self.groupBox_3)
        self.comboHausregeln.setObjectName("comboHausregeln")
        self.gridLayout_5.addWidget(self.comboHausregeln, 4, 1, 1, 1)
        self.label_5 = QtWidgets.QLabel(self.groupBox_3)
        self.label_5.setObjectName("label_5")
        self.gridLayout_5.addWidget(self.label_5, 4, 0, 1, 1)
        self.label_7 = QtWidgets.QLabel(self.groupBox_3)
        self.label_7.setObjectName("label_7")
        self.gridLayout_5.addWidget(self.label_7, 9, 0, 1, 1)
        self.checkUeberPDF = QtWidgets.QCheckBox(self.groupBox_3)
        self.checkUeberPDF.setObjectName("checkUeberPDF")
        self.gridLayout_5.addWidget(self.checkUeberPDF, 3, 0, 1, 2)
        self.label_6 = QtWidgets.QLabel(self.groupBox_3)
        self.label_6.setObjectName("label_6")
        self.gridLayout_5.addWidget(self.label_6, 6, 0, 1, 1)
        self.checkFinanzen = QtWidgets.QCheckBox(self.groupBox_3)
        self.checkFinanzen.setChecked(True)
        self.checkFinanzen.setObjectName("checkFinanzen")
        self.gridLayout_5.addWidget(self.checkFinanzen, 2, 0, 1, 2)
        self.comboCharsheet = QtWidgets.QComboBox(self.groupBox_3)
        self.comboCharsheet.setObjectName("comboCharsheet")
        self.gridLayout_5.addWidget(self.comboCharsheet, 6, 1, 1, 1)
        # labelReload: yellow warning banner (text set in retranslateUi).
        self.labelReload = QtWidgets.QLabel(self.groupBox_3)
        self.labelReload.setStyleSheet("background-color: rgb(255, 255, 0); color: black;")
        self.labelReload.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.labelReload.setWordWrap(True)
        self.labelReload.setObjectName("labelReload")
        self.gridLayout_5.addWidget(self.labelReload, 11, 0, 1, 2)
        # Rule font size selector with three fixed entries (Klein/Mittel/Groß).
        self.comboRegelnGroesse = QtWidgets.QComboBox(self.groupBox_3)
        self.comboRegelnGroesse.setObjectName("comboRegelnGroesse")
        self.comboRegelnGroesse.addItem("")
        self.comboRegelnGroesse.addItem("")
        self.comboRegelnGroesse.addItem("")
        self.gridLayout_5.addWidget(self.comboRegelnGroesse, 9, 1, 1, 1)
        self.checkRegeln = QtWidgets.QCheckBox(self.groupBox_3)
        self.checkRegeln.setChecked(True)
        self.checkRegeln.setTristate(False)
        self.checkRegeln.setObjectName("checkRegeln")
        self.gridLayout_5.addWidget(self.checkRegeln, 8, 0, 1, 2)
        self.label_10 = QtWidgets.QLabel(self.groupBox_3)
        self.label_10.setObjectName("label_10")
        self.gridLayout_5.addWidget(self.label_10, 10, 0, 1, 1)
        self.listRegelKategorien = QtWidgets.QListView(self.groupBox_3)
        self.listRegelKategorien.setMaximumSize(QtCore.QSize(280, 80))
        self.listRegelKategorien.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.listRegelKategorien.setObjectName("listRegelKategorien")
        self.gridLayout_5.addWidget(self.listRegelKategorien, 10, 1, 1, 1)
        self.verticalLayout_4.addWidget(self.groupBox_3)
        spacerItem = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout_4.addItem(spacerItem)
        # EP section: pairs of read-only spin boxes (points spent / percent).
        self.labelEP = QtWidgets.QLabel(Form)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.labelEP.setFont(font)
        self.labelEP.setObjectName("labelEP")
        self.verticalLayout_4.addWidget(self.labelEP)
        self.groupBox_2 = QtWidgets.QGroupBox(Form)
        self.groupBox_2.setTitle("")
        self.groupBox_2.setObjectName("groupBox_2")
        self.gridLayout_4 = QtWidgets.QGridLayout(self.groupBox_2)
        self.gridLayout_4.setContentsMargins(20, 20, 20, 20)
        self.gridLayout_4.setObjectName("gridLayout_4")
        self.gridLayout_2 = QtWidgets.QGridLayout()
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.spinFertigkeitenSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinFertigkeitenSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinFertigkeitenSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinFertigkeitenSpent.setReadOnly(True)
        self.spinFertigkeitenSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinFertigkeitenSpent.setMaximum(999999)
        self.spinFertigkeitenSpent.setObjectName("spinFertigkeitenSpent")
        self.gridLayout_2.addWidget(self.spinFertigkeitenSpent, 3, 1, 1, 1)
        self.spinUebernatuerlichPercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinUebernatuerlichPercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinUebernatuerlichPercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinUebernatuerlichPercent.setReadOnly(True)
        self.spinUebernatuerlichPercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinUebernatuerlichPercent.setMaximum(100)
        self.spinUebernatuerlichPercent.setObjectName("spinUebernatuerlichPercent")
        self.gridLayout_2.addWidget(self.spinUebernatuerlichPercent, 6, 2, 1, 1)
        self.labelUeber3 = QtWidgets.QLabel(self.groupBox_2)
        self.labelUeber3.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setItalic(False)
        self.labelUeber3.setFont(font)
        self.labelUeber3.setObjectName("labelUeber3")
        self.gridLayout_2.addWidget(self.labelUeber3, 8, 0, 1, 1)
        self.spinProfanPercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinProfanPercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinProfanPercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinProfanPercent.setReadOnly(True)
        self.spinProfanPercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinProfanPercent.setMaximum(100)
        self.spinProfanPercent.setObjectName("spinProfanPercent")
        self.gridLayout_2.addWidget(self.spinProfanPercent, 2, 2, 1, 1)
        self.spinVorteileSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinVorteileSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinVorteileSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinVorteileSpent.setReadOnly(True)
        self.spinVorteileSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinVorteileSpent.setMaximum(99999999)
        self.spinVorteileSpent.setObjectName("spinVorteileSpent")
        self.gridLayout_2.addWidget(self.spinVorteileSpent, 1, 1, 1, 1)
        self.spinAttributeSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinAttributeSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinAttributeSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinAttributeSpent.setReadOnly(True)
        self.spinAttributeSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinAttributeSpent.setMaximum(99999999)
        self.spinAttributeSpent.setObjectName("spinAttributeSpent")
        self.gridLayout_2.addWidget(self.spinAttributeSpent, 0, 1, 1, 1)
        self.spinUeberTalenteSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinUeberTalenteSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinUeberTalenteSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinUeberTalenteSpent.setReadOnly(True)
        self.spinUeberTalenteSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinUeberTalenteSpent.setMaximum(999999)
        self.spinUeberTalenteSpent.setObjectName("spinUeberTalenteSpent")
        self.gridLayout_2.addWidget(self.spinUeberTalenteSpent, 8, 1, 1, 1)
        self.spinFreieSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinFreieSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinFreieSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinFreieSpent.setReadOnly(True)
        self.spinFreieSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinFreieSpent.setMaximum(999999)
        self.spinFreieSpent.setObjectName("spinFreieSpent")
        self.gridLayout_2.addWidget(self.spinFreieSpent, 5, 1, 1, 1)
        self.spinUeberFertigkeitenPercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinUeberFertigkeitenPercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinUeberFertigkeitenPercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinUeberFertigkeitenPercent.setReadOnly(True)
        self.spinUeberFertigkeitenPercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinUeberFertigkeitenPercent.setMaximum(100)
        self.spinUeberFertigkeitenPercent.setObjectName("spinUeberFertigkeitenPercent")
        self.gridLayout_2.addWidget(self.spinUeberFertigkeitenPercent, 7, 2, 1, 1)
        self.label_2 = QtWidgets.QLabel(self.groupBox_2)
        self.label_2.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_2.setFont(font)
        self.label_2.setObjectName("label_2")
        self.gridLayout_2.addWidget(self.label_2, 1, 0, 1, 1)
        self.spinAttributePercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinAttributePercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinAttributePercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinAttributePercent.setReadOnly(True)
        self.spinAttributePercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinAttributePercent.setMaximum(100)
        self.spinAttributePercent.setObjectName("spinAttributePercent")
        self.gridLayout_2.addWidget(self.spinAttributePercent, 0, 2, 1, 1)
        self.spinUeberTalentePercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinUeberTalentePercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinUeberTalentePercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinUeberTalentePercent.setReadOnly(True)
        self.spinUeberTalentePercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinUeberTalentePercent.setMaximum(100)
        self.spinUeberTalentePercent.setObjectName("spinUeberTalentePercent")
        self.gridLayout_2.addWidget(self.spinUeberTalentePercent, 8, 2, 1, 1)
        self.labelUeber1 = QtWidgets.QLabel(self.groupBox_2)
        self.labelUeber1.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.labelUeber1.setFont(font)
        self.labelUeber1.setObjectName("labelUeber1")
        self.gridLayout_2.addWidget(self.labelUeber1, 6, 0, 1, 1)
        self.label_4 = QtWidgets.QLabel(self.groupBox_2)
        self.label_4.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setItalic(False)
        self.label_4.setFont(font)
        self.label_4.setObjectName("label_4")
        self.gridLayout_2.addWidget(self.label_4, 5, 0, 1, 1)
        self.spinUebernatuerlichSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinUebernatuerlichSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinUebernatuerlichSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinUebernatuerlichSpent.setReadOnly(True)
        self.spinUebernatuerlichSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinUebernatuerlichSpent.setMaximum(999999)
        self.spinUebernatuerlichSpent.setObjectName("spinUebernatuerlichSpent")
        self.gridLayout_2.addWidget(self.spinUebernatuerlichSpent, 6, 1, 1, 1)
        self.spinUeberFertigkeitenSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinUeberFertigkeitenSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinUeberFertigkeitenSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinUeberFertigkeitenSpent.setReadOnly(True)
        self.spinUeberFertigkeitenSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinUeberFertigkeitenSpent.setMaximum(999999)
        self.spinUeberFertigkeitenSpent.setObjectName("spinUeberFertigkeitenSpent")
        self.gridLayout_2.addWidget(self.spinUeberFertigkeitenSpent, 7, 1, 1, 1)
        self.spinFreiePercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinFreiePercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinFreiePercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinFreiePercent.setReadOnly(True)
        self.spinFreiePercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinFreiePercent.setMaximum(100)
        self.spinFreiePercent.setObjectName("spinFreiePercent")
        self.gridLayout_2.addWidget(self.spinFreiePercent, 5, 2, 1, 1)
        self.spinFertigkeitenPercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinFertigkeitenPercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinFertigkeitenPercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinFertigkeitenPercent.setReadOnly(True)
        self.spinFertigkeitenPercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinFertigkeitenPercent.setMaximum(100)
        self.spinFertigkeitenPercent.setObjectName("spinFertigkeitenPercent")
        self.gridLayout_2.addWidget(self.spinFertigkeitenPercent, 3, 2, 1, 1)
        self.spinTalentePercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinTalentePercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinTalentePercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinTalentePercent.setReadOnly(True)
        self.spinTalentePercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinTalentePercent.setMaximum(100)
        self.spinTalentePercent.setObjectName("spinTalentePercent")
        self.gridLayout_2.addWidget(self.spinTalentePercent, 4, 2, 1, 1)
        self.spinProfanSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinProfanSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinProfanSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinProfanSpent.setReadOnly(True)
        self.spinProfanSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinProfanSpent.setMaximum(999999)
        self.spinProfanSpent.setObjectName("spinProfanSpent")
        self.gridLayout_2.addWidget(self.spinProfanSpent, 2, 1, 1, 1)
        self.label = QtWidgets.QLabel(self.groupBox_2)
        self.label.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label.setFont(font)
        self.label.setObjectName("label")
        self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
        self.label_9 = QtWidgets.QLabel(self.groupBox_2)
        self.label_9.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setItalic(False)
        self.label_9.setFont(font)
        self.label_9.setObjectName("label_9")
        self.gridLayout_2.addWidget(self.label_9, 4, 0, 1, 1)
        self.labelUeber2 = QtWidgets.QLabel(self.groupBox_2)
        self.labelUeber2.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setItalic(False)
        self.labelUeber2.setFont(font)
        self.labelUeber2.setObjectName("labelUeber2")
        self.gridLayout_2.addWidget(self.labelUeber2, 7, 0, 1, 1)
        self.label_8 = QtWidgets.QLabel(self.groupBox_2)
        self.label_8.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setItalic(False)
        self.label_8.setFont(font)
        self.label_8.setObjectName("label_8")
        self.gridLayout_2.addWidget(self.label_8, 3, 0, 1, 1)
        self.label_3 = QtWidgets.QLabel(self.groupBox_2)
        self.label_3.setMinimumSize(QtCore.QSize(230, 0))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_3.setFont(font)
        self.label_3.setObjectName("label_3")
        self.gridLayout_2.addWidget(self.label_3, 2, 0, 1, 1)
        self.spinTalenteSpent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinTalenteSpent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinTalenteSpent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinTalenteSpent.setReadOnly(True)
        self.spinTalenteSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinTalenteSpent.setMaximum(999999)
        self.spinTalenteSpent.setObjectName("spinTalenteSpent")
        self.gridLayout_2.addWidget(self.spinTalenteSpent, 4, 1, 1, 1)
        self.spinVorteilePercent = QtWidgets.QSpinBox(self.groupBox_2)
        self.spinVorteilePercent.setFocusPolicy(QtCore.Qt.NoFocus)
        self.spinVorteilePercent.setAlignment(QtCore.Qt.AlignCenter)
        self.spinVorteilePercent.setReadOnly(True)
        self.spinVorteilePercent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
        self.spinVorteilePercent.setMaximum(100)
        self.spinVorteilePercent.setObjectName("spinVorteilePercent")
        self.gridLayout_2.addWidget(self.spinVorteilePercent, 1, 2, 1, 1)
        self.gridLayout_4.addLayout(self.gridLayout_2, 0, 0, 1, 1)
        self.verticalLayout_4.addWidget(self.groupBox_2)
        spacerItem1 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.verticalLayout_4.addItem(spacerItem1)
        self.gridLayout.addLayout(self.verticalLayout_4, 0, 1, 1, 1)
        # Column placed at grid (0, 0): free-text notes (labelNotiz + teNotiz).
        self.verticalLayout_3 = QtWidgets.QVBoxLayout()
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.labelNotiz = QtWidgets.QLabel(Form)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.labelNotiz.setFont(font)
        self.labelNotiz.setObjectName("labelNotiz")
        self.verticalLayout_3.addWidget(self.labelNotiz)
        self.groupBox = QtWidgets.QGroupBox(Form)
        self.groupBox.setTitle("")
        self.groupBox.setObjectName("groupBox")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBox)
        self.gridLayout_3.setContentsMargins(20, 20, 20, 20)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.teNotiz = QtWidgets.QPlainTextEdit(self.groupBox)
        self.teNotiz.setPlainText("")
        self.teNotiz.setObjectName("teNotiz")
        self.gridLayout_3.addWidget(self.teNotiz, 0, 0, 1, 1)
        self.verticalLayout_3.addWidget(self.groupBox)
        self.gridLayout.addLayout(self.verticalLayout_3, 0, 0, 1, 1)
        # Qt housekeeping: apply translations, auto-connect slots, set tab order.
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
        Form.setTabOrder(self.teNotiz, self.checkReq)
        Form.setTabOrder(self.checkReq, self.checkFinanzen)
        Form.setTabOrder(self.checkFinanzen, self.checkUeberPDF)
        Form.setTabOrder(self.checkUeberPDF, self.comboHausregeln)
        Form.setTabOrder(self.comboHausregeln, self.comboCharsheet)
        Form.setTabOrder(self.comboCharsheet, self.checkRegeln)
        Form.setTabOrder(self.checkRegeln, self.comboRegelnGroesse)
def retranslateUi(self, Form):
    """Install all user-visible (translatable) texts on the widgets.

    Auto-generated by Qt Designer / pyuic; called once from setupUi and
    again whenever the application language changes.
    """
    _translate = QtCore.QCoreApplication.translate
    Form.setWindowTitle(_translate("Form", "Form"))
    # Character settings section: labels, checkboxes and combo captions.
    self.labelEinstellungen.setText(_translate("Form", "Charakter-Einstellungen"))
    self.checkReq.setToolTip(_translate("Form", "Falls abgewählt, werden sämtliche Voraussetzungsprüfungen für Vorteile, übernatürliche Fertigkeiten usw. deaktiviert."))
    self.checkReq.setText(_translate("Form", "Voraussetzungen überprüfen"))
    self.label_5.setText(_translate("Form", "Hausregeln:"))
    self.label_7.setText(_translate("Form", "Regelschriftgröße:"))
    self.checkUeberPDF.setToolTip(_translate("Form", "<html><head/><body><p>Sephrasto übernimmt automatisch alle übernatürlichen Fertigkeiten in den Charakterbogen, deren FW mindestens 1 beträgt und für welche du mindestens ein Talent aktiviert hast. Wenn du diese Option aktivierst, zeigt Sephrasto eine PDF-Spalte bei den übernatürlichen Fertigkeiten an. Mit dieser kannst du selbst entscheiden, welche Fertigkeiten in den Charakterbogen übernommen werden sollen.</p></body></html>"))
    self.checkUeberPDF.setText(_translate("Form", "PDF-Ausgabe von übernatürlichen Fertigkeiten manuell auswählen"))
    self.label_6.setText(_translate("Form", "Charakterbogen:"))
    self.checkFinanzen.setToolTip(_translate("Form", "<html><head/><body><p>Die Finanzen spielen nur bei einem neuen Charakter eine Rolle und können nach dem ersten Abenteuer ausgeblendet werden. Auch die aktuellen Schicksalspunkte werden dann nicht mehr ausgegeben, da diese ab dem ersten Abenteuer händisch verwaltet werden.</p></body></html>"))
    self.checkFinanzen.setText(_translate("Form", "Finanzen anzeigen und aktuelle Schicksalspunkte ausgeben"))
    self.labelReload.setText(_translate("Form", "Der Charakter muss gespeichert und neu geladen werden, damit alle Änderungen übernommen werden können!"))
    self.comboRegelnGroesse.setItemText(0, _translate("Form", "Klein"))
    self.comboRegelnGroesse.setItemText(1, _translate("Form", "Mittel"))
    self.comboRegelnGroesse.setItemText(2, _translate("Form", "Groß"))
    self.checkRegeln.setText(_translate("Form", "Dem Charakterbogen relevante Ilaris Regeln anhängen"))
    self.label_10.setText(_translate("Form", "Regelkategorien:"))
    # EP distribution section: spin boxes show "<n> EP" spent and "(x %)" shares.
    self.labelEP.setText(_translate("Form", "EP-Verteilung"))
    self.spinFertigkeitenSpent.setSuffix(_translate("Form", " EP"))
    self.spinUebernatuerlichPercent.setSuffix(_translate("Form", " %"))
    self.labelUeber3.setText(_translate("Form", "   Talente"))
    self.spinProfanPercent.setSuffix(_translate("Form", " %"))
    self.spinVorteileSpent.setSuffix(_translate("Form", " EP"))
    self.spinAttributeSpent.setSuffix(_translate("Form", " EP"))
    self.spinUeberTalenteSpent.setSuffix(_translate("Form", " EP"))
    self.spinFreieSpent.setSuffix(_translate("Form", " EP"))
    self.spinUeberFertigkeitenPercent.setSuffix(_translate("Form", " %)"))
    self.spinUeberFertigkeitenPercent.setPrefix(_translate("Form", "("))
    self.label_2.setText(_translate("Form", "Vorteile"))
    self.spinAttributePercent.setSuffix(_translate("Form", " %"))
    self.spinUeberTalentePercent.setSuffix(_translate("Form", " %)"))
    self.spinUeberTalentePercent.setPrefix(_translate("Form", "("))
    self.labelUeber1.setText(_translate("Form", "Übernatürliche Fertigkeiten und Talente"))
    self.label_4.setText(_translate("Form", "   Freie Fertigkeiten"))
    self.spinUebernatuerlichSpent.setSuffix(_translate("Form", " EP"))
    self.spinUeberFertigkeitenSpent.setSuffix(_translate("Form", " EP"))
    self.spinFreiePercent.setSuffix(_translate("Form", " %)"))
    self.spinFreiePercent.setPrefix(_translate("Form", "("))
    self.spinFertigkeitenPercent.setSuffix(_translate("Form", " %)"))
    self.spinFertigkeitenPercent.setPrefix(_translate("Form", "("))
    self.spinTalentePercent.setSuffix(_translate("Form", " %)"))
    self.spinTalentePercent.setPrefix(_translate("Form", "("))
    self.spinProfanSpent.setSuffix(_translate("Form", " EP"))
    self.label.setText(_translate("Form", "Attribute"))
    self.label_9.setText(_translate("Form", "   Talente"))
    self.labelUeber2.setText(_translate("Form", "   Fertigkeiten"))
    self.label_8.setText(_translate("Form", "   Fertigkeiten"))
    self.label_3.setText(_translate("Form", "Profane Fertigkeiten und Talente"))
    self.spinTalenteSpent.setSuffix(_translate("Form", " EP"))
    self.spinVorteilePercent.setSuffix(_translate("Form", " %"))
    # Notes section.
    self.labelNotiz.setText(_translate("Form", "Notiz"))
if __name__ == "__main__":
    # Standalone preview: build a bare QApplication and show the generated
    # form so the layout can be inspected without the full application.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
| 60.566986 | 490 | 0.727258 |
0163df4b94688e25551d1e8cbc3582f32d6b4f39
| 21,716 |
py
|
Python
|
Packs/Ansible_Powered_Integrations/Integrations/Linux/Linux.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799 |
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Packs/Ansible_Powered_Integrations/Integrations/Linux/Linux.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317 |
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Packs/Ansible_Powered_Integrations/Integrations/Linux/Linux.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297 |
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
import json
import traceback
from typing import Dict, cast
import ansible_runner
import demistomock as demisto # noqa: F401
import ssh_agent_setup
from CommonServerPython import * # noqa: F401
# Dict to Markdown Converter adapted from https://github.com/PolBaladas/torsimany/
def dict2md(json_block, depth=0):
    """Render a JSON-like structure (nested dicts/lists) as Markdown text."""
    if isinstance(json_block, dict):
        return parseDict(json_block, depth)
    if isinstance(json_block, list):
        return parseList(json_block, depth)
    # Anything else (scalar) contributes nothing at this level.
    return ""
def parseDict(d, depth):
    """Render a dict as Markdown: containers become headers + sub-blocks,
    scalars become "key: value" list items."""
    pieces = []
    for key in d:
        value = d[key]
        if isinstance(value, (dict, list)):
            pieces.append(addHeader(key, depth))
            pieces.append(dict2md(value, depth + 1))
        else:
            pieces.append(buildValueChain(key, value, depth))
    return "".join(pieces)
def parseList(rawlist, depth):
    """Render a list as Markdown.

    Scalar items are rendered as "index: value" list lines; nested dicts and
    lists are rendered recursively.

    Fixes over the previous version:
    - uses enumerate() instead of rawlist.index(value), which always returned
      the index of the FIRST occurrence and so mislabelled duplicate values;
    - dispatches nested containers through dict2md() instead of parseDict(),
      so a list nested inside a list renders correctly instead of failing
      (parseDict indexes its argument by key, which breaks on lists).
    """
    markdown = ""
    for index, value in enumerate(rawlist):
        if not isinstance(value, (dict, list)):
            markdown += buildValueChain(index, value, depth)
        else:
            markdown += dict2md(value, depth)
    return markdown
def buildHeaderChain(depth):
    """Build a Markdown header line (with 'value' placeholder) for a depth.

    Depth 0 yields "# value #\\n"; deeper levels are prefixed with a list
    bullet and get one extra '#' per level.
    """
    hashes = "#" * (depth + 1)
    bullet = "* " if depth else ""
    return "{}{} value {}\n".format(bullet, hashes, hashes)
def buildValueChain(key, value, depth):
    """Build one "key: value" Markdown list item, indented unless depth == 1.

    Note: the indent test is truthiness of (depth - 1), so depth 0 is also
    indented — preserved behavior of the original.
    """
    indent = "  " if depth - 1 else ""
    return "{}* {}: {}\n".format(indent, key, value)
def addHeader(value, depth):
    """Build a Markdown header whose text is the title-cased value."""
    template = buildHeaderChain(depth)
    return template.replace("value", value.title())
# Remove ansible branding from results
def rec_ansible_key_strip(obj):
    """Recursively strip 'ansible_' from dict keys in a result payload.

    Generalized to also recurse into lists, so dicts nested inside list
    values (common in ansible module results) are stripped as well; the
    previous version returned lists untouched. Scalars pass through
    unchanged.
    """
    if isinstance(obj, dict):
        return {key.replace('ansible_', ''): rec_ansible_key_strip(val) for key, val in obj.items()}
    if isinstance(obj, list):
        return [rec_ansible_key_strip(item) for item in obj]
    return obj
# COMMAND FUNCTIONS
def generic_ansible(integration_name, command, args: Dict[str, Any]) -> CommandResults:
    """Run one ansible module against the configured host(s) via ansible_runner.

    :param integration_name: context-output prefix, e.g. 'linux'
    :param command: name of the ansible module to execute
    :param args: demisto command args; 'host' (list or CSV string) and the
        optional 'concurrency' are consumed here, everything else is passed
        through verbatim as module arguments
    :return: CommandResults with markdown readable output and per-host outputs
    :raises: calls return_error() (which exits) on unreachable/failed hosts
    """
    readable_output = ""
    sshkey = ""
    fork_count = 1   # default to executing against 1 host at a time
    if args.get('concurrency'):
        fork_count = cast(int, args.get('concurrency'))
    # Build an in-memory ansible inventory with one entry per target host.
    inventory: Dict[str, dict] = {}
    inventory['all'] = {}
    inventory['all']['hosts'] = {}
    if type(args['host']) is list:
        # host arg can be a array of multiple hosts
        hosts = args['host']
    else:
        # host arg could also be csv
        hosts = [host.strip() for host in args['host'].split(',')]
    for host in hosts:
        new_host = {}
        new_host['ansible_host'] = host
        if ":" in host:
            # "host:port" form overrides the integration's port parameter
            address = host.split(':')
            new_host['ansible_port'] = address[1]
            new_host['ansible_host'] = address[0]
        else:
            new_host['ansible_host'] = host
            if demisto.params().get('port'):
                new_host['ansible_port'] = demisto.params().get('port')
        # Linux
        # Different credential options
        # SSH Key saved in credential manager selection
        if demisto.params().get('creds', {}).get('credentials').get('sshkey'):
            username = demisto.params().get('creds', {}).get('credentials').get('user')
            sshkey = demisto.params().get('creds', {}).get('credentials').get('sshkey')
            new_host['ansible_user'] = username
        # Password saved in credential manager selection
        elif demisto.params().get('creds', {}).get('credentials').get('password'):
            username = demisto.params().get('creds', {}).get('credentials').get('user')
            password = demisto.params().get('creds', {}).get('credentials').get('password')
            new_host['ansible_user'] = username
            new_host['ansible_password'] = password
        # username/password individually entered
        else:
            username = demisto.params().get('creds', {}).get('identifier')
            password = demisto.params().get('creds', {}).get('password')
            new_host['ansible_user'] = username
            new_host['ansible_password'] = password
        inventory['all']['hosts'][host] = new_host
    module_args = ""
    # build module args list
    for arg_key, arg_value in args.items():
        # skip hardcoded host arg, as it doesn't related to module
        if arg_key == 'host':
            continue
        module_args += "%s=\"%s\" " % (arg_key, arg_value)
    r = ansible_runner.run(inventory=inventory, host_pattern='all', module=command, quiet=True,
                           omit_event_data=True, ssh_key=sshkey, module_args=module_args, forks=fork_count)
    results = []
    for each_host_event in r.events:
        # Troubleshooting
        # demisto.log("%s: %s\n" % (each_host_event['event'], each_host_event))
        if each_host_event['event'] in ["runner_on_ok", "runner_on_unreachable", "runner_on_failed"]:
            # parse results
            # NOTE(review): this assumes the one-line event stdout is of the
            # form "<host> | <STATUS> => {json...}" — the JSON starts at the
            # first '{', host precedes the first '|', status sits between
            # the first separator and the payload.
            result = json.loads('{' + each_host_event['stdout'].split('{', 1)[1])
            host = each_host_event['stdout'].split('|', 1)[0].strip()
            status = each_host_event['stdout'].replace('=>', '|').split('|', 3)[1]
            # if successful build outputs
            if each_host_event['event'] == "runner_on_ok":
                if 'fact' in command:
                    result = result['ansible_facts']
                else:
                    if result.get(command) is not None:
                        result = result[command]
                    else:
                        result.pop("ansible_facts", None)
                result = rec_ansible_key_strip(result)
                if host != "localhost":
                    readable_output += "# %s - %s\n" % (host, status)
                else:
                    # This is integration is not host based
                    readable_output += "# %s\n" % status
                readable_output += dict2md(result)
                # add host and status to result
                result['host'] = host
                result['status'] = status
                results.append(result)
            if each_host_event['event'] == "runner_on_unreachable":
                msg = "Host %s unreachable\nError Details: %s" % (host, result)
                return_error(msg)
            if each_host_event['event'] == "runner_on_failed":
                msg = "Host %s failed running command\nError Details: %s" % (host, result)
                return_error(msg)
    return CommandResults(
        readable_output=readable_output,
        outputs_prefix=integration_name + '.' + command,
        outputs_key_field='',
        outputs=results
    )
# MAIN FUNCTION
def main() -> None:
    """main function, parses params and runs command functions

    :return:
    :rtype:
    """
    # SSH Key integration requires ssh_agent to be running in the background
    ssh_agent_setup.setup()

    # Every 'linux-<name>' command wraps the ansible module '<name>' (with the
    # 'linux-' prefix dropped and dashes mapped to underscores), so a single
    # membership-checked dispatch replaces the former ~100-branch elif chain.
    # Unknown commands fall through silently, exactly as the old chain did.
    supported_modules = frozenset((
        'alternatives', 'at', 'authorized_key', 'capabilities', 'cron', 'cronvar',
        'dconf', 'debconf', 'filesystem', 'firewalld', 'gather_facts', 'gconftool2',
        'getent', 'group', 'hostname', 'interfaces_file', 'iptables', 'java_cert',
        'java_keystore', 'kernel_blacklist', 'known_hosts', 'listen_ports_facts',
        'locale_gen', 'modprobe', 'mount', 'open_iscsi', 'pam_limits', 'pamd',
        'parted', 'pids', 'ping', 'python_requirements_info', 'reboot', 'seboolean',
        'sefcontext', 'selinux', 'selinux_permissive', 'selogin', 'seport',
        'service', 'service_facts', 'setup', 'sysctl', 'systemd', 'sysvinit',
        'timezone', 'ufw', 'user', 'xfs_quota', 'htpasswd', 'supervisorctl',
        'openssh_cert', 'openssh_keypair', 'acl', 'archive', 'assemble',
        'blockinfile', 'file', 'find', 'ini_file', 'iso_extract', 'lineinfile',
        'replace', 'stat', 'synchronize', 'tempfile', 'unarchive', 'xml', 'expect',
        'bower', 'bundler', 'composer', 'cpanm', 'gem', 'maven_artifact', 'npm',
        'pear', 'pip', 'pip_package_info', 'yarn', 'apk', 'apt', 'apt_key',
        'apt_repo', 'apt_repository', 'apt_rpm', 'dpkg_selections', 'flatpak',
        'flatpak_remote', 'homebrew', 'homebrew_cask', 'homebrew_tap', 'layman',
        'package', 'package_facts', 'yum', 'yum_repository', 'zypper',
        'zypper_repository', 'snap', 'redhat_subscription', 'rhn_channel',
        'rhn_register', 'rhsm_release', 'rhsm_repository', 'rpm_key', 'get_url',
    ))

    try:
        command = demisto.command()
        if command == 'test-module':
            # This is the call made when pressing the integration Test button.
            return_results('ok')
        elif command.startswith('linux-'):
            module = command[len('linux-'):].replace('-', '_')
            if module in supported_modules:
                return_results(generic_ansible('linux', module, demisto.args()))
    # Log exceptions and return errors
    except Exception as e:
        demisto.error(traceback.format_exc())  # print the traceback
        return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
# ENTRY POINT
# Demisto/XSOAR executes integration code under these module names.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| 49.020316 | 107 | 0.615399 |
6d95bfbe3fdec9c2f5c5a4abe98999bc24baaa26
| 2,571 |
py
|
Python
|
isj_proj04.py
|
SnasiCze/ISJ
|
2284cb0d53aad5dd0bfc6230224700628be9e454
|
[
"MIT"
] | null | null | null |
isj_proj04.py
|
SnasiCze/ISJ
|
2284cb0d53aad5dd0bfc6230224700628be9e454
|
[
"MIT"
] | null | null | null |
isj_proj04.py
|
SnasiCze/ISJ
|
2284cb0d53aad5dd0bfc6230224700628be9e454
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Vrátí vstupní položku item, pokud tato může být prvkem množiny v Pythonu, v opačném případě frozenset(item)
#""" Pomocí kontroly datových typů se program větví na 2 části return item a retun frozenset(item) """
def can_be_a_set_member_or_frozenset(item):
    """Return item if it can be a set member, otherwise frozenset(item).

    Uses isinstance instead of exact type comparison and additionally
    converts plain sets: per the stated contract (return frozenset(item)
    whenever item cannot be a set member), a set is itself unhashable and
    therefore must also become a frozenset — the previous version returned
    it unchanged.
    """
    if isinstance(item, (list, dict, set)):
        return frozenset(item)
    return item
# Na vstupu dostane seznam a pouze s použitím vestavěných funkcí (tedy bez použití "import") z něj vytvoří seznam, odpovídající množině všech podmnožin, tedy např.
def all_subsets(lst):
    """Return the list of all subsets (as lists) of lst, empty subset first.

    Built with builtins only (the exercise forbids imports): each integer
    0 .. 2**len(lst)-1 is a bitmask selecting which elements belong to the
    subset. Fixed the local variable that shadowed the builtin `list`.
    """
    subsets = []
    for mask in range(1 << len(lst)):
        subsets.append([lst[bit] for bit in range(len(lst)) if mask & (1 << bit)])
    return subsets
#obdoba předchozího, ale při volání dostane prvky seznamu přímo jako argumenty a navíc má volitelný parametr exclude_empty, který, když není ve volání uveden, nebo je jeho hodnota True, vrátí výsledek bez prázdného seznamu. Pokud je hodnota tohoto argumentu False, je výsledek stejný jako u předchozí funkce.
def all_subsets_excl_empty(*args, exclude_empty=True):
    """Return all subsets of the arguments, excluding the empty one by default.

    Elements are passed directly as positional arguments. When exclude_empty
    is False the result equals all_subsets(list(args)). Declaring the flag as
    a keyword parameter (instead of digging through **kwargs) keeps the same
    call patterns working while removing the KeyError risk on stray keyword
    arguments and the duplicated delete logic.
    """
    subsets = all_subsets(list(args))
    if exclude_empty:
        del subsets[0]  # the empty subset is always the first entry
    return subsets
def test():
    """Smoke tests for the three exercises above."""
    # can_be_a_set_member_or_frozenset
    assert can_be_a_set_member_or_frozenset(1) == 1
    assert can_be_a_set_member_or_frozenset((1, 2)) == (1, 2)
    assert can_be_a_set_member_or_frozenset([1, 2]) == frozenset([1, 2])
    # all_subsets
    assert all_subsets(['a', 'b', 'c']) == [
        [], ['a'], ['b'], ['a', 'b'], ['c'], ['a', 'c'], ['b', 'c'], ['a', 'b', 'c']]
    # all_subsets_excl_empty (default and explicit False)
    assert all_subsets_excl_empty('a', 'b', 'c') == [
        ['a'], ['b'], ['a', 'b'], ['c'], ['a', 'c'], ['b', 'c'], ['a', 'b', 'c']]
    # assert all_subsets_excl_empty('a', 'b', 'c', exclude_empty=True) == [['a'], ['b'], ['a', 'b'], ['c'], ['a', 'c'], ['b', 'c'], ['a', 'b', 'c']]
    assert all_subsets_excl_empty('a', 'b', 'c', exclude_empty=False) == [
        [], ['a'], ['b'], ['a', 'b'], ['c'], ['a', 'c'], ['b', 'c'], ['a', 'b', 'c']]
if __name__ == '__main__':
    # Run the self-checks when executed as a script.
    test()
| 48.509434 | 308 | 0.657332 |
6daaf1ed74ecda43692f9d1ac51d1867c61bbbb3
| 1,977 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/accounts/doctype/payment_terms_template/test_payment_terms_template.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
frappe-bench/apps/erpnext/erpnext/accounts/doctype/payment_terms_template/test_payment_terms_template.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/accounts/doctype/payment_terms_template/test_payment_terms_template.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
class TestPaymentTermsTemplate(unittest.TestCase):
    """Validation tests for the Payment Terms Template doctype."""

    def tearDown(self):
        # Remove the fixture each test creates; force=1 skips link checks.
        frappe.delete_doc('Payment Terms Template', '_Test Payment Terms Template For Test', force=1)

    def test_create_template(self):
        """Invoice portions must total 100%: insert fails at 50%, succeeds at 100%."""
        template = frappe.get_doc({
            'doctype': 'Payment Terms Template',
            'template_name': '_Test Payment Terms Template For Test',
            'terms': [{
                'doctype': 'Payment Terms Template Detail',
                'invoice_portion': 50.00,
                'credit_days_based_on': 'Day(s) after invoice date',
                'credit_days': 30
            }]
        })
        # Only 50% allocated so far -> validation error expected.
        self.assertRaises(frappe.ValidationError, template.insert)

        # Add the remaining 50%; the template should now insert cleanly.
        template.append('terms', {
            'doctype': 'Payment Terms Template Detail',
            'invoice_portion': 50.00,
            'credit_days_based_on': 'Day(s) after invoice date',
            'credit_days': 0
        })
        template.insert()

    def test_credit_days(self):
        """Negative credit_days must be rejected."""
        template = frappe.get_doc({
            'doctype': 'Payment Terms Template',
            'template_name': '_Test Payment Terms Template For Test',
            'terms': [{
                'doctype': 'Payment Terms Template Detail',
                'invoice_portion': 100.00,
                'credit_days_based_on': 'Day(s) after invoice date',
                'credit_days': -30
            }]
        })
        self.assertRaises(frappe.ValidationError, template.insert)

    def test_duplicate_terms(self):
        """Two identical term rows must be rejected."""
        template = frappe.get_doc({
            'doctype': 'Payment Terms Template',
            'template_name': '_Test Payment Terms Template For Test',
            'terms': [
                {
                    'doctype': 'Payment Terms Template Detail',
                    'invoice_portion': 50.00,
                    'credit_days_based_on': 'Day(s) after invoice date',
                    'credit_days': 30
                },
                {
                    'doctype': 'Payment Terms Template Detail',
                    'invoice_portion': 50.00,
                    'credit_days_based_on': 'Day(s) after invoice date',
                    'credit_days': 30
                }
            ]
        })
        self.assertRaises(frappe.ValidationError, template.insert)
| 27.082192 | 95 | 0.684876 |
e927e0e39d4cf38d69e685057632d7dde672ee04
| 12,024 |
py
|
Python
|
layouts/community/ergodox/algernon/tools/log-to-heatmap.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | 2 |
2019-05-13T05:19:02.000Z
|
2021-11-29T09:07:43.000Z
|
layouts/community/ergodox/algernon/tools/log-to-heatmap.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
layouts/community/ergodox/algernon/tools/log-to-heatmap.py
|
fzf/qmk_toolbox
|
10d6b425bd24b45002555022baf16fb11254118b
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
import json
import os
import sys
import re
import argparse
import time
from math import floor
from os.path import dirname
from subprocess import Popen, PIPE, STDOUT
from blessings import Terminal
class Heatmap(object):
    """Accumulates key-press counts and renders them onto a keyboard-layout
    heatmap JSON (see heatmap-layout.<layout>.json next to this script)."""

    # coords[row][col] -> [block, index] location of that physical key inside
    # the layout JSON; empty entries mark positions with no key.
    coords = [
        [
            # Row 0
            [ 4, 0], [ 4, 2], [ 2, 0], [ 1, 0], [ 2, 2], [ 3, 0], [ 3, 2],
            [ 3, 4], [ 3, 6], [ 2, 4], [ 1, 2], [ 2, 6], [ 4, 4], [ 4, 6],
        ],
        [
            # Row 1
            [ 8, 0], [ 8, 2], [ 6, 0], [ 5, 0], [ 6, 2], [ 7, 0], [ 7, 2],
            [ 7, 4], [ 7, 6], [ 6, 4], [ 5, 2], [ 6, 6], [ 8, 4], [ 8, 6],
        ],
        [
            # Row 2
            [12, 0], [12, 2], [10, 0], [ 9, 0], [10, 2], [11, 0], [       ],
            [       ], [11, 2], [10, 4], [ 9, 2], [10, 6], [12, 4], [12, 6],
        ],
        [
            # Row 3
            [17, 0], [17, 2], [15, 0], [14, 0], [15, 2], [16, 0], [13, 0],
            [13, 2], [16, 2], [15, 4], [14, 2], [15, 6], [17, 4], [17, 6],
        ],
        [
            # Row 4
            [20, 0], [20, 2], [19, 0], [18, 0], [19, 2], [], [], [], [],
            [19, 4], [18, 2], [19, 6], [20, 4], [20, 6], [], [], [], []
        ],
        [
            # Row 5
            [       ], [23, 0], [22, 2], [22, 0], [22, 4], [21, 0], [21, 2],
            [24, 0], [24, 2], [25, 0], [25, 4], [25, 2], [26, 0], [       ],
        ],
    ]
def set_attr_at(self, block, n, attr, fn, val):
    """Update attribute `attr` of heatmap[block][n] to fn(previous, val);
    `previous` is None when the attribute does not exist yet."""
    target = self.heatmap[block][n]
    previous = target[attr] if attr in target else None
    target[attr] = fn(previous, val)
def coord(self, col, row):
    """Return the [block, index] layout location for key position (col, row)."""
    return self.coords[row][col]
@staticmethod
def set_attr(orig, new):
    # Replacement policy for set_attr_at: discard the old value entirely.
    return new
def set_bg(self, coords, color):
    """Set the background color ("c" attribute) of the key at coords = (block, index)."""
    (block, n) = coords
    self.set_attr_at(block, n, "c", self.set_attr, color)
    #self.set_attr_at(block, n, "g", self.set_attr, False)
def set_tap_info(self, coords, count, cap):
    """Append the tap share (count/cap as a percentage) to the key's label.

    The label lives at index n+1 in the block; it is padded with newlines
    to five lines so the percentage always lands on the last line.
    """
    (block, n) = coords
    def _set_tap_info(o, _count, _cap):
        ns = 4 - o.count ("\n")
        return o + "\n" * ns + "%.02f%%" % (float(_count) / float(_cap) * 100)
    if not cap:
        cap = 1  # avoid division by zero when nothing was logged
    self.heatmap[block][n + 1] = _set_tap_info (self.heatmap[block][n + 1], count, cap)
@staticmethod
def heatmap_color (v):
    """Map v in [0, 1] onto a blue -> green -> yellow -> red gradient and
    return it as an '#rrggbb' color string; values outside [0, 1] clamp to
    the endpoint colors."""
    stops = [[0.3, 0.3, 1], [0.3, 1, 0.3], [1, 1, 0.3], [1, 0.3, 0.3]]
    frac = 0
    if v <= 0:
        lo = hi = 0
    elif v >= 1:
        lo = hi = len(stops) - 1
    else:
        scaled = v * (len(stops) - 1)
        lo = int(floor(scaled))
        hi = lo + 1
        frac = scaled - float(lo)
    channels = []
    for c in range(3):
        mixed = (stops[hi][c] - stops[lo][c]) * frac + stops[lo][c]
        channels.append(int(mixed * 255))
    return "#%02x%02x%02x" % (channels[0], channels[1], channels[2])
def __init__(self, layout):
    """Start with an empty press log for the given layout name."""
    self.log = {}          # (col, row) -> press count
    self.total = 0         # total presses recorded
    self.max_cnt = 0       # highest single-key count (gradient cap)
    self.layout = layout   # selects heatmap-layout.<layout>.json
def update_log(self, coords):
    """Record one key event at (col, row) and keep the running total and
    the per-key maximum up to date."""
    (c, r) = coords
    self.log[(c, r)] = self.log.get((c, r), 0) + 1
    self.total += 1
    if self.log[(c, r)] > self.max_cnt:
        self.max_cnt = self.log[(c, r)]
def get_heatmap(self):
    """Load the layout template JSON (next to the script) and colour it
    according to the recorded tap log; returns the coloured structure."""
    layout_path = "%s/heatmap-layout.%s.json" % (dirname(sys.argv[0]), self.layout)
    with open(layout_path, "r") as f:
        self.heatmap = json.load(f)
    # Reset every real key to the neutral background colour first.
    for row in self.coords:
        for cell in row:
            if cell != []:
                self.set_bg(cell, "#d9dae0")
    # max_cnt does not change inside the loop, so normalise once;
    # guard against an empty log (max_cnt == 0).
    cap = self.max_cnt or 1
    for (c, r) in self.log:
        cell = self.coord(c, r)
        v = float(self.log[(c, r)]) / cap
        self.set_bg(cell, self.heatmap_color(v))
        self.set_tap_info(cell, self.log[(c, r)], self.total)
    return self.heatmap
def get_stats(self):
    """Aggregate the tap log into per-hand / per-finger usage percentages.

    Returns a dict: {"total-keys": N, "hands": {"left"/"right":
    {"usage": %, "fingers": {...}}}}. Rows/columns follow the keyboard
    matrix used by update_log; row 5 and the (4,4)/(9,4) keys are the
    thumb clusters.
    """
    # usage[hand][finger]: hand 0 = left, 1 = right.
    # Finger index 0..4 means pinky..thumb on the left hand and
    # thumb..pinky on the right hand (mirrored).
    usage = [
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
    ]
    # Column -> finger index (same table for both hands).
    finger_map = [0, 0, 1, 2, 3, 3, 3, 1, 1, 1, 2, 3, 4, 4]
    for (c, r), taps in self.log.items():
        if r == 5 or (r == 4 and c in (4, 9)):
            # Thumb cluster / bottom-row thumb keys: columns <= 6 belong
            # to the left half, the rest to the right half.
            if c <= 6:
                usage[0][4] += taps
            else:
                usage[1][0] += taps
        else:
            hand = 1 if c >= 7 else 0
            usage[hand][finger_map[c]] += taps
    hand_usage = [sum(usage[0]), sum(usage[1])]
    total = self.total or 1   # avoid division by zero on an empty log
    stats = {
        "total-keys": total,
        "hands": {
            "left": {
                "usage": round(float(hand_usage[0]) / total * 100, 2),
                "fingers": {
                    "pinky": 0,
                    "ring": 0,
                    "middle": 0,
                    "index": 0,
                    "thumb": 0,
                }
            },
            "right": {
                "usage": round(float(hand_usage[1]) / total * 100, 2),
                "fingers": {
                    "thumb": 0,
                    "index": 0,
                    "middle": 0,
                    "ring": 0,
                    "pinky": 0,
                }
            },
        }
    }
    hand_names = ['left', 'right']
    finger_names = ['pinky', 'ring', 'middle', 'index', 'thumb',
                    'thumb', 'index', 'middle', 'ring', 'pinky']
    for h, hand in enumerate(usage):
        for f, taps in enumerate(hand):
            stats['hands'][hand_names[h]]['fingers'][finger_names[f + h * 5]] = \
                round(float(taps) / total * 100, 2)
    return stats
# Write one heatmap JSON per layer into out_dir and print a per-hand /
# per-finger usage table to the terminal (uses the third-party
# `blessings` Terminal for styling, imported at the top of the file).
def dump_all(out_dir, heatmaps):
stats = {}
t = Terminal()
t.clear()
# Clear screen + move cursor home (raw ANSI, independent of blessings).
sys.stdout.write("\x1b[2J\x1b[H")
print ('{t.underline}{outdir}{t.normal}\n'.format(t=t, outdir=out_dir))
keys = list(heatmaps.keys())
keys.sort()
for layer in keys:
# Skip layers with no recorded taps at all.
if len(heatmaps[layer].log) == 0:
continue
with open ("%s/%s.json" % (out_dir, layer), "w") as f:
json.dump(heatmaps[layer].get_heatmap(), f)
stats[layer] = heatmaps[layer].get_stats()
left = stats[layer]['hands']['left']
right = stats[layer]['hands']['right']
# total-keys counts both press and release events, hence the / 2
# to report actual taps.
print ('{t.bold}{layer}{t.normal} ({total:,} taps):'.format(t=t, layer=layer,
total=int(stats[layer]['total-keys'] / 2)))
print (('{t.underline} | ' + \
'left ({l[usage]:6.2f}%) | ' + \
'right ({r[usage]:6.2f}%) |{t.normal}').format(t=t, l=left, r=right))
print ((' {t.bright_magenta}pinky{t.white} | {left[pinky]:6.2f}% | {right[pinky]:6.2f}% |\n' + \
' {t.bright_cyan}ring{t.white} | {left[ring]:6.2f}% | {right[ring]:6.2f}% |\n' + \
' {t.bright_blue}middle{t.white} | {left[middle]:6.2f}% | {right[middle]:6.2f}% |\n' + \
' {t.bright_green}index{t.white} | {left[index]:6.2f}% | {right[index]:6.2f}% |\n' + \
' {t.bright_red}thumb{t.white} | {left[thumb]:6.2f}% | {right[thumb]:6.2f}% |\n' + \
'').format(left=left['fingers'], right=right['fingers'], t=t))
def process_line(line, heatmaps, opts, stamped_log = None):
    """Parse one keylogger line and feed it into the matching layer's
    heatmap.

    Returns True when the line was a key event at an allowed position,
    False otherwise. When `stamped_log` is given, the raw line is also
    appended to it (key events get a timestamp prefix).
    """
    m = re.search(r'KL: col=(\d+), row=(\d+), pressed=(\d+), layer=(.*)', line)
    if m is None:
        return False
    if stamped_log is not None:
        if line.startswith("KL:"):
            print("%10.10f %s" % (time.time(), line),
                  file=stamped_log, end='')
        else:
            print(line,
                  file=stamped_log, end='')
        stamped_log.flush()
    # The captured "row=" value is used as the first coordinate and
    # "col=" as the second — presumably the firmware reports the matrix
    # transposed relative to coord()/setup_allowed_keys; verify against
    # the device before changing.
    key = (int(m.group(2)), int(m.group(1)))
    layer = m.group(4)
    if key not in opts.allowed_keys:
        return False
    heatmaps[layer].update_log(key)
    return True
def setup_allowed_keys(opts):
    """Build the map of (col, row) positions that process_line accepts.

    With --only-key options, only those positions are allowed; otherwise
    the full 14x6 matrix is allowed minus any --ignore-key positions.
    Malformed "c,r" strings are silently skipped, as before.
    """
    incmap = {}
    if len(opts.only_key):
        for v in opts.only_key:
            m = re.search(r'(\d+),(\d+)', v)
            if not m:
                continue
            incmap[(int(m.group(1)), int(m.group(2)))] = True
    else:
        for r in range(0, 6):
            for c in range(0, 14):
                incmap[(c, r)] = True
        for v in opts.ignore_key:
            m = re.search(r'(\d+),(\d+)', v)
            if not m:
                continue
            # BUG FIX: was `del incmap[...]`, which raised KeyError when
            # an --ignore-key was outside the matrix or given twice.
            incmap.pop((int(m.group(1)), int(m.group(2))), None)
    return incmap
def main(opts):
    """Run the heatmap collector: optionally replay the previous
    stamped-log, then stream key events from stdin, dumping stats every
    `opts.dump_interval` accepted events (and once at EOF)."""
    heatmaps = {"Dvorak": Heatmap("Dvorak"),
                "ADORE": Heatmap("ADORE")
                }
    cnt = 0
    out_dir = opts.outdir
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    opts.allowed_keys = setup_allowed_keys(opts)
    if not opts.one_shot:
        # Best-effort replay of the previous session's log.
        # BUG FIX: was a bare `except:` which also swallowed
        # KeyboardInterrupt and SystemExit; a missing or unreadable
        # history file is still non-fatal.
        try:
            with open("%s/stamped-log" % out_dir, "r") as f:
                for line in f:
                    process_line(line, heatmaps, opts)
        except Exception:
            pass
        stamped_log = open("%s/stamped-log" % (out_dir), "a+")
    else:
        stamped_log = None
    while True:
        line = sys.stdin.readline()
        if not line:
            break
        if not process_line(line, heatmaps, opts, stamped_log):
            continue
        cnt = cnt + 1
        if opts.dump_interval != -1 and cnt >= opts.dump_interval and not opts.one_shot:
            cnt = 0
            dump_all(out_dir, heatmaps)
    dump_all(out_dir, heatmaps)
# Command-line entry point: parse options and hand off to main().
if __name__ == "__main__":
parser = argparse.ArgumentParser (description = "keylog to heatmap processor")
parser.add_argument ('outdir', action = 'store',
help = 'Output directory')
parser.add_argument ('--dump-interval', dest = 'dump_interval', action = 'store', type = int,
default = 100, help = 'Dump stats and heatmap at every Nth event, -1 for dumping at EOF only')
parser.add_argument ('--ignore-key', dest = 'ignore_key', action = 'append', type = str,
default = [], help = 'Ignore the key at position (x, y)')
parser.add_argument ('--only-key', dest = 'only_key', action = 'append', type = str,
default = [], help = 'Only include key at position (x, y)')
parser.add_argument ('--one-shot', dest = 'one_shot', action = 'store_true',
help = 'Do not load previous data, and do not update it, either.')
args = parser.parse_args()
# --ignore-key and --only-key contradict each other; refuse both.
if len(args.ignore_key) and len(args.only_key):
print ("--ignore-key and --only-key are mutually exclusive, please only use one of them!",
file = sys.stderr)
sys.exit(1)
main(args)
| 34.852174 | 140 | 0.435213 |
3a6f2d5646fb0a5ff9edfc84c3466099d4ce9a92
| 1,208 |
py
|
Python
|
src/_abc247.py
|
nullputra/nlptr-lib
|
618c2c74c3d3033bae04de9ba9a253bb6eb31a6c
|
[
"CC0-1.0"
] | null | null | null |
src/_abc247.py
|
nullputra/nlptr-lib
|
618c2c74c3d3033bae04de9ba9a253bb6eb31a6c
|
[
"CC0-1.0"
] | null | null | null |
src/_abc247.py
|
nullputra/nlptr-lib
|
618c2c74c3d3033bae04de9ba9a253bb6eb31a6c
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/python3
# Competitive-programming scaffold for AtCoder ABC247: one region per
# task (A-F). Only region A is active; B-F hold the same template but
# are disabled by wrapping them in a ''' ... ''' string literal until
# they are worked on.
# region abc247 A.
# '''
# verification-helper: PROBLEM https://atcoder.jp/contests/abc247/tasks/abc247_a
import sys
n = int(input())
a = list(map(int, input().split()))
print(*a)
# '''
# endregion
# region abc247 B.
'''
# verification-helper: PROBLEM https://atcoder.jp/contests/abc247/tasks/abc247_b
import sys
n = int(input())
a = list(map(int, input().split()))
print(*a)
# '''
# endregion
# region abc247 C.
'''
# verification-helper: PROBLEM https://atcoder.jp/contests/abc247/tasks/abc247_c
import sys
n = int(input())
a = list(map(int, input().split()))
print(*a)
# '''
# endregion
# region abc247 D.
'''
# verification-helper: PROBLEM https://atcoder.jp/contests/abc247/tasks/abc247_d
import sys
n = int(input())
a = list(map(int, input().split()))
print(*a)
# '''
# endregion
# region abc247 E.
'''
# verification-helper: PROBLEM https://atcoder.jp/contests/abc247/tasks/abc247_e
import sys
n = int(input())
a = list(map(int, input().split()))
print(*a)
# '''
# endregion
# region abc247 F.
'''
# verification-helper: PROBLEM https://atcoder.jp/contests/abc247/tasks/abc247_f
import sys
n = int(input())
a = list(map(int, input().split()))
print(*a)
# '''
# endregion
| 18.029851 | 80 | 0.671358 |
c9650e519bb85b460822b72024b2f67c193c1505
| 3,651 |
py
|
Python
|
Course_1/Week_02/2_ClosestPairs.py
|
KnightZhang625/Stanford_Algorithm
|
7dacbbfa50e7b0e8380cf500df24af60cb9f42df
|
[
"Apache-2.0"
] | null | null | null |
Course_1/Week_02/2_ClosestPairs.py
|
KnightZhang625/Stanford_Algorithm
|
7dacbbfa50e7b0e8380cf500df24af60cb9f42df
|
[
"Apache-2.0"
] | 1 |
2020-07-16T08:03:22.000Z
|
2020-07-16T08:09:34.000Z
|
Course_1/Week_02/2_ClosestPairs.py
|
KnightZhang625/Stanford_Algorithm
|
7dacbbfa50e7b0e8380cf500df24af60cb9f42df
|
[
"Apache-2.0"
] | null | null | null |
"""Merge Sort"""
def merge(array_left, array_right):
    """Merge two already-sorted lists into one sorted list."""
    merged = []
    i = j = 0
    while i < len(array_left) and j < len(array_right):
        if array_left[i] < array_right[j]:
            merged.append(array_left[i])
            i += 1
        else:
            merged.append(array_right[j])
            j += 1
    # At most one of these slices is non-empty; extending with an empty
    # slice is a no-op, so no guards are needed.
    merged.extend(array_left[i:])
    merged.extend(array_right[j:])
    return merged
def sort(array):
    """Recursive merge sort; returns a sorted list (sub-lists of length
    < 2 are returned as-is).

    BUG FIX: removed the leftover debug `print(array)`, which dumped
    every sub-list on every recursive call.
    """
    if len(array) < 2:
        return array
    mid = len(array) // 2
    array_left = sort(array[: mid])
    array_right = sort(array[mid :])
    return merge(array_left, array_right)
"""Point Object"""
class Point(object):
    """2-D point with write-once attributes; ordered and compared by x only.

    Note: defining __eq__ without __hash__ makes instances unhashable
    (callers only keep them in lists, so this is preserved as-is).
    """
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __setattr__(self, name, value):
        # Write-once: re-assigning an existing attribute is an error.
        if name in self.__dir__():
            raise ValueError('{} exists'.format(name))
        return super().__setattr__(name, value)

    def __getattr__(self, name):
        # __getattr__ only runs after normal lookup has already failed,
        # so the attribute is genuinely missing.
        # BUG FIX: the message said "nit exists", and the fall-through
        # `return super().__getattr__(name)` was broken — object has no
        # __getattr__, so it raised its own AttributeError.
        raise AttributeError('{} not exists'.format(name))

    def __str__(self):
        return '({},{})'.format(self.x, self.y)

    def __gt__(self, others):
        # NOTE(review): raising ValueError (rather than returning
        # NotImplemented) for non-Point operands is kept for
        # backward compatibility.
        if isinstance(others, Point):
            return self.x > others.x
        else:
            raise ValueError

    def __eq__(self, others):
        if isinstance(others, Point):
            return self.x == others.x
        else:
            raise ValueError
"""Euclidean Distance"""
calculate_distance = lambda p1, p2: (p1.x - p2.x) **2 + (p1.y - p2.y) **2
"""Closest Pairs"""
def brute_force(points):
    """Closest pair by exhaustive O(n^2) search; returns
    (squared_distance, [p, q]).

    BUG FIX: the original (a) always measured points[0] vs points[1]
    instead of points[i] vs points[j], (b) started the inner loop at 1,
    so it compared points with themselves, and (c) kept the *largest*
    distance (`distance > best`) — i.e. it returned the farthest pair,
    while its only caller (closestPair) needs the closest one.
    """
    if len(points) == 2:
        return calculate_distance(points[0], points[1]), points
    best = None
    best_pair = None
    for i in range(len(points) - 1):
        for j in range(i + 1, len(points)):
            p1, p2 = points[i], points[j]
            distance = calculate_distance(p1, p2)
            if best is None or distance < best:
                best = distance
                best_pair = [p1, p2]
    return best, best_pair
# Check for a closer pair that straddles the dividing x line.
# px: points sorted by x; py: points sorted by y; delta: best squared
# distance found in the two halves. Returns (best, pair-or-None).
def closestSplitPair(px, py, delta):
mid_idx = len(px) // 2
x_bar = px[mid_idx]
# Candidates inside the vertical strip around x_bar, still y-sorted.
# NOTE(review): delta is a *squared* distance but is compared against a
# raw coordinate difference here; the classic algorithm uses the actual
# distance (sqrt) for the strip width — verify before relying on exact
# results.
candidates = [x for i, x in enumerate(py) if abs(x_bar.x - x.x) <= delta]
best = delta
best_pair = None
for i in range(len(candidates)-1):
# Classic closest-pair bound: only the next <= 7 y-neighbours can
# possibly be closer than delta.
for j in range(1, min(7, len(candidates)-i)):
p, q = candidates[i], candidates[i+j]
distance = calculate_distance(p, q)
if distance < best:
best = distance
best_pair = [p, q]
return best, best_pair
def closestPair(points_x, points_y):
    """Divide-and-conquer closest pair.

    points_x / points_y are the same point set sorted by x and by y.
    Returns (best_squared_distance, [p, q]).
    """
    assert len(points_x) == len(points_y)
    if len(points_x) <= 3:
        # BUG FIX: the original called brute_force(points), silently
        # picking up the *global* `points` list from the driver instead
        # of this recursion's slice.
        return brute_force(points_x)
    mid = len(points_x) // 2
    LX = points_x[: mid]
    RX = points_x[mid :]
    LY = []
    RY = []
    mid_x = points_x[mid].x
    # Crucial step: partition the y-sorted list around mid_x so that
    # each half's y ordering is preserved without re-sorting.
    for py in points_y:
        if py.x < mid_x:
            LY.append(py)
        else:
            RY.append(py)
    d_l, p_l = closestPair(LX, LY)
    d_r, p_r = closestPair(RX, RY)
    if d_l < d_r:
        delta, best_pair = d_l, p_l
    else:
        delta, best_pair = d_r, p_r
    # A pair straddling the divide may still beat both halves.
    best, best_p = closestSplitPair(points_x, points_y, delta)
    if best_p is not None:
        best_pair = best_p
    return best, best_pair
# Driver: run the closest-pair search on a fixed sample point set.
if __name__ == '__main__':
import copy
# array = [5, 2, 1, 6, 7, 12, 20, 15]
# sorted_array = sort(array)
# print(sorted_array)
points = [Point(1, 2), Point(100, 200), Point(1000, 2000),
Point(150, 2000.1), Point(100.1, 250), Point(123, 567),
Point(12, 65), Point(3, 100), Point(20.1, 3),
Point(50.1, 100.1), Point(1.1, 2.2), Point(5, 6),
Point(8, 12), Point(20, 30), Point(50, 60)]
points_copy = copy.deepcopy(points)
# The algorithm needs the same set pre-sorted by x and by y.
points_x = sorted(points, key=lambda p: p.x)
points_y = sorted(points, key=lambda p: p.y)
best, best_pair = closestPair(points_x, points_y)
print(best)
print(best_pair[0], best_pair[1])
| 24.019737 | 89 | 0.654889 |
a39740778fe6a56d808c94b9017a9ee68a2d916e
| 2,450 |
py
|
Python
|
python/coursera_python/MICHIGAN/web/2/dict_fin.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 16 |
2018-11-26T08:39:42.000Z
|
2019-05-08T10:09:52.000Z
|
python/coursera_python/MICHIGAN/web/2/dict_fin.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 8 |
2020-05-04T06:29:26.000Z
|
2022-02-12T05:33:16.000Z
|
python/coursera_python/MICHIGAN/web/2/dict_fin.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 5 |
2020-02-11T16:02:21.000Z
|
2021-02-05T07:48:30.000Z
|
# Word harvester: repeatedly pulls unvisited links (flag = 1) from the
# `data` table of wiki2.sqlite, fetches each page, and stores every
# "clean" whitespace-separated token into the `dict` table. Visited or
# failed links are marked flag = 2.
import urllib.request, urllib.parse, urllib.error
# http://www.py4e.com/code3/bs4.zip
# and unzip it in the same directory as this file
from urllib.request import urlopen
import re
from bs4 import BeautifulSoup
import ssl
import sqlite3
conn = sqlite3.connect('wiki2.sqlite')
cur = conn.cursor()
cur.executescript('''
CREATE TABLE IF NOT EXISTS dict (
word TEXT UNIQUE PRIMARY KEY
);
''')
fhand=''
comm = 0
#print(list_link)
#for link_T in list_link:
# print(link_T)
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
#url = input('Enter - ')
#html = urlopen(url, context=ctx).read()
# html.parser is the HTML parser included in the standard Python 3 library.
# information on other HTML parsers is here:
# http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser
#soup = BeautifulSoup(html, "html.parser")
# Any token containing one of these substrings is rejected as "junk"
# (URLs, markup, digits, punctuation).
arr_junk =['http:','https:','/','<','>','=','1','2','3','4','5','6','7','8','9','0','\'','\"','}','{',']','[','(',')',':','-','+','!','~','|','\\','*','?',';','_','.','#','$','@','%','^','&','`']
cdummy = 0
dummy = 0
# Outer loop re-queries for still-unvisited links on every pass.
for i in range(100000):
list_link = cur.execute(''' SELECT link FROM data where flag = ?''',(1,))
for tlink in list_link:
print(tlink)
# tlink is a 1-tuple row; join() flattens it to the bare URL string.
tlink1 = ''.join(tlink)
print(tlink1)
dummy = 0
try:
fhand = urllib.request.urlopen(tlink1)
dummy = 1
except:
# NOTE(review): bare except also catches KeyboardInterrupt.
print("Sorry Link cannot be opened!",tlink1)
cur.execute('''UPDATE data SET flag = 2 WHERE link = ?''',(tlink1,))
continue
if dummy == 1: #link extracted sucessfully
print("Extracting words in the link .... : ",tlink1)
for line in fhand:
big_junk=line.decode().strip().split(' ')
for junk in big_junk:
flag=1
for needle in arr_junk:
if needle in junk:
flag=0
continue
if ',' in junk:
com_pos = junk.find(',') # comma postion
ext_wrd = junk[:com_pos] # to extract word
else:
ext_wrd = junk
if flag==1:
#commit_Var = commit_Var + 1
if ext_wrd != '':
#print(ext_wrd)
ex_wrd_l = ext_wrd.lower()
print(ex_wrd_l)
cur.execute('''INSERT OR IGNORE INTO dict (word)
VALUES ( ? )''', ( ex_wrd_l, ) )
cur.execute('''UPDATE data SET flag = 2 WHERE link = ?''',(tlink1,))
cdummy = cdummy + 1
# Commit in batches of 20 pages to limit transaction overhead.
if cdummy % 20 == 0:
conn.commit()
conn.commit()
#print("Var comm = ",comm)
| 25.520833 | 196 | 0.600816 |
6e29068ed65ac299fd2f3be4dbe36b67f90ed76a
| 777 |
py
|
Python
|
Python/Exercícios_Python/036_analisando_triângulo.py
|
vdonoladev/aprendendo-programacao
|
83abbcd6701b2105903b28fd549738863418cfb8
|
[
"MIT"
] | null | null | null |
Python/Exercícios_Python/036_analisando_triângulo.py
|
vdonoladev/aprendendo-programacao
|
83abbcd6701b2105903b28fd549738863418cfb8
|
[
"MIT"
] | null | null | null |
Python/Exercícios_Python/036_analisando_triângulo.py
|
vdonoladev/aprendendo-programacao
|
83abbcd6701b2105903b28fd549738863418cfb8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""036 - Analisando Triângulo
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1oQmCX-HLeaqxWxaz9zsjj-5Na35XtNXM
"""
print('*' * 54)
print('------- Condição de existência de um triângulo -------'.upper())
print('*' * 54)
r1 = float(input('Informe o comprimento da 1ª reta: '))
r2 = float(input('Informe o comprimento da 2ª reta: '))
r3 = float(input('Informe o comprimento da 3ª reta: '))
sit_1 = ((r2 - r3) < r1 < (r2 + r3))
sit_2 = ((r1 - r3) < r2 < (r1 + r3))
sit_3 = ((r1 - r2) < r3 < (r1 + r2))
if (sit_1 and sit_2 and sit_3):
print('PARABÉNS! É possível formar um triângulo com essas retas!')
else:
print('DESCULPA. Não é possível formar um triângulo com essas retas.')
| 31.08 | 77 | 0.65251 |
6c207a740fe5051fe7eeec344f04e986def3816d
| 875 |
py
|
Python
|
listings/chapter04/month_calendar.py
|
SaschaKersken/Daten-Prozessanalyse
|
370f07a75b9465329deb3671adbfbef8483f76f6
|
[
"Apache-2.0"
] | 2 |
2021-09-20T06:16:41.000Z
|
2022-01-17T14:24:43.000Z
|
listings/chapter04/month_calendar.py
|
SaschaKersken/Daten-Prozessanalyse
|
370f07a75b9465329deb3671adbfbef8483f76f6
|
[
"Apache-2.0"
] | null | null | null |
listings/chapter04/month_calendar.py
|
SaschaKersken/Daten-Prozessanalyse
|
370f07a75b9465329deb3671adbfbef8483f76f6
|
[
"Apache-2.0"
] | null | null | null |
from datetime import date
from sys import argv

# Month-calendar printer (German weekday headers). Usage:
#   month_calendar.py [year month [day]]
# With no arguments, today's month is shown and today's day is boxed.
if len(argv) > 2:
    year = int(argv[1])
    month = int(argv[2])
    day = int(argv[3]) if len(argv) > 3 else 1
else:
    today = date.today()
    day, month, year = today.day, today.month, today.year

first_of_month = date(year, month, 1)
weekday = first_of_month.weekday()  # 0 = Monday

month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
# Gregorian leap-year rule: divisible by 400, or by 4 but not by 100.
if year % 400 == 0 or (year % 4 == 0 and year % 100 != 0):
    month_lengths[1] = 29

print("{:02d}/{}".format(month, year))
print(" Mo Di Mi Do Fr Sa So")
# Indent the first week so day 1 lands under its weekday column
# (each day cell is 4 characters wide).
print(' ' * weekday * 4, end = '')
for d in range(1, month_lengths[month - 1] + 1):
    if d == day:
        print("[{:2d}]".format(d), end = '')
    else:
        print(" {:2d} ".format(d), end = '')
    weekday += 1
    if weekday % 7 == 0:
        weekday = 0
        print()
print()
| 24.305556 | 64 | 0.536 |
6c515bcc0378b767f2ca3dc2a2f5830efe8fce57
| 11,500 |
py
|
Python
|
src/using_tips/using_tips_2.py
|
HuangHuaBingZiGe/GitHub-Demo
|
f3710f73b0828ef500343932d46c61d3b1e04ba9
|
[
"Apache-2.0"
] | null | null | null |
src/using_tips/using_tips_2.py
|
HuangHuaBingZiGe/GitHub-Demo
|
f3710f73b0828ef500343932d46c61d3b1e04ba9
|
[
"Apache-2.0"
] | null | null | null |
src/using_tips/using_tips_2.py
|
HuangHuaBingZiGe/GitHub-Demo
|
f3710f73b0828ef500343932d46c61d3b1e04ba9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
50个话题
9章
1.课程简介
2.数据结构相关话题
3.迭代器与生成器相关话题
4.字符串处理相关话题
5.文件I/O操作相关话题
6.数据编码与处理相关话题
7.类与对象相关话题
8.多线程与多进程相关话题
9.装饰器相关话题
"""
"""
第1章 课程简介
1-1 课程简介
1-2 在线编码工具WebIDE使用指南
第2章 数据结构与算法进阶训练
2-1 如何在列表, 字典, 集合中根据条件筛选数据
2-2 如何为元组中的每个元素命名, 提高程序可读性
2-3 如何统计序列中元素的出现频度
2-4 如何根据字典中值的大小, 对字典中的项排序
2-5 如何快速找到多个字典中的公共键(key)
2-6 如何让字典保持有序
2-7 如何实现用户的历史记录功能(最多n条)
第3章 对象迭代与反迭代技巧训练
3-1 如何实现可迭代对象和迭代器对象(1)
3-2 如何实现可迭代对象和迭代器对象(2)
3-3 如何使用生成器函数实现可迭代对象
3-4 如何进行反向迭代以及如何实现反向迭代
3-5 如何对迭代器做切片操作
3-6 如何在一个for语句中迭代多个可迭代对象
第4章 字符串处理技巧训练
4-1 如何拆分含有多种分隔符的字符串
4-2 如何判断字符串a是否以字符串b开头或结尾
4-3 如何调整字符串中文本的格式
4-4 如何将多个小字符串拼接成一个大的字符串
4-5 如何对字符串进行左, 右, 居中对齐
4-6 如何去掉字符串中不需要的字符
第5章 文件I/O高效处理技巧训练
5-1 如何读写文本文件
5-2 如何处理二进制文件
5-3 如何设置文件的缓冲
5-4 如何将文件映射到内存
5-5 如何访问文件的状态
5-6 如何使用临时文件
第6章 csv,json,xml,excel高效解析与构建技巧训练
6-1 如何读写csv数据
6-2 如何读写json数据
6-3 如何解析简单的xml文档
6-4 如何构建xml文档
6-5 如何读写excel文件
第7章 类与对象深度技术进阶训练
7-1 如何派生内置不可变类型并修改实例化行为
7-2 如何为创建大量实例节省内存
7-3 如何让对象支持上下文管理
7-4 如何创建可管理的对象属性
7-5 如何让类支持比较操作
7-6 如何使用描述符对实例属性做类型检查
7-7 如何在环状数据结构中管理内存
7-8 如何通过实例方法名字的字符串调用方法
第8章 多线程编程核心技术应用进阶训练
8-1 如何使用多线程
8-2 如何线程间通信
8-3 如何在线程间进行事件通知
8-4 如何使用线程本地数据
8-5 如何使用线程池
8-6 如何使用多进程
第9章 装饰器使用技巧进阶训练
9-1 如何使用函数装饰器
9-2 如何为被装饰的函数保存元数据
9-3 如何定义带参数的装饰器
9-4 如何实现属性可修改的函数装饰器
9-5 如何在类中定义装饰器
"""
"""
6-1 如何读写csv数据
实际案例:
http://table.finance.yahoo.com/table.csv?s=000001.sz我们可以通过雅虎网站获取了中国股市(深市)数据集,它以csv数据格式存储:
Date,Open,High,Low,Close,Volume,Adj Close
2016-06-30,8.69,8.74,8.66,8.70,36220400,8.70
2016-06-29,8.63,8.69,8.62,8.69,36961100,8.69
2016-06-28,8.58,8.64,8.56,8.63,33651900,8.63
请将平安银行这支股票,在2016奶奶中成交量超过50000000的纪录存储到另一个csv文件中
解决方案:
使用标准库中的csv模块,可以使用其中reader和writer完成csv文件读写
"""
'''
urllib.request.urlretrieve("http://table.finance.yahoo.com/table.csv?s=000001.sz",'pingan.csv')
cat pingan.csv | less
'''
"""
# 使用二进制打开
# 有问题,其实csv文件不是二进制文件
rf = open(file_name,'rb')
reader = csv.reader(rf)
print(reader)
for row in reader:
print(row)
"""
'''
file = 'test.csv'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'csv' + '\\' + file
file_copy = 'pingan_copy.csv'
file_name_copy = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'csv' + '\\' + file_copy
with open(file_name,"rt",encoding="utf-8") as csvfile:
reader = csv.reader(csvfile)
rows = [row for row in reader]
print(rows)
wf = open(file_name_copy,'w')
writer = csv.writer(wf)
writer.writerow(['Date','Open','High','Low','Close','Volume','Adj Close'])
writer.writerow(['Date','Open','High','Low','Close','Volume','Adj Close'])
wf.flush()
print("-----最好的方法-----")
print("python2和python3的csv.reader.next的方法有所区别")
file_copy_2 = 'pingan2.csv'
file_name_copy2 = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'csv' + '\\' + file_copy_2
with open(file_name,'r') as rf:
reader = csv.reader(rf)
with open(file_name_copy2,'w') as wf:
writer = csv.writer(wf)
headers = next(reader)
writer.writerow(headers)
for row in reader:
#if row[0] < '2016-01-01':
#break
if int(row[5]) > 36961100:
writer.writerow(row)
print("end")
'''
'''
6-2 如何读写json数据
实际案例:
在web应用中常用JSON(JavaScript Object Notation)格式传输数据,例如我们利用Baidu语音识别服务做语音识别,将本地音频数据post到Baidu语音识别服务器,服务器响应结果为json字符串
{"corpus_no":"6303355448008565863","err_msg":"success.","err_no":0,"result":["你好 ,"],"sn":"418359718861467614305"}
在python中如何读写json数据?
解决方案:
使用标准库中的json模块,其中loads,dumps函数可以完成json数据的读写
'''
'''
#coding:utf-8
import requests
import json
# 录音
from record import Record
record = Record(channels=1)
audioData = record.record(2)
# 获取token
from secret import API_KEY,SECRET_KEY
authUrl = "https://openapi.baidu.com/oauth/2.0/token?grant_type=client_credentials&client_id=" + API_KEY + "&client_secret=" + SECRET_KEY
response = requests.get(authUrl)
res = json.loads(response.content)
token = res['access_token']
# 语音识别
cuid = 'xxxxxxxxxxx'
srvUrl = 'http://vop.baidu.com/server_api' + '?cuid=' + cuid + '&token=' + token
httpHeader = {
'Content-Type':'audio/wav; rate = 8000',
}
response = requests.post(srvUrl,headers=httpHeader,data=audioData)
res = json.loads(response.content)
text = res['result'][0]
print(u'\n识别结果:')
print(text)
'''
'''
# dumps将python对象转换为json的字符串
l = [1,2,'abc',{'name': 'Bob','age':13}]
print(json.dumps(l))
d = {'b':None,'a':5,'c':'abc'}
print(json.dumps(d))
# 将逗号后的空格和冒号后的空格删除,将空格压缩掉
print(json.dumps(l,separators=[',', ':']))
# 对输出的字典中的键进行排序
print(json.dumps(d,sort_keys=True))
# 把json字符串转换为python对象
l2 = json.loads('[1,2,"abc",{"name": "Bob","age":13}]')
print(type(l2))
d2 = json.loads('{"b":null,"a":5,"c":"abc"}')
print(type(d2))
'''
'''
file = 'demo.json'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'json' + '\\' + file
l = [1,2,'abc',{'name': 'Bob','age':13}]
# 将json写入文件当中,dump和load同理
with open(file_name,'w') as f:
json.dump(l,f)
'''
'''
6-3 如何解析简单的xml文档
实际案例:
xml是一种十分常用的标记性语言,可提供统一的方法来描述应用程序的结构化数据:
<?xml version="1.0"?>
<data>
<country name="Liechtenstein">
<rank updated="yes">2</rank>
<year>2008</year>
<gdppc>141100</gdppc>
<neighbor name="Austria" direction="E"/>
<neighbor name="Switzerland" direction="W"/>
</country>
</data>
python中如何解析xml文档?
解决方案:
使用标准库中的xml.etree.ElementTree,其中的parse函数可以解析xml文档
from xml.etree.ElementTree import parse
import os
file = 'demo.xml'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'xml' + '\\' + file
f = open(file_name)
et = parse(f)
print(et)
root = et.getroot()
print(root)
print(root.tag)
print(root.attrib)
print(root.text)
print(root.text.strip())
print(root.getchildren())
for child in root:
print(child.get('name'))
print(root.find('country'))
print(root.findall('country'))
print(root.iterfind('country'))
for e in root.iterfind('country'):
print(e.get('name'))
print(root.findall('rank')) # 找不到非子元素
print(root.iter())
print(list(root.iter()))
print(list(root.iter('rank')))
print(root.findall('country/*')) # *表示匹配孙子节点
print(root.findall('rank')) # 直接查找子元素
print(root.findall('.//rank')) # //表示查找所有层次
print(root.findall('.//rank/..')) # ..表示查找rank的所有父节点
print(root.findall('country[@name]')) # 查找包含name属性的country
print(root.findall('country[@name="Singapore"]'))#查找属性等于特定值的
print(root.findall('country[rank]'))# 查找包含rank的country
print(root.findall('country[rank="5"]'))
print(root.findall('country[1]')) #查找序号为1的country
print(root.findall('country[2]'))
print(root.findall('country[last()]')) #找最后一个country标签
print(root.findall('country[last()-1]')) #找倒数第二个
'''
'''
6-4 如何构建xml文档
实际案例:
某些时候,我们需要将其他格式数据转换为xml
例如,我们要把平安股票csv文件,转换成相应的xml,
test.csv
Date,Open,High,Low,Close,Volume,Adj Close
2016/6/1,8.69,8.74,8.66,8.7,36220400,8.7
pingan.xml
<Data>
<Row>
<Date>2016-07-05</Date>
<Open>8.80</Open>
<High>8.83</High>
<Low>8.77</Low>
<Close>8.81</Close>
<Volume>42203700</Volume>
<AdjClose>8.81</AdjClose>
</Row>
</Data>
解决方案:
使用标准库中的xml.etree.ElementTree,构建ElementTree,使用write方法写入文件
from xml.etree.ElementTree import Element,ElementTree
e = Element('Data') # tag名字 Data 创建元素
print(e.tag)
print(e.set('name','abc')) # 设置Data的属性
from xml.etree.ElementTree import tostring
print(tostring(e))
e.text='123'
print(tostring(e))
e2 = Element('Row') #创建子元素
e3 = Element('Open')
e3.text='8.80'
e2.append(e3)
print(tostring(e2))
e.text = None
e.append(e2)
print(tostring(e))
import os
file = 'demo1.xml'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'xml' + '\\' + file
et = ElementTree(e)
et.write(file_name)
'''
'''
import csv
from xml.etree.cElementTree import Element,ElementTree
import os
file = 'pingan.csv'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'csv' + '\\' + file
file1 = 'pingan.xml'
file_name1 = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'xml' + '\\' + file1
def xml_pretty(e,level=0):
if len(e) > 0:
e.text = '\n' + '\t' * (level + 1)
for child in e:
xml_pretty(child,level + 1)
child.tail = child.tail[:-1]
e.tail = '\n' + '\t' * level
def csvToXml(fname):
with open(fname,'r') as f:
reader = csv.reader(f)
headers = next(reader)
root = Element('Data')
for row in reader:
eRow = Element('Row')
root.append(eRow)
for tag,text in zip(headers,row):
e = Element(tag)
e.text = text
eRow.append(e)
xml_pretty(root)
return ElementTree(root)
et = csvToXml(file_name)
et.write(file_name1)
'''
'''
6-5 如何读写excel文件
实际案例:
Microsoft Excel是日常办公中使用最频繁的软件,其数据格式为xls、xlsx,一种非常常用的电子表格,小学某班成绩,记录在excel文件中
姓名 语文 数学 外语
李雷 95 99 96
韩梅 98 100 93
张峰 94 95 95
利用python读写excel,添加“总分”列,计算每人的总分
解决方案:
使用pip安装, $ pip install xlrd xlwt
使用第三方库xlrd和xlwt,这两个库分别用于excel读和写
'''
'''
import xlrd
import os
file = 'sum_point.xlsx'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'excel' + '\\' + file
book = xlrd.open_workbook(file_name)
print(book.sheets())
sheet = book.sheet_by_index(0)
print(sheet.nrows)
print(sheet.ncols)
cell = sheet.cell(0,0)
print(cell)
# cell.ctype 是枚举值 xlrd.XL...
print(type(cell.value))
print(cell.value)
cell2 = sheet.cell(1,1)
print(cell2)
print(type(cell2))
print(cell2.ctype)
print(sheet.row(1))
print(sheet.row_values(1))
print(sheet.row_values(1,1)) # 跳过第一个,第2个1表示从第一个开始
# sheet.put_cell 为表添加1个单元格
import xlwt
wbook = xlwt.Workbook()
wsheet = wbook.add_sheet('sheet1')
# wsheet.write
# wbook.save('output.xlsx')
'''
'''
# 写入失败,有问题!!!!!!!!!
import os
import xlrd
import xlwt
file = 'sum_point.xlsx'
file_name = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'excel' + '\\' + file
file1 = 'sum_point_copy.xlsx'
file_name1 = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + \
'\\' + 'docs' + '\\' + 'excel' + '\\' + file1
rbook = xlrd.open_workbook(file_name)
rsheet = rbook.sheet_by_index(0)
nc = rsheet.ncols
rsheet.put_cell(0, nc, xlrd.XL_CELL_TEXT, u'总分', None) # 添加总分的文字,第0行,第rsheet.ncols列,类型,文本
for row in range(1, rsheet.nrows): # 第1行开始,跳过第0列
t = sum(rsheet.row_values(row, 1))
rsheet.put_cell(row, nc, xlrd.XL_CELL_NUMBER, t, None)
wbook = xlwt.Workbook()
wsheet = wbook.add_sheet(rsheet.name)
style = xlwt.easyxf('align:vertical center,horizontal center')
for r in range(rsheet.nrows):
for c in range(rsheet.ncols):
wsheet.write(r, c, rsheet.cell_value(r, c), style)
wbook.save(u'output.xlsx')
'''
| 22.460938 | 137 | 0.646174 |
b09674c3e57bc227eba73f4fa3feb2352cd398e8
| 9,032 |
py
|
Python
|
tests/test_server_utils.py
|
jecki/DHParser
|
c6c1bd7db2de85b5997a3640242f4f444532304e
|
[
"Apache-2.0"
] | 2 |
2020-12-25T19:37:42.000Z
|
2021-03-26T04:59:12.000Z
|
tests/test_server_utils.py
|
jecki/DHParser
|
c6c1bd7db2de85b5997a3640242f4f444532304e
|
[
"Apache-2.0"
] | 6 |
2018-08-07T22:48:52.000Z
|
2021-10-07T18:38:20.000Z
|
tests/test_server_utils.py
|
jecki/DHParser
|
c6c1bd7db2de85b5997a3640242f4f444532304e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""test_server_utils.py - tests for utility-functions in the,
server-module of DHParse.
Author: Eckhart Arnold <[email protected]>
Copyright 2020 Bavarian Academy of Sciences and Humanities
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import asyncio
import concurrent.futures
import collections.abc
import json
import os
import sys
import traceback
scriptpath = os.path.dirname(__file__) or '.'
sys.path.append(os.path.abspath(os.path.join(scriptpath, '..')))
from DHParser.server import pp_json, ExecutionEnvironment, asyncio_run
from DHParser.toolkit import json_dumps, json_encode_string
class TestExecutionEnvironment:
    def test_execenv(self):
        """A callable that raises inside ExecutionEnvironment.execute must
        surface as a JSON-RPC error whose message carries the traceback."""
        def fault():
            raise AssertionError

        async def main():
            # get_running_loop only exists from Python 3.7 onwards.
            if sys.version_info >= (3, 7):
                loop = asyncio.get_running_loop()
            else:
                loop = asyncio.get_event_loop()
            env = ExecutionEnvironment(loop)
            return await env.execute(None, fault, [])

        result, rpc_error = asyncio_run(main())
        json_str = '{"jsonrpc": "2.0", "error": {"code": %i, "message": %s}}' % \
                   (rpc_error[0], json_encode_string(rpc_error[1]))
        assert json_str.find('Traceback') >= 0
class TestUtils:
    """Tests for pp_json, the JSON pretty-printer of the server module."""

    # A realistic LSP "initialize" request, serialized on a single line.
    data = ('{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"processId":17666,'
            '"rootPath":"/home/eckhart/Entwicklung/DHParser/examples/EBNF_fork","rootUri":'
            '"file:///home/eckhart/Entwicklung/DHParser/examples/EBNF_fork","capabilities":'
            '{"workspace":{"applyEdit":true,"workspaceEdit":{"documentChanges":true},'
            '"didChangeConfiguration":{"dynamicRegistration":true},"didChangeWatchedFiles":'
            '{"dynamicRegistration":true},"symbol":{"dynamicRegistration":true,"symbolKind":'
            '{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,'
            '26]}},"executeCommand":{"dynamicRegistration":true},"configuration":true,'
            '"workspaceFolders":true},"textDocument":{"publishDiagnostics":'
            '{"relatedInformation":true},"synchronization":{"dynamicRegistration":true,'
            '"willSave":true,"willSaveWaitUntil":true,"didSave":true},"completion":'
            '{"dynamicRegistration":true,"contextSupport":true,"completionItem":'
            '{"snippetSupport":true,"commitCharactersSupport":true,"documentationFormat":'
            '["markdown","plaintext"],"deprecatedSupport":true,"preselectSupport":true},'
            '"completionItemKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,'
            '19,20,21,22,23,24,25]}},"hover":{"dynamicRegistration":true,"contentFormat":'
            '["markdown","plaintext"]},"signatureHelp":{"dynamicRegistration":true,'
            '"signatureInformation":{"documentationFormat":["markdown","plaintext"]}},'
            '"definition":{"dynamicRegistration":true},"references":{"dynamicRegistration":'
            'true},"documentHighlight":{"dynamicRegistration":true},"documentSymbol":'
            '{"dynamicRegistration":true,"symbolKind":{"valueSet":[1,2,3,4,5,6,7,8,9,10,11,'
            '12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},'
            '"hierarchicalDocumentSymbolSupport":true},"codeAction":{"dynamicRegistration":'
            'true,"codeActionLiteralSupport":{"codeActionKind":{"valueSet":["","quickfix",'
            '"refactor","refactor.extract","refactor.inline","refactor.rewrite","source",'
            '"source.organizeImports"]}}},"codeLens":{"dynamicRegistration":true},'
            '"formatting":{"dynamicRegistration":true},"rangeFormatting":'
            '{"dynamicRegistration":true},"onTypeFormatting":{"dynamicRegistration":true},'
            '"rename":{"dynamicRegistration":true},"documentLink":{"dynamicRegistration":'
            'true},"typeDefinition":{"dynamicRegistration":true},"implementation":'
            '{"dynamicRegistration":true},"colorProvider":{"dynamicRegistration":true},'
            '"foldingRange":{"dynamicRegistration":true,"rangeLimit":5000,"lineFoldingOnly":'
            'true}}},"trace":"off","workspaceFolders":[{"uri":'
            '"file:///home/eckhart/Entwicklung/DHParser/examples/EBNF_fork",'
            '"name":"EBNF_fork"}]}}')
    # The expected pretty-printed rendering of `data` as emitted by pp_json.
    # NOTE(review): the exact indentation of this literal must match pp_json's
    # output byte for byte — do not re-format it.
    expected = """{
    "jsonrpc": "2.0",
    "id": 0,
    "method": "initialize",
    "params": {
        "processId": 17666,
        "rootPath": "/home/eckhart/Entwicklung/DHParser/examples/EBNF_fork",
        "rootUri": "file:///home/eckhart/Entwicklung/DHParser/examples/EBNF_fork",
        "capabilities": {
            "workspace": {
                "applyEdit": true,
                "workspaceEdit": {"documentChanges": true},
                "didChangeConfiguration": {"dynamicRegistration": true},
                "didChangeWatchedFiles": {"dynamicRegistration": true},
                "symbol": {
                    "dynamicRegistration": true,
                    "symbolKind": {
                        "valueSet": [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]}},
                "executeCommand": {"dynamicRegistration": true},
                "configuration": true,
                "workspaceFolders": true},
            "textDocument": {
                "publishDiagnostics": {"relatedInformation": true},
                "synchronization": {
                    "dynamicRegistration": true,
                    "willSave": true,
                    "willSaveWaitUntil": true,
                    "didSave": true},
                "completion": {
                    "dynamicRegistration": true,
                    "contextSupport": true,
                    "completionItem": {
                        "snippetSupport": true,
                        "commitCharactersSupport": true,
                        "documentationFormat": ["markdown","plaintext"],
                        "deprecatedSupport": true,
                        "preselectSupport": true},
                    "completionItemKind": {
                        "valueSet": [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]}},
                "hover": {
                    "dynamicRegistration": true,
                    "contentFormat": ["markdown","plaintext"]},
                "signatureHelp": {
                    "dynamicRegistration": true,
                    "signatureInformation": {
                        "documentationFormat": ["markdown","plaintext"]}},
                "definition": {"dynamicRegistration": true},
                "references": {"dynamicRegistration": true},
                "documentHighlight": {"dynamicRegistration": true},
                "documentSymbol": {
                    "dynamicRegistration": true,
                    "symbolKind": {
                        "valueSet": [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]},
                    "hierarchicalDocumentSymbolSupport": true},
                "codeAction": {
                    "dynamicRegistration": true,
                    "codeActionLiteralSupport": {
                        "codeActionKind": {
                            "valueSet": ["","quickfix","refactor","refactor.extract","refactor.inline","refactor.rewrite","source","source.organizeImports"]}}},
                "codeLens": {"dynamicRegistration": true},
                "formatting": {"dynamicRegistration": true},
                "rangeFormatting": {"dynamicRegistration": true},
                "onTypeFormatting": {"dynamicRegistration": true},
                "rename": {"dynamicRegistration": true},
                "documentLink": {"dynamicRegistration": true},
                "typeDefinition": {"dynamicRegistration": true},
                "implementation": {"dynamicRegistration": true},
                "colorProvider": {"dynamicRegistration": true},
                "foldingRange": {
                    "dynamicRegistration": true,
                    "rangeLimit": 5000,
                    "lineFoldingOnly": true}}},
        "trace": "off",
        "workspaceFolders": [{
            "uri": "file:///home/eckhart/Entwicklung/DHParser/examples/EBNF_fork",
            "name": "EBNF_fork"}]}}"""
    def test_pp_json(self):
        """pp_json must pretty-print the sample request exactly as `expected`.
        Skipped (trivially true) on Python < 3.6 where dict order is not
        guaranteed."""
        obj = json.loads(self.data)
        serialized = pp_json(obj)
        assert sys.version_info < (3, 6) or serialized == self.expected, serialized
    def test_pp_json_stacktrace(self):
        """pp_json must render a multi-line traceback string line by line.
        NOTE(review): the expected text hard-codes `line 178`, i.e. the source
        line of the raise below — editing this file shifts that number and
        breaks the test; confirm before moving code around."""
        try:
            raise AssertionError()
        except AssertionError:
            tb = traceback.format_exc()
        # Normalize Windows path separators so the comparison is OS-independent.
        ppjsn = pp_json({'error' : tb}).replace('\\\\', '/')
        expected = '{"error": "Traceback (most recent call last):"\n' \
                   ' " File \\"$SCRIPTPATH/test_server_utils.py\\", ' \
                   'line 178, in test_pp_json_stacktrace"\n' \
                   ' " raise AssertionError()"\n' \
                   ' "AssertionError"\n ""}'.\
            replace('$SCRIPTPATH', scriptpath.replace('\\', '/'), 1).replace('./', '')
        # print(ppjsn)
        # print(expected)
        assert ppjsn == expected, '\n\n' + ppjsn + '\n\n' + expected
# Run all test classes in this module via DHParser's own test runner.
if __name__ == "__main__":
    from DHParser.testing import runner
    runner("", globals())
| 46.317949 | 146 | 0.618357 |
b04fd2b86635d1f4f03caa3bd7e3f16ba0f1405e
| 2,000 |
py
|
Python
|
Python2/Modulo_3_exe_pratico.py
|
Belaschich/SoulON
|
9f908b025b34fc79187b4efd5ea93a78dca0ef7e
|
[
"MIT"
] | null | null | null |
Python2/Modulo_3_exe_pratico.py
|
Belaschich/SoulON
|
9f908b025b34fc79187b4efd5ea93a78dca0ef7e
|
[
"MIT"
] | null | null | null |
Python2/Modulo_3_exe_pratico.py
|
Belaschich/SoulON
|
9f908b025b34fc79187b4efd5ea93a78dca0ef7e
|
[
"MIT"
] | null | null | null |
"""
1. Crie uma base de dados chamada sistema_escolar_soul_on
2. Crie uma tabela alunos com os campos id, nome, matricula, turma.
3. Alimente a tabela com os seguintes dados:
"""
import mysql.connector
db = mysql.connector.connect(
host = "localhost",
user = "root",
password = "",
database = "sistema_escolar_soul_on"
)
mycursor = db.cursor()
#mycursor.execute("CREATE DATABASE sistema_escolar_soul_on")
#print("Database criada com sucesso!")
#mycursor.execute("CREATE TABLE alunos(id INT AUTO_INCREMENT PRIMARY KEY, name VARCHAR(255), matricula VARCHAR(255), turma VARCHAR(255))")
#print("Tabela criada com sucesso!")
#adicionar = "INSERT INTO alunos (name, matricula, turma) VALUES(%s, %s, %s)"
#val = [
# ("José Lima", "MAT90551", "BCW22"),
# ("Carlos Augusto", "MAT90552", "BCW22"),
# ("Lívia Lima", "MAT90553", "BCW22"),
# ("Sandra Gomes", "MAT90554", "BCW23"),
# ("João Augusto", "MAT90555", "BCW23"),
# ("Breno Lima", "MAT90556", "BCW24"),
# ("José Vinícius", "MAT90557", "BCW25")
#]
#mycursor.executemany(adicionar, val)
#print(mycursor.rowcount, "linha(s) alterada(s)!")
#db.commit()
"""4. Faça as seguintes consultas:
• Liste todos os registros de sua tabela.
• Liste apenas nome e matrícula dos alunos do BCW23.
• Liste apenas o nome dos alunos que tiverem o sobrenome Lima.
"""
#mycursor.execute("SELECT * FROM alunos")
#mycursor.execute("SELECT name FROM alunos WHERE turma = 'BCW23' ")
#adicionar = "SELECT name FROM alunos WHERE name LIKE '%Lima%'"
#mycursor.execute(adicionar)
#myresult = mycursor.fetchall()
#for x in myresult:
# print(x)
'''
5. O aluno Carlos Augusto está na turma errada. Matricule o mesmo no BCW25.
6. O aluno José Vinicius desistiu do curso, ele deve ser excluído do sistema.
'''
#adicionar = "UPDATE alunos SET turma = 'BCW25' WHERE name = 'Carlos Augusto'"
adicionar = "DELETE FROM alunos WHERE name = 'José Vinicius'"
mycursor.execute(adicionar)
#db.commit()
print(mycursor.rowcount, "Linha(s) afetada(s)")
| 33.333333 | 138 | 0.698 |
e948b3f1a84db7afd6220136835cc456ab27e70d
| 1,492 |
py
|
Python
|
xls_worker.py
|
VNCompany/PyDafm
|
9634f5428b9f3739dbf7c159daad34856b372165
|
[
"Unlicense"
] | null | null | null |
xls_worker.py
|
VNCompany/PyDafm
|
9634f5428b9f3739dbf7c159daad34856b372165
|
[
"Unlicense"
] | null | null | null |
xls_worker.py
|
VNCompany/PyDafm
|
9634f5428b9f3739dbf7c159daad34856b372165
|
[
"Unlicense"
] | null | null | null |
import xlrd
import xlwt
import os
class XlsReader:
    """Reads the first worksheet of an .xls file via xlrd."""

    def __init__(self, file: str):
        self.file_path = file

    def read_data(self, n: int = 0):
        """Return the rows starting at row index *n*.

        Each row becomes a list of (cell_type, cell_value) string tuples,
        derived from xlrd's "type:'value'" cell representation.  Returns
        None if the workbook cannot be opened or parsed (best effort).
        """
        try:
            workbook = xlrd.open_workbook(self.file_path)
            sheet = workbook.sheet_by_index(0)
            rows = []
            for row_number in range(n, sheet.nrows):
                parsed_row = []
                for cell in sheet.row(row_number):
                    text = str(cell).replace("'", "")
                    # Split on the first ':' only — the value itself may
                    # contain further colons.
                    kind, _, value = text.partition(":")
                    parsed_row.append((kind, value))
                rows.append(parsed_row)
            return rows
        except Exception:
            return None
class XlsWriter:
    """Writes tables into a fresh .xls workbook, replacing any existing file."""

    def __init__(self, file: str):
        # Start from a clean slate: remove a previously written file.
        if os.path.exists(file):
            os.remove(file)
        self.file_path = file
        self.sheets = []
        self.tables = []
        self.wb = xlwt.Workbook()

    def add_sheet(self, text, table: list):
        """Add a worksheet named *text*; row 0 of *table* is written in bold
        as the header, remaining rows are written as plain cells."""
        bold_font = xlwt.Font()
        bold_font.bold = True
        header_style = xlwt.XFStyle()
        header_style.font = bold_font
        sheet = self.wb.add_sheet(text)
        if table:
            for column, value in enumerate(table[0]):
                sheet.write(0, column, value, header_style)
        for row_index, row in enumerate(table[1:], start=1):
            for column, value in enumerate(row):
                sheet.write(row_index, column, value)

    def save(self):
        """Persist the workbook to the path given at construction time."""
        self.wb.save(self.file_path)
| 27.127273 | 64 | 0.505362 |
e99c451a3b49e72a4447a8051fe7eeedbb0cc2c8
| 2,203 |
py
|
Python
|
IdeaProjects/MachineLearning/DecisionTree.py
|
sinomiko/project
|
00fadb0033645f103692f5b06c861939a9d4aa0e
|
[
"BSD-3-Clause"
] | 1 |
2018-12-30T14:07:42.000Z
|
2018-12-30T14:07:42.000Z
|
IdeaProjects/MachineLearning/DecisionTree.py
|
sinomiko/project
|
00fadb0033645f103692f5b06c861939a9d4aa0e
|
[
"BSD-3-Clause"
] | null | null | null |
IdeaProjects/MachineLearning/DecisionTree.py
|
sinomiko/project
|
00fadb0033645f103692f5b06c861939a9d4aa0e
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Decision-tree classification demo on the AllElectronics CSV data set.

@author: miko
"""
from sklearn.feature_extraction import DictVectorizer
import csv
from sklearn import tree
from sklearn import preprocessing
from sklearn.externals.six import StringIO
import numpy as np
np.set_printoptions(threshold = 1e6)  # threshold for how many array items get printed
# Read in the csv file and put features into list of dict and list of class label
allElectronicsData = open(r'D:\development\DailyImprove\July机器学习与深度学习\(Part One)深度学习基础\代码与素材\代码与素材(1)\01DTree\AllElectronics.csv', 'r')
reader = csv.reader(allElectronicsData)
#headers = reader.next()
headers = next(reader)  # Python 3: next() instead of the Python 2 reader.next()
print(headers)
print("~"*10+"headers end"+"~"*10)
featureList = []
labelList = []
for row in reader:  # iterate over every data row
    labelList.append(row[len(row)-1])  # last column is the class label
    rowDict = {}  # all features of one row go into one dict
    for i in range(1, len(row)-1):  # half-open range: from "age" through "credit_rating"
        rowDict[headers[i]] = row[i]  # store the feature value under its header name
    featureList.append(rowDict)  # append each row's feature dict to the feature list
print(featureList)
print("~"*10+"featureList end"+"~"*10)
# Vetorize features
vec = DictVectorizer()  # one-hot encodes the categorical feature dicts
dummyX = vec.fit_transform(featureList).toarray()
print("dummyX: " + str(dummyX))
print(vec.get_feature_names())
print("~"*10+"dummyX end"+"~"*10)
print("labelList: " + str(labelList))
print("~"*10+"labelList end"+"~"*10)
# vectorize class labels
lb = preprocessing.LabelBinarizer()
dummyY = lb.fit_transform(labelList)
print("dummyY: " + str(dummyY))
print("~"*10+"dummyY end"+"~"*10)
# Using decision tree for classification
# clf = tree.DecisionTreeClassifier()
clf = tree.DecisionTreeClassifier(criterion='entropy')  # split criterion: entropy (information gain)
clf = clf.fit(dummyX, dummyY)
print("clf: " + str(clf))
# Visualize model
with open("allElectronicInformationGainOri.dot", 'w') as f:
    # Export to Graphviz .dot; with Graphviz installed, convert to PDF via:
    #   dot -Tpdf allElectronicInformationGainOri.dot -o output.pdf
    f = tree.export_graphviz(clf, feature_names=vec.get_feature_names(), out_file=f)
oneRowX = dummyX[0, :]
print("oneRowX: " + str(oneRowX))
# NOTE(review): newRowX aliases oneRowX (and row 0 of dummyX); the writes
# below mutate dummyX in place — presumably intentional for the demo.
newRowX = oneRowX
newRowX[0] = 1
newRowX[2] = 0
print("newRowX: " + str(newRowX))
# NOTE(review): recent scikit-learn versions require a 2-D sample here
# (newRowX.reshape(1, -1)); a 1-D array raises an error — confirm against
# the sklearn version in use.
predictedY = clf.predict(newRowX)
print("predictedY: " + str(predictedY))
| 22.947917 | 135 | 0.702678 |
e9c1a85292219c121131945e11ba2a6e28e0dafd
| 2,834 |
py
|
Python
|
model/topoml_util/test_Tokenizer.py
|
Empythy/geometry-learning
|
5300d421ef848c2748a2ba41ced5c6e2fba93200
|
[
"MIT"
] | 21 |
2018-10-09T08:15:29.000Z
|
2022-03-16T08:23:08.000Z
|
model/topoml_util/test_Tokenizer.py
|
reinvantveer/Topology-Learning
|
5300d421ef848c2748a2ba41ced5c6e2fba93200
|
[
"MIT"
] | 31 |
2017-09-20T13:30:37.000Z
|
2018-03-01T13:24:58.000Z
|
model/topoml_util/test_Tokenizer.py
|
reinvantveer/Topology-Learning
|
5300d421ef848c2748a2ba41ced5c6e2fba93200
|
[
"MIT"
] | 7 |
2018-11-29T11:39:02.000Z
|
2022-01-12T07:10:26.000Z
|
import unittest
import pandas
from Tokenizer import Tokenize
# Fixture CSV with geometry columns; loaded once at import time for all tests.
TOPOLOGY_TRAINING_CSV = 'test_files/example.csv'
source_data = pandas.read_csv(TOPOLOGY_TRAINING_CSV)
# Training inputs: the two source geometries (WKT) concatenated with a space;
# targets: the WKT of their intersection.
raw_training_set = source_data['brt_wkt'] + ' ' + source_data['osm_wkt']
raw_target_set = source_data['intersection_wkt']
class TestUtil(unittest.TestCase):
    """Tests for the Tokenize helper (truncation, tokenization, one-hot)."""

    def test_truncate(self):
        """Every field of the truncated training set fits within max_len."""
        max_len = 500
        (input_set, _) = Tokenize.truncate(max_len, raw_training_set, raw_target_set)
        for record in input_set:
            for field in record:
                self.assertLessEqual(len(field), max_len)
    def test_batch_truncate(self):
        """batch_truncate yields the expected number of training records."""
        batch_size = 3
        max_len = 1000
        validation_split = 0.1
        training_set, target_set = Tokenize.batch_truncate(batch_size, max_len, validation_split, raw_training_set,
                                                           raw_target_set)
        self.assertEqual(len(training_set), 30)
    def test_tokenize(self):
        """Character-level tokenization assigns indices by frequency rank."""
        test_strings = ['A test string']
        tokenizer = Tokenize(test_strings)
        tokenized = tokenizer.char_level_tokenize(test_strings)
        self.assertEqual((tokenizer.word_index, tokenized),
                         ({' ': 2, 'A': 4, 'e': 5, 'g': 9, 'i': 7, 'n': 8, 'r': 6, 's': 3, 't': 1},
                          [[4, 2, 1, 5, 3, 1, 2, 3, 1, 6, 7, 8, 9]]))
    def test_tokenize_example(self):
        """Tokenizing real WKT rows produces the expected index table.
        NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0
        (use .values / .to_numpy()); this test requires an old pandas — confirm
        the pinned version."""
        self.maxDiff = None
        test_strings = source_data.as_matrix()
        word_index = {'5': 1, '4': 2, '.': 3, '1': 4, '2': 5, '8': 6, ' ': 7, ',': 8, '3': 9, '6': 10, '0': 11,
                      '9': 12, '7': 13, 'O': 14, '(': 15, ')': 16, 'L': 17, 'Y': 18, 'P': 19, 'G': 20, 'N': 21,
                      'T': 22, 'E': 23, 'M': 24, 'I': 25, 'C': 26, 'U': 27, 'R': 28}
        tokenizer = Tokenize(test_strings[0] + test_strings[1] + test_strings[2])
        tokenized = tokenizer.char_level_tokenize(test_strings[0])
        self.assertEqual((tokenizer.word_index, tokenized[0][0:15]),
                         (word_index,
                          [19, 14, 17, 18, 20, 14, 21, 15, 15, 2, 3, 6, 4, 4, 6]))
    def test_one_hot(self):
        """One-hot encoding marks the first character of the first geometry."""
        source_matrix = source_data.as_matrix()
        test_strings = source_matrix[0] + source_matrix[1]
        max_len = 0
        for sentence in test_strings:
            if len(sentence) > max_len:
                max_len = len(sentence)
        tokenizer = Tokenize(test_strings)
        matrix = tokenizer.one_hot(test_strings, max_len)
        self.assertEqual(matrix[0][0][19], True)  # 'P' for POLYGON
    def test_detokenize(self):
        """decypher() inverts char_level_tokenize()."""
        test_strings = ['A test string']
        tokenizer = Tokenize(test_strings)
        tokenized = tokenizer.char_level_tokenize(test_strings)
        detokenized = tokenizer.decypher(tokenized)
        self.assertEqual(detokenized, test_strings)
| 42.939394 | 115 | 0.577629 |
fd570050f48eac83a8faa8670acf80bffd4d583d
| 3,084 |
py
|
Python
|
research/cv/ProtoNet/src/IterDatasetGenerator.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/ProtoNet/src/IterDatasetGenerator.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/ProtoNet/src/IterDatasetGenerator.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
dataset iter generator script.
"""
import numpy as np
from tqdm import tqdm
class IterDatasetGenerator:
    """
    Episodic data loader: each iteration yields a batch of `classes_per_it`
    randomly chosen classes with `num_samples` samples each, as used for
    ProtoNet-style training.
    """
    def __init__(self, data, classes_per_it, num_samples, iterations):
        # `data` must expose sample array `x` and label array `y`.
        self.__iterations = iterations
        self.__data = data.x
        self.__labels = data.y
        self.__iter = 0
        self.classes_per_it = classes_per_it
        self.sample_per_class = num_samples
        self.classes, self.counts = np.unique(self.__labels, return_counts=True)
        self.idxs = range(len(self.__labels))
        # One row per class, padded with NaN: multiplying the int empty array
        # by np.nan yields a float matrix filled with NaN, into which sample
        # indices are written below.
        self.indexes = np.empty((len(self.classes), max(self.counts)), dtype=int) * np.nan
        self.numel_per_class = np.zeros_like(self.classes)
        for idx, label in tqdm(enumerate(self.__labels)):
            label_idx = np.argwhere(self.classes == label).item()
            # Write this sample's index into the first still-NaN slot of its
            # class row.
            self.indexes[label_idx, np.where(np.isnan(self.indexes[label_idx]))[0][0]] = idx
            self.numel_per_class[label_idx] = int(self.numel_per_class[label_idx]) + 1
        print('init end')
    def __next__(self):
        """Return one episode as (samples, labels, unique_labels)."""
        spc = self.sample_per_class
        cpi = self.classes_per_it
        if self.__iter >= self.__iterations:
            raise StopIteration
        batch_size = spc * cpi
        # Placeholder array of the right size/dtype; every slot is overwritten
        # in the loop below, so the random initial values never survive.
        batch = np.random.randint(low=batch_size, high=10 * batch_size, size=(batch_size), dtype=np.int32)
        c_idxs = np.random.permutation(len(self.classes))[:cpi]
        for indx, c in enumerate(self.classes[c_idxs]):
            index = indx*spc
            ci = [c_i for c_i in range(len(self.classes)) if self.classes[c_i] == c][0]
            label_idx = list(range(len(self.classes)))[ci]
            # Draw spc distinct sample slots from this class's filled entries.
            sample_idxs = np.random.permutation(int(self.numel_per_class[label_idx]))[:spc]
            ind = 0
            for sid in sample_idxs:
                batch[index+ind] = self.indexes[label_idx][sid]
                ind = ind + 1
        # Shuffle so samples of one class are not contiguous.
        batch = batch[np.random.permutation(len(batch))]
        data_x = []
        data_y = []
        for b in batch:
            data_x.append(self.__data[b])
            data_y.append(self.__labels[b])
        self.__iter += 1
        data_y = np.asarray(data_y, np.int32)
        data_class = np.asarray(np.unique(data_y), np.int32)
        item = (data_x, data_y, data_class)
        return item
    def __iter__(self):
        # Restart the episode counter so the generator can be re-iterated.
        self.__iter = 0
        return self
    def __len__(self):
        return self.__iterations
| 38.074074 | 106 | 0.624514 |
c095e0406851668f536f037523c078b508b0b407
| 6,004 |
py
|
Python
|
public/chart/integrations/django/samples/fusioncharts/samples/Single_Event_Overlay.py
|
AizaDapitan/PMC-IMS_V3
|
271ce3193edbf5182a9e232666ca417561ba2d16
|
[
"MIT"
] | 14 |
2016-11-03T19:06:21.000Z
|
2021-11-24T09:05:09.000Z
|
public/chart/integrations/django/samples/fusioncharts/samples/Single_Event_Overlay.py
|
AizaDapitan/PMC-IMS_V3
|
271ce3193edbf5182a9e232666ca417561ba2d16
|
[
"MIT"
] | 10 |
2022-02-16T07:17:49.000Z
|
2022-03-08T12:43:51.000Z
|
asset/integrations/django/samples/fusioncharts/samples/Single_Event_Overlay.py
|
Piusshungu/catherine-junior-school
|
5356f4ff5a5c8383849d32e22a60d638c35b1a48
|
[
"MIT"
] | 17 |
2016-05-19T13:16:34.000Z
|
2021-04-30T14:38:42.000Z
|
from django.shortcuts import render
from django.http import HttpResponse
# Include the `fusioncharts.py` file which has required functions to embed the charts in html page
from ..fusioncharts import FusionCharts
from ..fusioncharts import FusionTable
from ..fusioncharts import TimeSeries
import requests
# Loading Data and schema from a Static JSON String url
# The `chart` method is defined to load chart data from an JSON string.
def chart(request):
    """Django view: fetch remote data/schema JSON, configure a FusionCharts
    time-series chart with historical event markers, and render index.html.

    NOTE(review): the two requests.get calls have no timeout or error
    handling — a slow or failing S3 endpoint stalls/raises in this view.
    """
    data = requests.get('https://s3.eu-central-1.amazonaws.com/fusion.store/ft/data/single-event-overlay-data.json').text
    schema = requests.get('https://s3.eu-central-1.amazonaws.com/fusion.store/ft/schema/single-event-overlay-schema.json').text
    fusionTable = FusionTable(schema, data)
    timeSeries = TimeSeries(fusionTable)
    # The attribute values below are JavaScript object literals passed through
    # verbatim to the FusionCharts client library.
    timeSeries.AddAttribute("caption", """{
        text: 'Interest Rate Analysis'
        }""")
    timeSeries.AddAttribute("subCaption", """{
        text: 'Federal Reserve (USA)'
        }""")
    timeSeries.AddAttribute("yAxis", """[{
        plot: 'Interest Rate',
        format:{
            suffix: '%'
        },
        title: 'Interest Rate'
    }]""")
    timeSeries.AddAttribute("xAxis", """{
        plot: 'Time',
        timemarker: [{
            start: 'Mar-1980',
            label: 'US inflation peaked at 14.8%.',
            timeFormat: ' %b -%Y',
            style: {
                marker:
                {
                    fill: '#D0D6F4'
                }
            }
        }, {
            start: 'May-1981',
            label: 'To control inflation, the Fed started {br} raising interest rates to over {br} 20%.',
            timeFormat: '%b-%Y'
        }, {
            start: 'Jun-1983',
            label: 'By proactive actions of Mr.Volcker, {br} the inflation falls to 2.4% {br} from the peak of over 14% {br} just three years ago.',
            timeFormat: '%b-%Y',
            style: {
                marker: {
                    fill: '#D0D6F4'
                }
            }
        }, {
            start: 'Oct-1987',
            label: 'The Dow Jones Industrial Average lost {br} about 30% of it’s value.',
            timeFormat: '%b-%Y',
            style: {
                marker: {
                    fill: '#FBEFCC'
                }
            }
        }, {
            start: 'Jan-1989',
            label: 'George H.W. Bush becomes {br} the 41st president of US!',
            timeFormat: '%b-%Y'
        }, {
            start: 'Aug-1990',
            label: 'The oil prices spiked to $35 {br} per barrel from $15 per barrel {br} because of the Gulf War.',
            timeFormat: '%b-%Y'
        }, {
            start: 'Dec-1996',
            label: 'Alan Greenspan warns of the dangers {br} of \"irrational exuberance\" in financial markets, {br} an admonition that goes unheeded',
            timeFormat: '%b-%Y'
        }, {
            start: 'Sep-2008',
            label: 'Lehman Brothers collapsed!',
            timeFormat: '%b-%Y'
        },{
            start: 'Mar-2009',
            label: 'The net worth of US households {br} stood at a trough of $55 trillion.',
            timeFormat: '%b-%Y',
            style: {
                marker: {
                    fill: '#FBEFCC'
                }
            }
        }, {
            start: 'Oct-2009',
            label: 'Unemployment rate peaked {br} in given times to 10%.',
            timeFormat: '%b-%Y'
        }]
    }""")
    # Create an object for the chart using the FusionCharts class constructor
    fcChart = FusionCharts("timeseries", "ex1", 700, 450, "chart-1", "json", timeSeries)
    # returning complete JavaScript and HTML code, which is used to generate chart in the browsers.
    return render(request, 'index.html', {'output' : fcChart.render(),'chartTitle': "Single event overlay"})
| 56.11215 | 179 | 0.340273 |
23d67731e693aa173ae0a92b5d6d9fd1362a3e6c
| 3,880 |
py
|
Python
|
tests/views/test_oauth2_client.py
|
DanielGrams/gsevp
|
e94034f7b64de76f38754b56455e83092378261f
|
[
"MIT"
] | 1 |
2021-06-01T14:49:18.000Z
|
2021-06-01T14:49:18.000Z
|
tests/views/test_oauth2_client.py
|
DanielGrams/gsevp
|
e94034f7b64de76f38754b56455e83092378261f
|
[
"MIT"
] | 286 |
2020-12-04T14:13:00.000Z
|
2022-03-09T19:05:16.000Z
|
tests/views/test_oauth2_client.py
|
DanielGrams/gsevpt
|
a92f71694388e227e65ed1b24446246ee688d00e
|
[
"MIT"
] | null | null | null |
import pytest
def test_read(client, seeder, utils):
    """The owner of an OAuth2 client can view its detail page."""
    user_id, admin_unit_id = seeder.setup_base(True)
    created_client_id = seeder.insert_default_oauth2_client(user_id)
    detail_url = utils.get_url("oauth2_client", id=created_client_id)
    utils.get_ok(detail_url)
def test_read_notOwner(client, seeder, utils):
    """Reading another user's OAuth2 client must be rejected."""
    owner_id = seeder.create_user(email="[email protected]", admin=True)
    foreign_client_id = seeder.insert_default_oauth2_client(owner_id)
    # Log in as a different (freshly set up) user before requesting the page.
    seeder.setup_base(True)
    detail_url = utils.get_url("oauth2_client", id=foreign_client_id)
    utils.get_unauthorized(detail_url)
def test_list(client, seeder, utils):
    """The OAuth2 client list page renders for a logged-in user."""
    user_id, admin_unit_id = seeder.setup_base(True)
    utils.get_ok(utils.get_url("oauth2_clients"))
@pytest.mark.parametrize("db_error", [True, False])
def test_create_authorization_code(client, app, utils, seeder, mocker, db_error):
    """Submitting the create form stores a new OAuth2 client for the user;
    with db_error=True the commit is mocked to fail and an error response is
    expected instead."""
    from project.api import scope_list
    user_id, admin_unit_id = seeder.setup_base(True)
    url = utils.get_url("oauth2_client_create")
    response = utils.get_ok(url)
    if db_error:
        # Must be installed before the POST so the commit inside it fails.
        utils.mock_db_commit(mocker)
    response = utils.post_form(
        url,
        response,
        {
            "client_name": "Mein Client",
            "scope": scope_list,
            "redirect_uris": utils.get_url("swagger_oauth2_redirect"),
        },
    )
    if db_error:
        utils.assert_response_db_error(response)
        return
    # Success path: the client exists in the DB and the view redirects to it.
    with app.app_context():
        from project.models import OAuth2Client
        oauth2_client = OAuth2Client.query.filter(
            OAuth2Client.user_id == user_id
        ).first()
        assert oauth2_client is not None
        client_id = oauth2_client.id
    utils.assert_response_redirect(response, "oauth2_client", id=client_id)
@pytest.mark.parametrize("db_error", [True, False])
def test_update(client, seeder, utils, app, mocker, db_error):
    """Submitting the update form changes name and redirect URIs; with
    db_error=True the commit is mocked to fail and an error is expected."""
    user_id, admin_unit_id = seeder.setup_base(True)
    oauth2_client_id = seeder.insert_default_oauth2_client(user_id)
    url = utils.get_url("oauth2_client_update", id=oauth2_client_id)
    response = utils.get_ok(url)
    if db_error:
        # Must be installed before the POST so the commit inside it fails.
        utils.mock_db_commit(mocker)
    response = utils.post_form(
        url,
        response,
        {
            "client_name": "Neuer Name",
            # Multiple URIs are submitted newline-separated in one field.
            "redirect_uris": "localhost:1337\nlocalhost:1338",
        },
    )
    if db_error:
        utils.assert_response_db_error(response)
        return
    utils.assert_response_redirect(response, "oauth2_client", id=oauth2_client_id)
    with app.app_context():
        from project.models import OAuth2Client
        oauth2_client = OAuth2Client.query.get(oauth2_client_id)
        assert oauth2_client.client_name == "Neuer Name"
        assert oauth2_client.redirect_uris == ["localhost:1337", "localhost:1338"]
@pytest.mark.parametrize("db_error", [True, False])
@pytest.mark.parametrize("non_match", [True, False])
def test_delete(client, seeder, utils, app, mocker, db_error, non_match):
    """Deleting requires retyping the client name as confirmation; a wrong
    name (non_match) or a failing commit (db_error) must abort the delete."""
    user_id, admin_unit_id = seeder.setup_base(True)
    oauth2_client_id = seeder.insert_default_oauth2_client(user_id)
    url = utils.get_url("oauth2_client_delete", id=oauth2_client_id)
    response = utils.get_ok(url)
    if db_error:
        # Must be installed before the POST so the commit inside it fails.
        utils.mock_db_commit(mocker)
    # "Mein Client" is the name of the seeded default client.
    form_name = "Mein Client"
    if non_match:
        form_name = "Falscher Name"
    response = utils.post_form(
        url,
        response,
        {
            "name": form_name,
        },
    )
    if non_match:
        utils.assert_response_error_message(response)
        return
    if db_error:
        utils.assert_response_db_error(response)
        return
    utils.assert_response_redirect(response, "oauth2_clients")
    with app.app_context():
        from project.models import OAuth2Client
        oauth2_client = OAuth2Client.query.get(oauth2_client_id)
        assert oauth2_client is None
| 27.714286 | 82 | 0.682732 |
f1c743338a1ac1f386344474508b8fe9622e0d4b
| 12,514 |
py
|
Python
|
Apps/Vorverarbeitung/create_subject_annotation_classes.py
|
RGreinacher/bachelor-thesis
|
60dbc03ce40e3ec42f2538d67a6aabfea6fbbfc8
|
[
"MIT"
] | 1 |
2021-04-13T10:00:46.000Z
|
2021-04-13T10:00:46.000Z
|
Apps/Vorverarbeitung/create_subject_annotation_classes.py
|
RGreinacher/bachelor-thesis
|
60dbc03ce40e3ec42f2538d67a6aabfea6fbbfc8
|
[
"MIT"
] | null | null | null |
Apps/Vorverarbeitung/create_subject_annotation_classes.py
|
RGreinacher/bachelor-thesis
|
60dbc03ce40e3ec42f2538d67a6aabfea6fbbfc8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# import python libs
import re
import json
import argparse
import json
import random
from os import listdir
from os.path import isfile, join
from pprint import pprint as pp
from collections import deque
# import project libs
import create_annotated_corpus
# defining globals & constants
# Input/output locations of the experiment (local developer machine paths).
GOLD_ANNOTATED_CORPUS_FILES = '/Users/rg/Nextcloud/Uni/Bachelorarbeit/Korpora/Implisense/json gold/'
SUBJECTS_TABEL_JSON = '/Users/rg/Nextcloud/Uni/Bachelorarbeit/Apps/Vorverarbeitung/subjects_tabel.json'
# Percentages per annotation class, one row per precision level (10% / 50% /
# 90% correct annotations).  Each row sums to 100.  Class indices (0-5) match
# the per-entry comments; index 3 is always 0 and never drawn.
TARGET_DISTRIBUTION = [
    [
        10, # correct annotation
        16.5141, # manipulated span
        6.606, # manipulated cat
        0, # manipulated cat & span
        3.3021, # unnecessary annotation
        63.5778 # no annotation
    ],
    [
        50, # correct annotation
        9.1745, # manipulated span
        3.67, # manipulated cat
        0, # manipulated cat & span
        1.8345, # unnecessary annotation
        35.321 # no annotation
    ],
    [
        90, # correct annotation
        1.8349, # manipulated span
        0.734, # manipulated cat
        0, # manipulated cat & span
        0.3669, # unnecessary annotation
        7.0642 # no annotation
    ]
]
TOTAL_ANNOTATIONS = 310
# A block ends once it has accumulated at least this many annotations.
MIN_NUMBER_OF_ANNOTATIONS_PER_BLOCK = 75
NUMBER_OF_SUBJECTS = 66
# methods
def read_corpus_files(path):
    """Read all JSON corpus documents in *path* (non-recursive).

    Files are processed in sorted name order; entries that are not regular
    files or lack a ``.json`` extension are skipped.

    :param path: directory containing the gold-annotated corpus files
    :return: list of decoded JSON documents, one per file
    """
    corpus = []
    for file_name in sorted(listdir(path)):
        file_path = join(path, file_name)
        # join() works whether or not *path* carries a trailing separator;
        # the former `path + file_name` concatenation required one, which was
        # inconsistent with the join() used in the isfile() check.
        if not (isfile(file_path) and file_name.endswith('.json')):
            continue
        with open(file_path, 'r', encoding='utf-8') as file_handler:
            corpus.append(json.load(file_handler))
    return corpus
def define_blocks(annotated_corpus):
    """Partition the corpus into blocks of paragraphs.

    Each block accumulates paragraphs until it holds at least
    MIN_NUMBER_OF_ANNOTATIONS_PER_BLOCK annotations (the final block may hold
    fewer).  Returns a list of blocks; a block is a list of paragraphs, and a
    paragraph is a list of per-sentence annotation counts.
    """
    # Per-sentence annotation counts, flattened over documents but grouped by
    # paragraph.
    counts_per_paragraph = [
        [sum(1 for token in sentence if 'annotation' in token)
         for sentence in paragraph]
        for document in annotated_corpus
        for paragraph in document['data']
    ]
    blocks = []
    running_total = 0
    block_start = 0
    last_position = len(counts_per_paragraph) - 1
    for position, sentence_counts in enumerate(counts_per_paragraph):
        running_total += sum(sentence_counts)
        # Close the block once the threshold is reached, or at the very end.
        if running_total >= MIN_NUMBER_OF_ANNOTATIONS_PER_BLOCK or position == last_position:
            print('add', running_total, 'annotations to block')
            blocks.append(counts_per_paragraph[block_start:position + 1])
            block_start = position + 1
            running_total = 0
    return blocks
def create_reference_distributions(blocks):
    """For each of the 3 precision levels, map every block to the same nested
    structure with each annotation replaced by a randomly drawn annotation
    class id (0-5, per TARGET_DISTRIBUTION).

    NOTE(review): output depends on the module-level `random` state; the call
    order below must not be changed if reproducibility matters.
    """
    def distribution_per_block(annotations, level):
        # Convert the level's percentages into absolute counts for this block.
        factor = annotations / 100.0
        absolute_distribution = list(map(lambda x: int(x * factor), TARGET_DISTRIBUTION[level]))
        # int() truncation can leave a remainder; distribute it randomly.
        difference = annotations - sum(absolute_distribution)
        if difference > 0:
            for i in range(0, difference):
                index = random.choice([0, 1, 2, 4, 5]) # 3 is missing, because it never occurs
                absolute_distribution[index] += 1
        # Expand counts into a flat, shuffled list of class ids.
        annotation_class_list = []
        for class_id, count in enumerate(absolute_distribution):
            for i in range(0, count):
                annotation_class_list.append(class_id)
        random.shuffle(annotation_class_list)
        return annotation_class_list
    def collate_distribution_to_block_structure(block, distribution):
        # Consume the flat distribution list in order, rebuilding the
        # block -> document -> sentence nesting.
        block_with_annotation_classes = []
        for document in block:
            annotation_classes_per_document = []
            for number_of_annotations_per_sentence in document:
                annotation_classes_per_sentence = []
                for i in range(0, number_of_annotations_per_sentence):
                    annotation_classes_per_sentence.append(distribution.pop(0))
                annotation_classes_per_document.append(annotation_classes_per_sentence)
            block_with_annotation_classes.append(annotation_classes_per_document)
        return block_with_annotation_classes
    annotation_classes_in_blocks_per_level = []
    for level in range(0, 3):
        annotation_classes_in_blocks = []
        for block in blocks:
            annotations = sum(map(lambda sentence: sum(sentence), block))
            distribution = distribution_per_block(annotations, level)
            collated_block = collate_distribution_to_block_structure(block, distribution)
            annotation_classes_in_blocks.append(collated_block)
        annotation_classes_in_blocks_per_level.append(annotation_classes_in_blocks)
    return annotation_classes_in_blocks_per_level
def create_distribution_per_subject(reference_annotation_classes, subject_id):
    """Build the four blocks shown to one subject: blocks at even positions
    (relative to subject_id) are cleared to class 5, odd ones get the
    reference classes rotated by subject_id - 1."""
    def clear_block(block):
        # Replace every annotation class with 5 ("no annotation" per the
        # TARGET_DISTRIBUTION ordering), keeping the nesting intact.
        clean_block = []
        for document in block:
            cleaned_document = []
            for sentence in document:
                cleaned_sentence = []
                for annotation in sentence:
                    cleaned_sentence.append(5)
                cleaned_document.append(cleaned_sentence)
            clean_block.append(cleaned_document)
        return clean_block
    def collate_distribution_to_block_structure(block, distribution):
        # Consume the flat distribution in order, mirroring the block's
        # document -> sentence nesting.
        block_with_annotation_classes = []
        for document in block:
            annotation_classes_per_document = []
            for sentence in document:
                annotation_classes_per_sentence = []
                for annotation_class in sentence:
                    annotation_classes_per_sentence.append(distribution.pop(0))
                annotation_classes_per_document.append(annotation_classes_per_sentence)
            block_with_annotation_classes.append(annotation_classes_per_document)
        return block_with_annotation_classes
    def shift_annotation_classes(block, offset):
        # flat_block_structure is defined elsewhere in this module (not in
        # this excerpt); it flattens the nested class lists.
        distribution = flat_block_structure(block)
        items = deque(distribution)
        items.rotate(offset)
        rotated_distribution = list(items)
        return collate_distribution_to_block_structure(block, rotated_distribution)
    subject_blocks = []
    for i in range(subject_id, (subject_id + 4)):
        reference_block = reference_annotation_classes[i - subject_id]
        if i % 2 == 0:
            block = clear_block(reference_block)
        else:
            block = shift_annotation_classes(reference_block, subject_id - 1)
        subject_blocks.append(block)
    return subject_blocks
def add_no_annotations_to_unnecessary_annotations(blocks, gold_annotated_corpus):
    """Insert the class 5 next to every class-4 entry in the subject's blocks.

    Walks the gold corpus in lockstep with the per-subject class lists;
    wherever a sentence's class list holds a 4, a 5 is inserted before or
    after it depending on which side of the annotated token chunk has more
    free space.  Mutates the per-sentence lists in place and returns the
    blocks with the enlarged lists collated back in.
    """
    def flatten_blocks(blocks_per_subject):
        # return a list of paragraphs for each subject
        paragraphs = []
        for block in blocks_per_subject:
            for paragraph in block:
                paragraphs.append(paragraph)
        return paragraphs
    def insert_index_addition(token_index, sentence):
        # Return 1 to insert *after* the class 4, 0 to insert before:
        # compares free space left vs. right of the annotated chunk using
        # helpers from the external create_annotated_corpus module.
        current_annotation_length = sentence[token_index]['annotation']['length']
        space_to_the_left = token_index - create_annotated_corpus.earliest_chunk_start_index(sentence, token_index)
        space_to_the_right = create_annotated_corpus.maximum_chunk_length(sentence, token_index) - current_annotation_length
        # print('sentence')
        # pp(sentence)
        # print('index', token_index, 'left', space_to_the_left, 'right', space_to_the_right)
        if space_to_the_left > space_to_the_right: return 1
        return 0
    def collate_paragraphs_to_blocks_structure(blocks, paragraphs):
        # Write the (mutated) flat paragraph list back into the block nesting.
        total_paragraph_index = -1
        for block in blocks:
            for i in range(0, len(block)):
                total_paragraph_index += 1
                block[i] = paragraphs[total_paragraph_index]
        return blocks
    total_paragraph_index = -1
    annotations_per_paragraph = flatten_blocks(blocks)
    for document_index, document in enumerate(gold_annotated_corpus):
        for paragraph_index, paragraph in enumerate(document['data']):
            total_paragraph_index += 1
            for sentence_index, sentence in enumerate(paragraph):
                annotations_per_sentence = annotations_per_paragraph[total_paragraph_index][sentence_index]
                annotation_index = -1
                for token_index, token in enumerate(sentence):
                    if 'annotation' in token:
                        annotation_index += 1
                        if annotations_per_sentence[annotation_index] == 4:
                            # print('annotations list before', annotations_per_sentence)
                            insert_index = annotation_index + insert_index_addition(token_index, sentence)
                            # print('insert_index', insert_index)
                            annotations_per_sentence.insert(insert_index, 5)
                            # print('annotations list after', annotations_per_sentence)
                            # Advance past the insertion so indexing stays
                            # aligned — NOTE(review): verify alignment when
                            # the 5 is inserted *after* the 4.
                            annotation_index += 1
    return collate_paragraphs_to_blocks_structure(blocks, annotations_per_paragraph)
# helpers
def validate_target_distribution():
    """Abort the whole program if any configured distribution does not sum to 100."""
    for idx, dist in enumerate(TARGET_DISTRIBUTION):
        total = sum(dist)
        if total == 100:
            continue
        print('TARGET_DISTRIBUTION is not valid!', idx)
        exit()
def flat_block_structure(block):
    """Flatten a block (documents -> sentences -> classes) into one flat list."""
    return [
        annotation_class
        for document in block
        for sentence in document
        for annotation_class in sentence
    ]
def save_document_to_file(document):
    """Serialize *document* as JSON and write it to SUBJECTS_TABEL_JSON.

    Fix: use a context manager so the file handle is closed even when
    serialization or the write raises (the original leaked the handle on
    error and never closed it in that path).
    """
    json_encoded_document = json.dumps(document)
    with open(SUBJECTS_TABEL_JSON, 'w') as file_handler:
        file_handler.write(json_encoded_document)
def find_four(blocks, prefix = False):
    # Convenience wrapper: report every sentence whose class list contains a 4.
    return find_occurences(4, blocks, prefix)
def find_occurences(number, blocks, prefix=False):
    """Return every sentence (flat class list) in *blocks* containing *number*.

    When *prefix* is truthy it is printed before each matching sentence,
    which makes this double as a quick diagnostic.
    """
    matches = []
    for block in blocks:
        for paragraph in block:
            for sentence in paragraph:
                if number not in sentence:
                    continue
                matches.append(sentence)
                if prefix:
                    print(prefix, sentence)
    return matches
# entry point as a stand alone script
if __name__ == '__main__':
    # Sanity-check the configured percentages before doing any work.
    validate_target_distribution()
    gold_annotated_corpus = read_corpus_files(GOLD_ANNOTATED_CORPUS_FILES)
    blocks = define_blocks(gold_annotated_corpus)
    reference_annotation_classes_in_blocks = create_reference_distributions(blocks)
    # The class 3 must never appear in a reference distribution; print any hit.
    for level, reference_annotation_classes_per_level in enumerate(reference_annotation_classes_in_blocks):
        find_occurences(3, reference_annotation_classes_per_level, 'found forbidden number in level' + str(level))
    # Build one entry per subject, cycling through the three levels.
    subject_table = []
    for subject_id in range(0, NUMBER_OF_SUBJECTS):
        level = subject_id % 3
        reference_annotation_classes = reference_annotation_classes_in_blocks[level]
        subject_annotation_classes = create_distribution_per_subject(reference_annotation_classes, subject_id)
        with_no_annotations = add_no_annotations_to_unnecessary_annotations(subject_annotation_classes, gold_annotated_corpus)
        subject_table.append(with_no_annotations)
    save_document_to_file(subject_table)
    # Manual test fixture kept for reference:
    # blocks = [[[[4, 1]]]]
    # gold = [
    #     {
    #         'data': [
    #             [
    #                 [
    #                     {'annotation': {'label': 'COM', 'length': 2}, 'term': 'eBay'},
    #                     {'term': 'Kleinanzeigen'},
    #                     {'term': 'gehört'},
    #                     {'term': 'zur'},
    #                     {'term': 'internationalen'},
    #                     {'annotation': {'label': 'COM', 'length': 3}, 'term': 'eBay'},
    #                     {'term': 'Classifieds'},
    #                     {'term': 'Group'},
    #                     {'term': '.'}
    #                 ]
    #             ]
    #         ]
    #     }
    # ]
    # add_no_annotations_to_unnecessary_annotations(blocks, gold)
| 40.762215 | 126 | 0.661179 |
f1d1599f26e449d04856a7475216653f3eebd62d
| 290 |
py
|
Python
|
packages/watchmen-model/src/watchmen_model/gui/favorite.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-model/src/watchmen_model/gui/favorite.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
packages/watchmen-model/src/watchmen_model/gui/favorite.py
|
Indexical-Metrics-Measure-Advisory/watchmen
|
c54ec54d9f91034a38e51fd339ba66453d2c7a6d
|
[
"MIT"
] | null | null | null |
from typing import List
from pydantic import BaseModel
from watchmen_model.common import ConnectedSpaceId, DashboardId, LastVisit, UserBasedTuple
class Favorite(UserBasedTuple, LastVisit, BaseModel):
    """Per-user favorites: ids of connected spaces and dashboards."""
    # NOTE: pydantic copies field defaults per instance, so the mutable
    # list defaults are safe here.
    connectedSpaceIds: List[ConnectedSpaceId] = []
    dashboardIds: List[DashboardId] = []
| 26.363636 | 90 | 0.813793 |
f1d9c9b6a9e0cdba7c384b29d07173cd6aac3663
| 9,769 |
py
|
Python
|
lib/widgets/wizard.py
|
moreorem/graphdener
|
ad8a4b98ad79afb2da820248d656384614721882
|
[
"BSD-3-Clause"
] | null | null | null |
lib/widgets/wizard.py
|
moreorem/graphdener
|
ad8a4b98ad79afb2da820248d656384614721882
|
[
"BSD-3-Clause"
] | null | null | null |
lib/widgets/wizard.py
|
moreorem/graphdener
|
ad8a4b98ad79afb2da820248d656384614721882
|
[
"BSD-3-Clause"
] | null | null | null |
from PyQt5.QtWidgets import (QPushButton, QLabel, QFileDialog,
QComboBox, QWizard, QWizardPage, QLineEdit,
QVBoxLayout, QApplication, QHBoxLayout)
from ..services.actions import Call
from ..func import get_pattern
from ..statics import NODECNAMES, EDGECNAMES
# TODO: Call new graph after import wizard
class ImportWizard(QWizard):
    """Wizard collecting node/edge file paths plus column/delimiter formats.

    Two-file mode shows Page1 (nodes) then Page2 (edges); single-file mode
    shows only Page2b.  On Finish the collected formats are turned into
    regex patterns (get_pattern) and transmitted to the backend via Call.
    """
    def __init__(self, parent=None, isSingleFile=False):
        super(ImportWizard, self).__init__(parent)
        if not isSingleFile:
            self.addPage(Page1(self))
            self.addPage(Page2(self))
        else:
            self.addPage(Page2b(self))
        self.setWindowTitle("Import Wizard")
        # Trigger close event when pressing Finish button to redirect variables to backend
        self.button(QWizard.FinishButton).clicked.connect(self.onFinished)
        self.button(QWizard.NextButton).clicked.connect(self.page(0).receiveInputs)
        # Initialize variables to send to backend
        self.filepath = [None, None]
        self.nodeColumns = []
        self.nodeDelimiters = []
        self.edgeColumns = []
        self.edgeDelimiters = []
    def onFinished(self):
        """Collect edge-page input, build both regexes, send paths to backend."""
        print("Finish")
        # Ask input from edge import page
        # NOTE(review): in single-file mode page(1) is None, so this call
        # would fail — confirm single-file wizards reach this path.
        self.page(1).receiveInputs()
        regexN = get_pattern(self.nodeColumns, self.nodeDelimiters)
        regexE = get_pattern(self.edgeColumns, self.edgeDelimiters)
        # Communicate and transmit to backend
        Call.connect()
        result = Call.send_paths(self.filepath, regexN, regexE, self.nodeColumns, self.edgeColumns)
        # TODO: Make use of return state to enable graph controls
        if result == 'paths imported':
            return True
class Page1(QWizardPage):
    """Node-import page: pick the node file and describe its column layout.

    One QComboBox per node column (choices from NODECNAMES plus '-'),
    interleaved with QLineEdit delimiter fields; together these define the
    regex the wizard later builds with get_pattern().
    """
    def __init__(self, parent=None):
        super(Page1, self).__init__(parent)
        self.columnSelectors = []
        self.delimiterFields = []
        nCols = len(NODECNAMES)
        # Initialize comboboxes and text fields
        for i in range(nCols):
            self.columnSelectors.append(QComboBox())
        for i in range(nCols + 1):
            self.delimiterFields.append(QLineEdit())
        self.openFileBtn = QPushButton("Import Node List")
        self.stepLabel = QLabel()
        self.formatLabel = QLabel()
        layout = QVBoxLayout()
        layout.addWidget(self.stepLabel)
        layout.addWidget(self.openFileBtn)
        layout.addWidget(self.formatLabel)
        # Alternate delimiter field / column selector horizontally.
        patternLayout = QHBoxLayout()
        for i in range(nCols + 1):
            patternLayout.addWidget(self.delimiterFields[i])
            if i < nCols:
                patternLayout.addWidget(self.columnSelectors[i])
        self.setLayout(layout)
        # Insert the layout of the regexp elements
        layout.addLayout(patternLayout)
        # Bind actions
        self.openFileBtn.clicked.connect(self.openFileNameDialog)
    def openFileNameDialog(self):
        """Let the user pick the node file; store its path as filepath[0]."""
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getOpenFileName(
            self, "QFileDialog.getOpenFileName()", "",
            "All Files (*);;Python Files (*.py)", options=options)
        # if user selected a file store its path to a variable
        if fileName:
            self.wizard().filepath[0] = fileName
    def initializePage(self):
        """Fill selectors/delimiter fields with defaults when the page shows."""
        self.stepLabel.setText("Nodes information")
        self.formatLabel.setText("Nodes file format")
        i = 0
        for comboBox in self.columnSelectors:
            comboBox.addItems(NODECNAMES)
            comboBox.addItem('-')
            # Initialize first selection to avoid error
            comboBox.setCurrentIndex(i)
            comboBox.activated.connect(self.handleActivated)
            comboBox.selection = comboBox.currentText()
            i += 1
        # Initialize textboxes with multi-space expression
        for delimiterField in self.delimiterFields:
            delimiterField.setText('\\s+')
        self.delimiterFields[0].setText('^')
        self.delimiterFields[-1].setText('')
    def handleActivated(self, index):
        # Remember the user's choice on the combobox object itself.
        self.sender().selection = self.sender().itemText(index)
    def receiveInputs(self):
        ''' activates on next button and sends the input to wizard '''
        self.wizard().nodeDelimiters = [delim.text() for delim in self.delimiterFields]
        self.wizard().nodeColumns = [comboBox.selection for comboBox in self.columnSelectors]
class Page2(QWizardPage):
    """Edge-import page: pick the edge file and describe its column layout.

    Mirrors Page1 but uses EDGECNAMES and writes to the wizard's edge*
    attributes and filepath[1].
    """
    def __init__(self, parent=None):
        super(Page2, self).__init__(parent)
        nCols = len(EDGECNAMES)
        self.setWindowTitle("Edge phase")
        self.stepLabel = QLabel()
        self.openFileBtn = QPushButton("Import Edge List")
        self.columnSelectors = []
        self.delimiterFields = []
        # Initialize comboboxes and text fields
        for i in range(nCols):
            self.columnSelectors.append(QComboBox())
        for i in range(nCols + 1):
            self.delimiterFields.append(QLineEdit())
        layout = QVBoxLayout()
        layout.addWidget(self.stepLabel)
        layout.addWidget(self.openFileBtn)
        # Alternate delimiter field / column selector horizontally.
        patternLayout = QHBoxLayout()
        for i in range(nCols + 1):
            patternLayout.addWidget(self.delimiterFields[i])
            if i < nCols:
                patternLayout.addWidget(self.columnSelectors[i])
        self.setLayout(layout)
        # Insert the layout of the regexp elements
        layout.addLayout(patternLayout)
        # Bind actions
        self.openFileBtn.clicked.connect(self.openFileNameDialog)
    def openFileNameDialog(self):
        """Let the user pick the edge file; store its path as filepath[1]."""
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getOpenFileName(
            self, "QFileDialog.getOpenFileName()", "",
            "All Files (*);;Python Files (*.py)", options=options)
        # if user selected a file store its path to a variable
        if fileName:
            self.wizard().filepath[1] = fileName
    def initializePage(self):
        """Fill selectors/delimiter fields with defaults when the page shows."""
        self.stepLabel.setText("Edges information")
        i = 0
        for comboBox in self.columnSelectors:
            comboBox.addItems(EDGECNAMES)
            comboBox.addItem('-')
            # Initialize first selection to avoid error
            comboBox.setCurrentIndex(i)
            comboBox.activated.connect(self.handleActivated)
            comboBox.selection = comboBox.currentText()
            i += 1
        # Initialize textboxes with multi-space expression
        for delimiterField in self.delimiterFields:
            delimiterField.setText('\\s+')
        self.delimiterFields[0].setText('^')
        self.delimiterFields[-1].setText('')
    def handleActivated(self, index):
        # Remember the user's choice on the combobox object itself.
        self.sender().selection = self.sender().itemText(index)
    def receiveInputs(self):
        ''' activates on next button and sends the input to wizard '''
        self.wizard().edgeDelimiters = [delim.text() for delim in self.delimiterFields]
        self.wizard().edgeColumns = [comboBox.selection for comboBox in self.columnSelectors]
# To be called only on single file Import
class Page2b(QWizardPage):
    """Single-file variant of the edge page.

    NOTE(review): byte-for-byte duplicate of Page2 — candidate for
    consolidation (e.g. Page2b(Page2)).
    """
    def __init__(self, parent=None):
        super(Page2b, self).__init__(parent)
        nCols = len(EDGECNAMES)
        self.setWindowTitle("Edge phase")
        self.stepLabel = QLabel()
        self.openFileBtn = QPushButton("Import Edge List")
        self.columnSelectors = []
        self.delimiterFields = []
        # Initialize comboboxes and text fields
        for i in range(nCols):
            self.columnSelectors.append(QComboBox())
        for i in range(nCols + 1):
            self.delimiterFields.append(QLineEdit())
        layout = QVBoxLayout()
        layout.addWidget(self.stepLabel)
        layout.addWidget(self.openFileBtn)
        # Alternate delimiter field / column selector horizontally.
        patternLayout = QHBoxLayout()
        for i in range(nCols + 1):
            patternLayout.addWidget(self.delimiterFields[i])
            if i < nCols:
                patternLayout.addWidget(self.columnSelectors[i])
        self.setLayout(layout)
        # Insert the layout of the regexp elements
        layout.addLayout(patternLayout)
        # Bind actions
        self.openFileBtn.clicked.connect(self.openFileNameDialog)
    def openFileNameDialog(self):
        """Let the user pick the edge file; store its path as filepath[1]."""
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getOpenFileName(
            self, "QFileDialog.getOpenFileName()", "",
            "All Files (*);;Python Files (*.py)", options=options)
        # if user selected a file store its path to a variable
        if fileName:
            self.wizard().filepath[1] = fileName
    def initializePage(self):
        """Fill selectors/delimiter fields with defaults when the page shows."""
        self.stepLabel.setText("Edges information")
        i = 0
        for comboBox in self.columnSelectors:
            comboBox.addItems(EDGECNAMES)
            comboBox.addItem('-')
            # Initialize first selection to avoid error
            comboBox.setCurrentIndex(i)
            comboBox.activated.connect(self.handleActivated)
            comboBox.selection = comboBox.currentText()
            i += 1
        # Initialize textboxes with multi-space expression
        for delimiterField in self.delimiterFields:
            delimiterField.setText('\\s+')
        self.delimiterFields[0].setText('^')
        self.delimiterFields[-1].setText('')
    def handleActivated(self, index):
        # Remember the user's choice on the combobox object itself.
        self.sender().selection = self.sender().itemText(index)
    def receiveInputs(self):
        ''' activates on next button and sends the input to wizard '''
        self.wizard().edgeDelimiters = [delim.text() for delim in self.delimiterFields]
        self.wizard().edgeColumns = [comboBox.selection for comboBox in self.columnSelectors]
| 38.011673 | 99 | 0.636708 |
9e402ec0c81a78ec15ba15355351ab2d4dbbc70a
| 728 |
py
|
Python
|
DataStructures/Tree/InorderTraversal.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
DataStructures/Tree/InorderTraversal.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
DataStructures/Tree/InorderTraversal.py
|
baby5/HackerRank
|
1e68a85f40499adb9b52a4da16936f85ac231233
|
[
"MIT"
] | null | null | null |
#coding:utf-8
def inOrder(root):
    # Recursive in-order traversal: left subtree, node, right subtree.
    # (Python 2: the trailing comma keeps output on one line.)
    if root:
        inOrder(root.left)
        print root.data,
        inOrder(root.right)
def inOrder_stupid(root):
    # Iterative in-order traversal that DESTROYS the tree: child links are
    # set to None once pushed, so a node without remaining children is ripe
    # to print when it is popped again.
    stack = [root]
    while stack:
        node = stack.pop()
        if node.left:
            # Revisit node after its left subtree; sever the link so the
            # second visit falls through.
            stack.append(node)
            stack.append(node.left)
            node.left = None
        elif node.right:
            # Print node before descending right: node sits on top of stack.
            stack.append(node.right)
            stack.append(node)
            node.right = None
        else:
            print node.data,
def inOrder_smart(root):
    # Classic non-destructive iterative in-order traversal: slide to the
    # leftmost node pushing ancestors, pop/print, then move right.
    stack = []
    while root or stack:
        while root:
            stack.append(root)
            root = root.left
        root = stack.pop()
        print root.data,
        root = root.right
| 20.222222 | 36 | 0.51511 |
9e928952d13965ae6933a184402058f74bf8046f
| 613 |
py
|
Python
|
py-td3-cinema/gui/new.py
|
HuguesGuilleus/istyPOO
|
f460665799be2b2f34a1ebaa9878e06bb028a410
|
[
"BSD-3-Clause"
] | null | null | null |
py-td3-cinema/gui/new.py
|
HuguesGuilleus/istyPOO
|
f460665799be2b2f34a1ebaa9878e06bb028a410
|
[
"BSD-3-Clause"
] | null | null | null |
py-td3-cinema/gui/new.py
|
HuguesGuilleus/istyPOO
|
f460665799be2b2f34a1ebaa9878e06bb028a410
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from gi.repository import Gtk
from . import edit
db = None
builder = None
initialize = False
def init(b, d):
    "Prepare the creation of a new object: show the name form, reset the field."
    global db, builder, initialize
    builder = b
    db = d
    builder.get_object("indexList").hide()
    builder.get_object("newZone").show()
    builder.get_object("newName").set_text("")
    # Connect the "next" handler only once, even if init() is called again.
    if not initialize:
        initialize = True
        builder.get_object("newNext").connect("clicked", next)
def next(_):
    "Create the new object from the entered name, save it, and open the editor."
    # NOTE(review): this module-level `next` shadows the builtin next().
    s = db.new()
    s.setId(builder.get_object("newName").get_text())
    s.save()
    edit.create(builder, s)
| 19.774194 | 56 | 0.694943 |
cde0959f52bb4074c331bb3d2dd0e16c9ecc2855
| 962 |
py
|
Python
|
algorithms/ar-bsrt/python3/bubble_sort_exe.py
|
NuclearCactus/FOSSALGO
|
eb66f3bdcd6c42c66e8fc7110a32ac021596ca66
|
[
"MIT"
] | 59 |
2018-09-11T17:40:25.000Z
|
2022-03-03T14:40:39.000Z
|
algorithms/ar-bsrt/python3/bubble_sort_exe.py
|
RitvikDayal/FOSSALGO
|
ae225a5fffbd78d0dff83fd7b178ba47bfd7a769
|
[
"MIT"
] | 468 |
2018-08-28T17:04:29.000Z
|
2021-12-03T15:16:34.000Z
|
algorithms/ar-bsrt/python3/bubble_sort_exe.py
|
RitvikDayal/FOSSALGO
|
ae225a5fffbd78d0dff83fd7b178ba47bfd7a769
|
[
"MIT"
] | 253 |
2018-08-28T17:08:51.000Z
|
2021-11-01T12:30:39.000Z
|
def bubble_sort(length, array):
    """Sort the first *length* elements of *array* in place with bubble sort.

    Args:
        length: number of elements of *array* to sort.
        array: list of mutually comparable items; mutated in place.

    Returns:
        The same list, sorted ascending.

    Fixes: removed the dead ``j = 0`` assignment and added the standard
    early-exit (a pass with no swaps means the prefix is already sorted),
    turning the best case from O(n^2) into O(n).
    """
    for i in range(length):
        swapped = False
        for j in range(0, length - i - 1):
            if array[j] > array[j + 1]:
                array[j], array[j + 1] = array[j + 1], array[j]
                swapped = True
        if not swapped:
            break
    return array
# This is the main function of the program
def main():
    """Read an array from stdin, bubble-sort it, and print the result."""
    length = int(input('Enter the length of the array to be entered : ')) # Taking the length of array
    array = [int(i) for i in input('Enter Array Elements : ').split()] # Taking array elements
    sorted_array = bubble_sort(length,array) # Calling the function for sorting the array using bubble sort
    print("Sorted Array is : ")
    for i in sorted_array: # Printing the sorted array
        print(i, end = " ")

# Running the main code of the program
if __name__ == '__main__':
    main()
| 45.809524 | 111 | 0.634096 |
cdff1dcac24122449b1b75eeb430fbb61d6f8c80
| 1,267 |
py
|
Python
|
exercises/de/test_04_11_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085 |
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/de/test_04_11_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79 |
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/de/test_04_11_01.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361 |
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
def test():
    """Check the learner's TRAINING_DATA: three (text, {"entities": [...]})
    pairs with the expected WEBSITE spans.  The assert messages are shown
    to the (German-speaking) learner and must remain in German.
    """
    assert (
        len(TRAINING_DATA) == 3
    ), "Irgendetwas scheint mit deinen Daten nicht zu stimmen. Erwartet werden 3 Beispiele."
    assert all(
        len(entry) == 2 and isinstance(entry[1], dict) for entry in TRAINING_DATA
    ), "Die Trainingsdaten haben nicht das richtige Format. Erwartet wird eine Liste von Tuples, bestehend aus Text und einem Dictionary als zweites Element."
    # Pull out just the entity span lists for easier checking.
    ents = [entry[1].get("entities", []) for entry in TRAINING_DATA]
    assert len(ents[0]) == 2, "Das erste Beispiel sollte zwei Entitäten enhalten."
    # Expected (start, end, label) spans for the first example.
    ent_0_0 = (0, 6, "WEBSITE")
    ent_0_1 = (11, 18, "WEBSITE")
    assert (
        ents[0][0] == ent_0_0
    ), "Überprüfe nochmal die erste Entität im ersten Beispiel."
    assert (
        ents[0][1] == ent_0_1
    ), "Überprüfe nochmal die zweite Entität im ersten Beispiel."
    assert len(ents[1]) == 1, "Das zweite Beispiel sollte eine Entität enthalten."
    assert ents[1] == [
        (28, 35, "WEBSITE",)
    ], "Überprüfe nochmal die Entität im zweiten Beispiel."
    assert len(ents[2]) == 1, "Das dritte Beispiel sollte eine Entität enthalten."
    assert ents[2] == [
        (15, 21, "WEBSITE",)
    ], "Überprüfe nochmal die Entität im dritten Beispiel."
    __msg__.good("Sehr schön!")
| 45.25 | 158 | 0.650355 |
93ff5227a48408e109f4148250f0bc3fa8839969
| 39 |
py
|
Python
|
src/server/__init__.py
|
muenstermannmarius/ElectionSystem
|
a6e60d9147423787e869587b808def4771f89cb7
|
[
"RSA-MD"
] | null | null | null |
src/server/__init__.py
|
muenstermannmarius/ElectionSystem
|
a6e60d9147423787e869587b808def4771f89cb7
|
[
"RSA-MD"
] | null | null | null |
src/server/__init__.py
|
muenstermannmarius/ElectionSystem
|
a6e60d9147423787e869587b808def4771f89cb7
|
[
"RSA-MD"
] | null | null | null |
print("server package initializes ...")
| 39 | 39 | 0.74359 |
274cf06283a0a2bd1e52541c05a4b38428b8f0f7
| 879 |
py
|
Python
|
intro-vietstack/tools/tagcloud.py
|
vietstack/vietstack.github.io
|
6571001dc72751ee89115d9ed520c2b9ca2f2b86
|
[
"MIT"
] | 1 |
2016-11-29T09:35:50.000Z
|
2016-11-29T09:35:50.000Z
|
intro-vietstack/tools/tagcloud.py
|
vietstack/vietstack.github.io
|
6571001dc72751ee89115d9ed520c2b9ca2f2b86
|
[
"MIT"
] | 8 |
2016-09-23T01:25:12.000Z
|
2016-11-14T04:02:16.000Z
|
intro-vietstack/tools/tagcloud.py
|
vietstack/vietstack.github.io
|
6571001dc72751ee89115d9ed520c2b9ca2f2b86
|
[
"MIT"
] | 14 |
2016-09-22T01:46:57.000Z
|
2019-09-05T05:42:11.000Z
|
#!/usr/bin/python
import os
import sys
import argparse
import askbot
def parse_args():
    """Parse CLI options: --endpoint/-e (API base URL) and --limit/-l (max questions)."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-e', '--endpoint',
        default='https://ask.openstack.org/en/api/v1')
    parser.add_argument(
        '-l', '--limit',
        type=int,
        default=1000)
    return parser.parse_args()
def main():
    """Tally tag frequencies over up to --limit Askbot questions and print them."""
    args = parse_args()
    # NOTE(review): args.endpoint is parsed but the endpoint below is
    # hard-coded — confirm which should win.
    ask = askbot.Askbot(endpoint='https://ask.openstack.org/en/api/v1')
    tags = {}
    for i,q in enumerate(ask.questions()):
        for tag in q['tags']:
            tag = tag.lower()
            try:
                tags[tag] += 1
            except KeyError:
                tags[tag] = 1
        if i >= args.limit:
            break
    # Least-frequent tags first (Python 2 print statement).
    for tag, count in sorted(tags.items(), key=lambda x: x[1]):
        print '%s\t%s' % (tag, count)

if __name__ == '__main__':
    main()
| 20.928571 | 71 | 0.518771 |
fda26313fad19e7f4d8018c00129a2d68c331683
| 1,744 |
py
|
Python
|
src/test/tests/databases/h5part.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 226 |
2018-12-29T01:13:49.000Z
|
2022-03-30T19:16:31.000Z
|
src/test/tests/databases/h5part.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 5,100 |
2019-01-14T18:19:25.000Z
|
2022-03-31T23:08:36.000Z
|
src/test/tests/databases/h5part.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 84 |
2019-01-24T17:41:50.000Z
|
2022-03-10T10:01:46.000Z
|
# ----------------------------------------------------------------------------
# CLASSES: nightly
#
# Test Case: h5part.py
#
# Programmer: Gunther Weber
# Date: January, 2009
#
# Modifications:
# Mark C. Miller, Wed Jan 21 09:36:13 PST 2009
# Took Gunther's original code and integrated it with test suite.
#
# ----------------------------------------------------------------------------
RequiredDatabasePlugin("H5Part")
TurnOffAllAnnotations()
OpenDatabase(data_path("h5part_test_data/sample.h5part"), 0)
# Test 1: pseudocolor plot of the Gaussian scalar field.
AddPlot("Pseudocolor", "GaussianField", 1, 0)
DrawPlots()
Test("h5part_01")
# Test 2: switch variable and pin an explicit 3D view.
ChangeActivePlotsVar("LinearField")
View3DAtts = GetView3D()
View3DAtts.viewNormal = (1.000000, 0.000000, 0.0000000)
View3DAtts.focus = (31.5, 31.5, 31.5)
View3DAtts.viewUp = (0.000000, 1.000000, 0.0000000)
View3DAtts.viewAngle = 30
View3DAtts.parallelScale = 54.5596
View3DAtts.nearPlane = -109.119
View3DAtts.farPlane = 109.119
View3DAtts.imagePan = (0, 0)
View3DAtts.imageZoom = 1
View3DAtts.perspective = 1
View3DAtts.eyeAngle = 2
View3DAtts.centerOfRotationSet = 0
View3DAtts.centerOfRotation = (31.5, 31.5, 31.5)
SetView3D(View3DAtts)
Test("h5part_02")
DeleteActivePlots()
# Test 3: particles rendered as spheres.
AddPlot("Pseudocolor", "px", 1, 0)
PseudocolorAtts = PseudocolorAttributes()
PseudocolorAtts.pointType = PseudocolorAtts.Sphere
PseudocolorAtts.pointSize = 1.5
SetPlotOptions(PseudocolorAtts)
DrawPlots()
Test("h5part_03")
# Test 4: 3D slice through the field at x = 30 on the particles mesh.
AddPlot("Pseudocolor", "LinearField", 1, 0)
AddOperator("Slice", 0)
SliceAtts = SliceAttributes()
SliceAtts.originType = SliceAtts.Intercept
SliceAtts.originIntercept = 30
SliceAtts.axisType = SliceAtts.XAxis
SliceAtts.project2d = 0
SliceAtts.meshName = "particles"
SetOperatorOptions(SliceAtts)
DrawPlots()
Test("h5part_04")
Exit()
| 27.25 | 78 | 0.693807 |
d0233f635149000801c897af99a3c62d23a51a3b
| 6,372 |
py
|
Python
|
src/preprocessing_classification.py
|
B1T0/DeUs
|
392f3bd3a97190cc2bc5dda9385b9728252cb975
|
[
"MIT"
] | 2 |
2018-03-13T06:49:32.000Z
|
2018-03-16T16:04:06.000Z
|
src/preprocessing_classification.py
|
B1T0/DeUs
|
392f3bd3a97190cc2bc5dda9385b9728252cb975
|
[
"MIT"
] | null | null | null |
src/preprocessing_classification.py
|
B1T0/DeUs
|
392f3bd3a97190cc2bc5dda9385b9728252cb975
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow as tf
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{}\n"
def get_batched_one_hot(char_seqs_indices, labels, start_index, end_index, nn="cnn"):
    """Slice [start_index:end_index) and one-hot encode the character indices.

    Output layout depends on the network type: "rnn" yields
    (batch, seq, alphabet); "cnn" yields (batch, alphabet, seq, 1).
    An index of -1 (character not in the alphabet) leaves its one-hot
    slot at zero.  Returns [one_hot_batch, label_batch].
    """
    x_batch = char_seqs_indices[start_index:end_index]
    y_batch = labels[start_index:end_index]
    if nn == "rnn":
        x_batch_one_hot = np.zeros(shape=[len(x_batch), len(x_batch[0]), len(alphabet)])
        for row, char_seq in enumerate(x_batch):
            for position, char_index in enumerate(char_seq):
                if char_index != -1:
                    x_batch_one_hot[row][position][char_index] = 1
    elif nn == "cnn":
        x_batch_one_hot = np.zeros(shape=[len(x_batch), len(alphabet), len(x_batch[0]), 1])
        for row, char_seq in enumerate(x_batch):
            for position, char_index in enumerate(char_seq):
                if char_index != -1:
                    x_batch_one_hot[row][char_index][position][0] = 1
    return [x_batch_one_hot, y_batch]
def load_data(filename):
    """Read the author-classification corpus and split it into train/eval.

    Each line is "<author>|SEPERATOR|<text>" with author ids appearing in
    contiguous runs.  Texts are lower-cased, truncated to the LAST
    tf.flags.FLAGS.text_length characters, padded with spaces, and mapped
    to int8 alphabet indices (-1 = unknown char).  Labels are one-hot over
    tf.flags.FLAGS.num_authors.  Per author, examples 0..19999 go to train
    and 20001..25999 to eval (the commented 1/25 split is the old scheme;
    example 20000 is skipped — NOTE(review): likely an off-by-one).
    """
    examples = []
    labels = []
    examples_train = []
    examples_eval = []
    labels_train = []
    labels_eval = []
    with open(filename, encoding="utf-8") as f:
        i = 0
        author, authordouble = 1, 1
        for line in f:
            author, text = line.split("|SEPERATOR|")
            author = int(author)
            # A change of author id ends the previous author's run:
            # flush that author's accumulated examples into the splits.
            if author != authordouble:
                print(str(author-1) + " ends at " + str(i))
                # split = int(i/25)
                # examples_train.extend(examples[:-split])
                # examples_eval.extend(examples[-split:])
                # labels_train.extend(labels[:-split])
                # labels_eval.extend(labels[-split:])
                examples_train.extend(examples[0:20000])
                examples_eval.extend(examples[20001:26000])
                labels_train.extend(labels[0:20000])
                labels_eval.extend(labels[20001:26000])
                examples = []
                labels = []
                authordouble = author
            # shorten text if it is too long
            if len(text) > tf.flags.FLAGS.text_length:
                text_end_extracted = text.lower()[-tf.flags.FLAGS.text_length:]
            else:
                text_end_extracted = text.lower()
            # pad text with spaces if it is too short
            num_padding = tf.flags.FLAGS.text_length - len(text_end_extracted)
            padded = text_end_extracted + " " * num_padding
            text_int8_repr = np.array([alphabet.find(char) for char in padded], dtype=np.int8)
            # One-hot label for this author (ids are 1-based in the file).
            author_one_hot = []
            for author_i in range(tf.flags.FLAGS.num_authors):
                if author_i == author-1:
                    author_one_hot.append(1)
                else:
                    author_one_hot.append(0)
            labels.append(author_one_hot)
            examples.append(text_int8_repr)
            i += 1
    # Flush the final author's run.
    print(str(author - 1) + " ends at " + str(i))
    split = int(i / 25)
    # examples_train.extend(examples[:-split])
    # examples_eval.extend(examples[-split:])
    # labels_train.extend(labels[:-split])
    # labels_eval.extend(labels[-split:])
    examples_train.extend(examples[0:20000])
    examples_eval.extend(examples[20001:26000])
    labels_train.extend(labels[0:20000])
    labels_eval.extend(labels[20001:26000])
    print("Non-neutral instances processed: " + str(i))
    x_train = np.array(examples_train, dtype=np.int8)
    x_eval = np.array(examples_eval, dtype=np.int8)
    y_train = np.array(labels_train, dtype=np.int8)
    y_eval = np.array(labels_eval, dtype=np.int8)
    print("x_char_seq_ind=" + str(x_train.shape))
    print("y shape=" + str(y_train.shape))
    return x_train, x_eval, y_train, y_eval
def batch_iterator(x, y, batch_size, num_epochs, nn, shuffle=True):
    """Yield zipped one-hot (x, y) batches for *num_epochs* passes over the data.

    NOTE(review): data is reshuffled on every epoch after the first even
    when shuffle=False (`shuffle or epoch > 0`) — confirm intended.
    NOTE(review): num_batches_per_epoch = size//batch + 1 produces an
    empty final batch when size is an exact multiple of batch_size.
    """
    data_size = len(x)
    num_batches_per_epoch = int(data_size/batch_size) + 1
    for epoch in range(num_epochs):
        print("Epoch: " + str(epoch + 1))
        # Shuffle the data at each epoch
        if shuffle or epoch > 0:
            shuffle_indices = np.random.permutation(np.arange(data_size))
            x_shuffled = x[shuffle_indices]
            y_shuffled = y[shuffle_indices]
        else:
            x_shuffled = x
            y_shuffled = y
        for batch_num in range(num_batches_per_epoch):
            start_index = batch_num * batch_size
            end_index = min((batch_num + 1) * batch_size, data_size)
            x_batch, y_batch = get_batched_one_hot(x_shuffled, y_shuffled, start_index, end_index, nn)
            batch = list(zip(x_batch, y_batch))
            yield batch
def prepare_cnn_api_input(txt, text_length):
    """One-hot encode *txt* for the CNN API: shape (1, alphabet, text_length, 1).

    NOTE(review): keeps the *start* of over-long texts, whereas load_data
    keeps the end — confirm this asymmetry is intended.
    """
    if len(txt) > text_length:
        extracted = txt.lower()[0:text_length]
    else:
        extracted = txt.lower()
    # Pad with spaces up to the fixed sequence length.
    padded = extracted + " " * (text_length - len(extracted))
    char_indices = np.array([alphabet.find(char) for char in padded], dtype=np.int8)
    one_hot = np.zeros(shape=[1, len(alphabet), len(char_indices), 1])
    for position, char_index in enumerate(char_indices):
        if char_index != -1:
            one_hot[0][char_index][position][0] = 1
    return one_hot
def prepare_rnn_api_input(txt, text_length):
    """One-hot encode *txt* for the RNN API: shape (1, text_length, alphabet).

    NOTE(review): keeps the *start* of over-long texts, whereas load_data
    keeps the end — confirm this asymmetry is intended.
    """
    if len(txt) > text_length:
        extracted = txt.lower()[0:text_length]
    else:
        extracted = txt.lower()
    # Pad with spaces up to the fixed sequence length.
    padded = extracted + " " * (text_length - len(extracted))
    char_indices = np.array([alphabet.find(char) for char in padded], dtype=np.int8)
    one_hot = np.zeros(shape=[1, text_length, len(alphabet)])
    for position, char_index in enumerate(char_indices):
        if char_index != -1:
            one_hot[0][position][char_index] = 1
    return one_hot
| 45.191489 | 102 | 0.62806 |
d0580caa50b6671941d52853c4e077901668ddc7
| 5,277 |
py
|
Python
|
mongodb/mongodb_consistent_backup/official/mongodb_consistent_backup/Upload/Rsync/Rsync.py
|
smthkissinger/docker-images
|
35e868295d04fa780325ada4168381f1e80e8fe4
|
[
"BSD-3-Clause"
] | 282 |
2016-06-16T14:41:44.000Z
|
2022-03-02T03:43:02.000Z
|
mongodb/mongodb_consistent_backup/official/mongodb_consistent_backup/Upload/Rsync/Rsync.py
|
smthkissinger/docker-images
|
35e868295d04fa780325ada4168381f1e80e8fe4
|
[
"BSD-3-Clause"
] | 146 |
2016-06-16T08:55:45.000Z
|
2020-09-08T10:37:32.000Z
|
mongodb/mongodb_consistent_backup/official/mongodb_consistent_backup/Upload/Rsync/Rsync.py
|
smthkissinger/docker-images
|
35e868295d04fa780325ada4168381f1e80e8fe4
|
[
"BSD-3-Clause"
] | 94 |
2016-06-16T10:49:07.000Z
|
2022-03-28T09:14:03.000Z
|
import os
import logging
import re
from copy_reg import pickle
from multiprocessing import Pool
from subprocess import check_output
from types import MethodType
from RsyncUploadThread import RsyncUploadThread
from mongodb_consistent_backup.Common import config_to_string
from mongodb_consistent_backup.Errors import OperationError
from mongodb_consistent_backup.Pipeline import Task
# Allows pooled .apply_async()s to work on Class-methods:
def _reduce_method(m):
    """Pickle reducer for (Python 2) method objects.

    multiprocessing pickles callables handed to worker processes; plain
    methods are not picklable, so reduce them to a (getattr, (owner, name))
    pair that re-resolves the method on unpickle.
    """
    if m.im_self is None:
        # Unbound method: re-resolve on the class.
        return getattr, (m.im_class, m.im_func.func_name)
    else:
        # Bound method: re-resolve on the instance.
        return getattr, (m.im_self, m.im_func.func_name)

# Register the reducer for all method objects (copy_reg.pickle).
pickle(MethodType, _reduce_method)
class Rsync(Task):
    def __init__(self, manager, config, timer, base_dir, backup_dir, **kwargs):
        """Read backup/upload/rsync settings from config and set up the pool."""
        super(Rsync, self).__init__(self.__class__.__name__, manager, config, timer, base_dir, backup_dir, **kwargs)
        # Backup source settings.
        self.backup_location = self.config.backup.location
        self.backup_name    = self.config.backup.name
        self.remove_uploaded = self.config.upload.remove_uploaded
        self.retries         = self.config.upload.retries
        # Remote rsync destination and credentials.
        self.rsync_path    = self.config.upload.rsync.path
        self.rsync_user    = self.config.upload.rsync.user
        self.rsync_host    = self.config.upload.rsync.host
        self.rsync_port    = self.config.upload.rsync.port
        self.rsync_ssh_key = self.config.upload.rsync.ssh_key
        self.rsync_binary  = "rsync"
        self.rsync_flags   = ["--archive", "--compress"]
        self.rsync_version = None
        self._rsync_info   = None  # lazy cache for rsync_info()
        # One worker process per configured upload thread.
        self.threads(self.config.upload.threads)
        self._pool = Pool(processes=self.threads())
    def init(self):
        """Validate preconditions: rsync binary present, source dir is a directory."""
        if not self.host_has_rsync():
            raise OperationError("Cannot find rsync binary on this host!")
        if not os.path.isdir(self.backup_dir):
            logging.error("The source directory: %s does not exist or is not a directory! Skipping Rsync upload!" % self.backup_dir)
            raise OperationError("The source directory: %s does not exist or is not a directory! Skipping Rsync upload!" % self.backup_dir)
def rsync_info(self):
if not self._rsync_info:
output = check_output([self.rsync_binary, "--version"])
search = re.search(r"^rsync\s+version\s([0-9.-]+)\s+protocol\sversion\s(\d+)", output)
self.rsync_version = search.group(1)
self._rsync_info = {"version": self.rsync_version, "protocol_version": int(search.group(2))}
return self._rsync_info
def host_has_rsync(self):
if self.rsync_info():
return True
return False
def get_dest_path(self):
return os.path.join(self.rsync_path, self.base_dir)
def prepare_dest_dir(self):
# mkdir -p the rsync dest path via ssh
ssh_mkdir_cmd = ["ssh"]
if self.rsync_ssh_key:
ssh_mkdir_cmd.extend(["-i", self.rsync_ssh_key])
ssh_mkdir_cmd.extend([
"%s@%s" % (self.rsync_user, self.rsync_host),
"mkdir", "-p", self.get_dest_path()
])
# run the mkdir via ssh
try:
check_output(ssh_mkdir_cmd)
except Exception, e:
logging.error("Creating rsync dest path with ssh failed for %s: %s" % (
self.rsync_host,
e
))
raise e
return True
def done(self, data):
logging.info(data)
def run(self):
try:
self.init()
self.timer.start(self.timer_name)
logging.info("Preparing destination path on %s" % self.rsync_host)
self.prepare_dest_dir()
rsync_config = {
"dest": "%s@%s:%s" % (self.rsync_user, self.rsync_host, self.get_dest_path()),
"threads": self.threads(),
"retries": self.retries
}
rsync_config.update(self.rsync_info())
logging.info("Starting upload using rsync version %s (%s)" % (
self.rsync_info()['version'],
config_to_string(rsync_config)
))
for child in os.listdir(self.backup_dir):
self._pool.apply_async(RsyncUploadThread(
os.path.join(self.backup_dir, child),
self.base_dir,
self.rsync_flags,
self.rsync_path,
self.rsync_user,
self.rsync_host,
self.rsync_port,
self.rsync_ssh_key,
self.remove_uploaded,
self.retries
).run, callback=self.done)
self.wait()
except Exception, e:
logging.error("Rsync upload failed! Error: %s" % e)
raise OperationError(e)
finally:
self.timer.stop(self.timer_name)
self.completed = True
def wait(self):
if self._pool:
logging.info("Waiting for Rsync upload threads to stop")
self._pool.close()
self._pool.join()
def close(self):
if self._pool:
logging.error("Stopping Rsync upload threads")
self._pool.terminate()
self._pool.join()
| 35.897959 | 139 | 0.600341 |
3ec18b017cefbc6f8d7136d75fbbe02aae206e4f
| 747 |
py
|
Python
|
vkapp/bot/dao/moneyDAO.py
|
ParuninPavel/lenta4_hack
|
6d3340201deadf5757e37ddd7cf5580b928d7bda
|
[
"MIT"
] | 1 |
2017-11-23T13:33:13.000Z
|
2017-11-23T13:33:13.000Z
|
vkapp/bot/dao/moneyDAO.py
|
ParuninPavel/lenta4_hack
|
6d3340201deadf5757e37ddd7cf5580b928d7bda
|
[
"MIT"
] | null | null | null |
vkapp/bot/dao/moneyDAO.py
|
ParuninPavel/lenta4_hack
|
6d3340201deadf5757e37ddd7cf5580b928d7bda
|
[
"MIT"
] | null | null | null |
from vkapp.bot.models import Income, Payment, Blogger, News
from .usersDAO import get_or_create_blogger
def new_income_proposal(amount, news):
    """Record a PROPOSAL income for *news* and credit its blogger.

    Side effects: saves the updated blogger balance first, then the new
    Income row (same order as before).
    """
    proposal = Income(amount=amount, news=news, type=Income.PROPOSAL)
    author = news.blogger
    author.balance += amount
    author.save()
    proposal.save()
def re_count_balance(uid):
    """Recompute a blogger's balance from scratch and persist it.

    Balance = sum of all Income amounts attached to the blogger's news
    minus the sum of all Payment amounts made to the blogger.

    :param uid: VK user id identifying the blogger.
    :return: the freshly computed balance (also saved on the blogger).
    """
    blogger = get_or_create_blogger(uid)
    incomes = Income.objects.filter(news__blogger__vk_user__vk_id=uid).select_related()
    payments = Payment.objects.filter(blogger=blogger)
    # sum() over generators replaces the two manual accumulation loops;
    # an empty queryset contributes 0 on either side.
    new_balance = sum(i.amount for i in incomes) - sum(p.amount for p in payments)
    blogger.balance = new_balance
    blogger.save()
    return new_balance
| 24.096774 | 87 | 0.721553 |
de1697c853b523656c534204afe853ba808d1454
| 257 |
py
|
Python
|
0-notes/job-search/Cracking the Coding Interview/C13Java/questions/13.8-question.py
|
eengineergz/Lambda
|
1fe511f7ef550aed998b75c18a432abf6ab41c5f
|
[
"MIT"
] | null | null | null |
0-notes/job-search/Cracking the Coding Interview/C13Java/questions/13.8-question.py
|
eengineergz/Lambda
|
1fe511f7ef550aed998b75c18a432abf6ab41c5f
|
[
"MIT"
] | null | null | null |
0-notes/job-search/Cracking the Coding Interview/C13Java/questions/13.8-question.py
|
eengineergz/Lambda
|
1fe511f7ef550aed998b75c18a432abf6ab41c5f
|
[
"MIT"
] | null | null | null |
# 13.8 Lambda Random
# Using Lambda expressions, write a function
# List<Integer> getRandomSubset(List<Integer> list)
# that returns a random subset of arbitrary size.
# All subsets, including the empty set, should be equally likely to be chosen.
| 42.833333 | 78 | 0.747082 |
de84a1991ceda7b7c74380bf83e67fd25b40d8c7
| 1,434 |
py
|
Python
|
books/PythonAutomate/webscrap/search.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/PythonAutomate/webscrap/search.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
books/PythonAutomate/webscrap/search.py
|
zeroam/TIL
|
43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1
|
[
"MIT"
] | null | null | null |
"""search.py
인자값을 전달한 키워드 기반으로 상위 n개 링크
웹 브라우저 실행"""
import sys
import requests, webbrowser, bs4
# Search-engine endpoints; the (already '+'-escaped) query string is
# appended directly to these URLs.
URLS = {
    'google': 'https://google.com/search?q=',
    'duckduckgo': 'https://duckduckgo.com/?q='
}
def parse_args() -> list:
    """Return the command-line search terms, exiting with usage if none."""
    terms = sys.argv[1:]
    if not terms:
        print(f'python {__file__} <search query>')
        sys.exit(1)
    return terms
def get_http_resp(query, url):
    """GET the search-results page for *query* and return the response.

    Raises requests.HTTPError on a non-2xx status.
    """
    print('Searching...')
    # Spoof a browser UA: some engines block the default python-requests agent.
    headers = {
        "user-agent": (
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:65.0) "
            "Gecko/20100101 Firefox/65.0"
        ),
    }
    response = requests.get(f'{url}{query}', headers=headers)
    response.raise_for_status()
    return response
def find_google_elems(resp):
    """Return the result-link anchor elements from a Google results page."""
    # NOTE(review): '.r > a' targets Google's classic result markup —
    # presumably brittle against layout changes; verify periodically.
    return bs4.BeautifulSoup(resp.text, 'lxml').select('.r > a')
def get_duckduckgo_resp(resp):
    """Return the result-link anchor elements from a DuckDuckGo page."""
    document = bs4.BeautifulSoup(resp.text, 'lxml')
    return document.select('.result__a')
def main():
    """Search Google for the CLI query and open up to 5 result links."""
    terms = parse_args()
    # Collapse terms into a single URL query: every space becomes '+'.
    query = ' '.join(terms).replace(' ', '+')
    links = find_google_elems(get_http_resp(query, URLS['google']))
    # Print every candidate among the first five, but only open hrefs
    # that are absolute http(s) URLs.
    for link in links[:5]:
        href = link.get('href')
        print(href)
        if href.startswith('http'):
            webbrowser.open(link.get('href'))


if __name__ == "__main__":
    main()
| 21.727273 | 101 | 0.623431 |
cd290cf05838f412872c8c6c94ba3cc084824729
| 50 |
py
|
Python
|
torba/torba/client/errors.py
|
mittalkartik2000/lbry-sdk
|
a07b17ec0c9c5d0a88bc730caf6ab955e0971b38
|
[
"MIT"
] | 4,076 |
2018-06-01T05:54:24.000Z
|
2022-03-07T21:05:52.000Z
|
torba/torba/client/errors.py
|
mittalkartik2000/lbry-sdk
|
a07b17ec0c9c5d0a88bc730caf6ab955e0971b38
|
[
"MIT"
] | 80 |
2018-06-14T01:02:03.000Z
|
2019-06-19T10:45:39.000Z
|
torba/torba/client/errors.py
|
braveheart12/lbry-sdk
|
dc709b468f9dce60d206161785def5c7ace2b763
|
[
"MIT"
] | 20 |
2018-06-27T21:52:22.000Z
|
2022-03-08T11:25:23.000Z
|
class InsufficientFundsError(Exception):
    """Raised when an operation requires more funds than are available."""
    pass
| 16.666667 | 40 | 0.8 |
f84004d3e63f1c2b996499d2deeb18c3df1165b4
| 1,266 |
py
|
Python
|
exercises/python/data-types/collections/pilling-up-loop.py
|
rogeriosantosf/hacker-rank-profile
|
d4b9c131524d138c415e5c5de4e38c6b8c35dd77
|
[
"MIT"
] | null | null | null |
exercises/python/data-types/collections/pilling-up-loop.py
|
rogeriosantosf/hacker-rank-profile
|
d4b9c131524d138c415e5c5de4e38c6b8c35dd77
|
[
"MIT"
] | null | null | null |
exercises/python/data-types/collections/pilling-up-loop.py
|
rogeriosantosf/hacker-rank-profile
|
d4b9c131524d138c415e5c5de4e38c6b8c35dd77
|
[
"MIT"
] | null | null | null |
# Given a number of linear cubes with it's size lengths,
# print if you can stack the cubes by grabing the rightmost or leftmost each time
# print 'Yes' if you can pile the cubes or 'No' otherwise
# Sample Input
# 2
# 4
# 4 3 2 4
# 3
# 3 2 1
# Sample Output
# Yes
# No
from collections import deque
def grab_a_cube(cubes):
    """Remove and return the larger of the two end cubes of *cubes*.

    A single remaining cube is simply popped; on a tie the rightmost
    cube is taken (matching the original behavior).

    :param cubes: collections.deque of cube side lengths (non-empty).
    :return: the removed side length.
    """
    if len(cubes) == 1:
        return cubes.pop()
    leftmost = cubes.popleft()
    rightmost = cubes.pop()
    if leftmost > rightmost:
        cubes.append(rightmost)      # put the smaller one back (right end)
        return leftmost
    cubes.appendleft(leftmost)       # put the smaller one back (left end)
    return rightmost


def is_stackable(cubes):
    """Return True if the cubes can be piled by always taking an end cube.

    The pile is valid when each grabbed cube is no larger than the cube
    beneath it, i.e. the grabbed sizes form a non-increasing sequence.

    Bug fix: the original initialized its flag to False and never entered
    the loop for a single-cube deque, wrongly reporting one cube as
    unstackable; a lone cube is trivially stackable.

    :param cubes: collections.deque of side lengths (consumed; non-empty).
    """
    cube = grab_a_cube(cubes)
    while cubes:
        next_cube = grab_a_cube(cubes)
        if cube < next_cube:
            return False
        cube = next_cube
    return True
if __name__ == '__main__':
    # Read the number of test cases, then for each case read (and discard)
    # the cube count line followed by the line of cube side lengths.
    cases_number = int(input())
    for _ in range(cases_number):
        # The count line must still be consumed even though the sizes
        # line alone determines the answer.
        int(input())
        d = deque(map(int, input().split()))
        try:
            print("Yes" if is_stackable(d) else "No")
        except RuntimeError as e:
            # Bug fix: Exception.message does not exist in Python 3 and
            # raised AttributeError; print the exception itself instead.
            print(e)
| 21.457627 | 81 | 0.583728 |
39f5aecafae8bf1ee9b03439a6457473f7deef49
| 754 |
py
|
Python
|
Jahr 1/M/Gleichungen/LineareGleichungen.py
|
BackInBash/Technikerschule
|
6e30654417732fae065e36a40789866ccca6aa7e
|
[
"MIT"
] | 2 |
2021-01-20T16:16:41.000Z
|
2022-01-12T15:37:53.000Z
|
Jahr 1/M/Gleichungen/LineareGleichungen.py
|
BackInBash/Technikerschule
|
6e30654417732fae065e36a40789866ccca6aa7e
|
[
"MIT"
] | 2 |
2020-06-17T21:55:24.000Z
|
2021-09-08T20:40:41.000Z
|
Jahr 1/M/Gleichungen/LineareGleichungen.py
|
BackInBash/Technikerschule
|
6e30654417732fae065e36a40789866ccca6aa7e
|
[
"MIT"
] | 1 |
2020-12-28T13:03:34.000Z
|
2020-12-28T13:03:34.000Z
|
GlowScript 3.0 VPython
# setup: playing field, two goals, and the puck (names kept in German)
spielfeld = box(size = vec(100, 50, 5))
tor_l = box(size = vec(10,30,5),
            pos = vec(-45,0,0),
            color = color.green)
tor_r = box(size = vec(10,30,5),
            pos = vec(+45,0,0),
            color = color.green)
puck = sphere(radius = 3,
              color = color.red,
              pos = vec(-50,0,5))
attach_trail(puck, color = color.blue)
# slope and intercept of the puck's straight-line path
m = 0.375
b = -0
# x/y start values for the puck
puck.pos.x = -40
puck.pos.y = -15
# game loop
fr = 30 # framerate
# game-running flag ("laeuft" = running)
laeuft = True
while laeuft:
    rate(fr)
    puck.pos.x = puck.pos.x+1
    # straight-line equation y = m*x + b
    puck.pos.y = m*puck.pos.x + b
    # stop once the puck leaves the field
    if puck.pos.x > 50 or puck.pos.x <-50:
        laeuft = False
| 19.842105 | 44 | 0.570292 |
f2e55fe6dd28c25ce694b3a09198bd3a4ea8a287
| 388 |
py
|
Python
|
source/pkgsrc/archivers/py-czipfile/patches/patch-setup.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | 1 |
2021-11-20T22:46:39.000Z
|
2021-11-20T22:46:39.000Z
|
source/pkgsrc/archivers/py-czipfile/patches/patch-setup.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | null | null | null |
source/pkgsrc/archivers/py-czipfile/patches/patch-setup.py
|
Scottx86-64/dotfiles-1
|
51004b1e2b032664cce6b553d2052757c286087d
|
[
"Unlicense"
] | null | null | null |
$NetBSD: patch-setup.py,v 1.1 2016/07/09 21:13:02 wiz Exp $
Use setuptools for packaging to create nice eggs.
--- setup.py.orig 2010-07-28 06:19:12.000000000 +0900
+++ setup.py 2016-07-07 21:11:32.000000000 +0900
@@ -2,6 +2,7 @@
# vim: set expandtab tabstop=4 shiftwidth=4:
import os
+import setuptools
from distutils.core import setup
from distutils.extension import Extension
| 25.866667 | 59 | 0.724227 |
299463637c8265dd9b83e9adc974be667b820e5e
| 1,576 |
py
|
Python
|
HW1/p6.py
|
kvswim/kv_jhu_cv
|
2ddf7a9d497aef116a7c043157b8631cea45000d
|
[
"MIT"
] | null | null | null |
HW1/p6.py
|
kvswim/kv_jhu_cv
|
2ddf7a9d497aef116a7c043157b8631cea45000d
|
[
"MIT"
] | null | null | null |
HW1/p6.py
|
kvswim/kv_jhu_cv
|
2ddf7a9d497aef116a7c043157b8631cea45000d
|
[
"MIT"
] | null | null | null |
#Kyle Verdeyen
#[email protected]
#Computer Vision EN.601.461
#Assignment 1
#Programming section 2, p6.py
#Thresholds an edge image to only return strong edges.
#Also performs Hough transform and scales to maxval=255
import cv2
import numpy as np
#loosely based on an implementation found here
# https://rosettacode.org/wiki/Hough_transform#Python
def p6(edge_image, edge_thresh): #return [edge_thresh_image, hough_image]
    """Threshold an edge image and accumulate its Hough transform.

    Pixels above ``edge_thresh`` are set to 255 in the binary output and
    vote into a (rho=800, theta=180) accumulator using the normal form
    x*cos(t) + y*sin(t) = rho; the accumulator is then rescaled so its
    maximum vote count maps to 255.

    Returns [edge_thresh_image, hough_image] as two 2-D numpy arrays.
    """
    #y=mx+b is not suitable
    #use xsin(theta)-ycos(theta)+rho=0
    dimensions = np.shape(edge_image)
    rows, columns = dimensions[0], dimensions[1]
    # hypo = image diagonal length: the largest possible |rho|.
    hypo = int(np.sqrt((rows**2) + (columns**2)))
    theta = 180
    rho = 800
    # NOTE(review): diag = hypo/400 is a very small offset; typically the
    # offset should shift negative rho values into the bin range (~hypo).
    # Presumably works only because x,y >= 0 here keeps rho non-negative —
    # TODO confirm the intended rho-to-bin scaling.
    diag = hypo/(rho/2)
    edge_thresh_image = np.zeros((rows,columns))
    accumulator = np.zeros((rho, theta))
    hough_image = np.zeros((rho, theta))
    #compute hough transform
    for x in range(rows):
        for y in range(columns):
            if edge_image[x][y] > edge_thresh:
                edge_thresh_image[x][y] = 255 #over threshold, set to white
                #build accumulator: one vote per (edge pixel, theta bin)
                for a in range(theta):
                    temp_theta = a * (np.pi / theta)
                    temp_rho = int(round(x*np.cos(temp_theta) + y*np.sin(temp_theta)) + diag)
                    accumulator[temp_rho][a] += 1
            else:
                edge_thresh_image[x][y] = 0 #under threshold, set to black
    #scale hough image based on highest number of votes (max=255)
    most_votes = np.amax(accumulator)
    acc_dims = np.shape(accumulator)
    dim_rho = acc_dims[0]
    dim_theta = acc_dims[1]
    for x in range(dim_rho):
        for y in range(dim_theta):
            hough_image[x][y] = int((accumulator[x][y]/most_votes) * 255)
    return [edge_thresh_image, hough_image]
| 32.163265 | 78 | 0.714467 |
29f95fd8b1fe319e6fba67756fffb13695a133c2
| 1,927 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/restaurant/doctype/restaurant_menu/test_restaurant_menu.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
frappe-bench/apps/erpnext/erpnext/restaurant/doctype/restaurant_menu/test_restaurant_menu.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/restaurant/doctype/restaurant_menu/test_restaurant_menu.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# Fixtures consumed by the frappe test runner: four non-stock Items plus
# two Restaurant Menus that price various subsets of them.
test_records = [
	dict(doctype='Item', item_code='Food Item 1',
		item_group='Products', is_stock_item=0),
	dict(doctype='Item', item_code='Food Item 2',
		item_group='Products', is_stock_item=0),
	dict(doctype='Item', item_code='Food Item 3',
		item_group='Products', is_stock_item=0),
	dict(doctype='Item', item_code='Food Item 4',
		item_group='Products', is_stock_item=0),
	dict(doctype='Restaurant Menu', restaurant='Test Restaurant 1', name='Test Restaurant 1 Menu 1',
		items = [
			dict(item='Food Item 1', rate=400),
			dict(item='Food Item 2', rate=300),
			dict(item='Food Item 3', rate=200),
			dict(item='Food Item 4', rate=100),
		]),
	dict(doctype='Restaurant Menu', restaurant='Test Restaurant 1', name='Test Restaurant 1 Menu 2',
		items = [
			dict(item='Food Item 1', rate=450),
			dict(item='Food Item 2', rate=350),
		])
]
class TestRestaurantMenu(unittest.TestCase):
	"""Checks that saving a Restaurant Menu creates/updates its Price List."""
	def test_price_list_creation_and_editing(self):
		"""Saving a menu must mirror its item rates into Item Price rows,
		and editing a rate must propagate on re-save."""
		menu1 = frappe.get_doc('Restaurant Menu', 'Test Restaurant 1 Menu 1')
		menu1.save()
		menu2 = frappe.get_doc('Restaurant Menu', 'Test Restaurant 1 Menu 2')
		menu2.save()
		# A Price List named after the menu should now exist.
		self.assertTrue(frappe.db.get_value('Price List', 'Test Restaurant 1 Menu 1'))
		# Each menu keeps its own rate for the same item.
		self.assertEqual(frappe.db.get_value('Item Price',
			dict(price_list = 'Test Restaurant 1 Menu 1', item_code='Food Item 1'), 'price_list_rate'), 400)
		self.assertEqual(frappe.db.get_value('Item Price',
			dict(price_list = 'Test Restaurant 1 Menu 2', item_code='Food Item 1'), 'price_list_rate'), 450)
		# Changing a rate and re-saving must update the Item Price row.
		menu1.items[0].rate = 401
		menu1.save()
		self.assertEqual(frappe.db.get_value('Item Price',
			dict(price_list = 'Test Restaurant 1 Menu 1', item_code='Food Item 1'), 'price_list_rate'), 401)
		# Restore the original rate so fixtures stay pristine for other tests.
		menu1.items[0].rate = 400
		menu1.save()
| 35.685185 | 99 | 0.707317 |
4b254f85ca684200ef8973061a3a39dab730b676
| 2,632 |
py
|
Python
|
spo/utils/mandat_invoice.py
|
libracore/spo
|
c6617a4624d683e27ee3fde745313c30504f3fd1
|
[
"MIT"
] | null | null | null |
spo/utils/mandat_invoice.py
|
libracore/spo
|
c6617a4624d683e27ee3fde745313c30504f3fd1
|
[
"MIT"
] | 6 |
2019-08-23T18:36:26.000Z
|
2019-11-12T13:12:12.000Z
|
spo/utils/mandat_invoice.py
|
libracore/spo
|
efff6da53a776c4483f06d9ef1acc8a7aa96b28e
|
[
"MIT"
] | 1 |
2021-08-14T22:22:43.000Z
|
2021-08-14T22:22:43.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, libracore and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
@frappe.whitelist()
def get_mandat_logs(mandat):
mandat = frappe.get_doc("Mandat", mandat)
referenz_anfrage = mandat.anfragen
if referenz_anfrage:
referenz_anfrage = " OR `spo_referenz` = '{referenz_anfrage}'".format(referenz_anfrage=referenz_anfrage)
else:
referenz_anfrage = ''
logs = frappe.db.sql("""SELECT
`tabTimesheet Detail`.`hours`,
`tabTimesheet Detail`.`spo_dokument`,
`tabTimesheet Detail`.`spo_remark`,
`tabTimesheet Detail`.`from_time`,
`tabTimesheet Detail`.`owner`,
`employee` AS `employee_name`
FROM `tabTimesheet Detail`
INNER JOIN `tabEmployee` ON `tabTimesheet Detail`.`owner` = `tabEmployee`.`user_id`
WHERE
`tabTimesheet Detail`.`nicht_verrechnen` != 1
AND `tabTimesheet Detail`.`spo_referenz` = '{reference}'
OR `tabTimesheet Detail`.`spo_referenz` IN (
SELECT `name` FROM `tabAnforderung Patientendossier` WHERE `mandat` = '{reference}')
OR `tabTimesheet Detail`.`spo_referenz` IN (
SELECT `name` FROM `tabMedizinischer Bericht` WHERE `mandat` = '{reference}')
OR `tabTimesheet Detail`.`spo_referenz` IN (
SELECT `name` FROM `tabTriage` WHERE `mandat` = '{reference}')
OR `tabTimesheet Detail`.`spo_referenz` IN (
SELECT `name` FROM `tabVollmacht` WHERE `mandat` = '{reference}')
OR `tabTimesheet Detail`.`spo_referenz` IN (
SELECT `name` FROM `tabAbschlussbericht` WHERE `mandat` = '{reference}')
{referenz_anfrage}
ORDER BY `tabTimesheet Detail`.`from_time`, `tabTimesheet Detail`.`idx` ASC""".format(reference=mandat.name, referenz_anfrage=referenz_anfrage), as_dict=True)
return {
'logs': logs,
'rsv': mandat.rsv,
'rate': mandat.stundensatz
}
| 59.818182 | 191 | 0.50228 |
8a9c790066953dd47a829fdb4114813f539cba83
| 717 |
py
|
Python
|
pythonProj/FZPython/demo/update_symbol.py
|
iHamburg/FZQuant
|
86b750ec33d01badfd3f324d6f1599118b9bf8ff
|
[
"MIT"
] | null | null | null |
pythonProj/FZPython/demo/update_symbol.py
|
iHamburg/FZQuant
|
86b750ec33d01badfd3f324d6f1599118b9bf8ff
|
[
"MIT"
] | null | null | null |
pythonProj/FZPython/demo/update_symbol.py
|
iHamburg/FZQuant
|
86b750ec33d01badfd3f324d6f1599118b9bf8ff
|
[
"MIT"
] | 2 |
2019-04-10T10:05:00.000Z
|
2021-11-24T17:17:23.000Z
|
#!/usr/bin/env python
# coding: utf8
from pyquant.db_models import *
def add_exchange_id():
""" 0开头的添加 exchangeId: SZ
6开头添加 exchangeId: SH
"""
for symbol in Symbol.get_all( limit=0):
# print(symbol)
if symbol.ticker.startswith('6'):
symbol.exchange_id = 'SH'
else:
symbol.exchange_id = 'SZ'
session.commit()
if __name__ == '__main__':
""""""
# update_tick()
# download_index()
# insert_index_to_symbol()
# import_sz180()
# import_symbol('../datas/沪深300_000300.txt')
# add_exchange_id()
# ticker1 = '6001'
# ticker2 = '0002'
# print(ticker1.startswith('6'))
# print(ticker2.startswith('6'))
| 18.868421 | 48 | 0.582985 |
0a111dfe51310c5dc255496f0991da199ecbe61a
| 5,927 |
py
|
Python
|
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/pn_port_cos_rate_setting.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/pn_port_cos_rate_setting.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/pn_port_cos_rate_setting.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pn_port_cos_rate_setting
author: "Pluribus Networks (@rajaspachipulusu17)"
short_description: CLI command to modify port-cos-rate-setting
description:
- This modules can be used to update the port cos rate limit.
options:
pn_cliswitch:
description:
- Target switch to run the CLI on.
required: false
type: str
state:
description:
- State the action to perform. Use C(update) to modify
the port-cos-rate-setting.
required: true
type: str
choices: ['update']
pn_cos0_rate:
description:
- cos0 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos1_rate:
description:
- cos1 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos2_rate:
description:
- cos2 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos3_rate:
description:
- cos3 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos4_rate:
description:
- cos4 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos5_rate:
description:
- cos5 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos6_rate:
description:
- cos6 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_cos7_rate:
description:
- cos7 rate limit (pps) unlimited or 0 to 10000000.
required: false
type: str
pn_port:
description:
- port.
required: false
type: str
choices: ['control-port', 'data-port', 'span-ports']
'''
EXAMPLES = """
- name: port cos rate modify
pn_port_cos_rate_setting:
pn_cliswitch: "sw01"
state: "update"
pn_port: "control-port"
pn_cos1_rate: "1000"
pn_cos5_rate: "1000"
pn_cos2_rate: "1000"
pn_cos0_rate: "1000"
- name: port cos rate modify
pn_port_cos_rate_setting:
pn_cliswitch: "sw01"
state: "update"
pn_port: "data-port"
pn_cos1_rate: "2000"
pn_cos5_rate: "2000"
pn_cos2_rate: "2000"
pn_cos0_rate: "2000"
"""
RETURN = """
command:
description: the CLI command run on the target node.
returned: always
type: str
stdout:
description: set of responses from the port-cos-rate-setting command.
returned: always
type: list
stderr:
description: set of error responses from the port-cos-rate-setting command.
returned: on error
type: list
changed:
description: indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli
def main():
    """Ansible entry point: update per-CoS port rate limits via the nvOS CLI.

    Builds a `port-cos-rate-setting-modify` command from the module
    parameters and hands it to run_cli, which executes it and exits the
    module with the result.
    """
    state_map = dict(
        update='port-cos-rate-setting-modify'
    )

    module = AnsibleModule(
        argument_spec=dict(
            pn_cliswitch=dict(required=False, type='str'),
            state=dict(required=True, type='str',
                       choices=state_map.keys()),
            pn_cos1_rate=dict(required=False, type='str'),
            pn_cos5_rate=dict(required=False, type='str'),
            pn_cos2_rate=dict(required=False, type='str'),
            pn_cos0_rate=dict(required=False, type='str'),
            pn_cos6_rate=dict(required=False, type='str'),
            pn_cos3_rate=dict(required=False, type='str'),
            pn_cos4_rate=dict(required=False, type='str'),
            pn_cos7_rate=dict(required=False, type='str'),
            pn_port=dict(required=False, type='str',
                         choices=['control-port', 'data-port', 'span-ports']),
        ),
        required_if=(
            ['state', 'update', ['pn_port']],
        ),
        required_one_of=[['pn_cos0_rate',
                          'pn_cos1_rate',
                          'pn_cos2_rate',
                          'pn_cos3_rate',
                          'pn_cos4_rate',
                          'pn_cos5_rate',
                          'pn_cos6_rate',
                          'pn_cos7_rate']],
    )

    # Accessing the arguments
    params = module.params
    cliswitch = params['pn_cliswitch']
    state = params['state']
    port = params['pn_port']

    command = state_map[state]

    # Building the CLI command string
    cli = pn_cli(module, cliswitch)
    if command == 'port-cos-rate-setting-modify':
        cli += ' %s ' % command
        # Append each provided CoS rate flag, in the same order the
        # original implementation used (1, 5, 2, 0, 6, 3, 4, 7).
        for cos in (1, 5, 2, 0, 6, 3, 4, 7):
            value = params['pn_cos%d_rate' % cos]
            if value:
                cli += ' cos%d-rate ' % cos + value
        if port:
            cli += ' port ' + port

    run_cli(module, cli, state_map)


if __name__ == '__main__':
    main()
6aaa7c461797333ca76a5b3ef9cd3e7b8a096722
| 2,471 |
py
|
Python
|
Python/00_little_helper/configured_logger/configured_logger.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/00_little_helper/configured_logger/configured_logger.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
Python/00_little_helper/configured_logger/configured_logger.py
|
Kreijeck/learning
|
eaffee08e61f2a34e01eb8f9f04519aac633f48c
|
[
"MIT"
] | null | null | null |
#########################################
# author: Thomas Strehler
# created: 25.02.2021
#######################################
import logging
import os
def printit():
print("import geht endlich1!!!")
class MyLogger:
"""
Usage:
Import Class My Logger
Add optional loglevels for stream and file-Hander
loglevel: "debug", "info", "warning", "error"
Add Loggers: add_logger(stream_handler=True, file_handler=True, folder='log', filename='log.log')
Use that logger
"""
def __init__(self, name, loglevel_stream="debug", loglevel_file="debug"):
# set loglevel
self.loglevel_stream = self.__loglevel(loglevel_stream)
self.loglevel_file = self.__loglevel(loglevel_file)
# set formatter
self.formatter = logging.Formatter('%(asctime)s: %(name)s - %(levelname)s - %(message)s', "%Y-%m-%d %H:%M:%S")
# set logger
self.logger = logging.getLogger(name)
self.logger.setLevel(logging.DEBUG)
@staticmethod
def __loglevel(level):
if level == "debug":
return logging.DEBUG
elif level == "info":
return logging.INFO
elif level == "warning":
return logging.WARNING
elif level == "error":
return logging.ERROR
else:
raise KeyError(f"Loglevel {level} not allowed!")
def add_stream_logger(self):
lh = logging.StreamHandler()
lh.setLevel(self.loglevel_stream)
lh.setFormatter(self.formatter)
self.logger.addHandler(lh)
def add_file_logger(self, folder, file):
# create folder
if not os.path.exists(folder):
os.makedirs(folder)
save = os.path.join(folder, file)
lh = logging.FileHandler(save)
lh.setLevel(self.loglevel_file)
lh.setFormatter(self.formatter)
self.logger.addHandler(lh)
def add_logger(self, stream_handler=True, file_handler=True, folder='log', filename='log.log'):
"""
Add wishend logger
:param stream_handler: bool, add StreamHandler
:param file_handler: bool, add FileHandler
:param folder: folder from Working Dir
:param filename: filename, inkl. ending
:return: get a logger for messages
"""
if stream_handler:
self.add_stream_logger()
if file_handler:
self.add_file_logger(folder=folder, file=filename)
return self.logger
| 30.134146 | 118 | 0.601781 |
0ac13c344ecec4e6aa6af8c78c5ca650561fc969
| 1,033 |
py
|
Python
|
nz_django/day4/method_decorator_demo/article/views.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | null | null | null |
nz_django/day4/method_decorator_demo/article/views.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | 27 |
2020-02-12T07:55:58.000Z
|
2022-03-12T00:19:09.000Z
|
nz_django/day4/method_decorator_demo/article/views.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | 2 |
2020-02-18T01:54:55.000Z
|
2020-02-21T11:36:28.000Z
|
from django.shortcuts import render,redirect,reverse
from django.http import HttpResponse
from django.views.decorators.http import require_http_methods,require_GET,require_POST,require_safe
#require_GET 等同于require_http_methods(['GET'])
#require_POST 等同于require_http_methods(['POST'])
#require_safe get heade 请求 只能查看 不能修改
#等同于require_http_methods(['GET','HEAD'])
from .models import Article
#返回所有的文章 get请求
# @require_http_methods(['GET'])
# @require_GET
@require_safe
def index(request):
articles = Article.objects.all()
# return render(request,'index.html',context={'articles':articles})
return HttpResponse('只能安全访问')
#get post
@require_http_methods(['GET','POST'])
def add_article(request):
if request.method == 'GET':
return render(request,'add.html')
else:
title = request.POST.get('title')
content = request.POST.get('content')
price = request.POST.get('price')
Article.objects.create(title=title,content=content,price=price)
return redirect(reverse('index'))
| 36.892857 | 99 | 0.734753 |
0ae8dfe260a788275dde173063b457da4b7d3c27
| 3,237 |
py
|
Python
|
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/source_control/gitlab/test_gitlab_runner.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/source_control/gitlab/test_gitlab_runner.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/source_control/gitlab/test_gitlab_runner.py
|
tr3ck3r/linklight
|
5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Guillaume Martinez ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import
import pytest
from ansible_collections.community.general.plugins.modules.source_control.gitlab.gitlab_runner import GitLabRunner
def _dummy(x):
"""Dummy function. Only used as a placeholder for toplevel definitions when the test is going
to be skipped anyway"""
return x
# Skip markers collected here apply to every test in this module; one is
# appended for each optional dependency that fails to import.
pytestmark = []
try:
    from ..gitlab import (GitlabModuleTestCase,
                          python_version_match_requirement,
                          resp_find_runners_list, resp_get_runner,
                          resp_create_runner, resp_delete_runner)

    # GitLab module requirements
    if python_version_match_requirement():
        from gitlab.v4.objects import Runner
except ImportError:
    pytestmark.append(pytest.mark.skip("Could not load gitlab module required for testing"))
    # Need to set these to something so that we don't fail when parsing
    GitlabModuleTestCase = object
    resp_find_runners_list = _dummy
    resp_get_runner = _dummy
    resp_create_runner = _dummy
    resp_delete_runner = _dummy

# Unit tests requirements
try:
    from httmock import with_httmock # noqa
except ImportError:
    pytestmark.append(pytest.mark.skip("Could not load httmock module required for testing"))
    with_httmock = _dummy
class TestGitlabRunner(GitlabModuleTestCase):
    """Unit tests for GitLabRunner against httmock-faked GitLab API responses."""
    def setUp(self):
        # Build the module helper under test on top of the mocked module
        # and gitlab instance provided by the base class.
        super(TestGitlabRunner, self).setUp()

        self.moduleUtil = GitLabRunner(module=self.mock_module, gitlab_instance=self.gitlab_instance)

    @with_httmock(resp_find_runners_list)
    @with_httmock(resp_get_runner)
    def test_runner_exist(self):
        """existsRunner is True for a listed runner, False otherwise."""
        rvalue = self.moduleUtil.existsRunner("test-1-20150125")

        self.assertEqual(rvalue, True)

        rvalue = self.moduleUtil.existsRunner("test-3-00000000")

        self.assertEqual(rvalue, False)

    @with_httmock(resp_create_runner)
    def test_create_runner(self):
        """createRunner returns a Runner with the requested description."""
        runner = self.moduleUtil.createRunner({"token": "token", "description": "test-1-20150125"})

        self.assertEqual(type(runner), Runner)
        self.assertEqual(runner.description, "test-1-20150125")

    @with_httmock(resp_find_runners_list)
    @with_httmock(resp_get_runner)
    def test_update_runner(self):
        """updateRunner reports changed only when an attribute differs."""
        runner = self.moduleUtil.findRunner("test-1-20150125")

        changed, newRunner = self.moduleUtil.updateRunner(runner, {"description": "Runner description"})

        self.assertEqual(changed, True)
        self.assertEqual(type(newRunner), Runner)
        self.assertEqual(newRunner.description, "Runner description")

        # Second identical update must be a no-op (changed == False).
        changed, newRunner = self.moduleUtil.updateRunner(runner, {"description": "Runner description"})

        self.assertEqual(changed, False)
        self.assertEqual(newRunner.description, "Runner description")

    @with_httmock(resp_find_runners_list)
    @with_httmock(resp_get_runner)
    @with_httmock(resp_delete_runner)
    def test_delete_runner(self):
        """deleteRunner returns None after removing the cached runner."""
        self.moduleUtil.existsRunner("test-1-20150125")

        rvalue = self.moduleUtil.deleteRunner()

        self.assertEqual(rvalue, None)
| 34.073684 | 114 | 0.718876 |
7c1aa50a6f137de7f86c9cddce8a90d9fb5f4ee0
| 4,974 |
py
|
Python
|
modeling/model_utils/non_local_parts.py
|
UESTC-Liuxin/SkmtSeg
|
1251de57fae967aca395644d1c70a9ba0bb52271
|
[
"Apache-2.0"
] | 2 |
2020-12-22T08:40:05.000Z
|
2021-03-30T08:09:44.000Z
|
modeling/model_utils/non_local_parts.py
|
UESTC-Liuxin/SkmtSeg
|
1251de57fae967aca395644d1c70a9ba0bb52271
|
[
"Apache-2.0"
] | null | null | null |
modeling/model_utils/non_local_parts.py
|
UESTC-Liuxin/SkmtSeg
|
1251de57fae967aca395644d1c70a9ba0bb52271
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn.functional as F
import torch.nn as nn
class multi_head_attention_2d(torch.nn.Module):
    """2-D multi-head scaled-dot-product attention (non-local block).

    NOTE(review): forward() flattens the batch dimension together with the
    spatial dimensions before the attention matmul, so queries attend to keys
    of *all* batch elements jointly -- confirm this is intended.
    """
    def __init__(self, in_channel, key_filters, value_filters,
                 output_filters, num_heads, dropout_prob=0.5, layer_type='SAME'):
        super().__init__()
        # NOTE: the string below is a plain expression statement (it appears
        # after super().__init__() so it is not the method docstring); kept verbatim.
        """Multihead scaled-dot-product attention with input/output transformations.
        Args:
            inputs: a Tensor with shape [batch, h, w, channels]
            key_filters: an integer. Note that queries have the same number
                of channels as keys
            value_filters: an integer
            output_depth: an integer
            num_heads: an integer dividing key_filters and value_filters
            layer_type: a string, type of this layer -- SAME, DOWN, UP
        Returns:
            A Tensor of shape [batch, _h, _w, output_filters]
        Raises:
            ValueError: if the key_filters or value_filters are not divisible
                by the number of attention heads.
        """
        # Each head must receive an integral number of key/value channels.
        if key_filters % num_heads != 0:
            raise ValueError("Key depth (%d) must be divisible by the number of "
                             "attention heads (%d)." % (key_filters, num_heads))
        if value_filters % num_heads != 0:
            raise ValueError("Value depth (%d) must be divisible by the number of "
                             "attention heads (%d)." % (value_filters, num_heads))
        if layer_type not in ['SAME', 'DOWN', 'UP']:
            raise ValueError("Layer type (%s) must be one of SAME, "
                             "DOWN, UP." % (layer_type))
        self.num_heads = num_heads
        self.layer_type = layer_type
        # Query projection: 1x1 conv (SAME), strided 3x3 conv (DOWN halves the
        # spatial size), or transposed conv (UP doubles it).
        self.QueryTransform = None
        if layer_type == 'SAME':
            self.QueryTransform = nn.Conv2d(in_channel, key_filters, kernel_size=1, stride=1,
                                            padding=0, bias=True)
        elif layer_type == 'DOWN':
            self.QueryTransform = nn.Conv2d(in_channel, key_filters, kernel_size=3, stride=2,
                                            padding=1, bias=True)  # author use bias
        elif layer_type == 'UP':
            self.QueryTransform = nn.ConvTranspose2d(in_channel, key_filters, kernel_size=3, stride=2,
                                                     padding=1, bias=True)
        # Key/value projections are always 1x1 convs at the input resolution.
        self.KeyTransform = nn.Conv2d(in_channel, key_filters, kernel_size=1, stride=1, padding=0, bias=True)
        self.ValueTransform = nn.Conv2d(in_channel, value_filters, kernel_size=1, stride=1, padding=0, bias=True)
        self.attention_dropout = nn.Dropout(dropout_prob)
        self.outputConv = nn.Conv2d(value_filters, output_filters, kernel_size=1, stride=1, padding=0, bias=True)
        # Standard attention scaling: sqrt of the per-head key depth.
        self._scale = (key_filters // num_heads) ** 0.5
    def forward(self, inputs):
        """
        :param inputs: B, C, H, W
        :return: inputs: B, Co, Hq, Wq
        """
        if self.layer_type == 'SAME' or self.layer_type == 'DOWN':
            q = self.QueryTransform(inputs)
        elif self.layer_type == 'UP':
            # Transposed conv: request the exact doubled output size.
            q = self.QueryTransform(inputs, output_size=(inputs.shape[2]*2, inputs.shape[3]*2))
        # [B, Hq, Wq, Ck]
        k = self.KeyTransform(inputs).permute(0, 2, 3, 1)
        v = self.ValueTransform(inputs).permute(0, 2, 3, 1)
        q = q.permute(0, 2, 3, 1)
        Batch, Hq, Wq = q.shape[0], q.shape[1], q.shape[2]
        #[B, H, W, N, Ck]
        k = self.split_heads(k, self.num_heads)
        v = self.split_heads(v, self.num_heads)
        q = self.split_heads(q, self.num_heads)
        #[(B, H, W, N), c]  -- batch, spatial, and head axes collapsed into rows
        k = torch.flatten(k, 0, 3)
        v = torch.flatten(v, 0, 3)
        q = torch.flatten(q, 0, 3)
        # normalize
        q = q / self._scale
        # attention
        #[(B, Hq, Wq, N), (B, H, W, N)]
        A = torch.matmul(q, k.transpose(0, 1))
        A = torch.softmax(A, dim=1)
        A = self.attention_dropout(A)
        # [(B, Hq, Wq, N), C]
        O = torch.matmul(A, v)
        # [B, Hq, Wq, C]  -- heads re-merged into the channel dimension
        O = O.view(Batch, Hq, Wq, v.shape[-1]*self.num_heads)
        # [B, C, Hq, Wq]
        O = O.permute(0, 3, 1, 2)
        # [B, Co, Hq, Wq]
        O = self.outputConv(O)
        return O
    def split_heads(self, x, num_heads):
        """Split channels (last dimension) into multiple heads.
        Args:
            x: a Tensor with shape [batch, h, w, channels]
            num_heads: an integer
        Returns:
            a Tensor with shape [batch, h, w, num_heads, channels / num_heads]
        """
        channel_num = x.shape[-1]
        return x.view(x.shape[0], x.shape[1], x.shape[2], num_heads, int(channel_num/num_heads))
if __name__ == '__main__':
    # Smoke test: push one random (1, 20, 50, 50) tensor through an UP layer.
    run_device = torch.device('cpu')  # switch to 'cuda:0' for a GPU run
    sample = torch.rand(1, 20, 50, 50).to(run_device)
    attention = multi_head_attention_2d(20, 4, 4, 11, 4, 0.5, 'UP')  # 'SAME', 'DOWN', 'UP'
    out = attention(sample)
    print('input shape:', sample.shape)
    print('res shape:', out.shape)
| 38.859375 | 113 | 0.564335 |
863c0c02bb99e728169da6f3b2675c093dd3363d
| 192 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/patches/v7_2/update_attendance_docstatus.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/patches/v7_2/update_attendance_docstatus.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/patches/v7_2/update_attendance_docstatus.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
import frappe
def execute():
frappe.reload_doc("education", "doctype", "student_attendance")
frappe.db.sql('''
update `tabStudent Attendance` set
docstatus=0
where
docstatus=1''')
| 21.333333 | 64 | 0.713542 |
07e8f666efd1b4dde3af6c27c5d9d3646d216262
| 165 |
py
|
Python
|
Online-Judges/DimikOJ/Python/28-ramdom-array.py
|
shihab4t/Competitive-Programming
|
e8eec7d4f7d86bfa1c00b7fbbedfd6a1518f19be
|
[
"Unlicense"
] | 3 |
2021-06-15T01:19:23.000Z
|
2022-03-16T18:23:53.000Z
|
Online-Judges/DimikOJ/Python/28-ramdom-array.py
|
shihab4t/Competitive-Programming
|
e8eec7d4f7d86bfa1c00b7fbbedfd6a1518f19be
|
[
"Unlicense"
] | null | null | null |
Online-Judges/DimikOJ/Python/28-ramdom-array.py
|
shihab4t/Competitive-Programming
|
e8eec7d4f7d86bfa1c00b7fbbedfd6a1518f19be
|
[
"Unlicense"
] | null | null | null |
material = []
for i in range(int(input())):
material.append(int(input()))
srted = sorted(material)
if srted == material:
print("YES")
else:
print("NO")
| 16.5 | 33 | 0.618182 |
9c51e7500f9f84b7fd9fa0c4efe0760648c1e466
| 1,539 |
py
|
Python
|
pykasso/abrechnung.py
|
pekh/pykasso
|
86ece737d62ce28aef00805a19a789714e788453
|
[
"MIT"
] | null | null | null |
pykasso/abrechnung.py
|
pekh/pykasso
|
86ece737d62ce28aef00805a19a789714e788453
|
[
"MIT"
] | null | null | null |
pykasso/abrechnung.py
|
pekh/pykasso
|
86ece737d62ce28aef00805a19a789714e788453
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
from dataclasses import dataclass
from datetime import date, datetime
from decimal import Decimal
from typing import Dict, List
@dataclass(eq=True, frozen=True)
class Mitglied:
    """Immutable member record; frozen+eq make it hashable (used as dict key)."""
    name: str
    vorname: str
    anrede: str = 'Hallo'
    strasse: str = ''
    plz: str = ''
    ort: str = ''
    land: str = ''
    email: str = ''
@dataclass
class AbrechnungsPosition:
    """A single billed line item: date, description, and monetary value."""
    datum: date
    text: str
    wert: Decimal
@dataclass
class Abrechnung:
    """A member's statement: the member plus their billed line items."""
    mitglied: Mitglied
    positionen: List[AbrechnungsPosition]
    # NOTE: the iterator protocol could be implemented here to iterate
    # over the positions directly.
    @property
    def betrag(self):
        """Total monetary value of all line items."""
        return sum(position.wert for position in self.positionen)
def abrechnungen_aus_transaktionen(transaktionen: List[Dict[str, str]]) -> List[Abrechnung]:
    """Group raw transaction rows by member and build one Abrechnung each.

    The member is the last ':'-separated segment of 'Account Name'
    ("Vorname Name"); dates are dd.mm.yyyy and amounts use a decimal comma.
    Each Abrechnung's positions are sorted by date.
    """
    je_mitglied: Dict[Mitglied, List[AbrechnungsPosition]] = defaultdict(list)
    for zeile in transaktionen:
        vorname, name = zeile['Account Name'].split(':')[-1].split(' ')
        position = AbrechnungsPosition(
            datum=datetime.strptime(zeile['Date'], '%d.%m.%Y').date(),
            text=zeile['Description'],
            wert=Decimal(zeile['Amount Num.'].replace(',', '.')),
        )
        je_mitglied[Mitglied(name, vorname)].append(position)
    return [
        Abrechnung(mitglied, positionen=sorted(posten, key=lambda p: p.datum))
        for mitglied, posten in je_mitglied.items()
    ]
| 26.084746 | 92 | 0.638077 |
b9b5a2cb650de0028f3b2df5d2a206f43aa1a7d9
| 800 |
py
|
Python
|
nz_django/day5/orm_homework/front/models.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | null | null | null |
nz_django/day5/orm_homework/front/models.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | 27 |
2020-02-12T07:55:58.000Z
|
2022-03-12T00:19:09.000Z
|
nz_django/day5/orm_homework/front/models.py
|
gaohj/nzflask_bbs
|
36a94c380b78241ed5d1e07edab9618c3e8d477b
|
[
"Apache-2.0"
] | 2 |
2020-02-18T01:54:55.000Z
|
2020-02-21T11:36:28.000Z
|
from django.db import models
class Student(models.Model):
    """Student table."""
    name = models.CharField(max_length=100)
    gender = models.SmallIntegerField()  # integer gender code; mapping app-defined -- TODO confirm
    class Meta:
        db_table = 'student'
class Course(models.Model):
    """Course table."""
    name = models.CharField(max_length=100)
    # SET_NULL keeps the course row alive when its teacher is deleted.
    teacher = models.ForeignKey("Teacher",on_delete=models.SET_NULL,null=True)
    class Meta:
        db_table = 'course'
class Score(models.Model):
    """Score table: links a student to a course with a numeric grade."""
    # CASCADE: deleting a student or course also removes its scores.
    student = models.ForeignKey("Student",on_delete=models.CASCADE)
    course = models.ForeignKey("Course",on_delete=models.CASCADE)
    number = models.FloatField()  # the grade value
    class Meta:
        db_table = 'score'
class Teacher(models.Model):
    """Teacher table."""
    name = models.CharField(max_length=100)
    class Meta:
        db_table = 'teacher'
| 24.242424 | 78 | 0.66 |
6a0485e3b0b3438cfb8e9eb18619f96102aedf38
| 13,219 |
py
|
Python
|
evo_algorithm/ea_fields/ea_template_field.py
|
MateRyze/InformatiCup-2019
|
eeca3ff7f8a102f4093697c6badee21ce25e2e87
|
[
"MIT"
] | 3 |
2019-03-21T17:02:55.000Z
|
2019-04-04T18:16:10.000Z
|
evo_algorithm/ea_fields/ea_template_field.py
|
MateRyze/InformatiCup-2019
|
eeca3ff7f8a102f4093697c6badee21ce25e2e87
|
[
"MIT"
] | 11 |
2019-10-30T12:05:39.000Z
|
2022-03-11T23:43:54.000Z
|
evo_algorithm/ea_fields/ea_template_field.py
|
MateRyze/InformatiCup-2019
|
eeca3ff7f8a102f4093697c6badee21ce25e2e87
|
[
"MIT"
] | 1 |
2019-10-30T12:04:00.000Z
|
2019-10-30T12:04:00.000Z
|
# -*- coding: utf-8 -*-
import requests
import os
import skimage
import random
import json
import webbrowser
import time
import pandas as pd
import matplotlib.pyplot as plt
import os
import sys
from PIL import Image, ImageDraw
global population
global api_calls
global stop
global MUTATION_RATE
classList = []
confidenceList = []
confidenzList = []
population = []
api_calls = 0
stop = False
MUTATION_RATE = 10
# initial random generation of an image
def generateImage():
    """Create a 64x64 black image tiled with sixteen randomly colored 16x16 squares.

    Returns an individual dict: {"image", "confidence", "colors", "class"}.
    """
    img = Image.new('RGB', (64, 64), color='black')
    draw = ImageDraw.Draw(img)
    # Tile order: left 32px half first, then the right half, row by row --
    # same ordering as the original hand-written coordinate list.
    positions = [((x, y), (x + 16, y + 16))
                 for half in (0, 32)
                 for y in range(0, 64, 16)
                 for x in (half, half + 16)]
    colors = []
    for box in positions:
        shade = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
        draw.rectangle(box, fill=shade)
        colors.append(shade)
    return {"image": img, "confidence": 0, "colors": colors, "class": ""}
# eval fitness for each individual
def evalFitness():
    """Query the remote classifier for every not-yet-labelled individual.

    Saves each image to 'toEval.png', POSTs it to the API, and stores the
    top-1 class and confidence on the individual. Retries with a 1 s backoff
    on JSON decode failures (API rate limit) and on any other error.
    """
    global api_calls
    global stop
    global classList
    global confidenceList
    print("doing api calls (evaluation)")
    for individual in population:
        if(individual["class"] == ""):
            name = 'toEval.png'
            image = individual["image"]
            image.save(name)
            while(True):
                try:
                    payload = {'key': 'Engeibei1uok4xaecahChug6eihos0wo'}
                    r = requests.post(
                        'https://phinau.de/trasi',
                        data=payload,
                        files={'image': open(name, 'rb')}
                    )
                    api_calls += 1
                    individual["confidence"] = r.json()[0]["confidence"]
                    individual["class"] = str(r.json()[0]["class"])
                    classList.append(individual["class"])
                    confidenceList.append(individual["confidence"])
                    break
                except ValueError:
                    time.sleep(1)
                    # print("Decoding JSON failed -> hit API rate :(")
                    # stop = True
                # NOTE(review): bare except retries on *any* failure (including
                # KeyboardInterrupt); consider narrowing.
                except:
                    print("Unexpected error:", sys.exc_info()[0])
                    print("Retrying...")
                    time.sleep(1)
    print("api calls: " + str(api_calls))
# create initial population
def initPopulation(count):
    """Seed the global population with `count` randomly generated individuals."""
    population.extend(generateImage() for _ in range(count))
# select best individuals from population
def selection(bestCount, sameClassCount):
    """Keep the most confident individuals, limiting duplicates per class.

    bestCount: number of individuals kept when sameClassCount == 1.
    sameClassCount: maximum individuals allowed per predicted class.
    Mutates the global `population` in place.
    """
    print("doing selection")
    global population
    # sort by confidence, best first
    population.sort(
        key=lambda individual: individual["confidence"],
        reverse=True
    )
    classesContained = []
    selectedPopulation = []
    for individual in population:
        # limit count of individuals with same class
        if(classesContained.count(individual["class"]) < sameClassCount):
            # do not take individuals for a class that already reached >= 90 %
            if(not any(
                selectedIndividual["confidence"] >= 0.9 and
                selectedIndividual["class"] == individual["class"]
                for selectedIndividual in selectedPopulation
            )):
                selectedPopulation.append(individual)
                classesContained.append(individual["class"])
    population = selectedPopulation
    # reduce individuals -> reduce API calls
    # BUGFIX: integer equality must use ==; `is` compares identity and only
    # works by accident via CPython's small-int cache.
    if sameClassCount == 2:
        # del population[int(INITIAL_POPULATION/2):]
        print("no individuals deleted from selection")
    elif sameClassCount == 1:
        del population[bestCount:]
# crossover between individuals in the population
def crossover():
    """Recombine pairs of individuals that share a predicted class.

    Currently this only groups the candidates; the actual recombination is
    the disabled draft kept in the string literal at the bottom (TODO).
    """
    print("doing crossover")
    # use only for same classes from inital population
    # sort duplicates with same classes like [vorfahrt99%, vorfahrt97%, ...]
    seen = []  # helper list
    duplicates = []
    # append one individual from every class
    for individual in population:
        if individual["class"] not in seen:
            duplicates.append([individual])
            seen.append(individual["class"])
    # print(duplicates)
    # append other individuals from same class
    for index, entry in enumerate(duplicates):
        for individual in population:
            if (
                individual not in entry and
                individual["class"] == entry[0]["class"]
            ):
                duplicates[index] = duplicates[index] + [individual]
    # filter duplicates for crossover by confidence and length
    # crossover makes sense for at least two individuals and confidence < 90%
    duplicates = [
        entry for entry in duplicates
        if len(entry) > 1 and entry[0]["confidence"] < 0.90
    ]
    # cross rectangles, generate new images
    for duplicate in duplicates:
        # duplicate contain two individuals with same class
        #TODO for Moheb: combine the rectangles from two duplicates to one image
        # The triple-quoted block below is a no-op string expression: a draft
        # implementation kept for reference, never executed.
        """ #  + population[0 + j]["colors"] + population[0 + j]["colors"] + population[0 + j]["colors"]
        colorsFirst = population[0 + j]["colors"]
        # + population[1 + j]["colors"] + population[1 + j]["colors"] + population[1 + j]["colors"]
        colorsSecond = population[1 + j]["colors"]
        img = Image.new('RGB', (64, 64), color='black')
        draw = ImageDraw.Draw(img)
        positions = [
            ((0, 0), (16, 16)),
            ((16, 0), (32, 16)),
            ((0, 16), (16, 32)),
            ((16, 16), (32, 32)),
            ((0, 32), (16, 48)),
            ((16, 32), (32, 48)),
            ((0, 48), (16, 64)),
            ((16, 48), (32, 64)),
            ((32, 0), (48, 16)),
            ((48, 0), (64, 16)),
            ((32, 16), (48, 32)),
            ((48, 16), (64, 32)),
            ((32, 32), (48, 48)),
            ((48, 32), (64, 48)),
            ((32, 48), (48, 64)),
            ((48, 48), (64, 64)),
        ]
        # [colorsFirst[0], colorsFirst[1], colorsSecond[2], colorsSecond[3]]
        colors = colorsFirst[:8] + colorsSecond[:8]
        for i in range(16):
            draw.rectangle(positions[i], fill=colors[i])
        population.append({"image": img, "confidence": 0,
                           "colors": colors, "class": ""}) """
# mutate each individual in the population and delete old population
def mutate(confidence):
    """Replace the population with mutated copies of each individual.

    Individuals below `confidence` get one randomly recolored tile; the old
    generation is deleted afterwards.
    NOTE(review): `rect = random.randint(0, 3)` only ever mutates the first
    four of the sixteen tiles, and the new channel values may leave 0..255 --
    confirm both are intended.
    """
    # IMPLEMENT HERE YOUR MUTATION FUNCTION
    # EXAMPLE: mutate colors of random rectangle
    population_size = len(population)
    for j in range(len(population)):
        img = Image.new('RGB', (64, 64), color='black')
        draw = ImageDraw.Draw(img)
        positions = [
            ((0, 0), (16, 16)),
            ((16, 0), (32, 16)),
            ((0, 16), (16, 32)),
            ((16, 16), (32, 32)),
            ((0, 32), (16, 48)),
            ((16, 32), (32, 48)),
            ((0, 48), (16, 64)),
            ((16, 48), (32, 64)),
            ((32,0), (48, 16)),
            ((48, 0), (64, 16)),
            ((32, 16), (48, 32)),
            ((48, 16), (64, 32)),
            ((32, 32), (48, 48)),
            ((48, 32), (64, 48)),
            ((32, 48), (48, 64)),
            ((48, 48), (64, 64)),
        ]
        colors = population[j]["colors"]# + population[j]["colors"] + population[j]["colors"] + population[j]["colors"] + population[j]["colors"]
        if(population[j]["confidence"] < confidence):
            # change the color of a random square
            rect = random.randint(0, 3)
            colors[rect] = (
                colors[rect][0] + 1 + random.randint(-10, 10) * MUTATION_RATE,
                colors[rect][1] + 1 + random.randint(-10, 10) * MUTATION_RATE,
                colors[rect][2] + 1 + random.randint(-10, 10) * MUTATION_RATE
            )
        for i in range(16):
            draw.rectangle(positions[i], fill=colors[i])
        population.append({"image": img, "confidence": 0, "colors": colors, "class": ""})
    # delete old
    del population[:population_size]
def generateFields(n):
    """Return rectangle coordinates for a regular subdivision of a 64x64 image.

    The four-quadrant base shape is scaled down by `dimension` and replicated
    across a dimension x dimension grid; each grid cell yields a list of four
    ((x0, y0), (x1, y1)) corner tuples (floats).

    NOTE(review): `n` is currently ignored and the grid size is fixed at 16.
    Callers such as saveResults() pass varying n values and presumably expect
    them to control the subdivision -- confirm before wiring n through to
    `dimension`. (The original also built a 16-entry literal coordinate list
    that was immediately overwritten; that dead code has been removed.)
    """
    # Base shape: the image split into four equal quadrants.
    positions_origin = [
        ((0, 0), (32, 32)),
        ((32, 0), (64, 32)),
        ((0, 32), (32, 64)),
        ((32, 32), (64, 64)),
    ]
    dimension = 16
    # Scale the base shape down; it becomes the template for every grid cell.
    position_scaled = [[(point[0] / dimension, point[1] / dimension) for point in row]
                       for row in positions_origin]
    positions = []
    offset = 64 / dimension  # cell pitch, hoisted out of the loops
    # Replicate the scaled template across the grid, shifting it cell by cell.
    for i in range(dimension):
        for j in range(dimension):
            position = [tuple((point[0] + offset * i, point[1] + offset * j) for point in k)
                        for k in position_scaled]
            positions.append(position)
    return positions
def saveResults():
    """Render test patterns for increasing field counts and query the API.

    NOTE(review): generateFields() currently ignores its argument, every
    iteration draws into the same in-memory image without saving it, and
    evalFitness() only evaluates the global population -- this helper looks
    unfinished (see the trailing TODO-style comments).
    """
    # set image format
    img = Image.new('RGB', (64, 64), color='black')
    draw = ImageDraw.Draw(img)
    colors = [(255,0,0), (0,255,0), (0,0,255), (0,0,0)]
    for i in range(32):
        for fields in generateFields(i+1):
            for j in range(len(fields)):
                draw.rectangle(fields[j], fill=colors[j])
        evalFitness()
    # answer into a list
    # into csv
    # i, number of fields, class, confidence
def printResults():
    """Print confidence/class of every individual; log confidences globally."""
    global confidenzList
    for member in population:
        print("confidence: ", member["confidence"], " class: ", member["class"])
        confidenzList.append(member["confidence"])
    print("..")
def getBestResult():
    """Return the highest confidence in the population, floored at 0."""
    # Seeding the max with 0 reproduces the original accumulator behavior,
    # including the empty-population case.
    return max([0] + [member["confidence"] for member in population])
# get the count of images that match the confidence
def getCountThatMatch(confidence):
    """Count distinct classes with at least one individual at/above `confidence`."""
    matched_classes = set()
    for member in population:
        if member["confidence"] >= confidence:
            matched_classes.add(member["class"])
    return len(matched_classes)
# init parameters
INITIAL_POPULATION = 20 # EXPERIMENT
SELECTED_COUNT = 5 # specification
DESIRED_CONFIDENCE = 0.90 # specification
# run evolutionary algorithm (init -> selection -> loop(crossover-> mutate -> selection) until confidence matches all images)
def runEvoAlgorithm():
    """Main EA loop.

    init -> eval -> selection, then crossover -> mutate -> eval -> selection
    until SELECTED_COUNT distinct classes reach DESIRED_CONFIDENCE (or an
    API failure sets the global `stop`). Finishes with a strict selection of
    one individual per class.
    """
    initPopulation(INITIAL_POPULATION)
    evalFitness()
    selection(SELECTED_COUNT, 2)
    printResults()
    while getCountThatMatch(DESIRED_CONFIDENCE) < SELECTED_COUNT and stop == False:
        crossover()
        mutate(DESIRED_CONFIDENCE)
        evalFitness()
        selection(SELECTED_COUNT, 2)
        if (stop == False):
            printResults()
    selection(SELECTED_COUNT, 1)
    printResults()
    print(api_calls)
# save generated images with desired confidence
def saveImages():
    """Save every individual above DESIRED_CONFIDENCE as a PNG and open it
    in the default browser. Filename: img<i>_<confidence>_<class>.png."""
    for i in range(len(population)):
        if(population[i]["confidence"] > DESIRED_CONFIDENCE):
            image = population[i]["image"]
            name = "img" + \
                str(i) + "_" + str(population[i]["confidence"]
                                   ) + "_" + str(population[i]["class"]) + ".png"
            image.save(name)
            webbrowser.open(name)
def clearList(listy):
    """UTF-8-encode every string in `listy`, echoing the count and each entry.

    Returns a new list of bytes objects.
    """
    encoded = []
    total = len(listy)
    print(total)
    for entry in listy:
        print(entry)
        encoded.append(entry.encode("utf-8"))
    return encoded
def listToCSV():
    """Dump the global (confidence, class) pairs to result.csv and scatter-plot them."""
    new_class = clearList(classList)  # utf-8 encoded labels (also prints them)
    data = zip(confidenceList, new_class)
    df = pd.DataFrame(data)
    df.to_csv('result.csv', index=False)
    print(df)
    plt.title('Einfarbige Bilder')
    plt.xlabel('Konfidenz')
    plt.ylabel('Klassifikation')
    plt.scatter(df[0], df[1])
    plt.show()
if __name__ == '__main__':
    # Run the evolutionary search, persist the winning images, and report
    # how many classifier API calls were spent.
    runEvoAlgorithm()
    saveImages()
    print("api calls: ", api_calls)
    # generateFields(4)
    # saveResults()
    # listToCSV()
| 33.130326 | 145 | 0.543158 |
e04c9b24f600b16b94b183fe669b811016df28a5
| 1,074 |
py
|
Python
|
listings/chapter05/bubblesort_with_counter.py
|
SaschaKersken/Daten-Prozessanalyse
|
370f07a75b9465329deb3671adbfbef8483f76f6
|
[
"Apache-2.0"
] | 2 |
2021-09-20T06:16:41.000Z
|
2022-01-17T14:24:43.000Z
|
listings/chapter05/bubblesort_with_counter.py
|
SaschaKersken/Daten-Prozessanalyse
|
370f07a75b9465329deb3671adbfbef8483f76f6
|
[
"Apache-2.0"
] | null | null | null |
listings/chapter05/bubblesort_with_counter.py
|
SaschaKersken/Daten-Prozessanalyse
|
370f07a75b9465329deb3671adbfbef8483f76f6
|
[
"Apache-2.0"
] | null | null | null |
def bubblesort(unsorted):
    """Sort `unsorted` in place with bubble sort.

    Returns the total number of element comparisons performed.
    """
    comparisons = 0
    swapped = True
    # Keep sweeping until a full pass makes no swap.
    while swapped:
        swapped = False
        for idx in range(len(unsorted) - 1):
            comparisons += 1
            if unsorted[idx] > unsorted[idx + 1]:
                # Neighbours out of order: swap and remember that we did.
                unsorted[idx], unsorted[idx + 1] = unsorted[idx + 1], unsorted[idx]
                swapped = True
    return comparisons
if __name__ == '__main__':
    # Demo: sort three sample lists and report the comparison count for each.
    samples = [
        [7, 2, 9, 1, 8, 4, 6, 3, 5, 0, 9],
        ['Katze', 'Hund', 'Elefant', 'Maus', 'Affe', 'Giraffe'],
        [10, 9, 8, 7, 6, 5, 4, 3, 2, 1],
    ]
    for sample in samples:
        count = bubblesort(sample)
        print(f"{sample}: {count} Durchläufe")
| 34.645161 | 75 | 0.550279 |
0ec80f64a12322fb90d737e554079b2db6c51bf6
| 19,444 |
py
|
Python
|
MoPulseGen/data/sim_for_model_4.py
|
tushar-agarwal2909/MoPulseGen
|
a0edeaf67f956e7db27e70f07e47d2f992bcb6f3
|
[
"BSD-2-Clause"
] | null | null | null |
MoPulseGen/data/sim_for_model_4.py
|
tushar-agarwal2909/MoPulseGen
|
a0edeaf67f956e7db27e70f07e47d2f992bcb6f3
|
[
"BSD-2-Clause"
] | null | null | null |
MoPulseGen/data/sim_for_model_4.py
|
tushar-agarwal2909/MoPulseGen
|
a0edeaf67f956e7db27e70f07e47d2f992bcb6f3
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 6 15:00:21 2019
@author: agarwal.270a
"""
# Import Libraries
import numpy as np
import matplotlib.pyplot as plt
from scipy import signal as sig
from scipy.signal import windows as win
import pandas as pd
from scipy import io
import pickle
from scipy.stats import norm
# Import CC functions
#from cerebralcortex.core.datatypes import DataStream
#from cerebralcortex.core.metadata_manager.stream.metadata import Metadata, DataDescriptor, ModuleMetadata
#from cerebralcortex.core.util.spark_helper import get_or_create_sc
# Declare constants and load data
Fs=25 #Hz
len_in_s=20.48 #s
len_out=4
len_in=Fs*len_in_s
#arr_t=np.arange(250,290,len_in_s) #change time duration when longer noise exists
arr_t=np.arange(250,900,len_in_s) #change time duration when longer noise exists
path_prefix= 'E:/Box Sync/' #'C:/Users/agarwal.270/Box/'
path=path_prefix+'SU19/Research/PPG_ECG_Proj/py_code/MA_function/'
mdict=pickle.load(open(path+'data/sim_data.dat','rb'))
RR_distro=mdict['RR_distro']
HR_clusters=mdict['HR_clusters']
del mdict
#peak_data=mdict['peaks']
#led_id=mdict['led_id']
#verify after meeting
list_pdf_RR_joint=[RR_distro[j,0] for j in range(len(RR_distro))]
list_pdf_RR_row_sum=[np.sum(arr,axis=0) for arr in list_pdf_RR_joint]
list_pdf_RR_col_sum=[np.sum(arr,axis=1) for arr in list_pdf_RR_joint]
diff_arr=np.array([np.linalg.norm(list_pdf_RR_row_sum[k]-list_pdf_RR_col_sum[k])\
for k in range(len(list_pdf_RR_row_sum))]).round(4)
# =============================================================================
# plt.figure();
# for j in range(len(list_pdf_RR_row_sum)):
# plt.subplot(7,2,j+1);plt.plot(list_pdf_RR_row_sum[j],'b-o')
# plt.plot(list_pdf_RR_col_sum[j],'r--x');plt.legend(['row','col'])
# plt.grid(True);plt.title('z={}, rmse={}'.format(j+1,diff_arr[j]))
#
# =============================================================================
#%% Helper funcs
# =============================================================================
# def sample_RR(HR,RR_prev):
# #get cluster
# HR_up=(HR_clusters>HR).astype(int)
# z=(np.arange(len(HR_clusters)-1))[(np.diff(HR_up)).astype(bool)][0]
# #RR_z=RR_distro[z]
# RR_z_distro=RR_distro[z,0];RR_z_vals=RR_distro[z,1].reshape(-1)
# if RR_prev==0: #beginning of sampling. sample uniform randomly
# RR_next=RR_z_vals[np.random.randint(len(RR_z_vals))]
# else:
# idx_Rp=np.arange(len(RR_z_vals))[RR_z_vals==RR_prev]
# RR_z_Rp=RR_z_distro[idx_Rp,:] #conditional distro given z, RR_p
# idx_Rn=np.random.choice(len(RR_z_vals),p=RR_z_Rp/np.sum(RR_z_Rp)) #sample RR_next idx
# RR_next=RR_z_vals[idx_Rn]
# return RR_next
# =============================================================================
def sample_RR(HR,RR_prev):
    """Sample the next RR interval for a given heart rate.

    HR: current heart rate (bpm); selects the HR cluster whose marginal RR
    distribution is sampled.
    RR_prev: previous RR value -- currently unused; the conditional sampler
    that used it is the commented-out variant kept above this function.
    Returns one RR value drawn from the cluster's marginal distribution.
    """
    #get cluster
    HR_up=(HR_clusters>HR).astype(int)
    # index of the cluster bin that HR falls into
    z=(np.arange(len(HR_clusters)-1))[(np.diff(HR_up)).astype(bool)][0]
    #get distros
    RR_z_distro=list_pdf_RR_row_sum[z]
    RR_z_vals=RR_distro[z,1].reshape(-1)
    #sample
    idx_Rn=np.random.choice(len(RR_z_vals),p=RR_z_distro) #sample RR_next idx
    RR_next=RR_z_vals[idx_Rn]
    return RR_next
def sinusoid(t,w,phi,Fs=25):
    '''
    Takes in inputs as numpy arrays of same size. Returns the sinewave with
    desired characteristics.

    t: array of time values in seconds. If a scalar is supplied, it is
    considered as duration of the time series in seconds starting from 0. It is
    divided into t*Fs divisions.
    w: array of angular frequencies in radians/seconds. If a scalar is
    supplied, it is made into a constant array of same shape as t and value w.
    phi: array of phase values in radians. If a scalar is supplied, it is made
    into a constant array of same shape as t and value phi.
    Fs= Sampling frequency in Hz. Only needed in case t is not an array.

    returns: t, s=np.sin(w*t+phi)

    Raises TypeError when the (array) inputs do not all share one shape.
    '''
    # Handle scalar inputs by expanding them to full arrays.
    if not(hasattr(t, "__len__")):
        # np.linspace requires an integer sample count; t*Fs may be a float.
        t=np.linspace(0,t,num=int(round(t*Fs)),endpoint=False)
    if not(hasattr(w, "__len__")):
        w=w*np.ones(t.shape)
    if not(hasattr(phi, "__len__")):
        phi=phi*np.ones(t.shape)
    # BUGFIX: the original used `and`, which let a single mismatching array
    # slip through; *any* shape mismatch is an error.
    if (w.shape!=t.shape or phi.shape!=t.shape):
        raise TypeError('Dimensional mismatch between input arrays. Please check the dimensions are same')
    s=np.sin(w*t+phi)
    return t,s
def HR_func_generator(t1):
    """Generate a random heart-rate trend over the time axis t1.

    Draws a base HR in [50, 180) and a positive excursion D_HR (capped so the
    peak stays <= 180), then shapes the trend with a randomly chosen window
    function (full/half triangular or Tukey windows).
    Returns (HR_curve_f, D_HR) where HR_curve_f has the same length as t1.
    """
    arr_HR=np.arange(50,180) # Possible heart rates
    # make a array of functions
    f1=lambda B,D:((D*win.triang(len(t1))).astype(int)+B).astype(np.float32) #triang
    f2=lambda B,D:((D*win.triang(2*len(t1))).astype(int)+B).astype(np.float32)\
        [:len(t1)] # 1st half of triang
    f3=lambda B,D:((D*win.tukey(len(t1),alpha=(0.3*np.random.rand()+0.7))).astype(int)+B).astype(np.float32) #tukey
    f4=lambda B,D:((D*win.tukey(2*len(t1),alpha=(0.3*np.random.rand()+0.7))).astype(int)+B)\
        .astype(np.float32)[:len(t1)] # 1st half of tukey
    arr_f=np.array(1*[f1]+1*[f2]+1*[f3]+1*[f4]) # possible to change the proportion of functions
    #randomly select elements
    D_HR=0;HRs=[];D_HR_max=50
    while D_HR==0: # we don't want D_HR to be zero so keep resampling
        HRs+=[arr_HR[np.random.randint(len(arr_HR))]]
        HR_range=np.arange(HRs[0]+1,min([HRs[0]+D_HR_max,180])+1)
        HRs+=[HR_range[np.random.randint(len(HR_range))]]
        B_HR,D_HR=HRs[0],HRs[1]-HRs[0]
    #B_HR,D_HR=arr_B_HR[np.random.randint(len(arr_B_HR))],arr_D_HR[np.random.randint(len(arr_D_HR))]
    HR_curve_f=arr_f[np.random.randint(len(arr_f))](B_HR,D_HR) #trend
    return HR_curve_f,D_HR
def filtr(X0,Fs=25,filt=True):
nyq=Fs/2;flag=False
if len(X0.shape)==1:
X0=X0.reshape(-1,1)
flag=True
X1 = sig.detrend(X0,type='constant',axis=0); # Subtract mean
if filt:
# filter design used from Ju's code with slight changes for python syntax
b = sig.firls(219,np.array([0,0.3,0.5,4.5,5,nyq]),np.array([0,0,1,1,0,0]),np.array([10,1,1]),nyq=nyq);
X=np.zeros(X1.shape)
for i in range(X1.shape[1]):
#X[:,i] = sig.convolve(X1[:,i],b,mode='same'); # filtering using convolution, mode='same' returns the centered signal without any delay
X[:,i] = sig.filtfilt(b, [1], X1[:,i])
else:
X=X1
if flag:
X=X.reshape(-1)
#X=sig.detrend(X,type='constant',axis=0); # subtracted mean again to center around x=0 just in case things changed during filtering
return X
def filtr_HR(X0,Fs=25,filt=True):
nyq=Fs/2;flag=False
if len(X0.shape)==1:
X0=X0.reshape(-1,1)
flag=True
X1 = np.copy(X0)#sig.detrend(X0,type='constant',axis=0); # Subtract mean
if filt:
# filter design used from Ju's code with slight changes for python syntax
b = sig.firls(219,np.array([0,0.5,1,nyq]),np.array([1,1,0,0]),np.array([1,1]),nyq=nyq);
X=np.zeros(X1.shape)
for i in range(X1.shape[1]):
#X[:,i] = sig.convolve(X1[:,i],b,mode='same'); # filtering using convolution, mode='same' returns the centered signal without any delay
X[:,i] = sig.filtfilt(b, [1], X1[:,i])
else:
X=X1
if flag:
X=X.reshape(-1)
#X=sig.detrend(X,type='constant',axis=0); # subtracted mean again to center around x=0 just in case things changed during filtering
return X
def normalize_AC(data_left_filt,Fn=25,c=0,make_plots=False):
    '''
    Normalize channel c of a filtered PPG signal by its median AC amplitude.

    data_left_filt: filtered ppg data
    Fn: Sampling frequency in Hz
    c: Column (Channel) in the array to be normalized
    make_plots: when True, plot the signal and the detected peaks.

    The amplitude is estimated from peaks found in the window between
    seconds 2 and 29; each peak's foot is the minimum of the 7 preceding
    samples, and the median (50th percentile) peak-to-foot height divides
    the whole channel. Returns the array (same shape as the input).
    '''
    data_left_filt=1*data_left_filt  # copy so the caller's array is untouched
    flag=False
    if len(data_left_filt.shape)==1:
        data_left_filt=data_left_filt.reshape((-1,1))
        flag=True
    prc_l=50  # percentile used for the amplitude estimate (median)
    pk_idx_start=2*Fn;pk_idx_end=29*Fn;
    y=data_left_filt[pk_idx_start:pk_idx_end,c]
    locs,pk_props = sig.find_peaks(y,distance=8,height=0);
    pks_l=y[locs]
    locs=locs+pk_idx_start;
    if make_plots:
        plt.figure(); plt.subplot(211);
        plt.plot(data_left_filt[:pk_idx_end,c]);plt.plot(locs,pks_l,'r+')
    temp_mins_l=[];
    #for j=[-5,-4,-3,-2,-1,1,2,3,4,5]
    for j in range(-7,0):
        temp_mins_l+=[data_left_filt[locs+j,c]];
    temp_min_l=np.min(np.array(temp_mins_l),axis=0);
    amp_left=np.nanpercentile(pks_l-temp_min_l,prc_l);
    #amp_left=np.mean(pks_l-temp_min_l);
    data_left_filt[:,c]=data_left_filt[:,c]/amp_left;
    if flag:
        data_left_filt=data_left_filt.reshape(-1)
    return data_left_filt
def form_data(X,Y,len_in,len_out):
    '''
    X:timeseries with inputs
    Y:timeseries with outputs

    Cuts X and Y into overlapping windows whose start advances by
    len_out/4 samples; the input window is len_in long, the output window
    len_out long, both ending at the same index.
    NOTE(review): after the transpose X is sliced along its second axis
    (consistent with a 2-D (time, features) input) while Y is sliced along
    its first axis (consistent with a 1-D (time,) input) -- confirm the
    expected shapes with the callers.
    '''
    in_size=int(len_in)
    out_size=int(len_out)
    step_size=int(len_out/4)#np.max([out_size,4]) #change this as desired
    #clip timeseries to nearest multiple of step_size
    #lenth1=(((len(X)-in_size)//step_size)*step_size)+in_size
    lenth=len(X)
    #print(lenth1,lenth)
    X,Y=X.T,Y.T # Transpose to make it look like time-series
    X,Y=X.reshape(X.shape+(1,)),Y.reshape(Y.shape+(1,)) # add a dimension for concatenation
    #print(X.shape,Y.shape)
    #idx=np.arange(0,lenth-in_size,step_size)+in_size
    idx=step_size*np.arange(0,1+((lenth-in_size)//step_size))+in_size
    #print(idx[-1])
    #print(lenth,X.shape[1],len(idx),(X.shape[1]-in_size+1)//step_size)
    #print(X.shape,Y.shape,HR.shape)
    data_X=np.concatenate([X[:,i-in_size:i,:] for i in idx],axis=-1).T
    data_Y=np.concatenate([Y[i-out_size:i,:] for i in idx],axis=-1).T
    #kernel_size=100;stride=1
    #idxHR=np.arange(i-out_size+kernel_size,i,stride)
    return data_X,data_Y
def pd_ffill(arr):
    """Forward-fill NaNs along axis 0, preserving the input array's shape.

    Leading NaNs (with nothing before them to copy) are left as NaN.
    """
    # DataFrame.fillna(method='ffill') is deprecated (pandas 2.1); ffill()
    # is the documented equivalent.
    out = pd.DataFrame(arr).ffill(axis=0).values.reshape(arr.shape)
    return out
def add_motion_noise(ppg1,flag=True):
# Noise for SNR=10log10(P_s/P_n)=20 dB => sigma=(ppg_pow**0.5)/10
acc1=0.00*np.random.standard_normal(ppg1.shape) # random normal noise with (0,0.1^2)
if flag: #extreme motion artefacts to be added or not
acc1=acc1+(2*np.random.random_sample(ppg1.shape)-1) # [-2,2] random uniform
#f=lambda z: (3 / (1 + np.exp(-10*z))) # A saturating sigmoid
f=lambda z: 2*np.tanh(2*z)
ppg1=ppg1+f(acc1) #noise added making values [-2,2] or [-4,4] depending on mode
return ppg1,acc1
def extract_rand_noise(noiz_list,lenth):
    """Pick a random noise component and crop a random window of length `lenth`.

    noiz_list : available noise components (1-D arrays) to choose from
    lenth     : desired length of the returned noise signal

    Raises AssertionError when no component is strictly longer than `lenth`.
    """
    # Only components strictly longer than the request can supply a window.
    candidates = [component for component in noiz_list if len(component) > lenth]
    if not candidates:
        raise AssertionError('Please use a smaller duration of ppg.')
    chosen = candidates[np.random.randint(len(candidates))]
    start = np.random.randint(len(chosen) - lenth)
    return chosen[start:start + lenth]
def gen_ppg_from_HR(t1,HR_curve_f,D_HR,peak_id,make_plots=False):
    '''
    Synthesize a filtered PPG signal and matching HR trace from an HR curve.

    t1         : time axis in seconds (sampled at the module-level Fs).
    HR_curve_f : HR timeseries in BPM; may be modified in place (NaN runs
                 are written into it before forward-filling).
    D_HR       : HR excursion; its magnitude sizes the inserted NaN runs.
    peak_id    : peak-basis identifier. NOTE(review): unused by the active
                 code path — the PCA basis is always loaded from
                 green_ppg_basis.mat; confirm whether this is intentional.
    make_plots : when True, draw diagnostic subplots (HR, peak train, PPG).

    Returns (ppg2_filt, HR_filt): the band-filtered synthetic PPG and the
    filtered HR curve converted from BPM to Hz.
    '''
    # Randomly insert consecutive Nan's and then ffill
    perc_change=5;cons_reps=len(t1)//(np.abs(D_HR*2))
    #idx=1+np.random.RandomState(seed=seed1).permutation(len(t1)-2-cons_reps)[:int((len(t1)-2)/cons_reps*perc_change/100)]
    idx=1+np.random.permutation(len(t1)-2-cons_reps)[:int((len(t1)-2)/cons_reps*perc_change/100)]
    try:
        # Expand each start index into a run of cons_reps samples, blank
        # them, then forward-fill to create step-like HR plateaus.
        idx=np.concatenate([np.arange(i,i+cons_reps) for i in idx])
        HR_curve_f[idx]=np.nan
        HR_curve1=pd_ffill(HR_curve_f)
    except ValueError:
        # idx was empty (nothing selected) — keep the curve unchanged.
        HR_curve1=1*HR_curve_f
    # TODO: Removed 0.1 Hz and 0.4 Hz in HRV
    #HRV_w1=2*np.pi*0.1;HRV_w2=2*np.pi*0.4
    #rand_mix=np.repeat(np.random.random_sample(1+(len(t1)//1500)),1500)[:len(t1)]
    #rand_mix=0.55
    #print(len(t1),rand_mix)
    #gain_list=np.array([0,1,2,2,1,1,1,1])
    #HR_curve1+=0.03*((rand_mix*sinusoid(t1,HRV_w1,phi=0)[-1])+\
    #                 ((1-rand_mix)*sinusoid(t1,HRV_w2,phi=0)[-1]))#*gain_list[(300/HR_curve1).astype(int)]
    #plt.figure();plt.plot(t1,sinusoid(t1,HRV_w1,phi=0)[-1],t1,sinusoid(t1,HRV_w2,phi=0)[-1])
    #HR_curve1,_=add_motion_noise(HR_curve1,flag=False)
    #print(HR_curve1.shape,t1.shape)
    # =============================================================================
    #     w1=2*np.pi*(HR_curve1/60)
    #     #phi_PTT=(0.5*np.pi)/(HR_curve1/60)
    #     phi_PTT=0
    #     _,ppg0=sinusoid(t1,w1,phi=phi_PTT)
    #
    #     ppg1=ppg0*2
    #     PTT=np.random.randint(4,6) #sample a PTT value
    #     ppg1=np.concatenate([np.zeros(PTT),ppg1[:-1*PTT]])
    #
    #
    #     # Peak Detection & check figure for its accuracy
    #     #out = ecg.ecg(signal=ppg01, sampling_rate=25,show=False)
    #     #ind=out['rpeaks']
    #     #arr_peaks=np.zeros(len(ppg01));arr_peaks[ind]=1
    #     #arr_peaks=(ppg01==np.max(ppg01)).astype(int)
    #     ind,_=find_peaks(ppg1,distance=6,height=0.9)
    #
    # =============================================================================
    # Peak window geometry: w_l samples to the left of a peak center,
    # w_r to the right, w_pk total.
    w_l=12;w_pk=25;w_r=w_pk-w_l-1
    # Upper bound on the number of beats we might place (one per 5 samples).
    n_peaks=int(len(HR_curve1)/5)
    #remove terminal pk_locs
    #ind=ind[ind>=w_l]
    #ind=ind[ind<(len(ppg1)-w_r)]
    #sample bunch of peaks using PCA components
    # NOTE(review): hard-coded Windows path — this function only runs on the
    # original author's machine; consider making it a parameter.
    path2base='E:/Box Sync/'+\
        'AU19/Research/PPG_ECG_proj/data/Wen_data_28_Sep/clean_lrsynced\\'
    base_dict = io.loadmat(path2base+"green_ppg_basis.mat")
    #base_dict=mdict[peak_id+'_G']['peaks']
    # Eigen-basis of peak shapes: columns of eig_vec are components,
    # eig_val their variances, avg the mean peak shape.
    eig_vec=base_dict['eig_vec'];eig_val=base_dict['eig_val'].reshape((-1,1))
    avg=base_dict['mean'].reshape((-1,1))
    # Keep only the top-k principal components.
    k=10;eig_vec=eig_vec[:,:k];eig_val=eig_val[:k]
    l_peaks,n_coeff=eig_vec.shape
    # Random coefficients scaled by sqrt(variance) of each component.
    weights=np.random.random_sample((n_coeff,n_peaks))*(eig_val**0.5)
    rand_pks=np.matmul(eig_vec,weights)+avg #form peaks
    #rand_pks=rand_pks[int(l_peaks/2)-w_l:int(l_peaks/2)+w_r+1,:] #keep desired width
    #OR
    # =============================================================================
    #     # Sample peaks randomly from those available in peak_mat
    #     peak_mat=peak_dict[peak_id];l_peaks=peak_mat.shape[0]
    #     rand_pks_idx=np.random.randint(peak_mat.shape[1],size=n_peaks)
    #     rand_pks=peak_mat[int(l_peaks/2)-w_l:int(l_peaks/2)+w_r+1,rand_pks_idx]
    #
    # =============================================================================
    # arr_ppg accumulates the synthetic PPG; arr_pk a Gaussian "peak train"
    # marking beat locations (shifted left by PTT).
    arr_ppg=np.zeros(len(HR_curve1))
    arr_pk=np.zeros(len(HR_curve1))
    #TODO: bunch of changes here
    gauss=norm(loc = 0., scale = 1.5).pdf(np.arange(-3,3+1))
    PTT=np.random.randint(4,8) #sample a PTT value
    #plt.figure();plt.plot(gauss)
    RR_prev=0;i=1*w_l;cntr=0
    # Place one sampled peak per beat; advance by the sampled RR interval.
    while i < (len(HR_curve1)-w_r-1):
        #get next RR
        arr_ppg[i-w_l:i+w_r+1]+=rand_pks[:,cntr]
        arr_pk[i-3-PTT:i+3+1-PTT]=gauss
        #get next RR_interval
        #avg_HR=np.mean(HR_curve1[i-w_l:i+w_r+1])
        avg_HR=np.mean(HR_curve1[i+w_r+1:i+w_r+1+Fs]) #look ahead HR
        RR_next=sample_RR(avg_HR,RR_prev)
        i+=RR_next
        cntr+=1
    # =============================================================================
    #     #sample bunch of noise using PCA components
    #     noise_dict=mdict[peak_id+'_G']['noise']
    #     #DC_list=noise_dict['DC']
    #     NP_list=noise_dict['NP']
    #     P_list=noise_dict['P'];N_list=noise_dict['N']
    #     # Randomly pick one element from each list
    #     #DC=DC_list[np.random.randint(len(DC_list))]
    #     NP=extract_rand_noise(NP_list,len(arr_ppg))
    #     P=extract_rand_noise(P_list,len(arr_ppg))
    #     N=extract_rand_noise(N_list,len(arr_ppg))
    #
    #     #get random gains for noise signals
    #     gain_NP=(1-0.5)*np.random.rand()+0.5 #in [0.5,1)
    #     gain_P,gain_N=gain_NP*np.random.rand(2) # in [0,gain_NP)
    #     #if j<2:
    #     #    gain_NP,gain_P,gain_N=0,0,0
    #
    #     arr_ppg+=(gain_NP*NP+gain_P*P+gain_N*N) #Add noise
    #     #arr_ppg=arr_ppg[:,j]*DC
    #
    #
    #
    #     #arr_ppg_norm=1*arr_ppg
    #     #plt.figure();plt.plot(arr_ppg);plt.plot(arr_ppg_norm,'--')
    #     #plt.legend(['actual','AC Normalized'])
    #     #add motion noise
    #     #ppg2,acc1=add_motion_noise(arr_ppg,False)
    #
    #
    #     ppg2=1*arr_ppg
    #     ppg2_filt=filtr(ppg2.reshape(-1,1),Fs=25)
    #     # Normalize AC component
    #     ppg2_filt=normalize_AC(ppg2_filt,make_plots=False)
    # =============================================================================
    #TODO: Converted HR to Hz from BPM and made it smoother
    ppg2=1*arr_ppg
    # Band-filter the synthetic PPG; HR is converted BPM -> Hz then filtered.
    ppg2_filt=filtr(ppg2.reshape(-1,1),Fs=25)
    HR_filt=filtr_HR(HR_curve1/60)
    #arr_pk_filt=filtr(arr_pk,Fs=25)
    #ppg2=((ppg2+2)/4) # normalize using min-max of [-2,2]
    #acc1=((acc1+1)/2) # normalize using min-max of [-2,2]
    #plots
    if make_plots:
        #plt.figure()
        #plt.psd(HR_curve1[-Fs*10:], NFFT=Fs*10, Fs=Fs,detrend='constant')
        plt.figure()
        ax1=plt.subplot(311);ax1.plot(t1,HR_filt)
        ax1.set_title('HR');plt.grid(True)
        #ax2=plt.subplot(412,sharex=ax1);ax2.plot(t1,ppg1,t1[ind],ppg1[ind],'r+')
        #ax2.set_title('PPG_clean with detected peaks');plt.grid(True)
        #ax3=plt.subplot(413,sharex=ax1);ax3.plot(t1,acc1)
        #ax3.set_title('Acc');plt.grid(True)
        ax3=plt.subplot(312,sharex=ax1);ax3.plot(t1,arr_pk)
        ax3.set_title('filtered peak train');plt.grid(True)
        ax4=plt.subplot(313,sharex=ax1);ax4.plot(t1,ppg2_filt)
        ax4.set_title('filtered_PPG');plt.grid(True)
    return ppg2_filt,HR_filt
#%% Main
def main(data_size=10000,for_test=False,make_plots=False,save_data=False):
    """Generate a synthetic (PPG -> HR) training dataset.

    data_size  : number of windows to accumulate before splitting.
    for_test   : when True, return a single (ppg, HR, data_X, data_Y)
                 sample instead of a full dataset.
    make_plots : forwarded to gen_ppg_from_HR for diagnostic figures.
    save_data  : when True (with for_test), dump the sample to a .mat file.

    Returns (train_data, val_data): each a (X, Y) tuple of float32 arrays,
    independently shuffled, using a 90/10 train/validation split.
    """
    # Explicit accumulators replace the fragile `'dataset_X' in locals()`
    # pattern the original code used.
    dataset_X, dataset_Y = None, None
    while True:
        t = arr_t[np.random.randint(len(arr_t))]  # sample seq. length in s.
        # form HR curve
        t1 = np.linspace(0, t, num=t*Fs, endpoint=False)
        HR_curve_f, D_HR = HR_func_generator(t1)
        peak_id = 'white'
        ppg1, HR1 = gen_ppg_from_HR(t1, HR_curve_f, D_HR, peak_id, make_plots=make_plots)
        len_in = Fs*len_in_s; len_out = 1*len_in
        data_X, data_Y = form_data(ppg1, HR1, len_in=len_in, len_out=len_out)
        # Test mode: hand back one sample (optionally saved) and stop.
        if for_test:
            if save_data:
                mdict = {'ppg': ppg1, 'HR': HR1}
                io.savemat('eig_peaks_s.mat', mdict=mdict)
            return ppg1, HR1, data_X, data_Y
        if dataset_X is None:
            dataset_X, dataset_Y = data_X, data_Y
        else:
            dataset_X = np.concatenate([dataset_X, data_X], axis=0)
            dataset_Y = np.concatenate([dataset_Y, data_Y], axis=0)
        if len(dataset_Y) >= data_size:
            break
    dataset_X = dataset_X[:data_size].astype(np.float32)
    dataset_Y = dataset_Y[:data_size].astype(np.float32)
    # 90/10 split, then shuffle each side independently.
    ratio = 0.1; cut_idx = int(ratio*len(dataset_X))
    val_data = (dataset_X[:cut_idx], dataset_Y[:cut_idx])
    train_data = (dataset_X[cut_idx:], dataset_Y[cut_idx:])
    idx = np.random.permutation(cut_idx)
    val_data = (val_data[0][idx], val_data[1][idx])
    idx = np.random.permutation(len(dataset_Y)-cut_idx)
    train_data = (train_data[0][idx], train_data[1][idx])
    return train_data, val_data
# Script entry point: close any stale figures, then build a dataset with
# the default parameters.
if __name__=='__main__':
    plt.close('all')
    X,Y=main()
| 40.848739 | 148 | 0.613351 |
161f91d8b95c7a757527e07d67a4e5ab0740a5bd
| 566 |
py
|
Python
|
noticias/migrations/0003_auto_20190507_1727.py
|
miglesias91/dt
|
6e00f883ebdb581f87750852f18cf9e3058aae2f
|
[
"MIT"
] | null | null | null |
noticias/migrations/0003_auto_20190507_1727.py
|
miglesias91/dt
|
6e00f883ebdb581f87750852f18cf9e3058aae2f
|
[
"MIT"
] | null | null | null |
noticias/migrations/0003_auto_20190507_1727.py
|
miglesias91/dt
|
6e00f883ebdb581f87750852f18cf9e3058aae2f
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.1 on 2019-05-07 20:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration adding a `slug` field to Nota and Periodista.

    Both fields default to '#' so pre-existing rows receive a placeholder
    slug value.
    """

    dependencies = [
        ('noticias', '0002_remove_nota_comentario'),
    ]

    operations = [
        migrations.AddField(
            model_name='nota',
            name='slug',
            field=models.SlugField(default='#', max_length=225),
        ),
        migrations.AddField(
            model_name='periodista',
            name='slug',
            field=models.SlugField(default='#', max_length=225),
        ),
    ]
| 23.583333 | 64 | 0.572438 |
bc94452fb3032b28a2ec41cb2020077af1748c27
| 22,737 |
py
|
Python
|
python/objectdetector/utils.py
|
thomas-trendsoft/BeerBot
|
9ac9bfcede3f37dc8aaa3ee09e06272a1b43a9e9
|
[
"MIT"
] | null | null | null |
python/objectdetector/utils.py
|
thomas-trendsoft/BeerBot
|
9ac9bfcede3f37dc8aaa3ee09e06272a1b43a9e9
|
[
"MIT"
] | null | null | null |
python/objectdetector/utils.py
|
thomas-trendsoft/BeerBot
|
9ac9bfcede3f37dc8aaa3ee09e06272a1b43a9e9
|
[
"MIT"
] | 1 |
2020-12-31T01:22:19.000Z
|
2020-12-31T01:22:19.000Z
|
#================================================================
#
# File name : utils.py
# Author : PyLessons
# Created date: 2020-09-27
# Website : https://pylessons.com/
# GitHub : https://github.com/pythonlessons/TensorFlow-2.x-YOLOv3
# Description : additional yolov3 and yolov4 functions
#
#================================================================
from multiprocessing import Process, Queue, Pipe
import cv2
import time
import random
import colorsys
import numpy as np
import tensorflow as tf
from yolov3.configs import *
from yolov3.yolov4 import *
from tensorflow.python.saved_model import tag_constants
def load_yolo_weights(model, weights_file):
    """Copy original Darknet weights from `weights_file` into a Keras model.

    model        : Keras YOLOv3/v4 model created by Create_Yolo.
    weights_file : path to the Darknet .weights binary.

    Walks conv layers in Darknet file order. Layers listed in `range2` are
    detection heads (conv + bias, no batch-norm); every other conv layer is
    followed by a batch-norm layer. Raises AssertionError if the weights
    file is not fully consumed.
    """
    tf.keras.backend.clear_session() # used to reset layer names
    # Layer count (range1) and head indices (range2) differ per
    # architecture and tiny variant.
    if YOLO_TYPE == "yolov3":
        range1 = 75 if not TRAIN_YOLO_TINY else 13
        range2 = [58, 66, 74] if not TRAIN_YOLO_TINY else [9, 12]
    if YOLO_TYPE == "yolov4":
        range1 = 110 if not TRAIN_YOLO_TINY else 21
        range2 = [93, 101, 109] if not TRAIN_YOLO_TINY else [17, 20]

    with open(weights_file, 'rb') as wf:
        # Darknet header: major, minor, revision, images-seen counter.
        major, minor, revision, seen, _ = np.fromfile(wf, dtype=np.int32, count=5)

        j = 0  # running index of batch-normalization layers
        for i in range(range1):
            conv_layer_name = 'conv2d_%d' % i if i > 0 else 'conv2d'
            bn_layer_name = 'batch_normalization_%d' % j if j > 0 else 'batch_normalization'

            conv_layer = model.get_layer(conv_layer_name)
            filters = conv_layer.filters
            k_size = conv_layer.kernel_size[0]
            in_dim = conv_layer.input_shape[-1]

            if i not in range2:
                # darknet weights: [beta, gamma, mean, variance]
                bn_weights = np.fromfile(wf, dtype=np.float32, count=4 * filters)
                # tf weights: [gamma, beta, mean, variance]
                bn_weights = bn_weights.reshape((4, filters))[[1, 0, 2, 3]]
                bn_layer = model.get_layer(bn_layer_name)
                j += 1
            else:
                conv_bias = np.fromfile(wf, dtype=np.float32, count=filters)

            # darknet shape (out_dim, in_dim, height, width)
            conv_shape = (filters, in_dim, k_size, k_size)
            # np.product was removed in NumPy 2.0; np.prod is the supported name.
            conv_weights = np.fromfile(wf, dtype=np.float32, count=np.prod(conv_shape))
            # tf shape (height, width, in_dim, out_dim)
            conv_weights = conv_weights.reshape(conv_shape).transpose([2, 3, 1, 0])

            if i not in range2:
                conv_layer.set_weights([conv_weights])
                bn_layer.set_weights(bn_weights)
            else:
                conv_layer.set_weights([conv_weights, conv_bias])

        assert len(wf.read()) == 0, 'failed to read all data'
def Load_Yolo_model():
    """Build and return a ready-to-run YOLO model per the global configs.

    "tf" path: creates the network and loads either original Darknet
    weights or a custom-trained checkpoint. "trt" path: loads a TensorRT
    SavedModel and returns its 'serving_default' signature.
    """
    gpus = tf.config.experimental.list_physical_devices('GPU')
    if len(gpus) > 0:
        print(f'GPUs {gpus}')
        # Grow GPU memory on demand instead of reserving it all up front.
        try: tf.config.experimental.set_memory_growth(gpus[0], True)
        except RuntimeError: pass
    if YOLO_FRAMEWORK == "tf": # TensorFlow detection
        if YOLO_TYPE == "yolov4":
            Darknet_weights = YOLO_V4_TINY_WEIGHTS if TRAIN_YOLO_TINY else YOLO_V4_WEIGHTS
        if YOLO_TYPE == "yolov3":
            Darknet_weights = YOLO_V3_TINY_WEIGHTS if TRAIN_YOLO_TINY else YOLO_V3_WEIGHTS
        # NOTE(review): `== False` rather than a truthiness test —
        # YOLO_CUSTOM_WEIGHTS appears to double as a flag or a path string;
        # verify against configs.py before simplifying.
        if YOLO_CUSTOM_WEIGHTS == False:
            print("Loading Darknet_weights from:", Darknet_weights)
            yolo = Create_Yolo(input_size=YOLO_INPUT_SIZE, CLASSES=YOLO_COCO_CLASSES)
            load_yolo_weights(yolo, Darknet_weights) # use Darknet weights
        else:
            print("Loading custom weights from:", YOLO_CUSTOM_WEIGHTS)
            yolo = Create_Yolo(input_size=YOLO_INPUT_SIZE, CLASSES=TRAIN_CLASSES)
            yolo.load_weights(f"./checkpoints/{TRAIN_MODEL_NAME}")  # use custom weights

    elif YOLO_FRAMEWORK == "trt": # TensorRT detection
        saved_model_loaded = tf.saved_model.load(YOLO_CUSTOM_WEIGHTS, tags=[tag_constants.SERVING])
        signature_keys = list(saved_model_loaded.signatures.keys())
        yolo = saved_model_loaded.signatures['serving_default']

    return yolo
def image_preprocess(image, target_size, gt_boxes=None):
    """Letterbox-resize `image` to `target_size` and normalize to [0, 1].

    The image is scaled preserving aspect ratio, centered on a gray (128)
    canvas of shape (target_h, target_w, 3), then divided by 255. When
    `gt_boxes` is given, its corner coordinates are mapped (in place) into
    the padded frame and returned alongside the image.
    """
    target_h, target_w = target_size
    src_h, src_w, _ = image.shape

    # Uniform scale that fits the source inside the target canvas.
    scale = min(target_w / src_w, target_h / src_h)
    new_w, new_h = int(scale * src_w), int(scale * src_h)
    resized = cv2.resize(image, (new_w, new_h))

    canvas = np.full(shape=[target_h, target_w, 3], fill_value=128.0)
    pad_w = (target_w - new_w) // 2
    pad_h = (target_h - new_h) // 2
    canvas[pad_h:new_h + pad_h, pad_w:new_w + pad_w, :] = resized
    canvas = canvas / 255.

    if gt_boxes is None:
        return canvas

    # Shift box corners into the letterboxed coordinate frame.
    gt_boxes[:, [0, 2]] = gt_boxes[:, [0, 2]] * scale + pad_w
    gt_boxes[:, [1, 3]] = gt_boxes[:, [1, 3]] * scale + pad_h
    return canvas, gt_boxes
def draw_bbox(image, bboxes, CLASSES=YOLO_COCO_CLASSES, show_label=True, show_confidence = True, Text_colors=(255,255,0), rectangle_colors='', tracking=False):
    """Draw detection rectangles (and optional labels) onto `image` in place.

    image            : frame to draw on (H, W, 3).
    bboxes           : iterable of (xmin, ymin, xmax, ymax, score, class).
    CLASSES          : class-names file, resolved via read_class_names.
    show_label       : draw "<name> <score>" above each box.
    show_confidence  : include the score in the label text.
    Text_colors      : label text color.
    rectangle_colors : fixed box color; '' picks a per-class HSV color.
    tracking         : when True the score field is rendered verbatim
                       (e.g. a tracker id string).

    Returns the annotated image.
    """
    NUM_CLASS = read_class_names(CLASSES)
    num_classes = len(NUM_CLASS)
    image_h, image_w, _ = image.shape
    # One well-separated hue per class, shuffled with a fixed seed so the
    # class->color mapping is stable across runs.
    hsv_tuples = [(1.0 * x / num_classes, 1., 1.) for x in range(num_classes)]
    colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
    colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)), colors))

    random.seed(0)
    random.shuffle(colors)
    random.seed(None)

    for i, bbox in enumerate(bboxes):
        coor = np.array(bbox[:4], dtype=np.int32)
        score = bbox[4]
        class_ind = int(bbox[5])
        bbox_color = rectangle_colors if rectangle_colors != '' else colors[class_ind]
        # Line thickness scales with image size, minimum 1 px.
        bbox_thick = int(0.6 * (image_h + image_w) / 1000)
        if bbox_thick < 1: bbox_thick = 1
        fontScale = 0.75 * bbox_thick
        (x1, y1), (x2, y2) = (coor[0], coor[1]), (coor[2], coor[3])

        # put object rectangle
        cv2.rectangle(image, (x1, y1), (x2, y2), bbox_color, bbox_thick*2)

        if show_label:
            # get text label
            score_str = " {:.2f}".format(score) if show_confidence else ""
            if tracking: score_str = " "+str(score)
            try:
                label = "{}".format(NUM_CLASS[class_ind]) + score_str
            except KeyError:
                print("You received KeyError, this might be that you are trying to use yolo original weights")
                print("while using custom classes, if using custom model in configs.py set YOLO_CUSTOM_WEIGHTS = True")
                # BUGFIX: previously `label` was left unbound here and the
                # getTextSize call below raised NameError. Fall back to the
                # numeric class id so drawing can continue.
                label = "{}".format(class_ind) + score_str

            # get text size
            (text_width, text_height), baseline = cv2.getTextSize(label, cv2.FONT_HERSHEY_COMPLEX_SMALL,
                                                                  fontScale, thickness=bbox_thick)
            # put filled text rectangle
            cv2.rectangle(image, (x1, y1), (x1 + text_width, y1 - text_height - baseline), bbox_color, thickness=cv2.FILLED)

            # put text above rectangle
            cv2.putText(image, label, (x1, y1-4), cv2.FONT_HERSHEY_COMPLEX_SMALL,
                        fontScale, Text_colors, bbox_thick, lineType=cv2.LINE_AA)

    return image
def bboxes_iou(boxes1, boxes2):
    """Element-wise IoU between two (broadcastable) arrays of boxes.

    Boxes are (..., 4) as (xmin, ymin, xmax, ymax). The result is clamped
    from below by float32 epsilon, so callers never receive an exact zero.
    """
    boxes1 = np.array(boxes1)
    boxes2 = np.array(boxes2)

    area1 = (boxes1[..., 2] - boxes1[..., 0]) * (boxes1[..., 3] - boxes1[..., 1])
    area2 = (boxes2[..., 2] - boxes2[..., 0]) * (boxes2[..., 3] - boxes2[..., 1])

    # Intersection rectangle: max of top-left corners, min of bottom-right.
    top_left = np.maximum(boxes1[..., :2], boxes2[..., :2])
    bottom_right = np.minimum(boxes1[..., 2:], boxes2[..., 2:])

    wh = np.maximum(bottom_right - top_left, 0.0)
    inter = wh[..., 0] * wh[..., 1]
    union = area1 + area2 - inter

    return np.maximum(1.0 * inter / union, np.finfo(np.float32).eps)
def nms(bboxes, iou_threshold, sigma=0.3, method='nms'):
    """Per-class non-maximum suppression.

    :param bboxes: array of rows (xmin, ymin, xmax, ymax, score, class)
    Note: soft-nms, https://arxiv.org/pdf/1704.04503.pdf
          https://github.com/bharatsingh430/soft-nms

    Returns the surviving boxes as a list of 1-D arrays.
    """
    kept = []
    for cls in set(bboxes[:, 5]):
        remaining = bboxes[bboxes[:, 5] == cls]
        # Repeatedly take the highest-scoring box and suppress its overlaps.
        while len(remaining) > 0:
            winner_idx = np.argmax(remaining[:, 4])
            winner = remaining[winner_idx]
            kept.append(winner)
            remaining = np.concatenate([remaining[:winner_idx], remaining[winner_idx + 1:]])
            overlap = bboxes_iou(winner[np.newaxis, :4], remaining[:, :4])
            weight = np.ones((len(overlap),), dtype=np.float32)

            assert method in ['nms', 'soft-nms']

            if method == 'nms':
                # Hard suppression: zero out anything above the IoU threshold.
                weight[overlap > iou_threshold] = 0.0
            if method == 'soft-nms':
                # Soft suppression: decay scores by a Gaussian of the IoU.
                weight = np.exp(-(1.0 * overlap ** 2 / sigma))

            remaining[:, 4] = remaining[:, 4] * weight
            remaining = remaining[remaining[:, 4] > 0.]
    return kept
def postprocess_boxes(pred_bbox, original_image, input_size, score_threshold):
    """Convert raw network output to (xmin, ymin, xmax, ymax, score, class) rows.

    pred_bbox       : rows of [x, y, w, h, objectness, class probs...] in
                      the letterboxed `input_size` coordinate frame.
    original_image  : source frame; only its height/width are used to undo
                      the letterbox transform.
    score_threshold : minimum (objectness * best class prob) to keep a box.

    Returns an (N, 6) float array of surviving detections.
    """
    valid_scale = [0, np.inf]
    pred_bbox = np.array(pred_bbox)

    pred_xywh = pred_bbox[:, 0:4]
    pred_conf = pred_bbox[:, 4]
    pred_prob = pred_bbox[:, 5:]

    # 1. (x, y, w, h) -> (xmin, ymin, xmax, ymax)
    half_wh = pred_xywh[:, 2:] * 0.5
    pred_coor = np.concatenate([pred_xywh[:, :2] - half_wh,
                                pred_xywh[:, :2] + half_wh], axis=-1)

    # 2. Undo the letterbox: map network coords back onto the original frame.
    org_h, org_w = original_image.shape[:2]
    resize_ratio = min(input_size / org_w, input_size / org_h)
    dw = (input_size - resize_ratio * org_w) / 2
    dh = (input_size - resize_ratio * org_h) / 2
    pred_coor[:, 0::2] = 1.0 * (pred_coor[:, 0::2] - dw) / resize_ratio
    pred_coor[:, 1::2] = 1.0 * (pred_coor[:, 1::2] - dh) / resize_ratio

    # 3. Clip to the image and zero out degenerate (inverted) boxes.
    pred_coor = np.concatenate([np.maximum(pred_coor[:, :2], [0, 0]),
                                np.minimum(pred_coor[:, 2:], [org_w - 1, org_h - 1])], axis=-1)
    degenerate = np.logical_or(pred_coor[:, 0] > pred_coor[:, 2],
                               pred_coor[:, 1] > pred_coor[:, 3])
    pred_coor[degenerate] = 0

    # 4. Keep boxes whose geometric-mean side length lies inside valid_scale.
    bboxes_scale = np.sqrt(np.multiply.reduce(pred_coor[:, 2:4] - pred_coor[:, 0:2], axis=-1))
    scale_mask = np.logical_and(valid_scale[0] < bboxes_scale, bboxes_scale < valid_scale[1])

    # 5. Score = objectness * best class probability; threshold it.
    classes = np.argmax(pred_prob, axis=-1)
    scores = pred_conf * pred_prob[np.arange(len(pred_coor)), classes]
    mask = np.logical_and(scale_mask, scores > score_threshold)

    coors, scores, classes = pred_coor[mask], scores[mask], classes[mask]
    return np.concatenate([coors, scores[:, np.newaxis], classes[:, np.newaxis]], axis=-1)
def detect_image(Yolo, image_path, output_path, input_size=416, show=False, CLASSES=YOLO_COCO_CLASSES, score_threshold=0.3, iou_threshold=0.45, rectangle_colors=''):
    """Run YOLO on a single image file, draw detections, optionally save.

    Returns the post-NMS list of boxes (xmin, ymin, xmax, ymax, score, class).
    `show` is accepted but unused here.
    """
    original_image      = cv2.imread(image_path)
    # NOTE(review): BGR2RGB applied twice swaps the channels back to BGR —
    # the net effect is a no-op; confirm which channel order the model expects.
    original_image      = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)
    original_image      = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)

    # Letterbox to the network input size and add a batch dimension.
    image_data = image_preprocess(np.copy(original_image), [input_size, input_size])
    image_data = image_data[np.newaxis, ...].astype(np.float32)

    if YOLO_FRAMEWORK == "tf":
        pred_bbox = Yolo.predict(image_data)
    elif YOLO_FRAMEWORK == "trt":
        batched_input = tf.constant(image_data)
        result = Yolo(batched_input)
        pred_bbox = []
        for key, value in result.items():
            value = value.numpy()
            pred_bbox.append(value)

    # Flatten all scale heads into one (N, 5+num_classes) tensor.
    pred_bbox = [tf.reshape(x, (-1, tf.shape(x)[-1])) for x in pred_bbox]
    pred_bbox = tf.concat(pred_bbox, axis=0)

    bboxes = postprocess_boxes(pred_bbox, original_image, input_size, score_threshold)
    bboxes = nms(bboxes, iou_threshold, method='nms')
    print(bboxes)
    image = draw_bbox(original_image, bboxes, CLASSES=CLASSES, rectangle_colors=rectangle_colors)
    # CreateXMLfile("XML_Detections", str(int(time.time())), original_image, bboxes, read_class_names(CLASSES))

    if output_path != '': cv2.imwrite(output_path, image)

    return bboxes
def Predict_bbox_mp(Frames_data, Predicted_data, Processing_times):
    """Worker process: run YOLO inference on queued, preprocessed frames.

    Frames_data      : input queue of preprocessed image batches.
    Predicted_data   : output queue receiving raw prediction tensors.
    Processing_times : queue of start timestamps, consumed by postprocess_mp
                       to compute end-to-end FPS.

    Runs forever; terminated externally via Process.terminate().
    """
    gpus = tf.config.experimental.list_physical_devices('GPU')
    if len(gpus) > 0:
        try: tf.config.experimental.set_memory_growth(gpus[0], True)
        except RuntimeError: print("RuntimeError in tf.config.experimental.list_physical_devices('GPU')")
    # The model must be built inside the child process (not pickled across).
    Yolo = Load_Yolo_model()
    times = []  # NOTE(review): unused here (t1 as well); timing lives in postprocess_mp
    while True:
        if Frames_data.qsize()>0:
            image_data = Frames_data.get()
            t1 = time.time()
            Processing_times.put(time.time())

            if YOLO_FRAMEWORK == "tf":
                pred_bbox = Yolo.predict(image_data)
            elif YOLO_FRAMEWORK == "trt":
                batched_input = tf.constant(image_data)
                result = Yolo(batched_input)
                pred_bbox = []
                for key, value in result.items():
                    value = value.numpy()
                    pred_bbox.append(value)

            # Flatten all scale heads into one (N, 5+num_classes) tensor.
            pred_bbox = [tf.reshape(x, (-1, tf.shape(x)[-1])) for x in pred_bbox]
            pred_bbox = tf.concat(pred_bbox, axis=0)

            Predicted_data.put(pred_bbox)
def postprocess_mp(Predicted_data, original_frames, Processed_frames, Processing_times, input_size, CLASSES, score_threshold, iou_threshold, rectangle_colors, realtime):
    """Worker process: turn raw predictions into annotated frames.

    Pairs each prediction from `Predicted_data` with its source frame from
    `original_frames`, runs box postprocessing + NMS, draws boxes and an
    FPS overlay, and pushes the result to `Processed_frames`. When
    `realtime` is True, stale frames are dropped so only the newest is used.

    Runs forever; terminated externally via Process.terminate().
    """
    times = []
    while True:
        if Predicted_data.qsize()>0:
            pred_bbox = Predicted_data.get()
            if realtime:
                # Drain the queue down to the most recent frame (drop lag).
                while original_frames.qsize() > 1:
                    original_image = original_frames.get()
            else:
                original_image = original_frames.get()

            bboxes = postprocess_boxes(pred_bbox, original_image, input_size, score_threshold)
            bboxes = nms(bboxes, iou_threshold, method='nms')
            image = draw_bbox(original_image, bboxes, CLASSES=CLASSES, rectangle_colors=rectangle_colors)
            # Rolling average over the last 20 frames for the FPS overlay.
            times.append(time.time()-Processing_times.get())
            times = times[-20:]

            ms = sum(times)/len(times)*1000
            fps = 1000 / ms
            image = cv2.putText(image, "Time: {:.1f}FPS".format(fps), (0, 30), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 0, 255), 2)
            #print("Time: {:.2f}ms, Final FPS: {:.1f}".format(ms, fps))

            Processed_frames.put(image)
def Show_Image_mp(Processed_frames, show, Final_frames):
    """Worker process: forward annotated frames and optionally display them.

    Processed_frames : queue of annotated frames from postprocess_mp.
    show             : when True, render each frame in an OpenCV window;
                       pressing 'q' closes the window and ends the loop.
    Final_frames     : queue consumed by the video writer in the parent.

    Runs forever unless 'q' is pressed; otherwise terminated externally.
    """
    while True:
        if Processed_frames.qsize()>0:
            image = Processed_frames.get()
            Final_frames.put(image)
            if show:
                cv2.imshow('output', image)
                if cv2.waitKey(25) & 0xFF == ord("q"):
                    cv2.destroyAllWindows()
                    break
# multiprocess detection pipeline: reads from a video file, or the webcam when realtime=True
def detect_video_realtime_mp(video_path, output_path, input_size=416, show=False, CLASSES=YOLO_COCO_CLASSES, score_threshold=0.3, iou_threshold=0.45, rectangle_colors='', realtime=False):
    """Three-process detection pipeline: predict, postprocess, display.

    Frames are read here (webcam when `realtime` else `video_path`),
    preprocessed, and fanned out through queues to the three worker
    processes. Annotated frames are written to `output_path` (must be a
    container matching the XVID codec). The workers are terminated once
    every queue has drained.
    """
    if realtime:
        vid = cv2.VideoCapture(0)
    else:
        vid = cv2.VideoCapture(video_path)

    # by default VideoCapture returns float instead of int
    width = int(vid.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = int(vid.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = int(vid.get(cv2.CAP_PROP_FPS))
    codec = cv2.VideoWriter_fourcc(*'XVID')
    out = cv2.VideoWriter(output_path, codec, fps, (width, height)) # output_path must be .mp4
    no_of_frames = int(vid.get(cv2.CAP_PROP_FRAME_COUNT))

    # Queues wiring the pipeline stages together.
    original_frames = Queue()
    Frames_data = Queue()
    Predicted_data = Queue()
    Processed_frames = Queue()
    Processing_times = Queue()
    Final_frames = Queue()

    p1 = Process(target=Predict_bbox_mp, args=(Frames_data, Predicted_data, Processing_times))
    p2 = Process(target=postprocess_mp, args=(Predicted_data, original_frames, Processed_frames, Processing_times, input_size, CLASSES, score_threshold, iou_threshold, rectangle_colors, realtime))
    p3 = Process(target=Show_Image_mp, args=(Processed_frames, show, Final_frames))
    p1.start()
    p2.start()
    p3.start()

    # Feed every frame into the pipeline.
    while True:
        ret, img = vid.read()
        if not ret:
            break

        # NOTE(review): BGR2RGB applied twice is a net no-op (back to BGR);
        # confirm the intended channel order for the model.
        original_image = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        original_image = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)
        original_frames.put(original_image)

        image_data = image_preprocess(np.copy(original_image), [input_size, input_size])
        image_data = image_data[np.newaxis, ...].astype(np.float32)
        Frames_data.put(image_data)

    # Drain results; stop the workers once every queue is empty.
    while True:
        if original_frames.qsize() == 0 and Frames_data.qsize() == 0 and Predicted_data.qsize() == 0  and Processed_frames.qsize() == 0  and Processing_times.qsize() == 0 and Final_frames.qsize() == 0:
            p1.terminate()
            p2.terminate()
            p3.terminate()
            break
        elif Final_frames.qsize()>0:
            image = Final_frames.get()
            if output_path != '': out.write(image)

    cv2.destroyAllWindows()
def detect_video(Yolo, video_path, output_path, input_size=416, show=False, CLASSES=YOLO_COCO_CLASSES, score_threshold=0.3, iou_threshold=0.45, rectangle_colors=''):
    """Single-process video detection loop with FPS overlay.

    Reads `video_path` frame by frame, runs YOLO, draws boxes, writes the
    annotated stream to `output_path` (XVID codec) and optionally displays
    it ('q' quits). The loop ends when a frame fails to decode (EOF).
    """
    times, times_2 = [], []
    vid = cv2.VideoCapture(video_path)

    # by default VideoCapture returns float instead of int
    width = int(vid.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = int(vid.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = int(vid.get(cv2.CAP_PROP_FPS))
    codec = cv2.VideoWriter_fourcc(*'XVID')
    out = cv2.VideoWriter(output_path, codec, fps, (width, height)) # output_path must be .mp4

    while True:
        _, img = vid.read()

        try:
            # NOTE(review): BGR2RGB twice is a net no-op (back to BGR);
            # cvtColor raising on a None frame doubles as the EOF check.
            original_image = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            original_image = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)
        except:
            break

        image_data = image_preprocess(np.copy(original_image), [input_size, input_size])
        image_data = image_data[np.newaxis, ...].astype(np.float32)

        t1 = time.time()
        if YOLO_FRAMEWORK == "tf":
            pred_bbox = Yolo.predict(image_data)
        elif YOLO_FRAMEWORK == "trt":
            batched_input = tf.constant(image_data)
            result = Yolo(batched_input)
            pred_bbox = []
            for key, value in result.items():
                value = value.numpy()
                pred_bbox.append(value)

        t2 = time.time()

        # Flatten all scale heads into one (N, 5+num_classes) tensor.
        pred_bbox = [tf.reshape(x, (-1, tf.shape(x)[-1])) for x in pred_bbox]
        pred_bbox = tf.concat(pred_bbox, axis=0)

        bboxes = postprocess_boxes(pred_bbox, original_image, input_size, score_threshold)
        bboxes = nms(bboxes, iou_threshold, method='nms')

        image = draw_bbox(original_image, bboxes, CLASSES=CLASSES, rectangle_colors=rectangle_colors)

        t3 = time.time()
        # Rolling 20-frame averages: `times` is inference only,
        # `times_2` is inference + postprocess + drawing.
        times.append(t2-t1)
        times_2.append(t3-t1)

        times = times[-20:]
        times_2 = times_2[-20:]

        ms = sum(times)/len(times)*1000
        fps = 1000 / ms
        fps2 = 1000 / (sum(times_2)/len(times_2)*1000)

        image = cv2.putText(image, "Time: {:.1f}FPS".format(fps), (0, 30), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 0, 255), 2)
        # CreateXMLfile("XML_Detections", str(int(time.time())), original_image, bboxes, read_class_names(CLASSES))

        print("Time: {:.2f}ms, Detection FPS: {:.1f}, total FPS: {:.1f}".format(ms, fps, fps2))
        if output_path != '': out.write(image)
        if show:
            cv2.imshow('output', image)
            if cv2.waitKey(25) & 0xFF == ord("q"):
                cv2.destroyAllWindows()
                break

    cv2.destroyAllWindows()
# detect from webcam
def detect_realtime(Yolo, output_path, input_size=416, show=False, CLASSES=YOLO_COCO_CLASSES, score_threshold=0.3, iou_threshold=0.45, rectangle_colors=''):
    """Single-process webcam detection loop (device 0) with FPS overlay.

    Like detect_video but reads from the default camera. Annotated frames
    are written to `output_path` (XVID codec). NOTE(review): unlike
    detect_video there is no `show`-based display or 'q' exit here — the
    loop only ends when a frame fails to decode.
    """
    times = []
    vid = cv2.VideoCapture(0)
    print(YOLO_COCO_CLASSES)
    # by default VideoCapture returns float instead of int
    width = int(vid.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = int(vid.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = int(vid.get(cv2.CAP_PROP_FPS))
    codec = cv2.VideoWriter_fourcc(*'XVID')
    out = cv2.VideoWriter(output_path, codec, fps, (width, height)) # output_path must be .mp4

    while True:
        _, frame = vid.read()

        try:
            # NOTE(review): BGR2RGB twice is a net no-op (back to BGR);
            # cvtColor raising on a None frame doubles as the EOF check.
            original_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            original_frame = cv2.cvtColor(original_frame, cv2.COLOR_BGR2RGB)
        except:
            break
        image_data = image_preprocess(np.copy(original_frame), [input_size, input_size])
        image_data = image_data[np.newaxis, ...].astype(np.float32)

        t1 = time.time()
        if YOLO_FRAMEWORK == "tf":
            pred_bbox = Yolo.predict(image_data)
        elif YOLO_FRAMEWORK == "trt":
            batched_input = tf.constant(image_data)
            result = Yolo(batched_input)
            pred_bbox = []
            for key, value in result.items():
                value = value.numpy()
                pred_bbox.append(value)
        t2 = time.time()

        # Flatten all scale heads into one (N, 5+num_classes) tensor.
        pred_bbox = [tf.reshape(x, (-1, tf.shape(x)[-1])) for x in pred_bbox]
        pred_bbox = tf.concat(pred_bbox, axis=0)

        bboxes = postprocess_boxes(pred_bbox, original_frame, input_size, score_threshold)
        bboxes = nms(bboxes, iou_threshold, method='nms')

        # Rolling 20-frame average of inference time for the overlay.
        times.append(t2-t1)
        times = times[-20:]

        ms = sum(times)/len(times)*1000
        fps = 1000 / ms

        print("Time: {:.2f}ms, {:.1f} FPS".format(ms, fps))

        frame = draw_bbox(original_frame, bboxes, CLASSES=CLASSES, rectangle_colors=rectangle_colors)
        # CreateXMLfile("XML_Detections", str(int(time.time())), original_frame, bboxes, read_class_names(CLASSES))
        image = cv2.putText(frame, "Time: {:.1f}FPS".format(fps), (0, 30),
                          cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 0, 255), 2)

        if output_path != '': out.write(frame)

    cv2.destroyAllWindows()
| 40.747312 | 201 | 0.617892 |
4c3e65c30aa21036d636e7b7b76adc0bd973b0e3
| 2,964 |
py
|
Python
|
WiSe-2122/Uebung-11/Gruppe-C/U11-A2.py
|
jonasrdt/Wirtschaftsinformatik2
|
30d5d896808b98664c55cb6fbb3b30a7f1904d9f
|
[
"MIT"
] | 1 |
2022-03-23T09:40:39.000Z
|
2022-03-23T09:40:39.000Z
|
WiSe-2122/Uebung-11/Gruppe-C/U11-A2.py
|
jonasrdt/Wirtschaftsinformatik2
|
30d5d896808b98664c55cb6fbb3b30a7f1904d9f
|
[
"MIT"
] | null | null | null |
WiSe-2122/Uebung-11/Gruppe-C/U11-A2.py
|
jonasrdt/Wirtschaftsinformatik2
|
30d5d896808b98664c55cb6fbb3b30a7f1904d9f
|
[
"MIT"
] | null | null | null |
# Entwickeln Sie eine Version von Hangman in Python. Hierbei soll randomisiert aus einer Liste von Ihnen
# vorgegebener Wörter eines ausgewählt und dem Nutzer zugewiesen werden. Dieser rät dann einzelne Buchstaben.
# Ist der Buchstabe korrekt, wird dieser in der nächsten Ausgabe an der richtigen Stelle angezeigt. Ist er nicht korrekt,
# verliert der Nutzer weiter Leben. Wenn Sie möchten, können Sie stattdessen auch versuchen ein Hangman-Männchen zu zeichnen.
import random
def trenner(anzahl):
    """Print a separator line of `anzahl` dashes followed by a newline."""
    print("-" * anzahl)
def successor():
    """Return True once the word is fully guessed.

    Reads the module-level `gesuchtes_wort` list, where unguessed letters
    are stored as "_" placeholders.
    """
    return "_" not in gesuchtes_wort
def spielmodus():
    """Ask the player for a difficulty level and return the number of lives.

    1 -> 3 tries (hard), 2 -> 6 (medium), 3 -> 9 (easy); any other number
    falls back to medium. Non-numeric input re-prompts.
    """
    while True:
        try:
            modus = int(input("Wählen Sie eine Schwierigkeitsstufe: (1) Schwer - 3 Versuche (2) Mittel - 6 Versuchen (3) Einfach - 9 Versuchen: "))
        except ValueError:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; only invalid numbers should re-prompt.
            print("Bitte geben Sie nur ganze Zahlen ein.")
            continue
        if modus == 1:
            return 3
        elif modus == 2:
            return 6
        elif modus == 3:
            return 9
        else:
            print("Ihre Schwierigkeit wird standardmäßig auf Mittel gesetzt.")
            return 6
# Variable definitions
words = ["Jessica", "Jonas", "Maike", "Milan", "Damla", "Merda", "Andre", "Sarangoo"]
# Alternative solution:
# print(random.choice(words))
zufaelliger_wert = random.randint(0,len(words)-1)
zufaelliges_wort = words[zufaelliger_wert]
# gesuchtes_wort holds one "_" placeholder per letter; filled in as the
# player guesses correctly (read by successor()).
gesuchtes_wort = []
versuche = 0

# Main program
trenner(50)
print("Willkommen bei Hangman - Dem Wortratespiel")
print("Im Folgenden müssen Sie ein Wort erraten.")
versuche = spielmodus()
print("Dafür haben Sie",versuche,"Versuche.")
trenner(50)
print("Das von Ihnen zu erratene Wort hat", len(zufaelliges_wort), "Zeichen.")
for element in range(len(zufaelliges_wort)):
    gesuchtes_wort.append("_")
print(gesuchtes_wort)
# Game loop: guess letters (case-insensitive) until solved or out of tries.
while not successor() and versuche > 0:
    buchstabe = input("Bitte raten Sie einen Buchstaben: ")
    if buchstabe.upper() in zufaelliges_wort.upper():
        for char in range(len(zufaelliges_wort)):
            if buchstabe.upper() == zufaelliges_wort[char].upper():
                # Insert the guessed letter at the correct index
                gesuchtes_wort.insert(char, buchstabe)
                # Remove the placeholder that was pushed one slot right
                gesuchtes_wort.pop(char+1)
    else:
        versuche -= 1
        print("Schade, das war nicht richtig. Du hast noch", versuche,"Versuche.")
    print(gesuchtes_wort)
if successor():
    print("Herzlichen Glückwunsch. Sie haben das Wort", zufaelliges_wort, "erraten.")
if versuche == 0:
    print("DUUUU HAST VERLOREN!")
    print("Das richtige Wort wäre gewesen:", zufaelliges_wort)
54f068388216c6dedb95ce2987ca476a9ac75ee9
| 686 |
py
|
Python
|
Integrations/test_python/jpy_test.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 55 |
2021-05-11T16:01:59.000Z
|
2022-03-30T14:30:33.000Z
|
Integrations/test_python/jpy_test.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 943 |
2021-05-10T14:00:02.000Z
|
2022-03-31T21:28:15.000Z
|
Integrations/test_python/jpy_test.py
|
devinrsmith/deephaven-core
|
3a6930046faf1cd556f62a914ce1cfd7860147b9
|
[
"MIT"
] | 29 |
2021-05-10T11:33:16.000Z
|
2022-03-30T21:01:54.000Z
|
#
# Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending
#
# add JDK to path (otherwise jnius gives DLL load error)
# NOTE(review): the paths below contain an unescaped "\j" (not "\\j") and
# are hard-coded to two specific Windows JDK installs — confirm on target.
import os
os.environ['PATH'] = os.environ['PATH'] + ";C:\\Program Files\\Java\jdk1.8.0_72\\jre\\bin\\server"
os.environ['PATH'] = os.environ['PATH'] + ";C:\\Program Files\\Java\jdk1.8.0_60\\jre\\bin\\server"
print(os.environ['PATH'])

# Start an in-process JVM so Java classes become reachable through jpy.
import jpyutil
jpyutil.init_jvm()
# jpyutil.init_jvm(jvm_maxmem='512M', jvm_classpath=['target/test-classes'])
import jpy

# Smoke test: drive java.util.Stack from Python (LIFO order expected).
Stack = jpy.get_type('java.util.Stack')
stack = Stack()
stack.push('hello')
stack.push('world')
print(stack.pop())  # --> 'world'
print(stack.pop())  # --> 'hello'
print(stack.getClass().getName())
| 24.5 | 98 | 0.688047 |
0708b286f2eede3d619fe161ea6768baef98e1f6
| 8,518 |
py
|
Python
|
scripts/style/check-header-guards.py
|
opensource-assist/fuschia
|
66646c55b3d0b36aae90a4b6706b87f1a6261935
|
[
"BSD-3-Clause"
] | 14 |
2020-10-25T05:48:36.000Z
|
2021-09-20T02:46:20.000Z
|
scripts/style/check-header-guards.py
|
DamieFC/fuchsia
|
f78a4a1326f4a4bb5834500918756173c01bab4f
|
[
"BSD-2-Clause"
] | 1 |
2022-01-14T23:38:40.000Z
|
2022-01-14T23:38:40.000Z
|
scripts/style/check-header-guards.py
|
DamieFC/fuchsia
|
f78a4a1326f4a4bb5834500918756173c01bab4f
|
[
"BSD-2-Clause"
] | 4 |
2020-12-28T17:04:45.000Z
|
2022-03-12T03:20:44.000Z
|
#!/usr/bin/env python2.7
# Copyright 2016 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to check C and C++ file header guards.

This script accepts a list of file or directory arguments. If a given
path is a file, it runs the checker on it. If the path is a directory,
it runs the checker on all files in that directory.

In addition, this script checks for potential header guard
collisions. This is useful since we munge / to _, and so
lib/abc/xyz/xyz.h
and
lib/abc_xyz/xyz.h
both want to use LIB_ABC_XYZ_XYZ_H_ as a header guard.
"""
import argparse
import collections
import fileinput
import os.path
import re
import string
import sys

# Repository root, derived from this script's location ($root/scripts/style).
FUCHSIA_ROOT = os.path.dirname(  # $root
    os.path.dirname(  # scripts
        os.path.dirname(  # style
            os.path.realpath(
                os.path.abspath(__file__)))))

# Guard prefixes for sysroot headers; these are rewritten to SYSROOT_.
SYSROOT_PREFIXES = [
    'ZIRCON_SYSTEM_PUBLIC',
    'ZIRCON_THIRD_PARTY_ULIB_MUSL_INCLUDE',
]
# FIX: string.join() is Python-2-only (removed in Python 3); the equivalent
# str.join() works on both, so the script no longer breaks under Python 3.
sysroot_prefix = re.compile('^(' + '|'.join(SYSROOT_PREFIXES) + ')_')

# Guard prefixes for public/SDK headers; these are stripped entirely.
PUBLIC_PREFIXES = [
    'ZIRCON_SYSTEM_ULIB_.*_INCLUDE',
    'GARNET_PUBLIC',
    'PERIDOT_PUBLIC',
    'TOPAZ_PUBLIC',
    'SDK'
]
public_prefix = re.compile('^(' + '|'.join(PUBLIC_PREFIXES) + ')_')

# Maps each computed header guard to the list of files that would use it,
# so collisions can be reported at the end of the run.
all_header_guards = collections.defaultdict(list)

pragma_once = re.compile('^#pragma once$')
# Any character not allowed in a guard macro gets munged to '_'.
disallowed_header_characters = re.compile('[^a-zA-Z0-9_]')
def adjust_for_location(header_guard):
    """Remove internal location prefix from public headers if applicable."""
    # Public locations vanish from the guard entirely; sysroot locations are
    # collapsed to a single canonical SYSROOT_ prefix. Each substitution is
    # applied at most once (count=1).
    without_public = public_prefix.sub('', header_guard, 1)
    return sysroot_prefix.sub('SYSROOT_', without_public, 1)
def header_guard_from_path(path):
    """Compute the expected header guard macro from an absolute file path."""
    assert path.startswith(FUCHSIA_ROOT)
    # Uppercase the repo-relative path, munge every illegal character to '_',
    # and append the conventional trailing underscore.
    relative = path[len(FUCHSIA_ROOT):].strip('/')
    guard = disallowed_header_characters.sub('_', relative.upper()) + '_'
    return adjust_for_location(guard)
def check_file(path, fix_guards=False):
    """Check whether the file has a correct header guard.

    A header guard can either be a #pragma once, or else a matching set of

        #ifndef PATH_TO_FILE_
        #define PATH_TO_FILE_
        ...
        #endif // PATH_TO_FILE_

    preprocessor directives, where both '.' and '/' in the path are
    mapped to '_', and a trailing '_' is appended.

    In either the #pragma once case or the header guard case, it is
    assumed that there is no trailing or leading whitespace.

    Returns True when the file passes; otherwise prints a diagnostic,
    optionally attempts a rewrite when fix_guards is set, and returns False.
    """
    # Only check .h files
    if path[-2:] != '.h':
        return True
    header_guard = header_guard_from_path(path)
    # Record the guard for the collision report done at the end of the run.
    all_header_guards[header_guard].append(path)
    # Exact-match patterns for the three guard lines this file should have.
    ifndef = re.compile('^#ifndef %s$' % header_guard)
    define = re.compile('^#define %s$' % header_guard)
    endif = re.compile('^#endif +// *%s$' % header_guard)
    found_pragma_once = False
    found_ifndef = False
    found_define = False
    found_endif = False
    with open(path, 'r') as f:
        for line in f.readlines():
            # Each guard element may appear at most once; duplicates fail fast.
            match = pragma_once.match(line)
            if match:
                if found_pragma_once:
                    print('%s contains multiple #pragma once' % path)
                    return False
                found_pragma_once = True
            match = ifndef.match(line)
            if match:
                if found_ifndef:
                    print('%s contains multiple ifndef header guards' % path)
                    return False
                found_ifndef = True
            match = define.match(line)
            if match:
                if found_define:
                    print('%s contains multiple define header guards' % path)
                    return False
                found_define = True
            match = endif.match(line)
            if match:
                if found_endif:
                    print('%s contains multiple endif header guards' % path)
                    return False
                found_endif = True
    if found_pragma_once:
        # Mixing #pragma once with guard lines is always an error.
        if found_ifndef or found_define or found_endif:
            print('%s contains both #pragma once and header guards' % path)
            return False
        # A lone #pragma once is acceptable unless we were asked to convert
        # files to full header guards (fix_guards).
        if not fix_guards:
            return True
    if found_ifndef and found_define and found_endif:
        return True
    # At this point the guard is missing or incomplete: report which part.
    if not found_ifndef:
        print('%s did not contain ifndef part of its header guard' % path)
    elif not found_define:
        print('%s did not contain define part of its header guard' % path)
    elif not found_endif:
        print('%s did not contain endif part of its header guard' % path)
    elif fix_guards:
        if found_pragma_once:
            print('%s contained #pragma once instead of a header guard' % path)
        else:
            print('%s did not contain a header guard or the header guard did '
                  'not match the file path' % path)
    else:
        print('%s contained neither a proper header guard nor #pragma once' %
              path)
    header_guards_fixed = False
    if fix_guards:
        header_guards_fixed = fix_header_guard(path, header_guard)
    if not header_guards_fixed:
        print('Allowable header guard values are %s' % all_header_guards.keys());
    return False
def fix_header_guard(path, header_guard):
    """Attempt to fix the header guard in the given file.

    Rewrites the file in place (fileinput with inplace=1 redirects stdout
    into the file), replacing any existing *_H_-style guard lines with ones
    derived from the path, or converting a #pragma once into a full guard.
    Returns True when the rewrite produced a complete guard.
    """
    # Loose patterns: match ANY guard macro ending in _H_, not just the
    # expected one, so wrong/mismatched guards get rewritten too.
    ifndef = re.compile('^#ifndef [^\s]+_H_$')
    define = re.compile('^#define [^\s]+_H_$')
    endif = re.compile('^#endif +// *[^\s]+_H_$')
    fixed_ifndef = False
    fixed_define = False
    fixed_endif = False
    fixed_pragma_once = False
    for line in fileinput.input(path, inplace=1):
        (new_line, changes) = re.subn(ifndef,
                                      '#ifndef %s' % header_guard,
                                      line)
        if changes:
            fixed_ifndef = True
            sys.stdout.write(new_line)
            continue
        (new_line, changes) = re.subn(define,
                                      '#define %s' % header_guard,
                                      line)
        if changes:
            fixed_define = True
            sys.stdout.write(new_line)
            continue
        (new_line, changes) = re.subn(endif,
                                      '#endif // %s' % header_guard,
                                      line)
        if changes:
            fixed_endif = True
            sys.stdout.write(new_line)
            continue
        if pragma_once.match(line):
            # Replace #pragma once with the opening guard lines; the closing
            # #endif is appended after the rewrite loop below.
            fixed_pragma_once = True
            sys.stdout.write('#ifndef %s\n' % header_guard)
            sys.stdout.write('#define %s\n' % header_guard)
            continue
        sys.stdout.write(line)
    if fixed_pragma_once:
        with open(path, 'a') as file:
            file.write('\n')
            file.write('#endif // %s\n' % header_guard)
    if (fixed_ifndef and fixed_define and fixed_endif) or fixed_pragma_once:
        print('Fixed!')
        return True
    print('Not fixed...')
    return False
def check_dir(p, fix_guards=False):
    """Walk recursively over a directory checking .h files."""
    def prune(d):
        # Skip hidden directories (e.g. .git) and vendored third-party code.
        return d[0] == '.' or d == 'third_party'
    for root, dirs, paths in os.walk(p):
        # Mutate dirs in place so os.walk never descends into pruned trees.
        dirs[:] = [d for d in dirs if not prune(d)]
        for path in paths:
            check_file(os.path.join(root, path), fix_guards=fix_guards)
def check_collisions():
    """Report header guards that more than one file would use.

    Needed because '/' is munged to '_', so distinct paths can map to the
    same guard macro (see the module docstring for an example).
    """
    # FIX: dict.iteritems() is Python-2-only; .items() works on both
    # Python 2 and Python 3.
    for header_guard, paths in all_header_guards.items():
        if len(paths) == 1:
            continue
        print('Multiple files could use %s as a header guard:' % header_guard)
        for path in paths:
            print(' %s' % path)
def main():
    """CLI entry point: check (and optionally fix) every path argument."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--fix',
                        help='Correct wrong header guards',
                        action='store_true')
    # parse_known_args: everything that is not --fix is a file/dir to check.
    arg_results, other_args = parser.parse_known_args()
    fix_guards = arg_results.fix
    for raw_path in other_args:
        resolved = os.path.realpath(os.path.abspath(raw_path))
        if os.path.isdir(resolved):
            check_dir(resolved, fix_guards=fix_guards)
        else:
            check_file(resolved, fix_guards=fix_guards)
    check_collisions()
# Script entry point. main() returns None, which sys.exit maps to status 0.
if __name__ == "__main__":
    sys.exit(main())
| 31.783582 | 81 | 0.603193 |
0768cc4ce44f76022f300170aa4906165d6a0726
| 3,512 |
py
|
Python
|
DataStructure/U11/u76.py
|
qiaw99/Data-Structure
|
3b1cdce96d4f35329ccfec29c03de57378ef0552
|
[
"MIT"
] | 1 |
2019-10-29T08:21:41.000Z
|
2019-10-29T08:21:41.000Z
|
DataStructure/U11/u76.py
|
qiaw99/Data-Structure
|
3b1cdce96d4f35329ccfec29c03de57378ef0552
|
[
"MIT"
] | null | null | null |
DataStructure/U11/u76.py
|
qiaw99/Data-Structure
|
3b1cdce96d4f35329ccfec29c03de57378ef0552
|
[
"MIT"
] | null | null | null |
# nxn chessboard
n = 10
# number of dragons on the chessboard
dragons = 10
# solution[row] = column of the dragon placed in that row (-1 = none placed)
solution = [-1]*n
# captured[x][y] = number of already-placed dragons attacking square (x, y)
captured = [[0 for i in range(n)] for i in range(n)]
# Counters: solutions found, recursive calls since the last solution, total calls.
number = 0
local_calls = 0
total_calls = 0
def init():
    # NOTE(review): this is a no-op — it declares the global but never
    # (re)initialises anything; presumably leftover scaffolding.
    global captured
def isCaptured(x, y):
    """Return a truthy count when square (x, y) is attacked by any placed dragon."""
    # Reading the module-level board needs no `global` declaration.
    return captured[x][y]
def _adjust(x, y, delta):
    """Add *delta* to the attack count of every square a piece at (x, y) covers.

    The "dragon" attacks like a queen plus a knight: its full row and column,
    all four diagonal rays, and the eight knight jumps. capture() and free()
    were previously two sign-mirrored copies of this code; factoring them into
    one helper removes ~50 duplicated lines and keeps both directions in sync.
    """
    # Full row and column.
    for i in range(n):
        captured[i][y] += delta
        captured[x][i] += delta
    # (x, y) itself was counted twice by the row/column pass above.
    captured[x][y] -= delta
    # Four diagonal rays out to the board edge.
    for dx, dy in ((1, 1), (1, -1), (-1, -1), (-1, 1)):
        i = x + dx
        j = y + dy
        while 0 <= i < n and 0 <= j < n:
            captured[i][j] += delta
            i += dx
            j += dy
    # Eight knight moves, each bounds-checked individually.
    for i, j in ((x - 2, y - 1), (x - 2, y + 1),
                 (x + 2, y - 1), (x + 2, y + 1),
                 (x - 1, y - 2), (x + 1, y - 2),
                 (x - 1, y + 2), (x + 1, y + 2)):
        if 0 <= i < n and 0 <= j < n:
            captured[i][j] += delta


def capture(x, y):
    """Mark every square attacked by a dragon placed at (x, y)."""
    _adjust(x, y, 1)


def free(x, y):
    """Undo capture(x, y) when backtracking removes the dragon at (x, y)."""
    _adjust(x, y, -1)
def find(x, d):
    """Backtracking search: try every free column for the dragon in row x.

    x: current row to fill; d: total number of rows/dragons. Prints each
    complete solution as it is found and updates the global call counters.
    """
    global captured, solution, number, total_calls, local_calls, dragons
    total_calls += 1
    local_calls += 1
    if x == d:
        # All rows filled: report the solution and reset the per-solution counter.
        number += 1
        print("Soluiton: ", number, " Coord: ", solution)
        print("Number of local calls ", local_calls)
        local_calls = 0
        return
    for j in range(n):
        if not isCaptured(x, j):
            # Place, recurse into the next row, then undo (backtrack).
            solution[x] = j
            capture(x, j)
            find(x + 1, dragons)
            free(x, j)
# Kick off the search over all rows and report the total work done.
print("")
print("Coordinate '-1' means no Dragon in that line")
print("")
find(0, dragons)
print("")
print("Number of total calls ", total_calls)
| 21.156627 | 73 | 0.375854 |
ab36310e71aab5b7fae4dae1f2ab0fda8f0edcf3
| 98 |
py
|
Python
|
Python/Aula10/ExerciciosPython/ExerciciosElif/ComparandoNumeros.py
|
ekballo/Back-End
|
b252e3b2a16ce36486344823f14afa6691fde9bc
|
[
"MIT"
] | null | null | null |
Python/Aula10/ExerciciosPython/ExerciciosElif/ComparandoNumeros.py
|
ekballo/Back-End
|
b252e3b2a16ce36486344823f14afa6691fde9bc
|
[
"MIT"
] | null | null | null |
Python/Aula10/ExerciciosPython/ExerciciosElif/ComparandoNumeros.py
|
ekballo/Back-End
|
b252e3b2a16ce36486344823f14afa6691fde9bc
|
[
"MIT"
] | null | null | null |
#Escreva um programa que leia dois números inteiros e compare-os, mostrando na tela uma mensagem.
| 49 | 97 | 0.806122 |
db4d967f97727f747037a4e8a241667be46b1839
| 328 |
py
|
Python
|
Object Oriented Programming/Object Oriented Programming dossier/enterprise.py
|
severinhaller/einf-machinelearning
|
4dfc8f1da0d81c5aa800d1459f81b72d1bf6dd9b
|
[
"MIT"
] | null | null | null |
Object Oriented Programming/Object Oriented Programming dossier/enterprise.py
|
severinhaller/einf-machinelearning
|
4dfc8f1da0d81c5aa800d1459f81b72d1bf6dd9b
|
[
"MIT"
] | null | null | null |
Object Oriented Programming/Object Oriented Programming dossier/enterprise.py
|
severinhaller/einf-machinelearning
|
4dfc8f1da0d81c5aa800d1459f81b72d1bf6dd9b
|
[
"MIT"
] | null | null | null |
import main

# Two independent CrewMitglied instances, each carrying its own surname.
first_member = main.CrewMitglied()
first_member.nachname = "Quirk"
print(first_member.nachname)

fourth_member = main.CrewMitglied()
fourth_member.nachname = "Quirky"
print(fourth_member.nachname)

# Inspect the class object itself, the identity of the second surname string,
# and the default (identity-based) equality of the two distinct instances.
print(main.CrewMitglied)
print(id(fourth_member.nachname))
print(fourth_member == first_member)
| 21.866667 | 39 | 0.82622 |
72d7637395c4e4d053af5e766c0a56c1770c1cec
| 1,369 |
py
|
Python
|
hardware/chip/rtl872xd/build_bin.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | 4,538 |
2017-10-20T05:19:03.000Z
|
2022-03-30T02:29:30.000Z
|
hardware/chip/rtl872xd/build_bin.py
|
wstong999/AliOS-Things
|
6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9
|
[
"Apache-2.0"
] | 1,088 |
2017-10-21T07:57:22.000Z
|
2022-03-31T08:15:49.000Z
|
hardware/chip/rtl872xd/build_bin.py
|
willianchanlovegithub/AliOS-Things
|
637c0802cab667b872d3b97a121e18c66f256eab
|
[
"Apache-2.0"
] | 1,860 |
2017-10-20T05:22:35.000Z
|
2022-03-27T10:54:14.000Z
|
#! /usr/bin/env python
# Build helper: generates the RTOS image, compresses the OTA binary with the
# bundled xz tool, and stamps MD5/magic metadata onto both artifacts.
import os
import platform
import argparse
import sys
import shutil

# Echo the raw command line for build-log debugging.
print(sys.argv)

parser = argparse.ArgumentParser()
parser.add_argument('--target', dest='target', action='store')
args = parser.parse_args()

# Run relative to this script's directory so the tool paths below resolve.
mypath = os.path.dirname(sys.argv[0])
os.chdir(mypath)
print(os.getcwd())

target = args.target
cur_os = platform.system()
arch = platform.architecture()
path = ''
magic = '0xefefefef'  # magic number stamped into the OTA metadata below
# Select the per-platform xz binary shipped under tools/.
# NOTE(review): platform.architecture() returns a tuple like ('64bit', 'ELF'),
# so the membership test below works as intended.
if cur_os == 'Linux':
    if '64bit' in arch:
        path = 'linux64'
    else:
        path = 'linux32'
elif cur_os == 'Darwin':
    path = 'osx'
elif cur_os == 'Windows':
    path = 'win32'
if path:
    path = os.path.join("tools", path, "xz")

hw_module = 0
# NOTE(review): `target` comes straight from the command line and is spliced
# into shell commands via os.system — acceptable only for trusted build input.
cmd_str = "python haas1000_genbin.py %d \"%s\"" % (hw_module, target)
os.system(cmd_str)

# Copy the generated image to its OTA name, xz-compress a copy (-k keeps the
# original), then stamp MD5 + magic onto both the raw and compressed binaries.
bin_path = os.path.join("..", "write_flash_gui", "ota_bin")
shutil.copy(os.path.join(bin_path, "ota_rtos.bin"), os.path.join(bin_path, "ota_rtos_ota.bin"))

cmd_str = "\"%s\" -f --lzma2=dict=32KiB --check=crc32 -k %s" % (os.path.abspath(path), os.path.join(bin_path, "ota_rtos_ota.bin"))
os.system(cmd_str)

cmd_str = "python ota_gen_md5_bin.py \"%s\" -m %s" % (os.path.join(bin_path, "ota_rtos_ota.bin"), magic)
os.system(cmd_str)

cmd_str = "python ota_gen_md5_bin.py \"%s\" -m %s" % (os.path.join(bin_path, "ota_rtos_ota.bin.xz"), magic)
os.system(cmd_str)

print("run external script success")
| 26.843137 | 131 | 0.677867 |
f42871b4830f3266f76df5525df3b179c5855904
| 6,556 |
py
|
Python
|
official/cv/alexnet/train.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | null | null | null |
official/cv/alexnet/train.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | null | null | null |
official/cv/alexnet/train.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 2 |
2019-09-01T06:17:04.000Z
|
2019-10-04T08:39:45.000Z
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
######################## train alexnet example ########################
train alexnet and get network model files(.ckpt) :
python train.py --data_path /YourDataPath
"""
import os
from src.model_utils.config import config
from src.model_utils.moxing_adapter import moxing_wrapper
from src.model_utils.device_adapter import get_device_id, get_device_num, get_rank_id, get_job_id
from src.dataset import create_dataset_cifar10, create_dataset_imagenet
from src.generator_lr import get_lr_cifar10, get_lr_imagenet
from src.alexnet import AlexNet
from src.get_param_groups import get_param_groups
import mindspore.nn as nn
from mindspore.communication.management import init, get_rank
from mindspore import dataset as de
from mindspore import context
from mindspore import Tensor
from mindspore.train import Model
from mindspore.context import ParallelMode
from mindspore.nn.metrics import Accuracy
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
from mindspore.common import set_seed
# Fix both the global MindSpore seed and the dataset pipeline seed so training
# runs are reproducible.
set_seed(1234)
de.config.set_seed(2345)
def modelarts_pre_process():
    # Hook invoked by moxing_wrapper before training when running on
    # ModelArts; currently a no-op (the path rewrite below is disabled).
    pass
    # config.ckpt_path = os.path.join(config.output_path, str(get_rank_id()), config.checkpoint_path)
@moxing_wrapper(pre_process=modelarts_pre_process)
def train_alexnet():
    """Train AlexNet on cifar10 or imagenet as selected by `config`.

    Configures the MindSpore context (device, graph kernel, data-parallel),
    builds the dataset/network/optimizer, and runs Model.train with
    time/loss/checkpoint callbacks. All settings come from the global
    `config` object; raises ValueError for unsupported datasets/platforms.
    """
    print('device id:', get_device_id())
    print('device num:', get_device_num())
    print('rank id:', get_rank_id())
    print('job id:', get_job_id())

    device_target = config.device_target
    context.set_context(mode=context.GRAPH_MODE, device_target=config.device_target)
    context.set_context(save_graphs=False)
    if device_target == "GPU":
        # Graph-kernel fusion; restrict clustering to MatMul ops.
        context.set_context(enable_graph_kernel=True)
        context.set_context(graph_kernel_flags="--enable_cluster_ops=MatMul")

    device_num = get_device_num()
    if config.dataset_name == "cifar10":
        # Scale LR with the number of devices and train longer when distributed.
        if device_num > 1:
            config.learning_rate = config.learning_rate * device_num
            config.epoch_size = config.epoch_size * 2
    elif config.dataset_name == "imagenet":
        pass
    else:
        raise ValueError("Unsupported dataset.")

    if device_num > 1:
        # Distributed: data-parallel with gradient averaging across devices.
        context.reset_auto_parallel_context()
        context.set_auto_parallel_context(device_num=device_num,
                                          parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True)
        if device_target == "Ascend":
            context.set_context(device_id=get_device_id())
            init()
        elif device_target == "GPU":
            init()
    else:
        context.set_context(device_id=get_device_id())

    # _off_load: when True, normalize/transpose run on device (imagenet only).
    _off_load = False
    if config.dataset_name == "cifar10":
        ds_train = create_dataset_cifar10(config, config.data_path, config.batch_size, target=config.device_target)
    elif config.dataset_name == "imagenet":
        # Imagenet dataset normalize and transpose will work on device
        _off_load = True
        ds_train = create_dataset_imagenet(config, config.data_path, config.batch_size)
    else:
        raise ValueError("Unsupported dataset.")

    if ds_train.get_dataset_size() == 0:
        raise ValueError("Please check dataset size > 0 and batch_size <= dataset size")

    network = AlexNet(config.num_classes, phase='train', off_load=_off_load)

    loss_scale_manager = None
    metrics = None
    # sink_size == -1 means "one epoch per sink", i.e. the full dataset size.
    step_per_epoch = ds_train.get_dataset_size() if config.sink_size == -1 else config.sink_size
    if config.dataset_name == 'cifar10':
        loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
        lr = Tensor(get_lr_cifar10(0, config.learning_rate, config.epoch_size, step_per_epoch))
        opt = nn.Momentum(network.trainable_params(), lr, config.momentum)
        metrics = {"Accuracy": Accuracy()}
    elif config.dataset_name == 'imagenet':
        loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
        lr = Tensor(get_lr_imagenet(config.learning_rate, config.epoch_size, step_per_epoch))
        opt = nn.Momentum(params=get_param_groups(network),
                          learning_rate=lr,
                          momentum=config.momentum,
                          weight_decay=config.weight_decay,
                          loss_scale=config.loss_scale)

        from mindspore.train.loss_scale_manager import DynamicLossScaleManager, FixedLossScaleManager
        if config.is_dynamic_loss_scale == 1:
            loss_scale_manager = DynamicLossScaleManager(init_loss_scale=65536, scale_factor=2, scale_window=2000)
        else:
            loss_scale_manager = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False)
    else:
        raise ValueError("Unsupported dataset.")

    # Mixed precision (O2); Ascend additionally keeps batchnorm in fp16.
    if device_target == "Ascend":
        model = Model(network, loss_fn=loss, optimizer=opt, metrics=metrics, amp_level="O2", keep_batchnorm_fp32=False,
                      loss_scale_manager=loss_scale_manager)
    elif device_target == "GPU":
        model = Model(network, loss_fn=loss, optimizer=opt, metrics=metrics, amp_level="O2",
                      loss_scale_manager=loss_scale_manager)
    else:
        raise ValueError("Unsupported platform.")

    # Per-rank checkpoint directory when distributed, to avoid write clashes.
    if device_num > 1:
        ckpt_save_dir = os.path.join(config.ckpt_path + "_" + str(get_rank()))
    else:
        ckpt_save_dir = config.ckpt_path

    time_cb = TimeMonitor(data_size=step_per_epoch)
    config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_steps,
                                 keep_checkpoint_max=config.keep_checkpoint_max)
    ckpoint_cb = ModelCheckpoint(prefix="checkpoint_alexnet", directory=ckpt_save_dir, config=config_ck)

    print("============== Starting Training ==============")
    model.train(config.epoch_size, ds_train, callbacks=[time_cb, ckpoint_cb, LossMonitor()],
                dataset_sink_mode=config.dataset_sink_mode, sink_size=config.sink_size)
# Script entry point.
if __name__ == "__main__":
    train_alexnet()
| 43.417219 | 119 | 0.705003 |
be74cb5278563e2c0c727d6764696d19f41a4004
| 657 |
py
|
Python
|
Python/Courses/Python-Tutorials.Telusko/01.Object-Oriented-Programming/13.Types-of-Methods.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Courses/Python-Tutorials.Telusko/01.Object-Oriented-Programming/13.Types-of-Methods.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
Python/Courses/Python-Tutorials.Telusko/01.Object-Oriented-Programming/13.Types-of-Methods.py
|
shihab4t/Books-Code
|
b637b6b2ad42e11faf87d29047311160fe3b2490
|
[
"Unlicense"
] | null | null | null |
# There are three type methods
# Instance methods
# Class methods
# Static methods
class Student:
    """Demonstrates the three method kinds: instance, class, and static."""

    # Class attribute shared by every Student.
    school = "Telusko"

    @classmethod
    def get_school(cls):
        """Class method: reads class-level state via cls."""
        return cls.school

    @staticmethod
    def info():
        """Static method: needs neither an instance nor the class."""
        print("This is Student Class")

    def __init__(self, m1, m2, m3):
        """Store the three mark values on the instance."""
        self.m1, self.m2, self.m3 = m1, m2, m3

    def avg(self):
        """Instance method: arithmetic mean of the three marks."""
        return sum((self.m1, self.m2, self.m3)) / 3

    def get_m1(self):
        """Accessor for the first mark."""
        return self.m1

    def set_m1(self, value):
        """Mutator for the first mark."""
        self.m1 = value
# Demo: instance methods need objects; class/static methods are called on
# the class itself.
s1 = Student(34, 47, 32)
s2 = Student(89, 32, 12)
print(s1.avg(), s2.avg())
print(Student.get_school())
Student.info()
| 16.846154 | 48 | 0.585997 |
fe5dd6ef946e01550dbf96ea873e0e6a94b6223c
| 2,507 |
py
|
Python
|
huTools/http/engine_httplib2.py
|
gadventures/huTools
|
8bc58d63491bcd3cfc3e78d219be703d943ffeb5
|
[
"BSD-3-Clause"
] | null | null | null |
huTools/http/engine_httplib2.py
|
gadventures/huTools
|
8bc58d63491bcd3cfc3e78d219be703d943ffeb5
|
[
"BSD-3-Clause"
] | null | null | null |
huTools/http/engine_httplib2.py
|
gadventures/huTools
|
8bc58d63491bcd3cfc3e78d219be703d943ffeb5
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
engine_httplib2.py implements httplib2 based queries for huTools.http
Created by Maximillian Dornseif on 2010-10-24.
Copyright (c) 2010, 2011 HUDORA. All rights reserved.
"""
from __future__ import unicode_literals
from builtins import object
import socket
import huTools.http.tools
from huTools.http import exceptions
_http = None
def request(url, method, content, headers, timeout=50, caching=None):
    """Does a HTTP Request via httplib2.

    Returns (status_code, reply_headers_dict, body). Mutates `headers` by
    appending ' (httplib2)' to the User-Agent. Raises exceptions.Timeout on
    socket timeout.

    NOTE(review): `caching` is accepted but unused here; `timeout` is only
    honored when the shared Http object is first created — later calls reuse
    the existing object and its original timeout.
    """
    global _http
    if not _http:
        # Lazily create one shared connection object for the whole process.
        import huTools.http._httplib2
        _http = huTools.http._httplib2.Http(timeout=timeout)
    headers['User-Agent'] = headers.get('User-Agent', '') + ' (httplib2)'
    _http.clear_credentials()
    _http.forward_authorization_headers = True
    # Do not re-use the global Http object after a timeout.
    # To achieve this, it is set to None.
    try:
        resp, content = _http.request(url, method, content, headers=headers)
    except socket.timeout:
        _http = None
        raise exceptions.Timeout
    replyheaders = {}
    replyheaders.update(resp)
    return int(resp.status), replyheaders, content
class AsyncHttpResult(object):
    """Synchronous emulation for plain Python.

    See `engine_appengine.AsyncHttpResult()` for the thinking behind it.
    `huTools.http.fetch_async()` is a somewhat more high-level interface.
    """
    def __init__(self, caching=None):
        # `caching` is accepted for interface parity with the async engines
        # but is not used here.
        self.url, self.method, self.content, self.headers, self.timeout = None, None, None, None, None
        self._result = None

    def fetch(self, url, content='', method='GET', credentials=None, headers=None, multipart=False, ua='',
              timeout=25, returnhandler=lambda x, y, z: (x, y, z)):
        """Save parameters but delay request execution until get_result() is called."""
        # prepare_headers normalizes url/content/headers; the last tuple
        # element is ignored here.
        self.url, self.method, self.content, self.headers, self.timeout, _dummy = \
            huTools.http.tools.prepare_headers(url, content, method, credentials, headers, multipart,
                                               ua, timeout)
        self.returnhandler = returnhandler

    def get_result(self):
        """Execute the request, pass it through returnhandler, and return."""
        # Cache the result because we might get called more than once.
        if not self._result:
            self._result = self.returnhandler(*request(self.url, self.method, self.content,
                                                       self.headers, self.timeout))
        return self._result
| 37.41791 | 106 | 0.66414 |
fe9ab7da55844d4a1fdd39cf799a850e990bc7bc
| 216 |
py
|
Python
|
mainwin.py
|
916958205/HanWei
|
66cd9d4495cd95145fe72de4cbcce5a0a76b716c
|
[
"MIT"
] | null | null | null |
mainwin.py
|
916958205/HanWei
|
66cd9d4495cd95145fe72de4cbcce5a0a76b716c
|
[
"MIT"
] | null | null | null |
mainwin.py
|
916958205/HanWei
|
66cd9d4495cd95145fe72de4cbcce5a0a76b716c
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5.QtWidgets import *

# A QApplication must exist before any widget can be created.
app = QApplication(sys.argv)
mywidget = QWidget()
mywidget.setGeometry(200,200,600,300)  # x, y, width, height
mywidget.setWindowTitle("Hello PyQt5")
mywidget.show()
# Enter the Qt event loop; its exit status is propagated to the shell.
sys.exit(app.exec_())
| 21.6 | 39 | 0.740741 |
4a9210e69a5b01e157689bfffe2fe1f0d3533460
| 54 |
py
|
Python
|
Problems/Dynamic Programming/Easy/DivisorGame/divisor_game.py
|
dolong2110/Algorithm-By-Problems-Python
|
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
|
[
"MIT"
] | 1 |
2021-08-16T14:52:05.000Z
|
2021-08-16T14:52:05.000Z
|
Problems/Dynamic Programming/Easy/DivisorGame/divisor_game.py
|
dolong2110/Algorithm-By-Problems-Python
|
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
|
[
"MIT"
] | null | null | null |
Problems/Dynamic Programming/Easy/DivisorGame/divisor_game.py
|
dolong2110/Algorithm-By-Problems-Python
|
31ecc7367aaabdd2b0ac0af7f63ca5796d70c730
|
[
"MIT"
] | null | null | null |
def divisorGame(n: int) -> bool:
    """Return True when the first player (Alice) wins the divisor game.

    The game reduces to parity: the first player wins iff n is even.
    """
    _, remainder = divmod(n, 2)
    return remainder == 0
| 27 | 32 | 0.574074 |
6072ec4a6cf38366a34046b4f169e8782e764658
| 997 |
py
|
Python
|
chapter01/postgres_study.py
|
thiagola92/learning-databases-with-python
|
cf23c34d7fd1ecd36dd3e7b30dc5916eb23eaf1e
|
[
"MIT"
] | null | null | null |
chapter01/postgres_study.py
|
thiagola92/learning-databases-with-python
|
cf23c34d7fd1ecd36dd3e7b30dc5916eb23eaf1e
|
[
"MIT"
] | null | null | null |
chapter01/postgres_study.py
|
thiagola92/learning-databases-with-python
|
cf23c34d7fd1ecd36dd3e7b30dc5916eb23eaf1e
|
[
"MIT"
] | null | null | null |
# pip install psycopg
import psycopg
# Connect to database
client = psycopg.connect("postgres://username:[email protected]")
cursor = client.cursor()
# Create table
cursor.execute(
"""
CREATE TABLE table_name(
sku integer,
name varchar(255),
description text,
category varchar(255)
)
"""
)
# Insert information
cursor.execute(
"""
INSERT INTO table_name
VALUES(134218478, 'Rb-01 - Robô Aspirador De Pó Fast Clean Bivolt - Mondial', 'Use a tecnologia a seu favor para aproveitar a vida longe da faxina. Conheça mais essa facilidade para o seu lar e deixe tuuuudo limpinho :)', 'eletroportáteis')
"""
)
# Query information
cursor.execute("SELECT * FROM table_name")
p = cursor.fetchone()
print(p)
# Destroy table
cursor.execute("DROP TABLE table_name")
# IMPORTANT: Close connection
#
# Is dangerous to live a connection open,
# it could block access from other users to tables
cursor.close()
client.commit()
client.close()
| 23.186047 | 248 | 0.698094 |
71a89e1600d74e6aadd3137ef02d9118ef093d4d
| 576 |
py
|
Python
|
algorithms/sorting/countingsort2.py
|
PlamenHristov/HackerRank
|
2c875995f0d51d7026c5cf92348d9fb94fa509d6
|
[
"MIT"
] | null | null | null |
algorithms/sorting/countingsort2.py
|
PlamenHristov/HackerRank
|
2c875995f0d51d7026c5cf92348d9fb94fa509d6
|
[
"MIT"
] | null | null | null |
algorithms/sorting/countingsort2.py
|
PlamenHristov/HackerRank
|
2c875995f0d51d7026c5cf92348d9fb94fa509d6
|
[
"MIT"
] | null | null | null |
import sys
def print_list(ar):
    """Print the sequence as space-separated values on a single line."""
    print(' '.join(str(item) for item in ar))
def insertion_sort(ar):
    """Sort *ar* in place via adjacent swaps, printing after every pass.

    Matches the HackerRank output format: the list is printed once per
    inserted element (and exactly once for a single-element list).
    """
    if len(ar) == 1:
        print_list(ar)
        return (ar)
    for upper in range(1, len(ar)):
        # Bubble ar[upper] leftwards until it sits in sorted position.
        pos = upper - 1
        while pos >= 0 and ar[pos + 1] < ar[pos]:
            ar[pos], ar[pos + 1] = ar[pos + 1], ar[pos]
            pos -= 1
        print_list(ar)
    return (ar)
if __name__ == '__main__':
    # First stdin line: element count (read but unused beyond parsing);
    # second line: the space-separated values to sort.
    s = int(sys.stdin.readline())
    ar = list(map(int, sys.stdin.readline().split()))
    insertion_sort(ar)
| 19.2 | 55 | 0.465278 |
e08182f902793a6bb25cc908d68f31a715f0aa01
| 3,432 |
py
|
Python
|
research/cv/PGAN/src/customer_layer.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77 |
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/PGAN/src/customer_layer.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3 |
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/PGAN/src/customer_layer.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24 |
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""cell define"""
import math
import mindspore
from mindspore import ops, nn
import numpy as np
from numpy import prod
def num_flat_features(x):
    """Number of features per sample: total element count over the batch size."""
    batch = x.shape[0]
    return x.size // batch
def getLayerNormalizationFactor(x):
    """
    Get He's constant for the given layer.

    fan_in is the product of all weight dimensions except the first
    (output-channel) axis; the factor is sqrt(2 / fan_in).
    Returns:
        output.
    """
    fan_in = prod(x.weight.shape[1:])
    return math.sqrt(2.0 / fan_in)
class ConstrainedLayer(nn.Cell):
    """
    A handy refactor that allows the user to:
    - initialize one layer's bias to zero
    - apply He's initialization at runtime
    """
    def __init__(self,
                 module,
                 equalized=True,
                 lrMul=1.0,
                 initBiasToZero=True):
        """Wrap *module*; optionally zero its bias and prepare equalized LR.

        module: the wrapped MindSpore layer (must expose .weight/.bias).
        equalized: apply He's scaling factor at runtime (equalized learning rate).
        lrMul: learning-rate multiplier folded into the weight init and scale.
        initBiasToZero: overwrite the module's bias with zeros.
        """
        super(ConstrainedLayer, self).__init__()
        self.module = module
        self.equalized = equalized
        if initBiasToZero:
            bias_shape = self.module.bias.shape
            zeros = ops.Zeros()
            self.module.bias.set_data(zeros(bias_shape, mindspore.float32))
        if self.equalized:
            # Weights start as N(0, 1)/lrMul; the He factor is applied in
            # construct() instead of being baked into the weights.
            weight_shape = self.module.weight.shape
            wight_init = np.random.normal(loc=0.0, scale=1.0, size=weight_shape) / lrMul
            self.module.weight.set_data(mindspore.Tensor(wight_init, mindspore.float32))
            self.lr_weight = getLayerNormalizationFactor(self.module) * lrMul
    def construct(self, x):
        # Forward through the wrapped module, then rescale when equalized.
        x = self.module(x)
        if self.equalized:
            x *= self.lr_weight
        return x
class EqualizedLinear(ConstrainedLayer):
    """
    EqualizedLinear: a Dense layer with zero bias init and equalized
    learning rate (see ConstrainedLayer).
    """
    def __init__(self,
                 nChannelsPrevious,
                 nChannels):
        """
        A nn.Linear module with specific constraints
        Args:
            nChannelsPrevious (int): number of channels in the previous layer
            nChannels (int): number of channels of the current layer
        """
        ConstrainedLayer.__init__(self,
                                  nn.Dense(nChannelsPrevious, nChannels))
class EqualizedConv2d(ConstrainedLayer):
    """
    EqualizedConv2d: a Conv2d layer with zero bias init and equalized
    learning rate (see ConstrainedLayer).
    """
    def __init__(self, depthNewScale, out, kernnel, padding, pad_mode="pad", has_bias=True):
        """
        A nn.Conv2d module with specific constraints
        Args:
            depthNewScale (int): number of channels in the previous layer
            out (int): number of channels of the current layer
            kernnel (int): size of the convolutional kernel
            padding (int): convolution's padding
            pad_mode (str): MindSpore padding mode (default "pad")
            has_bias (bool): with bias ?
        """
        ConstrainedLayer.__init__(self,
                                  nn.Conv2d(depthNewScale, out, kernnel, padding=padding, pad_mode=pad_mode,
                                            has_bias=has_bias))
| 33.647059 | 108 | 0.611305 |
e81f3beaef0460340adf80110f1e58e34051372f
| 455 |
py
|
Python
|
insomniac/globals.py
|
shifenis/Insomniac
|
7c9d572b83c29049bc3075073be5549fe821a739
|
[
"MIT"
] | 533 |
2020-06-01T10:40:11.000Z
|
2022-03-29T17:05:50.000Z
|
insomniac/globals.py
|
shifenis/Insomniac
|
7c9d572b83c29049bc3075073be5549fe821a739
|
[
"MIT"
] | 399 |
2020-06-01T22:01:55.000Z
|
2022-03-29T20:39:29.000Z
|
insomniac/globals.py
|
shifenis/Insomniac
|
7c9d572b83c29049bc3075073be5549fe821a739
|
[
"MIT"
] | 166 |
2020-06-01T21:51:52.000Z
|
2022-03-12T14:14:44.000Z
|
# These constants can be set by the external UI-layer process, don't change them manually
is_ui_process = False
execution_id = ''
task_id = ''
executable_name = 'insomniac'
do_location_permission_dialog_checks = True # no need in these checks if location permission is denied beforehand
def callback(profile_name):
    # Default no-op hook; the UI process is expected to replace the two
    # callback slots below with real handlers.
    pass
hardban_detected_callback = callback
softban_detected_callback = callback
def is_insomniac():
    # True when running standalone, i.e. no execution id was assigned
    # by the external UI-layer process.
    return execution_id == ''
| 23.947368 | 114 | 0.78022 |
c7e77f8396527723480d65285dbe49196381cf4a
| 3,480 |
py
|
Python
|
api/scripts/image_requests.py
|
CsabaWirnhardt/cbm
|
1822addd72881057af34ac6a7c2a1f02ea511225
|
[
"BSD-3-Clause"
] | 17 |
2021-01-18T07:27:01.000Z
|
2022-03-10T12:26:21.000Z
|
api/scripts/image_requests.py
|
CsabaWirnhardt/cbm
|
1822addd72881057af34ac6a7c2a1f02ea511225
|
[
"BSD-3-Clause"
] | 4 |
2021-04-29T11:20:44.000Z
|
2021-12-06T10:19:17.000Z
|
api/scripts/image_requests.py
|
CsabaWirnhardt/cbm
|
1822addd72881057af34ac6a7c2a1f02ea511225
|
[
"BSD-3-Clause"
] | 47 |
2021-01-21T08:25:22.000Z
|
2022-03-21T14:28:42.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of CbM (https://github.com/ec-jrc/cbm).
# Author : Guido Lemoine, Konstantinos Anastasakis
# Credits : GTCAP Team
# Copyright : 2021 European Commission, Joint Research Centre
# License : 3-Clause BSD
import logging
from scripts import backgroundExtract as bgext
from scripts import chipS2Extractor2 as ces2
from scripts import rawChipBatchExtract as rceb
from scripts import rawChipExtractor as rce
from scripts import rawS1ChipBatchExtract as rces1
# logging.basicConfig(filename='logs/queryHandler.log', filemode='w',
# format='%(name)s - %(levelname)s - %(message)s',
# level=logging.ERROR)
# Parcel Images
def getBackgroundByLocation(lon, lat, chipsize, chipextend, tms,
                            unique_id, iformat, withGeometry):
    """Extract a background image chip for the given location.

    Delegates to ``backgroundExtract.getBackgroundExtract``.

    Returns:
        bool: True on success, False if the extraction raised an exception.
    """
    try:
        logging.debug(unique_id)
        logging.debug(f"{unique_id} {iformat}")
        bgext.getBackgroundExtract(lon, lat, chipsize, chipextend,
                                   unique_id, tms, iformat, withGeometry)
        return True
    except Exception as err:
        # BUG FIX: the error was printed and the function fell through,
        # implicitly returning None; return False explicitly so callers
        # always get a boolean (None and False are both falsy, so this is
        # backward-compatible).
        print(err)
        return False
def getChipsByLocation(lon, lat, start_date, end_date, unique_id, lut='5_95',
                       bands='B08_B04_B03', plevel='LEVEL2A'):
    """Extract Sentinel-2 chips for a location and build an HTML overview.

    Progress is reported via ``print``; always returns True.
    """
    logging.debug(lut)
    logging.debug(bands)
    logging.debug(f"""{lon} {lat} {start_date} {end_date}
        {unique_id} {lut} {bands} {plevel}""")
    chip_count = ces2.parallelExtract(lon, lat, start_date, end_date,
                                      unique_id, lut, bands, plevel)
    if chip_count > 0:
        print(f"New chips ara in {unique_id}")
    elif chip_count == -1:
        print(f"Request results in too many chips, please revise selection")
    else:
        print(f"Chips already cached in {unique_id}")
    print(ces2.buildHTML(unique_id, start_date, end_date))
    return True
def getRawChipByLocation(lon, lat, start_date, end_date, unique_id, band,
                         chipsize='1280', plevel='LEVEL2A'):
    """Extract raw single-band chips for a location and emit a JSON summary.

    Progress is reported via ``print``; always returns True.
    """
    logging.debug(
        f"{lon} {lat} {start_date} {end_date} {unique_id} {band} {plevel}")
    chip_count = rce.parallelExtract(lon, lat, start_date, end_date, unique_id,
                                     band, chipsize, plevel)
    if chip_count > 0:
        print(f"New chips ara in {unique_id}")
    elif chip_count == -1:
        print(f"Request results in too many chips, please revise selection")
    else:
        print(f"Chips already cached in {unique_id}")
    print(rce.buildJSON(unique_id, start_date, end_date))
    return True
def getRawChipsBatch(unique_id):
    """Batch-extract raw S2 chips; parameters are read from params.json
    inside the *unique_id* directory by the extractor itself.

    Progress is reported via ``print``; always returns True.
    """
    logging.debug(unique_id)
    chip_count = rceb.parallelExtract(unique_id)
    if chip_count > 0:
        print(rceb.chipCollect(unique_id))
    elif chip_count == -1:
        print(f"Request results in too many chips, please revise selection")
    else:
        print(f"Chips already cached in {unique_id}")
    print(rceb.buildJSON(unique_id))
    return True
def getRawS1ChipsBatch(unique_id):
    """Batch-extract raw Sentinel-1 chips; parameters are read from
    params.json inside the *unique_id* directory by the extractor itself.

    Progress is reported via ``print``; always returns True.
    """
    logging.debug(unique_id)
    chip_count = rces1.parallelExtract(unique_id)
    if chip_count > 0:
        print(rces1.chipCollect(unique_id))
    elif chip_count == -1:
        print(f"Request results in too many chips, please revise selection")
    else:
        print(f"Chips already cached in {unique_id}")
    print(rces1.buildJSON(unique_id))
    return True
| 31.351351 | 77 | 0.656609 |
40d65ae3d9a535bb7b8d32538ff709d83482fe89
| 322 |
py
|
Python
|
exercises/pt/test_03_14_03.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 2,085 |
2019-04-17T13:10:40.000Z
|
2022-03-30T21:51:46.000Z
|
exercises/pt/test_03_14_03.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 79 |
2019-04-18T14:42:55.000Z
|
2022-03-07T08:15:43.000Z
|
exercises/pt/test_03_14_03.py
|
Jette16/spacy-course
|
32df0c8f6192de6c9daba89740a28c0537e4d6a0
|
[
"MIT"
] | 361 |
2019-04-17T13:34:32.000Z
|
2022-03-28T04:42:45.000Z
|
def test():
    # Executed by the course grader: __solution__ holds the learner's
    # submitted code as a string and __msg__ is the feedback channel
    # (both are globals injected by the runner, not defined here).
    assert (
        "patterns = list(nlp.pipe(people))" in __solution__
    ), "Você está usando nlp.pipe envolvido em uma lista (list)?"
    __msg__.good(
        "Bom trabalho! Vamos seguir agora com um exemplo prático que "
        "usa nlp.pipe para processar documentos com metadados adicionais."
    )
| 32.2 | 74 | 0.65528 |
295b0e2c98328609058fa98007fec71342d1c8a6
| 967 |
py
|
Python
|
src/nfz_module/writer/ImageFileWriter.py
|
hwroitzsch/BikersLifeSaver
|
469c738fdd6352c44a3f20689b17fa8ac04ad8a2
|
[
"MIT"
] | null | null | null |
src/nfz_module/writer/ImageFileWriter.py
|
hwroitzsch/BikersLifeSaver
|
469c738fdd6352c44a3f20689b17fa8ac04ad8a2
|
[
"MIT"
] | null | null | null |
src/nfz_module/writer/ImageFileWriter.py
|
hwroitzsch/BikersLifeSaver
|
469c738fdd6352c44a3f20689b17fa8ac04ad8a2
|
[
"MIT"
] | null | null | null |
import sys
import os
import cv2 as opencv
class ImageFileWriter:
    """Writes (filename, image) pairs into a fixed directory via OpenCV."""

    def __init__(self):
        pass

    def write_images(self, *args):
        """Persist images given as alternating name/data arguments.

        ``args`` must hold an even number of items:
        name1, image1, name2, image2, ...
        On an odd count nothing is written and an error is printed.
        """
        image_directory = '/root/ghisallo_venv/src/opencv_direction_indicator_detection/oop/test_images/'
        if len(args) % 2 != 0:
            print('argument error: got not enough names for files to write.')
            return
        # Pair every even-indexed name with the following image payload.
        for file_name, image_data in zip(args[::2], args[1::2]):
            file_path = os.path.join(image_directory, file_name)
            print('writing image file to', file_path)
            print('IMAGE_DATA.SHAPE:', image_data.shape)
            opencv.imwrite(file_path, image_data, (opencv.IMWRITE_PNG_COMPRESSION, 0))
| 31.193548 | 150 | 0.716649 |
2981346c3db0a3b37e7df408634135c10107f128
| 1,691 |
py
|
Python
|
src/visitpy/visit_flow/visit_flow_vpe/examples/flow_vpe_pyocl_compile_dw_mag.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 226 |
2018-12-29T01:13:49.000Z
|
2022-03-30T19:16:31.000Z
|
src/visitpy/visit_flow/visit_flow_vpe/examples/flow_vpe_pyocl_compile_dw_mag.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 5,100 |
2019-01-14T18:19:25.000Z
|
2022-03-31T23:08:36.000Z
|
src/visitpy/visit_flow/visit_flow_vpe/examples/flow_vpe_pyocl_compile_dw_mag.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 84 |
2019-01-24T17:41:50.000Z
|
2022-03-10T10:01:46.000Z
|
# Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
# Project developers. See the top-level LICENSE file for dates and other
# details. No copyright assignment is required to contribute to VisIt.
"""
file: vpe_flow_npy_ops_example_1.py
author: Cyrus Harrison <[email protected]>
created: 3/28/2012
description:
vpe flow example demonstrating use of flow.filters.npy_ops.
"""
from flow import *
from flow.filters import pyocl_compile
def setup_workspace():
    """Build the flow Workspace: a pyocl_compile context computing the
    magnitude of the gradient of the 'vz' input field
    (sqrt(dwdx^2 + dwdy^2 + dwdz^2))."""
    w = Workspace()
    w.register_filters(pyocl_compile)
    ctx = w.add_context("pyocl_compile","root")
    ctx.start()
    # Filters: gradient, its three components, squares, sums, final sqrt.
    ctx.add_filter("decompose","dwdx",{"index":0})
    ctx.add_filter("decompose","dwdy",{"index":1})
    ctx.add_filter("decompose","dwdz",{"index":2})
    ctx.add_filter("grad","dw")
    ctx.add_filter("mult","vx_sq")
    ctx.add_filter("mult","vy_sq")
    ctx.add_filter("mult","vz_sq")
    ctx.add_filter("add","v_add_1")
    ctx.add_filter("add","v_add")
    ctx.add_filter("sqrt","v_sqrt")
    # Wire the mesh inputs into the gradient filter.
    ctx.connect(":vz","dw:in")
    ctx.connect(":dims","dw:dims")
    ctx.connect(":x","dw:x")
    ctx.connect(":y","dw:y")
    ctx.connect(":z","dw:z")
    # Split the gradient into its three components.
    ctx.connect("dw","dwdx:in")
    ctx.connect("dw","dwdy:in")
    ctx.connect("dw","dwdz:in")
    # Square each component (mult with itself on both inputs).
    ctx.connect("dwdx","vx_sq:in_a")
    ctx.connect("dwdx","vx_sq:in_b")
    ctx.connect("dwdy","vy_sq:in_a")
    ctx.connect("dwdy","vy_sq:in_b")
    ctx.connect("dwdz","vz_sq:in_a")
    ctx.connect("dwdz","vz_sq:in_b")
    # Sum the squares and take the square root -> gradient magnitude.
    ctx.connect("vx_sq","v_add_1:in_a")
    ctx.connect("vy_sq","v_add_1:in_b")
    ctx.connect("v_add_1","v_add:in_a")
    ctx.connect("vz_sq","v_add:in_b")
    ctx.connect("v_add","v_sqrt:in")
    return w
| 30.196429 | 73 | 0.655825 |
461cb689aa037e31bf8e85bbf5e6e6ee9983c93d
| 598 |
py
|
Python
|
Python/M01_ProgrammingBasics/L03_ConditionalStatementsAdvanced/Exercises/Solutions/P07_HotelRoom.py
|
todorkrastev/softuni-software-engineering
|
cfc0b5eaeb82951ff4d4668332ec3a31c59a5f84
|
[
"MIT"
] | null | null | null |
Python/M01_ProgrammingBasics/L03_ConditionalStatementsAdvanced/Exercises/Solutions/P07_HotelRoom.py
|
todorkrastev/softuni-software-engineering
|
cfc0b5eaeb82951ff4d4668332ec3a31c59a5f84
|
[
"MIT"
] | null | null | null |
Python/M01_ProgrammingBasics/L03_ConditionalStatementsAdvanced/Exercises/Solutions/P07_HotelRoom.py
|
todorkrastev/softuni-software-engineering
|
cfc0b5eaeb82951ff4d4668332ec3a31c59a5f84
|
[
"MIT"
] | 1 |
2022-02-23T13:03:14.000Z
|
2022-02-23T13:03:14.000Z
|
# Seasonal hotel pricing: read month and number of nights, then print the
# total apartment and studio costs with the applicable discounts.
# NOTE(review): the >14-night apartment discount is only applied in
# July/August here — confirm against the pricing spec whether it should
# apply in the other seasons as well.
month = input()
days = int(input())
apartment_cost = 0
studio_cost = 0
if month in ("May", "October"):
    apartment_cost = days * 65
    studio_cost = days * 50
    if 7 < days <= 14:
        studio_cost *= 0.95
    if days > 14:
        studio_cost *= 0.7
elif month in ("June", "September"):
    apartment_cost = days * 68.70
    studio_cost = days * 75.20
    if days > 14:
        studio_cost *= 0.8
elif month in ("July", "August"):
    apartment_cost = days * 77
    studio_cost = days * 76
    if days > 14:
        apartment_cost *= 0.9
print(f"Apartment: {apartment_cost:.2f} lv.")
print(f"Studio: {studio_cost:.2f} lv.")
| 22.148148 | 45 | 0.548495 |
d3f0263c1029fc8074ff0ba2ade5f3885f319e79
| 478 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/patches/v7_1/save_stock_settings.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:55:29.000Z
|
2021-04-29T14:55:29.000Z
|
frappe-bench/apps/erpnext/erpnext/patches/v7_1/save_stock_settings.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/patches/v7_1/save_stock_settings.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | 1 |
2021-04-29T14:39:01.000Z
|
2021-04-29T14:39:01.000Z
|
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: clear stale references in Stock Settings.

    Drops the default warehouse / stock UOM when the referenced Warehouse
    or UOM record no longer exists, then saves while skipping mandatory
    field validation.
    """
    settings = frappe.get_doc('Stock Settings')
    warehouse = settings.default_warehouse
    if warehouse and not frappe.db.exists("Warehouse", warehouse):
        settings.default_warehouse = None
    uom = settings.stock_uom
    if uom and not frappe.db.exists("UOM", uom):
        settings.stock_uom = None
    settings.flags.ignore_mandatory = True
    settings.save()
| 29.875 | 87 | 0.797071 |
3169ea721f11da3ef897e1239e58961393bdb454
| 2,837 |
py
|
Python
|
Voting.py
|
Jugendhackt/Werwolf_Brettspiel
|
fc8ba953e005d2fc9b618c7bb2a93e57fbde6338
|
[
"MIT"
] | null | null | null |
Voting.py
|
Jugendhackt/Werwolf_Brettspiel
|
fc8ba953e005d2fc9b618c7bb2a93e57fbde6338
|
[
"MIT"
] | null | null | null |
Voting.py
|
Jugendhackt/Werwolf_Brettspiel
|
fc8ba953e005d2fc9b618c7bb2a93e57fbde6338
|
[
"MIT"
] | null | null | null |
import random
votes = []  # vote targets (player IDs) collected by Voting()
stimme = None
Rollennummer = 1 # player ID counter
gesamt_spieler = [] # collects all player names
spieler_nummer = {} # maps player ID -> player name
for x in range(0, 4): # read in 4 players
    spielername = input("Was ist ihr Spielername? ") # prompt for the player name
    spieler_nummer[Rollennummer] = spielername # record ID -> name
    Rollennummer = Rollennummer + 1 # player IDs are unique
    gesamt_spieler.append(spielername) # remember the name
spieler_nummer_const = spieler_nummer # NOTE(review): this aliases the dict, it does not copy it
gesamt_spieler_const = gesamt_spieler # NOTE(review): alias, not a copy
def Voting():
    # Ask the current player which player number they vote for (with a
    # randomly chosen prompt) and append the vote to the global `votes` list.
    stimme = None
    Voting_DE = ["Für welchen Spieler möchtest du abstimmen ?", # list of prompts (prompt 1)
                 "Wen möchtest du tot sehen ?"] # (prompt 2)
    voting_spruch = random.randint(1, 2) # pick prompt 1 or 2 at random
    if voting_spruch == 1: # random number is 1
        stimme = int(float(input(Voting_DE[0]))) # show prompt 1
    if voting_spruch == 2: # random number is 2
        stimme = int(float(input(Voting_DE[1]))) # show prompt 2
    votes.append(stimme)
    spieler_wahl = spieler_nummer.get(stimme)  # None if not a known player ID
    print("Du hast", spieler_wahl,"gewählt.")
def Stimmen_Auszählen():
    # Tally all recorded votes and announce whom the village voted out.
    votes_pro_spieler = {}  # player ID -> number of votes received
    Rollennummer = 0
    for i in range(0, len(gesamt_spieler_const)):
        stimmen_zaehler = 0
        Rollennummer = Rollennummer + 1
        if Rollennummer in votes:
            for x in range(0, len(votes)):
                if Rollennummer == votes[x]:
                    stimmen_zaehler = stimmen_zaehler + 1
                    votes_pro_spieler[Rollennummer] = stimmen_zaehler
                    print(votes_pro_spieler)
                else:
                    None  # no-op branch, kept as-is
        else:
            None  # no-op branch, kept as-is
    print(max(votes_pro_spieler.values()))
    highest = max(votes_pro_spieler.values())
    print([key for key in votes_pro_spieler if votes_pro_spieler[key] == highest])
    most_votes = [key for key in votes_pro_spieler if votes_pro_spieler[key] == highest]
    if len(most_votes) == 1:
        # Extract the single winner ID by stripping the brackets from the
        # list's string representation.
        key = int(str(most_votes)[1:-1])
    else:
        None  # no-op branch, kept as-is
    if len(most_votes) == 1:
        voted_player = spieler_nummer.get(key)
        print("Das Dorf hat ", [key for key in votes_pro_spieler if votes_pro_spieler[key] == highest], "getötet. ")
        # NOTE(review): the next line prints the repr of a generator object,
        # not the winning keys — this looks unintended.
        print(key for key in votes_pro_spieler if votes_pro_spieler[key] == highest)
        print("Das Dorf hat ", voted_player, "getötet. ")
    else:
        print("Das Dorf konnte sich nicht entscheiden. ")
print(spieler_nummer)
Voting()
Voting()
Voting()
Stimmen_Auszählen()
| 31.876404 | 119 | 0.662672 |
31ecabf8f218b507e5f90a6d73407b82a4266e4d
| 2,322 |
py
|
Python
|
Python/Buch_ATBS/Teil_1/Kapitel_06_Stringbearbeitung/12_kapitel_6_repetitionsfragen.py
|
Apop85/Scripts
|
1d8dad316c55e1f1343526eac9e4b3d0909e4873
|
[
"MIT"
] | null | null | null |
Python/Buch_ATBS/Teil_1/Kapitel_06_Stringbearbeitung/12_kapitel_6_repetitionsfragen.py
|
Apop85/Scripts
|
1d8dad316c55e1f1343526eac9e4b3d0909e4873
|
[
"MIT"
] | 6 |
2020-12-24T15:15:09.000Z
|
2022-01-13T01:58:35.000Z
|
Python/Buch_ATBS/Teil_1/Kapitel_06_Stringbearbeitung/12_kapitel_6_repetitionsfragen.py
|
Apop85/Scripts
|
1d8dad316c55e1f1343526eac9e4b3d0909e4873
|
[
"MIT"
] | null | null | null |
# Interactive quiz: prints chapter-6 review questions (German) and waits
# for Enter between each question and its answer. The printed strings are
# runtime output and are left exactly as written.
print('1. Was sind Maskierungssequenzen?')
input()
print('Maskierungszeichen stehen für Zeichen die sonst nur\nschwer in einem Code wiedergeben lassen')
print(r'Beispiele: \n für newline \t für tabulator \\ für backslash ... ')
print('')
input()
print('2. Wofür stehen die Maskierungssequenzen \\t und \\n')
input()
print(r'\n steht für Newline und \t für Tabulator')
print('')
input()
print(r'3. Wie können sie einen Backslash (\) in einen String einfügen?')
input()
print('Es gibt zwei Möglichkeiten.')
print(r"Einmal mittels mittels '\\' und einmal mittels r'\'")
print('')
input()
print('4. "How\'s your day?" ist ein gültiger Stringwert. \nWarum führt das als Apostroph verwendete einfache\nAnführungszeichen in How\'s nicht zu einem Problem \nobwohl es nicht maskiert ist?')
input()
print('Da der String mit doppelten Anführungszeichen\ngeschrieben wurde werden die einfachen\nAnführungszeichen innerhalb des Strings nicht\nausgewertet.')
print('')
input()
print('5. Wie können sie einen String mit Zeilenumbruch schreiben ohne \\n zu verwenden?')
input()
print('Man kann die Zeilenumbrüche auch direkt in Strings verwenden ohne \\n nutzen zu müssen.')
print('')
input()
print('6. Wozu werden folgende Ausdrücke ausgewertet?\n\'Hello World\'[1]\n\'Hello World\'[0:5]\n\'Hello World\'[:5]\n\'Hello World\'[3:]')
input()
print('1. e - 2. Hello - 3. Hello - 4. lo World')
print('')
input()
print('7. Wozu werden folgende Ausdrücke ausgewertet?\n\'Hello\'.upper()\n\'Hello\'.upper().isupper()\n\'Hello\'.upper().lower()')
input()
print('1. HELLO - 2. True - 3. hello')
print('')
input()
print('8. Wozu werden folgende Ausdrücke ausgewertet?\n\'Remember, remember, the fifth of november.\'.split()\n\'-\'.join(\'There can be only one.\'.split())')
input()
print('1. [ \'Remember,\', \'remember,\', \'the\', \'fifth\', \'of\', \'november.\' ]')
print('2. There-can-be-only-one.')
input()
print('9. Mit welchen Stringmethoden können sie einen String\nRechtsbünding, Zentriert oder Linksbündig ausrichten?')
input()
print('Mit den Methoden string.center(n), string.rjust(n) und string.ljust(n)')
print('')
input()
print('10. Wie können Weissraumzeichen/Leerschläge am Ende\n und am Anfang eines Strings entfernt werden?')
input()
print('Mit string.strip(), string.lstrip() und string.rstrip()')
print('')
input()
| 36.28125 | 195 | 0.710594 |
9ed178365b543bb10c9662617049b7f042a2b479
| 2,537 |
py
|
Python
|
03 Python/Smart Home Dashboard/loesung/widgets/label.py
|
DennisSchulmeister/dhbwka-wwi-iottech-quellcodes
|
58f86907af31187f267a9ea476f061cc59098ebd
|
[
"CC-BY-4.0"
] | null | null | null |
03 Python/Smart Home Dashboard/loesung/widgets/label.py
|
DennisSchulmeister/dhbwka-wwi-iottech-quellcodes
|
58f86907af31187f267a9ea476f061cc59098ebd
|
[
"CC-BY-4.0"
] | null | null | null |
03 Python/Smart Home Dashboard/loesung/widgets/label.py
|
DennisSchulmeister/dhbwka-wwi-iottech-quellcodes
|
58f86907af31187f267a9ea476f061cc59098ebd
|
[
"CC-BY-4.0"
] | 1 |
2020-10-10T20:24:05.000Z
|
2020-10-10T20:24:05.000Z
|
import pygame
from pygame.locals import *
from .widget import Widget
class LabelWidget(Widget):
    """
    A simple label showing a static text.
    """
    def __init__(self, x, y, w, h, font, color, label, outline=False):
        """
        Constructor.
        @param x: x position
        @param y: y position
        @param w: width
        @param h: height
        @param font: pygame.Font object for the type face
        @param color: pygame.Color object for the text color
        @param label: text of the label
        @param outline: draw a border around the label
        """
        super().__init__(x, y, w, h)
        self.font = font
        self.color = color
        self.label = label
        self.outline = outline
        self._redraw = True       # forces a draw on the next update()
        self._bg_backup = None    # background pixels covered by the text
    def set_label(self, label):
        """
        Set a new display text for the label.
        """
        self.label = label
        self._redraw = True
    def update(self, events, surface):
        """
        Check and process the events relevant for this element.
        @param events: list of pygame.Event objects
        @param surface: pygame.Surface of the main window
        """
        for event in events:
            pass  # no events are handled yet; placeholder kept as-is
        if self._redraw:
            self.draw(surface)
    def draw(self, surface):
        """
        Draw the element on screen. So that the label can be changed at any
        time, a backup copy of the background covered by the label is kept.
        When the text changes, the background is first restored from this
        copy before the new text is painted.
        @param surface: pygame.Surface of the main window
        """
        self.clip(surface, True)
        if self._bg_backup:
            surface.blit(self._bg_backup, (self.x, self.y))
        text_surface = self.font.render(self.label, True, self.color)
        self._bg_backup = pygame.Surface((text_surface.get_width(), text_surface.get_height()))
        self._bg_backup.blit(surface, (0,0), pygame.Rect(self.x, self.y, text_surface.get_width(), text_surface.get_height()))
        surface.blit(text_surface, (self.x, self.y))
        if self.outline:
            pygame.draw.rect(surface, self.color, pygame.Rect(self.x, self.y, self.w, self.h), 1)
        self.clip(surface, False)
        self._redraw = False
| 32.948052 | 126 | 0.592038 |
731fb0b256aedf6f79e5439087f61efa1a97c3aa
| 4,258 |
py
|
Python
|
PlaidCTF/2020/misc/BonziScheme/app/bonzi/acsparse.py
|
mystickev/ctf-archives
|
89e99a5cd5fb6b2923cad3fe1948d3ff78649b4e
|
[
"MIT"
] | 1 |
2021-11-02T20:53:58.000Z
|
2021-11-02T20:53:58.000Z
|
PlaidCTF/2020/misc/BonziScheme/app/bonzi/acsparse.py
|
ruhan-islam/ctf-archives
|
8c2bf6a608c821314d1a1cfaa05a6cccef8e3103
|
[
"MIT"
] | null | null | null |
PlaidCTF/2020/misc/BonziScheme/app/bonzi/acsparse.py
|
ruhan-islam/ctf-archives
|
8c2bf6a608c821314d1a1cfaa05a6cccef8e3103
|
[
"MIT"
] | 1 |
2021-12-19T11:06:24.000Z
|
2021-12-19T11:06:24.000Z
|
import os
import struct
from tabulate import tabulate
import numpy as np
import bitstring
from PIL import Image
# Byte widths of the primitive field types used when parsing ACS files.
SZ_ULONG = 4
SZ_LONG = 4
SZ_USHORT = 2
SZ_SHORT = 2
SZ_BYTE = 1
SZ_WCHAR = 2
class ACSParseException(Exception):
    """Raised when ACS data cannot be parsed."""

    def __init__(self, value):
        """Store *value*, the description of the failure."""
        super().__init__()
        self._value = value

    def __str__(self):
        return f"ACSParseException: {self._value}"
class ACS(object):
    """Sequential reader over a binary buffer.

    Keeps a cursor (= starting offset + bytes consumed so far) so that
    fields and nested structures can be unpacked one after another.
    """

    def __init__(self, buf, offset):
        self._buf = buf
        self._offset = offset
        self._size = 0

    def _cursor(self):
        # Absolute position of the next unread byte.
        return self._offset + self._size

    def unpack_data(self, format):
        """Unpack a struct format string at the cursor and advance past it."""
        values = struct.unpack_from(format, self._buf, offset=self._cursor())
        self._size += struct.calcsize(format)
        return values

    def unpack_struct(self, ClassType, *argv):
        """Parse a nested structure at the cursor and advance past it."""
        nested = ClassType(self._buf, self._cursor(), *argv)
        self._size += nested.get_size()
        return nested

    def unpack_chunk(self, size):
        """Return the next *size* raw bytes and advance the cursor."""
        start = self._cursor()
        raw = self._buf[start:start + size]
        self._size += size
        return raw

    def get_size(self):
        """Number of bytes consumed so far."""
        return self._size

    def get_offset(self):
        """Starting offset of this structure inside the buffer."""
        return self._offset
# ====================
# The class bodies below were redacted in this distribution (the original
# comment "Bonz says no" is kept). Docstrings are added so each stub is a
# syntactically valid empty class.
class ACSHeader(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ACSLocator(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ACSList(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ACSCharacterInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class TrayIcon(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class IconImage(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class BitmapInfoHeader(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class Guid(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class LocalizedInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class VoiceInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class BalloonInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class PaletteColor(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class StateInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class LangID(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class String(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class RGBQuad(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ACSAnimationInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ACSImageInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ACSAudioInfo(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class DataBlock(ACS):
    """Parser stub — implementation redacted in this dump."""
    # Bonz says no
class ImageInfo(ACS):
    """Image record of an ACS file: bit-level helpers plus rendering of the
    8-bit indexed pixel data to a PNG via a palette (color_table)."""
    # Bonz says no
    # But then bonz says yes
    def bytes_to_bitstream(self, data_bytes):
        # Convert bytes to a BitArray, reversing the bit order inside each
        # byte (LSB-first layout).
        data_bitstream = bitstring.BitArray(data_bytes)
        for i in range(0, len(data_bitstream), 8):
            data_bitstream.reverse(i, i+8)
        return data_bitstream
    def bitstream_to_bytes(self, data_bitstream, offset, length):
        # Inverse of bytes_to_bitstream for a slice: re-reverse each byte,
        # left-pad to a whole number of bytes, return raw bytes.
        data_bytes = data_bitstream[offset:offset+length]
        for i in range(0, len(data_bytes), 8):
            data_bytes.reverse(i, min(len(data_bytes), i+8))
        if len(data_bytes) % 8 != 0:
            data_bytes.prepend("0b" + "0"*(8-(len(data_bytes) % 8)))
        return data_bytes.bytes
    def bitstream_to_value(self, data_bitstream, offset, length):
        # Read `length` bits at `offset` as an integer (bits reversed, then
        # interpreted big-endian via the hex representation).
        data_bytes = data_bitstream[offset:offset+length]
        data_bytes.reverse()
        if len(data_bytes) % 8 != 0:
            data_bytes.prepend("0b" + "0"*(8-(len(data_bytes) % 8)))
        return int(data_bytes.hex, 16)
    def get_image(self, data, filename, color_table, idx_transparent):
        # Render the palette-indexed pixel buffer `data` to `filename`.
        # NOTE(review): idx_transparent is accepted but never used here;
        # lSrcScanBytes/lTrgScanBytes/count are computed but unused.
        # assumes self.width/self.height are set elsewhere — TODO confirm.
        lSrcScanBytes = (self.width + 3) & 0xfc
        # Each is RGBQUAD (4 bytes: R,G,B,Reserved)
        lTrgScanBytes = self.width * 4
        image_data = np.zeros((self.height,self.width,3), dtype=np.uint8)
        count = 0
        for y in range(self.height):
            lSrcNdx = y * self.width
            for x in range(self.width):
                try:
                    color = color_table[data[lSrcNdx]].color
                except Exception as e:
                    # TODO: why not just exit? why catch exception?
                    continue
                # Rows are written bottom-up (height-1-y).
                image_data[self.height-1-y,x] = [color.red, color.green, color.blue]
                lSrcNdx += 1
        pic = Image.fromarray(image_data)
        pic.save(filename)
    def decompress_img_in_place(self):
        """Stub — decompression body redacted in this dump ("Bonz say NO.")."""
        # Bonz say NO.
| 22.892473 | 85 | 0.57891 |
b451487f15ab6120eb0d8548df4f0621080d7fdf
| 2,936 |
py
|
Python
|
python/coursera_python/MICHIGAN/WEB/week6/json_1.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 16 |
2018-11-26T08:39:42.000Z
|
2019-05-08T10:09:52.000Z
|
python/coursera_python/MICHIGAN/WEB/week6/json_1.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 8 |
2020-05-04T06:29:26.000Z
|
2022-02-12T05:33:16.000Z
|
python/coursera_python/MICHIGAN/WEB/week6/json_1.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 5 |
2020-02-11T16:02:21.000Z
|
2021-02-05T07:48:30.000Z
|
import json
data = '''
[
{
"name":"Sister",
"count":95
},
{
"name":"Sidharth",
"count":94
},
{
"name":"Ilona",
"count":93
},
{
"name":"Ruairidh",
"count":93
},
{
"name":"Virginie",
"count":92
},
{
"name":"Alanda",
"count":91
},
{
"name":"Taegen",
"count":90
},
{
"name":"Dexter",
"count":89
},
{
"name":"Ricards",
"count":87
},
{
"name":"Talorcan",
"count":79
},
{
"name":"Etienne",
"count":76
},
{
"name":"Dannii",
"count":75
},
{
"name":"Claire",
"count":74
},
{
"name":"Kerry",
"count":72
},
{
"name":"Kobe",
"count":71
},
{
"name":"Meghana",
"count":69
},
{
"name":"Flint",
"count":62
},
{
"name":"Alexia",
"count":61
},
{
"name":"Sabrina",
"count":58
},
{
"name":"Sanna",
"count":56
},
{
"name":"Nelly",
"count":53
},
{
"name":"Sukhpreet",
"count":50
},
{
"name":"Merina",
"count":50
},
{
"name":"Sammie",
"count":48
},
{
"name":"Ophelia",
"count":47
},
{
"name":"Alanas",
"count":46
},
{
"name":"Macie",
"count":46
},
{
"name":"Lukmaan",
"count":42
},
{
"name":"Paisley",
"count":38
},
{
"name":"Roos",
"count":37
},
{
"name":"Kaceylee",
"count":36
},
{
"name":"Annagayle",
"count":36
},
{
"name":"Pamela",
"count":35
},
{
"name":"Jaime",
"count":33
},
{
"name":"Leilani",
"count":30
},
{
"name":"Syeda",
"count":28
},
{
"name":"Maddison",
"count":28
},
{
"name":"Oonagh",
"count":27
},
{
"name":"Tammylee",
"count":24
},
{
"name":"Bohbi",
"count":20
},
{
"name":"Rodrigo",
"count":16
},
{
"name":"Alfee",
"count":16
},
{
"name":"Ebeny",
"count":16
},
{
"name":"Aleishia",
"count":13
},
{
"name":"Rosanna",
"count":11
},
{
"name":"Kaidey",
"count":10
},
{
"name":"Maisy",
"count":8
},
{
"name":"Bader",
"count":3
},
{
"name":"Jarred",
"count":1
},
{
"name":"Indy",
"count":1
}
]
'''
# Parse the embedded JSON user list, print each entry, and report the
# total of the "count" fields plus the number of records.
info = json.loads(data)
print('User count:',len(info))
total = 0
records = 0
for entry in info:
    print('Name : ',entry["name"])
    print('Count : ',entry["count"])
    total += int(entry["count"])
    records += 1
print("Sum : ",total)
print("Count : ",records)
| 13.048889 | 32 | 0.347752 |
c3579ddd4e8860bdd726d3ec8789219cad6e4451
| 938 |
py
|
Python
|
src/shl-deep-learning-prototyping-architectures/fcn.py
|
PhilippMatthes/diplom
|
b51356ec5773b385a308bc8d0d7867f519c390d6
|
[
"MIT"
] | 1 |
2021-04-13T20:16:36.000Z
|
2021-04-13T20:16:36.000Z
|
src/shl-deep-learning-prototyping-architectures/fcn.py
|
PhilippMatthes/diplom
|
b51356ec5773b385a308bc8d0d7867f519c390d6
|
[
"MIT"
] | null | null | null |
src/shl-deep-learning-prototyping-architectures/fcn.py
|
PhilippMatthes/diplom
|
b51356ec5773b385a308bc8d0d7867f519c390d6
|
[
"MIT"
] | null | null | null |
from tensorflow import keras
def make_fcn(input_shape, output_classes):
    """Build a Fully Convolutional Network classifier.

    Three Conv1D -> BatchNorm -> ReLU stages (128/8, 256/5, 128/3 as
    filters/kernel size), global average pooling, then a softmax head.

    Args:
        input_shape: shape of one input sample (without the batch axis).
        output_classes: number of output classes.

    Returns:
        An uncompiled keras.Model.
    """
    input_layer = keras.layers.Input(input_shape)
    x = input_layer
    for filters, kernel_size in ((128, 8), (256, 5), (128, 3)):
        x = keras.layers.Conv1D(filters=filters, kernel_size=kernel_size,
                                padding='same')(x)
        x = keras.layers.BatchNormalization()(x)
        x = keras.layers.Activation('relu')(x)
    pooled = keras.layers.GlobalAveragePooling1D()(x)
    output_layer = keras.layers.Dense(output_classes, activation='softmax')(pooled)
    return keras.models.Model(inputs=input_layer, outputs=output_layer)
| 36.076923 | 88 | 0.738806 |
c373143c027028354f1a8f5d197f94f7458d7b42
| 3,201 |
py
|
Python
|
rad/_DeepAE.py
|
dlegor/rad
|
7a34bf55ce21adc860ff28a2c9d66943acd6ae6a
|
[
"BSD-3-Clause"
] | 10 |
2020-10-08T21:57:23.000Z
|
2022-02-18T04:30:46.000Z
|
rad/_DeepAE.py
|
dlegor/rad
|
7a34bf55ce21adc860ff28a2c9d66943acd6ae6a
|
[
"BSD-3-Clause"
] | 1 |
2020-07-07T17:27:27.000Z
|
2020-07-09T12:20:43.000Z
|
rad/_DeepAE.py
|
dlegor/rad
|
7a34bf55ce21adc860ff28a2c9d66943acd6ae6a
|
[
"BSD-3-Clause"
] | 1 |
2021-01-18T15:46:29.000Z
|
2021-01-18T15:46:29.000Z
|
import tensorflow as tf
import numpy as np
def batches(l, n):
    """Yield successive n-sized batches from l, the last batch is the left indexes."""
    start = 0
    while start < l:
        yield range(start, min(l, start + n))
        start += n
class Deep_Autoencoder(object):
    """Deep autoencoder (TensorFlow 1 graph mode) with tied decoder weights.

    The decoder reuses the transposed encoder weights ``W_list``; only the
    encoder weights plus per-layer biases are trained, with an Adam
    optimizer on 0.5 * sum of squared reconstruction error.

    Args:
        sess: active tf.Session; variables are initialized immediately.
        input_dim_list: layer widths; the first entry is the input dimension.
            (Kept as a mutable default for interface compatibility; it is
            never mutated.)
        transfer_function: activation used by every encoder/decoder layer.
        learning_rate: Adam step size.
    """
    def __init__(self, sess, input_dim_list=[7,64,64,7],transfer_function=tf.nn.relu,learning_rate=0.001):
        """input_dim_list must include the original data dimension"""
        self.W_list = []
        self.encoding_b_list = []
        self.decoding_b_list = []
        self.dim_list = input_dim_list
        self.transfer = transfer_function
        # BUG FIX: was hard-coded to 0.001, silently ignoring the
        # learning_rate argument. The default is unchanged, so existing
        # callers behave identically.
        self.learning_rate = learning_rate
        ## Encoders parameters
        for i in range(len(input_dim_list)-1):
            # Uniform init bound scaled by the two adjacent layer widths.
            init_max_value = 4*np.sqrt(6. / (self.dim_list[i] + self.dim_list[i+1]))
            self.W_list.append(tf.Variable(tf.random_uniform([self.dim_list[i],self.dim_list[i+1]],
                                                             np.negative(init_max_value),init_max_value)))
            self.encoding_b_list.append(tf.Variable(tf.random_uniform([self.dim_list[i+1]],-0.1,0.1)))
        ## Decoders parameters (biases only; weights are the transposed W_list)
        for i in range(len(input_dim_list)-2,-1,-1):
            self.decoding_b_list.append(tf.Variable(tf.random_uniform([self.dim_list[i]],-0.1,0.1)))
        ## Placeholder for input
        self.input_x = tf.placeholder(tf.float32,[None,self.dim_list[0]])
        ## coding graph :
        last_layer = self.input_x
        for weight,bias in zip(self.W_list,self.encoding_b_list):
            hidden = self.transfer(tf.matmul(last_layer,weight) + bias)
            last_layer = hidden
        self.hidden = hidden
        ## decode graph (tied, transposed weights):
        for weight,bias in zip(reversed(self.W_list),self.decoding_b_list):
            hidden = self.transfer(tf.matmul(last_layer,tf.transpose(weight)) + bias)
            last_layer = hidden
        self.recon = last_layer
        # 0.5 * sum of squared reconstruction error.
        self.cost = 0.5 * tf.reduce_sum(tf.pow(tf.subtract(self.recon, self.input_x), 2.0))
        self.train_step = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.cost)
        sess.run(tf.global_variables_initializer())
    def fit(self, X, sess,iteration=100, batch_size=12, init=False,verbose=False):
        """Train on X for `iteration` passes of mini-batches.

        Args:
            X: array of shape (n_samples, input_dim).
            sess: session to run the graph in.
            iteration: number of passes over the data.
            batch_size: mini-batch size.
            init: re-initialize all variables before training.
            verbose: print the last batch cost every 20 iterations.
        """
        assert X.shape[1] == self.dim_list[0]
        if init:
            sess.run(tf.global_variables_initializer())
        sample_size = X.shape[0]
        for i in range(iteration):
            for one_batch in batches(sample_size, batch_size):
                e,op=sess.run((self.cost,self.train_step),feed_dict = {self.input_x:X[one_batch]})
            if verbose and i%20==0:
                print(" iteration :", i ,", cost:", e)
    def transform(self, X, sess):
        """Return the bottleneck (encoder) representation of X."""
        return self.hidden.eval(session = sess, feed_dict={self.input_x: X})
    def getRecon(self, X, sess):
        """Return the reconstruction of X."""
        return self.recon.eval(session = sess,feed_dict={self.input_x: X})
| 44.458333 | 106 | 0.619494 |
5eed8fe31f55d4ab61ed6132e152cbbd398cd3b7
| 595 |
py
|
Python
|
crack/hashing/passwdtest.py
|
wittrup/crap
|
a77474588fd54a5a998e24df7b1e6e2ab473ded1
|
[
"MIT"
] | 1 |
2017-12-12T13:58:08.000Z
|
2017-12-12T13:58:08.000Z
|
crack/hashing/passwdtest.py
|
wittrup/crap
|
a77474588fd54a5a998e24df7b1e6e2ab473ded1
|
[
"MIT"
] | null | null | null |
crack/hashing/passwdtest.py
|
wittrup/crap
|
a77474588fd54a5a998e24df7b1e6e2ab473ded1
|
[
"MIT"
] | 1 |
2019-11-03T10:16:35.000Z
|
2019-11-03T10:16:35.000Z
|
import re
from passwdformats import FILE_FORMAT # FILE_FORMAT['smbpasswd'] = ['name', 'uid', 'LM_hash', 'NTLM_hash', 'Account Flags', 'Last Change Time']

file = 'target.passwd'
file_format = 'custom'
userlist = {}
# Five ':'-terminated fields. Odd-numbered groups capture the ':' delimiters,
# so groups()[::2] below yields just the field values.
pattern = r'(.*)(:)(.*)(:)(.*)(:)(.*)(:)(.*)(:)'
# FIX: use a context manager so the file handle is always closed
# (the original opened the file and never closed it).
with open(file) as f:
    for line in f:
        match = re.match(pattern, line)#, re.I | re.U)
        # FIX: idiomatic truth test on the Match object instead of
        # hasattr(match, 'group').
        if match:
            print(match.groups())
            uname = match.group(1)
            # Map field names from the format table onto the captured values.
            userlist[uname] = dict(zip(FILE_FORMAT[file_format], match.groups()[::2]))
print('-'*50)
for key, value in userlist.items():
    print(key, value)
| 29.75 | 143 | 0.603361 |
82657dfefdc6cc6b96d89ad990bde80ff0c4fecb
| 738 |
py
|
Python
|
ImageData.py
|
FrieAT/MD_CompressedWavelet
|
82bd10edd611485cd5f0b81da744e07a3b7c98eb
|
[
"MIT"
] | 2 |
2020-03-28T11:50:45.000Z
|
2020-12-08T13:36:26.000Z
|
ImageData.py
|
FrieAT/MD_CompressedWavelet
|
82bd10edd611485cd5f0b81da744e07a3b7c98eb
|
[
"MIT"
] | 2 |
2020-04-20T11:12:59.000Z
|
2020-05-11T05:37:36.000Z
|
ImageData.py
|
FrieAT/MD_CompressedWavelet
|
82bd10edd611485cd5f0b81da744e07a3b7c98eb
|
[
"MIT"
] | null | null | null |
from OrigPic import OrigPic
from WaveletPic import WaveletPic
from StationaryWaveletPic import StationaryWaveletPic
from FVExtraction import FVExtraction
from ScanAssets import ScanAssets
from PipelineManager import PipelineManager
from LOOCV import LOOCV
from EuclideanDistance import EuclideanDistance
from kNearestNeighbour import kNearestNeighbour
from DTCWaveletPic import DTCWaveletPic
from CachedFile import CachedFile
from CropImageByClass import CropImageByClass
from SavePic import SavePic
from TargetCompressedByType import TargetCompressedByType
from ConvertFormat import ConvertFormat
from NIQE import NIQE
from BIQAA import BIQAA
from EncodeToFileList import EncodeToFileList
from IProcess import CachedFileLoadedException
| 35.142857 | 57 | 0.895664 |
8185e15a8b6a72ac97cf93c79161000ca6bfa057
| 247 |
py
|
Python
|
python/python_backup/Python_Progs/PYTHON_LEGACY_PROJECTS/hypotenuese_of_triangle_sqrt.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 16 |
2018-11-26T08:39:42.000Z
|
2019-05-08T10:09:52.000Z
|
python/python_backup/Python_Progs/PYTHON_LEGACY_PROJECTS/hypotenuese_of_triangle_sqrt.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 8 |
2020-05-04T06:29:26.000Z
|
2022-02-12T05:33:16.000Z
|
python/python_backup/Python_Progs/PYTHON_LEGACY_PROJECTS/hypotenuese_of_triangle_sqrt.py
|
SayanGhoshBDA/code-backup
|
8b6135facc0e598e9686b2e8eb2d69dd68198b80
|
[
"MIT"
] | 5 |
2020-02-11T16:02:21.000Z
|
2021-02-05T07:48:30.000Z
|
from math import hypot

# Compute the hypotenuse of a right triangle from its two shorter sides.
print("Input the lengths of the shorter sides of the triangle")
a = float(input("enter the 1 side of the triangle"))
b = float(input("enter the 2 side of the triangle"))
# FIX: math.hypot computes sqrt(a*a + b*b) without overflow/underflow of the
# intermediate squares, unlike the hand-rolled sqrt(a**2 + b**2).
c = hypot(a, b)
print("The length of the triangle = ", c)
| 35.285714 | 63 | 0.724696 |
81d7541d7d020b438677dddabf2aae1a5a9876c6
| 37,097 |
py
|
Python
|
src/test/tests/unit/atts_assign.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 226 |
2018-12-29T01:13:49.000Z
|
2022-03-30T19:16:31.000Z
|
src/test/tests/unit/atts_assign.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 5,100 |
2019-01-14T18:19:25.000Z
|
2022-03-31T23:08:36.000Z
|
src/test/tests/unit/atts_assign.py
|
visit-dav/vis
|
c08bc6e538ecd7d30ddc6399ec3022b9e062127e
|
[
"BSD-3-Clause"
] | 84 |
2019-01-24T17:41:50.000Z
|
2022-03-10T10:01:46.000Z
|
# ----------------------------------------------------------------------------
# CLASSES: nightly
#
# Test Case: atts_assign.py
#
# Tests: Behavior of assignment for attribute objects. Ensures good cases
# succeed and bad cases fail with specific python exceptions. Tests variety
# of types present in members of VisIt attribute objects. Tests both
# assignment usage (e.g. atts.memberName=...) and setter function usage
# (e.g. atts.SetMemberName(...))
#
# Mark C. Miller, Tue Jun 8 15:51:59 PDT 2021
#
# Modifications:
# Kathleen Biagas, Tue July 27, 2021
# Assigning Max32BitInt+1 to int on Windows causes TypeError, not
# ValueError, so change expected results in those cases.
#
# ----------------------------------------------------------------------------
import copy, io, sys
# Some useful global variables
X = [2,4,6]                    # a list object, used both as valid sequence input and wrong-typed input
Max32BitInt = 2147483647       # 2^31 - 1: largest signed 32-bit integer
Max32BitInt1 = Max32BitInt+1   # first value that overflows a signed 32-bit int
MaxIntAs32BitFloat = 16777216  # 2^24: largest int exactly representable as a 32-bit float
MaxIntAs32BitFloat1 = MaxIntAs32BitFloat+1
MaxIntAs64BitFloat = 9007199254740992  # 2^53: largest int exactly representable as a 64-bit float
MaxIntAs64BitFloat1 = MaxIntAs64BitFloat+1
Max32BitFloat = 3.402823E+38   # approx. FLT_MAX for IEEE-754 single precision
Max32BitFloatA = 3.402820E+37 # One order mag down from Max
Max32BitFloatB = 3.402823E+39 # One order mag up from Max
Min32BitFloat = 1.175494E-38   # approx. FLT_MIN (smallest normalized single-precision float)
# version of repr that strips parens at end
def repr2(s):
return repr(s).lstrip('(').rstrip(')')
def TestAssignmentToTuple():
    """Exercise assignment to a tuple member ('point1' of CylinderAttributes).

    Verifies that bad names raise NameError/ValueError, wrong arity and wrong
    element types raise TypeError, and that valid 3-tuples are accepted via
    both `=` assignment and the Set method.
    """
    TestSection('Assignment to tuple, "point1", member (of CylinderAttributes())')
    ca = CylinderAttributes()
    # Non-existent member name 'point'
    try:
        ca.point = 1,2,3
        TestFOA('ca.point=1,2,3', LINE())
    except NameError:
        TestPOA('ca.point=1,2,3')
        pass
    except:
        TestFOA('ca.point=1,2,3', LINE())
        pass
    # Non-existent member name 'point'
    try:
        ca.SetPoint(1,2,3)
        TestFOA('ca.SetPoint(1,2,3)', LINE())
    except ValueError:
        TestPOA('ca.SetPoint(1,2,3)')
        pass
    except:
        TestFOA('ca.SetPoint(1,2,3)', LINE())
        pass
    # CSV too short
    try:
        ca.point1 = 1,2
        TestFOA('ca.point1=1,2', LINE())
    except TypeError:
        TestPOA('ca.point1=1,2')
        pass
    except:
        TestFOA('ca.point1=1,2', LINE())
        pass
    # CSV too long
    try:
        ca.point1 = 1,2,3,4
        TestFOA('ca.point1=1,2,3,4', LINE())
    except TypeError:
        TestPOA('ca.point1=1,2,3,4')
        pass
    except:
        TestFOA('ca.point1=1,2,3,4', LINE())
        pass
    # The above cases can't be put in a loop. Put remaining cases in a loop
    fails = [(1,2), (1,2,3,4), '123', (1,1+2j,3), (1,X,3), (1,'b',3), (1,None,3)]
    for i in range(len(fails)):
        try:
            ca.point1 = fails[i]
            TestFOA('ca.point1=%s'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.point1=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('ca.point1=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ca.SetPoint1(fails[i])
            TestFOA('ca.SetPoint1(%s)'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.SetPoint1(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('ca.SetPoint1(%s)'%repr2(fails[i]), LINE())
            pass
    try:
        ca.point1 = 1,2,3
        TestPOA('ca.point1=1,2,3')
    except:
        TestFOA('ca.point1=1,2,3', LINE())
        pass
    # Valid values: ints, floats, and an existing list converted to tuple.
    works = [(1,2,3), (1.1,2.2,3.3), tuple(X)]
    for i in range(len(works)):
        try:
            ca.point1 = works[i]
            TestPOA('ca.point1=%s'%repr2(works[i]))
        except:
            TestFOA('ca.point1=%s'%repr2(works[i]), LINE())
            pass
    for i in range(len(works)):
        try:
            ca.SetPoint1(*works[i])
            TestPOA('ca.SetPoint1(%s)'%repr2(works[i]))
        except:
            TestFOA('ca.SetPoint1(%s)'%repr2(works[i]), LINE())
            pass
def TestAssignmentToBool():
    """Exercise assignment to a bool member ('inverse' of CylinderAttributes).

    Non-boolean types must raise TypeError; out-of-range ints (5) raise
    ValueError; 0/1/True/False are accepted via `=` and the Set method.
    """
    TestSection('Assignment to bool member, "inverse", (of CylinderAttributes())')
    ca = CylinderAttributes()
    try:
        ca.inverse = 1,2
        TestFOA('ca.inverse=1,2', LINE())
    except TypeError:
        TestPOA('ca.inverse=1,2')
        pass
    except:
        TestFOA('ca.inverse=1,2', LINE())
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails =  [ '123', 1+2j, X, None, 5]
    excpts = [TypeError, TypeError, TypeError, TypeError, ValueError]
    for i in range(len(fails)):
        try:
            ca.inverse = fails[i]
            TestFOA('ca.inverse=%s'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ca.inverse=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('ca.inverse=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ca.SetInverse(fails[i])
            TestFOA('ca.SetInverse(%s)'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ca.SetInverse(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('ca.SetInverse(%s)'%repr(fails[i]), LINE())
            pass
    works = [0, 1, True, False]
    for i in range(len(works)):
        try:
            ca.inverse = works[i]
            TestPOA('ca.inverse=%s'%repr(works[i]))
        except:
            TestFOA('ca.inverse=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            ca.SetInverse(works[i])
            TestPOA('ca.SetInverse(%s)'%repr(works[i]))
        except:
            TestFOA('ca.SetInverse(%s)'%repr(works[i]), LINE())
def TestAssignmentToInt():
    """Exercise assignment to an int member ('samplesPerRay' of VolumeAttributes).

    Wrong types raise TypeError; 32-bit overflow raises ValueError on
    non-Windows platforms and TypeError on Windows (hence the sys.platform
    branch for the expected-exception table).
    """
    TestSection('Assignment to int member, "samplesPerRay", (of VolumeAttributes())')
    va = VolumeAttributes()
    try:
        va.samplesPerRay = 1,2
        TestFOA('va.samplesPerRay=1,2', LINE())
    except TypeError:
        TestPOA('va.samplesPerRay=1,2')
        pass
    except:
        TestFOA('va.samplesPerRay=1,2', LINE())
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails =  [ '123', 1+2j, None, X, Max32BitInt1]
    if sys.platform.startswith("win"):
        excpts = [TypeError, TypeError, TypeError, TypeError, TypeError]
    else:
        excpts = [TypeError, TypeError, TypeError, TypeError, ValueError]
    for i in range(len(fails)):
        try:
            va.samplesPerRay = fails[i]
            TestFOA('va.samplesPerRay=%s'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('va.samplesPerRay=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('va.samplesPerRay=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            va.SetSamplesPerRay(fails[i])
            TestFOA('va.SetSamplesPerRay(%s)'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('va.SetSamplesPerRay(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('va.SetSamplesPerRay(%s)'%repr(fails[i]), LINE())
            pass
    works = [0, 1, -1, 5, True, False, Max32BitInt]
    for i in range(len(works)):
        try:
            va.samplesPerRay = works[i]
            TestPOA('va.samplesPerRay=%s'%repr(works[i]))
        except:
            TestFOA('va.samplesPerRay=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            va.SetSamplesPerRay(works[i])
            TestPOA('va.SetSamplesPerRay(%s)'%repr(works[i]))
        except:
            TestFOA('va.SetSamplesPerRay(%s)'%repr(works[i]), LINE())
def TestAssignmentToFloat():
    """Exercise assignment to a float member ('opacityAttenuation' of VolumeAttributes).

    Wrong types raise TypeError; a value beyond single-precision range
    (Max32BitFloatB) raises ValueError; in-range numerics and bools work.
    """
    TestSection('Assignment to float member, "opacityAttenuation", (of VolumeAttributes())')
    va = VolumeAttributes()
    try:
        va.opacityAttenuation = 1,2
        TestFOA('va.opacityAttenuation=1,2', LINE())
    except TypeError:
        TestPOA('va.opacityAttenuation=1,2')
        pass
    except:
        TestFOA('va.opacityAttenuation=1,2', LINE())
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails =  [ '123', 1+2j, None, X, Max32BitFloatB]
    excpts = [TypeError, TypeError, TypeError, TypeError, ValueError]
    for i in range(len(fails)):
        try:
            va.opacityAttenuation = fails[i]
            TestFOA('va.opacityAttenuation=%s'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('va.opacityAttenuation=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('va.opacityAttenuation=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            va.SetOpacityAttenuation(fails[i])
            TestFOA('va.SetOpacityAttenuation(%s)'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('va.SetOpacityAttenuation(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('va.SetOpacityAttenuation(%s)'%repr(fails[i]), LINE())
            pass
    works = [0, 1, -1, 0.3, Max32BitFloatA, True, False]
    for i in range(len(works)):
        try:
            va.opacityAttenuation = works[i]
            TestPOA('va.opacityAttenuation=%s'%repr(works[i]))
        except:
            TestFOA('va.opacityAttenuation=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            va.SetOpacityAttenuation(works[i])
            TestPOA('va.SetOpacityAttenuation(%s)'%repr(works[i]))
        except:
            TestFOA('va.SetOpacityAttenuation(%s)'%repr(works[i]), LINE())
def TestAssignmentToDouble():
    """Exercise assignment to a double member ('radius' of CylinderAttributes).

    Wrong types raise TypeError. Unlike the float member test there is no
    range-overflow case: 1.1E-479 / 1.1E+479 are valid Python floats
    (0.0 / inf) and are accepted by the double member.
    """
    TestSection('Assignment to double member, "radius", (of CylinderAttributes())')
    ca = CylinderAttributes()
    try:
        ca.radius = 1,2
        TestFOA('ca.radius=1,2', LINE())
    except TypeError:
        TestPOA('ca.radius=1,2')
        pass
    except:
        TestFOA('ca.radius=1,2', LINE())
        pass
    fails = ['123', 1+2j, None, X]
    for i in range(len(fails)):
        try:
            ca.radius = fails[i]
            TestFOA('ca.radius=%s'%repr(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.radius=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('ca.radius=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ca.SetRadius(fails[i])
            TestFOA('ca.SetRadius(%s)'%repr(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.SetRadius(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('ca.SetRadius(%s)'%repr(fails[i]), LINE())
            pass
    works = [0, 1, -1, 5.5, 1.1E-479, 1.1E+479, True, False]
    for i in range(len(works)):
        try:
            ca.radius = works[i]
            TestPOA('ca.radius=%s'%repr(works[i]))
        except:
            TestFOA('ca.radius=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            ca.SetRadius(works[i])
            TestPOA('ca.SetRadius(%s)'%repr(works[i]))
        except:
            TestFOA('ca.SetRadius(%s)'%repr(works[i]), LINE())
def TestAssignmentToString():
    """Exercise assignment to a string member ('designator' of CurveAttributes).

    Any non-string value (including numbers, None, sequences) raises
    TypeError; any string, including the empty string, is accepted.
    """
    TestSection('Assignment to string member, "designator", (of CurveAttributes())')
    ca = CurveAttributes()
    try:
        ca.designator = "123","abc"
        TestFOA('ca.designator="123","abc"', LINE())
    except TypeError:
        TestPOA('ca.designator="123","abc"')
        pass
    except:
        TestFOA('ca.designator="123","abc"', LINE())
        pass
    fails = [0, 1, 1.1, 1+2j, None, X]
    for i in range(len(fails)):
        try:
            ca.designator = fails[i]
            TestFOA('ca.designator=%s'%repr(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.designator=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('ca.designator=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ca.SetDesignator(fails[i])
            TestFOA('ca.SetDesignator(%s)'%repr(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.SetDesignator(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('ca.SetDesignator(%s)'%repr(fails[i]), LINE())
            pass
    works = ['123', 'abc', '']
    for i in range(len(works)):
        try:
            ca.designator = works[i]
            TestPOA('ca.designator=%s'%repr(works[i]))
        except:
            TestFOA('ca.designator=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            ca.SetDesignator(works[i])
            TestPOA('ca.SetDesignator(%s)'%repr(works[i]))
        except:
            TestFOA('ca.SetDesignator(%s)'%repr(works[i]), LINE())
def TestAssignmentToGlyphType():
    """Exercise assignment to a GlyphType member ('pointType' of MeshAttributes).

    Negative values raise ValueError; values too large for the underlying C
    int and non-numeric types raise TypeError; small ints, bools, and the
    named enum constant (ma.Point) are accepted.
    """
    TestSection('Assignment to GlyphType member, "pointType", (of MeshAttributes())')
    ma = MeshAttributes()
    # Test direct assignment with = operator
    try:
        ma.pointType = 1
        TestPOA('ma.pointType=1')
    except:
        TestFOA('ma.pointType=1', LINE())
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails =  [ '123', 1+2j, None, X, -1, 123123123123123123123123123123]
    excpts = [TypeError, TypeError, TypeError, TypeError, ValueError, TypeError]
    for i in range(len(fails)):
        try:
            ma.pointType = fails[i]
            TestFOA('ma.pointType=%s'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ma.pointType=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('ma.pointType=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ma.SetPointType(fails[i])
            TestFOA('ma.SetPointType(%s)'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ma.SetPointType(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('ma.SetPointType(%s)'%repr(fails[i]), LINE())
            pass
    works = [0, 1, 5, True, False, ma.Point]
    for i in range(len(works)):
        try:
            ma.pointType = works[i]
            TestPOA('ma.pointType=%s'%repr(works[i]))
        except:
            TestFOA('ma.pointType=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            ma.SetPointType(works[i])
            TestPOA('ma.SetPointType(%s)'%repr(works[i]))
        except:
            TestFOA('ma.SetPointType(%s)'%repr(works[i]), LINE())
def TestAssignmentToEnum():
    """Exercise assignment to an Enum member ('smoothingLevel' of MeshAttributes).

    Out-of-range enum values raise ValueError; non-int-convertible types and
    values too large for a C int raise TypeError; valid ordinals, bools, and
    the named constant (ma.Fast) are accepted.
    """
    TestSection('Assignment to Enum member, "smoothingLevel", (of MeshAttributes())')
    ma = MeshAttributes()
    # Test direct assignment with = operator
    try:
        ma.smoothingLevel = 1
        TestPOA('ma.smoothingLevel=1')
    except:
        TestFOA('ma.smoothingLevel=1', LINE())
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails =  [ '123', 1+2j, None, X, -1, 123123123, 123123123123123123123123123123]
    excpts = [TypeError, TypeError, TypeError, TypeError, ValueError, ValueError, TypeError]
    for i in range(len(fails)):
        try:
            ma.smoothingLevel = fails[i]
            TestFOA('ma.smoothingLevel=%s'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ma.smoothingLevel=%s'%repr(fails[i]))
            pass
        except:
            TestFOA('ma.smoothingLevel=%s'%repr(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ma.SetSmoothingLevel(fails[i])
            TestFOA('ma.SetSmoothingLevel(%s)'%repr(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ma.SetSmoothingLevel(%s)'%repr(fails[i]))
            pass
        except:
            TestFOA('ma.SetSmoothingLevel(%s)'%repr(fails[i]), LINE())
            pass
    works = [0, 1, 2, True, False, ma.Fast]
    for i in range(len(works)):
        try:
            ma.smoothingLevel = works[i]
            TestPOA('ma.smoothingLevel=%s'%repr(works[i]))
        except:
            TestFOA('ma.smoothingLevel=%s'%repr(works[i]), LINE())
    for i in range(len(works)):
        try:
            ma.SetSmoothingLevel(works[i])
            # FIX: message previously read 'ma.SmoothingLevel(...)', inconsistent
            # with the method actually called and with every sibling test.
            TestPOA('ma.SetSmoothingLevel(%s)'%repr(works[i]))
        except:
            TestFOA('ma.SetSmoothingLevel(%s)'%repr(works[i]), LINE())
def TestAssignmentToUCharVector():
    """Exercise assignment to a ucharVector member ('changedColors' of
    MultiCurveAttributes).

    Tuples containing non-uchar-convertible elements raise TypeError; tuples
    of small ints / bools are accepted via `=` and the star-deref Set method.
    """
    TestSection('Assignment to ucharVector member, "changedColors", (of MultiCurveAttributes())')
    mca = MultiCurveAttributes()
    # Test direct assignment with = operator
    try:
        mca.changedColors = 1,2,3
        TestPOA('mca.changedColors=1,2,3')
    except:
        TestFOA('mca.changedColors=1,2,3', LINE())
        pass
    fails = [(1,123123123123123123123123123123,3), (1,1+2j,3), (1,X,3), (1,'b',3), (1,None,3), ('123',)]
    for i in range(len(fails)):
        try:
            mca.changedColors = fails[i]
            TestFOA('mca.changedColors=%s'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('mca.changedColors=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('mca.changedColors=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            mca.SetChangedColors(*fails[i])
            TestFOA('mca.SetChangedColors(%s)'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('mca.SetChangedColors(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('mca.SetChangedColors(%s)'%repr2(fails[i]), LINE())
            pass
    works = [(1,2,3), tuple(X), (1,True,3), (1,False,3)]
    for i in range(len(works)):
        try:
            mca.changedColors = works[i]
            TestPOA('mca.changedColors=%s'%repr2(works[i]))
        except:
            TestFOA('mca.changedColors=%s'%repr2(works[i]), LINE())
    for i in range(len(works)):
        try:
            mca.SetChangedColors(*works[i])
            TestPOA('mca.SetChangedColors(%s)'%repr2(works[i]))
        except:
            TestFOA('mca.SetChangedColors(%s)'%repr2(works[i]), LINE())
def TestAssignmentToIntVector():
    """Exercise assignment to an intVector member ('index' of OnionPeelAttributes).

    32-bit overflow raises ValueError on non-Windows and TypeError on Windows
    (hence the platform branch); other bad element types raise TypeError.
    Lists, tuples, bools, and Max32BitInt are all accepted.
    """
    TestSection('Assignment to intVector member, "index", (of OnionPeelAttributes())')
    opa = OnionPeelAttributes()
    # Test direct assignment with = operator
    try:
        opa.index = 1,2,3
        TestPOA('opa.index=1,2,3')
    except:
        TestFOA('opa.index=1,2,3', LINE())
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails = [(Max32BitInt1,), (1+2j,), ('b',), (None,), (1,Max32BitInt1,3),
             (1,1+2j,3), (1,X,3), (1,'b',3), (1,None,3)]
    if sys.platform.startswith("win"):
        excpts = [TypeError, TypeError, TypeError, TypeError, TypeError,
                  TypeError, TypeError, TypeError, TypeError]
    else:
        excpts = [ValueError, TypeError, TypeError, TypeError, ValueError,
                  TypeError, TypeError, TypeError, TypeError]
    for i in range(len(fails)):
        try:
            opa.index = fails[i]
            TestFOA('opa.index=%s'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('opa.index=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('opa.index=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            opa.SetIndex(*fails[i])
            TestFOA('opa.SetIndex(%s)'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('opa.SetIndex(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('opa.SetIndex(%s)'%repr2(fails[i]), LINE())
            pass
    works = [(1,2,3), X, tuple(X), (1,True,3), (1,False,3), (1,Max32BitInt,3)]
    for i in range(len(works)):
        try:
            opa.index = works[i]
            TestPOA('opa.index=%s'%repr2(works[i]))
        except:
            TestFOA('opa.index=%s'%repr2(works[i]), LINE())
    for i in range(len(works)):
        try:
            opa.SetIndex(*works[i])
            TestPOA('opa.SetIndex(%s)'%repr2(works[i]))
        except:
            TestFOA('opa.SetIndex(%s)'%repr2(works[i]), LINE())
def TestAssignmentToDoubleVector():
    """Exercise assignment to a doubleVector member ('contourValue' of
    ContourAttributes).

    NOTE(review): the section title says member "values" but the code uses
    'contourValue' throughout — presumably the title is stale; confirm
    against ContourAttributes.
    """
    TestSection('Assignment to doubleVector member, "values", (of ContourAttributes())')
    ca = ContourAttributes()
    # Test direct assignment with = operator
    try:
        ca.contourValue = 1,2,3
        TestPOA('ca.contourValue=1,2,3')
    except:
        TestFOA('ca.contourValue=1,2,3', LINE())
        pass
    fails = [(1+2j,), ('b',), (None,), (1,1+2j,3), (1,X,3), (1,'b',3), (1,None,3)]
    for i in range(len(fails)):
        try:
            ca.contourValue = fails[i]
            TestFOA('ca.contourValue=%s'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.contourValue=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('ca.contourValue=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ca.SetContourValue(*fails[i])
            TestFOA('ca.SetContourValue(%s)'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('ca.SetContourValue(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('ca.SetContourValue(%s)'%repr2(fails[i]), LINE())
            pass
    works = [(1,2,3), X, tuple(X), (1,True,3), (1,False,3)]
    for i in range(len(works)):
        try:
            ca.contourValue = works[i]
            TestPOA('ca.contourValue=%s'%repr2(works[i]))
        except:
            TestFOA('ca.contourValue=%s'%repr2(works[i]), LINE())
    for i in range(len(works)):
        try:
            ca.SetContourValue(*works[i])
            TestPOA('ca.SetContourValue(%s)'%repr2(works[i]))
        except:
            TestFOA('ca.SetContourValue(%s)'%repr2(works[i]), LINE())
def TestAssignmentToUCharArray():
    """Exercise assignment to a fixed-size (256) ucharArray member
    ('freeformOpacity' of VolumeAttributes).

    Covers three access forms: (index, value) pair assignment, whole-array
    assignment, and the Set method with either star-deref or a single tuple
    argument. Bad indices raise IndexError, out-of-range uchar values raise
    ValueError, and non-convertible types raise TypeError.
    """
    TestSection('Assignment to ucharArray member, "freeformOpacity", (of VolumeAttributes())')
    arr = [17,]*256
    va = VolumeAttributes()
    # Test assigning to individual entry via direct (operator =) assignment
    try:
        va.freeformOpacity = 3,17
        TestPOA('va.freeformOpacity=3,17')
    except:
        TestFOA('va.freeformOpacity=3,17', LINE())
        pass
    # Test assigning to individual entry via Set method
    try:
        va.SetFreeformOpacity(3,17)
        TestPOA('va.SetFreeformOpacity(3,17)')
    except:
        TestFOA('va.SetFreeformOpacity(3,17)', LINE())
        pass
    # Test assigning to whole array via (operator =) assignment
    try:
        va.freeformOpacity = tuple(arr)
        TestPOA('va.freeformOpacity=tuple(arr)')
    except:
        TestFOA('va.freeformOpacity=tuple(arr)', LINE())
        pass
    # Test assigning to whole array via Set method
    try:
        va.SetFreeformOpacity(*tuple(arr))
        TestPOA('va.SetFreeformOpacity(*tuple(arr))')
    except:
        TestFOA('va.SetFreeformOpacity(*tuple(arr))', LINE())
        pass
    # Test assigning to individual entry via direct (operator =) assignment
    # failures for type of second argument (color value)
    fails =  [ (3,None), (3,1+2j), (3,X), (3,'123'), (None,17), (1+2j,17), (X,17),('123',17), (-3,17), (3,1700)]
    excpts = [TypeError, TypeError, TypeError, TypeError, TypeError, TypeError, TypeError, TypeError, IndexError, ValueError]
    for i in range(len(fails)):
        try:
            va.freeformOpacity = fails[i][0],fails[i][1]
            TestFOA('va.freeformOpacity=%s,%s'%(repr(fails[i][0]),repr(fails[i][1])), LINE())
        except excpts[i]:
            TestPOA('va.freeformOpacity=%s,%s'%(repr(fails[i][0]),repr(fails[i][1])))
            pass
        except:
            TestFOA('va.freeformOpacity=%s,%s'%(repr(fails[i][0]),repr(fails[i][1])), LINE())
            pass
    for i in range(len(fails)):
        try:
            va.SetFreeformOpacity(fails[i][0],fails[i][1])
            TestFOA('va.SetFreeformOpacity(%s,%s)'%(repr(fails[i][0]),repr(fails[i][1])), LINE())
        except excpts[i]:
            TestPOA('va.SetFreeformOpacity(%s,%s)'%(repr(fails[i][0]),repr(fails[i][1])))
            pass
        except:
            TestFOA('va.SetFreeformOpacity(%s,%s)'%(repr(fails[i][0]),repr(fails[i][1])), LINE())
            pass
    # Test assigning to whole member via direct (operator =) assignment
    try:
        va.freeformOpacity = (17,)*256
        TestPOA('va.freeformOpacity=(17,)*256')
    except:
        TestFOA('va.freeformOpacity=(17,)*256', LINE())
        pass
    # Test assigning to whole member via Set method
    try:
        va.SetFreeformOpacity(*(17,)*256)
        TestPOA('va.SetFreeformOpacity((17,)*256)')
    except:
        TestFOA('va.SetFreeformOpacity((17,)*256)', LINE())
        pass
    # Test assigning to whole member via direct (operator =) assignment
    # failures for type of first argument (index)
    # Each arrN poisons one slot of a full-length array with a bad type.
    arr1 = copy.deepcopy(arr)
    arr2 = copy.deepcopy(arr)
    arr3 = copy.deepcopy(arr)
    arr4 = copy.deepcopy(arr)
    arr5 = copy.deepcopy(arr)
    arr1[3] = None
    arr2[3] = 1+2j
    arr3[3] = X
    arr4[3] = (1,2,3)
    arr5[3] = '123'
    fails = [tuple(arr1), tuple(arr2), tuple(arr3), tuple(arr4), tuple(arr5)]
    for i in range(len(fails)):
        try:
            va.freeformOpacity = fails[i]
            TestFOA('va.freeformOpacity=%s'%repr(fails[i][:7]).replace(')',', ...'), LINE())
        except TypeError:
            TestPOA('va.freeformOpacity=%s'%repr(fails[i][:7]).replace(')',', ...'))
            pass
        except:
            TestFOA('va.freeformOpacity=%s'%repr(fails[i][:7]).replace(')',', ...'), LINE())
            pass
    # Test star-deref of tuple
    for i in range(len(fails)):
        try:
            va.SetFreeformOpacity(*fails[i])
            TestFOA('va.SetFreeformOpacity%s'%repr(fails[i][:7]).replace(')',', ...)'), LINE())
        except TypeError:
            TestPOA('va.SetFreeformOpacity%s'%repr(fails[i][:7]).replace(')',', ...)'))
            pass
        except:
            TestFOA('va.SetFreeformOpacity%s'%repr(fails[i][:7]).replace(')',', ...)'), LINE())
            pass
    # Test just passing the tuple
    for i in range(len(fails)):
        try:
            va.SetFreeformOpacity(fails[i])
            TestFOA('va.SetFreeformOpacity(fails[%d])'%i, LINE())
        except TypeError:
            TestPOA('va.SetFreeformOpacity(fails[%d])'%i)
            pass
        except:
            TestFOA('va.SetFreeformOpacity(fails[%d])'%i, LINE())
            pass
def TestAssignmentToIntArray():
    """Exercise assignment to a fixed-size (8) intArray member ('reflections'
    of ReflectAttributes).

    Wrong element types and wrong lengths raise TypeError; 32-bit overflow
    raises ValueError on non-Windows and TypeError on Windows (hence the
    platform branch in the expected-exception table).
    """
    TestSection('Assignment to intArray member, "reflections", (of ReflectAttributes())')
    ra = ReflectAttributes()
    # Test assigning via (operator =) assignment
    try:
        ra.reflections = 0,1,0,1,0,1,0,1
        TestPOA('ra.reflections=0,1,0,1,0,1,0,1')
    except:
        TestFOA('ra.reflections=0,1,0,1,0,1,0,1', LINE())
        pass
    # fails[i] is expected to raise excpts[i]; last two cases are wrong length.
    fails = [(0,1,None,1,0,1,0,1), (0,1,1+2j,1,0,1,0,1), (0,1,X,1,0,1,0,1),
             (0,1,Max32BitInt1,1,0,1,0,1), (0,1,'123',1,0,1,0,1),
             (0,1,0,1,0,1,0,1,1), (0,1,0,1,0,1,0)]
    if sys.platform.startswith("win"):
        excpts = [TypeError, TypeError, TypeError, TypeError, TypeError, TypeError, TypeError]
    else:
        excpts = [TypeError, TypeError, TypeError, ValueError, TypeError, TypeError, TypeError]
    for i in range(len(fails)):
        try:
            ra.reflections = fails[i]
            TestFOA('ra.reflections=%s'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ra.reflections=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('ra.reflections=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ra.SetReflections(*fails[i])
            TestFOA('ra.SetReflections(%s)'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ra.SetReflections(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('ra.SetReflections(%s)'%repr2(fails[i]), LINE())
            pass
    works = [(0,1,0,1,0,1,0,1), (-1,100,-1,100,-1,100,-1,100), (0,True,False,1,0,1,0,1), (0,1,Max32BitInt,1,0,1,0,1)]
    for i in range(len(works)):
        try:
            ra.reflections = works[i]
            TestPOA('ra.reflections=%s'%repr2(works[i]))
        except:
            TestFOA('ra.reflections=%s'%repr2(works[i]), LINE())
    for i in range(len(works)):
        try:
            ra.SetReflections(*works[i])
            TestPOA('ra.SetReflections(%s)'%repr2(works[i]))
        except:
            TestFOA('ra.SetReflections(%s)'%repr2(works[i]), LINE())
def TestAssignmentToFloatArray():
    """Exercise assignment to a fixed-size (3) floatArray member ('center' of
    RadialResampleAttributes).

    Wrong lengths and wrong element types raise TypeError; a value beyond
    single-precision range (Max32BitFloatB) raises ValueError.
    """
    TestSection('Assignment to floatArray member, "center", (of RadialResampleAttributes())')
    rra = RadialResampleAttributes()
    # Test assigning via (operator =) assignment
    try:
        rra.center = 0,1,2
        TestPOA('rra.center=0,1,2')
    except:
        TestFOA('rra.center=0,1,2', LINE())
        pass
    try:
        rra.center = 0,1
        TestFOA('rra.center=0,1', LINE())
    except:
        TestPOA('rra.center=0,1')
        pass
    try:
        rra.center = 0,1,2,3
        TestFOA('rra.center=0,1,2,3', LINE())
    except:
        TestPOA('rra.center=0,1,2,3')
        pass
    # fails[i] is expected to raise excpts[i] (lists kept in lockstep).
    fails = [(0,1), (0,1,2,3), (0,None,2), (0,1+2j,2), (0,X,2), (0,'123',2), (0, Max32BitFloatB,2)]
    excpts = [TypeError, TypeError, TypeError, TypeError, TypeError, TypeError, ValueError]
    for i in range(len(fails)):
        try:
            rra.center = fails[i]
            TestFOA('rra.center=%s'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('rra.center=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('rra.center=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            rra.SetCenter(*fails[i])
            TestFOA('rra.SetCenter(%s)'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('rra.SetCenter(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('rra.SetCenter(%s)'%repr2(fails[i]), LINE())
            pass
    works = [(1,2,3), (1.1,2.2,3.3), tuple(X), (1,True,3), (1,False,3), (1,Max32BitFloatA,3)]
    for i in range(len(works)):
        try:
            rra.center = works[i]
            TestPOA('rra.center=%s'%repr2(works[i]))
        except:
            TestFOA('rra.center=%s'%repr2(works[i]), LINE())
    for i in range(len(works)):
        try:
            rra.SetCenter(*works[i])
            TestPOA('rra.SetCenter(%s)'%repr2(works[i]))
        except:
            TestFOA('rra.SetCenter(%s)'%repr2(works[i]), LINE())
def TestAssignmentToDoubleArray():
    """Exercise assignment to a fixed-size (4) doubleArray member
    ('materialProperties' of VolumeAttributes).

    Wrong lengths and wrong element types raise TypeError; numerics and
    bools in a 4-tuple are accepted via `=` and the star-deref Set method.
    """
    TestSection('Assignment to doubleArray member, "materialProperties", (of VolumeAttributes())')
    va = VolumeAttributes()
    # Test assigning via (operator =) assignment
    try:
        va.materialProperties = 0,1,2,3
        TestPOA('va.materialProperties=0,1,2,3')
    except:
        # FIX: failure message previously misspelled the member as
        # 'materialProperites'.
        TestFOA('va.materialProperties=0,1,2,3', LINE())
        pass
    try:
        va.materialProperties = 0,1,2
        TestFOA('va.materialProperties=0,1,2', LINE())
    except:
        TestPOA('va.materialProperties=0,1,2')
        pass
    try:
        va.materialProperties = 0,1,2,3,4
        TestFOA('va.materialProperties=0,1,2,3,4', LINE())
    except:
        TestPOA('va.materialProperties=0,1,2,3,4')
        pass
    fails = [(0,1), (0,1,2,3,4), (0,None,2,3), (0,1+2j,2,3), (0,X,2,3), (0,'123',2,3)]
    for i in range(len(fails)):
        try:
            va.materialProperties = fails[i]
            TestFOA('va.materialProperties=%s'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('va.materialProperties=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('va.materialProperties=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            va.SetMaterialProperties(*fails[i])
            TestFOA('va.SetMaterialProperties(%s)'%repr2(fails[i]), LINE())
        except TypeError:
            TestPOA('va.SetMaterialProperties(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('va.SetMaterialProperties(%s)'%repr2(fails[i]), LINE())
            pass
    works = [(1,2,3,4), (1.1,2.2,3.3,4.4), (1,True,3,4), (1,False,3,4)]
    for i in range(len(works)):
        try:
            va.materialProperties = works[i]
            TestPOA('va.materialProperties=%s'%repr2(works[i]))
        except:
            TestFOA('va.materialProperties=%s'%repr2(works[i]), LINE())
    for i in range(len(works)):
        try:
            va.SetMaterialProperties(*works[i])
            TestPOA('va.SetMaterialProperties(%s)'%repr2(works[i]))
        except:
            TestFOA('va.SetMaterialProperties(%s)'%repr2(works[i]), LINE())
def TestColorAttributeStuff():
    """Exercise ColorAttribute / ColorAttributeList behavior.

    Checks RGBA tuple validation on ColorAttribute.color/SetColor, adding
    colors to a ColorAttributeList, indexed retrieval with GetColors, and
    that the non-existent 'colors' member raises NameError.
    """
    TestSection('ColorAttribute stuff')
    cla = ColorAttributeList()
    ca = ColorAttribute()
    # fails[i] is expected to raise excpts[i]; last two are out-of-range
    # channel values (must be 0..255).
    fails = [(0,1,2), (0,1,2,3,4), (0,None,2,3), (0,1+2j,2,3), (0,X,2,3),
             (0,'123',2,3), (0,-1,2,3), (0,256,2,3)]
    excpts = [TypeError, TypeError, TypeError, TypeError, TypeError, TypeError, ValueError, ValueError]
    for i in range(len(fails)):
        try:
            ca.color = fails[i]
            TestFOA('ca.color=%s'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ca.color=%s'%repr2(fails[i]))
            pass
        except:
            TestFOA('ca.color=%s'%repr2(fails[i]), LINE())
            pass
    for i in range(len(fails)):
        try:
            ca.SetColor(*fails[i])
            TestFOA('ca.SetColor(%s)'%repr2(fails[i]), LINE())
        except excpts[i]:
            TestPOA('ca.SetColor(%s)'%repr2(fails[i]))
            pass
        except:
            TestFOA('ca.SetColor(%s)'%repr2(fails[i]), LINE())
            pass
    try:
        ca.color = (5,5,5,5)
        cla.AddColors(ca)
        ca.color = (255,0,0,255)
        cla.AddColors(ca)
        TestPOA('cla.AddColors')
    except:
        TestFOA('cla.AddColors', LINE())
        pass
    try:
        cla.colors
        TestFOA('cla.colors', LINE())
    except NameError:
        TestPOA('cla.colors')
    except:
        TestFOA('cla.colors', LINE())
        pass
    try:
        if cla.GetColors(0).color != (5,5,5,5) or cla.GetColors(1).color != (255,0,0,255):
            raise ValueError
        TestPOA('cla.GetColors(0)')
    except:
        # FIX: failure messages previously read 'cla.Getcolors(...)' (wrong
        # case), inconsistent with the method actually called.
        TestFOA('cla.GetColors(0)', LINE())
        pass
    try:
        cla.GetColors(2)
        TestFOA('cla.GetColors(2)', LINE())
    except ValueError:
        TestPOA('cla.GetColors(2)')
    except:
        TestFOA('cla.GetColors(2)', LINE())
        pass
def TestDirOutput(obj, minlen = 5, names = None):
    """Check that dir(obj) exposes a reasonable public API.

    obj    : object whose dir() listing is inspected (dunders are ignored).
    minlen : when truthy, minimum number of non-dunder names expected.
    names  : when given, an iterable of names that must all appear.
    """
    TestSection('behavior of dir()')
    try:
        x = [f for f in dir(obj) if not (f.startswith('__') and f.endswith('__'))]
        if minlen and len(x) < minlen:
            TestFOA('dir(%s): minlen: %d < %d'%(repr(obj),len(x),minlen), LINE())
        # FIX: guard names=None — the original unconditionally iterated
        # `names`, so the default argument raised TypeError and turned every
        # call into a reported failure.
        if names:
            x = [n for n in names if n in x]
            if len(x) != len(names):
                TestFOA('dir(%s): names: %s'%(repr(obj), names), LINE())
        # FIX: was TestPOA('dir(%s)'%repr()) — repr() with no argument raises
        # TypeError, so the success path could never be reached.
        TestPOA('dir(%s)'%repr(obj))
    except:
        TestFOA('dir(%s)'%repr(obj), LINE())
# Class to facilitate stdout redirect for testing `help()`
class my_redirect_stdout(list):
    """Context manager capturing sys.stdout; the object itself becomes the
    list of captured output lines once the `with` block exits.

    Example:
        with my_redirect_stdout() as out:
            print('hi')
        # out == ['hi']
    """
    def __enter__(self):
        # Remember the real stream, then swap in an in-memory buffer.
        self._stdout = sys.stdout
        self._stringio = io.StringIO()
        sys.stdout = self._stringio
        return self
    def __exit__(self, *args):
        # Split captured text into lines, drop the buffer, restore stdout.
        captured = self._stringio.getvalue()
        self.extend(captured.splitlines())
        del self._stringio # free up some memory
        sys.stdout = self._stdout
# Below import works only for Python > 3.4
# So, we use the class def above
# from contextlib import redirect_stdout
def TestHelpOutput(thing, minlen = 200, words = None):
    """Sanity-check help(thing) by capturing its stdout.

    Fails the test if the captured text is shorter than *minlen* (when
    minlen is truthy) or if any entry of *words* does not appear in it.
    """
    TestSection('behavior of help()')
    try:
        with my_redirect_stdout() as output:
            help(thing)
        # Fixed labels: these messages previously said 'dir(...)', which
        # made failures here indistinguishable from TestDirOutput failures.
        if minlen and len(str(output)) < minlen:
            TestFOA('help(%s): minlen: %d < %d'%(repr(thing),len(output),minlen), LINE())
        # Guard: words defaults to None; iterating it unguarded raised
        # TypeError and was mis-reported as a help() failure.
        if words:
            x = [w for w in words if w in str(output)]
            if len(x) != len(words):
                TestFOA('help(%s): words: %s'%(repr(thing), words), LINE())
    except:
        TestFOA('help(%s)'%repr(thing), LINE())
# --- Drive the test suites defined above, then exit -----------------------
# Scalar assignments
# TestAssignmentToUChar() No instances in any .xml files
TestAssignmentToBool()
TestAssignmentToInt()
TestAssignmentToFloat()
TestAssignmentToDouble()
TestAssignmentToString()
TestAssignmentToGlyphType()
TestAssignmentToEnum()
TestAssignmentToTuple()
# Vector assignments
TestAssignmentToUCharVector()
#TestAssignmentToBoolVector() No instances in any .xml files
TestAssignmentToIntVector()
#TestAssignmentToFloatVector() No instances in any .xml files
TestAssignmentToDoubleVector()
# Array assignments
TestAssignmentToUCharArray()
#TestAssignmentToBoolArray() No instances in any .xml files
TestAssignmentToIntArray()
TestAssignmentToFloatArray()
TestAssignmentToDoubleArray()
# Attribute Assignments
TestColorAttributeStuff()
# Test that dir(x) appears to work
#TestDirOutput(SILRestriction(), None, ['NumSets', 'TurnOnAll', 'Wholes', 'TopSets'])
#TestDirOutput(PseudocolorAttributes(), 50)
#TestDirOutput(ColorAttributeList(), None, ['AddColors', 'ClearColors', 'GetColors'])
# Test Help
#TestHelpOutput(AddPlot, None, ['plotType', 'variableName', 'inheritSIL'])
#TestHelpOutput(CreateDatabaseCorrelation, None,
#    ['IndexForIndexCorrelation', 'CycleCorrelation', 'StretchedIndexCorrelation'])
Exit()
| 32.742277 | 125 | 0.552848 |
c4ed2a6ac17eedb94300e52e351a0d35ed7a1ac4
| 2,376 |
py
|
Python
|
rasa/cli/arguments/interactive.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 30 |
2020-11-30T12:55:45.000Z
|
2022-01-20T02:53:03.000Z
|
rasa/cli/arguments/interactive.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 209 |
2020-03-18T18:28:12.000Z
|
2022-03-01T13:42:29.000Z
|
rasa/cli/arguments/interactive.py
|
chaneyjd/rasa
|
104a9591fc10b96eaa7fe402b6d64ca652b7ebe2
|
[
"Apache-2.0"
] | 10 |
2021-01-11T02:08:43.000Z
|
2021-11-19T09:12:46.000Z
|
import argparse
import uuid
from rasa.cli.arguments.default_arguments import (
add_domain_param,
add_stories_param,
add_model_param,
add_endpoint_param,
)
from rasa.cli.arguments.train import (
add_force_param,
add_data_param,
add_config_param,
add_out_param,
add_debug_plots_param,
add_augmentation_param,
add_persist_nlu_data_param,
)
from rasa.cli.arguments.run import add_port_argument
def set_interactive_arguments(parser: argparse.ArgumentParser) -> None:
    """Registers all CLI arguments for `rasa interactive`."""
    parser.add_argument(
        "--e2e",
        action="store_true",
        help=(
            "Save story files in e2e format. In this format user messages "
            "will be included in the stories."
        ),
    )
    add_port_argument(parser)
    add_model_param(parser, default=None)
    add_data_param(parser)
    _add_common_params(parser)
    training_group = _add_training_arguments(parser)
    add_force_param(training_group)
    add_persist_nlu_data_param(training_group)
def set_interactive_core_arguments(parser: argparse.ArgumentParser) -> None:
    """Registers all CLI arguments for `rasa interactive core`.

    Registration order determines the order in the generated --help output.
    """
    add_model_param(parser, model_name="Rasa Core", default=None)
    add_stories_param(parser)
    _add_common_params(parser)
    _add_training_arguments(parser)
    add_port_argument(parser)
def _add_common_params(parser: argparse.ArgumentParser) -> None:
    """Adds the flags shared by `rasa interactive` and `rasa interactive core`."""
    parser.add_argument(
        "--skip-visualization",
        action="store_true",
        default=False,
        help="Disable plotting the visualization during interactive learning.",
    )
    # A fresh random UUID is baked in as the default at registration time.
    parser.add_argument(
        "--conversation-id",
        default=uuid.uuid4().hex,
        help=(
            "Specify the id of the conversation the messages are in. Defaults to a "
            "UUID that will be randomly generated."
        ),
    )
    add_endpoint_param(
        parser,
        help_text="Configuration file for the model server and the connectors as a yml file.",
    )
# noinspection PyProtectedMember
def _add_training_arguments(parser: argparse.ArgumentParser) -> argparse._ArgumentGroup:
    """Creates and populates the 'Train Arguments' group; returns it so callers can extend it."""
    group = parser.add_argument_group("Train Arguments")
    add_config_param(group)
    add_domain_param(group)
    add_out_param(
        group, help_text="Directory where your models should be stored."
    )
    add_augmentation_param(group)
    add_debug_plots_param(group)
    return group
| 28.626506 | 94 | 0.732744 |
96bf52a9309ccdb253885c7cd0154552247cc74f
| 11,553 |
py
|
Python
|
ui.py
|
wdr-data/1live-plays
|
68ed22a659ea1540550bc9f5e70fdb83f362fa06
|
[
"MIT"
] | 3 |
2020-07-31T11:22:04.000Z
|
2020-07-31T19:59:18.000Z
|
ui.py
|
wdr-data/1live-plays
|
68ed22a659ea1540550bc9f5e70fdb83f362fa06
|
[
"MIT"
] | null | null | null |
ui.py
|
wdr-data/1live-plays
|
68ed22a659ea1540550bc9f5e70fdb83f362fa06
|
[
"MIT"
] | null | null | null |
from enum import Enum
import os
import pygame
import pygame.gfxdraw
import pygame.ftfont
import pygame.image
import pygame.transform
import numpy as np
import game_logic as game
from square_rect import SquareRect
from config import config
# Size of one board square in pixels; all layout below is in square units.
SQUARESIZE = 100
HALF_SQUARE = int(SQUARESIZE / 2)
# Piece radius leaves a 5 px margin inside a square.
RADIUS = int(HALF_SQUARE - 5)
COLOR_BOARD = (137, 149, 155)
BLACK = (0, 0, 0)
# Player colors come from the project config file.
COLOR_LEFT_PLAYER = config["players"]["left_player"]["color"]
COLOR_RIGHT_PLAYER = config["players"]["right_player"]["color"]
# Top-left corner of the board, in square units.
BOARD_OFFSET_X = 4.5
BOARD_OFFSET_Y = 3
# 16x9 squares -> a 16:9 window.
screen_width = 16 * SQUARESIZE
screen_height = 9 * SQUARESIZE
size = (screen_width, screen_height)
pygame.ftfont.init()
# FULLSCREEN env var toggles fullscreen mode at startup.
if os.environ.get("FULLSCREEN"):
    screen = pygame.display.set_mode(size, pygame.FULLSCREEN)
else:
    screen = pygame.display.set_mode(size)
class Fonts:
    """Pre-loaded per-player font objects plus per-font offset fudge factors."""
    # Vertical/horizontal text offsets per pixel of font height; call sites
    # multiply these by font.get_height() to align the two font families.
    VOFFSET = {"left_player": 0.08 / SQUARESIZE, "right_player": -0.035 / SQUARESIZE}
    HOFFSET = {"left_player": 0, "right_player": 0.05 / SQUARESIZE}
    # Scoreboard fonts, one family per player.
    SCORE = {
        "left_player": pygame.ftfont.Font(
            "fonts/WDRSansUL-ExtraBold.otf", int((SQUARESIZE / 4) * 3)
        ),
        "right_player": pygame.ftfont.Font(
            "fonts/Barlow-Bold.otf", int((SQUARESIZE / 4) * 2.9)
        ),
    }
    # Column-number labels above the board.
    NUMBERS = pygame.ftfont.Font(
        "fonts/WDRSansUL-ExtraBold.otf", int((SQUARESIZE / 4) * 3)
    )
    # Game-end banner reuses the scoreboard fonts.
    GAME_END = SCORE
    # Large countdown digits.
    COUNTDOWN = {
        "left_player": pygame.ftfont.Font(
            "fonts/WDRSansUL-ExtraBold.otf", int(SQUARESIZE * 1.5)
        ),
        "right_player": pygame.ftfont.Font(
            "fonts/Barlow-Bold.otf", int(SQUARESIZE * 1.5)
        ),
    }
    # Status messages ("ist dran", "Keine Votes!").
    STATUS = {
        "left_player": pygame.ftfont.Font(
            "fonts/WDRSans-Bold.otf", int((SQUARESIZE / 5) * 3)
        ),
        "right_player": pygame.ftfont.Font(
            "fonts/Barlow-Bold.otf", int((SQUARESIZE / 5) * 3)
        ),
    }
    # Large status font scaled down for longer player names so they fit.
    STATUS_LARGE = {
        "left_player": pygame.ftfont.Font(
            "fonts/WDRSansUL-ExtraBold.otf",
            int(
                (SQUARESIZE / 4)
                * 5
                * (5 / len(config["players"]["left_player"]["name"]))
            ),
        ),
        "right_player": pygame.ftfont.Font(
            "fonts/Barlow-Bold.otf",
            int(
                (SQUARESIZE / 4)
                * 5
                * (5 / len(config["players"]["right_player"]["name"]))
            ),
        ),
    }
class Images:
    """Player logos pre-scaled once for the scoreboard and the status area."""
    # Original-resolution logos, one per configured player.
    LOGOS = {
        player: pygame.image.load(f"images/logo_{player}.png").convert_alpha()
        for player in config["players"]
    }
    # Scaled to one square of height, width preserving aspect ratio.
    SCORE_LOGOS = {
        player: pygame.transform.smoothscale(
            surf, (int(surf.get_width() * SQUARESIZE / surf.get_height()), SQUARESIZE)
        )
        for player, surf in LOGOS.items()
    }
    # Scaled to four squares of width, height preserving aspect ratio.
    STATUS_LOGOS = {
        player: pygame.transform.smoothscale(
            surf,
            (
                int(4 * SQUARESIZE),
                int(surf.get_height() * 4 * SQUARESIZE / surf.get_width()),
            ),
        )
        for player, surf in LOGOS.items()
    }
class Positions:
    """Fixed screen layout, expressed in square units (see SQUARESIZE)."""
    SCORE_HEIGHT = 1.0
    # X positions for the per-player status columns on each side of the board.
    CURRENT_PLAYER_LEFT_PLAYER_LEFT = 0.25
    CURRENT_PLAYER_RIGHT_PLAYER_LEFT = 11.75
    CURRENT_PLAYER = SquareRect(0, BOARD_OFFSET_Y - 1, 4, 3)
    GAME_END = SquareRect(0, 1, 16, 1)
    # Row above the board where the hovering vote piece is drawn.
    CURRENT_VOTE = SquareRect(BOARD_OFFSET_X, 1, game.COLUMN_COUNT, 1)
    COUNTDOWN = SquareRect(0, 6, 4, 2)
class Align(Enum):
    """Horizontal text alignment options for draw_text()/draw_hack_text()."""
    CENTER = "center"
    LEFT = "left"
    RIGHT = "right"
def draw_erase(square_rect, color=BLACK):
    """Fill the area covered by *square_rect* with *color* (black by default)."""
    pygame.draw.rect(screen, color, square_rect.get_rect(SQUARESIZE))
def draw_text(text, color, font, square_rect, align=Align.CENTER):
    """Erase *square_rect* and render *text* into it; returns the drawn SquareRect."""
    rect = square_rect.get_rect(SQUARESIZE)
    draw_erase(square_rect)
    drawn_text = font.render(text, 1, color)
    # Compensate for glyphs that extend above the usual cap height
    # (e.g. umlauts): measure ascent minus the tallest glyph's max-y.
    if not text:
        height_offset_umlaut = 0
    elif len(text) == 1:
        height_offset_umlaut = font.get_ascent() - font.metrics(text)[0][3]
    else:
        height_offset_umlaut = font.get_ascent() - max(
            *[metric[3] for metric in font.metrics(text)]
        )
    # Only shift downward-or-not-at-all; never push text up.
    height_offset_umlaut = min(0, height_offset_umlaut)
    text_rect = drawn_text.get_rect(
        center=(rect.left + int(rect.width / 2), rect.top + int(rect.height / 2))
    )
    text_rect.top += height_offset_umlaut / 2
    # Horizontal alignment overrides the centered left/right edge.
    if align is Align.LEFT:
        text_rect.left = rect.left
    if align is Align.RIGHT:
        text_rect.right = rect.right
    screen.blit(drawn_text, text_rect)
    return SquareRect.from_rect(text_rect, SQUARESIZE)
def draw_hack_text(text, color, font, square_rect, align=Align.CENTER):
    """
    Draw text, then blank out the underline the WDRSansUL font bakes into
    its glyphs (it is the only font with correct numbers, so we keep it
    and erase the underline afterwards as a hack).
    """
    drawn = draw_text(text, color, font, square_rect, align=align)
    underline = drawn.copy()
    # The baked-in underline sits in the bottom ~11% of the drawn text.
    underline.top = underline.bottom - 0.11 * drawn.height
    underline.height = 0.07 * drawn.height
    # Widen slightly on both sides so no underline pixels survive.
    underline.left -= 0.05 * drawn.height
    underline.width += 0.1 * drawn.height
    draw_erase(underline)
    return drawn
def draw_piece(left, top, color, scale=1):
    """Draw a filled, antialiased piece centered in the square at (left, top)."""
    center_x = int(left * SQUARESIZE) + HALF_SQUARE
    center_y = int(top * SQUARESIZE) + HALF_SQUARE
    radius = int(RADIUS * scale)
    pygame.gfxdraw.filled_circle(screen, center_x, center_y, radius, color)
    # Two antialiased outline passes strengthen the circle's edge.
    for _ in range(2):
        pygame.gfxdraw.aacircle(screen, center_x, center_y, radius, color)
def draw_image(source, rect, vertical_align="top", horizontal_align="left"):
    """Erase *rect*, blit *source* into it with the given alignment, and
    return the actually drawn area as a SquareRect."""
    draw_erase(rect)
    target = rect.get_rect(SQUARESIZE)
    slack_y = target.height - source.get_height()
    slack_x = target.width - source.get_width()
    if vertical_align == "center":
        target.top += int(slack_y / 2)
    elif vertical_align == "bottom":
        target.top += int(slack_y)
    if horizontal_align == "center":
        target.left += int(slack_x / 2)
    elif horizontal_align == "right":
        target.left += int(slack_x)
    return SquareRect.from_rect(screen.blit(source, target), SQUARESIZE)
def draw_board():
    """Render the board background and every cell's piece from game.board."""
    # Row 0 of the game state is the bottom row; flip so it draws last/lowest.
    flipped = np.flip(game.board, 0)
    for col in range(game.COLUMN_COUNT):
        for row in range(game.ROW_COUNT):
            left = col + BOARD_OFFSET_X
            top = row + BOARD_OFFSET_Y
            cell = (int(left * SQUARESIZE), int(top * SQUARESIZE), SQUARESIZE, SQUARESIZE)
            pygame.draw.rect(screen, COLOR_BOARD, cell)
            occupant = flipped[row][col]
            if occupant == 1:
                piece_color = COLOR_LEFT_PLAYER
            elif occupant == 2:
                piece_color = COLOR_RIGHT_PLAYER
            else:
                # Empty slot: draw a black hole.
                piece_color = BLACK
            draw_piece(left, top, piece_color)
def draw_current_vote(vote, turn):
    """Show the currently voted column as a hovering piece above the board,
    in the active player's color."""
    draw_erase(Positions.CURRENT_VOTE)
    draw_piece(
        BOARD_OFFSET_X + vote,
        Positions.CURRENT_VOTE.top,
        config["players"][turn]["color"],
    )
def draw_column_labels():
    """Write 1-based column numbers just above the board."""
    for col in range(game.COLUMN_COUNT):
        label_rect = SquareRect(BOARD_OFFSET_X + col, BOARD_OFFSET_Y - 0.8, 1, 0.8)
        draw_hack_text(str(col + 1), COLOR_BOARD, Fonts.NUMBERS, label_rect)
def draw_game_end(turn, tie=False):
    """Announce the winner (or a tie) in the banner area above the board."""
    if tie:
        color, text = COLOR_BOARD, "Unentschieden!".upper()
    else:
        color = config["players"][turn]["color"]
        text = f"{config['players'][turn]['name']} gewinnt!".upper()
    draw_hack_text(text, color, Fonts.GAME_END[turn], Positions.GAME_END)
def draw_current_player(turn):
    """Show the active player's logo plus an 'ist dran' label on their side,
    and erase the other side's status column."""
    color = config["players"][turn]["color"]
    # NOTE(review): 'text' is assigned but never used in this function.
    text = config["players"][turn]["name"]
    # Draw on the active player's side; erase the opposite side.
    if turn == "left_player":
        text_left = Positions.CURRENT_PLAYER_LEFT_PLAYER_LEFT
        erase_left = Positions.CURRENT_PLAYER_RIGHT_PLAYER_LEFT
    else:
        text_left = Positions.CURRENT_PLAYER_RIGHT_PLAYER_LEFT
        erase_left = Positions.CURRENT_PLAYER_LEFT_PLAYER_LEFT
    square_rect_logo = Positions.CURRENT_PLAYER.copy()
    square_rect_logo.left = text_left
    square_rect_erase = Positions.CURRENT_PLAYER.copy()
    square_rect_erase.left = erase_left
    draw_erase(square_rect_erase)
    draw_image(Images.STATUS_LOGOS[turn], square_rect_logo, vertical_align="center")
    font = Fonts.STATUS[turn]
    # Per-font vertical fudge factor (see Fonts.VOFFSET).
    font_voffset = font.get_height() * Fonts.VOFFSET[turn]
    square_rect_text = square_rect_logo.copy()
    square_rect_text.height = 1
    square_rect_erase.height = 1
    # The label sits three squares below the logo area.
    square_rect_text.top += 3 + font_voffset
    square_rect_erase.top += 3
    draw_erase(square_rect_erase)
    draw_text("ist dran", color, font, square_rect_text)
def draw_countdown(turn, time_left, no_votes_message):
    """Draw the remaining-seconds countdown on the active player's side and,
    optionally, a 'Keine Votes!' notice below it."""
    color = config["players"][turn]["color"]
    # Draw on the active player's side; erase the opposite side.
    if turn == "left_player":
        text_left = Positions.CURRENT_PLAYER_LEFT_PLAYER_LEFT
        erase_left = Positions.CURRENT_PLAYER_RIGHT_PLAYER_LEFT
    else:
        text_left = Positions.CURRENT_PLAYER_RIGHT_PLAYER_LEFT
        erase_left = Positions.CURRENT_PLAYER_LEFT_PLAYER_LEFT
    font = Fonts.COUNTDOWN[turn]
    # Per-font vertical fudge factor (see Fonts.VOFFSET).
    font_voffset = font.get_height() * Fonts.VOFFSET[turn]
    square_rect_text = Positions.COUNTDOWN.copy()
    square_rect_text.left = text_left
    square_rect_text.top += font_voffset
    square_rect_erase = Positions.COUNTDOWN.copy()
    square_rect_erase.left = erase_left
    draw_erase(square_rect_erase)
    square_rect_countdown = draw_text(str(time_left), color, font, square_rect_text)
    # Blank the baked-in underline at the bottom of the countdown digits
    # (same hack as draw_hack_text, with countdown-specific measurements).
    square_rect_countdown.top = square_rect_countdown.bottom - 0.15
    square_rect_countdown.height = 0.1
    draw_erase(square_rect_countdown)
    # No votes text
    font = Fonts.STATUS[turn]
    font_voffset = font.get_height() * Fonts.VOFFSET[turn]
    square_rect_text.top = 8 + font_voffset
    square_rect_text.height = 1
    draw_erase(square_rect_text, color=BLACK)
    if no_votes_message:
        draw_text("Keine Votes!", color, font, square_rect_text)
def draw_scoreboard(score):
    """Draw the top scoreboard: '<left name> N : M <right name>' with a
    small colored piece flanking each side."""
    player = "left_player"
    font = Fonts.SCORE[player]
    # Per-font fudge factors (see Fonts.VOFFSET/HOFFSET).
    font_voffset = font.get_height() * Fonts.VOFFSET[player]
    font_hoffset = font.get_height() * Fonts.HOFFSET[player]
    # Central colon anchors both halves of the scoreboard.
    colon_rect = SquareRect(7.85, font_voffset, 0.3, Positions.SCORE_HEIGHT)
    draw_hack_text(":", COLOR_BOARD, font, colon_rect)
    # Left half: name and score, right-aligned against the colon.
    left_player_rect = SquareRect(
        0, font_voffset, colon_rect.left, Positions.SCORE_HEIGHT,
    )
    left_player_rect.right = colon_rect.left - font_hoffset
    left_text_rect = draw_hack_text(
        f"{config['players'][player]['name']} {score[player]}",
        COLOR_LEFT_PLAYER,
        Fonts.SCORE[player],
        left_player_rect,
        align=Align.RIGHT,
    )
    # Small piece marker to the left of the left player's name.
    draw_piece(left_text_rect.left - 1, 0, COLOR_LEFT_PLAYER, scale=0.75)
    player = "right_player"
    font = Fonts.SCORE[player]
    font_voffset = font.get_height() * Fonts.VOFFSET[player]
    font_hoffset = font.get_height() * Fonts.HOFFSET[player]
    # Right half: score and name, left-aligned against the colon.
    right_player_rect = SquareRect(
        colon_rect.right + 0.01 + font_hoffset,
        font_voffset,
        colon_rect.left,
        Positions.SCORE_HEIGHT,
    )
    right_text_rect = draw_hack_text(
        f"{score[player]} {config['players'][player]['name']}",
        COLOR_RIGHT_PLAYER,
        font,
        right_player_rect,
        align=Align.LEFT,
    )
    # Small piece marker to the right of the right player's name.
    draw_piece(right_text_rect.right, 0, COLOR_RIGHT_PLAYER, scale=0.75)
| 30.243455 | 92 | 0.64321 |
fbde4302833ec88b5839af1a2bdd4789b1c8ae09
| 478 |
py
|
Python
|
main.py
|
fossabot/superstructure
|
f4ab5cac269fb3dedfbd3a54c441af23edf3840b
|
[
"MIT"
] | null | null | null |
main.py
|
fossabot/superstructure
|
f4ab5cac269fb3dedfbd3a54c441af23edf3840b
|
[
"MIT"
] | null | null | null |
main.py
|
fossabot/superstructure
|
f4ab5cac269fb3dedfbd3a54c441af23edf3840b
|
[
"MIT"
] | null | null | null |
from redisworks import Root
from superstructure.geist import Bewusstsein
# TODO find way to pickle objects
def main():
    """Load the persisted Weltgeist via redisworks, creating it on first run."""
    # BUG FIX: Root is a class and must be instantiated. The original
    # `root = Root` only manipulated plain class attributes, so nothing was
    # ever persisted to Redis (the inline comment shows the intent).
    root = Root()  # connects to redis on localhost by default
    try:
        weltgeist = root.weltgeist
    except BaseException:
        print("Creating new weltgeist")
        weltgeist = Bewusstsein(name="Weltgeist")
        root.weltgeist = weltgeist
    # print(weltgeist)
    print(root.weltgeist)
    root.weltgeist.spill()


if __name__ == "__main__":
    main()
| 20.782609 | 49 | 0.669456 |
f7cf7f75169ba1389eb6a8f9cbdc9b4d34034585
| 813 |
py
|
Python
|
resources/mechanics_lib/Rectangle.py
|
PRECISE/ROSLab
|
2a6a295b71d4c73bc5c6ae2ec0330274afa31d0d
|
[
"Apache-2.0"
] | 7 |
2016-01-20T02:33:00.000Z
|
2021-02-04T04:06:57.000Z
|
resources/mechanics_lib/Rectangle.py
|
PRECISE/ROSLab
|
2a6a295b71d4c73bc5c6ae2ec0330274afa31d0d
|
[
"Apache-2.0"
] | null | null | null |
resources/mechanics_lib/Rectangle.py
|
PRECISE/ROSLab
|
2a6a295b71d4c73bc5c6ae2ec0330274afa31d0d
|
[
"Apache-2.0"
] | 3 |
2016-10-05T07:20:30.000Z
|
2017-11-20T10:36:50.000Z
|
from api.component import Component
from api.shapes import Rectangle as Rect
class Rectangle(Component):
    """A simple rectangular component with four edge interfaces (t/b/l/r)."""
    def defParameters(self):
        # Length and width of the rectangle.
        self.newParameter("l")
        self.newParameter("w")
    def defInterfaces(self):
        # One interface per edge: top, bottom, left, right.
        self.newInterface("t")
        self.newInterface("b")
        self.newInterface("l")
        self.newInterface("r")
    def assemble(self):
        dx = self.getParameter("l")
        dy = self.getParameter("w")
        self.drawing = Rect(dx, dy)
        # Map the Rect's edges e0..e3 onto the named interfaces;
        # presumably edges run counter-clockwise from the bottom -- confirm
        # against api.shapes.Rectangle.
        self.setInterface("b", "e0")
        self.setInterface("r", "e1")
        self.setInterface("t", "e2")
        self.setInterface("l", "e3")
# Demo: build a 10x10 rectangle, scale it up, and show it in a Tkinter window.
if __name__ == "__main__":
    import utils.display
    h = Rectangle()
    h.setParameter("l", 10)
    h.setParameter("w", 10)
    h.make()
    h.drawing.transform(origin = (0, 0), scale=10)
    utils.display.displayTkinter(h.drawing)
| 23.911765 | 50 | 0.648216 |
7902ce0e0f474b9d52cbbbf288f0fab89b6b4bfe
| 4,422 |
py
|
Python
|
app.py
|
skrzypak/soaf
|
f742d4b090fad72893ed1f509f4abdbb020aa99d
|
[
"MIT"
] | null | null | null |
app.py
|
skrzypak/soaf
|
f742d4b090fad72893ed1f509f4abdbb020aa99d
|
[
"MIT"
] | null | null | null |
app.py
|
skrzypak/soaf
|
f742d4b090fad72893ed1f509f4abdbb020aa99d
|
[
"MIT"
] | null | null | null |
import glob
import shutil
import subprocess
import os
import sys
import argparse
# Read and save metadata from file
def exiftool_metadata(path):
    """Read a file's metadata by shelling out to ExifTool.

    Returns a dict mapping tag names to string values, parsed from
    ExifTool's "Key : Value" console output.
    """
    metadata = {}
    exifToolPath = 'exifTool.exe'
    ''' use Exif tool to get the metadata '''
    process = subprocess.Popen(
        [
            exifToolPath,
            path
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True
    )
    ''' get the tags in dict '''
    for tag in process.stdout:
        tag = tag.strip()
        sep = tag.find(':')
        if sep == -1:
            # Skip lines without a "key : value" separator; these previously
            # produced garbage entries (e.g. an empty-string key).
            continue
        key = tag[:sep].strip()
        value = tag[sep + 1:].strip()
        metadata[key] = value
    # Reap the child so it does not linger as a zombie after stdout closes.
    process.wait()
    return metadata
class File:
    """Wraps one media file's ExifTool metadata and the sorted-copy logic."""
    def __init__(self, path):
        self.metadata = exiftool_metadata(path)

    def _get_file_metadata(self, key, no=''):
        """Return the metadata value for *key*, or *no* when the tag is absent."""
        if key in self.metadata:
            return self.metadata[key]
        else:
            return no

    def copyCore(self, source, dst_dir: str, copy_duplicate=False):
        """Copy *source* into dst_dir/<media-type>/<year>/<month>/.

        Duplicate names get a _D<i> suffix (when the sizes differ or
        copy_duplicate is set); otherwise the copy is skipped.
        Returns a list of human-readable log messages.
        """
        logs = []
        # if value of metadata not exists - folder name
        no_metadata = 'none'
        date = File._get_file_metadata(self, 'Date/Time Original')
        if date == '':
            date = File._get_file_metadata(self, 'Create Date', no_metadata)
        mime_type = File._get_file_metadata(self, 'MIME Type', no_metadata)
        # e.g. <dst>/image/2020/07 for "image/jpeg" dated "2020:07:...".
        dst_dir += f'''/{mime_type[:mime_type.find('/')]}/{date[:4]}/{date[5:7]}'''
        filename = File._get_file_metadata(self, 'File Name')
        f_name = filename
        dst = dst_dir + '/' + filename
        # File with the same name exists in dst. If source and dst have same size then determines 'copy_duplicate'
        if os.path.isfile(dst):
            i = 0
            f_pth = File(dst)
            if_same_size: bool = f_pth._get_file_metadata("File Size") == File._get_file_metadata(self, 'File Size')
            if (not if_same_size) or copy_duplicate:
                # Probe name_D0, name_D1, ... until a free path is found.
                while os.path.isfile(dst):
                    filename = f'''{f_name[:f_name.find('.')]}_D{str(i)}.{File._get_file_metadata(self, 'File Type Extension')}'''
                    # BUG FIX: the candidate path must use the freshly
                    # generated filename. It previously pointed at the
                    # constant name "(unknown)", which ignored the suffix and
                    # could loop forever if that file existed.
                    dst = f'''{dst_dir}/{filename}'''
                    i = i + 1
                if if_same_size:
                    logs.append(f"Warning: file already exists but I must copy all files"
                                f" [copy_duplicate={copy_duplicate}], so I try do it ...")
                else:
                    logs.append(f"Warning: file already exists but have other size, so I try copy it ...")
            else:
                logs.append(f"Warning: file already duplicate [copy_exists={copy_duplicate}]."
                            f"\nCopy aboard: {source} -> {dst}")
                return logs
        try:
            if not os.path.isdir(dst_dir):
                os.makedirs(dst_dir)
                logs.append(f"New directory created: {dst_dir}")
            shutil.copy(source, dst)
            logs.append(f'''Copy done: {source} -> {dst}''')
        except Exception as e:
            logs.append(f'''Copy error [{e}]: {source} -> {dst}''')
        return logs
def main():
    """CLI entry point: walk the source tree and sort-copy every file by
    media type and date into the destination tree."""
    # Arguments from console
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', help="Obligatory: source directory path")
    parser.add_argument('-d', help="Obligatory: destination folder path")
    parser.add_argument('-e', help="Obligatory: copy duplicate files (T/True/F/False)")
    args = parser.parse_args(sys.argv[1:])
    # Setup variable
    source_dir = args.s
    dst_dir = args.d
    # Map the -e flag's accepted spellings to a bool.
    df = {
        "T": True,
        "TRUE": True,
        "F": False,
        "FALSE": False
    }
    try:
        copy_duplicate = df.get(args.e.upper(), False)
    except AttributeError:
        # -e was omitted (args.e is None) -> .upper() raises AttributeError.
        copy_duplicate = False
        print(f"app.py: error: unrecognized arguments. Use -h or --help to see options")
        exit(1)
    # Number of log
    l_lpm = 0
    # source_dir = 'C:/Users'
    # dst_dir = 'C:/Users'
    # copy_duplicate = False
    # Recursively process every file under source_dir.
    for f_inx, source in enumerate(glob.glob(source_dir + '/**/*.*', recursive=True)):
        try:
            f = File(source)
            print("----------")
            for log in f.copyCore(source, dst_dir, copy_duplicate):
                l_lpm = l_lpm + 1
                print(f'''{str(l_lpm)}.{f_inx + 1}) {log}''')
        except Exception as e:
            print(f'Copy error [{e}]: {source}')
if __name__ == '__main__':
    main()
| 32.755556 | 130 | 0.556083 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.